Update Python packages such as mako, tornado, etc. to their latest versions.

pull/111/head
Apex Liu 2018-09-14 06:00:33 +08:00
parent 209f5b8da3
commit 19a394b187
142 changed files with 18666 additions and 2430 deletions

@@ -0,0 +1,23 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
]
__title__ = "cryptography"
__summary__ = ("cryptography is a package which provides cryptographic recipes"
" and primitives to Python developers.")
__uri__ = "https://github.com/pyca/cryptography"
__version__ = "2.3.1"
__author__ = "The cryptography developers"
__email__ = "cryptography-dev@python.org"
__license__ = "BSD or Apache License, Version 2.0"
__copyright__ = "Copyright 2013-2017 {0}".format(__author__)

@@ -0,0 +1,16 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography.__about__ import (
__author__, __copyright__, __email__, __license__, __summary__, __title__,
__uri__, __version__
)
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
]

@@ -0,0 +1,57 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from enum import Enum
class _Reasons(Enum):
BACKEND_MISSING_INTERFACE = 0
UNSUPPORTED_HASH = 1
UNSUPPORTED_CIPHER = 2
UNSUPPORTED_PADDING = 3
UNSUPPORTED_MGF = 4
UNSUPPORTED_PUBLIC_KEY_ALGORITHM = 5
UNSUPPORTED_ELLIPTIC_CURVE = 6
UNSUPPORTED_SERIALIZATION = 7
UNSUPPORTED_X509 = 8
UNSUPPORTED_EXCHANGE_ALGORITHM = 9
UNSUPPORTED_DIFFIE_HELLMAN = 10
class UnsupportedAlgorithm(Exception):
def __init__(self, message, reason=None):
super(UnsupportedAlgorithm, self).__init__(message)
self._reason = reason
class AlreadyFinalized(Exception):
pass
class AlreadyUpdated(Exception):
pass
class NotYetFinalized(Exception):
pass
class InvalidTag(Exception):
pass
class InvalidSignature(Exception):
pass
class InternalError(Exception):
def __init__(self, msg, err_code):
super(InternalError, self).__init__(msg)
self.err_code = err_code
class InvalidKey(Exception):
pass
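
These exception types are raised throughout the package; a minimal sketch of how they surface to calling code (the MD5 request below is only an example of something a restricted backend may refuse):

from cryptography.exceptions import UnsupportedAlgorithm
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes

try:
    hashes.Hash(hashes.MD5(), backend=default_backend())
except UnsupportedAlgorithm as exc:
    # Raised when the backend was built without the requested algorithm.
    print("MD5 not available on this backend:", exc)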

@@ -0,0 +1,173 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import base64
import binascii
import os
import struct
import time
import six
from cryptography.exceptions import InvalidSignature
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, padding
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
from cryptography.hazmat.primitives.hmac import HMAC
class InvalidToken(Exception):
pass
_MAX_CLOCK_SKEW = 60
class Fernet(object):
def __init__(self, key, backend=None):
if backend is None:
backend = default_backend()
key = base64.urlsafe_b64decode(key)
if len(key) != 32:
raise ValueError(
"Fernet key must be 32 url-safe base64-encoded bytes."
)
self._signing_key = key[:16]
self._encryption_key = key[16:]
self._backend = backend
@classmethod
def generate_key(cls):
return base64.urlsafe_b64encode(os.urandom(32))
def encrypt(self, data):
current_time = int(time.time())
iv = os.urandom(16)
return self._encrypt_from_parts(data, current_time, iv)
def _encrypt_from_parts(self, data, current_time, iv):
if not isinstance(data, bytes):
raise TypeError("data must be bytes.")
padder = padding.PKCS7(algorithms.AES.block_size).padder()
padded_data = padder.update(data) + padder.finalize()
encryptor = Cipher(
algorithms.AES(self._encryption_key), modes.CBC(iv), self._backend
).encryptor()
ciphertext = encryptor.update(padded_data) + encryptor.finalize()
basic_parts = (
b"\x80" + struct.pack(">Q", current_time) + iv + ciphertext
)
h = HMAC(self._signing_key, hashes.SHA256(), backend=self._backend)
h.update(basic_parts)
hmac = h.finalize()
return base64.urlsafe_b64encode(basic_parts + hmac)
def decrypt(self, token, ttl=None):
timestamp, data = Fernet._get_unverified_token_data(token)
return self._decrypt_data(data, timestamp, ttl)
def extract_timestamp(self, token):
timestamp, data = Fernet._get_unverified_token_data(token)
# Verify the token was not tampered with.
self._verify_signature(data)
return timestamp
@staticmethod
def _get_unverified_token_data(token):
if not isinstance(token, bytes):
raise TypeError("token must be bytes.")
try:
data = base64.urlsafe_b64decode(token)
except (TypeError, binascii.Error):
raise InvalidToken
if not data or six.indexbytes(data, 0) != 0x80:
raise InvalidToken
try:
timestamp, = struct.unpack(">Q", data[1:9])
except struct.error:
raise InvalidToken
return timestamp, data
def _verify_signature(self, data):
h = HMAC(self._signing_key, hashes.SHA256(), backend=self._backend)
h.update(data[:-32])
try:
h.verify(data[-32:])
except InvalidSignature:
raise InvalidToken
def _decrypt_data(self, data, timestamp, ttl):
current_time = int(time.time())
if ttl is not None:
if timestamp + ttl < current_time:
raise InvalidToken
if current_time + _MAX_CLOCK_SKEW < timestamp:
raise InvalidToken
self._verify_signature(data)
iv = data[9:25]
ciphertext = data[25:-32]
decryptor = Cipher(
algorithms.AES(self._encryption_key), modes.CBC(iv), self._backend
).decryptor()
plaintext_padded = decryptor.update(ciphertext)
try:
plaintext_padded += decryptor.finalize()
except ValueError:
raise InvalidToken
unpadder = padding.PKCS7(algorithms.AES.block_size).unpadder()
unpadded = unpadder.update(plaintext_padded)
try:
unpadded += unpadder.finalize()
except ValueError:
raise InvalidToken
return unpadded
class MultiFernet(object):
def __init__(self, fernets):
fernets = list(fernets)
if not fernets:
raise ValueError(
"MultiFernet requires at least one Fernet instance"
)
self._fernets = fernets
def encrypt(self, msg):
return self._fernets[0].encrypt(msg)
def rotate(self, msg):
timestamp, data = Fernet._get_unverified_token_data(msg)
for f in self._fernets:
try:
p = f._decrypt_data(data, timestamp, None)
break
except InvalidToken:
pass
else:
raise InvalidToken
iv = os.urandom(16)
return self._fernets[0]._encrypt_from_parts(p, timestamp, iv)
def decrypt(self, msg, ttl=None):
for f in self._fernets:
try:
return f.decrypt(msg, ttl)
except InvalidToken:
pass
raise InvalidToken
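
For reference, the Fernet and MultiFernet classes above are typically used as follows (a minimal sketch; in practice the key comes from secure storage rather than being generated per run):

from cryptography.fernet import Fernet, MultiFernet

# Generate a fresh 32-byte, url-safe base64-encoded key and build an instance.
key = Fernet.generate_key()
f = Fernet(key)

# encrypt() produces an opaque token; decrypt() checks the HMAC and optional ttl.
token = f.encrypt(b"secret message")
assert f.decrypt(token, ttl=60) == b"secret message"

# MultiFernet encrypts with the first key and decrypts with any, enabling rotation.
new_key = Fernet.generate_key()
mf = MultiFernet([Fernet(new_key), Fernet(key)])
rotated = mf.rotate(token)              # re-encrypted under new_key, timestamp preserved
assert mf.decrypt(rotated) == b"secret message"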

@@ -0,0 +1,11 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
"""
Hazardous Materials
This is a "Hazardous Materials" module. You should ONLY use it if you're
100% absolutely sure that you know what you're doing because this module
is full of land mines, dragons, and dinosaurs with laser guns.
"""
from __future__ import absolute_import, division, print_function

@@ -0,0 +1,18 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
_default_backend = None
def default_backend():
global _default_backend
if _default_backend is None:
from cryptography.hazmat.backends.openssl.backend import backend
_default_backend = backend
return _default_backend
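
A short sketch of how this lazily imported default backend is consumed by the primitives layer:

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes

backend = default_backend()     # first call imports and caches the OpenSSL backend
digest = hashes.Hash(hashes.SHA256(), backend=backend)
digest.update(b"data")
print(digest.finalize().hex())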

@@ -0,0 +1,395 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class CipherBackend(object):
@abc.abstractmethod
def cipher_supported(self, cipher, mode):
"""
Return True if the given cipher and mode are supported.
"""
@abc.abstractmethod
def create_symmetric_encryption_ctx(self, cipher, mode):
"""
Get a CipherContext that can be used for encryption.
"""
@abc.abstractmethod
def create_symmetric_decryption_ctx(self, cipher, mode):
"""
Get a CipherContext that can be used for decryption.
"""
@six.add_metaclass(abc.ABCMeta)
class HashBackend(object):
@abc.abstractmethod
def hash_supported(self, algorithm):
"""
Return True if the hash algorithm is supported by this backend.
"""
@abc.abstractmethod
def create_hash_ctx(self, algorithm):
"""
Create a HashContext for calculating a message digest.
"""
@six.add_metaclass(abc.ABCMeta)
class HMACBackend(object):
@abc.abstractmethod
def hmac_supported(self, algorithm):
"""
Return True if the hash algorithm is supported for HMAC by this
backend.
"""
@abc.abstractmethod
def create_hmac_ctx(self, key, algorithm):
"""
Create a MACContext for calculating a message authentication code.
"""
@six.add_metaclass(abc.ABCMeta)
class CMACBackend(object):
@abc.abstractmethod
def cmac_algorithm_supported(self, algorithm):
"""
Returns True if the block cipher is supported for CMAC by this backend
"""
@abc.abstractmethod
def create_cmac_ctx(self, algorithm):
"""
Create a MACContext for calculating a message authentication code.
"""
@six.add_metaclass(abc.ABCMeta)
class PBKDF2HMACBackend(object):
@abc.abstractmethod
def pbkdf2_hmac_supported(self, algorithm):
"""
Return True if the hash algorithm is supported for PBKDF2 by this
backend.
"""
@abc.abstractmethod
def derive_pbkdf2_hmac(self, algorithm, length, salt, iterations,
key_material):
"""
Return length bytes derived from provided PBKDF2 parameters.
"""
@six.add_metaclass(abc.ABCMeta)
class RSABackend(object):
@abc.abstractmethod
def generate_rsa_private_key(self, public_exponent, key_size):
"""
Generate an RSAPrivateKey instance with public_exponent and a modulus
of key_size bits.
"""
@abc.abstractmethod
def rsa_padding_supported(self, padding):
"""
Returns True if the backend supports the given padding options.
"""
@abc.abstractmethod
def generate_rsa_parameters_supported(self, public_exponent, key_size):
"""
Returns True if the backend supports the given parameters for key
generation.
"""
@abc.abstractmethod
def load_rsa_private_numbers(self, numbers):
"""
Returns an RSAPrivateKey provider.
"""
@abc.abstractmethod
def load_rsa_public_numbers(self, numbers):
"""
Returns an RSAPublicKey provider.
"""
@six.add_metaclass(abc.ABCMeta)
class DSABackend(object):
@abc.abstractmethod
def generate_dsa_parameters(self, key_size):
"""
Generate a DSAParameters instance with a modulus of key_size bits.
"""
@abc.abstractmethod
def generate_dsa_private_key(self, parameters):
"""
Generate a DSAPrivateKey instance with parameters as a DSAParameters
object.
"""
@abc.abstractmethod
def generate_dsa_private_key_and_parameters(self, key_size):
"""
Generate a DSAPrivateKey instance using key size only.
"""
@abc.abstractmethod
def dsa_hash_supported(self, algorithm):
"""
Return True if the hash algorithm is supported by the backend for DSA.
"""
@abc.abstractmethod
def dsa_parameters_supported(self, p, q, g):
"""
Return True if the parameters are supported by the backend for DSA.
"""
@abc.abstractmethod
def load_dsa_private_numbers(self, numbers):
"""
Returns a DSAPrivateKey provider.
"""
@abc.abstractmethod
def load_dsa_public_numbers(self, numbers):
"""
Returns a DSAPublicKey provider.
"""
@abc.abstractmethod
def load_dsa_parameter_numbers(self, numbers):
"""
Returns a DSAParameters provider.
"""
@six.add_metaclass(abc.ABCMeta)
class EllipticCurveBackend(object):
@abc.abstractmethod
def elliptic_curve_signature_algorithm_supported(
self, signature_algorithm, curve
):
"""
Returns True if the backend supports the named elliptic curve with the
specified signature algorithm.
"""
@abc.abstractmethod
def elliptic_curve_supported(self, curve):
"""
Returns True if the backend supports the named elliptic curve.
"""
@abc.abstractmethod
def generate_elliptic_curve_private_key(self, curve):
"""
Return an object conforming to the EllipticCurvePrivateKey interface.
"""
@abc.abstractmethod
def load_elliptic_curve_public_numbers(self, numbers):
"""
Return an EllipticCurvePublicKey provider using the given numbers.
"""
@abc.abstractmethod
def load_elliptic_curve_private_numbers(self, numbers):
"""
Return an EllipticCurvePrivateKey provider using the given numbers.
"""
@abc.abstractmethod
def elliptic_curve_exchange_algorithm_supported(self, algorithm, curve):
"""
Returns whether the exchange algorithm is supported by this backend.
"""
@abc.abstractmethod
def derive_elliptic_curve_private_key(self, private_value, curve):
"""
Compute the private key given the private value and curve.
"""
@six.add_metaclass(abc.ABCMeta)
class PEMSerializationBackend(object):
@abc.abstractmethod
def load_pem_private_key(self, data, password):
"""
Loads a private key from PEM encoded data, using the provided password
if the data is encrypted.
"""
@abc.abstractmethod
def load_pem_public_key(self, data):
"""
Loads a public key from PEM encoded data.
"""
@abc.abstractmethod
def load_pem_parameters(self, data):
"""
Load encryption parameters from PEM encoded data.
"""
@six.add_metaclass(abc.ABCMeta)
class DERSerializationBackend(object):
@abc.abstractmethod
def load_der_private_key(self, data, password):
"""
Loads a private key from DER encoded data. Uses the provided password
if the data is encrypted.
"""
@abc.abstractmethod
def load_der_public_key(self, data):
"""
Loads a public key from DER encoded data.
"""
@abc.abstractmethod
def load_der_parameters(self, data):
"""
Load encryption parameters from DER encoded data.
"""
@six.add_metaclass(abc.ABCMeta)
class X509Backend(object):
@abc.abstractmethod
def load_pem_x509_certificate(self, data):
"""
Load an X.509 certificate from PEM encoded data.
"""
@abc.abstractmethod
def load_der_x509_certificate(self, data):
"""
Load an X.509 certificate from DER encoded data.
"""
@abc.abstractmethod
def load_der_x509_csr(self, data):
"""
Load an X.509 CSR from DER encoded data.
"""
@abc.abstractmethod
def load_pem_x509_csr(self, data):
"""
Load an X.509 CSR from PEM encoded data.
"""
@abc.abstractmethod
def create_x509_csr(self, builder, private_key, algorithm):
"""
Create and sign an X.509 CSR from a CSR builder object.
"""
@abc.abstractmethod
def create_x509_certificate(self, builder, private_key, algorithm):
"""
Create and sign an X.509 certificate from a CertificateBuilder object.
"""
@abc.abstractmethod
def create_x509_crl(self, builder, private_key, algorithm):
"""
Create and sign an X.509 CertificateRevocationList from a
CertificateRevocationListBuilder object.
"""
@abc.abstractmethod
def create_x509_revoked_certificate(self, builder):
"""
Create a RevokedCertificate object from a RevokedCertificateBuilder
object.
"""
@abc.abstractmethod
def x509_name_bytes(self, name):
"""
Compute the DER encoded bytes of an X509 Name object.
"""
@six.add_metaclass(abc.ABCMeta)
class DHBackend(object):
@abc.abstractmethod
def generate_dh_parameters(self, generator, key_size):
"""
Generate a DHParameters instance with a modulus of key_size bits.
Using the given generator. Often 2 or 5.
"""
@abc.abstractmethod
def generate_dh_private_key(self, parameters):
"""
Generate a DHPrivateKey instance with parameters as a DHParameters
object.
"""
@abc.abstractmethod
def generate_dh_private_key_and_parameters(self, generator, key_size):
"""
Generate a DHPrivateKey instance using key size only.
Using the given generator. Often 2 or 5.
"""
@abc.abstractmethod
def load_dh_private_numbers(self, numbers):
"""
Load a DHPrivateKey from DHPrivateNumbers
"""
@abc.abstractmethod
def load_dh_public_numbers(self, numbers):
"""
Load a DHPublicKey from DHPublicNumbers.
"""
@abc.abstractmethod
def load_dh_parameter_numbers(self, numbers):
"""
Load DHParameters from DHParameterNumbers.
"""
@abc.abstractmethod
def dh_parameters_supported(self, p, g, q=None):
"""
Returns whether the backend supports DH with these parameter values.
"""
@abc.abstractmethod
def dh_x942_serialization_supported(self):
"""
Returns True if the backend supports the serialization of DH objects
with subgroup order (q).
"""
@six.add_metaclass(abc.ABCMeta)
class ScryptBackend(object):
@abc.abstractmethod
def derive_scrypt(self, key_material, salt, length, n, r, p):
"""
Return bytes derived from provided Scrypt parameters.
"""

@@ -0,0 +1,10 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography.hazmat.backends.openssl.backend import backend
__all__ = ["backend"]

@@ -0,0 +1,159 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography.exceptions import InvalidTag
_ENCRYPT = 1
_DECRYPT = 0
def _aead_cipher_name(cipher):
from cryptography.hazmat.primitives.ciphers.aead import (
AESCCM, AESGCM, ChaCha20Poly1305
)
if isinstance(cipher, ChaCha20Poly1305):
return b"chacha20-poly1305"
elif isinstance(cipher, AESCCM):
return "aes-{0}-ccm".format(len(cipher._key) * 8).encode("ascii")
else:
assert isinstance(cipher, AESGCM)
return "aes-{0}-gcm".format(len(cipher._key) * 8).encode("ascii")
def _aead_setup(backend, cipher_name, key, nonce, tag, tag_len, operation):
evp_cipher = backend._lib.EVP_get_cipherbyname(cipher_name)
backend.openssl_assert(evp_cipher != backend._ffi.NULL)
ctx = backend._lib.EVP_CIPHER_CTX_new()
ctx = backend._ffi.gc(ctx, backend._lib.EVP_CIPHER_CTX_free)
res = backend._lib.EVP_CipherInit_ex(
ctx, evp_cipher,
backend._ffi.NULL,
backend._ffi.NULL,
backend._ffi.NULL,
int(operation == _ENCRYPT)
)
backend.openssl_assert(res != 0)
res = backend._lib.EVP_CIPHER_CTX_set_key_length(ctx, len(key))
backend.openssl_assert(res != 0)
res = backend._lib.EVP_CIPHER_CTX_ctrl(
ctx, backend._lib.EVP_CTRL_AEAD_SET_IVLEN, len(nonce),
backend._ffi.NULL
)
backend.openssl_assert(res != 0)
if operation == _DECRYPT:
res = backend._lib.EVP_CIPHER_CTX_ctrl(
ctx, backend._lib.EVP_CTRL_AEAD_SET_TAG, len(tag), tag
)
backend.openssl_assert(res != 0)
else:
res = backend._lib.EVP_CIPHER_CTX_ctrl(
ctx, backend._lib.EVP_CTRL_AEAD_SET_TAG, tag_len, backend._ffi.NULL
)
res = backend._lib.EVP_CipherInit_ex(
ctx,
backend._ffi.NULL,
backend._ffi.NULL,
key,
nonce,
int(operation == _ENCRYPT)
)
backend.openssl_assert(res != 0)
return ctx
def _set_length(backend, ctx, data_len):
intptr = backend._ffi.new("int *")
res = backend._lib.EVP_CipherUpdate(
ctx,
backend._ffi.NULL,
intptr,
backend._ffi.NULL,
data_len
)
backend.openssl_assert(res != 0)
def _process_aad(backend, ctx, associated_data):
outlen = backend._ffi.new("int *")
res = backend._lib.EVP_CipherUpdate(
ctx, backend._ffi.NULL, outlen, associated_data, len(associated_data)
)
backend.openssl_assert(res != 0)
def _process_data(backend, ctx, data):
outlen = backend._ffi.new("int *")
buf = backend._ffi.new("unsigned char[]", len(data))
res = backend._lib.EVP_CipherUpdate(ctx, buf, outlen, data, len(data))
backend.openssl_assert(res != 0)
return backend._ffi.buffer(buf, outlen[0])[:]
def _encrypt(backend, cipher, nonce, data, associated_data, tag_length):
from cryptography.hazmat.primitives.ciphers.aead import AESCCM
cipher_name = _aead_cipher_name(cipher)
ctx = _aead_setup(
backend, cipher_name, cipher._key, nonce, None, tag_length, _ENCRYPT
)
# CCM requires us to pass the length of the data before processing anything
# However calling this with any other AEAD results in an error
if isinstance(cipher, AESCCM):
_set_length(backend, ctx, len(data))
_process_aad(backend, ctx, associated_data)
processed_data = _process_data(backend, ctx, data)
outlen = backend._ffi.new("int *")
res = backend._lib.EVP_CipherFinal_ex(ctx, backend._ffi.NULL, outlen)
backend.openssl_assert(res != 0)
backend.openssl_assert(outlen[0] == 0)
tag_buf = backend._ffi.new("unsigned char[]", tag_length)
res = backend._lib.EVP_CIPHER_CTX_ctrl(
ctx, backend._lib.EVP_CTRL_AEAD_GET_TAG, tag_length, tag_buf
)
backend.openssl_assert(res != 0)
tag = backend._ffi.buffer(tag_buf)[:]
return processed_data + tag
def _decrypt(backend, cipher, nonce, data, associated_data, tag_length):
from cryptography.hazmat.primitives.ciphers.aead import AESCCM
if len(data) < tag_length:
raise InvalidTag
tag = data[-tag_length:]
data = data[:-tag_length]
cipher_name = _aead_cipher_name(cipher)
ctx = _aead_setup(
backend, cipher_name, cipher._key, nonce, tag, tag_length, _DECRYPT
)
# CCM requires us to pass the length of the data before processing anything
# However calling this with any other AEAD results in an error
if isinstance(cipher, AESCCM):
_set_length(backend, ctx, len(data))
_process_aad(backend, ctx, associated_data)
# CCM has a different error path if the tag doesn't match. Errors are
# raised in Update and Final is irrelevant.
if isinstance(cipher, AESCCM):
outlen = backend._ffi.new("int *")
buf = backend._ffi.new("unsigned char[]", len(data))
res = backend._lib.EVP_CipherUpdate(ctx, buf, outlen, data, len(data))
if res != 1:
backend._consume_errors()
raise InvalidTag
processed_data = backend._ffi.buffer(buf, outlen[0])[:]
else:
processed_data = _process_data(backend, ctx, data)
outlen = backend._ffi.new("int *")
res = backend._lib.EVP_CipherFinal_ex(ctx, backend._ffi.NULL, outlen)
if res == 0:
backend._consume_errors()
raise InvalidTag
return processed_data
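
These helpers back the one-shot AEAD constructions in cryptography.hazmat.primitives.ciphers.aead; a minimal usage sketch for AESGCM:

import os
from cryptography.hazmat.primitives.ciphers.aead import AESGCM

key = AESGCM.generate_key(bit_length=128)
aesgcm = AESGCM(key)
nonce = os.urandom(12)                 # 96-bit nonce; never reuse it with the same key
aad = b"authenticated header"          # authenticated but not encrypted

ct = aesgcm.encrypt(nonce, b"payload", aad)           # returns ciphertext || 16-byte tag
assert aesgcm.decrypt(nonce, ct, aad) == b"payload"   # raises InvalidTag on mismatch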

@@ -0,0 +1,222 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.exceptions import InvalidTag, UnsupportedAlgorithm, _Reasons
from cryptography.hazmat.primitives import ciphers
from cryptography.hazmat.primitives.ciphers import modes
@utils.register_interface(ciphers.CipherContext)
@utils.register_interface(ciphers.AEADCipherContext)
@utils.register_interface(ciphers.AEADEncryptionContext)
@utils.register_interface(ciphers.AEADDecryptionContext)
class _CipherContext(object):
_ENCRYPT = 1
_DECRYPT = 0
def __init__(self, backend, cipher, mode, operation):
self._backend = backend
self._cipher = cipher
self._mode = mode
self._operation = operation
self._tag = None
if isinstance(self._cipher, ciphers.BlockCipherAlgorithm):
self._block_size_bytes = self._cipher.block_size // 8
else:
self._block_size_bytes = 1
ctx = self._backend._lib.EVP_CIPHER_CTX_new()
ctx = self._backend._ffi.gc(
ctx, self._backend._lib.EVP_CIPHER_CTX_free
)
registry = self._backend._cipher_registry
try:
adapter = registry[type(cipher), type(mode)]
except KeyError:
raise UnsupportedAlgorithm(
"cipher {0} in {1} mode is not supported "
"by this backend.".format(
cipher.name, mode.name if mode else mode),
_Reasons.UNSUPPORTED_CIPHER
)
evp_cipher = adapter(self._backend, cipher, mode)
if evp_cipher == self._backend._ffi.NULL:
raise UnsupportedAlgorithm(
"cipher {0} in {1} mode is not supported "
"by this backend.".format(
cipher.name, mode.name if mode else mode),
_Reasons.UNSUPPORTED_CIPHER
)
if isinstance(mode, modes.ModeWithInitializationVector):
iv_nonce = mode.initialization_vector
elif isinstance(mode, modes.ModeWithTweak):
iv_nonce = mode.tweak
elif isinstance(mode, modes.ModeWithNonce):
iv_nonce = mode.nonce
elif isinstance(cipher, modes.ModeWithNonce):
iv_nonce = cipher.nonce
else:
iv_nonce = self._backend._ffi.NULL
# begin init with cipher and operation type
res = self._backend._lib.EVP_CipherInit_ex(ctx, evp_cipher,
self._backend._ffi.NULL,
self._backend._ffi.NULL,
self._backend._ffi.NULL,
operation)
self._backend.openssl_assert(res != 0)
# set the key length to handle variable key ciphers
res = self._backend._lib.EVP_CIPHER_CTX_set_key_length(
ctx, len(cipher.key)
)
self._backend.openssl_assert(res != 0)
if isinstance(mode, modes.GCM):
res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
ctx, self._backend._lib.EVP_CTRL_AEAD_SET_IVLEN,
len(iv_nonce), self._backend._ffi.NULL
)
self._backend.openssl_assert(res != 0)
if mode.tag is not None:
res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
ctx, self._backend._lib.EVP_CTRL_AEAD_SET_TAG,
len(mode.tag), mode.tag
)
self._backend.openssl_assert(res != 0)
self._tag = mode.tag
elif (
self._operation == self._DECRYPT and
self._backend._lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_102 and
not self._backend._lib.CRYPTOGRAPHY_IS_LIBRESSL
):
raise NotImplementedError(
"delayed passing of GCM tag requires OpenSSL >= 1.0.2."
" To use this feature please update OpenSSL"
)
# pass key/iv
res = self._backend._lib.EVP_CipherInit_ex(
ctx,
self._backend._ffi.NULL,
self._backend._ffi.NULL,
cipher.key,
iv_nonce,
operation
)
self._backend.openssl_assert(res != 0)
# We purposely disable padding here as it's handled higher up in the
# API.
self._backend._lib.EVP_CIPHER_CTX_set_padding(ctx, 0)
self._ctx = ctx
def update(self, data):
buf = bytearray(len(data) + self._block_size_bytes - 1)
n = self.update_into(data, buf)
return bytes(buf[:n])
def update_into(self, data, buf):
if len(buf) < (len(data) + self._block_size_bytes - 1):
raise ValueError(
"buffer must be at least {0} bytes for this "
"payload".format(len(data) + self._block_size_bytes - 1)
)
buf = self._backend._ffi.cast(
"unsigned char *", self._backend._ffi.from_buffer(buf)
)
outlen = self._backend._ffi.new("int *")
res = self._backend._lib.EVP_CipherUpdate(self._ctx, buf, outlen,
data, len(data))
self._backend.openssl_assert(res != 0)
return outlen[0]
def finalize(self):
# OpenSSL 1.0.1 on Ubuntu 12.04 (and possibly other distributions)
# appears to have a bug where you must make at least one call to update
# even if you are only using authenticate_additional_data or the
# GCM tag will be wrong. An (empty) call to update resolves this
# and is harmless for all other versions of OpenSSL.
if isinstance(self._mode, modes.GCM):
self.update(b"")
if (
self._operation == self._DECRYPT and
isinstance(self._mode, modes.ModeWithAuthenticationTag) and
self.tag is None
):
raise ValueError(
"Authentication tag must be provided when decrypting."
)
buf = self._backend._ffi.new("unsigned char[]", self._block_size_bytes)
outlen = self._backend._ffi.new("int *")
res = self._backend._lib.EVP_CipherFinal_ex(self._ctx, buf, outlen)
if res == 0:
errors = self._backend._consume_errors()
if not errors and isinstance(self._mode, modes.GCM):
raise InvalidTag
self._backend.openssl_assert(
errors[0]._lib_reason_match(
self._backend._lib.ERR_LIB_EVP,
self._backend._lib.EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH
)
)
raise ValueError(
"The length of the provided data is not a multiple of "
"the block length."
)
if (isinstance(self._mode, modes.GCM) and
self._operation == self._ENCRYPT):
tag_buf = self._backend._ffi.new(
"unsigned char[]", self._block_size_bytes
)
res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
self._ctx, self._backend._lib.EVP_CTRL_AEAD_GET_TAG,
self._block_size_bytes, tag_buf
)
self._backend.openssl_assert(res != 0)
self._tag = self._backend._ffi.buffer(tag_buf)[:]
res = self._backend._lib.EVP_CIPHER_CTX_cleanup(self._ctx)
self._backend.openssl_assert(res == 1)
return self._backend._ffi.buffer(buf)[:outlen[0]]
def finalize_with_tag(self, tag):
if (
self._backend._lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_102 and
not self._backend._lib.CRYPTOGRAPHY_IS_LIBRESSL
):
raise NotImplementedError(
"finalize_with_tag requires OpenSSL >= 1.0.2. To use this "
"method please update OpenSSL"
)
if len(tag) < self._mode._min_tag_length:
raise ValueError(
"Authentication tag must be {0} bytes or longer.".format(
self._mode._min_tag_length)
)
res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
self._ctx, self._backend._lib.EVP_CTRL_AEAD_SET_TAG,
len(tag), tag
)
self._backend.openssl_assert(res != 0)
self._tag = tag
return self.finalize()
def authenticate_additional_data(self, data):
outlen = self._backend._ffi.new("int *")
res = self._backend._lib.EVP_CipherUpdate(
self._ctx, self._backend._ffi.NULL, outlen, data, len(data)
)
self._backend.openssl_assert(res != 0)
tag = utils.read_only_property("_tag")
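
_CipherContext is what Cipher(...).encryptor() and .decryptor() return on this backend; a sketch exercising the GCM code paths above:

import os
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

key, iv = os.urandom(32), os.urandom(12)

enc = Cipher(
    algorithms.AES(key), modes.GCM(iv), backend=default_backend()
).encryptor()
enc.authenticate_additional_data(b"header")
ct = enc.update(b"payload") + enc.finalize()
tag = enc.tag                                   # exposed by the read_only_property above

dec = Cipher(
    algorithms.AES(key), modes.GCM(iv, tag), backend=default_backend()
).decryptor()
dec.authenticate_additional_data(b"header")
assert dec.update(ct) + dec.finalize() == b"payload"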

@@ -0,0 +1,81 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.exceptions import (
InvalidSignature, UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.primitives import constant_time, mac
from cryptography.hazmat.primitives.ciphers.modes import CBC
@utils.register_interface(mac.MACContext)
class _CMACContext(object):
def __init__(self, backend, algorithm, ctx=None):
if not backend.cmac_algorithm_supported(algorithm):
raise UnsupportedAlgorithm("This backend does not support CMAC.",
_Reasons.UNSUPPORTED_CIPHER)
self._backend = backend
self._key = algorithm.key
self._algorithm = algorithm
self._output_length = algorithm.block_size // 8
if ctx is None:
registry = self._backend._cipher_registry
adapter = registry[type(algorithm), CBC]
evp_cipher = adapter(self._backend, algorithm, CBC)
ctx = self._backend._lib.CMAC_CTX_new()
self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
ctx = self._backend._ffi.gc(ctx, self._backend._lib.CMAC_CTX_free)
res = self._backend._lib.CMAC_Init(
ctx, self._key, len(self._key),
evp_cipher, self._backend._ffi.NULL
)
self._backend.openssl_assert(res == 1)
self._ctx = ctx
algorithm = utils.read_only_property("_algorithm")
def update(self, data):
res = self._backend._lib.CMAC_Update(self._ctx, data, len(data))
self._backend.openssl_assert(res == 1)
def finalize(self):
buf = self._backend._ffi.new("unsigned char[]", self._output_length)
length = self._backend._ffi.new("size_t *", self._output_length)
res = self._backend._lib.CMAC_Final(
self._ctx, buf, length
)
self._backend.openssl_assert(res == 1)
self._ctx = None
return self._backend._ffi.buffer(buf)[:]
def copy(self):
copied_ctx = self._backend._lib.CMAC_CTX_new()
copied_ctx = self._backend._ffi.gc(
copied_ctx, self._backend._lib.CMAC_CTX_free
)
res = self._backend._lib.CMAC_CTX_copy(
copied_ctx, self._ctx
)
self._backend.openssl_assert(res == 1)
return _CMACContext(
self._backend, self._algorithm, ctx=copied_ctx
)
def verify(self, signature):
digest = self.finalize()
if not constant_time.bytes_eq(digest, signature):
raise InvalidSignature("Signature did not match digest.")
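
_CMACContext backs the public CMAC primitive; a minimal usage sketch (the all-zero key is for illustration only):

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import cmac
from cryptography.hazmat.primitives.ciphers import algorithms

key = b"\x00" * 16                      # demo key; use a randomly generated key in practice
c = cmac.CMAC(algorithms.AES(key), backend=default_backend())
c.update(b"message to authenticate")
tag = c.finalize()                      # 16-byte AES-CMAC tag

verifier = cmac.CMAC(algorithms.AES(key), backend=default_backend())
verifier.update(b"message to authenticate")
verifier.verify(tag)                    # raises InvalidSignature on mismatch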

@@ -0,0 +1,826 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import datetime
import ipaddress
from asn1crypto.core import Integer, SequenceOf
from cryptography import x509
from cryptography.x509.extensions import _TLS_FEATURE_TYPE_TO_ENUM
from cryptography.x509.name import _ASN1_TYPE_TO_ENUM
from cryptography.x509.oid import (
CRLEntryExtensionOID, CertificatePoliciesOID, ExtensionOID
)
class _Integers(SequenceOf):
_child_spec = Integer
def _obj2txt(backend, obj):
# Set to 80 on the recommendation of
# https://www.openssl.org/docs/crypto/OBJ_nid2ln.html#return_values
#
# But OIDs longer than this occur in real life (e.g. Active
# Directory makes some very long OIDs). So we need to detect
# and properly handle the case where the default buffer is not
# big enough.
#
buf_len = 80
buf = backend._ffi.new("char[]", buf_len)
# 'res' is the number of bytes that *would* be written if the
# buffer is large enough. If 'res' > buf_len - 1, we need to
# alloc a big-enough buffer and go again.
res = backend._lib.OBJ_obj2txt(buf, buf_len, obj, 1)
if res > buf_len - 1: # account for terminating null byte
buf_len = res + 1
buf = backend._ffi.new("char[]", buf_len)
res = backend._lib.OBJ_obj2txt(buf, buf_len, obj, 1)
backend.openssl_assert(res > 0)
return backend._ffi.buffer(buf, res)[:].decode()
def _decode_x509_name_entry(backend, x509_name_entry):
obj = backend._lib.X509_NAME_ENTRY_get_object(x509_name_entry)
backend.openssl_assert(obj != backend._ffi.NULL)
data = backend._lib.X509_NAME_ENTRY_get_data(x509_name_entry)
backend.openssl_assert(data != backend._ffi.NULL)
value = _asn1_string_to_utf8(backend, data)
oid = _obj2txt(backend, obj)
type = _ASN1_TYPE_TO_ENUM[data.type]
return x509.NameAttribute(x509.ObjectIdentifier(oid), value, type)
def _decode_x509_name(backend, x509_name):
count = backend._lib.X509_NAME_entry_count(x509_name)
attributes = []
prev_set_id = -1
for x in range(count):
entry = backend._lib.X509_NAME_get_entry(x509_name, x)
attribute = _decode_x509_name_entry(backend, entry)
set_id = backend._lib.Cryptography_X509_NAME_ENTRY_set(entry)
if set_id != prev_set_id:
attributes.append(set([attribute]))
else:
# is in the same RDN a previous entry
attributes[-1].add(attribute)
prev_set_id = set_id
return x509.Name(x509.RelativeDistinguishedName(rdn) for rdn in attributes)
def _decode_general_names(backend, gns):
num = backend._lib.sk_GENERAL_NAME_num(gns)
names = []
for i in range(num):
gn = backend._lib.sk_GENERAL_NAME_value(gns, i)
backend.openssl_assert(gn != backend._ffi.NULL)
names.append(_decode_general_name(backend, gn))
return names
def _decode_general_name(backend, gn):
if gn.type == backend._lib.GEN_DNS:
# Convert to bytes and then decode to utf8. We don't use
# asn1_string_to_utf8 here because it doesn't properly convert
# utf8 from ia5strings.
data = _asn1_string_to_bytes(backend, gn.d.dNSName).decode("utf8")
# We don't use the constructor for DNSName so we can bypass validation
# This allows us to create DNSName objects that have unicode chars
# when a certificate (against the RFC) contains them.
return x509.DNSName._init_without_validation(data)
elif gn.type == backend._lib.GEN_URI:
# Convert to bytes and then decode to utf8. We don't use
# asn1_string_to_utf8 here because it doesn't properly convert
# utf8 from ia5strings.
data = _asn1_string_to_bytes(
backend, gn.d.uniformResourceIdentifier
).decode("utf8")
# We don't use the constructor for URI so we can bypass validation
# This allows us to create URI objects that have unicode chars
# when a certificate (against the RFC) contains them.
return x509.UniformResourceIdentifier._init_without_validation(data)
elif gn.type == backend._lib.GEN_RID:
oid = _obj2txt(backend, gn.d.registeredID)
return x509.RegisteredID(x509.ObjectIdentifier(oid))
elif gn.type == backend._lib.GEN_IPADD:
data = _asn1_string_to_bytes(backend, gn.d.iPAddress)
data_len = len(data)
if data_len == 8 or data_len == 32:
# This is an IPv4 or IPv6 Network and not a single IP. This
# type of data appears in Name Constraints. Unfortunately,
# ipaddress doesn't support packed bytes + netmask. Additionally,
# IPv6Network can only handle CIDR rather than the full 16 byte
# netmask. To handle this we convert the netmask to integer, then
# find the first 0 bit, which will be the prefix. If another 1
# bit is present after that the netmask is invalid.
base = ipaddress.ip_address(data[:data_len // 2])
netmask = ipaddress.ip_address(data[data_len // 2:])
bits = bin(int(netmask))[2:]
prefix = bits.find('0')
# If no 0 bits are found it is a /32 or /128
if prefix == -1:
prefix = len(bits)
if "1" in bits[prefix:]:
raise ValueError("Invalid netmask")
ip = ipaddress.ip_network(base.exploded + u"/{0}".format(prefix))
else:
ip = ipaddress.ip_address(data)
return x509.IPAddress(ip)
elif gn.type == backend._lib.GEN_DIRNAME:
return x509.DirectoryName(
_decode_x509_name(backend, gn.d.directoryName)
)
elif gn.type == backend._lib.GEN_EMAIL:
# Convert to bytes and then decode to utf8. We don't use
# asn1_string_to_utf8 here because it doesn't properly convert
# utf8 from ia5strings.
data = _asn1_string_to_bytes(backend, gn.d.rfc822Name).decode("utf8")
# We don't use the constructor for RFC822Name so we can bypass
# validation. This allows us to create RFC822Name objects that have
# unicode chars when a certificate (against the RFC) contains them.
return x509.RFC822Name._init_without_validation(data)
elif gn.type == backend._lib.GEN_OTHERNAME:
type_id = _obj2txt(backend, gn.d.otherName.type_id)
value = _asn1_to_der(backend, gn.d.otherName.value)
return x509.OtherName(x509.ObjectIdentifier(type_id), value)
else:
# x400Address or ediPartyName
raise x509.UnsupportedGeneralNameType(
"{0} is not a supported type".format(
x509._GENERAL_NAMES.get(gn.type, gn.type)
),
gn.type
)
def _decode_ocsp_no_check(backend, ext):
return x509.OCSPNoCheck()
def _decode_crl_number(backend, ext):
asn1_int = backend._ffi.cast("ASN1_INTEGER *", ext)
asn1_int = backend._ffi.gc(asn1_int, backend._lib.ASN1_INTEGER_free)
return x509.CRLNumber(_asn1_integer_to_int(backend, asn1_int))
def _decode_delta_crl_indicator(backend, ext):
asn1_int = backend._ffi.cast("ASN1_INTEGER *", ext)
asn1_int = backend._ffi.gc(asn1_int, backend._lib.ASN1_INTEGER_free)
return x509.DeltaCRLIndicator(_asn1_integer_to_int(backend, asn1_int))
class _X509ExtensionParser(object):
def __init__(self, ext_count, get_ext, handlers):
self.ext_count = ext_count
self.get_ext = get_ext
self.handlers = handlers
def parse(self, backend, x509_obj):
extensions = []
seen_oids = set()
for i in range(self.ext_count(backend, x509_obj)):
ext = self.get_ext(backend, x509_obj, i)
backend.openssl_assert(ext != backend._ffi.NULL)
crit = backend._lib.X509_EXTENSION_get_critical(ext)
critical = crit == 1
oid = x509.ObjectIdentifier(
_obj2txt(backend, backend._lib.X509_EXTENSION_get_object(ext))
)
if oid in seen_oids:
raise x509.DuplicateExtension(
"Duplicate {0} extension found".format(oid), oid
)
# This OID is only supported in OpenSSL 1.1.0+ but we want
# to support it in all versions of OpenSSL so we decode it
# ourselves.
if oid == ExtensionOID.TLS_FEATURE:
data = backend._lib.X509_EXTENSION_get_data(ext)
parsed = _Integers.load(_asn1_string_to_bytes(backend, data))
value = x509.TLSFeature(
[_TLS_FEATURE_TYPE_TO_ENUM[x.native] for x in parsed]
)
extensions.append(x509.Extension(oid, critical, value))
seen_oids.add(oid)
continue
try:
handler = self.handlers[oid]
except KeyError:
# Dump the DER payload into an UnrecognizedExtension object
data = backend._lib.X509_EXTENSION_get_data(ext)
backend.openssl_assert(data != backend._ffi.NULL)
der = backend._ffi.buffer(data.data, data.length)[:]
unrecognized = x509.UnrecognizedExtension(oid, der)
extensions.append(
x509.Extension(oid, critical, unrecognized)
)
else:
ext_data = backend._lib.X509V3_EXT_d2i(ext)
if ext_data == backend._ffi.NULL:
backend._consume_errors()
raise ValueError(
"The {0} extension is invalid and can't be "
"parsed".format(oid)
)
value = handler(backend, ext_data)
extensions.append(x509.Extension(oid, critical, value))
seen_oids.add(oid)
return x509.Extensions(extensions)
def _decode_certificate_policies(backend, cp):
cp = backend._ffi.cast("Cryptography_STACK_OF_POLICYINFO *", cp)
cp = backend._ffi.gc(cp, backend._lib.CERTIFICATEPOLICIES_free)
num = backend._lib.sk_POLICYINFO_num(cp)
certificate_policies = []
for i in range(num):
qualifiers = None
pi = backend._lib.sk_POLICYINFO_value(cp, i)
oid = x509.ObjectIdentifier(_obj2txt(backend, pi.policyid))
if pi.qualifiers != backend._ffi.NULL:
qnum = backend._lib.sk_POLICYQUALINFO_num(pi.qualifiers)
qualifiers = []
for j in range(qnum):
pqi = backend._lib.sk_POLICYQUALINFO_value(
pi.qualifiers, j
)
pqualid = x509.ObjectIdentifier(
_obj2txt(backend, pqi.pqualid)
)
if pqualid == CertificatePoliciesOID.CPS_QUALIFIER:
cpsuri = backend._ffi.buffer(
pqi.d.cpsuri.data, pqi.d.cpsuri.length
)[:].decode('ascii')
qualifiers.append(cpsuri)
else:
assert pqualid == CertificatePoliciesOID.CPS_USER_NOTICE
user_notice = _decode_user_notice(
backend, pqi.d.usernotice
)
qualifiers.append(user_notice)
certificate_policies.append(
x509.PolicyInformation(oid, qualifiers)
)
return x509.CertificatePolicies(certificate_policies)
def _decode_user_notice(backend, un):
explicit_text = None
notice_reference = None
if un.exptext != backend._ffi.NULL:
explicit_text = _asn1_string_to_utf8(backend, un.exptext)
if un.noticeref != backend._ffi.NULL:
organization = _asn1_string_to_utf8(
backend, un.noticeref.organization
)
num = backend._lib.sk_ASN1_INTEGER_num(
un.noticeref.noticenos
)
notice_numbers = []
for i in range(num):
asn1_int = backend._lib.sk_ASN1_INTEGER_value(
un.noticeref.noticenos, i
)
notice_num = _asn1_integer_to_int(backend, asn1_int)
notice_numbers.append(notice_num)
notice_reference = x509.NoticeReference(
organization, notice_numbers
)
return x509.UserNotice(notice_reference, explicit_text)
def _decode_basic_constraints(backend, bc_st):
basic_constraints = backend._ffi.cast("BASIC_CONSTRAINTS *", bc_st)
basic_constraints = backend._ffi.gc(
basic_constraints, backend._lib.BASIC_CONSTRAINTS_free
)
# The byte representation of an ASN.1 boolean true is \xff. OpenSSL
# chooses to just map this to its ordinal value, so true is 255 and
# false is 0.
ca = basic_constraints.ca == 255
path_length = _asn1_integer_to_int_or_none(
backend, basic_constraints.pathlen
)
return x509.BasicConstraints(ca, path_length)
def _decode_subject_key_identifier(backend, asn1_string):
asn1_string = backend._ffi.cast("ASN1_OCTET_STRING *", asn1_string)
asn1_string = backend._ffi.gc(
asn1_string, backend._lib.ASN1_OCTET_STRING_free
)
return x509.SubjectKeyIdentifier(
backend._ffi.buffer(asn1_string.data, asn1_string.length)[:]
)
def _decode_authority_key_identifier(backend, akid):
akid = backend._ffi.cast("AUTHORITY_KEYID *", akid)
akid = backend._ffi.gc(akid, backend._lib.AUTHORITY_KEYID_free)
key_identifier = None
authority_cert_issuer = None
if akid.keyid != backend._ffi.NULL:
key_identifier = backend._ffi.buffer(
akid.keyid.data, akid.keyid.length
)[:]
if akid.issuer != backend._ffi.NULL:
authority_cert_issuer = _decode_general_names(
backend, akid.issuer
)
authority_cert_serial_number = _asn1_integer_to_int_or_none(
backend, akid.serial
)
return x509.AuthorityKeyIdentifier(
key_identifier, authority_cert_issuer, authority_cert_serial_number
)
def _decode_authority_information_access(backend, aia):
aia = backend._ffi.cast("Cryptography_STACK_OF_ACCESS_DESCRIPTION *", aia)
aia = backend._ffi.gc(aia, backend._lib.sk_ACCESS_DESCRIPTION_free)
num = backend._lib.sk_ACCESS_DESCRIPTION_num(aia)
access_descriptions = []
for i in range(num):
ad = backend._lib.sk_ACCESS_DESCRIPTION_value(aia, i)
backend.openssl_assert(ad.method != backend._ffi.NULL)
oid = x509.ObjectIdentifier(_obj2txt(backend, ad.method))
backend.openssl_assert(ad.location != backend._ffi.NULL)
gn = _decode_general_name(backend, ad.location)
access_descriptions.append(x509.AccessDescription(oid, gn))
return x509.AuthorityInformationAccess(access_descriptions)
def _decode_key_usage(backend, bit_string):
bit_string = backend._ffi.cast("ASN1_BIT_STRING *", bit_string)
bit_string = backend._ffi.gc(bit_string, backend._lib.ASN1_BIT_STRING_free)
get_bit = backend._lib.ASN1_BIT_STRING_get_bit
digital_signature = get_bit(bit_string, 0) == 1
content_commitment = get_bit(bit_string, 1) == 1
key_encipherment = get_bit(bit_string, 2) == 1
data_encipherment = get_bit(bit_string, 3) == 1
key_agreement = get_bit(bit_string, 4) == 1
key_cert_sign = get_bit(bit_string, 5) == 1
crl_sign = get_bit(bit_string, 6) == 1
encipher_only = get_bit(bit_string, 7) == 1
decipher_only = get_bit(bit_string, 8) == 1
return x509.KeyUsage(
digital_signature,
content_commitment,
key_encipherment,
data_encipherment,
key_agreement,
key_cert_sign,
crl_sign,
encipher_only,
decipher_only
)
def _decode_general_names_extension(backend, gns):
gns = backend._ffi.cast("GENERAL_NAMES *", gns)
gns = backend._ffi.gc(gns, backend._lib.GENERAL_NAMES_free)
general_names = _decode_general_names(backend, gns)
return general_names
def _decode_subject_alt_name(backend, ext):
return x509.SubjectAlternativeName(
_decode_general_names_extension(backend, ext)
)
def _decode_issuer_alt_name(backend, ext):
return x509.IssuerAlternativeName(
_decode_general_names_extension(backend, ext)
)
def _decode_name_constraints(backend, nc):
nc = backend._ffi.cast("NAME_CONSTRAINTS *", nc)
nc = backend._ffi.gc(nc, backend._lib.NAME_CONSTRAINTS_free)
permitted = _decode_general_subtrees(backend, nc.permittedSubtrees)
excluded = _decode_general_subtrees(backend, nc.excludedSubtrees)
return x509.NameConstraints(
permitted_subtrees=permitted, excluded_subtrees=excluded
)
def _decode_general_subtrees(backend, stack_subtrees):
if stack_subtrees == backend._ffi.NULL:
return None
num = backend._lib.sk_GENERAL_SUBTREE_num(stack_subtrees)
subtrees = []
for i in range(num):
obj = backend._lib.sk_GENERAL_SUBTREE_value(stack_subtrees, i)
backend.openssl_assert(obj != backend._ffi.NULL)
name = _decode_general_name(backend, obj.base)
subtrees.append(name)
return subtrees
def _decode_policy_constraints(backend, pc):
pc = backend._ffi.cast("POLICY_CONSTRAINTS *", pc)
pc = backend._ffi.gc(pc, backend._lib.POLICY_CONSTRAINTS_free)
require_explicit_policy = _asn1_integer_to_int_or_none(
backend, pc.requireExplicitPolicy
)
inhibit_policy_mapping = _asn1_integer_to_int_or_none(
backend, pc.inhibitPolicyMapping
)
return x509.PolicyConstraints(
require_explicit_policy, inhibit_policy_mapping
)
def _decode_extended_key_usage(backend, sk):
sk = backend._ffi.cast("Cryptography_STACK_OF_ASN1_OBJECT *", sk)
sk = backend._ffi.gc(sk, backend._lib.sk_ASN1_OBJECT_free)
num = backend._lib.sk_ASN1_OBJECT_num(sk)
ekus = []
for i in range(num):
obj = backend._lib.sk_ASN1_OBJECT_value(sk, i)
backend.openssl_assert(obj != backend._ffi.NULL)
oid = x509.ObjectIdentifier(_obj2txt(backend, obj))
ekus.append(oid)
return x509.ExtendedKeyUsage(ekus)
_DISTPOINT_TYPE_FULLNAME = 0
_DISTPOINT_TYPE_RELATIVENAME = 1
def _decode_dist_points(backend, cdps):
cdps = backend._ffi.cast("Cryptography_STACK_OF_DIST_POINT *", cdps)
cdps = backend._ffi.gc(cdps, backend._lib.CRL_DIST_POINTS_free)
num = backend._lib.sk_DIST_POINT_num(cdps)
dist_points = []
for i in range(num):
full_name = None
relative_name = None
crl_issuer = None
reasons = None
cdp = backend._lib.sk_DIST_POINT_value(cdps, i)
if cdp.reasons != backend._ffi.NULL:
# We will check each bit from RFC 5280
# ReasonFlags ::= BIT STRING {
# unused (0),
# keyCompromise (1),
# cACompromise (2),
# affiliationChanged (3),
# superseded (4),
# cessationOfOperation (5),
# certificateHold (6),
# privilegeWithdrawn (7),
# aACompromise (8) }
reasons = []
get_bit = backend._lib.ASN1_BIT_STRING_get_bit
if get_bit(cdp.reasons, 1):
reasons.append(x509.ReasonFlags.key_compromise)
if get_bit(cdp.reasons, 2):
reasons.append(x509.ReasonFlags.ca_compromise)
if get_bit(cdp.reasons, 3):
reasons.append(x509.ReasonFlags.affiliation_changed)
if get_bit(cdp.reasons, 4):
reasons.append(x509.ReasonFlags.superseded)
if get_bit(cdp.reasons, 5):
reasons.append(x509.ReasonFlags.cessation_of_operation)
if get_bit(cdp.reasons, 6):
reasons.append(x509.ReasonFlags.certificate_hold)
if get_bit(cdp.reasons, 7):
reasons.append(x509.ReasonFlags.privilege_withdrawn)
if get_bit(cdp.reasons, 8):
reasons.append(x509.ReasonFlags.aa_compromise)
reasons = frozenset(reasons)
if cdp.CRLissuer != backend._ffi.NULL:
crl_issuer = _decode_general_names(backend, cdp.CRLissuer)
# Certificates may have a crl_issuer/reasons and no distribution
# point so make sure it's not null.
if cdp.distpoint != backend._ffi.NULL:
# Type 0 is fullName, there is no #define for it in the code.
if cdp.distpoint.type == _DISTPOINT_TYPE_FULLNAME:
full_name = _decode_general_names(
backend, cdp.distpoint.name.fullname
)
# OpenSSL code doesn't test for a specific type for
# relativename, everything that isn't fullname is considered
# relativename. Per RFC 5280:
#
# DistributionPointName ::= CHOICE {
# fullName [0] GeneralNames,
# nameRelativeToCRLIssuer [1] RelativeDistinguishedName }
else:
rns = cdp.distpoint.name.relativename
rnum = backend._lib.sk_X509_NAME_ENTRY_num(rns)
attributes = set()
for i in range(rnum):
rn = backend._lib.sk_X509_NAME_ENTRY_value(
rns, i
)
backend.openssl_assert(rn != backend._ffi.NULL)
attributes.add(
_decode_x509_name_entry(backend, rn)
)
relative_name = x509.RelativeDistinguishedName(attributes)
dist_points.append(
x509.DistributionPoint(
full_name, relative_name, reasons, crl_issuer
)
)
return dist_points
def _decode_crl_distribution_points(backend, cdps):
dist_points = _decode_dist_points(backend, cdps)
return x509.CRLDistributionPoints(dist_points)
def _decode_freshest_crl(backend, cdps):
dist_points = _decode_dist_points(backend, cdps)
return x509.FreshestCRL(dist_points)
def _decode_inhibit_any_policy(backend, asn1_int):
asn1_int = backend._ffi.cast("ASN1_INTEGER *", asn1_int)
asn1_int = backend._ffi.gc(asn1_int, backend._lib.ASN1_INTEGER_free)
skip_certs = _asn1_integer_to_int(backend, asn1_int)
return x509.InhibitAnyPolicy(skip_certs)
def _decode_precert_signed_certificate_timestamps(backend, asn1_scts):
from cryptography.hazmat.backends.openssl.x509 import (
_SignedCertificateTimestamp
)
asn1_scts = backend._ffi.cast("Cryptography_STACK_OF_SCT *", asn1_scts)
asn1_scts = backend._ffi.gc(asn1_scts, backend._lib.SCT_LIST_free)
scts = []
for i in range(backend._lib.sk_SCT_num(asn1_scts)):
sct = backend._lib.sk_SCT_value(asn1_scts, i)
scts.append(_SignedCertificateTimestamp(backend, asn1_scts, sct))
return x509.PrecertificateSignedCertificateTimestamps(scts)
# CRLReason ::= ENUMERATED {
# unspecified (0),
# keyCompromise (1),
# cACompromise (2),
# affiliationChanged (3),
# superseded (4),
# cessationOfOperation (5),
# certificateHold (6),
# -- value 7 is not used
# removeFromCRL (8),
# privilegeWithdrawn (9),
# aACompromise (10) }
_CRL_ENTRY_REASON_CODE_TO_ENUM = {
0: x509.ReasonFlags.unspecified,
1: x509.ReasonFlags.key_compromise,
2: x509.ReasonFlags.ca_compromise,
3: x509.ReasonFlags.affiliation_changed,
4: x509.ReasonFlags.superseded,
5: x509.ReasonFlags.cessation_of_operation,
6: x509.ReasonFlags.certificate_hold,
8: x509.ReasonFlags.remove_from_crl,
9: x509.ReasonFlags.privilege_withdrawn,
10: x509.ReasonFlags.aa_compromise,
}
_CRL_ENTRY_REASON_ENUM_TO_CODE = {
x509.ReasonFlags.unspecified: 0,
x509.ReasonFlags.key_compromise: 1,
x509.ReasonFlags.ca_compromise: 2,
x509.ReasonFlags.affiliation_changed: 3,
x509.ReasonFlags.superseded: 4,
x509.ReasonFlags.cessation_of_operation: 5,
x509.ReasonFlags.certificate_hold: 6,
x509.ReasonFlags.remove_from_crl: 8,
x509.ReasonFlags.privilege_withdrawn: 9,
x509.ReasonFlags.aa_compromise: 10
}
def _decode_crl_reason(backend, enum):
enum = backend._ffi.cast("ASN1_ENUMERATED *", enum)
enum = backend._ffi.gc(enum, backend._lib.ASN1_ENUMERATED_free)
code = backend._lib.ASN1_ENUMERATED_get(enum)
try:
return x509.CRLReason(_CRL_ENTRY_REASON_CODE_TO_ENUM[code])
except KeyError:
raise ValueError("Unsupported reason code: {0}".format(code))
def _decode_invalidity_date(backend, inv_date):
generalized_time = backend._ffi.cast(
"ASN1_GENERALIZEDTIME *", inv_date
)
generalized_time = backend._ffi.gc(
generalized_time, backend._lib.ASN1_GENERALIZEDTIME_free
)
return x509.InvalidityDate(
_parse_asn1_generalized_time(backend, generalized_time)
)
def _decode_cert_issuer(backend, gns):
gns = backend._ffi.cast("GENERAL_NAMES *", gns)
gns = backend._ffi.gc(gns, backend._lib.GENERAL_NAMES_free)
general_names = _decode_general_names(backend, gns)
return x509.CertificateIssuer(general_names)
def _asn1_to_der(backend, asn1_type):
buf = backend._ffi.new("unsigned char **")
res = backend._lib.i2d_ASN1_TYPE(asn1_type, buf)
backend.openssl_assert(res >= 0)
backend.openssl_assert(buf[0] != backend._ffi.NULL)
buf = backend._ffi.gc(
buf, lambda buffer: backend._lib.OPENSSL_free(buffer[0])
)
return backend._ffi.buffer(buf[0], res)[:]
def _asn1_integer_to_int(backend, asn1_int):
bn = backend._lib.ASN1_INTEGER_to_BN(asn1_int, backend._ffi.NULL)
backend.openssl_assert(bn != backend._ffi.NULL)
bn = backend._ffi.gc(bn, backend._lib.BN_free)
return backend._bn_to_int(bn)
def _asn1_integer_to_int_or_none(backend, asn1_int):
if asn1_int == backend._ffi.NULL:
return None
else:
return _asn1_integer_to_int(backend, asn1_int)
def _asn1_string_to_bytes(backend, asn1_string):
return backend._ffi.buffer(asn1_string.data, asn1_string.length)[:]
def _asn1_string_to_ascii(backend, asn1_string):
return _asn1_string_to_bytes(backend, asn1_string).decode("ascii")
def _asn1_string_to_utf8(backend, asn1_string):
buf = backend._ffi.new("unsigned char **")
res = backend._lib.ASN1_STRING_to_UTF8(buf, asn1_string)
if res == -1:
raise ValueError(
"Unsupported ASN1 string type. Type: {0}".format(asn1_string.type)
)
backend.openssl_assert(buf[0] != backend._ffi.NULL)
buf = backend._ffi.gc(
buf, lambda buffer: backend._lib.OPENSSL_free(buffer[0])
)
return backend._ffi.buffer(buf[0], res)[:].decode('utf8')
def _parse_asn1_time(backend, asn1_time):
backend.openssl_assert(asn1_time != backend._ffi.NULL)
generalized_time = backend._lib.ASN1_TIME_to_generalizedtime(
asn1_time, backend._ffi.NULL
)
if generalized_time == backend._ffi.NULL:
raise ValueError(
"Couldn't parse ASN.1 time as generalizedtime {!r}".format(
_asn1_string_to_bytes(backend, asn1_time)
)
)
generalized_time = backend._ffi.gc(
generalized_time, backend._lib.ASN1_GENERALIZEDTIME_free
)
return _parse_asn1_generalized_time(backend, generalized_time)
def _parse_asn1_generalized_time(backend, generalized_time):
time = _asn1_string_to_ascii(
backend, backend._ffi.cast("ASN1_STRING *", generalized_time)
)
return datetime.datetime.strptime(time, "%Y%m%d%H%M%SZ")
_EXTENSION_HANDLERS_NO_SCT = {
ExtensionOID.BASIC_CONSTRAINTS: _decode_basic_constraints,
ExtensionOID.SUBJECT_KEY_IDENTIFIER: _decode_subject_key_identifier,
ExtensionOID.KEY_USAGE: _decode_key_usage,
ExtensionOID.SUBJECT_ALTERNATIVE_NAME: _decode_subject_alt_name,
ExtensionOID.EXTENDED_KEY_USAGE: _decode_extended_key_usage,
ExtensionOID.AUTHORITY_KEY_IDENTIFIER: _decode_authority_key_identifier,
ExtensionOID.AUTHORITY_INFORMATION_ACCESS: (
_decode_authority_information_access
),
ExtensionOID.CERTIFICATE_POLICIES: _decode_certificate_policies,
ExtensionOID.CRL_DISTRIBUTION_POINTS: _decode_crl_distribution_points,
ExtensionOID.FRESHEST_CRL: _decode_freshest_crl,
ExtensionOID.OCSP_NO_CHECK: _decode_ocsp_no_check,
ExtensionOID.INHIBIT_ANY_POLICY: _decode_inhibit_any_policy,
ExtensionOID.ISSUER_ALTERNATIVE_NAME: _decode_issuer_alt_name,
ExtensionOID.NAME_CONSTRAINTS: _decode_name_constraints,
ExtensionOID.POLICY_CONSTRAINTS: _decode_policy_constraints,
}
_EXTENSION_HANDLERS = _EXTENSION_HANDLERS_NO_SCT.copy()
_EXTENSION_HANDLERS[
ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS
] = _decode_precert_signed_certificate_timestamps
_REVOKED_EXTENSION_HANDLERS = {
CRLEntryExtensionOID.CRL_REASON: _decode_crl_reason,
CRLEntryExtensionOID.INVALIDITY_DATE: _decode_invalidity_date,
CRLEntryExtensionOID.CERTIFICATE_ISSUER: _decode_cert_issuer,
}
_CRL_EXTENSION_HANDLERS = {
ExtensionOID.CRL_NUMBER: _decode_crl_number,
ExtensionOID.DELTA_CRL_INDICATOR: _decode_delta_crl_indicator,
ExtensionOID.AUTHORITY_KEY_IDENTIFIER: _decode_authority_key_identifier,
ExtensionOID.ISSUER_ALTERNATIVE_NAME: _decode_issuer_alt_name,
ExtensionOID.AUTHORITY_INFORMATION_ACCESS: (
_decode_authority_information_access
),
}
_CERTIFICATE_EXTENSION_PARSER_NO_SCT = _X509ExtensionParser(
ext_count=lambda backend, x: backend._lib.X509_get_ext_count(x),
get_ext=lambda backend, x, i: backend._lib.X509_get_ext(x, i),
handlers=_EXTENSION_HANDLERS_NO_SCT
)
_CERTIFICATE_EXTENSION_PARSER = _X509ExtensionParser(
ext_count=lambda backend, x: backend._lib.X509_get_ext_count(x),
get_ext=lambda backend, x, i: backend._lib.X509_get_ext(x, i),
handlers=_EXTENSION_HANDLERS
)
_CSR_EXTENSION_PARSER = _X509ExtensionParser(
ext_count=lambda backend, x: backend._lib.sk_X509_EXTENSION_num(x),
get_ext=lambda backend, x, i: backend._lib.sk_X509_EXTENSION_value(x, i),
handlers=_EXTENSION_HANDLERS
)
_REVOKED_CERTIFICATE_EXTENSION_PARSER = _X509ExtensionParser(
ext_count=lambda backend, x: backend._lib.X509_REVOKED_get_ext_count(x),
get_ext=lambda backend, x, i: backend._lib.X509_REVOKED_get_ext(x, i),
handlers=_REVOKED_EXTENSION_HANDLERS,
)
_CRL_EXTENSION_PARSER = _X509ExtensionParser(
ext_count=lambda backend, x: backend._lib.X509_CRL_get_ext_count(x),
get_ext=lambda backend, x, i: backend._lib.X509_CRL_get_ext(x, i),
handlers=_CRL_EXTENSION_HANDLERS,
)
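
These decoders are what populate the public x509 extension objects; a sketch of how they surface through the certificate API (cert_pem is an assumed variable holding PEM bytes loaded elsewhere):

from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.x509.oid import ExtensionOID

cert = x509.load_pem_x509_certificate(cert_pem, default_backend())  # cert_pem assumed

# Accessing .extensions runs _CERTIFICATE_EXTENSION_PARSER and the _decode_* handlers.
try:
    bc = cert.extensions.get_extension_for_oid(ExtensionOID.BASIC_CONSTRAINTS)
    print("CA:", bc.value.ca, "path length:", bc.value.path_length)
except x509.ExtensionNotFound:
    print("no basicConstraints extension present")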

@@ -0,0 +1,280 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import dh
def _dh_params_dup(dh_cdata, backend):
lib = backend._lib
ffi = backend._ffi
param_cdata = lib.DHparams_dup(dh_cdata)
backend.openssl_assert(param_cdata != ffi.NULL)
param_cdata = ffi.gc(param_cdata, lib.DH_free)
if lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_102:
        # In OpenSSL versions < 1.0.2 (and in LibreSSL), DHparams_dup doesn't copy q
q = ffi.new("BIGNUM **")
lib.DH_get0_pqg(dh_cdata, ffi.NULL, q, ffi.NULL)
q_dup = lib.BN_dup(q[0])
res = lib.DH_set0_pqg(param_cdata, ffi.NULL, q_dup, ffi.NULL)
backend.openssl_assert(res == 1)
return param_cdata
def _dh_cdata_to_parameters(dh_cdata, backend):
param_cdata = _dh_params_dup(dh_cdata, backend)
return _DHParameters(backend, param_cdata)
@utils.register_interface(dh.DHParametersWithSerialization)
class _DHParameters(object):
def __init__(self, backend, dh_cdata):
self._backend = backend
self._dh_cdata = dh_cdata
def parameter_numbers(self):
p = self._backend._ffi.new("BIGNUM **")
g = self._backend._ffi.new("BIGNUM **")
q = self._backend._ffi.new("BIGNUM **")
self._backend._lib.DH_get0_pqg(self._dh_cdata, p, q, g)
self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
self._backend.openssl_assert(g[0] != self._backend._ffi.NULL)
if q[0] == self._backend._ffi.NULL:
q_val = None
else:
q_val = self._backend._bn_to_int(q[0])
return dh.DHParameterNumbers(
p=self._backend._bn_to_int(p[0]),
g=self._backend._bn_to_int(g[0]),
q=q_val
)
def generate_private_key(self):
return self._backend.generate_dh_private_key(self)
def parameter_bytes(self, encoding, format):
if format is not serialization.ParameterFormat.PKCS3:
raise ValueError(
"Only PKCS3 serialization is supported"
)
if not self._backend._lib.Cryptography_HAS_EVP_PKEY_DHX:
q = self._backend._ffi.new("BIGNUM **")
self._backend._lib.DH_get0_pqg(self._dh_cdata,
self._backend._ffi.NULL,
q,
self._backend._ffi.NULL)
if q[0] != self._backend._ffi.NULL:
raise UnsupportedAlgorithm(
"DH X9.42 serialization is not supported",
_Reasons.UNSUPPORTED_SERIALIZATION)
return self._backend._parameter_bytes(
encoding,
format,
self._dh_cdata
)
def _handle_dh_compute_key_error(errors, backend):
lib = backend._lib
backend.openssl_assert(
errors[0]._lib_reason_match(
lib.ERR_LIB_DH, lib.DH_R_INVALID_PUBKEY
)
)
raise ValueError("Public key value is invalid for this exchange.")
def _get_dh_num_bits(backend, dh_cdata):
p = backend._ffi.new("BIGNUM **")
backend._lib.DH_get0_pqg(dh_cdata, p,
backend._ffi.NULL,
backend._ffi.NULL)
backend.openssl_assert(p[0] != backend._ffi.NULL)
return backend._lib.BN_num_bits(p[0])
@utils.register_interface(dh.DHPrivateKeyWithSerialization)
class _DHPrivateKey(object):
def __init__(self, backend, dh_cdata, evp_pkey):
self._backend = backend
self._dh_cdata = dh_cdata
self._evp_pkey = evp_pkey
self._key_size_bytes = self._backend._lib.DH_size(dh_cdata)
@property
def key_size(self):
return _get_dh_num_bits(self._backend, self._dh_cdata)
def private_numbers(self):
p = self._backend._ffi.new("BIGNUM **")
g = self._backend._ffi.new("BIGNUM **")
q = self._backend._ffi.new("BIGNUM **")
self._backend._lib.DH_get0_pqg(self._dh_cdata, p, q, g)
self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
self._backend.openssl_assert(g[0] != self._backend._ffi.NULL)
if q[0] == self._backend._ffi.NULL:
q_val = None
else:
q_val = self._backend._bn_to_int(q[0])
pub_key = self._backend._ffi.new("BIGNUM **")
priv_key = self._backend._ffi.new("BIGNUM **")
self._backend._lib.DH_get0_key(self._dh_cdata, pub_key, priv_key)
self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL)
self._backend.openssl_assert(priv_key[0] != self._backend._ffi.NULL)
return dh.DHPrivateNumbers(
public_numbers=dh.DHPublicNumbers(
parameter_numbers=dh.DHParameterNumbers(
p=self._backend._bn_to_int(p[0]),
g=self._backend._bn_to_int(g[0]),
q=q_val
),
y=self._backend._bn_to_int(pub_key[0])
),
x=self._backend._bn_to_int(priv_key[0])
)
def exchange(self, peer_public_key):
buf = self._backend._ffi.new("unsigned char[]", self._key_size_bytes)
pub_key = self._backend._ffi.new("BIGNUM **")
self._backend._lib.DH_get0_key(peer_public_key._dh_cdata, pub_key,
self._backend._ffi.NULL)
self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL)
res = self._backend._lib.DH_compute_key(
buf,
pub_key[0],
self._dh_cdata
)
if res == -1:
errors = self._backend._consume_errors()
return _handle_dh_compute_key_error(errors, self._backend)
else:
self._backend.openssl_assert(res >= 1)
key = self._backend._ffi.buffer(buf)[:res]
pad = self._key_size_bytes - len(key)
if pad > 0:
key = (b"\x00" * pad) + key
return key
def public_key(self):
dh_cdata = _dh_params_dup(self._dh_cdata, self._backend)
pub_key = self._backend._ffi.new("BIGNUM **")
self._backend._lib.DH_get0_key(self._dh_cdata,
pub_key, self._backend._ffi.NULL)
self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL)
pub_key_dup = self._backend._lib.BN_dup(pub_key[0])
self._backend.openssl_assert(pub_key_dup != self._backend._ffi.NULL)
res = self._backend._lib.DH_set0_key(dh_cdata,
pub_key_dup,
self._backend._ffi.NULL)
self._backend.openssl_assert(res == 1)
evp_pkey = self._backend._dh_cdata_to_evp_pkey(dh_cdata)
return _DHPublicKey(self._backend, dh_cdata, evp_pkey)
def parameters(self):
return _dh_cdata_to_parameters(self._dh_cdata, self._backend)
def private_bytes(self, encoding, format, encryption_algorithm):
if format is not serialization.PrivateFormat.PKCS8:
raise ValueError(
"DH private keys support only PKCS8 serialization"
)
if not self._backend._lib.Cryptography_HAS_EVP_PKEY_DHX:
q = self._backend._ffi.new("BIGNUM **")
self._backend._lib.DH_get0_pqg(self._dh_cdata,
self._backend._ffi.NULL,
q,
self._backend._ffi.NULL)
if q[0] != self._backend._ffi.NULL:
raise UnsupportedAlgorithm(
"DH X9.42 serialization is not supported",
_Reasons.UNSUPPORTED_SERIALIZATION)
return self._backend._private_key_bytes(
encoding,
format,
encryption_algorithm,
self._evp_pkey,
self._dh_cdata
)
@utils.register_interface(dh.DHPublicKeyWithSerialization)
class _DHPublicKey(object):
def __init__(self, backend, dh_cdata, evp_pkey):
self._backend = backend
self._dh_cdata = dh_cdata
self._evp_pkey = evp_pkey
self._key_size_bits = _get_dh_num_bits(self._backend, self._dh_cdata)
@property
def key_size(self):
return self._key_size_bits
def public_numbers(self):
p = self._backend._ffi.new("BIGNUM **")
g = self._backend._ffi.new("BIGNUM **")
q = self._backend._ffi.new("BIGNUM **")
self._backend._lib.DH_get0_pqg(self._dh_cdata, p, q, g)
self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
self._backend.openssl_assert(g[0] != self._backend._ffi.NULL)
if q[0] == self._backend._ffi.NULL:
q_val = None
else:
q_val = self._backend._bn_to_int(q[0])
pub_key = self._backend._ffi.new("BIGNUM **")
self._backend._lib.DH_get0_key(self._dh_cdata,
pub_key, self._backend._ffi.NULL)
self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL)
return dh.DHPublicNumbers(
parameter_numbers=dh.DHParameterNumbers(
p=self._backend._bn_to_int(p[0]),
g=self._backend._bn_to_int(g[0]),
q=q_val
),
y=self._backend._bn_to_int(pub_key[0])
)
def parameters(self):
return _dh_cdata_to_parameters(self._dh_cdata, self._backend)
def public_bytes(self, encoding, format):
if format is not serialization.PublicFormat.SubjectPublicKeyInfo:
raise ValueError(
"DH public keys support only "
"SubjectPublicKeyInfo serialization"
)
if not self._backend._lib.Cryptography_HAS_EVP_PKEY_DHX:
q = self._backend._ffi.new("BIGNUM **")
self._backend._lib.DH_get0_pqg(self._dh_cdata,
self._backend._ffi.NULL,
q,
self._backend._ffi.NULL)
if q[0] != self._backend._ffi.NULL:
raise UnsupportedAlgorithm(
"DH X9.42 serialization is not supported",
_Reasons.UNSUPPORTED_SERIALIZATION)
return self._backend._public_key_bytes(
encoding,
format,
self,
self._evp_pkey,
None
)
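
A minimal sketch of how this DH backend is typically driven through the public dh module (the generator and 2048-bit size are illustrative):

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import dh

# Shared parameters, then one private key per party.
parameters = dh.generate_parameters(generator=2, key_size=2048,
                                    backend=default_backend())
server_private = parameters.generate_private_key()
client_private = parameters.generate_private_key()

# exchange() lands in _DHPrivateKey.exchange above and returns the raw
# shared secret, left-padded to the key size in bytes.
server_secret = server_private.exchange(client_private.public_key())
client_secret = client_private.exchange(server_private.public_key())
assert server_secret == client_secret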

View File

@ -0,0 +1,269 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.exceptions import InvalidSignature
from cryptography.hazmat.backends.openssl.utils import (
_calculate_digest_and_algorithm, _check_not_prehashed,
_warn_sign_verify_deprecated
)
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import (
AsymmetricSignatureContext, AsymmetricVerificationContext, dsa
)
def _dsa_sig_sign(backend, private_key, data):
sig_buf_len = backend._lib.DSA_size(private_key._dsa_cdata)
sig_buf = backend._ffi.new("unsigned char[]", sig_buf_len)
buflen = backend._ffi.new("unsigned int *")
# The first parameter passed to DSA_sign is unused by OpenSSL but
# must be an integer.
res = backend._lib.DSA_sign(
0, data, len(data), sig_buf, buflen, private_key._dsa_cdata
)
backend.openssl_assert(res == 1)
backend.openssl_assert(buflen[0])
return backend._ffi.buffer(sig_buf)[:buflen[0]]
def _dsa_sig_verify(backend, public_key, signature, data):
# The first parameter passed to DSA_verify is unused by OpenSSL but
# must be an integer.
res = backend._lib.DSA_verify(
0, data, len(data), signature, len(signature), public_key._dsa_cdata
)
if res != 1:
backend._consume_errors()
raise InvalidSignature
@utils.register_interface(AsymmetricVerificationContext)
class _DSAVerificationContext(object):
def __init__(self, backend, public_key, signature, algorithm):
self._backend = backend
self._public_key = public_key
self._signature = signature
self._algorithm = algorithm
self._hash_ctx = hashes.Hash(self._algorithm, self._backend)
def update(self, data):
self._hash_ctx.update(data)
def verify(self):
data_to_verify = self._hash_ctx.finalize()
_dsa_sig_verify(
self._backend, self._public_key, self._signature, data_to_verify
)
@utils.register_interface(AsymmetricSignatureContext)
class _DSASignatureContext(object):
def __init__(self, backend, private_key, algorithm):
self._backend = backend
self._private_key = private_key
self._algorithm = algorithm
self._hash_ctx = hashes.Hash(self._algorithm, self._backend)
def update(self, data):
self._hash_ctx.update(data)
def finalize(self):
data_to_sign = self._hash_ctx.finalize()
return _dsa_sig_sign(self._backend, self._private_key, data_to_sign)
@utils.register_interface(dsa.DSAParametersWithNumbers)
class _DSAParameters(object):
def __init__(self, backend, dsa_cdata):
self._backend = backend
self._dsa_cdata = dsa_cdata
def parameter_numbers(self):
p = self._backend._ffi.new("BIGNUM **")
q = self._backend._ffi.new("BIGNUM **")
g = self._backend._ffi.new("BIGNUM **")
self._backend._lib.DSA_get0_pqg(self._dsa_cdata, p, q, g)
self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
self._backend.openssl_assert(q[0] != self._backend._ffi.NULL)
self._backend.openssl_assert(g[0] != self._backend._ffi.NULL)
return dsa.DSAParameterNumbers(
p=self._backend._bn_to_int(p[0]),
q=self._backend._bn_to_int(q[0]),
g=self._backend._bn_to_int(g[0])
)
def generate_private_key(self):
return self._backend.generate_dsa_private_key(self)
@utils.register_interface(dsa.DSAPrivateKeyWithSerialization)
class _DSAPrivateKey(object):
def __init__(self, backend, dsa_cdata, evp_pkey):
self._backend = backend
self._dsa_cdata = dsa_cdata
self._evp_pkey = evp_pkey
p = self._backend._ffi.new("BIGNUM **")
self._backend._lib.DSA_get0_pqg(
dsa_cdata, p, self._backend._ffi.NULL, self._backend._ffi.NULL
)
self._backend.openssl_assert(p[0] != backend._ffi.NULL)
self._key_size = self._backend._lib.BN_num_bits(p[0])
key_size = utils.read_only_property("_key_size")
def signer(self, signature_algorithm):
_warn_sign_verify_deprecated()
_check_not_prehashed(signature_algorithm)
return _DSASignatureContext(self._backend, self, signature_algorithm)
def private_numbers(self):
p = self._backend._ffi.new("BIGNUM **")
q = self._backend._ffi.new("BIGNUM **")
g = self._backend._ffi.new("BIGNUM **")
pub_key = self._backend._ffi.new("BIGNUM **")
priv_key = self._backend._ffi.new("BIGNUM **")
self._backend._lib.DSA_get0_pqg(self._dsa_cdata, p, q, g)
self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
self._backend.openssl_assert(q[0] != self._backend._ffi.NULL)
self._backend.openssl_assert(g[0] != self._backend._ffi.NULL)
self._backend._lib.DSA_get0_key(self._dsa_cdata, pub_key, priv_key)
self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL)
self._backend.openssl_assert(priv_key[0] != self._backend._ffi.NULL)
return dsa.DSAPrivateNumbers(
public_numbers=dsa.DSAPublicNumbers(
parameter_numbers=dsa.DSAParameterNumbers(
p=self._backend._bn_to_int(p[0]),
q=self._backend._bn_to_int(q[0]),
g=self._backend._bn_to_int(g[0])
),
y=self._backend._bn_to_int(pub_key[0])
),
x=self._backend._bn_to_int(priv_key[0])
)
def public_key(self):
dsa_cdata = self._backend._lib.DSAparams_dup(self._dsa_cdata)
self._backend.openssl_assert(dsa_cdata != self._backend._ffi.NULL)
dsa_cdata = self._backend._ffi.gc(
dsa_cdata, self._backend._lib.DSA_free
)
pub_key = self._backend._ffi.new("BIGNUM **")
self._backend._lib.DSA_get0_key(
self._dsa_cdata, pub_key, self._backend._ffi.NULL
)
self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL)
pub_key_dup = self._backend._lib.BN_dup(pub_key[0])
res = self._backend._lib.DSA_set0_key(
dsa_cdata, pub_key_dup, self._backend._ffi.NULL
)
self._backend.openssl_assert(res == 1)
evp_pkey = self._backend._dsa_cdata_to_evp_pkey(dsa_cdata)
return _DSAPublicKey(self._backend, dsa_cdata, evp_pkey)
def parameters(self):
dsa_cdata = self._backend._lib.DSAparams_dup(self._dsa_cdata)
self._backend.openssl_assert(dsa_cdata != self._backend._ffi.NULL)
dsa_cdata = self._backend._ffi.gc(
dsa_cdata, self._backend._lib.DSA_free
)
return _DSAParameters(self._backend, dsa_cdata)
def private_bytes(self, encoding, format, encryption_algorithm):
return self._backend._private_key_bytes(
encoding,
format,
encryption_algorithm,
self._evp_pkey,
self._dsa_cdata
)
def sign(self, data, algorithm):
data, algorithm = _calculate_digest_and_algorithm(
self._backend, data, algorithm
)
return _dsa_sig_sign(self._backend, self, data)
@utils.register_interface(dsa.DSAPublicKeyWithSerialization)
class _DSAPublicKey(object):
def __init__(self, backend, dsa_cdata, evp_pkey):
self._backend = backend
self._dsa_cdata = dsa_cdata
self._evp_pkey = evp_pkey
p = self._backend._ffi.new("BIGNUM **")
self._backend._lib.DSA_get0_pqg(
dsa_cdata, p, self._backend._ffi.NULL, self._backend._ffi.NULL
)
self._backend.openssl_assert(p[0] != backend._ffi.NULL)
self._key_size = self._backend._lib.BN_num_bits(p[0])
key_size = utils.read_only_property("_key_size")
def verifier(self, signature, signature_algorithm):
_warn_sign_verify_deprecated()
if not isinstance(signature, bytes):
raise TypeError("signature must be bytes.")
_check_not_prehashed(signature_algorithm)
return _DSAVerificationContext(
self._backend, self, signature, signature_algorithm
)
def public_numbers(self):
p = self._backend._ffi.new("BIGNUM **")
q = self._backend._ffi.new("BIGNUM **")
g = self._backend._ffi.new("BIGNUM **")
pub_key = self._backend._ffi.new("BIGNUM **")
self._backend._lib.DSA_get0_pqg(self._dsa_cdata, p, q, g)
self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
self._backend.openssl_assert(q[0] != self._backend._ffi.NULL)
self._backend.openssl_assert(g[0] != self._backend._ffi.NULL)
self._backend._lib.DSA_get0_key(
self._dsa_cdata, pub_key, self._backend._ffi.NULL
)
self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL)
return dsa.DSAPublicNumbers(
parameter_numbers=dsa.DSAParameterNumbers(
p=self._backend._bn_to_int(p[0]),
q=self._backend._bn_to_int(q[0]),
g=self._backend._bn_to_int(g[0])
),
y=self._backend._bn_to_int(pub_key[0])
)
def parameters(self):
dsa_cdata = self._backend._lib.DSAparams_dup(self._dsa_cdata)
dsa_cdata = self._backend._ffi.gc(
dsa_cdata, self._backend._lib.DSA_free
)
return _DSAParameters(self._backend, dsa_cdata)
def public_bytes(self, encoding, format):
if format is serialization.PublicFormat.PKCS1:
raise ValueError(
"DSA public keys do not support PKCS1 serialization"
)
return self._backend._public_key_bytes(
encoding,
format,
self,
self._evp_pkey,
None
)
def verify(self, signature, data, algorithm):
data, algorithm = _calculate_digest_and_algorithm(
self._backend, data, algorithm
)
return _dsa_sig_verify(self._backend, self, signature, data)
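
A minimal sketch of the sign/verify round trip that ends up in _dsa_sig_sign and _dsa_sig_verify above (key size and hash choice are illustrative):

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import dsa

private_key = dsa.generate_private_key(key_size=2048,
                                        backend=default_backend())
signature = private_key.sign(b"signed data", hashes.SHA256())

# verify() returns None on success and raises InvalidSignature otherwise.
private_key.public_key().verify(signature, b"signed data", hashes.SHA256())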

View File

@ -0,0 +1,298 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.exceptions import (
InvalidSignature, UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.backends.openssl.utils import (
_calculate_digest_and_algorithm, _check_not_prehashed,
_warn_sign_verify_deprecated
)
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import (
AsymmetricSignatureContext, AsymmetricVerificationContext, ec
)
def _check_signature_algorithm(signature_algorithm):
if not isinstance(signature_algorithm, ec.ECDSA):
raise UnsupportedAlgorithm(
"Unsupported elliptic curve signature algorithm.",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def _ec_key_curve_sn(backend, ec_key):
group = backend._lib.EC_KEY_get0_group(ec_key)
backend.openssl_assert(group != backend._ffi.NULL)
nid = backend._lib.EC_GROUP_get_curve_name(group)
    # Reject EC keys with unnamed curves for now; only named curves are
    # supported, so raise an error here.
if nid == backend._lib.NID_undef:
raise NotImplementedError(
"ECDSA certificates with unnamed curves are unsupported "
"at this time"
)
curve_name = backend._lib.OBJ_nid2sn(nid)
backend.openssl_assert(curve_name != backend._ffi.NULL)
sn = backend._ffi.string(curve_name).decode('ascii')
return sn
def _mark_asn1_named_ec_curve(backend, ec_cdata):
"""
Set the named curve flag on the EC_KEY. This causes OpenSSL to
serialize EC keys along with their curve OID which makes
deserialization easier.
"""
backend._lib.EC_KEY_set_asn1_flag(
ec_cdata, backend._lib.OPENSSL_EC_NAMED_CURVE
)
def _sn_to_elliptic_curve(backend, sn):
try:
return ec._CURVE_TYPES[sn]()
except KeyError:
raise UnsupportedAlgorithm(
"{0} is not a supported elliptic curve".format(sn),
_Reasons.UNSUPPORTED_ELLIPTIC_CURVE
)
def _ecdsa_sig_sign(backend, private_key, data):
max_size = backend._lib.ECDSA_size(private_key._ec_key)
backend.openssl_assert(max_size > 0)
sigbuf = backend._ffi.new("unsigned char[]", max_size)
siglen_ptr = backend._ffi.new("unsigned int[]", 1)
res = backend._lib.ECDSA_sign(
0, data, len(data), sigbuf, siglen_ptr, private_key._ec_key
)
backend.openssl_assert(res == 1)
return backend._ffi.buffer(sigbuf)[:siglen_ptr[0]]
def _ecdsa_sig_verify(backend, public_key, signature, data):
res = backend._lib.ECDSA_verify(
0, data, len(data), signature, len(signature), public_key._ec_key
)
if res != 1:
backend._consume_errors()
raise InvalidSignature
@utils.register_interface(AsymmetricSignatureContext)
class _ECDSASignatureContext(object):
def __init__(self, backend, private_key, algorithm):
self._backend = backend
self._private_key = private_key
self._digest = hashes.Hash(algorithm, backend)
def update(self, data):
self._digest.update(data)
def finalize(self):
digest = self._digest.finalize()
return _ecdsa_sig_sign(self._backend, self._private_key, digest)
@utils.register_interface(AsymmetricVerificationContext)
class _ECDSAVerificationContext(object):
def __init__(self, backend, public_key, signature, algorithm):
self._backend = backend
self._public_key = public_key
self._signature = signature
self._digest = hashes.Hash(algorithm, backend)
def update(self, data):
self._digest.update(data)
def verify(self):
digest = self._digest.finalize()
_ecdsa_sig_verify(
self._backend, self._public_key, self._signature, digest
)
@utils.register_interface(ec.EllipticCurvePrivateKeyWithSerialization)
class _EllipticCurvePrivateKey(object):
def __init__(self, backend, ec_key_cdata, evp_pkey):
self._backend = backend
_mark_asn1_named_ec_curve(backend, ec_key_cdata)
self._ec_key = ec_key_cdata
self._evp_pkey = evp_pkey
sn = _ec_key_curve_sn(backend, ec_key_cdata)
self._curve = _sn_to_elliptic_curve(backend, sn)
curve = utils.read_only_property("_curve")
@property
def key_size(self):
return self.curve.key_size
def signer(self, signature_algorithm):
_warn_sign_verify_deprecated()
_check_signature_algorithm(signature_algorithm)
_check_not_prehashed(signature_algorithm.algorithm)
return _ECDSASignatureContext(
self._backend, self, signature_algorithm.algorithm
)
def exchange(self, algorithm, peer_public_key):
if not (
self._backend.elliptic_curve_exchange_algorithm_supported(
algorithm, self.curve
)
):
raise UnsupportedAlgorithm(
"This backend does not support the ECDH algorithm.",
_Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM
)
if peer_public_key.curve.name != self.curve.name:
raise ValueError(
"peer_public_key and self are not on the same curve"
)
group = self._backend._lib.EC_KEY_get0_group(self._ec_key)
z_len = (self._backend._lib.EC_GROUP_get_degree(group) + 7) // 8
self._backend.openssl_assert(z_len > 0)
z_buf = self._backend._ffi.new("uint8_t[]", z_len)
peer_key = self._backend._lib.EC_KEY_get0_public_key(
peer_public_key._ec_key
)
r = self._backend._lib.ECDH_compute_key(
z_buf, z_len, peer_key, self._ec_key, self._backend._ffi.NULL
)
self._backend.openssl_assert(r > 0)
return self._backend._ffi.buffer(z_buf)[:z_len]
def public_key(self):
group = self._backend._lib.EC_KEY_get0_group(self._ec_key)
self._backend.openssl_assert(group != self._backend._ffi.NULL)
curve_nid = self._backend._lib.EC_GROUP_get_curve_name(group)
public_ec_key = self._backend._lib.EC_KEY_new_by_curve_name(curve_nid)
self._backend.openssl_assert(public_ec_key != self._backend._ffi.NULL)
public_ec_key = self._backend._ffi.gc(
public_ec_key, self._backend._lib.EC_KEY_free
)
point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key)
self._backend.openssl_assert(point != self._backend._ffi.NULL)
res = self._backend._lib.EC_KEY_set_public_key(public_ec_key, point)
self._backend.openssl_assert(res == 1)
evp_pkey = self._backend._ec_cdata_to_evp_pkey(public_ec_key)
return _EllipticCurvePublicKey(self._backend, public_ec_key, evp_pkey)
def private_numbers(self):
bn = self._backend._lib.EC_KEY_get0_private_key(self._ec_key)
private_value = self._backend._bn_to_int(bn)
return ec.EllipticCurvePrivateNumbers(
private_value=private_value,
public_numbers=self.public_key().public_numbers()
)
def private_bytes(self, encoding, format, encryption_algorithm):
return self._backend._private_key_bytes(
encoding,
format,
encryption_algorithm,
self._evp_pkey,
self._ec_key
)
def sign(self, data, signature_algorithm):
_check_signature_algorithm(signature_algorithm)
data, algorithm = _calculate_digest_and_algorithm(
self._backend, data, signature_algorithm._algorithm
)
return _ecdsa_sig_sign(self._backend, self, data)
@utils.register_interface(ec.EllipticCurvePublicKeyWithSerialization)
class _EllipticCurvePublicKey(object):
def __init__(self, backend, ec_key_cdata, evp_pkey):
self._backend = backend
_mark_asn1_named_ec_curve(backend, ec_key_cdata)
self._ec_key = ec_key_cdata
self._evp_pkey = evp_pkey
sn = _ec_key_curve_sn(backend, ec_key_cdata)
self._curve = _sn_to_elliptic_curve(backend, sn)
curve = utils.read_only_property("_curve")
@property
def key_size(self):
return self.curve.key_size
def verifier(self, signature, signature_algorithm):
_warn_sign_verify_deprecated()
if not isinstance(signature, bytes):
raise TypeError("signature must be bytes.")
_check_signature_algorithm(signature_algorithm)
_check_not_prehashed(signature_algorithm.algorithm)
return _ECDSAVerificationContext(
self._backend, self, signature, signature_algorithm.algorithm
)
def public_numbers(self):
get_func, group = (
self._backend._ec_key_determine_group_get_func(self._ec_key)
)
point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key)
self._backend.openssl_assert(point != self._backend._ffi.NULL)
with self._backend._tmp_bn_ctx() as bn_ctx:
bn_x = self._backend._lib.BN_CTX_get(bn_ctx)
bn_y = self._backend._lib.BN_CTX_get(bn_ctx)
res = get_func(group, point, bn_x, bn_y, bn_ctx)
self._backend.openssl_assert(res == 1)
x = self._backend._bn_to_int(bn_x)
y = self._backend._bn_to_int(bn_y)
return ec.EllipticCurvePublicNumbers(
x=x,
y=y,
curve=self._curve
)
def public_bytes(self, encoding, format):
if format is serialization.PublicFormat.PKCS1:
raise ValueError(
"EC public keys do not support PKCS1 serialization"
)
return self._backend._public_key_bytes(
encoding,
format,
self,
self._evp_pkey,
None
)
def verify(self, signature, data, signature_algorithm):
_check_signature_algorithm(signature_algorithm)
data, algorithm = _calculate_digest_and_algorithm(
self._backend, data, signature_algorithm._algorithm
)
_ecdsa_sig_verify(self._backend, self, signature, data)
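
A minimal sketch of ECDSA signing plus an ECDH exchange against the backend above (curve and hash choices are illustrative):

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import ec

private_key = ec.generate_private_key(ec.SECP256R1(), default_backend())
signature = private_key.sign(b"signed data", ec.ECDSA(hashes.SHA256()))
private_key.public_key().verify(signature, b"signed data",
                                ec.ECDSA(hashes.SHA256()))

# ECDH: both sides must be on the same curve, as checked in exchange().
peer = ec.generate_private_key(ec.SECP256R1(), default_backend())
shared_secret = private_key.exchange(ec.ECDH(), peer.public_key())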

View File

@ -0,0 +1,611 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import calendar
import ipaddress
import six
from cryptography import utils, x509
from cryptography.hazmat.backends.openssl.decode_asn1 import (
_CRL_ENTRY_REASON_ENUM_TO_CODE, _DISTPOINT_TYPE_FULLNAME,
_DISTPOINT_TYPE_RELATIVENAME
)
from cryptography.x509.name import _ASN1Type
from cryptography.x509.oid import CRLEntryExtensionOID, ExtensionOID
def _encode_asn1_int(backend, x):
"""
Converts a python integer to an ASN1_INTEGER. The returned ASN1_INTEGER
will not be garbage collected (to support adding them to structs that take
ownership of the object). Be sure to register it for GC if it will be
discarded after use.
"""
# Convert Python integer to OpenSSL "bignum" in case value exceeds
# machine's native integer limits (note: `int_to_bn` doesn't automatically
# GC).
i = backend._int_to_bn(x)
i = backend._ffi.gc(i, backend._lib.BN_free)
# Wrap in an ASN.1 integer. Don't GC -- as documented.
i = backend._lib.BN_to_ASN1_INTEGER(i, backend._ffi.NULL)
backend.openssl_assert(i != backend._ffi.NULL)
return i
def _encode_asn1_int_gc(backend, x):
i = _encode_asn1_int(backend, x)
i = backend._ffi.gc(i, backend._lib.ASN1_INTEGER_free)
return i
def _encode_asn1_str(backend, data, length):
"""
Create an ASN1_OCTET_STRING from a Python byte string.
"""
s = backend._lib.ASN1_OCTET_STRING_new()
res = backend._lib.ASN1_OCTET_STRING_set(s, data, length)
backend.openssl_assert(res == 1)
return s
def _encode_asn1_utf8_str(backend, string):
"""
Create an ASN1_UTF8STRING from a Python unicode string.
This object will be an ASN1_STRING with UTF8 type in OpenSSL and
can be decoded with ASN1_STRING_to_UTF8.
"""
s = backend._lib.ASN1_UTF8STRING_new()
res = backend._lib.ASN1_STRING_set(
s, string.encode("utf8"), len(string.encode("utf8"))
)
backend.openssl_assert(res == 1)
return s
def _encode_asn1_str_gc(backend, data, length):
s = _encode_asn1_str(backend, data, length)
s = backend._ffi.gc(s, backend._lib.ASN1_OCTET_STRING_free)
return s
def _encode_inhibit_any_policy(backend, inhibit_any_policy):
return _encode_asn1_int_gc(backend, inhibit_any_policy.skip_certs)
def _encode_name(backend, name):
"""
The X509_NAME created will not be gc'd. Use _encode_name_gc if needed.
"""
subject = backend._lib.X509_NAME_new()
for rdn in name.rdns:
set_flag = 0 # indicate whether to add to last RDN or create new RDN
for attribute in rdn:
name_entry = _encode_name_entry(backend, attribute)
# X509_NAME_add_entry dups the object so we need to gc this copy
name_entry = backend._ffi.gc(
name_entry, backend._lib.X509_NAME_ENTRY_free
)
res = backend._lib.X509_NAME_add_entry(
subject, name_entry, -1, set_flag)
backend.openssl_assert(res == 1)
set_flag = -1
return subject
def _encode_name_gc(backend, attributes):
subject = _encode_name(backend, attributes)
subject = backend._ffi.gc(subject, backend._lib.X509_NAME_free)
return subject
def _encode_sk_name_entry(backend, attributes):
"""
The sk_X509_NAME_ENTRY created will not be gc'd.
"""
stack = backend._lib.sk_X509_NAME_ENTRY_new_null()
for attribute in attributes:
name_entry = _encode_name_entry(backend, attribute)
res = backend._lib.sk_X509_NAME_ENTRY_push(stack, name_entry)
backend.openssl_assert(res == 1)
return stack
def _encode_name_entry(backend, attribute):
if attribute._type is _ASN1Type.BMPString:
value = attribute.value.encode('utf_16_be')
else:
value = attribute.value.encode('utf8')
obj = _txt2obj_gc(backend, attribute.oid.dotted_string)
name_entry = backend._lib.X509_NAME_ENTRY_create_by_OBJ(
backend._ffi.NULL, obj, attribute._type.value, value, len(value)
)
return name_entry
def _encode_crl_number_delta_crl_indicator(backend, ext):
return _encode_asn1_int_gc(backend, ext.crl_number)
def _encode_crl_reason(backend, crl_reason):
asn1enum = backend._lib.ASN1_ENUMERATED_new()
backend.openssl_assert(asn1enum != backend._ffi.NULL)
asn1enum = backend._ffi.gc(asn1enum, backend._lib.ASN1_ENUMERATED_free)
res = backend._lib.ASN1_ENUMERATED_set(
asn1enum, _CRL_ENTRY_REASON_ENUM_TO_CODE[crl_reason.reason]
)
backend.openssl_assert(res == 1)
return asn1enum
def _encode_invalidity_date(backend, invalidity_date):
time = backend._lib.ASN1_GENERALIZEDTIME_set(
backend._ffi.NULL, calendar.timegm(
invalidity_date.invalidity_date.timetuple()
)
)
backend.openssl_assert(time != backend._ffi.NULL)
time = backend._ffi.gc(time, backend._lib.ASN1_GENERALIZEDTIME_free)
return time
def _encode_certificate_policies(backend, certificate_policies):
cp = backend._lib.sk_POLICYINFO_new_null()
backend.openssl_assert(cp != backend._ffi.NULL)
cp = backend._ffi.gc(cp, backend._lib.sk_POLICYINFO_free)
for policy_info in certificate_policies:
pi = backend._lib.POLICYINFO_new()
backend.openssl_assert(pi != backend._ffi.NULL)
res = backend._lib.sk_POLICYINFO_push(cp, pi)
backend.openssl_assert(res >= 1)
oid = _txt2obj(backend, policy_info.policy_identifier.dotted_string)
pi.policyid = oid
if policy_info.policy_qualifiers:
pqis = backend._lib.sk_POLICYQUALINFO_new_null()
backend.openssl_assert(pqis != backend._ffi.NULL)
for qualifier in policy_info.policy_qualifiers:
pqi = backend._lib.POLICYQUALINFO_new()
backend.openssl_assert(pqi != backend._ffi.NULL)
res = backend._lib.sk_POLICYQUALINFO_push(pqis, pqi)
backend.openssl_assert(res >= 1)
if isinstance(qualifier, six.text_type):
pqi.pqualid = _txt2obj(
backend, x509.OID_CPS_QUALIFIER.dotted_string
)
pqi.d.cpsuri = _encode_asn1_str(
backend,
qualifier.encode("ascii"),
len(qualifier.encode("ascii"))
)
else:
assert isinstance(qualifier, x509.UserNotice)
pqi.pqualid = _txt2obj(
backend, x509.OID_CPS_USER_NOTICE.dotted_string
)
un = backend._lib.USERNOTICE_new()
backend.openssl_assert(un != backend._ffi.NULL)
pqi.d.usernotice = un
if qualifier.explicit_text:
un.exptext = _encode_asn1_utf8_str(
backend, qualifier.explicit_text
)
un.noticeref = _encode_notice_reference(
backend, qualifier.notice_reference
)
pi.qualifiers = pqis
return cp
def _encode_notice_reference(backend, notice):
if notice is None:
return backend._ffi.NULL
else:
nr = backend._lib.NOTICEREF_new()
backend.openssl_assert(nr != backend._ffi.NULL)
# organization is a required field
nr.organization = _encode_asn1_utf8_str(backend, notice.organization)
notice_stack = backend._lib.sk_ASN1_INTEGER_new_null()
nr.noticenos = notice_stack
for number in notice.notice_numbers:
num = _encode_asn1_int(backend, number)
res = backend._lib.sk_ASN1_INTEGER_push(notice_stack, num)
backend.openssl_assert(res >= 1)
return nr
def _txt2obj(backend, name):
"""
Converts a Python string with an ASN.1 object ID in dotted form to a
ASN1_OBJECT.
"""
name = name.encode('ascii')
obj = backend._lib.OBJ_txt2obj(name, 1)
backend.openssl_assert(obj != backend._ffi.NULL)
return obj
def _txt2obj_gc(backend, name):
obj = _txt2obj(backend, name)
obj = backend._ffi.gc(obj, backend._lib.ASN1_OBJECT_free)
return obj
def _encode_ocsp_nocheck(backend, ext):
"""
The OCSP No Check extension is defined as a null ASN.1 value embedded in
an ASN.1 string.
"""
return _encode_asn1_str_gc(backend, b"\x05\x00", 2)
def _encode_key_usage(backend, key_usage):
set_bit = backend._lib.ASN1_BIT_STRING_set_bit
ku = backend._lib.ASN1_BIT_STRING_new()
ku = backend._ffi.gc(ku, backend._lib.ASN1_BIT_STRING_free)
res = set_bit(ku, 0, key_usage.digital_signature)
backend.openssl_assert(res == 1)
res = set_bit(ku, 1, key_usage.content_commitment)
backend.openssl_assert(res == 1)
res = set_bit(ku, 2, key_usage.key_encipherment)
backend.openssl_assert(res == 1)
res = set_bit(ku, 3, key_usage.data_encipherment)
backend.openssl_assert(res == 1)
res = set_bit(ku, 4, key_usage.key_agreement)
backend.openssl_assert(res == 1)
res = set_bit(ku, 5, key_usage.key_cert_sign)
backend.openssl_assert(res == 1)
res = set_bit(ku, 6, key_usage.crl_sign)
backend.openssl_assert(res == 1)
if key_usage.key_agreement:
res = set_bit(ku, 7, key_usage.encipher_only)
backend.openssl_assert(res == 1)
res = set_bit(ku, 8, key_usage.decipher_only)
backend.openssl_assert(res == 1)
else:
res = set_bit(ku, 7, 0)
backend.openssl_assert(res == 1)
res = set_bit(ku, 8, 0)
backend.openssl_assert(res == 1)
return ku
def _encode_authority_key_identifier(backend, authority_keyid):
akid = backend._lib.AUTHORITY_KEYID_new()
backend.openssl_assert(akid != backend._ffi.NULL)
akid = backend._ffi.gc(akid, backend._lib.AUTHORITY_KEYID_free)
if authority_keyid.key_identifier is not None:
akid.keyid = _encode_asn1_str(
backend,
authority_keyid.key_identifier,
len(authority_keyid.key_identifier)
)
if authority_keyid.authority_cert_issuer is not None:
akid.issuer = _encode_general_names(
backend, authority_keyid.authority_cert_issuer
)
if authority_keyid.authority_cert_serial_number is not None:
akid.serial = _encode_asn1_int(
backend, authority_keyid.authority_cert_serial_number
)
return akid
def _encode_basic_constraints(backend, basic_constraints):
constraints = backend._lib.BASIC_CONSTRAINTS_new()
constraints = backend._ffi.gc(
constraints, backend._lib.BASIC_CONSTRAINTS_free
)
constraints.ca = 255 if basic_constraints.ca else 0
if basic_constraints.ca and basic_constraints.path_length is not None:
constraints.pathlen = _encode_asn1_int(
backend, basic_constraints.path_length
)
return constraints
def _encode_authority_information_access(backend, authority_info_access):
aia = backend._lib.sk_ACCESS_DESCRIPTION_new_null()
backend.openssl_assert(aia != backend._ffi.NULL)
aia = backend._ffi.gc(
aia, backend._lib.sk_ACCESS_DESCRIPTION_free
)
for access_description in authority_info_access:
ad = backend._lib.ACCESS_DESCRIPTION_new()
method = _txt2obj(
backend, access_description.access_method.dotted_string
)
gn = _encode_general_name(backend, access_description.access_location)
ad.method = method
ad.location = gn
res = backend._lib.sk_ACCESS_DESCRIPTION_push(aia, ad)
backend.openssl_assert(res >= 1)
return aia
def _encode_general_names(backend, names):
general_names = backend._lib.GENERAL_NAMES_new()
backend.openssl_assert(general_names != backend._ffi.NULL)
for name in names:
gn = _encode_general_name(backend, name)
res = backend._lib.sk_GENERAL_NAME_push(general_names, gn)
backend.openssl_assert(res != 0)
return general_names
def _encode_alt_name(backend, san):
general_names = _encode_general_names(backend, san)
general_names = backend._ffi.gc(
general_names, backend._lib.GENERAL_NAMES_free
)
return general_names
def _encode_subject_key_identifier(backend, ski):
return _encode_asn1_str_gc(backend, ski.digest, len(ski.digest))
def _encode_general_name(backend, name):
if isinstance(name, x509.DNSName):
gn = backend._lib.GENERAL_NAME_new()
backend.openssl_assert(gn != backend._ffi.NULL)
gn.type = backend._lib.GEN_DNS
ia5 = backend._lib.ASN1_IA5STRING_new()
backend.openssl_assert(ia5 != backend._ffi.NULL)
# ia5strings are supposed to be ITU T.50 but to allow round-tripping
# of broken certs that encode utf8 we'll encode utf8 here too.
value = name.value.encode("utf8")
res = backend._lib.ASN1_STRING_set(ia5, value, len(value))
backend.openssl_assert(res == 1)
gn.d.dNSName = ia5
elif isinstance(name, x509.RegisteredID):
gn = backend._lib.GENERAL_NAME_new()
backend.openssl_assert(gn != backend._ffi.NULL)
gn.type = backend._lib.GEN_RID
obj = backend._lib.OBJ_txt2obj(
name.value.dotted_string.encode('ascii'), 1
)
backend.openssl_assert(obj != backend._ffi.NULL)
gn.d.registeredID = obj
elif isinstance(name, x509.DirectoryName):
gn = backend._lib.GENERAL_NAME_new()
backend.openssl_assert(gn != backend._ffi.NULL)
dir_name = _encode_name(backend, name.value)
gn.type = backend._lib.GEN_DIRNAME
gn.d.directoryName = dir_name
elif isinstance(name, x509.IPAddress):
gn = backend._lib.GENERAL_NAME_new()
backend.openssl_assert(gn != backend._ffi.NULL)
if isinstance(name.value, ipaddress.IPv4Network):
packed = (
name.value.network_address.packed +
utils.int_to_bytes(((1 << 32) - name.value.num_addresses), 4)
)
elif isinstance(name.value, ipaddress.IPv6Network):
packed = (
name.value.network_address.packed +
utils.int_to_bytes((1 << 128) - name.value.num_addresses, 16)
)
else:
packed = name.value.packed
ipaddr = _encode_asn1_str(backend, packed, len(packed))
gn.type = backend._lib.GEN_IPADD
gn.d.iPAddress = ipaddr
elif isinstance(name, x509.OtherName):
gn = backend._lib.GENERAL_NAME_new()
backend.openssl_assert(gn != backend._ffi.NULL)
other_name = backend._lib.OTHERNAME_new()
backend.openssl_assert(other_name != backend._ffi.NULL)
type_id = backend._lib.OBJ_txt2obj(
name.type_id.dotted_string.encode('ascii'), 1
)
backend.openssl_assert(type_id != backend._ffi.NULL)
data = backend._ffi.new("unsigned char[]", name.value)
data_ptr_ptr = backend._ffi.new("unsigned char **")
data_ptr_ptr[0] = data
value = backend._lib.d2i_ASN1_TYPE(
backend._ffi.NULL, data_ptr_ptr, len(name.value)
)
if value == backend._ffi.NULL:
backend._consume_errors()
raise ValueError("Invalid ASN.1 data")
other_name.type_id = type_id
other_name.value = value
gn.type = backend._lib.GEN_OTHERNAME
gn.d.otherName = other_name
elif isinstance(name, x509.RFC822Name):
gn = backend._lib.GENERAL_NAME_new()
backend.openssl_assert(gn != backend._ffi.NULL)
# ia5strings are supposed to be ITU T.50 but to allow round-tripping
# of broken certs that encode utf8 we'll encode utf8 here too.
data = name.value.encode("utf8")
asn1_str = _encode_asn1_str(backend, data, len(data))
gn.type = backend._lib.GEN_EMAIL
gn.d.rfc822Name = asn1_str
elif isinstance(name, x509.UniformResourceIdentifier):
gn = backend._lib.GENERAL_NAME_new()
backend.openssl_assert(gn != backend._ffi.NULL)
# ia5strings are supposed to be ITU T.50 but to allow round-tripping
# of broken certs that encode utf8 we'll encode utf8 here too.
data = name.value.encode("utf8")
asn1_str = _encode_asn1_str(backend, data, len(data))
gn.type = backend._lib.GEN_URI
gn.d.uniformResourceIdentifier = asn1_str
else:
raise ValueError(
"{0} is an unknown GeneralName type".format(name)
)
return gn
def _encode_extended_key_usage(backend, extended_key_usage):
eku = backend._lib.sk_ASN1_OBJECT_new_null()
eku = backend._ffi.gc(eku, backend._lib.sk_ASN1_OBJECT_free)
for oid in extended_key_usage:
obj = _txt2obj(backend, oid.dotted_string)
res = backend._lib.sk_ASN1_OBJECT_push(eku, obj)
backend.openssl_assert(res >= 1)
return eku
_CRLREASONFLAGS = {
x509.ReasonFlags.key_compromise: 1,
x509.ReasonFlags.ca_compromise: 2,
x509.ReasonFlags.affiliation_changed: 3,
x509.ReasonFlags.superseded: 4,
x509.ReasonFlags.cessation_of_operation: 5,
x509.ReasonFlags.certificate_hold: 6,
x509.ReasonFlags.privilege_withdrawn: 7,
x509.ReasonFlags.aa_compromise: 8,
}
def _encode_cdps_freshest_crl(backend, cdps):
cdp = backend._lib.sk_DIST_POINT_new_null()
cdp = backend._ffi.gc(cdp, backend._lib.sk_DIST_POINT_free)
for point in cdps:
dp = backend._lib.DIST_POINT_new()
backend.openssl_assert(dp != backend._ffi.NULL)
if point.reasons:
bitmask = backend._lib.ASN1_BIT_STRING_new()
backend.openssl_assert(bitmask != backend._ffi.NULL)
dp.reasons = bitmask
for reason in point.reasons:
res = backend._lib.ASN1_BIT_STRING_set_bit(
bitmask, _CRLREASONFLAGS[reason], 1
)
backend.openssl_assert(res == 1)
if point.full_name:
dpn = backend._lib.DIST_POINT_NAME_new()
backend.openssl_assert(dpn != backend._ffi.NULL)
dpn.type = _DISTPOINT_TYPE_FULLNAME
dpn.name.fullname = _encode_general_names(backend, point.full_name)
dp.distpoint = dpn
if point.relative_name:
dpn = backend._lib.DIST_POINT_NAME_new()
backend.openssl_assert(dpn != backend._ffi.NULL)
dpn.type = _DISTPOINT_TYPE_RELATIVENAME
relativename = _encode_sk_name_entry(backend, point.relative_name)
backend.openssl_assert(relativename != backend._ffi.NULL)
dpn.name.relativename = relativename
dp.distpoint = dpn
if point.crl_issuer:
dp.CRLissuer = _encode_general_names(backend, point.crl_issuer)
res = backend._lib.sk_DIST_POINT_push(cdp, dp)
backend.openssl_assert(res >= 1)
return cdp
def _encode_name_constraints(backend, name_constraints):
nc = backend._lib.NAME_CONSTRAINTS_new()
backend.openssl_assert(nc != backend._ffi.NULL)
nc = backend._ffi.gc(nc, backend._lib.NAME_CONSTRAINTS_free)
permitted = _encode_general_subtree(
backend, name_constraints.permitted_subtrees
)
nc.permittedSubtrees = permitted
excluded = _encode_general_subtree(
backend, name_constraints.excluded_subtrees
)
nc.excludedSubtrees = excluded
return nc
def _encode_policy_constraints(backend, policy_constraints):
pc = backend._lib.POLICY_CONSTRAINTS_new()
backend.openssl_assert(pc != backend._ffi.NULL)
pc = backend._ffi.gc(pc, backend._lib.POLICY_CONSTRAINTS_free)
if policy_constraints.require_explicit_policy is not None:
pc.requireExplicitPolicy = _encode_asn1_int(
backend, policy_constraints.require_explicit_policy
)
if policy_constraints.inhibit_policy_mapping is not None:
pc.inhibitPolicyMapping = _encode_asn1_int(
backend, policy_constraints.inhibit_policy_mapping
)
return pc
def _encode_general_subtree(backend, subtrees):
if subtrees is None:
return backend._ffi.NULL
else:
general_subtrees = backend._lib.sk_GENERAL_SUBTREE_new_null()
for name in subtrees:
gs = backend._lib.GENERAL_SUBTREE_new()
gs.base = _encode_general_name(backend, name)
res = backend._lib.sk_GENERAL_SUBTREE_push(general_subtrees, gs)
assert res >= 1
return general_subtrees
_EXTENSION_ENCODE_HANDLERS = {
ExtensionOID.BASIC_CONSTRAINTS: _encode_basic_constraints,
ExtensionOID.SUBJECT_KEY_IDENTIFIER: _encode_subject_key_identifier,
ExtensionOID.KEY_USAGE: _encode_key_usage,
ExtensionOID.SUBJECT_ALTERNATIVE_NAME: _encode_alt_name,
ExtensionOID.ISSUER_ALTERNATIVE_NAME: _encode_alt_name,
ExtensionOID.EXTENDED_KEY_USAGE: _encode_extended_key_usage,
ExtensionOID.AUTHORITY_KEY_IDENTIFIER: _encode_authority_key_identifier,
ExtensionOID.CERTIFICATE_POLICIES: _encode_certificate_policies,
ExtensionOID.AUTHORITY_INFORMATION_ACCESS: (
_encode_authority_information_access
),
ExtensionOID.CRL_DISTRIBUTION_POINTS: _encode_cdps_freshest_crl,
ExtensionOID.FRESHEST_CRL: _encode_cdps_freshest_crl,
ExtensionOID.INHIBIT_ANY_POLICY: _encode_inhibit_any_policy,
ExtensionOID.OCSP_NO_CHECK: _encode_ocsp_nocheck,
ExtensionOID.NAME_CONSTRAINTS: _encode_name_constraints,
ExtensionOID.POLICY_CONSTRAINTS: _encode_policy_constraints,
}
_CRL_EXTENSION_ENCODE_HANDLERS = {
ExtensionOID.ISSUER_ALTERNATIVE_NAME: _encode_alt_name,
ExtensionOID.AUTHORITY_KEY_IDENTIFIER: _encode_authority_key_identifier,
ExtensionOID.AUTHORITY_INFORMATION_ACCESS: (
_encode_authority_information_access
),
ExtensionOID.CRL_NUMBER: _encode_crl_number_delta_crl_indicator,
ExtensionOID.DELTA_CRL_INDICATOR: _encode_crl_number_delta_crl_indicator,
}
_CRL_ENTRY_EXTENSION_ENCODE_HANDLERS = {
CRLEntryExtensionOID.CERTIFICATE_ISSUER: _encode_alt_name,
CRLEntryExtensionOID.CRL_REASON: _encode_crl_reason,
CRLEntryExtensionOID.INVALIDITY_DATE: _encode_invalidity_date,
}
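
The encoder table above is driven by the X.509 builders; a minimal sketch of a self-signed certificate whose extensions pass through these handlers (the name, validity window, and key size are illustrative):

import datetime
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.x509.oid import NameOID

key = rsa.generate_private_key(public_exponent=65537, key_size=2048,
                               backend=default_backend())
name = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, u"example.com")])
cert = (
    x509.CertificateBuilder()
    .subject_name(name)
    .issuer_name(name)
    .public_key(key.public_key())
    .serial_number(x509.random_serial_number())
    .not_valid_before(datetime.datetime.utcnow())
    .not_valid_after(datetime.datetime.utcnow() + datetime.timedelta(days=30))
    # Encoded via _encode_basic_constraints / _encode_alt_name above.
    .add_extension(x509.BasicConstraints(ca=False, path_length=None),
                   critical=True)
    .add_extension(x509.SubjectAlternativeName([x509.DNSName(u"example.com")]),
                   critical=False)
    .sign(key, hashes.SHA256(), default_backend())
)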

View File

@ -0,0 +1,61 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
from cryptography.hazmat.primitives import hashes
@utils.register_interface(hashes.HashContext)
class _HashContext(object):
def __init__(self, backend, algorithm, ctx=None):
self._algorithm = algorithm
self._backend = backend
if ctx is None:
ctx = self._backend._lib.Cryptography_EVP_MD_CTX_new()
ctx = self._backend._ffi.gc(
ctx, self._backend._lib.Cryptography_EVP_MD_CTX_free
)
name = self._backend._build_openssl_digest_name(algorithm)
evp_md = self._backend._lib.EVP_get_digestbyname(name)
if evp_md == self._backend._ffi.NULL:
raise UnsupportedAlgorithm(
"{0} is not a supported hash on this backend.".format(
name),
_Reasons.UNSUPPORTED_HASH
)
res = self._backend._lib.EVP_DigestInit_ex(ctx, evp_md,
self._backend._ffi.NULL)
self._backend.openssl_assert(res != 0)
self._ctx = ctx
algorithm = utils.read_only_property("_algorithm")
def copy(self):
copied_ctx = self._backend._lib.Cryptography_EVP_MD_CTX_new()
copied_ctx = self._backend._ffi.gc(
copied_ctx, self._backend._lib.Cryptography_EVP_MD_CTX_free
)
res = self._backend._lib.EVP_MD_CTX_copy_ex(copied_ctx, self._ctx)
self._backend.openssl_assert(res != 0)
return _HashContext(self._backend, self.algorithm, ctx=copied_ctx)
def update(self, data):
res = self._backend._lib.EVP_DigestUpdate(self._ctx, data, len(data))
self._backend.openssl_assert(res != 0)
def finalize(self):
buf = self._backend._ffi.new("unsigned char[]",
self._backend._lib.EVP_MAX_MD_SIZE)
outlen = self._backend._ffi.new("unsigned int *")
res = self._backend._lib.EVP_DigestFinal_ex(self._ctx, buf, outlen)
self._backend.openssl_assert(res != 0)
self._backend.openssl_assert(outlen[0] == self.algorithm.digest_size)
return self._backend._ffi.buffer(buf)[:outlen[0]]
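
A minimal sketch of the incremental hashing API backed by _HashContext above (the SHA-256 choice is illustrative):

import binascii
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes

digest = hashes.Hash(hashes.SHA256(), backend=default_backend())
digest.update(b"abc")
digest.update(b"def")  # equivalent to hashing b"abcdef" in one call
print(binascii.hexlify(digest.finalize()))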

View File

@ -0,0 +1,73 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.exceptions import (
InvalidSignature, UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.primitives import constant_time, hashes, mac
@utils.register_interface(mac.MACContext)
@utils.register_interface(hashes.HashContext)
class _HMACContext(object):
def __init__(self, backend, key, algorithm, ctx=None):
self._algorithm = algorithm
self._backend = backend
if ctx is None:
ctx = self._backend._lib.Cryptography_HMAC_CTX_new()
self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
ctx = self._backend._ffi.gc(
ctx, self._backend._lib.Cryptography_HMAC_CTX_free
)
name = self._backend._build_openssl_digest_name(algorithm)
evp_md = self._backend._lib.EVP_get_digestbyname(name)
if evp_md == self._backend._ffi.NULL:
raise UnsupportedAlgorithm(
"{0} is not a supported hash on this backend".format(name),
_Reasons.UNSUPPORTED_HASH
)
res = self._backend._lib.HMAC_Init_ex(
ctx, key, len(key), evp_md, self._backend._ffi.NULL
)
self._backend.openssl_assert(res != 0)
self._ctx = ctx
self._key = key
algorithm = utils.read_only_property("_algorithm")
def copy(self):
copied_ctx = self._backend._lib.Cryptography_HMAC_CTX_new()
self._backend.openssl_assert(copied_ctx != self._backend._ffi.NULL)
copied_ctx = self._backend._ffi.gc(
copied_ctx, self._backend._lib.Cryptography_HMAC_CTX_free
)
res = self._backend._lib.HMAC_CTX_copy(copied_ctx, self._ctx)
self._backend.openssl_assert(res != 0)
return _HMACContext(
self._backend, self._key, self.algorithm, ctx=copied_ctx
)
def update(self, data):
res = self._backend._lib.HMAC_Update(self._ctx, data, len(data))
self._backend.openssl_assert(res != 0)
def finalize(self):
buf = self._backend._ffi.new("unsigned char[]",
self._backend._lib.EVP_MAX_MD_SIZE)
outlen = self._backend._ffi.new("unsigned int *")
res = self._backend._lib.HMAC_Final(self._ctx, buf, outlen)
self._backend.openssl_assert(res != 0)
self._backend.openssl_assert(outlen[0] == self.algorithm.digest_size)
return self._backend._ffi.buffer(buf)[:outlen[0]]
def verify(self, signature):
digest = self.finalize()
if not constant_time.bytes_eq(digest, signature):
raise InvalidSignature("Signature did not match digest.")
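
A minimal sketch of computing and verifying a MAC through _HMACContext above (the all-zero key is illustrative; use a randomly generated secret in practice):

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, hmac

key = b"\x00" * 32
h = hmac.HMAC(key, hashes.SHA256(), backend=default_backend())
h.update(b"message to authenticate")
tag = h.finalize()

verifier = hmac.HMAC(key, hashes.SHA256(), backend=default_backend())
verifier.update(b"message to authenticate")
verifier.verify(tag)  # constant-time compare; raises InvalidSignature on mismatch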

View File

@ -0,0 +1,475 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import math
from cryptography import utils
from cryptography.exceptions import (
InvalidSignature, UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.backends.openssl.utils import (
_calculate_digest_and_algorithm, _check_not_prehashed,
_warn_sign_verify_deprecated
)
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import (
AsymmetricSignatureContext, AsymmetricVerificationContext, rsa
)
from cryptography.hazmat.primitives.asymmetric.padding import (
AsymmetricPadding, MGF1, OAEP, PKCS1v15, PSS, calculate_max_pss_salt_length
)
from cryptography.hazmat.primitives.asymmetric.rsa import (
RSAPrivateKeyWithSerialization, RSAPublicKeyWithSerialization
)
def _get_rsa_pss_salt_length(pss, key, hash_algorithm):
salt = pss._salt_length
if salt is MGF1.MAX_LENGTH or salt is PSS.MAX_LENGTH:
return calculate_max_pss_salt_length(key, hash_algorithm)
else:
return salt
def _enc_dec_rsa(backend, key, data, padding):
if not isinstance(padding, AsymmetricPadding):
raise TypeError("Padding must be an instance of AsymmetricPadding.")
if isinstance(padding, PKCS1v15):
padding_enum = backend._lib.RSA_PKCS1_PADDING
elif isinstance(padding, OAEP):
padding_enum = backend._lib.RSA_PKCS1_OAEP_PADDING
if not isinstance(padding._mgf, MGF1):
raise UnsupportedAlgorithm(
"Only MGF1 is supported by this backend.",
_Reasons.UNSUPPORTED_MGF
)
if not backend.rsa_padding_supported(padding):
raise UnsupportedAlgorithm(
"This combination of padding and hash algorithm is not "
"supported by this backend.",
_Reasons.UNSUPPORTED_PADDING
)
else:
raise UnsupportedAlgorithm(
"{0} is not supported by this backend.".format(
padding.name
),
_Reasons.UNSUPPORTED_PADDING
)
return _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum, padding)
def _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum, padding):
if isinstance(key, _RSAPublicKey):
init = backend._lib.EVP_PKEY_encrypt_init
crypt = backend._lib.EVP_PKEY_encrypt
else:
init = backend._lib.EVP_PKEY_decrypt_init
crypt = backend._lib.EVP_PKEY_decrypt
pkey_ctx = backend._lib.EVP_PKEY_CTX_new(
key._evp_pkey, backend._ffi.NULL
)
backend.openssl_assert(pkey_ctx != backend._ffi.NULL)
pkey_ctx = backend._ffi.gc(pkey_ctx, backend._lib.EVP_PKEY_CTX_free)
res = init(pkey_ctx)
backend.openssl_assert(res == 1)
res = backend._lib.EVP_PKEY_CTX_set_rsa_padding(
pkey_ctx, padding_enum)
backend.openssl_assert(res > 0)
buf_size = backend._lib.EVP_PKEY_size(key._evp_pkey)
backend.openssl_assert(buf_size > 0)
if (
isinstance(padding, OAEP) and
backend._lib.Cryptography_HAS_RSA_OAEP_MD
):
mgf1_md = backend._lib.EVP_get_digestbyname(
padding._mgf._algorithm.name.encode("ascii"))
backend.openssl_assert(mgf1_md != backend._ffi.NULL)
res = backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md(pkey_ctx, mgf1_md)
backend.openssl_assert(res > 0)
oaep_md = backend._lib.EVP_get_digestbyname(
padding._algorithm.name.encode("ascii"))
backend.openssl_assert(oaep_md != backend._ffi.NULL)
res = backend._lib.EVP_PKEY_CTX_set_rsa_oaep_md(pkey_ctx, oaep_md)
backend.openssl_assert(res > 0)
if (
isinstance(padding, OAEP) and
padding._label is not None and
len(padding._label) > 0
):
# set0_rsa_oaep_label takes ownership of the char * so we need to
# copy it into some new memory
labelptr = backend._lib.OPENSSL_malloc(len(padding._label))
backend.openssl_assert(labelptr != backend._ffi.NULL)
backend._ffi.memmove(labelptr, padding._label, len(padding._label))
res = backend._lib.EVP_PKEY_CTX_set0_rsa_oaep_label(
pkey_ctx, labelptr, len(padding._label)
)
backend.openssl_assert(res == 1)
outlen = backend._ffi.new("size_t *", buf_size)
buf = backend._ffi.new("unsigned char[]", buf_size)
res = crypt(pkey_ctx, buf, outlen, data, len(data))
if res <= 0:
_handle_rsa_enc_dec_error(backend, key)
return backend._ffi.buffer(buf)[:outlen[0]]
def _handle_rsa_enc_dec_error(backend, key):
errors = backend._consume_errors()
backend.openssl_assert(errors)
assert errors[0].lib == backend._lib.ERR_LIB_RSA
if isinstance(key, _RSAPublicKey):
assert (errors[0].reason ==
backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE)
raise ValueError(
"Data too long for key size. Encrypt less data or use a "
"larger key size."
)
else:
decoding_errors = [
backend._lib.RSA_R_BLOCK_TYPE_IS_NOT_01,
backend._lib.RSA_R_BLOCK_TYPE_IS_NOT_02,
backend._lib.RSA_R_OAEP_DECODING_ERROR,
# Though this error looks similar to the
# RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE, this occurs on decrypts,
# rather than on encrypts
backend._lib.RSA_R_DATA_TOO_LARGE_FOR_MODULUS,
]
if backend._lib.Cryptography_HAS_RSA_R_PKCS_DECODING_ERROR:
decoding_errors.append(backend._lib.RSA_R_PKCS_DECODING_ERROR)
assert errors[0].reason in decoding_errors
raise ValueError("Decryption failed.")
def _rsa_sig_determine_padding(backend, key, padding, algorithm):
if not isinstance(padding, AsymmetricPadding):
raise TypeError("Expected provider of AsymmetricPadding.")
pkey_size = backend._lib.EVP_PKEY_size(key._evp_pkey)
backend.openssl_assert(pkey_size > 0)
if isinstance(padding, PKCS1v15):
padding_enum = backend._lib.RSA_PKCS1_PADDING
elif isinstance(padding, PSS):
if not isinstance(padding._mgf, MGF1):
raise UnsupportedAlgorithm(
"Only MGF1 is supported by this backend.",
_Reasons.UNSUPPORTED_MGF
)
# Size of key in bytes - 2 is the maximum
# PSS signature length (salt length is checked later)
if pkey_size - algorithm.digest_size - 2 < 0:
raise ValueError("Digest too large for key size. Use a larger "
"key or different digest.")
padding_enum = backend._lib.RSA_PKCS1_PSS_PADDING
else:
raise UnsupportedAlgorithm(
"{0} is not supported by this backend.".format(padding.name),
_Reasons.UNSUPPORTED_PADDING
)
return padding_enum
def _rsa_sig_setup(backend, padding, algorithm, key, data, init_func):
padding_enum = _rsa_sig_determine_padding(backend, key, padding, algorithm)
evp_md = backend._lib.EVP_get_digestbyname(algorithm.name.encode("ascii"))
backend.openssl_assert(evp_md != backend._ffi.NULL)
pkey_ctx = backend._lib.EVP_PKEY_CTX_new(key._evp_pkey, backend._ffi.NULL)
backend.openssl_assert(pkey_ctx != backend._ffi.NULL)
pkey_ctx = backend._ffi.gc(pkey_ctx, backend._lib.EVP_PKEY_CTX_free)
res = init_func(pkey_ctx)
backend.openssl_assert(res == 1)
res = backend._lib.EVP_PKEY_CTX_set_signature_md(pkey_ctx, evp_md)
backend.openssl_assert(res > 0)
res = backend._lib.EVP_PKEY_CTX_set_rsa_padding(pkey_ctx, padding_enum)
backend.openssl_assert(res > 0)
if isinstance(padding, PSS):
res = backend._lib.EVP_PKEY_CTX_set_rsa_pss_saltlen(
pkey_ctx, _get_rsa_pss_salt_length(padding, key, algorithm)
)
backend.openssl_assert(res > 0)
mgf1_md = backend._lib.EVP_get_digestbyname(
padding._mgf._algorithm.name.encode("ascii")
)
backend.openssl_assert(mgf1_md != backend._ffi.NULL)
res = backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md(pkey_ctx, mgf1_md)
backend.openssl_assert(res > 0)
return pkey_ctx
def _rsa_sig_sign(backend, padding, algorithm, private_key, data):
pkey_ctx = _rsa_sig_setup(
backend, padding, algorithm, private_key, data,
backend._lib.EVP_PKEY_sign_init
)
buflen = backend._ffi.new("size_t *")
res = backend._lib.EVP_PKEY_sign(
pkey_ctx,
backend._ffi.NULL,
buflen,
data,
len(data)
)
backend.openssl_assert(res == 1)
buf = backend._ffi.new("unsigned char[]", buflen[0])
res = backend._lib.EVP_PKEY_sign(
pkey_ctx, buf, buflen, data, len(data))
if res != 1:
errors = backend._consume_errors()
assert errors[0].lib == backend._lib.ERR_LIB_RSA
reason = None
if (errors[0].reason ==
backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE):
reason = ("Salt length too long for key size. Try using "
"MAX_LENGTH instead.")
else:
assert (errors[0].reason ==
backend._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY)
reason = "Digest too large for key size. Use a larger key."
assert reason is not None
raise ValueError(reason)
return backend._ffi.buffer(buf)[:]
def _rsa_sig_verify(backend, padding, algorithm, public_key, signature, data):
pkey_ctx = _rsa_sig_setup(
backend, padding, algorithm, public_key, data,
backend._lib.EVP_PKEY_verify_init
)
res = backend._lib.EVP_PKEY_verify(
pkey_ctx, signature, len(signature), data, len(data)
)
# The previous call can return negative numbers in the event of an
# error. This is not a signature failure but we need to fail if it
# occurs.
backend.openssl_assert(res >= 0)
if res == 0:
backend._consume_errors()
raise InvalidSignature
@utils.register_interface(AsymmetricSignatureContext)
class _RSASignatureContext(object):
def __init__(self, backend, private_key, padding, algorithm):
self._backend = backend
self._private_key = private_key
# We now call _rsa_sig_determine_padding in _rsa_sig_setup. However
# we need to make a pointless call to it here so we maintain the
# API of erroring on init with this context if the values are invalid.
_rsa_sig_determine_padding(backend, private_key, padding, algorithm)
self._padding = padding
self._algorithm = algorithm
self._hash_ctx = hashes.Hash(self._algorithm, self._backend)
def update(self, data):
self._hash_ctx.update(data)
def finalize(self):
return _rsa_sig_sign(
self._backend,
self._padding,
self._algorithm,
self._private_key,
self._hash_ctx.finalize()
)
@utils.register_interface(AsymmetricVerificationContext)
class _RSAVerificationContext(object):
def __init__(self, backend, public_key, signature, padding, algorithm):
self._backend = backend
self._public_key = public_key
self._signature = signature
self._padding = padding
# We now call _rsa_sig_determine_padding in _rsa_sig_setup. However
# we need to make a pointless call to it here so we maintain the
# API of erroring on init with this context if the values are invalid.
_rsa_sig_determine_padding(backend, public_key, padding, algorithm)
self._algorithm = algorithm
self._hash_ctx = hashes.Hash(self._algorithm, self._backend)
def update(self, data):
self._hash_ctx.update(data)
def verify(self):
return _rsa_sig_verify(
self._backend,
self._padding,
self._algorithm,
self._public_key,
self._signature,
self._hash_ctx.finalize()
)
@utils.register_interface(RSAPrivateKeyWithSerialization)
class _RSAPrivateKey(object):
def __init__(self, backend, rsa_cdata, evp_pkey):
self._backend = backend
self._rsa_cdata = rsa_cdata
self._evp_pkey = evp_pkey
n = self._backend._ffi.new("BIGNUM **")
self._backend._lib.RSA_get0_key(
self._rsa_cdata, n, self._backend._ffi.NULL,
self._backend._ffi.NULL
)
self._backend.openssl_assert(n[0] != self._backend._ffi.NULL)
self._key_size = self._backend._lib.BN_num_bits(n[0])
key_size = utils.read_only_property("_key_size")
def signer(self, padding, algorithm):
_warn_sign_verify_deprecated()
_check_not_prehashed(algorithm)
return _RSASignatureContext(self._backend, self, padding, algorithm)
def decrypt(self, ciphertext, padding):
key_size_bytes = int(math.ceil(self.key_size / 8.0))
if key_size_bytes != len(ciphertext):
raise ValueError("Ciphertext length must be equal to key size.")
return _enc_dec_rsa(self._backend, self, ciphertext, padding)
def public_key(self):
ctx = self._backend._lib.RSAPublicKey_dup(self._rsa_cdata)
self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
ctx = self._backend._ffi.gc(ctx, self._backend._lib.RSA_free)
res = self._backend._lib.RSA_blinding_on(ctx, self._backend._ffi.NULL)
self._backend.openssl_assert(res == 1)
evp_pkey = self._backend._rsa_cdata_to_evp_pkey(ctx)
return _RSAPublicKey(self._backend, ctx, evp_pkey)
def private_numbers(self):
n = self._backend._ffi.new("BIGNUM **")
e = self._backend._ffi.new("BIGNUM **")
d = self._backend._ffi.new("BIGNUM **")
p = self._backend._ffi.new("BIGNUM **")
q = self._backend._ffi.new("BIGNUM **")
dmp1 = self._backend._ffi.new("BIGNUM **")
dmq1 = self._backend._ffi.new("BIGNUM **")
iqmp = self._backend._ffi.new("BIGNUM **")
self._backend._lib.RSA_get0_key(self._rsa_cdata, n, e, d)
self._backend.openssl_assert(n[0] != self._backend._ffi.NULL)
self._backend.openssl_assert(e[0] != self._backend._ffi.NULL)
self._backend.openssl_assert(d[0] != self._backend._ffi.NULL)
self._backend._lib.RSA_get0_factors(self._rsa_cdata, p, q)
self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
self._backend.openssl_assert(q[0] != self._backend._ffi.NULL)
self._backend._lib.RSA_get0_crt_params(
self._rsa_cdata, dmp1, dmq1, iqmp
)
self._backend.openssl_assert(dmp1[0] != self._backend._ffi.NULL)
self._backend.openssl_assert(dmq1[0] != self._backend._ffi.NULL)
self._backend.openssl_assert(iqmp[0] != self._backend._ffi.NULL)
return rsa.RSAPrivateNumbers(
p=self._backend._bn_to_int(p[0]),
q=self._backend._bn_to_int(q[0]),
d=self._backend._bn_to_int(d[0]),
dmp1=self._backend._bn_to_int(dmp1[0]),
dmq1=self._backend._bn_to_int(dmq1[0]),
iqmp=self._backend._bn_to_int(iqmp[0]),
public_numbers=rsa.RSAPublicNumbers(
e=self._backend._bn_to_int(e[0]),
n=self._backend._bn_to_int(n[0]),
)
)
def private_bytes(self, encoding, format, encryption_algorithm):
return self._backend._private_key_bytes(
encoding,
format,
encryption_algorithm,
self._evp_pkey,
self._rsa_cdata
)
def sign(self, data, padding, algorithm):
data, algorithm = _calculate_digest_and_algorithm(
self._backend, data, algorithm
)
return _rsa_sig_sign(self._backend, padding, algorithm, self, data)
@utils.register_interface(RSAPublicKeyWithSerialization)
class _RSAPublicKey(object):
def __init__(self, backend, rsa_cdata, evp_pkey):
self._backend = backend
self._rsa_cdata = rsa_cdata
self._evp_pkey = evp_pkey
n = self._backend._ffi.new("BIGNUM **")
self._backend._lib.RSA_get0_key(
self._rsa_cdata, n, self._backend._ffi.NULL,
self._backend._ffi.NULL
)
self._backend.openssl_assert(n[0] != self._backend._ffi.NULL)
self._key_size = self._backend._lib.BN_num_bits(n[0])
key_size = utils.read_only_property("_key_size")
def verifier(self, signature, padding, algorithm):
_warn_sign_verify_deprecated()
if not isinstance(signature, bytes):
raise TypeError("signature must be bytes.")
_check_not_prehashed(algorithm)
return _RSAVerificationContext(
self._backend, self, signature, padding, algorithm
)
def encrypt(self, plaintext, padding):
return _enc_dec_rsa(self._backend, self, plaintext, padding)
def public_numbers(self):
n = self._backend._ffi.new("BIGNUM **")
e = self._backend._ffi.new("BIGNUM **")
self._backend._lib.RSA_get0_key(
self._rsa_cdata, n, e, self._backend._ffi.NULL
)
self._backend.openssl_assert(n[0] != self._backend._ffi.NULL)
self._backend.openssl_assert(e[0] != self._backend._ffi.NULL)
return rsa.RSAPublicNumbers(
e=self._backend._bn_to_int(e[0]),
n=self._backend._bn_to_int(n[0]),
)
def public_bytes(self, encoding, format):
return self._backend._public_key_bytes(
encoding,
format,
self,
self._evp_pkey,
self._rsa_cdata
)
def verify(self, signature, data, padding, algorithm):
data, algorithm = _calculate_digest_and_algorithm(
self._backend, data, algorithm
)
return _rsa_sig_verify(
self._backend, padding, algorithm, self, signature, data
)
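# Illustrative usage sketch (not part of the file above): exercising the
# sign()/verify() paths implemented by _RSAPrivateKey/_RSAPublicKey through
# the public API. Key size, message, and hash choice are arbitrary examples.
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding, rsa

example_key = rsa.generate_private_key(
    public_exponent=65537, key_size=2048, backend=default_backend()
)
example_signature = example_key.sign(
    b"message to sign",
    padding.PSS(mgf=padding.MGF1(hashes.SHA256()),
                salt_length=padding.PSS.MAX_LENGTH),
    hashes.SHA256(),
)
# verify() returns None on success and raises InvalidSignature on mismatch.
example_key.public_key().verify(
    example_signature,
    b"message to sign",
    padding.PSS(mgf=padding.MGF1(hashes.SHA256()),
                salt_length=padding.PSS.MAX_LENGTH),
    hashes.SHA256(),
)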

View File

@ -0,0 +1,45 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import warnings
from cryptography import utils
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric.utils import Prehashed
def _calculate_digest_and_algorithm(backend, data, algorithm):
if not isinstance(algorithm, Prehashed):
hash_ctx = hashes.Hash(algorithm, backend)
hash_ctx.update(data)
data = hash_ctx.finalize()
else:
algorithm = algorithm._algorithm
if len(data) != algorithm.digest_size:
raise ValueError(
"The provided data must be the same length as the hash "
"algorithm's digest size."
)
return (data, algorithm)
def _check_not_prehashed(signature_algorithm):
if isinstance(signature_algorithm, Prehashed):
raise TypeError(
"Prehashed is only supported in the sign and verify methods. "
"It cannot be used with signer or verifier."
)
def _warn_sign_verify_deprecated():
warnings.warn(
"signer and verifier have been deprecated. Please use sign "
"and verify instead.",
utils.PersistentlyDeprecated,
stacklevel=3
)
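# Illustrative sketch (not part of the file above): the Prehashed branch of
# _calculate_digest_and_algorithm corresponds to signing an externally
# computed digest through the public API. A minimal example assuming an EC
# key; the same Prehashed path applies to the other asymmetric key types.
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import ec, utils as asym_utils

ec_key = ec.generate_private_key(ec.SECP256R1(), default_backend())
digest_ctx = hashes.Hash(hashes.SHA256(), default_backend())
digest_ctx.update(b"data hashed ahead of time")
prehashed_digest = digest_ctx.finalize()
# The digest length must equal SHA256's digest_size, as checked above.
ec_signature = ec_key.sign(
    prehashed_digest, ec.ECDSA(asym_utils.Prehashed(hashes.SHA256()))
)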

View File

@ -0,0 +1,79 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.hazmat.primitives.asymmetric.x25519 import (
X25519PrivateKey, X25519PublicKey
)
@utils.register_interface(X25519PublicKey)
class _X25519PublicKey(object):
def __init__(self, backend, evp_pkey):
self._backend = backend
self._evp_pkey = evp_pkey
def public_bytes(self):
ucharpp = self._backend._ffi.new("unsigned char **")
res = self._backend._lib.EVP_PKEY_get1_tls_encodedpoint(
self._evp_pkey, ucharpp
)
self._backend.openssl_assert(res == 32)
self._backend.openssl_assert(ucharpp[0] != self._backend._ffi.NULL)
data = self._backend._ffi.gc(
ucharpp[0], self._backend._lib.OPENSSL_free
)
return self._backend._ffi.buffer(data, res)[:]
@utils.register_interface(X25519PrivateKey)
class _X25519PrivateKey(object):
def __init__(self, backend, evp_pkey):
self._backend = backend
self._evp_pkey = evp_pkey
def public_key(self):
bio = self._backend._create_mem_bio_gc()
res = self._backend._lib.i2d_PUBKEY_bio(bio, self._evp_pkey)
self._backend.openssl_assert(res == 1)
evp_pkey = self._backend._lib.d2i_PUBKEY_bio(
bio, self._backend._ffi.NULL
)
self._backend.openssl_assert(evp_pkey != self._backend._ffi.NULL)
evp_pkey = self._backend._ffi.gc(
evp_pkey, self._backend._lib.EVP_PKEY_free
)
return _X25519PublicKey(self._backend, evp_pkey)
def exchange(self, peer_public_key):
if not isinstance(peer_public_key, X25519PublicKey):
raise TypeError("peer_public_key must be X25519PublicKey.")
ctx = self._backend._lib.EVP_PKEY_CTX_new(
self._evp_pkey, self._backend._ffi.NULL
)
self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
ctx = self._backend._ffi.gc(ctx, self._backend._lib.EVP_PKEY_CTX_free)
res = self._backend._lib.EVP_PKEY_derive_init(ctx)
self._backend.openssl_assert(res == 1)
res = self._backend._lib.EVP_PKEY_derive_set_peer(
ctx, peer_public_key._evp_pkey
)
self._backend.openssl_assert(res == 1)
keylen = self._backend._ffi.new("size_t *")
res = self._backend._lib.EVP_PKEY_derive(
ctx, self._backend._ffi.NULL, keylen
)
self._backend.openssl_assert(res == 1)
self._backend.openssl_assert(keylen[0] > 0)
buf = self._backend._ffi.new("unsigned char[]", keylen[0])
res = self._backend._lib.EVP_PKEY_derive(ctx, buf, keylen)
if res != 1:
raise ValueError(
"Null shared key derived from public/private pair."
)
return self._backend._ffi.buffer(buf, keylen[0])[:]
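# Illustrative usage sketch (not part of the file above): a two-party X25519
# exchange through the public API, which ends up in _X25519PrivateKey.exchange.
from cryptography.hazmat.primitives.asymmetric.x25519 import X25519PrivateKey

x25519_key_a = X25519PrivateKey.generate()
x25519_key_b = X25519PrivateKey.generate()
# Both sides derive the same 32-byte shared secret.
shared_a = x25519_key_a.exchange(x25519_key_b.public_key())
shared_b = x25519_key_b.exchange(x25519_key_a.public_key())
assert shared_a == shared_b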

View File

@ -0,0 +1,518 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import datetime
import operator
import warnings
from cryptography import utils, x509
from cryptography.exceptions import UnsupportedAlgorithm
from cryptography.hazmat.backends.openssl.decode_asn1 import (
_CERTIFICATE_EXTENSION_PARSER, _CERTIFICATE_EXTENSION_PARSER_NO_SCT,
_CRL_EXTENSION_PARSER, _CSR_EXTENSION_PARSER,
_REVOKED_CERTIFICATE_EXTENSION_PARSER, _asn1_integer_to_int,
_asn1_string_to_bytes, _decode_x509_name, _obj2txt, _parse_asn1_time
)
from cryptography.hazmat.backends.openssl.encode_asn1 import (
_encode_asn1_int_gc
)
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import dsa, ec, rsa
@utils.register_interface(x509.Certificate)
class _Certificate(object):
def __init__(self, backend, x509):
self._backend = backend
self._x509 = x509
def __repr__(self):
return "<Certificate(subject={0}, ...)>".format(self.subject)
def __eq__(self, other):
if not isinstance(other, x509.Certificate):
return NotImplemented
res = self._backend._lib.X509_cmp(self._x509, other._x509)
return res == 0
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash(self.public_bytes(serialization.Encoding.DER))
def fingerprint(self, algorithm):
h = hashes.Hash(algorithm, self._backend)
h.update(self.public_bytes(serialization.Encoding.DER))
return h.finalize()
@property
def version(self):
version = self._backend._lib.X509_get_version(self._x509)
if version == 0:
return x509.Version.v1
elif version == 2:
return x509.Version.v3
else:
raise x509.InvalidVersion(
"{0} is not a valid X509 version".format(version), version
)
@property
def serial(self):
warnings.warn(
"Certificate serial is deprecated, use serial_number instead.",
utils.PersistentlyDeprecated,
stacklevel=2
)
return self.serial_number
@property
def serial_number(self):
asn1_int = self._backend._lib.X509_get_serialNumber(self._x509)
self._backend.openssl_assert(asn1_int != self._backend._ffi.NULL)
return _asn1_integer_to_int(self._backend, asn1_int)
def public_key(self):
pkey = self._backend._lib.X509_get_pubkey(self._x509)
if pkey == self._backend._ffi.NULL:
# Remove errors from the stack.
self._backend._consume_errors()
raise ValueError("Certificate public key is of an unknown type")
pkey = self._backend._ffi.gc(pkey, self._backend._lib.EVP_PKEY_free)
return self._backend._evp_pkey_to_public_key(pkey)
@property
def not_valid_before(self):
asn1_time = self._backend._lib.X509_get_notBefore(self._x509)
return _parse_asn1_time(self._backend, asn1_time)
@property
def not_valid_after(self):
asn1_time = self._backend._lib.X509_get_notAfter(self._x509)
return _parse_asn1_time(self._backend, asn1_time)
@property
def issuer(self):
issuer = self._backend._lib.X509_get_issuer_name(self._x509)
self._backend.openssl_assert(issuer != self._backend._ffi.NULL)
return _decode_x509_name(self._backend, issuer)
@property
def subject(self):
subject = self._backend._lib.X509_get_subject_name(self._x509)
self._backend.openssl_assert(subject != self._backend._ffi.NULL)
return _decode_x509_name(self._backend, subject)
@property
def signature_hash_algorithm(self):
oid = self.signature_algorithm_oid
try:
return x509._SIG_OIDS_TO_HASH[oid]
except KeyError:
raise UnsupportedAlgorithm(
"Signature algorithm OID:{0} not recognized".format(oid)
)
@property
def signature_algorithm_oid(self):
alg = self._backend._ffi.new("X509_ALGOR **")
self._backend._lib.X509_get0_signature(
self._backend._ffi.NULL, alg, self._x509
)
self._backend.openssl_assert(alg[0] != self._backend._ffi.NULL)
oid = _obj2txt(self._backend, alg[0].algorithm)
return x509.ObjectIdentifier(oid)
@utils.cached_property
def extensions(self):
if self._backend._lib.CRYPTOGRAPHY_OPENSSL_110_OR_GREATER:
return _CERTIFICATE_EXTENSION_PARSER.parse(
self._backend, self._x509
)
else:
return _CERTIFICATE_EXTENSION_PARSER_NO_SCT.parse(
self._backend, self._x509
)
@property
def signature(self):
sig = self._backend._ffi.new("ASN1_BIT_STRING **")
self._backend._lib.X509_get0_signature(
sig, self._backend._ffi.NULL, self._x509
)
self._backend.openssl_assert(sig[0] != self._backend._ffi.NULL)
return _asn1_string_to_bytes(self._backend, sig[0])
@property
def tbs_certificate_bytes(self):
pp = self._backend._ffi.new("unsigned char **")
res = self._backend._lib.i2d_re_X509_tbs(self._x509, pp)
self._backend.openssl_assert(res > 0)
pp = self._backend._ffi.gc(
pp, lambda pointer: self._backend._lib.OPENSSL_free(pointer[0])
)
return self._backend._ffi.buffer(pp[0], res)[:]
def public_bytes(self, encoding):
bio = self._backend._create_mem_bio_gc()
if encoding is serialization.Encoding.PEM:
res = self._backend._lib.PEM_write_bio_X509(bio, self._x509)
elif encoding is serialization.Encoding.DER:
res = self._backend._lib.i2d_X509_bio(bio, self._x509)
else:
raise TypeError("encoding must be an item from the Encoding enum")
self._backend.openssl_assert(res == 1)
return self._backend._read_mem_bio(bio)
@utils.register_interface(x509.RevokedCertificate)
class _RevokedCertificate(object):
def __init__(self, backend, crl, x509_revoked):
self._backend = backend
# The X509_REVOKED_value is a X509_REVOKED * that has
# no reference counting. This means when X509_CRL_free is
# called then the CRL and all X509_REVOKED * are freed. Since
# you can retain a reference to a single revoked certificate
# and let the CRL fall out of scope we need to retain a
# private reference to the CRL inside the RevokedCertificate
# object to prevent the gc from being called inappropriately.
self._crl = crl
self._x509_revoked = x509_revoked
@property
def serial_number(self):
asn1_int = self._backend._lib.X509_REVOKED_get0_serialNumber(
self._x509_revoked
)
self._backend.openssl_assert(asn1_int != self._backend._ffi.NULL)
return _asn1_integer_to_int(self._backend, asn1_int)
@property
def revocation_date(self):
return _parse_asn1_time(
self._backend,
self._backend._lib.X509_REVOKED_get0_revocationDate(
self._x509_revoked
)
)
@utils.cached_property
def extensions(self):
return _REVOKED_CERTIFICATE_EXTENSION_PARSER.parse(
self._backend, self._x509_revoked
)
@utils.register_interface(x509.CertificateRevocationList)
class _CertificateRevocationList(object):
def __init__(self, backend, x509_crl):
self._backend = backend
self._x509_crl = x509_crl
def __eq__(self, other):
if not isinstance(other, x509.CertificateRevocationList):
return NotImplemented
res = self._backend._lib.X509_CRL_cmp(self._x509_crl, other._x509_crl)
return res == 0
def __ne__(self, other):
return not self == other
def fingerprint(self, algorithm):
h = hashes.Hash(algorithm, self._backend)
bio = self._backend._create_mem_bio_gc()
res = self._backend._lib.i2d_X509_CRL_bio(
bio, self._x509_crl
)
self._backend.openssl_assert(res == 1)
der = self._backend._read_mem_bio(bio)
h.update(der)
return h.finalize()
def get_revoked_certificate_by_serial_number(self, serial_number):
revoked = self._backend._ffi.new("X509_REVOKED **")
asn1_int = _encode_asn1_int_gc(self._backend, serial_number)
res = self._backend._lib.X509_CRL_get0_by_serial(
self._x509_crl, revoked, asn1_int
)
if res == 0:
return None
else:
self._backend.openssl_assert(
revoked[0] != self._backend._ffi.NULL
)
return _RevokedCertificate(
self._backend, self._x509_crl, revoked[0]
)
@property
def signature_hash_algorithm(self):
oid = self.signature_algorithm_oid
try:
return x509._SIG_OIDS_TO_HASH[oid]
except KeyError:
raise UnsupportedAlgorithm(
"Signature algorithm OID:{0} not recognized".format(oid)
)
@property
def signature_algorithm_oid(self):
alg = self._backend._ffi.new("X509_ALGOR **")
self._backend._lib.X509_CRL_get0_signature(
self._x509_crl, self._backend._ffi.NULL, alg
)
self._backend.openssl_assert(alg[0] != self._backend._ffi.NULL)
oid = _obj2txt(self._backend, alg[0].algorithm)
return x509.ObjectIdentifier(oid)
@property
def issuer(self):
issuer = self._backend._lib.X509_CRL_get_issuer(self._x509_crl)
self._backend.openssl_assert(issuer != self._backend._ffi.NULL)
return _decode_x509_name(self._backend, issuer)
@property
def next_update(self):
nu = self._backend._lib.X509_CRL_get_nextUpdate(self._x509_crl)
self._backend.openssl_assert(nu != self._backend._ffi.NULL)
return _parse_asn1_time(self._backend, nu)
@property
def last_update(self):
lu = self._backend._lib.X509_CRL_get_lastUpdate(self._x509_crl)
self._backend.openssl_assert(lu != self._backend._ffi.NULL)
return _parse_asn1_time(self._backend, lu)
@property
def signature(self):
sig = self._backend._ffi.new("ASN1_BIT_STRING **")
self._backend._lib.X509_CRL_get0_signature(
self._x509_crl, sig, self._backend._ffi.NULL
)
self._backend.openssl_assert(sig[0] != self._backend._ffi.NULL)
return _asn1_string_to_bytes(self._backend, sig[0])
@property
def tbs_certlist_bytes(self):
pp = self._backend._ffi.new("unsigned char **")
res = self._backend._lib.i2d_re_X509_CRL_tbs(self._x509_crl, pp)
self._backend.openssl_assert(res > 0)
pp = self._backend._ffi.gc(
pp, lambda pointer: self._backend._lib.OPENSSL_free(pointer[0])
)
return self._backend._ffi.buffer(pp[0], res)[:]
def public_bytes(self, encoding):
bio = self._backend._create_mem_bio_gc()
if encoding is serialization.Encoding.PEM:
res = self._backend._lib.PEM_write_bio_X509_CRL(
bio, self._x509_crl
)
elif encoding is serialization.Encoding.DER:
res = self._backend._lib.i2d_X509_CRL_bio(bio, self._x509_crl)
else:
raise TypeError("encoding must be an item from the Encoding enum")
self._backend.openssl_assert(res == 1)
return self._backend._read_mem_bio(bio)
def _revoked_cert(self, idx):
revoked = self._backend._lib.X509_CRL_get_REVOKED(self._x509_crl)
r = self._backend._lib.sk_X509_REVOKED_value(revoked, idx)
self._backend.openssl_assert(r != self._backend._ffi.NULL)
return _RevokedCertificate(self._backend, self, r)
def __iter__(self):
for i in range(len(self)):
yield self._revoked_cert(i)
def __getitem__(self, idx):
if isinstance(idx, slice):
start, stop, step = idx.indices(len(self))
return [self._revoked_cert(i) for i in range(start, stop, step)]
else:
idx = operator.index(idx)
if idx < 0:
idx += len(self)
if not 0 <= idx < len(self):
raise IndexError
return self._revoked_cert(idx)
def __len__(self):
revoked = self._backend._lib.X509_CRL_get_REVOKED(self._x509_crl)
if revoked == self._backend._ffi.NULL:
return 0
else:
return self._backend._lib.sk_X509_REVOKED_num(revoked)
@utils.cached_property
def extensions(self):
return _CRL_EXTENSION_PARSER.parse(self._backend, self._x509_crl)
def is_signature_valid(self, public_key):
if not isinstance(public_key, (dsa.DSAPublicKey, rsa.RSAPublicKey,
ec.EllipticCurvePublicKey)):
raise TypeError('Expecting one of DSAPublicKey, RSAPublicKey,'
' or EllipticCurvePublicKey.')
res = self._backend._lib.X509_CRL_verify(
self._x509_crl, public_key._evp_pkey
)
if res != 1:
self._backend._consume_errors()
return False
return True
@utils.register_interface(x509.CertificateSigningRequest)
class _CertificateSigningRequest(object):
def __init__(self, backend, x509_req):
self._backend = backend
self._x509_req = x509_req
def __eq__(self, other):
if not isinstance(other, _CertificateSigningRequest):
return NotImplemented
self_bytes = self.public_bytes(serialization.Encoding.DER)
other_bytes = other.public_bytes(serialization.Encoding.DER)
return self_bytes == other_bytes
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash(self.public_bytes(serialization.Encoding.DER))
def public_key(self):
pkey = self._backend._lib.X509_REQ_get_pubkey(self._x509_req)
self._backend.openssl_assert(pkey != self._backend._ffi.NULL)
pkey = self._backend._ffi.gc(pkey, self._backend._lib.EVP_PKEY_free)
return self._backend._evp_pkey_to_public_key(pkey)
@property
def subject(self):
subject = self._backend._lib.X509_REQ_get_subject_name(self._x509_req)
self._backend.openssl_assert(subject != self._backend._ffi.NULL)
return _decode_x509_name(self._backend, subject)
@property
def signature_hash_algorithm(self):
oid = self.signature_algorithm_oid
try:
return x509._SIG_OIDS_TO_HASH[oid]
except KeyError:
raise UnsupportedAlgorithm(
"Signature algorithm OID:{0} not recognized".format(oid)
)
@property
def signature_algorithm_oid(self):
alg = self._backend._ffi.new("X509_ALGOR **")
self._backend._lib.X509_REQ_get0_signature(
self._x509_req, self._backend._ffi.NULL, alg
)
self._backend.openssl_assert(alg[0] != self._backend._ffi.NULL)
oid = _obj2txt(self._backend, alg[0].algorithm)
return x509.ObjectIdentifier(oid)
@utils.cached_property
def extensions(self):
x509_exts = self._backend._lib.X509_REQ_get_extensions(self._x509_req)
return _CSR_EXTENSION_PARSER.parse(self._backend, x509_exts)
def public_bytes(self, encoding):
bio = self._backend._create_mem_bio_gc()
if encoding is serialization.Encoding.PEM:
res = self._backend._lib.PEM_write_bio_X509_REQ(
bio, self._x509_req
)
elif encoding is serialization.Encoding.DER:
res = self._backend._lib.i2d_X509_REQ_bio(bio, self._x509_req)
else:
raise TypeError("encoding must be an item from the Encoding enum")
self._backend.openssl_assert(res == 1)
return self._backend._read_mem_bio(bio)
@property
def tbs_certrequest_bytes(self):
pp = self._backend._ffi.new("unsigned char **")
res = self._backend._lib.i2d_re_X509_REQ_tbs(self._x509_req, pp)
self._backend.openssl_assert(res > 0)
pp = self._backend._ffi.gc(
pp, lambda pointer: self._backend._lib.OPENSSL_free(pointer[0])
)
return self._backend._ffi.buffer(pp[0], res)[:]
@property
def signature(self):
sig = self._backend._ffi.new("ASN1_BIT_STRING **")
self._backend._lib.X509_REQ_get0_signature(
self._x509_req, sig, self._backend._ffi.NULL
)
self._backend.openssl_assert(sig[0] != self._backend._ffi.NULL)
return _asn1_string_to_bytes(self._backend, sig[0])
@property
def is_signature_valid(self):
pkey = self._backend._lib.X509_REQ_get_pubkey(self._x509_req)
self._backend.openssl_assert(pkey != self._backend._ffi.NULL)
pkey = self._backend._ffi.gc(pkey, self._backend._lib.EVP_PKEY_free)
res = self._backend._lib.X509_REQ_verify(self._x509_req, pkey)
if res != 1:
self._backend._consume_errors()
return False
return True
@utils.register_interface(
x509.certificate_transparency.SignedCertificateTimestamp
)
class _SignedCertificateTimestamp(object):
def __init__(self, backend, sct_list, sct):
self._backend = backend
# Keep the SCT_LIST that this SCT came from alive.
self._sct_list = sct_list
self._sct = sct
@property
def version(self):
version = self._backend._lib.SCT_get_version(self._sct)
assert version == self._backend._lib.SCT_VERSION_V1
return x509.certificate_transparency.Version.v1
@property
def log_id(self):
out = self._backend._ffi.new("unsigned char **")
log_id_length = self._backend._lib.SCT_get0_log_id(self._sct, out)
assert log_id_length >= 0
return self._backend._ffi.buffer(out[0], log_id_length)[:]
@property
def timestamp(self):
timestamp = self._backend._lib.SCT_get_timestamp(self._sct)
milliseconds = timestamp % 1000
return datetime.datetime.utcfromtimestamp(
timestamp // 1000
).replace(microsecond=milliseconds * 1000)
@property
def entry_type(self):
entry_type = self._backend._lib.SCT_get_log_entry_type(self._sct)
# We currently only support loading SCTs from the X.509 extension, so
# we only have precerts.
assert entry_type == self._backend._lib.CT_LOG_ENTRY_TYPE_PRECERT
return x509.certificate_transparency.LogEntryType.PRE_CERTIFICATE
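# Illustrative usage sketch (not part of the file above): loading a PEM
# certificate returns a _Certificate wrapper like the one defined above.
# "cert.pem" is a hypothetical file name used only for the example.
from cryptography import x509 as x509_module
from cryptography.hazmat.backends import default_backend

with open("cert.pem", "rb") as pem_file:
    example_cert = x509_module.load_pem_x509_certificate(
        pem_file.read(), default_backend()
    )
print(example_cert.serial_number, example_cert.not_valid_after)
print(example_cert.signature_hash_algorithm.name)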

View File

@ -0,0 +1,5 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function

View File

@ -0,0 +1,5 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function

View File

@ -0,0 +1,302 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
def cryptography_has_ec2m():
return [
"EC_POINT_set_affine_coordinates_GF2m",
"EC_POINT_get_affine_coordinates_GF2m",
"EC_POINT_set_compressed_coordinates_GF2m",
]
def cryptography_has_ec_1_0_2():
return [
"EC_curve_nid2nist",
]
def cryptography_has_set_ecdh_auto():
return [
"SSL_CTX_set_ecdh_auto",
]
def cryptography_has_rsa_r_pkcs_decoding_error():
return [
"RSA_R_PKCS_DECODING_ERROR"
]
def cryptography_has_rsa_oaep_md():
return [
"EVP_PKEY_CTX_set_rsa_oaep_md",
]
def cryptography_has_rsa_oaep_label():
return [
"EVP_PKEY_CTX_set0_rsa_oaep_label",
]
def cryptography_has_ssl3_method():
return [
"SSLv3_method",
"SSLv3_client_method",
"SSLv3_server_method",
]
def cryptography_has_alpn():
return [
"SSL_CTX_set_alpn_protos",
"SSL_set_alpn_protos",
"SSL_CTX_set_alpn_select_cb",
"SSL_get0_alpn_selected",
]
def cryptography_has_compression():
return [
"SSL_get_current_compression",
"SSL_get_current_expansion",
"SSL_COMP_get_name",
]
def cryptography_has_get_server_tmp_key():
return [
"SSL_get_server_tmp_key",
]
def cryptography_has_102_verification_error_codes():
return [
'X509_V_ERR_SUITE_B_INVALID_VERSION',
'X509_V_ERR_SUITE_B_INVALID_ALGORITHM',
'X509_V_ERR_SUITE_B_INVALID_CURVE',
'X509_V_ERR_SUITE_B_INVALID_SIGNATURE_ALGORITHM',
'X509_V_ERR_SUITE_B_LOS_NOT_ALLOWED',
'X509_V_ERR_SUITE_B_CANNOT_SIGN_P_384_WITH_P_256',
'X509_V_ERR_HOSTNAME_MISMATCH',
'X509_V_ERR_EMAIL_MISMATCH',
'X509_V_ERR_IP_ADDRESS_MISMATCH'
]
def cryptography_has_102_verification_params():
return [
"X509_V_FLAG_SUITEB_128_LOS_ONLY",
"X509_V_FLAG_SUITEB_192_LOS",
"X509_V_FLAG_SUITEB_128_LOS",
"X509_VERIFY_PARAM_set1_host",
"X509_VERIFY_PARAM_set1_email",
"X509_VERIFY_PARAM_set1_ip",
"X509_VERIFY_PARAM_set1_ip_asc",
"X509_VERIFY_PARAM_set_hostflags",
]
def cryptography_has_x509_v_flag_trusted_first():
return [
"X509_V_FLAG_TRUSTED_FIRST",
]
def cryptography_has_x509_v_flag_partial_chain():
return [
"X509_V_FLAG_PARTIAL_CHAIN",
]
def cryptography_has_set_cert_cb():
return [
"SSL_CTX_set_cert_cb",
"SSL_set_cert_cb",
]
def cryptography_has_ssl_st():
return [
"SSL_ST_BEFORE",
"SSL_ST_OK",
"SSL_ST_INIT",
"SSL_ST_RENEGOTIATE",
]
def cryptography_has_tls_st():
return [
"TLS_ST_BEFORE",
"TLS_ST_OK",
]
def cryptography_has_locking_callbacks():
return [
"CRYPTO_LOCK",
"CRYPTO_UNLOCK",
"CRYPTO_READ",
"CRYPTO_LOCK_SSL",
"CRYPTO_lock",
]
def cryptography_has_scrypt():
return [
"EVP_PBE_scrypt",
]
def cryptography_has_generic_dtls_method():
return [
"DTLS_method",
"DTLS_server_method",
"DTLS_client_method",
"SSL_OP_NO_DTLSv1",
"SSL_OP_NO_DTLSv1_2",
"DTLS_set_link_mtu",
"DTLS_get_link_min_mtu",
]
def cryptography_has_evp_pkey_dhx():
return [
"EVP_PKEY_DHX",
]
def cryptography_has_mem_functions():
return [
"Cryptography_CRYPTO_set_mem_functions",
]
def cryptography_has_sct():
return [
"SCT_get_version",
"SCT_get_log_entry_type",
"SCT_get0_log_id",
"SCT_get_timestamp",
"SCT_set_source",
"sk_SCT_num",
"sk_SCT_value",
"SCT_LIST_free",
]
def cryptography_has_x509_store_ctx_get_issuer():
return [
"X509_STORE_get_get_issuer",
"X509_STORE_set_get_issuer",
]
def cryptography_has_x25519():
return [
"EVP_PKEY_X25519",
"NID_X25519",
]
def cryptography_has_evp_pkey_get_set_tls_encodedpoint():
return [
"EVP_PKEY_get1_tls_encodedpoint",
"EVP_PKEY_set1_tls_encodedpoint",
]
def cryptography_has_fips():
return [
"FIPS_set_mode",
"FIPS_mode",
]
def cryptography_has_ssl_sigalgs():
return [
"SSL_CTX_set1_sigalgs_list",
"SSL_get_sigalgs",
]
def cryptography_has_psk():
return [
"SSL_CTX_use_psk_identity_hint",
"SSL_CTX_set_psk_server_callback",
"SSL_CTX_set_psk_client_callback",
]
def cryptography_has_custom_ext():
return [
"SSL_CTX_add_client_custom_ext",
"SSL_CTX_add_server_custom_ext",
"SSL_extension_supported",
]
def cryptography_has_openssl_cleanup():
return [
"OPENSSL_cleanup",
]
# This is a mapping of
# {condition: function-returning-names-dependent-on-that-condition} so we can
# loop over them and delete unsupported names at runtime. It will be removed
# when cffi supports #if in cdef. We use functions instead of just a dict of
# lists so we can use coverage to measure which are used.
CONDITIONAL_NAMES = {
"Cryptography_HAS_EC2M": cryptography_has_ec2m,
"Cryptography_HAS_EC_1_0_2": cryptography_has_ec_1_0_2,
"Cryptography_HAS_SET_ECDH_AUTO": cryptography_has_set_ecdh_auto,
"Cryptography_HAS_RSA_R_PKCS_DECODING_ERROR": (
cryptography_has_rsa_r_pkcs_decoding_error
),
"Cryptography_HAS_RSA_OAEP_MD": cryptography_has_rsa_oaep_md,
"Cryptography_HAS_RSA_OAEP_LABEL": cryptography_has_rsa_oaep_label,
"Cryptography_HAS_SSL3_METHOD": cryptography_has_ssl3_method,
"Cryptography_HAS_ALPN": cryptography_has_alpn,
"Cryptography_HAS_COMPRESSION": cryptography_has_compression,
"Cryptography_HAS_GET_SERVER_TMP_KEY": cryptography_has_get_server_tmp_key,
"Cryptography_HAS_102_VERIFICATION_ERROR_CODES": (
cryptography_has_102_verification_error_codes
),
"Cryptography_HAS_102_VERIFICATION_PARAMS": (
cryptography_has_102_verification_params
),
"Cryptography_HAS_X509_V_FLAG_TRUSTED_FIRST": (
cryptography_has_x509_v_flag_trusted_first
),
"Cryptography_HAS_X509_V_FLAG_PARTIAL_CHAIN": (
cryptography_has_x509_v_flag_partial_chain
),
"Cryptography_HAS_SET_CERT_CB": cryptography_has_set_cert_cb,
"Cryptography_HAS_SSL_ST": cryptography_has_ssl_st,
"Cryptography_HAS_TLS_ST": cryptography_has_tls_st,
"Cryptography_HAS_LOCKING_CALLBACKS": cryptography_has_locking_callbacks,
"Cryptography_HAS_SCRYPT": cryptography_has_scrypt,
"Cryptography_HAS_GENERIC_DTLS_METHOD": (
cryptography_has_generic_dtls_method
),
"Cryptography_HAS_EVP_PKEY_DHX": cryptography_has_evp_pkey_dhx,
"Cryptography_HAS_MEM_FUNCTIONS": cryptography_has_mem_functions,
"Cryptography_HAS_SCT": cryptography_has_sct,
"Cryptography_HAS_X509_STORE_CTX_GET_ISSUER": (
cryptography_has_x509_store_ctx_get_issuer
),
"Cryptography_HAS_X25519": cryptography_has_x25519,
"Cryptography_HAS_EVP_PKEY_get_set_tls_encodedpoint": (
cryptography_has_evp_pkey_get_set_tls_encodedpoint
),
"Cryptography_HAS_FIPS": cryptography_has_fips,
"Cryptography_HAS_SIGALGS": cryptography_has_ssl_sigalgs,
"Cryptography_HAS_PSK": cryptography_has_psk,
"Cryptography_HAS_CUSTOM_EXT": cryptography_has_custom_ext,
"Cryptography_HAS_OPENSSL_CLEANUP": cryptography_has_openssl_cleanup,
}
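# Illustrative sketch (not part of the file above): how this mapping is
# consumed. For each Cryptography_HAS_* flag that is false on the compiled
# bindings, the corresponding names are excluded from the conditional lib;
# this mirrors build_conditional_library in binding.py further down this diff.
def _names_to_exclude(lib, conditional_names):
    excluded = set()
    for condition, names_cb in conditional_names.items():
        if not getattr(lib, condition):
            excluded.update(names_cb())
    return excluded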

View File

@ -0,0 +1,157 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import collections
import threading
import types
from cryptography import utils
from cryptography.exceptions import InternalError
from cryptography.hazmat.bindings._openssl import ffi, lib
from cryptography.hazmat.bindings.openssl._conditional import CONDITIONAL_NAMES
_OpenSSLErrorWithText = collections.namedtuple(
"_OpenSSLErrorWithText", ["code", "lib", "func", "reason", "reason_text"]
)
class _OpenSSLError(object):
def __init__(self, code, lib, func, reason):
self._code = code
self._lib = lib
self._func = func
self._reason = reason
def _lib_reason_match(self, lib, reason):
return lib == self.lib and reason == self.reason
code = utils.read_only_property("_code")
lib = utils.read_only_property("_lib")
func = utils.read_only_property("_func")
reason = utils.read_only_property("_reason")
def _consume_errors(lib):
errors = []
while True:
code = lib.ERR_get_error()
if code == 0:
break
err_lib = lib.ERR_GET_LIB(code)
err_func = lib.ERR_GET_FUNC(code)
err_reason = lib.ERR_GET_REASON(code)
errors.append(_OpenSSLError(code, err_lib, err_func, err_reason))
return errors
def _openssl_assert(lib, ok):
if not ok:
errors = _consume_errors(lib)
errors_with_text = []
for err in errors:
buf = ffi.new("char[]", 256)
lib.ERR_error_string_n(err.code, buf, len(buf))
err_text_reason = ffi.string(buf)
errors_with_text.append(
_OpenSSLErrorWithText(
err.code, err.lib, err.func, err.reason, err_text_reason
)
)
raise InternalError(
"Unknown OpenSSL error. This error is commonly encountered when "
"another library is not cleaning up the OpenSSL error stack. If "
"you are using cryptography with another library that uses "
"OpenSSL try disabling it before reporting a bug. Otherwise "
"please file an issue at https://github.com/pyca/cryptography/"
"issues with information on how to reproduce "
"this. ({0!r})".format(errors_with_text),
errors_with_text
)
def build_conditional_library(lib, conditional_names):
conditional_lib = types.ModuleType("lib")
conditional_lib._original_lib = lib
excluded_names = set()
for condition, names_cb in conditional_names.items():
if not getattr(lib, condition):
excluded_names.update(names_cb())
for attr in dir(lib):
if attr not in excluded_names:
setattr(conditional_lib, attr, getattr(lib, attr))
return conditional_lib
class Binding(object):
"""
OpenSSL API wrapper.
"""
lib = None
ffi = ffi
_lib_loaded = False
_init_lock = threading.Lock()
_lock_init_lock = threading.Lock()
def __init__(self):
self._ensure_ffi_initialized()
@classmethod
def _register_osrandom_engine(cls):
# Clear any errors extant in the queue before we start. In many
# scenarios other things may be interacting with OpenSSL in the same
# process space and it has proven untenable to assume that they will
# reliably clear the error queue. Once we clear it here we will
# error on any subsequent unexpected item in the stack.
cls.lib.ERR_clear_error()
cls._osrandom_engine_id = cls.lib.Cryptography_osrandom_engine_id
cls._osrandom_engine_name = cls.lib.Cryptography_osrandom_engine_name
result = cls.lib.Cryptography_add_osrandom_engine()
_openssl_assert(cls.lib, result in (1, 2))
@classmethod
def _ensure_ffi_initialized(cls):
with cls._init_lock:
if not cls._lib_loaded:
cls.lib = build_conditional_library(lib, CONDITIONAL_NAMES)
cls._lib_loaded = True
# initialize the SSL library
cls.lib.SSL_library_init()
# adds all ciphers/digests for EVP
cls.lib.OpenSSL_add_all_algorithms()
# loads error strings for libcrypto and libssl functions
cls.lib.SSL_load_error_strings()
cls._register_osrandom_engine()
@classmethod
def init_static_locks(cls):
with cls._lock_init_lock:
cls._ensure_ffi_initialized()
# Use Python's implementation if available, importing _ssl triggers
# the setup for this.
__import__("_ssl")
if cls.lib.CRYPTO_get_locking_callback() != cls.ffi.NULL:
return
# If nothing else has setup a locking callback already, we set up
# our own
res = lib.Cryptography_setup_ssl_threads()
_openssl_assert(cls.lib, res == 1)
# OpenSSL is not thread safe until the locks are initialized. We call this
# method in module scope so that it executes with the import lock. On
# Pythons < 3.4 this import lock is a global lock, which can prevent a race
# condition registering the OpenSSL locks. On Python 3.4+ the import lock
# is per module so this approach will not work.
Binding.init_static_locks()
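# Illustrative usage sketch (not part of the file above): instantiating
# Binding initializes OpenSSL once and exposes the filtered cffi lib/ffi.
from cryptography.hazmat.bindings.openssl.binding import Binding

example_binding = Binding()
# The Cryptography_HAS_* feature flags from CONDITIONAL_NAMES are plain
# attributes on the lib object.
if example_binding.lib.Cryptography_HAS_ALPN:
    pass  # ALPN-related symbols are available on example_binding.lib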

View File

@ -0,0 +1,5 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function

View File

@ -0,0 +1,40 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class AsymmetricSignatureContext(object):
@abc.abstractmethod
def update(self, data):
"""
Processes the provided bytes and returns nothing.
"""
@abc.abstractmethod
def finalize(self):
"""
Returns the signature as bytes.
"""
@six.add_metaclass(abc.ABCMeta)
class AsymmetricVerificationContext(object):
@abc.abstractmethod
def update(self, data):
"""
Processes the provided bytes and returns nothing.
"""
@abc.abstractmethod
def verify(self):
"""
Raises an exception if the bytes provided to update do not match the
signature or the signature does not match the public key.
"""

View File

@ -0,0 +1,212 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import abc
import six
from cryptography import utils
def generate_parameters(generator, key_size, backend):
return backend.generate_dh_parameters(generator, key_size)
class DHPrivateNumbers(object):
def __init__(self, x, public_numbers):
if not isinstance(x, six.integer_types):
raise TypeError("x must be an integer.")
if not isinstance(public_numbers, DHPublicNumbers):
raise TypeError("public_numbers must be an instance of "
"DHPublicNumbers.")
self._x = x
self._public_numbers = public_numbers
def __eq__(self, other):
if not isinstance(other, DHPrivateNumbers):
return NotImplemented
return (
self._x == other._x and
self._public_numbers == other._public_numbers
)
def __ne__(self, other):
return not self == other
def private_key(self, backend):
return backend.load_dh_private_numbers(self)
public_numbers = utils.read_only_property("_public_numbers")
x = utils.read_only_property("_x")
class DHPublicNumbers(object):
def __init__(self, y, parameter_numbers):
if not isinstance(y, six.integer_types):
raise TypeError("y must be an integer.")
if not isinstance(parameter_numbers, DHParameterNumbers):
raise TypeError(
"parameters must be an instance of DHParameterNumbers.")
self._y = y
self._parameter_numbers = parameter_numbers
def __eq__(self, other):
if not isinstance(other, DHPublicNumbers):
return NotImplemented
return (
self._y == other._y and
self._parameter_numbers == other._parameter_numbers
)
def __ne__(self, other):
return not self == other
def public_key(self, backend):
return backend.load_dh_public_numbers(self)
y = utils.read_only_property("_y")
parameter_numbers = utils.read_only_property("_parameter_numbers")
class DHParameterNumbers(object):
def __init__(self, p, g, q=None):
if (
not isinstance(p, six.integer_types) or
not isinstance(g, six.integer_types)
):
raise TypeError("p and g must be integers")
if q is not None and not isinstance(q, six.integer_types):
raise TypeError("q must be integer or None")
if g < 2:
raise ValueError("DH generator must be 2 or greater")
self._p = p
self._g = g
self._q = q
def __eq__(self, other):
if not isinstance(other, DHParameterNumbers):
return NotImplemented
return (
self._p == other._p and
self._g == other._g and
self._q == other._q
)
def __ne__(self, other):
return not self == other
def parameters(self, backend):
return backend.load_dh_parameter_numbers(self)
p = utils.read_only_property("_p")
g = utils.read_only_property("_g")
q = utils.read_only_property("_q")
@six.add_metaclass(abc.ABCMeta)
class DHParameters(object):
@abc.abstractmethod
def generate_private_key(self):
"""
Generates and returns a DHPrivateKey.
"""
@abc.abstractmethod
def parameter_bytes(self, encoding, format):
"""
Returns the parameters serialized as bytes.
"""
@abc.abstractmethod
def parameter_numbers(self):
"""
Returns a DHParameterNumbers.
"""
DHParametersWithSerialization = DHParameters
@six.add_metaclass(abc.ABCMeta)
class DHPrivateKey(object):
@abc.abstractproperty
def key_size(self):
"""
The bit length of the prime modulus.
"""
@abc.abstractmethod
def public_key(self):
"""
The DHPublicKey associated with this private key.
"""
@abc.abstractmethod
def parameters(self):
"""
The DHParameters object associated with this private key.
"""
@abc.abstractmethod
def exchange(self, peer_public_key):
"""
Given peer's DHPublicKey, carry out the key exchange and
return shared key as bytes.
"""
@six.add_metaclass(abc.ABCMeta)
class DHPrivateKeyWithSerialization(DHPrivateKey):
@abc.abstractmethod
def private_numbers(self):
"""
Returns a DHPrivateNumbers.
"""
@abc.abstractmethod
def private_bytes(self, encoding, format, encryption_algorithm):
"""
Returns the key serialized as bytes.
"""
@six.add_metaclass(abc.ABCMeta)
class DHPublicKey(object):
@abc.abstractproperty
def key_size(self):
"""
The bit length of the prime modulus.
"""
@abc.abstractmethod
def parameters(self):
"""
The DHParameters object associated with this public key.
"""
@abc.abstractmethod
def public_numbers(self):
"""
Returns a DHPublicNumbers.
"""
@abc.abstractmethod
def public_bytes(self, encoding, format):
"""
Returns the key serialized as bytes.
"""
DHPublicKeyWithSerialization = DHPublicKey
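# Illustrative usage sketch (not part of the file above): generating DH
# parameters and performing an exchange via the interfaces defined above.
# Generating a 2048-bit parameter set can take a noticeable amount of time.
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import dh

dh_parameters = dh.generate_parameters(
    generator=2, key_size=2048, backend=default_backend()
)
dh_key_a = dh_parameters.generate_private_key()
dh_key_b = dh_parameters.generate_private_key()
dh_shared_a = dh_key_a.exchange(dh_key_b.public_key())
dh_shared_b = dh_key_b.exchange(dh_key_a.public_key())
assert dh_shared_a == dh_shared_b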

View File

@ -0,0 +1,254 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import abc
import six
from cryptography import utils
@six.add_metaclass(abc.ABCMeta)
class DSAParameters(object):
@abc.abstractmethod
def generate_private_key(self):
"""
Generates and returns a DSAPrivateKey.
"""
@six.add_metaclass(abc.ABCMeta)
class DSAParametersWithNumbers(DSAParameters):
@abc.abstractmethod
def parameter_numbers(self):
"""
Returns a DSAParameterNumbers.
"""
@six.add_metaclass(abc.ABCMeta)
class DSAPrivateKey(object):
@abc.abstractproperty
def key_size(self):
"""
The bit length of the prime modulus.
"""
@abc.abstractmethod
def public_key(self):
"""
The DSAPublicKey associated with this private key.
"""
@abc.abstractmethod
def parameters(self):
"""
The DSAParameters object associated with this private key.
"""
@abc.abstractmethod
def signer(self, signature_algorithm):
"""
Returns an AsymmetricSignatureContext used for signing data.
"""
@abc.abstractmethod
def sign(self, data, algorithm):
"""
Signs the data.
"""
@six.add_metaclass(abc.ABCMeta)
class DSAPrivateKeyWithSerialization(DSAPrivateKey):
@abc.abstractmethod
def private_numbers(self):
"""
Returns a DSAPrivateNumbers.
"""
@abc.abstractmethod
def private_bytes(self, encoding, format, encryption_algorithm):
"""
Returns the key serialized as bytes.
"""
@six.add_metaclass(abc.ABCMeta)
class DSAPublicKey(object):
@abc.abstractproperty
def key_size(self):
"""
The bit length of the prime modulus.
"""
@abc.abstractmethod
def parameters(self):
"""
The DSAParameters object associated with this public key.
"""
@abc.abstractmethod
def verifier(self, signature, signature_algorithm):
"""
Returns an AsymmetricVerificationContext used for verifying data.
"""
@abc.abstractmethod
def public_numbers(self):
"""
Returns a DSAPublicNumbers.
"""
@abc.abstractmethod
def public_bytes(self, encoding, format):
"""
Returns the key serialized as bytes.
"""
@abc.abstractmethod
def verify(self, signature, data, algorithm):
"""
Verifies the signature of the data.
"""
DSAPublicKeyWithSerialization = DSAPublicKey
def generate_parameters(key_size, backend):
return backend.generate_dsa_parameters(key_size)
def generate_private_key(key_size, backend):
return backend.generate_dsa_private_key_and_parameters(key_size)
def _check_dsa_parameters(parameters):
if parameters.p.bit_length() not in [1024, 2048, 3072]:
raise ValueError("p must be exactly 1024, 2048, or 3072 bits long")
if parameters.q.bit_length() not in [160, 224, 256]:
raise ValueError("q must be exactly 160, 224, or 256 bits long")
if not (1 < parameters.g < parameters.p):
raise ValueError("g, p don't satisfy 1 < g < p.")
def _check_dsa_private_numbers(numbers):
parameters = numbers.public_numbers.parameter_numbers
_check_dsa_parameters(parameters)
if numbers.x <= 0 or numbers.x >= parameters.q:
raise ValueError("x must be > 0 and < q.")
if numbers.public_numbers.y != pow(parameters.g, numbers.x, parameters.p):
raise ValueError("y must be equal to (g ** x % p).")
class DSAParameterNumbers(object):
def __init__(self, p, q, g):
if (
not isinstance(p, six.integer_types) or
not isinstance(q, six.integer_types) or
not isinstance(g, six.integer_types)
):
raise TypeError(
"DSAParameterNumbers p, q, and g arguments must be integers."
)
self._p = p
self._q = q
self._g = g
p = utils.read_only_property("_p")
q = utils.read_only_property("_q")
g = utils.read_only_property("_g")
def parameters(self, backend):
return backend.load_dsa_parameter_numbers(self)
def __eq__(self, other):
if not isinstance(other, DSAParameterNumbers):
return NotImplemented
return self.p == other.p and self.q == other.q and self.g == other.g
def __ne__(self, other):
return not self == other
def __repr__(self):
return (
"<DSAParameterNumbers(p={self.p}, q={self.q}, g={self.g})>".format(
self=self
)
)
class DSAPublicNumbers(object):
def __init__(self, y, parameter_numbers):
if not isinstance(y, six.integer_types):
raise TypeError("DSAPublicNumbers y argument must be an integer.")
if not isinstance(parameter_numbers, DSAParameterNumbers):
raise TypeError(
"parameter_numbers must be a DSAParameterNumbers instance."
)
self._y = y
self._parameter_numbers = parameter_numbers
y = utils.read_only_property("_y")
parameter_numbers = utils.read_only_property("_parameter_numbers")
def public_key(self, backend):
return backend.load_dsa_public_numbers(self)
def __eq__(self, other):
if not isinstance(other, DSAPublicNumbers):
return NotImplemented
return (
self.y == other.y and
self.parameter_numbers == other.parameter_numbers
)
def __ne__(self, other):
return not self == other
def __repr__(self):
return (
"<DSAPublicNumbers(y={self.y}, "
"parameter_numbers={self.parameter_numbers})>".format(self=self)
)
class DSAPrivateNumbers(object):
def __init__(self, x, public_numbers):
if not isinstance(x, six.integer_types):
raise TypeError("DSAPrivateNumbers x argument must be an integer.")
if not isinstance(public_numbers, DSAPublicNumbers):
raise TypeError(
"public_numbers must be a DSAPublicNumbers instance."
)
self._public_numbers = public_numbers
self._x = x
x = utils.read_only_property("_x")
public_numbers = utils.read_only_property("_public_numbers")
def private_key(self, backend):
return backend.load_dsa_private_numbers(self)
def __eq__(self, other):
if not isinstance(other, DSAPrivateNumbers):
return NotImplemented
return (
self.x == other.x and self.public_numbers == other.public_numbers
)
def __ne__(self, other):
return not self == other

View File

@ -0,0 +1,411 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import abc
import six
from cryptography import utils
@six.add_metaclass(abc.ABCMeta)
class EllipticCurve(object):
@abc.abstractproperty
def name(self):
"""
The name of the curve. e.g. secp256r1.
"""
@abc.abstractproperty
def key_size(self):
"""
Bit size of a secret scalar for the curve.
"""
@six.add_metaclass(abc.ABCMeta)
class EllipticCurveSignatureAlgorithm(object):
@abc.abstractproperty
def algorithm(self):
"""
The digest algorithm used with this signature.
"""
@six.add_metaclass(abc.ABCMeta)
class EllipticCurvePrivateKey(object):
@abc.abstractmethod
def signer(self, signature_algorithm):
"""
Returns an AsymmetricSignatureContext used for signing data.
"""
@abc.abstractmethod
def exchange(self, algorithm, peer_public_key):
"""
Performs a key exchange operation using the provided algorithm with the
provided peer's public key.
"""
@abc.abstractmethod
def public_key(self):
"""
The EllipticCurvePublicKey for this private key.
"""
@abc.abstractproperty
def curve(self):
"""
The EllipticCurve that this key is on.
"""
@abc.abstractproperty
def key_size(self):
"""
Bit size of a secret scalar for the curve.
"""
@abc.abstractmethod
def sign(self, data, signature_algorithm):
"""
Signs the data.
"""
@six.add_metaclass(abc.ABCMeta)
class EllipticCurvePrivateKeyWithSerialization(EllipticCurvePrivateKey):
@abc.abstractmethod
def private_numbers(self):
"""
Returns an EllipticCurvePrivateNumbers.
"""
@abc.abstractmethod
def private_bytes(self, encoding, format, encryption_algorithm):
"""
Returns the key serialized as bytes.
"""
@six.add_metaclass(abc.ABCMeta)
class EllipticCurvePublicKey(object):
@abc.abstractmethod
def verifier(self, signature, signature_algorithm):
"""
Returns an AsymmetricVerificationContext used for verifying data.
"""
@abc.abstractproperty
def curve(self):
"""
The EllipticCurve that this key is on.
"""
@abc.abstractproperty
def key_size(self):
"""
Bit size of a secret scalar for the curve.
"""
@abc.abstractmethod
def public_numbers(self):
"""
Returns an EllipticCurvePublicNumbers.
"""
@abc.abstractmethod
def public_bytes(self, encoding, format):
"""
Returns the key serialized as bytes.
"""
@abc.abstractmethod
def verify(self, signature, data, signature_algorithm):
"""
Verifies the signature of the data.
"""
EllipticCurvePublicKeyWithSerialization = EllipticCurvePublicKey
@utils.register_interface(EllipticCurve)
class SECT571R1(object):
name = "sect571r1"
key_size = 570
@utils.register_interface(EllipticCurve)
class SECT409R1(object):
name = "sect409r1"
key_size = 409
@utils.register_interface(EllipticCurve)
class SECT283R1(object):
name = "sect283r1"
key_size = 283
@utils.register_interface(EllipticCurve)
class SECT233R1(object):
name = "sect233r1"
key_size = 233
@utils.register_interface(EllipticCurve)
class SECT163R2(object):
name = "sect163r2"
key_size = 163
@utils.register_interface(EllipticCurve)
class SECT571K1(object):
name = "sect571k1"
key_size = 571
@utils.register_interface(EllipticCurve)
class SECT409K1(object):
name = "sect409k1"
key_size = 409
@utils.register_interface(EllipticCurve)
class SECT283K1(object):
name = "sect283k1"
key_size = 283
@utils.register_interface(EllipticCurve)
class SECT233K1(object):
name = "sect233k1"
key_size = 233
@utils.register_interface(EllipticCurve)
class SECT163K1(object):
name = "sect163k1"
key_size = 163
@utils.register_interface(EllipticCurve)
class SECP521R1(object):
name = "secp521r1"
key_size = 521
@utils.register_interface(EllipticCurve)
class SECP384R1(object):
name = "secp384r1"
key_size = 384
@utils.register_interface(EllipticCurve)
class SECP256R1(object):
name = "secp256r1"
key_size = 256
@utils.register_interface(EllipticCurve)
class SECP256K1(object):
name = "secp256k1"
key_size = 256
@utils.register_interface(EllipticCurve)
class SECP224R1(object):
name = "secp224r1"
key_size = 224
@utils.register_interface(EllipticCurve)
class SECP192R1(object):
name = "secp192r1"
key_size = 192
@utils.register_interface(EllipticCurve)
class BrainpoolP256R1(object):
name = "brainpoolP256r1"
key_size = 256
@utils.register_interface(EllipticCurve)
class BrainpoolP384R1(object):
name = "brainpoolP384r1"
key_size = 384
@utils.register_interface(EllipticCurve)
class BrainpoolP512R1(object):
name = "brainpoolP512r1"
key_size = 512
_CURVE_TYPES = {
"prime192v1": SECP192R1,
"prime256v1": SECP256R1,
"secp192r1": SECP192R1,
"secp224r1": SECP224R1,
"secp256r1": SECP256R1,
"secp384r1": SECP384R1,
"secp521r1": SECP521R1,
"secp256k1": SECP256K1,
"sect163k1": SECT163K1,
"sect233k1": SECT233K1,
"sect283k1": SECT283K1,
"sect409k1": SECT409K1,
"sect571k1": SECT571K1,
"sect163r2": SECT163R2,
"sect233r1": SECT233R1,
"sect283r1": SECT283R1,
"sect409r1": SECT409R1,
"sect571r1": SECT571R1,
"brainpoolP256r1": BrainpoolP256R1,
"brainpoolP384r1": BrainpoolP384R1,
"brainpoolP512r1": BrainpoolP512R1,
}
@utils.register_interface(EllipticCurveSignatureAlgorithm)
class ECDSA(object):
def __init__(self, algorithm):
self._algorithm = algorithm
algorithm = utils.read_only_property("_algorithm")
def generate_private_key(curve, backend):
return backend.generate_elliptic_curve_private_key(curve)
def derive_private_key(private_value, curve, backend):
if not isinstance(private_value, six.integer_types):
raise TypeError("private_value must be an integer type.")
if private_value <= 0:
raise ValueError("private_value must be a positive integer.")
if not isinstance(curve, EllipticCurve):
raise TypeError("curve must provide the EllipticCurve interface.")
return backend.derive_elliptic_curve_private_key(private_value, curve)
class EllipticCurvePublicNumbers(object):
def __init__(self, x, y, curve):
if (
not isinstance(x, six.integer_types) or
not isinstance(y, six.integer_types)
):
raise TypeError("x and y must be integers.")
if not isinstance(curve, EllipticCurve):
raise TypeError("curve must provide the EllipticCurve interface.")
self._y = y
self._x = x
self._curve = curve
def public_key(self, backend):
return backend.load_elliptic_curve_public_numbers(self)
def encode_point(self):
# key_size is in bits. Convert to bytes and round up
byte_length = (self.curve.key_size + 7) // 8
return (
b'\x04' + utils.int_to_bytes(self.x, byte_length) +
utils.int_to_bytes(self.y, byte_length)
)
@classmethod
def from_encoded_point(cls, curve, data):
if not isinstance(curve, EllipticCurve):
raise TypeError("curve must be an EllipticCurve instance")
if data.startswith(b'\x04'):
# key_size is in bits. Convert to bytes and round up
byte_length = (curve.key_size + 7) // 8
if len(data) == 2 * byte_length + 1:
x = utils.int_from_bytes(data[1:byte_length + 1], 'big')
y = utils.int_from_bytes(data[byte_length + 1:], 'big')
return cls(x, y, curve)
else:
raise ValueError('Invalid elliptic curve point data length')
else:
raise ValueError('Unsupported elliptic curve point type')
curve = utils.read_only_property("_curve")
x = utils.read_only_property("_x")
y = utils.read_only_property("_y")
def __eq__(self, other):
if not isinstance(other, EllipticCurvePublicNumbers):
return NotImplemented
return (
self.x == other.x and
self.y == other.y and
self.curve.name == other.curve.name and
self.curve.key_size == other.curve.key_size
)
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash((self.x, self.y, self.curve.name, self.curve.key_size))
def __repr__(self):
return (
"<EllipticCurvePublicNumbers(curve={0.curve.name}, x={0.x}, "
"y={0.y}>".format(self)
)
class EllipticCurvePrivateNumbers(object):
def __init__(self, private_value, public_numbers):
if not isinstance(private_value, six.integer_types):
raise TypeError("private_value must be an integer.")
if not isinstance(public_numbers, EllipticCurvePublicNumbers):
raise TypeError(
"public_numbers must be an EllipticCurvePublicNumbers "
"instance."
)
self._private_value = private_value
self._public_numbers = public_numbers
def private_key(self, backend):
return backend.load_elliptic_curve_private_numbers(self)
private_value = utils.read_only_property("_private_value")
public_numbers = utils.read_only_property("_public_numbers")
def __eq__(self, other):
if not isinstance(other, EllipticCurvePrivateNumbers):
return NotImplemented
return (
self.private_value == other.private_value and
self.public_numbers == other.public_numbers
)
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash((self.private_value, self.public_numbers))
class ECDH(object):
pass
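
# A minimal ECDSA sketch against the interfaces above, assuming the vendored
# cryptography package and its OpenSSL backend import cleanly; the curve and
# message are arbitrary placeholders, not part of the diff.
#
#     from cryptography.hazmat.backends import default_backend
#     from cryptography.hazmat.primitives import hashes
#     from cryptography.hazmat.primitives.asymmetric import ec
#
#     backend = default_backend()
#     # Generate a fresh P-256 key, then rebuild the same key from its scalar.
#     private_key = ec.generate_private_key(ec.SECP256R1(), backend)
#     private_value = private_key.private_numbers().private_value
#     same_key = ec.derive_private_key(private_value, ec.SECP256R1(), backend)
#
#     # Sign and verify; verify() raises InvalidSignature on mismatch.
#     signature = private_key.sign(b"message", ec.ECDSA(hashes.SHA256()))
#     private_key.public_key().verify(
#         signature, b"message", ec.ECDSA(hashes.SHA256())
#     )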

View File

@ -0,0 +1,79 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import abc
import math
import six
from cryptography import utils
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import rsa
@six.add_metaclass(abc.ABCMeta)
class AsymmetricPadding(object):
@abc.abstractproperty
def name(self):
"""
A string naming this padding (e.g. "PSS", "PKCS1").
"""
@utils.register_interface(AsymmetricPadding)
class PKCS1v15(object):
name = "EMSA-PKCS1-v1_5"
@utils.register_interface(AsymmetricPadding)
class PSS(object):
MAX_LENGTH = object()
name = "EMSA-PSS"
def __init__(self, mgf, salt_length):
self._mgf = mgf
if (not isinstance(salt_length, six.integer_types) and
salt_length is not self.MAX_LENGTH):
raise TypeError("salt_length must be an integer.")
if salt_length is not self.MAX_LENGTH and salt_length < 0:
raise ValueError("salt_length must be zero or greater.")
self._salt_length = salt_length
@utils.register_interface(AsymmetricPadding)
class OAEP(object):
name = "EME-OAEP"
def __init__(self, mgf, algorithm, label):
if not isinstance(algorithm, hashes.HashAlgorithm):
raise TypeError("Expected instance of hashes.HashAlgorithm.")
self._mgf = mgf
self._algorithm = algorithm
self._label = label
class MGF1(object):
MAX_LENGTH = object()
def __init__(self, algorithm):
if not isinstance(algorithm, hashes.HashAlgorithm):
raise TypeError("Expected instance of hashes.HashAlgorithm.")
self._algorithm = algorithm
def calculate_max_pss_salt_length(key, hash_algorithm):
if not isinstance(key, (rsa.RSAPrivateKey, rsa.RSAPublicKey)):
raise TypeError("key must be an RSA public or private key")
# bit length - 1 per RFC 3447
emlen = int(math.ceil((key.key_size - 1) / 8.0))
salt_length = emlen - hash_algorithm.digest_size - 2
assert salt_length >= 0
return salt_length
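
# A short RSA-PSS sketch using the padding objects above (assumed imports;
# the 2048-bit key and SHA-256 choice are illustrative only).
#
#     from cryptography.hazmat.backends import default_backend
#     from cryptography.hazmat.primitives import hashes
#     from cryptography.hazmat.primitives.asymmetric import padding, rsa
#
#     private_key = rsa.generate_private_key(65537, 2048, default_backend())
#     # MAX_LENGTH asks the backend for the largest salt the key/hash pair
#     # allows (cf. calculate_max_pss_salt_length above).
#     pss = padding.PSS(mgf=padding.MGF1(hashes.SHA256()),
#                       salt_length=padding.PSS.MAX_LENGTH)
#     signature = private_key.sign(b"data", pss, hashes.SHA256())
#     private_key.public_key().verify(signature, b"data", pss, hashes.SHA256())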

View File

@ -0,0 +1,368 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import abc
try:
    # gcd is only available in the math module in Python 3.5+
from math import gcd
except ImportError:
from fractions import gcd
import six
from cryptography import utils
from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
from cryptography.hazmat.backends.interfaces import RSABackend
@six.add_metaclass(abc.ABCMeta)
class RSAPrivateKey(object):
@abc.abstractmethod
def signer(self, padding, algorithm):
"""
Returns an AsymmetricSignatureContext used for signing data.
"""
@abc.abstractmethod
def decrypt(self, ciphertext, padding):
"""
Decrypts the provided ciphertext.
"""
@abc.abstractproperty
def key_size(self):
"""
The bit length of the public modulus.
"""
@abc.abstractmethod
def public_key(self):
"""
The RSAPublicKey associated with this private key.
"""
@abc.abstractmethod
def sign(self, data, padding, algorithm):
"""
Signs the data.
"""
@six.add_metaclass(abc.ABCMeta)
class RSAPrivateKeyWithSerialization(RSAPrivateKey):
@abc.abstractmethod
def private_numbers(self):
"""
Returns an RSAPrivateNumbers.
"""
@abc.abstractmethod
def private_bytes(self, encoding, format, encryption_algorithm):
"""
Returns the key serialized as bytes.
"""
@six.add_metaclass(abc.ABCMeta)
class RSAPublicKey(object):
@abc.abstractmethod
def verifier(self, signature, padding, algorithm):
"""
Returns an AsymmetricVerificationContext used for verifying signatures.
"""
@abc.abstractmethod
def encrypt(self, plaintext, padding):
"""
Encrypts the given plaintext.
"""
@abc.abstractproperty
def key_size(self):
"""
The bit length of the public modulus.
"""
@abc.abstractmethod
def public_numbers(self):
"""
        Returns an RSAPublicNumbers.
"""
@abc.abstractmethod
def public_bytes(self, encoding, format):
"""
Returns the key serialized as bytes.
"""
@abc.abstractmethod
def verify(self, signature, data, padding, algorithm):
"""
Verifies the signature of the data.
"""
RSAPublicKeyWithSerialization = RSAPublicKey
def generate_private_key(public_exponent, key_size, backend):
if not isinstance(backend, RSABackend):
raise UnsupportedAlgorithm(
"Backend object does not implement RSABackend.",
_Reasons.BACKEND_MISSING_INTERFACE
)
_verify_rsa_parameters(public_exponent, key_size)
return backend.generate_rsa_private_key(public_exponent, key_size)
def _verify_rsa_parameters(public_exponent, key_size):
if public_exponent < 3:
raise ValueError("public_exponent must be >= 3.")
if public_exponent & 1 == 0:
raise ValueError("public_exponent must be odd.")
if key_size < 512:
raise ValueError("key_size must be at least 512-bits.")
def _check_private_key_components(p, q, private_exponent, dmp1, dmq1, iqmp,
public_exponent, modulus):
if modulus < 3:
raise ValueError("modulus must be >= 3.")
if p >= modulus:
raise ValueError("p must be < modulus.")
if q >= modulus:
raise ValueError("q must be < modulus.")
if dmp1 >= modulus:
raise ValueError("dmp1 must be < modulus.")
if dmq1 >= modulus:
raise ValueError("dmq1 must be < modulus.")
if iqmp >= modulus:
raise ValueError("iqmp must be < modulus.")
if private_exponent >= modulus:
raise ValueError("private_exponent must be < modulus.")
if public_exponent < 3 or public_exponent >= modulus:
raise ValueError("public_exponent must be >= 3 and < modulus.")
if public_exponent & 1 == 0:
raise ValueError("public_exponent must be odd.")
if dmp1 & 1 == 0:
raise ValueError("dmp1 must be odd.")
if dmq1 & 1 == 0:
raise ValueError("dmq1 must be odd.")
if p * q != modulus:
raise ValueError("p*q must equal modulus.")
def _check_public_key_components(e, n):
if n < 3:
raise ValueError("n must be >= 3.")
if e < 3 or e >= n:
raise ValueError("e must be >= 3 and < n.")
if e & 1 == 0:
raise ValueError("e must be odd.")
def _modinv(e, m):
"""
Modular Multiplicative Inverse. Returns x such that: (x*e) mod m == 1
"""
x1, y1, x2, y2 = 1, 0, 0, 1
a, b = e, m
while b > 0:
q, r = divmod(a, b)
xn, yn = x1 - q * x2, y1 - q * y2
a, b, x1, y1, x2, y2 = b, r, x2, y2, xn, yn
return x1 % m
def rsa_crt_iqmp(p, q):
"""
Compute the CRT (q ** -1) % p value from RSA primes p and q.
"""
return _modinv(q, p)
def rsa_crt_dmp1(private_exponent, p):
"""
Compute the CRT private_exponent % (p - 1) value from the RSA
private_exponent (d) and p.
"""
return private_exponent % (p - 1)
def rsa_crt_dmq1(private_exponent, q):
"""
Compute the CRT private_exponent % (q - 1) value from the RSA
private_exponent (d) and q.
"""
return private_exponent % (q - 1)
# Controls the number of iterations rsa_recover_prime_factors will perform
# to obtain the prime factors. Each iteration increments by 2 so the actual
# maximum attempts is half this number.
_MAX_RECOVERY_ATTEMPTS = 1000
def rsa_recover_prime_factors(n, e, d):
"""
Compute factors p and q from the private exponent d. We assume that n has
no more than two factors. This function is adapted from code in PyCrypto.
"""
# See 8.2.2(i) in Handbook of Applied Cryptography.
ktot = d * e - 1
# The quantity d*e-1 is a multiple of phi(n), even,
# and can be represented as t*2^s.
t = ktot
while t % 2 == 0:
t = t // 2
# Cycle through all multiplicative inverses in Zn.
# The algorithm is non-deterministic, but there is a 50% chance
# any candidate a leads to successful factoring.
# See "Digitalized Signatures and Public Key Functions as Intractable
# as Factorization", M. Rabin, 1979
spotted = False
a = 2
while not spotted and a < _MAX_RECOVERY_ATTEMPTS:
k = t
# Cycle through all values a^{t*2^i}=a^k
while k < ktot:
cand = pow(a, k, n)
# Check if a^k is a non-trivial root of unity (mod n)
if cand != 1 and cand != (n - 1) and pow(cand, 2, n) == 1:
# We have found a number such that (cand-1)(cand+1)=0 (mod n).
# Either of the terms divides n.
p = gcd(cand + 1, n)
spotted = True
break
k *= 2
# This value was not any good... let's try another!
a += 2
if not spotted:
raise ValueError("Unable to compute factors p and q from exponent d.")
# Found !
q, r = divmod(n, p)
assert r == 0
p, q = sorted((p, q), reverse=True)
return (p, q)
class RSAPrivateNumbers(object):
def __init__(self, p, q, d, dmp1, dmq1, iqmp,
public_numbers):
if (
not isinstance(p, six.integer_types) or
not isinstance(q, six.integer_types) or
not isinstance(d, six.integer_types) or
not isinstance(dmp1, six.integer_types) or
not isinstance(dmq1, six.integer_types) or
not isinstance(iqmp, six.integer_types)
):
            raise TypeError(
                "RSAPrivateNumbers p, q, d, dmp1, dmq1, iqmp arguments must"
                " all be integers."
            )
if not isinstance(public_numbers, RSAPublicNumbers):
raise TypeError(
"RSAPrivateNumbers public_numbers must be an RSAPublicNumbers"
" instance."
)
self._p = p
self._q = q
self._d = d
self._dmp1 = dmp1
self._dmq1 = dmq1
self._iqmp = iqmp
self._public_numbers = public_numbers
p = utils.read_only_property("_p")
q = utils.read_only_property("_q")
d = utils.read_only_property("_d")
dmp1 = utils.read_only_property("_dmp1")
dmq1 = utils.read_only_property("_dmq1")
iqmp = utils.read_only_property("_iqmp")
public_numbers = utils.read_only_property("_public_numbers")
def private_key(self, backend):
return backend.load_rsa_private_numbers(self)
def __eq__(self, other):
if not isinstance(other, RSAPrivateNumbers):
return NotImplemented
return (
self.p == other.p and
self.q == other.q and
self.d == other.d and
self.dmp1 == other.dmp1 and
self.dmq1 == other.dmq1 and
self.iqmp == other.iqmp and
self.public_numbers == other.public_numbers
)
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash((
self.p,
self.q,
self.d,
self.dmp1,
self.dmq1,
self.iqmp,
self.public_numbers,
))
class RSAPublicNumbers(object):
def __init__(self, e, n):
if (
not isinstance(e, six.integer_types) or
not isinstance(n, six.integer_types)
):
raise TypeError("RSAPublicNumbers arguments must be integers.")
self._e = e
self._n = n
e = utils.read_only_property("_e")
n = utils.read_only_property("_n")
def public_key(self, backend):
return backend.load_rsa_public_numbers(self)
def __repr__(self):
return "<RSAPublicNumbers(e={0.e}, n={0.n})>".format(self)
def __eq__(self, other):
if not isinstance(other, RSAPublicNumbers):
return NotImplemented
return self.e == other.e and self.n == other.n
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash((self.e, self.n))
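
# A sketch of the CRT helpers above, rebuilding a full private key from
# (n, e, d); the starting values come from a freshly generated key purely
# for illustration (assumed imports and backend).
#
#     from cryptography.hazmat.backends import default_backend
#     from cryptography.hazmat.primitives.asymmetric import rsa
#
#     backend = default_backend()
#     key = rsa.generate_private_key(65537, 2048, backend)
#     pub = key.public_key().public_numbers()
#     d = key.private_numbers().d
#
#     # Recover p and q from (n, e, d), then rebuild the CRT parameters.
#     p, q = rsa.rsa_recover_prime_factors(pub.n, pub.e, d)
#     numbers = rsa.RSAPrivateNumbers(
#         p=p, q=q, d=d,
#         dmp1=rsa.rsa_crt_dmp1(d, p),
#         dmq1=rsa.rsa_crt_dmq1(d, q),
#         iqmp=rsa.rsa_crt_iqmp(p, q),
#         public_numbers=rsa.RSAPublicNumbers(pub.e, pub.n),
#     )
#     restored_key = numbers.private_key(backend)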

View File

@ -0,0 +1,60 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import warnings
from asn1crypto.algos import DSASignature
import six
from cryptography import utils
from cryptography.hazmat.primitives import hashes
def decode_rfc6979_signature(signature):
warnings.warn(
"decode_rfc6979_signature is deprecated and will "
"be removed in a future version, use decode_dss_signature instead.",
utils.PersistentlyDeprecated,
stacklevel=2
)
return decode_dss_signature(signature)
def decode_dss_signature(signature):
data = DSASignature.load(signature, strict=True).native
return data['r'], data['s']
def encode_rfc6979_signature(r, s):
warnings.warn(
"encode_rfc6979_signature is deprecated and will "
"be removed in a future version, use encode_dss_signature instead.",
utils.PersistentlyDeprecated,
stacklevel=2
)
return encode_dss_signature(r, s)
def encode_dss_signature(r, s):
if (
not isinstance(r, six.integer_types) or
not isinstance(s, six.integer_types)
):
raise ValueError("Both r and s must be integers")
return DSASignature({'r': r, 's': s}).dump()
class Prehashed(object):
def __init__(self, algorithm):
if not isinstance(algorithm, hashes.HashAlgorithm):
raise TypeError("Expected instance of HashAlgorithm.")
self._algorithm = algorithm
self._digest_size = algorithm.digest_size
digest_size = utils.read_only_property("_digest_size")
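
# A small round-trip sketch for the DSS encoding helpers above; the r/s
# values are arbitrary examples.
#
#     from cryptography.hazmat.primitives.asymmetric.utils import (
#         decode_dss_signature, encode_dss_signature
#     )
#
#     sig = encode_dss_signature(r=1234567890, s=987654321)
#     r, s = decode_dss_signature(sig)   # back to the original integers
#     assert (r, s) == (1234567890, 987654321)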

View File

@ -0,0 +1,54 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import abc
import six
from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
@six.add_metaclass(abc.ABCMeta)
class X25519PublicKey(object):
@classmethod
def from_public_bytes(cls, data):
from cryptography.hazmat.backends.openssl.backend import backend
if not backend.x25519_supported():
raise UnsupportedAlgorithm(
"X25519 is not supported by this version of OpenSSL.",
_Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM
)
return backend.x25519_load_public_bytes(data)
@abc.abstractmethod
def public_bytes(self):
pass
@six.add_metaclass(abc.ABCMeta)
class X25519PrivateKey(object):
@classmethod
def generate(cls):
from cryptography.hazmat.backends.openssl.backend import backend
if not backend.x25519_supported():
raise UnsupportedAlgorithm(
"X25519 is not supported by this version of OpenSSL.",
_Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM
)
return backend.x25519_generate_key()
@classmethod
def _from_private_bytes(cls, data):
from cryptography.hazmat.backends.openssl.backend import backend
return backend.x25519_load_private_bytes(data)
@abc.abstractmethod
def public_key(self):
pass
@abc.abstractmethod
def exchange(self, peer_public_key):
pass
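
# A minimal X25519 exchange sketch built on the classes above, assuming the
# backend's OpenSSL supports X25519; both sides are generated locally here,
# which is only for illustration.
#
#     from cryptography.hazmat.primitives.asymmetric.x25519 import (
#         X25519PrivateKey, X25519PublicKey
#     )
#
#     alice = X25519PrivateKey.generate()
#     bob = X25519PrivateKey.generate()
#
#     # In practice only the 32-byte public values cross the wire.
#     bob_public = X25519PublicKey.from_public_bytes(
#         bob.public_key().public_bytes()
#     )
#     shared_alice = alice.exchange(bob_public)
#     shared_bob = bob.exchange(alice.public_key())
#     assert shared_alice == shared_bob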

View File

@ -0,0 +1,21 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography.hazmat.primitives.ciphers.base import (
AEADCipherContext, AEADDecryptionContext, AEADEncryptionContext,
BlockCipherAlgorithm, Cipher, CipherAlgorithm, CipherContext
)
__all__ = [
"Cipher",
"CipherAlgorithm",
"BlockCipherAlgorithm",
"CipherContext",
"AEADCipherContext",
"AEADDecryptionContext",
"AEADEncryptionContext",
]

View File

@ -0,0 +1,188 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import os
from cryptography import exceptions, utils
from cryptography.hazmat.backends.openssl import aead
from cryptography.hazmat.backends.openssl.backend import backend
class ChaCha20Poly1305(object):
_MAX_SIZE = 2 ** 32
def __init__(self, key):
if not backend.aead_cipher_supported(self):
raise exceptions.UnsupportedAlgorithm(
"ChaCha20Poly1305 is not supported by this version of OpenSSL",
exceptions._Reasons.UNSUPPORTED_CIPHER
)
utils._check_bytes("key", key)
if len(key) != 32:
raise ValueError("ChaCha20Poly1305 key must be 32 bytes.")
self._key = key
@classmethod
def generate_key(cls):
return os.urandom(32)
def encrypt(self, nonce, data, associated_data):
if associated_data is None:
associated_data = b""
if len(data) > self._MAX_SIZE or len(associated_data) > self._MAX_SIZE:
# This is OverflowError to match what cffi would raise
raise OverflowError(
"Data or associated data too long. Max 2**32 bytes"
)
self._check_params(nonce, data, associated_data)
return aead._encrypt(
backend, self, nonce, data, associated_data, 16
)
def decrypt(self, nonce, data, associated_data):
if associated_data is None:
associated_data = b""
self._check_params(nonce, data, associated_data)
return aead._decrypt(
backend, self, nonce, data, associated_data, 16
)
def _check_params(self, nonce, data, associated_data):
utils._check_bytes("nonce", nonce)
utils._check_bytes("data", data)
utils._check_bytes("associated_data", associated_data)
if len(nonce) != 12:
raise ValueError("Nonce must be 12 bytes")
class AESCCM(object):
_MAX_SIZE = 2 ** 32
def __init__(self, key, tag_length=16):
utils._check_bytes("key", key)
if len(key) not in (16, 24, 32):
raise ValueError("AESCCM key must be 128, 192, or 256 bits.")
self._key = key
if not isinstance(tag_length, int):
raise TypeError("tag_length must be an integer")
if tag_length not in (4, 6, 8, 12, 14, 16):
raise ValueError("Invalid tag_length")
self._tag_length = tag_length
if not backend.aead_cipher_supported(self):
raise exceptions.UnsupportedAlgorithm(
"AESCCM is not supported by this version of OpenSSL",
exceptions._Reasons.UNSUPPORTED_CIPHER
)
@classmethod
def generate_key(cls, bit_length):
if not isinstance(bit_length, int):
raise TypeError("bit_length must be an integer")
if bit_length not in (128, 192, 256):
raise ValueError("bit_length must be 128, 192, or 256")
return os.urandom(bit_length // 8)
def encrypt(self, nonce, data, associated_data):
if associated_data is None:
associated_data = b""
if len(data) > self._MAX_SIZE or len(associated_data) > self._MAX_SIZE:
# This is OverflowError to match what cffi would raise
raise OverflowError(
"Data or associated data too long. Max 2**32 bytes"
)
self._check_params(nonce, data, associated_data)
self._validate_lengths(nonce, len(data))
return aead._encrypt(
backend, self, nonce, data, associated_data, self._tag_length
)
def decrypt(self, nonce, data, associated_data):
if associated_data is None:
associated_data = b""
self._check_params(nonce, data, associated_data)
return aead._decrypt(
backend, self, nonce, data, associated_data, self._tag_length
)
def _validate_lengths(self, nonce, data_len):
# For information about computing this, see
# https://tools.ietf.org/html/rfc3610#section-2.1
l_val = 15 - len(nonce)
if 2 ** (8 * l_val) < data_len:
raise ValueError("Nonce too long for data")
def _check_params(self, nonce, data, associated_data):
utils._check_bytes("nonce", nonce)
utils._check_bytes("data", data)
utils._check_bytes("associated_data", associated_data)
if not 7 <= len(nonce) <= 13:
raise ValueError("Nonce must be between 7 and 13 bytes")
class AESGCM(object):
_MAX_SIZE = 2 ** 32
def __init__(self, key):
utils._check_bytes("key", key)
if len(key) not in (16, 24, 32):
raise ValueError("AESGCM key must be 128, 192, or 256 bits.")
self._key = key
@classmethod
def generate_key(cls, bit_length):
if not isinstance(bit_length, int):
raise TypeError("bit_length must be an integer")
if bit_length not in (128, 192, 256):
raise ValueError("bit_length must be 128, 192, or 256")
return os.urandom(bit_length // 8)
def encrypt(self, nonce, data, associated_data):
if associated_data is None:
associated_data = b""
if len(data) > self._MAX_SIZE or len(associated_data) > self._MAX_SIZE:
# This is OverflowError to match what cffi would raise
raise OverflowError(
"Data or associated data too long. Max 2**32 bytes"
)
self._check_params(nonce, data, associated_data)
return aead._encrypt(
backend, self, nonce, data, associated_data, 16
)
def decrypt(self, nonce, data, associated_data):
if associated_data is None:
associated_data = b""
self._check_params(nonce, data, associated_data)
return aead._decrypt(
backend, self, nonce, data, associated_data, 16
)
def _check_params(self, nonce, data, associated_data):
utils._check_bytes("nonce", nonce)
utils._check_bytes("data", data)
utils._check_bytes("associated_data", associated_data)
if len(nonce) == 0:
raise ValueError("Nonce must be at least 1 byte")
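
# A minimal ChaCha20Poly1305 sketch using the AEAD wrappers above; the nonce
# must be unique per key and is random here only for brevity.
#
#     import os
#     from cryptography.hazmat.primitives.ciphers.aead import ChaCha20Poly1305
#
#     key = ChaCha20Poly1305.generate_key()
#     aead = ChaCha20Poly1305(key)
#     nonce = os.urandom(12)          # 96-bit nonce, never reused with a key
#     ct = aead.encrypt(nonce, b"secret", b"header")
#     assert aead.decrypt(nonce, ct, b"header") == b"secret"
#     # A tampered ciphertext or AAD raises cryptography.exceptions.InvalidTag.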

View File

@ -0,0 +1,168 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.hazmat.primitives.ciphers import (
BlockCipherAlgorithm, CipherAlgorithm
)
from cryptography.hazmat.primitives.ciphers.modes import ModeWithNonce
def _verify_key_size(algorithm, key):
# Verify that the key is instance of bytes
utils._check_bytes("key", key)
# Verify that the key size matches the expected key size
if len(key) * 8 not in algorithm.key_sizes:
raise ValueError("Invalid key size ({0}) for {1}.".format(
len(key) * 8, algorithm.name
))
return key
@utils.register_interface(BlockCipherAlgorithm)
@utils.register_interface(CipherAlgorithm)
class AES(object):
name = "AES"
block_size = 128
# 512 added to support AES-256-XTS, which uses 512-bit keys
key_sizes = frozenset([128, 192, 256, 512])
def __init__(self, key):
self.key = _verify_key_size(self, key)
@property
def key_size(self):
return len(self.key) * 8
@utils.register_interface(BlockCipherAlgorithm)
@utils.register_interface(CipherAlgorithm)
class Camellia(object):
name = "camellia"
block_size = 128
key_sizes = frozenset([128, 192, 256])
def __init__(self, key):
self.key = _verify_key_size(self, key)
@property
def key_size(self):
return len(self.key) * 8
@utils.register_interface(BlockCipherAlgorithm)
@utils.register_interface(CipherAlgorithm)
class TripleDES(object):
name = "3DES"
block_size = 64
key_sizes = frozenset([64, 128, 192])
def __init__(self, key):
if len(key) == 8:
key += key + key
elif len(key) == 16:
key += key[:8]
self.key = _verify_key_size(self, key)
@property
def key_size(self):
return len(self.key) * 8
@utils.register_interface(BlockCipherAlgorithm)
@utils.register_interface(CipherAlgorithm)
class Blowfish(object):
name = "Blowfish"
block_size = 64
key_sizes = frozenset(range(32, 449, 8))
def __init__(self, key):
self.key = _verify_key_size(self, key)
@property
def key_size(self):
return len(self.key) * 8
@utils.register_interface(BlockCipherAlgorithm)
@utils.register_interface(CipherAlgorithm)
class CAST5(object):
name = "CAST5"
block_size = 64
key_sizes = frozenset(range(40, 129, 8))
def __init__(self, key):
self.key = _verify_key_size(self, key)
@property
def key_size(self):
return len(self.key) * 8
@utils.register_interface(CipherAlgorithm)
class ARC4(object):
name = "RC4"
key_sizes = frozenset([40, 56, 64, 80, 128, 160, 192, 256])
def __init__(self, key):
self.key = _verify_key_size(self, key)
@property
def key_size(self):
return len(self.key) * 8
@utils.register_interface(CipherAlgorithm)
class IDEA(object):
name = "IDEA"
block_size = 64
key_sizes = frozenset([128])
def __init__(self, key):
self.key = _verify_key_size(self, key)
@property
def key_size(self):
return len(self.key) * 8
@utils.register_interface(BlockCipherAlgorithm)
@utils.register_interface(CipherAlgorithm)
class SEED(object):
name = "SEED"
block_size = 128
key_sizes = frozenset([128])
def __init__(self, key):
self.key = _verify_key_size(self, key)
@property
def key_size(self):
return len(self.key) * 8
@utils.register_interface(CipherAlgorithm)
@utils.register_interface(ModeWithNonce)
class ChaCha20(object):
name = "ChaCha20"
key_sizes = frozenset([256])
def __init__(self, key, nonce):
self.key = _verify_key_size(self, key)
if not isinstance(nonce, bytes):
raise TypeError("nonce must be bytes")
if len(nonce) != 16:
raise ValueError("nonce must be 128-bits (16 bytes)")
self._nonce = nonce
nonce = utils.read_only_property("_nonce")
@property
def key_size(self):
return len(self.key) * 8
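
# These algorithm classes only validate and carry the key; a quick sketch of
# constructing a few of them, with key sizes chosen arbitrarily.
#
#     import os
#     from cryptography.hazmat.primitives.ciphers import algorithms
#
#     aes = algorithms.AES(os.urandom(32))        # 256-bit AES key
#     assert aes.key_size == 256 and aes.block_size == 128
#
#     # ChaCha20 takes a 256-bit key and a 16-byte nonce.
#     chacha = algorithms.ChaCha20(os.urandom(32), os.urandom(16))
#
#     # An 8-byte TripleDES key is expanded to 24 bytes (see __init__ above).
#     tdes = algorithms.TripleDES(os.urandom(8))
#     assert tdes.key_size == 192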

View File

@ -0,0 +1,235 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import abc
import six
from cryptography import utils
from cryptography.exceptions import (
AlreadyFinalized, AlreadyUpdated, NotYetFinalized, UnsupportedAlgorithm,
_Reasons
)
from cryptography.hazmat.backends.interfaces import CipherBackend
from cryptography.hazmat.primitives.ciphers import modes
@six.add_metaclass(abc.ABCMeta)
class CipherAlgorithm(object):
@abc.abstractproperty
def name(self):
"""
        A string naming this algorithm (e.g. "AES", "Camellia").
"""
@abc.abstractproperty
def key_size(self):
"""
The size of the key being used as an integer in bits (e.g. 128, 256).
"""
@six.add_metaclass(abc.ABCMeta)
class BlockCipherAlgorithm(object):
@abc.abstractproperty
def block_size(self):
"""
The size of a block as an integer in bits (e.g. 64, 128).
"""
@six.add_metaclass(abc.ABCMeta)
class CipherContext(object):
@abc.abstractmethod
def update(self, data):
"""
Processes the provided bytes through the cipher and returns the results
as bytes.
"""
@abc.abstractmethod
def update_into(self, data, buf):
"""
Processes the provided bytes and writes the resulting data into the
provided buffer. Returns the number of bytes written.
"""
@abc.abstractmethod
def finalize(self):
"""
Returns the results of processing the final block as bytes.
"""
@six.add_metaclass(abc.ABCMeta)
class AEADCipherContext(object):
@abc.abstractmethod
def authenticate_additional_data(self, data):
"""
Authenticates the provided bytes.
"""
@six.add_metaclass(abc.ABCMeta)
class AEADDecryptionContext(object):
@abc.abstractmethod
def finalize_with_tag(self, tag):
"""
Returns the results of processing the final block as bytes and allows
delayed passing of the authentication tag.
"""
@six.add_metaclass(abc.ABCMeta)
class AEADEncryptionContext(object):
@abc.abstractproperty
def tag(self):
"""
Returns tag bytes. This is only available after encryption is
finalized.
"""
class Cipher(object):
def __init__(self, algorithm, mode, backend):
if not isinstance(backend, CipherBackend):
raise UnsupportedAlgorithm(
"Backend object does not implement CipherBackend.",
_Reasons.BACKEND_MISSING_INTERFACE
)
if not isinstance(algorithm, CipherAlgorithm):
raise TypeError("Expected interface of CipherAlgorithm.")
if mode is not None:
mode.validate_for_algorithm(algorithm)
self.algorithm = algorithm
self.mode = mode
self._backend = backend
def encryptor(self):
if isinstance(self.mode, modes.ModeWithAuthenticationTag):
if self.mode.tag is not None:
raise ValueError(
"Authentication tag must be None when encrypting."
)
ctx = self._backend.create_symmetric_encryption_ctx(
self.algorithm, self.mode
)
return self._wrap_ctx(ctx, encrypt=True)
def decryptor(self):
ctx = self._backend.create_symmetric_decryption_ctx(
self.algorithm, self.mode
)
return self._wrap_ctx(ctx, encrypt=False)
def _wrap_ctx(self, ctx, encrypt):
if isinstance(self.mode, modes.ModeWithAuthenticationTag):
if encrypt:
return _AEADEncryptionContext(ctx)
else:
return _AEADCipherContext(ctx)
else:
return _CipherContext(ctx)
@utils.register_interface(CipherContext)
class _CipherContext(object):
def __init__(self, ctx):
self._ctx = ctx
def update(self, data):
if self._ctx is None:
raise AlreadyFinalized("Context was already finalized.")
return self._ctx.update(data)
def update_into(self, data, buf):
if self._ctx is None:
raise AlreadyFinalized("Context was already finalized.")
return self._ctx.update_into(data, buf)
def finalize(self):
if self._ctx is None:
raise AlreadyFinalized("Context was already finalized.")
data = self._ctx.finalize()
self._ctx = None
return data
@utils.register_interface(AEADCipherContext)
@utils.register_interface(CipherContext)
@utils.register_interface(AEADDecryptionContext)
class _AEADCipherContext(object):
def __init__(self, ctx):
self._ctx = ctx
self._bytes_processed = 0
self._aad_bytes_processed = 0
self._tag = None
self._updated = False
def _check_limit(self, data_size):
if self._ctx is None:
raise AlreadyFinalized("Context was already finalized.")
self._updated = True
self._bytes_processed += data_size
if self._bytes_processed > self._ctx._mode._MAX_ENCRYPTED_BYTES:
raise ValueError(
"{0} has a maximum encrypted byte limit of {1}".format(
self._ctx._mode.name, self._ctx._mode._MAX_ENCRYPTED_BYTES
)
)
def update(self, data):
self._check_limit(len(data))
return self._ctx.update(data)
def update_into(self, data, buf):
self._check_limit(len(data))
return self._ctx.update_into(data, buf)
def finalize(self):
if self._ctx is None:
raise AlreadyFinalized("Context was already finalized.")
data = self._ctx.finalize()
self._tag = self._ctx.tag
self._ctx = None
return data
def finalize_with_tag(self, tag):
if self._ctx is None:
raise AlreadyFinalized("Context was already finalized.")
data = self._ctx.finalize_with_tag(tag)
self._tag = self._ctx.tag
self._ctx = None
return data
def authenticate_additional_data(self, data):
if self._ctx is None:
raise AlreadyFinalized("Context was already finalized.")
if self._updated:
raise AlreadyUpdated("Update has been called on this context.")
self._aad_bytes_processed += len(data)
if self._aad_bytes_processed > self._ctx._mode._MAX_AAD_BYTES:
raise ValueError(
"{0} has a maximum AAD byte limit of {1}".format(
self._ctx._mode.name, self._ctx._mode._MAX_AAD_BYTES
)
)
self._ctx.authenticate_additional_data(data)
@utils.register_interface(AEADEncryptionContext)
class _AEADEncryptionContext(_AEADCipherContext):
@property
def tag(self):
if self._ctx is not None:
raise NotYetFinalized("You must finalize encryption before "
"getting the tag.")
return self._tag
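
# A short Cipher/CipherContext sketch with AES-CTR (stream-like, so no block
# padding is needed); key and nonce are random placeholders.
#
#     import os
#     from cryptography.hazmat.backends import default_backend
#     from cryptography.hazmat.primitives.ciphers import (
#         Cipher, algorithms, modes
#     )
#
#     key, nonce = os.urandom(32), os.urandom(16)
#     cipher = Cipher(algorithms.AES(key), modes.CTR(nonce),
#                     backend=default_backend())
#
#     encryptor = cipher.encryptor()
#     ct = encryptor.update(b"some plaintext") + encryptor.finalize()
#
#     decryptor = cipher.decryptor()
#     assert decryptor.update(ct) + decryptor.finalize() == b"some plaintext"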

View File

@ -0,0 +1,231 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import abc
import six
from cryptography import utils
@six.add_metaclass(abc.ABCMeta)
class Mode(object):
@abc.abstractproperty
def name(self):
"""
A string naming this mode (e.g. "ECB", "CBC").
"""
@abc.abstractmethod
def validate_for_algorithm(self, algorithm):
"""
Checks that all the necessary invariants of this (mode, algorithm)
combination are met.
"""
@six.add_metaclass(abc.ABCMeta)
class ModeWithInitializationVector(object):
@abc.abstractproperty
def initialization_vector(self):
"""
The value of the initialization vector for this mode as bytes.
"""
@six.add_metaclass(abc.ABCMeta)
class ModeWithTweak(object):
@abc.abstractproperty
def tweak(self):
"""
The value of the tweak for this mode as bytes.
"""
@six.add_metaclass(abc.ABCMeta)
class ModeWithNonce(object):
@abc.abstractproperty
def nonce(self):
"""
The value of the nonce for this mode as bytes.
"""
@six.add_metaclass(abc.ABCMeta)
class ModeWithAuthenticationTag(object):
@abc.abstractproperty
def tag(self):
"""
The value of the tag supplied to the constructor of this mode.
"""
def _check_aes_key_length(self, algorithm):
if algorithm.key_size > 256 and algorithm.name == "AES":
raise ValueError(
"Only 128, 192, and 256 bit keys are allowed for this AES mode"
)
def _check_iv_length(self, algorithm):
if len(self.initialization_vector) * 8 != algorithm.block_size:
raise ValueError("Invalid IV size ({0}) for {1}.".format(
len(self.initialization_vector), self.name
))
def _check_iv_and_key_length(self, algorithm):
_check_aes_key_length(self, algorithm)
_check_iv_length(self, algorithm)
@utils.register_interface(Mode)
@utils.register_interface(ModeWithInitializationVector)
class CBC(object):
name = "CBC"
def __init__(self, initialization_vector):
if not isinstance(initialization_vector, bytes):
raise TypeError("initialization_vector must be bytes")
self._initialization_vector = initialization_vector
initialization_vector = utils.read_only_property("_initialization_vector")
validate_for_algorithm = _check_iv_and_key_length
@utils.register_interface(Mode)
@utils.register_interface(ModeWithTweak)
class XTS(object):
name = "XTS"
def __init__(self, tweak):
if not isinstance(tweak, bytes):
raise TypeError("tweak must be bytes")
if len(tweak) != 16:
raise ValueError("tweak must be 128-bits (16 bytes)")
self._tweak = tweak
tweak = utils.read_only_property("_tweak")
def validate_for_algorithm(self, algorithm):
if algorithm.key_size not in (256, 512):
raise ValueError(
"The XTS specification requires a 256-bit key for AES-128-XTS"
" and 512-bit key for AES-256-XTS"
)
@utils.register_interface(Mode)
class ECB(object):
name = "ECB"
validate_for_algorithm = _check_aes_key_length
@utils.register_interface(Mode)
@utils.register_interface(ModeWithInitializationVector)
class OFB(object):
name = "OFB"
def __init__(self, initialization_vector):
if not isinstance(initialization_vector, bytes):
raise TypeError("initialization_vector must be bytes")
self._initialization_vector = initialization_vector
initialization_vector = utils.read_only_property("_initialization_vector")
validate_for_algorithm = _check_iv_and_key_length
@utils.register_interface(Mode)
@utils.register_interface(ModeWithInitializationVector)
class CFB(object):
name = "CFB"
def __init__(self, initialization_vector):
if not isinstance(initialization_vector, bytes):
raise TypeError("initialization_vector must be bytes")
self._initialization_vector = initialization_vector
initialization_vector = utils.read_only_property("_initialization_vector")
validate_for_algorithm = _check_iv_and_key_length
@utils.register_interface(Mode)
@utils.register_interface(ModeWithInitializationVector)
class CFB8(object):
name = "CFB8"
def __init__(self, initialization_vector):
if not isinstance(initialization_vector, bytes):
raise TypeError("initialization_vector must be bytes")
self._initialization_vector = initialization_vector
initialization_vector = utils.read_only_property("_initialization_vector")
validate_for_algorithm = _check_iv_and_key_length
@utils.register_interface(Mode)
@utils.register_interface(ModeWithNonce)
class CTR(object):
name = "CTR"
def __init__(self, nonce):
if not isinstance(nonce, bytes):
raise TypeError("nonce must be bytes")
self._nonce = nonce
nonce = utils.read_only_property("_nonce")
def validate_for_algorithm(self, algorithm):
_check_aes_key_length(self, algorithm)
if len(self.nonce) * 8 != algorithm.block_size:
raise ValueError("Invalid nonce size ({0}) for {1}.".format(
len(self.nonce), self.name
))
@utils.register_interface(Mode)
@utils.register_interface(ModeWithInitializationVector)
@utils.register_interface(ModeWithAuthenticationTag)
class GCM(object):
name = "GCM"
_MAX_ENCRYPTED_BYTES = (2 ** 39 - 256) // 8
_MAX_AAD_BYTES = (2 ** 64) // 8
def __init__(self, initialization_vector, tag=None, min_tag_length=16):
        # len(initialization_vector) must be in [1, 2 ** 64), but it's impossible
# to actually construct a bytes object that large, so we don't check
# for it
if not isinstance(initialization_vector, bytes):
raise TypeError("initialization_vector must be bytes")
if len(initialization_vector) == 0:
raise ValueError("initialization_vector must be at least 1 byte")
self._initialization_vector = initialization_vector
if tag is not None:
if not isinstance(tag, bytes):
raise TypeError("tag must be bytes or None")
if min_tag_length < 4:
raise ValueError("min_tag_length must be >= 4")
if len(tag) < min_tag_length:
raise ValueError(
"Authentication tag must be {0} bytes or longer.".format(
min_tag_length)
)
self._tag = tag
self._min_tag_length = min_tag_length
tag = utils.read_only_property("_tag")
initialization_vector = utils.read_only_property("_initialization_vector")
def validate_for_algorithm(self, algorithm):
_check_aes_key_length(self, algorithm)
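
# A GCM sketch showing how the authentication tag flows through the mode
# object above; 96-bit IV and random placeholders, for illustration only.
#
#     import os
#     from cryptography.hazmat.backends import default_backend
#     from cryptography.hazmat.primitives.ciphers import (
#         Cipher, algorithms, modes
#     )
#
#     backend = default_backend()
#     key, iv = os.urandom(32), os.urandom(12)
#
#     encryptor = Cipher(algorithms.AES(key), modes.GCM(iv), backend).encryptor()
#     encryptor.authenticate_additional_data(b"header")
#     ct = encryptor.update(b"secret") + encryptor.finalize()
#     tag = encryptor.tag                     # only available after finalize()
#
#     decryptor = Cipher(algorithms.AES(key), modes.GCM(iv, tag),
#                        backend).decryptor()
#     decryptor.authenticate_additional_data(b"header")
#     assert decryptor.update(ct) + decryptor.finalize() == b"secret"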

View File

@ -0,0 +1,66 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.exceptions import (
AlreadyFinalized, UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.backends.interfaces import CMACBackend
from cryptography.hazmat.primitives import ciphers, mac
@utils.register_interface(mac.MACContext)
class CMAC(object):
def __init__(self, algorithm, backend, ctx=None):
if not isinstance(backend, CMACBackend):
raise UnsupportedAlgorithm(
"Backend object does not implement CMACBackend.",
_Reasons.BACKEND_MISSING_INTERFACE
)
if not isinstance(algorithm, ciphers.BlockCipherAlgorithm):
raise TypeError(
"Expected instance of BlockCipherAlgorithm."
)
self._algorithm = algorithm
self._backend = backend
if ctx is None:
self._ctx = self._backend.create_cmac_ctx(self._algorithm)
else:
self._ctx = ctx
def update(self, data):
if self._ctx is None:
raise AlreadyFinalized("Context was already finalized.")
if not isinstance(data, bytes):
raise TypeError("data must be bytes.")
self._ctx.update(data)
def finalize(self):
if self._ctx is None:
raise AlreadyFinalized("Context was already finalized.")
digest = self._ctx.finalize()
self._ctx = None
return digest
def verify(self, signature):
if not isinstance(signature, bytes):
raise TypeError("signature must be bytes.")
if self._ctx is None:
raise AlreadyFinalized("Context was already finalized.")
ctx, self._ctx = self._ctx, None
ctx.verify(signature)
def copy(self):
if self._ctx is None:
raise AlreadyFinalized("Context was already finalized.")
return CMAC(
self._algorithm,
backend=self._backend,
ctx=self._ctx.copy()
)
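
# A minimal AES-CMAC sketch using the class above; the key is random purely
# for illustration.
#
#     import os
#     from cryptography.hazmat.backends import default_backend
#     from cryptography.hazmat.primitives import cmac
#     from cryptography.hazmat.primitives.ciphers import algorithms
#
#     key = os.urandom(16)
#     c = cmac.CMAC(algorithms.AES(key), backend=default_backend())
#     c.update(b"message to authenticate")
#     tag = c.finalize()
#
#     verifier = cmac.CMAC(algorithms.AES(key), backend=default_backend())
#     verifier.update(b"message to authenticate")
#     verifier.verify(tag)                # raises InvalidSignature on mismatch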

View File

@ -0,0 +1,35 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import hmac
import warnings
from cryptography import utils
from cryptography.hazmat.bindings._constant_time import lib
if hasattr(hmac, "compare_digest"):
def bytes_eq(a, b):
if not isinstance(a, bytes) or not isinstance(b, bytes):
raise TypeError("a and b must be bytes.")
return hmac.compare_digest(a, b)
else:
warnings.warn(
"Support for your Python version is deprecated. The next version of "
"cryptography will remove support. Please upgrade to a 2.7.x "
"release that supports hmac.compare_digest as soon as possible.",
utils.DeprecatedIn23,
)
def bytes_eq(a, b):
if not isinstance(a, bytes) or not isinstance(b, bytes):
raise TypeError("a and b must be bytes.")
return lib.Cryptography_constant_time_bytes_eq(
a, len(a), b, len(b)
) == 1
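
# Usage is a single call; bytes_eq compares in constant time regardless of
# where the inputs differ.
#
#     from cryptography.hazmat.primitives import constant_time
#
#     assert constant_time.bytes_eq(b"abc", b"abc")
#     assert not constant_time.bytes_eq(b"abc", b"abd")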

View File

@ -0,0 +1,185 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import abc
import six
from cryptography import utils
from cryptography.exceptions import (
AlreadyFinalized, UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.backends.interfaces import HashBackend
@six.add_metaclass(abc.ABCMeta)
class HashAlgorithm(object):
@abc.abstractproperty
def name(self):
"""
A string naming this algorithm (e.g. "sha256", "md5").
"""
@abc.abstractproperty
def digest_size(self):
"""
The size of the resulting digest in bytes.
"""
@six.add_metaclass(abc.ABCMeta)
class HashContext(object):
@abc.abstractproperty
def algorithm(self):
"""
A HashAlgorithm that will be used by this context.
"""
@abc.abstractmethod
def update(self, data):
"""
Processes the provided bytes through the hash.
"""
@abc.abstractmethod
def finalize(self):
"""
Finalizes the hash context and returns the hash digest as bytes.
"""
@abc.abstractmethod
def copy(self):
"""
Return a HashContext that is a copy of the current context.
"""
@utils.register_interface(HashContext)
class Hash(object):
def __init__(self, algorithm, backend, ctx=None):
if not isinstance(backend, HashBackend):
raise UnsupportedAlgorithm(
"Backend object does not implement HashBackend.",
_Reasons.BACKEND_MISSING_INTERFACE
)
if not isinstance(algorithm, HashAlgorithm):
raise TypeError("Expected instance of hashes.HashAlgorithm.")
self._algorithm = algorithm
self._backend = backend
if ctx is None:
self._ctx = self._backend.create_hash_ctx(self.algorithm)
else:
self._ctx = ctx
algorithm = utils.read_only_property("_algorithm")
def update(self, data):
if self._ctx is None:
raise AlreadyFinalized("Context was already finalized.")
if not isinstance(data, bytes):
raise TypeError("data must be bytes.")
self._ctx.update(data)
def copy(self):
if self._ctx is None:
raise AlreadyFinalized("Context was already finalized.")
return Hash(
self.algorithm, backend=self._backend, ctx=self._ctx.copy()
)
def finalize(self):
if self._ctx is None:
raise AlreadyFinalized("Context was already finalized.")
digest = self._ctx.finalize()
self._ctx = None
return digest
@utils.register_interface(HashAlgorithm)
class SHA1(object):
name = "sha1"
digest_size = 20
block_size = 64
@utils.register_interface(HashAlgorithm)
class SHA224(object):
name = "sha224"
digest_size = 28
block_size = 64
@utils.register_interface(HashAlgorithm)
class SHA256(object):
name = "sha256"
digest_size = 32
block_size = 64
@utils.register_interface(HashAlgorithm)
class SHA384(object):
name = "sha384"
digest_size = 48
block_size = 128
@utils.register_interface(HashAlgorithm)
class SHA512(object):
name = "sha512"
digest_size = 64
block_size = 128
@utils.register_interface(HashAlgorithm)
class MD5(object):
name = "md5"
digest_size = 16
block_size = 64
@utils.register_interface(HashAlgorithm)
class BLAKE2b(object):
name = "blake2b"
_max_digest_size = 64
_min_digest_size = 1
block_size = 128
def __init__(self, digest_size):
if (
digest_size > self._max_digest_size or
digest_size < self._min_digest_size
):
raise ValueError("Digest size must be {0}-{1}".format(
self._min_digest_size, self._max_digest_size)
)
self._digest_size = digest_size
digest_size = utils.read_only_property("_digest_size")
@utils.register_interface(HashAlgorithm)
class BLAKE2s(object):
name = "blake2s"
block_size = 64
_max_digest_size = 32
_min_digest_size = 1
def __init__(self, digest_size):
if (
digest_size > self._max_digest_size or
digest_size < self._min_digest_size
):
raise ValueError("Digest size must be {0}-{1}".format(
self._min_digest_size, self._max_digest_size)
)
self._digest_size = digest_size
digest_size = utils.read_only_property("_digest_size")
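
# A minimal Hash context sketch; SHA-256 is chosen arbitrarily.
#
#     from cryptography.hazmat.backends import default_backend
#     from cryptography.hazmat.primitives import hashes
#
#     digest = hashes.Hash(hashes.SHA256(), backend=default_backend())
#     digest.update(b"abc")
#     digest.update(b"123")               # incremental updates are concatenated
#     assert len(digest.finalize()) == hashes.SHA256.digest_size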

View File

@ -0,0 +1,69 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.exceptions import (
AlreadyFinalized, UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.backends.interfaces import HMACBackend
from cryptography.hazmat.primitives import hashes, mac
@utils.register_interface(mac.MACContext)
@utils.register_interface(hashes.HashContext)
class HMAC(object):
def __init__(self, key, algorithm, backend, ctx=None):
if not isinstance(backend, HMACBackend):
raise UnsupportedAlgorithm(
"Backend object does not implement HMACBackend.",
_Reasons.BACKEND_MISSING_INTERFACE
)
if not isinstance(algorithm, hashes.HashAlgorithm):
raise TypeError("Expected instance of hashes.HashAlgorithm.")
self._algorithm = algorithm
self._backend = backend
self._key = key
if ctx is None:
self._ctx = self._backend.create_hmac_ctx(key, self.algorithm)
else:
self._ctx = ctx
algorithm = utils.read_only_property("_algorithm")
def update(self, data):
if self._ctx is None:
raise AlreadyFinalized("Context was already finalized.")
if not isinstance(data, bytes):
raise TypeError("data must be bytes.")
self._ctx.update(data)
def copy(self):
if self._ctx is None:
raise AlreadyFinalized("Context was already finalized.")
return HMAC(
self._key,
self.algorithm,
backend=self._backend,
ctx=self._ctx.copy()
)
def finalize(self):
if self._ctx is None:
raise AlreadyFinalized("Context was already finalized.")
digest = self._ctx.finalize()
self._ctx = None
return digest
def verify(self, signature):
if not isinstance(signature, bytes):
raise TypeError("signature must be bytes.")
if self._ctx is None:
raise AlreadyFinalized("Context was already finalized.")
ctx, self._ctx = self._ctx, None
ctx.verify(signature)
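
# A short HMAC sign/verify sketch; key and message are placeholders.
#
#     import os
#     from cryptography.hazmat.backends import default_backend
#     from cryptography.hazmat.primitives import hashes, hmac
#
#     key = os.urandom(32)
#     h = hmac.HMAC(key, hashes.SHA256(), backend=default_backend())
#     h.update(b"message")
#     tag = h.finalize()
#
#     verifier = hmac.HMAC(key, hashes.SHA256(), backend=default_backend())
#     verifier.update(b"message")
#     verifier.verify(tag)                # raises InvalidSignature on mismatch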

View File

@ -0,0 +1,26 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class KeyDerivationFunction(object):
@abc.abstractmethod
def derive(self, key_material):
"""
Deterministically generates and returns a new key based on the existing
key material.
"""
@abc.abstractmethod
def verify(self, key_material, expected_key):
"""
Checks whether the key generated by the key material matches the
expected derived key. Raises an exception if they do not match.
"""

View File

@ -0,0 +1,125 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import struct
from cryptography import utils
from cryptography.exceptions import (
AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.backends.interfaces import HMACBackend
from cryptography.hazmat.backends.interfaces import HashBackend
from cryptography.hazmat.primitives import constant_time, hashes, hmac
from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
def _int_to_u32be(n):
return struct.pack('>I', n)
def _common_args_checks(algorithm, length, otherinfo):
max_length = algorithm.digest_size * (2 ** 32 - 1)
if length > max_length:
raise ValueError(
"Can not derive keys larger than {0} bits.".format(
max_length
))
if not (otherinfo is None or isinstance(otherinfo, bytes)):
raise TypeError("otherinfo must be bytes.")
def _concatkdf_derive(key_material, length, auxfn, otherinfo):
if not isinstance(key_material, bytes):
raise TypeError("key_material must be bytes.")
output = [b""]
outlen = 0
counter = 1
while (length > outlen):
h = auxfn()
h.update(_int_to_u32be(counter))
h.update(key_material)
h.update(otherinfo)
output.append(h.finalize())
outlen += len(output[-1])
counter += 1
return b"".join(output)[:length]
@utils.register_interface(KeyDerivationFunction)
class ConcatKDFHash(object):
def __init__(self, algorithm, length, otherinfo, backend):
_common_args_checks(algorithm, length, otherinfo)
self._algorithm = algorithm
self._length = length
self._otherinfo = otherinfo
if self._otherinfo is None:
self._otherinfo = b""
if not isinstance(backend, HashBackend):
raise UnsupportedAlgorithm(
"Backend object does not implement HashBackend.",
_Reasons.BACKEND_MISSING_INTERFACE
)
self._backend = backend
self._used = False
def _hash(self):
return hashes.Hash(self._algorithm, self._backend)
def derive(self, key_material):
if self._used:
raise AlreadyFinalized
self._used = True
return _concatkdf_derive(key_material, self._length,
self._hash, self._otherinfo)
def verify(self, key_material, expected_key):
if not constant_time.bytes_eq(self.derive(key_material), expected_key):
raise InvalidKey
@utils.register_interface(KeyDerivationFunction)
class ConcatKDFHMAC(object):
def __init__(self, algorithm, length, salt, otherinfo, backend):
_common_args_checks(algorithm, length, otherinfo)
self._algorithm = algorithm
self._length = length
self._otherinfo = otherinfo
if self._otherinfo is None:
self._otherinfo = b""
if not (salt is None or isinstance(salt, bytes)):
raise TypeError("salt must be bytes.")
if salt is None:
salt = b"\x00" * algorithm.block_size
self._salt = salt
if not isinstance(backend, HMACBackend):
raise UnsupportedAlgorithm(
"Backend object does not implement HMACBackend.",
_Reasons.BACKEND_MISSING_INTERFACE
)
self._backend = backend
self._used = False
def _hmac(self):
return hmac.HMAC(self._salt, self._algorithm, self._backend)
def derive(self, key_material):
if self._used:
raise AlreadyFinalized
self._used = True
return _concatkdf_derive(key_material, self._length,
self._hmac, self._otherinfo)
def verify(self, key_material, expected_key):
if not constant_time.bytes_eq(self.derive(key_material), expected_key):
raise InvalidKey
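
# A ConcatKDFHash sketch; each instance is single-use, and the otherinfo
# value here is an arbitrary placeholder.
#
#     from cryptography.hazmat.backends import default_backend
#     from cryptography.hazmat.primitives import hashes
#     from cryptography.hazmat.primitives.kdf.concatkdf import ConcatKDFHash
#
#     ckdf = ConcatKDFHash(algorithm=hashes.SHA256(), length=32,
#                          otherinfo=b"concatkdf-example",
#                          backend=default_backend())
#     key = ckdf.derive(b"input key material")
#     assert len(key) == 32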

View File

@ -0,0 +1,116 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import six
from cryptography import utils
from cryptography.exceptions import (
AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.backends.interfaces import HMACBackend
from cryptography.hazmat.primitives import constant_time, hmac
from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
@utils.register_interface(KeyDerivationFunction)
class HKDF(object):
def __init__(self, algorithm, length, salt, info, backend):
if not isinstance(backend, HMACBackend):
raise UnsupportedAlgorithm(
"Backend object does not implement HMACBackend.",
_Reasons.BACKEND_MISSING_INTERFACE
)
self._algorithm = algorithm
if not (salt is None or isinstance(salt, bytes)):
raise TypeError("salt must be bytes.")
if salt is None:
salt = b"\x00" * self._algorithm.digest_size
self._salt = salt
self._backend = backend
self._hkdf_expand = HKDFExpand(self._algorithm, length, info, backend)
def _extract(self, key_material):
h = hmac.HMAC(self._salt, self._algorithm, backend=self._backend)
h.update(key_material)
return h.finalize()
def derive(self, key_material):
if not isinstance(key_material, bytes):
raise TypeError("key_material must be bytes.")
return self._hkdf_expand.derive(self._extract(key_material))
def verify(self, key_material, expected_key):
if not constant_time.bytes_eq(self.derive(key_material), expected_key):
raise InvalidKey
@utils.register_interface(KeyDerivationFunction)
class HKDFExpand(object):
def __init__(self, algorithm, length, info, backend):
if not isinstance(backend, HMACBackend):
raise UnsupportedAlgorithm(
"Backend object does not implement HMACBackend.",
_Reasons.BACKEND_MISSING_INTERFACE
)
self._algorithm = algorithm
self._backend = backend
max_length = 255 * algorithm.digest_size
if length > max_length:
raise ValueError(
"Can not derive keys larger than {0} octets.".format(
max_length
))
self._length = length
if not (info is None or isinstance(info, bytes)):
raise TypeError("info must be bytes.")
if info is None:
info = b""
self._info = info
self._used = False
def _expand(self, key_material):
output = [b""]
counter = 1
while self._algorithm.digest_size * (len(output) - 1) < self._length:
h = hmac.HMAC(key_material, self._algorithm, backend=self._backend)
h.update(output[-1])
h.update(self._info)
h.update(six.int2byte(counter))
output.append(h.finalize())
counter += 1
return b"".join(output)[:self._length]
def derive(self, key_material):
if not isinstance(key_material, bytes):
raise TypeError("key_material must be bytes.")
if self._used:
raise AlreadyFinalized
self._used = True
return self._expand(key_material)
def verify(self, key_material, expected_key):
if not constant_time.bytes_eq(self.derive(key_material), expected_key):
raise InvalidKey
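
# An HKDF sketch; salt=None falls back to a zero-filled block as above, and
# each instance can only derive once. The info string is a placeholder.
#
#     from cryptography.hazmat.backends import default_backend
#     from cryptography.hazmat.primitives import hashes
#     from cryptography.hazmat.primitives.kdf.hkdf import HKDF
#
#     hkdf = HKDF(algorithm=hashes.SHA256(), length=32, salt=None,
#                 info=b"handshake data", backend=default_backend())
#     derived = hkdf.derive(b"shared secret from a key exchange")
#     assert len(derived) == 32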

View File

@ -0,0 +1,148 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from enum import Enum
from six.moves import range
from cryptography import utils
from cryptography.exceptions import (
AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.backends.interfaces import HMACBackend
from cryptography.hazmat.primitives import constant_time, hashes, hmac
from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
class Mode(Enum):
CounterMode = "ctr"
class CounterLocation(Enum):
BeforeFixed = "before_fixed"
AfterFixed = "after_fixed"
@utils.register_interface(KeyDerivationFunction)
class KBKDFHMAC(object):
def __init__(self, algorithm, mode, length, rlen, llen,
location, label, context, fixed, backend):
if not isinstance(backend, HMACBackend):
raise UnsupportedAlgorithm(
"Backend object does not implement HMACBackend.",
_Reasons.BACKEND_MISSING_INTERFACE
)
if not isinstance(algorithm, hashes.HashAlgorithm):
raise UnsupportedAlgorithm(
"Algorithm supplied is not a supported hash algorithm.",
_Reasons.UNSUPPORTED_HASH
)
if not backend.hmac_supported(algorithm):
raise UnsupportedAlgorithm(
"Algorithm supplied is not a supported hmac algorithm.",
_Reasons.UNSUPPORTED_HASH
)
if not isinstance(mode, Mode):
raise TypeError("mode must be of type Mode")
if not isinstance(location, CounterLocation):
raise TypeError("location must be of type CounterLocation")
if (label or context) and fixed:
raise ValueError("When supplying fixed data, "
"label and context are ignored.")
if rlen is None or not self._valid_byte_length(rlen):
raise ValueError("rlen must be between 1 and 4")
if llen is None and fixed is None:
raise ValueError("Please specify an llen")
if llen is not None and not isinstance(llen, int):
raise TypeError("llen must be an integer")
if label is None:
label = b''
if context is None:
context = b''
if (not isinstance(label, bytes) or
not isinstance(context, bytes)):
raise TypeError('label and context must be of type bytes')
self._algorithm = algorithm
self._mode = mode
self._length = length
self._rlen = rlen
self._llen = llen
self._location = location
self._label = label
self._context = context
self._backend = backend
self._used = False
self._fixed_data = fixed
def _valid_byte_length(self, value):
if not isinstance(value, int):
raise TypeError('value must be of type int')
value_bin = utils.int_to_bytes(1, value)
if not 1 <= len(value_bin) <= 4:
return False
return True
def derive(self, key_material):
if self._used:
raise AlreadyFinalized
if not isinstance(key_material, bytes):
raise TypeError('key_material must be bytes')
self._used = True
# inverse floor division (equivalent to ceiling)
rounds = -(-self._length // self._algorithm.digest_size)
output = [b'']
# For counter mode, the number of iterations shall not be
# larger than 2^r-1, where r <= 32 is the binary length of the counter
# This ensures that the counter values used as an input to the
# PRF will not repeat during a particular call to the KDF function.
r_bin = utils.int_to_bytes(1, self._rlen)
if rounds > pow(2, len(r_bin) * 8) - 1:
raise ValueError('There are too many iterations.')
for i in range(1, rounds + 1):
h = hmac.HMAC(key_material, self._algorithm, backend=self._backend)
counter = utils.int_to_bytes(i, self._rlen)
if self._location == CounterLocation.BeforeFixed:
h.update(counter)
h.update(self._generate_fixed_input())
if self._location == CounterLocation.AfterFixed:
h.update(counter)
output.append(h.finalize())
return b''.join(output)[:self._length]
def _generate_fixed_input(self):
if self._fixed_data and isinstance(self._fixed_data, bytes):
return self._fixed_data
l_val = utils.int_to_bytes(self._length * 8, self._llen)
return b"".join([self._label, b"\x00", self._context, l_val])
def verify(self, key_material, expected_key):
if not constant_time.bytes_eq(self.derive(key_material), expected_key):
raise InvalidKey
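
# A KBKDFHMAC (counter mode) sketch; label, context, and the length fields
# are illustrative values only.
#
#     from cryptography.hazmat.backends import default_backend
#     from cryptography.hazmat.primitives import hashes
#     from cryptography.hazmat.primitives.kdf.kbkdf import (
#         CounterLocation, KBKDFHMAC, Mode
#     )
#
#     kdf = KBKDFHMAC(algorithm=hashes.SHA256(), mode=Mode.CounterMode,
#                     length=32, rlen=4, llen=4,
#                     location=CounterLocation.BeforeFixed,
#                     label=b"example label", context=b"example context",
#                     fixed=None, backend=default_backend())
#     key = kdf.derive(b"input key material")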

View File

@ -0,0 +1,58 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.exceptions import (
AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.backends.interfaces import PBKDF2HMACBackend
from cryptography.hazmat.primitives import constant_time
from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
@utils.register_interface(KeyDerivationFunction)
class PBKDF2HMAC(object):
def __init__(self, algorithm, length, salt, iterations, backend):
if not isinstance(backend, PBKDF2HMACBackend):
raise UnsupportedAlgorithm(
"Backend object does not implement PBKDF2HMACBackend.",
_Reasons.BACKEND_MISSING_INTERFACE
)
if not backend.pbkdf2_hmac_supported(algorithm):
raise UnsupportedAlgorithm(
"{0} is not supported for PBKDF2 by this backend.".format(
algorithm.name),
_Reasons.UNSUPPORTED_HASH
)
self._used = False
self._algorithm = algorithm
self._length = length
if not isinstance(salt, bytes):
raise TypeError("salt must be bytes.")
self._salt = salt
self._iterations = iterations
self._backend = backend
def derive(self, key_material):
if self._used:
raise AlreadyFinalized("PBKDF2 instances can only be used once.")
self._used = True
if not isinstance(key_material, bytes):
raise TypeError("key_material must be bytes.")
return self._backend.derive_pbkdf2_hmac(
self._algorithm,
self._length,
self._salt,
self._iterations,
key_material
)
def verify(self, key_material, expected_key):
derived_key = self.derive(key_material)
if not constant_time.bytes_eq(derived_key, expected_key):
raise InvalidKey("Keys do not match.")
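# A minimal usage sketch of PBKDF2HMAC; the salt and iteration count are
# illustrative. Instances are single use, so verification needs a fresh one.
import os
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC

salt = os.urandom(16)
kdf = PBKDF2HMAC(algorithm=hashes.SHA256(), length=32, salt=salt,
                 iterations=100000, backend=default_backend())
key = kdf.derive(b"my great password")

kdf = PBKDF2HMAC(algorithm=hashes.SHA256(), length=32, salt=salt,
                 iterations=100000, backend=default_backend())
kdf.verify(b"my great password", key)  # raises InvalidKey on mismatch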

View File

@ -0,0 +1,66 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import sys
from cryptography import utils
from cryptography.exceptions import (
AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.backends.interfaces import ScryptBackend
from cryptography.hazmat.primitives import constant_time
from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
# This is used by the scrypt tests to skip tests that require more memory
# than the MEM_LIMIT
_MEM_LIMIT = sys.maxsize // 2
@utils.register_interface(KeyDerivationFunction)
class Scrypt(object):
def __init__(self, salt, length, n, r, p, backend):
if not isinstance(backend, ScryptBackend):
raise UnsupportedAlgorithm(
"Backend object does not implement ScryptBackend.",
_Reasons.BACKEND_MISSING_INTERFACE
)
self._length = length
if not isinstance(salt, bytes):
raise TypeError("salt must be bytes.")
if n < 2 or (n & (n - 1)) != 0:
raise ValueError("n must be greater than 1 and be a power of 2.")
if r < 1:
raise ValueError("r must be greater than or equal to 1.")
if p < 1:
raise ValueError("p must be greater than or equal to 1.")
self._used = False
self._salt = salt
self._n = n
self._r = r
self._p = p
self._backend = backend
def derive(self, key_material):
if self._used:
raise AlreadyFinalized("Scrypt instances can only be used once.")
self._used = True
if not isinstance(key_material, bytes):
raise TypeError("key_material must be bytes.")
return self._backend.derive_scrypt(
key_material, self._salt, self._length, self._n, self._r, self._p
)
def verify(self, key_material, expected_key):
derived_key = self.derive(key_material)
if not constant_time.bytes_eq(derived_key, expected_key):
raise InvalidKey("Keys do not match.")
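# A minimal usage sketch of Scrypt; the cost parameters (n, r, p) shown here
# are illustrative, not a recommendation.
import os
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.kdf.scrypt import Scrypt

salt = os.urandom(16)
kdf = Scrypt(salt=salt, length=32, n=2**14, r=8, p=1,
             backend=default_backend())
key = kdf.derive(b"my great password")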

View File

@ -0,0 +1,70 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import struct
from cryptography import utils
from cryptography.exceptions import (
AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.backends.interfaces import HashBackend
from cryptography.hazmat.primitives import constant_time, hashes
from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
def _int_to_u32be(n):
return struct.pack('>I', n)
@utils.register_interface(KeyDerivationFunction)
class X963KDF(object):
def __init__(self, algorithm, length, sharedinfo, backend):
max_len = algorithm.digest_size * (2 ** 32 - 1)
if length > max_len:
raise ValueError(
"Can not derive keys larger than {0} bits.".format(max_len))
if not (sharedinfo is None or isinstance(sharedinfo, bytes)):
raise TypeError("sharedinfo must be bytes.")
self._algorithm = algorithm
self._length = length
self._sharedinfo = sharedinfo
if not isinstance(backend, HashBackend):
raise UnsupportedAlgorithm(
"Backend object does not implement HashBackend.",
_Reasons.BACKEND_MISSING_INTERFACE
)
self._backend = backend
self._used = False
def derive(self, key_material):
if self._used:
raise AlreadyFinalized
self._used = True
if not isinstance(key_material, bytes):
raise TypeError("key_material must be bytes.")
output = [b""]
outlen = 0
counter = 1
while self._length > outlen:
h = hashes.Hash(self._algorithm, self._backend)
h.update(key_material)
h.update(_int_to_u32be(counter))
if self._sharedinfo is not None:
h.update(self._sharedinfo)
output.append(h.finalize())
outlen += len(output[-1])
counter += 1
return b"".join(output)[:self._length]
def verify(self, key_material, expected_key):
if not constant_time.bytes_eq(self.derive(key_material), expected_key):
raise InvalidKey
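# A minimal usage sketch of X963KDF, as typically used to expand a key
# agreement shared secret into symmetric key material; the sharedinfo value
# and random stand-in secret are illustrative.
import os
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.x963kdf import X963KDF

xkdf = X963KDF(
    algorithm=hashes.SHA256(),
    length=32,
    sharedinfo=b"ANSI X9.63 example",
    backend=default_backend(),
)
key = xkdf.derive(os.urandom(32))  # stand-in for a real shared secret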

View File

@ -0,0 +1,154 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import struct
from cryptography.hazmat.primitives.ciphers import Cipher
from cryptography.hazmat.primitives.ciphers.algorithms import AES
from cryptography.hazmat.primitives.ciphers.modes import ECB
from cryptography.hazmat.primitives.constant_time import bytes_eq
def _wrap_core(wrapping_key, a, r, backend):
# RFC 3394 Key Wrap - 2.2.1 (index method)
encryptor = Cipher(AES(wrapping_key), ECB(), backend).encryptor()
n = len(r)
for j in range(6):
for i in range(n):
# every encryption operation is a discrete 16 byte chunk (because
# AES has a 128-bit block size) and since we're using ECB it is
# safe to reuse the encryptor for the entire operation
b = encryptor.update(a + r[i])
# pack/unpack are safe as these are always 64-bit chunks
a = struct.pack(
">Q", struct.unpack(">Q", b[:8])[0] ^ ((n * j) + i + 1)
)
r[i] = b[-8:]
assert encryptor.finalize() == b""
return a + b"".join(r)
def aes_key_wrap(wrapping_key, key_to_wrap, backend):
if len(wrapping_key) not in [16, 24, 32]:
raise ValueError("The wrapping key must be a valid AES key length")
if len(key_to_wrap) < 16:
raise ValueError("The key to wrap must be at least 16 bytes")
if len(key_to_wrap) % 8 != 0:
raise ValueError("The key to wrap must be a multiple of 8 bytes")
a = b"\xa6\xa6\xa6\xa6\xa6\xa6\xa6\xa6"
r = [key_to_wrap[i:i + 8] for i in range(0, len(key_to_wrap), 8)]
return _wrap_core(wrapping_key, a, r, backend)
def _unwrap_core(wrapping_key, a, r, backend):
# Implement RFC 3394 Key Unwrap - 2.2.2 (index method)
decryptor = Cipher(AES(wrapping_key), ECB(), backend).decryptor()
n = len(r)
for j in reversed(range(6)):
for i in reversed(range(n)):
# pack/unpack are safe as these are always 64-bit chunks
atr = struct.pack(
">Q", struct.unpack(">Q", a)[0] ^ ((n * j) + i + 1)
) + r[i]
# every decryption operation is a discrete 16 byte chunk so
# it is safe to reuse the decryptor for the entire operation
b = decryptor.update(atr)
a = b[:8]
r[i] = b[-8:]
assert decryptor.finalize() == b""
return a, r
def aes_key_wrap_with_padding(wrapping_key, key_to_wrap, backend):
if len(wrapping_key) not in [16, 24, 32]:
raise ValueError("The wrapping key must be a valid AES key length")
aiv = b"\xA6\x59\x59\xA6" + struct.pack(">i", len(key_to_wrap))
# pad the key to wrap if necessary
pad = (8 - (len(key_to_wrap) % 8)) % 8
key_to_wrap = key_to_wrap + b"\x00" * pad
if len(key_to_wrap) == 8:
# RFC 5649 - 4.1 - exactly 8 octets after padding
encryptor = Cipher(AES(wrapping_key), ECB(), backend).encryptor()
b = encryptor.update(aiv + key_to_wrap)
assert encryptor.finalize() == b""
return b
else:
r = [key_to_wrap[i:i + 8] for i in range(0, len(key_to_wrap), 8)]
return _wrap_core(wrapping_key, aiv, r, backend)
def aes_key_unwrap_with_padding(wrapping_key, wrapped_key, backend):
if len(wrapped_key) < 16:
raise InvalidUnwrap("Must be at least 16 bytes")
if len(wrapping_key) not in [16, 24, 32]:
raise ValueError("The wrapping key must be a valid AES key length")
if len(wrapped_key) == 16:
# RFC 5649 - 4.2 - exactly two 64-bit blocks
decryptor = Cipher(AES(wrapping_key), ECB(), backend).decryptor()
b = decryptor.update(wrapped_key)
assert decryptor.finalize() == b""
a = b[:8]
data = b[8:]
n = 1
else:
r = [wrapped_key[i:i + 8] for i in range(0, len(wrapped_key), 8)]
encrypted_aiv = r.pop(0)
n = len(r)
a, r = _unwrap_core(wrapping_key, encrypted_aiv, r, backend)
data = b"".join(r)
# 1) Check that MSB(32,A) = A65959A6.
# 2) Check that 8*(n-1) < LSB(32,A) <= 8*n. If so, let
# MLI = LSB(32,A).
# 3) Let b = (8*n)-MLI, and then check that the rightmost b octets of
# the output data are zero.
(mli,) = struct.unpack(">I", a[4:])
b = (8 * n) - mli
if (
not bytes_eq(a[:4], b"\xa6\x59\x59\xa6") or not
8 * (n - 1) < mli <= 8 * n or (
b != 0 and not bytes_eq(data[-b:], b"\x00" * b)
)
):
raise InvalidUnwrap()
if b == 0:
return data
else:
return data[:-b]
def aes_key_unwrap(wrapping_key, wrapped_key, backend):
if len(wrapped_key) < 24:
raise InvalidUnwrap("Must be at least 24 bytes")
if len(wrapped_key) % 8 != 0:
raise InvalidUnwrap("The wrapped key must be a multiple of 8 bytes")
if len(wrapping_key) not in [16, 24, 32]:
raise ValueError("The wrapping key must be a valid AES key length")
aiv = b"\xa6\xa6\xa6\xa6\xa6\xa6\xa6\xa6"
r = [wrapped_key[i:i + 8] for i in range(0, len(wrapped_key), 8)]
a = r.pop(0)
a, r = _unwrap_core(wrapping_key, a, r, backend)
if not bytes_eq(a, aiv):
raise InvalidUnwrap()
return b"".join(r)
class InvalidUnwrap(Exception):
pass
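# A minimal round-trip sketch of RFC 3394 key wrapping with the functions
# above; the 128-bit wrapping key and 256-bit payload key are illustrative.
import os
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.keywrap import aes_key_unwrap, aes_key_wrap

backend = default_backend()
kek = os.urandom(16)
key_to_wrap = os.urandom(32)
wrapped = aes_key_wrap(kek, key_to_wrap, backend)
assert aes_key_unwrap(kek, wrapped, backend) == key_to_wrap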

View File

@ -0,0 +1,37 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class MACContext(object):
@abc.abstractmethod
def update(self, data):
"""
Processes the provided bytes.
"""
@abc.abstractmethod
def finalize(self):
"""
Returns the message authentication code as bytes.
"""
@abc.abstractmethod
def copy(self):
"""
Return a MACContext that is a copy of the current context.
"""
@abc.abstractmethod
def verify(self, signature):
"""
Checks if the generated message authentication code matches the
signature.
"""

View File

@ -0,0 +1,202 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import abc
import six
from cryptography import utils
from cryptography.exceptions import AlreadyFinalized
from cryptography.hazmat.bindings._padding import lib
@six.add_metaclass(abc.ABCMeta)
class PaddingContext(object):
@abc.abstractmethod
def update(self, data):
"""
Pads the provided bytes and returns any available data as bytes.
"""
@abc.abstractmethod
def finalize(self):
"""
Finalize the padding, returns bytes.
"""
def _byte_padding_check(block_size):
if not (0 <= block_size <= 2040):
raise ValueError("block_size must be in range(0, 2041).")
if block_size % 8 != 0:
raise ValueError("block_size must be a multiple of 8.")
def _byte_padding_update(buffer_, data, block_size):
if buffer_ is None:
raise AlreadyFinalized("Context was already finalized.")
if not isinstance(data, bytes):
raise TypeError("data must be bytes.")
buffer_ += data
finished_blocks = len(buffer_) // (block_size // 8)
result = buffer_[:finished_blocks * (block_size // 8)]
buffer_ = buffer_[finished_blocks * (block_size // 8):]
return buffer_, result
def _byte_padding_pad(buffer_, block_size, paddingfn):
if buffer_ is None:
raise AlreadyFinalized("Context was already finalized.")
pad_size = block_size // 8 - len(buffer_)
return buffer_ + paddingfn(pad_size)
def _byte_unpadding_update(buffer_, data, block_size):
if buffer_ is None:
raise AlreadyFinalized("Context was already finalized.")
if not isinstance(data, bytes):
raise TypeError("data must be bytes.")
buffer_ += data
finished_blocks = max(len(buffer_) // (block_size // 8) - 1, 0)
result = buffer_[:finished_blocks * (block_size // 8)]
buffer_ = buffer_[finished_blocks * (block_size // 8):]
return buffer_, result
def _byte_unpadding_check(buffer_, block_size, checkfn):
if buffer_ is None:
raise AlreadyFinalized("Context was already finalized.")
if len(buffer_) != block_size // 8:
raise ValueError("Invalid padding bytes.")
valid = checkfn(buffer_, block_size // 8)
if not valid:
raise ValueError("Invalid padding bytes.")
pad_size = six.indexbytes(buffer_, -1)
return buffer_[:-pad_size]
class PKCS7(object):
def __init__(self, block_size):
_byte_padding_check(block_size)
self.block_size = block_size
def padder(self):
return _PKCS7PaddingContext(self.block_size)
def unpadder(self):
return _PKCS7UnpaddingContext(self.block_size)
@utils.register_interface(PaddingContext)
class _PKCS7PaddingContext(object):
def __init__(self, block_size):
self.block_size = block_size
# TODO: more copies than necessary, we should use zero-buffer (#193)
self._buffer = b""
def update(self, data):
self._buffer, result = _byte_padding_update(
self._buffer, data, self.block_size)
return result
def _padding(self, size):
return six.int2byte(size) * size
def finalize(self):
result = _byte_padding_pad(
self._buffer, self.block_size, self._padding)
self._buffer = None
return result
@utils.register_interface(PaddingContext)
class _PKCS7UnpaddingContext(object):
def __init__(self, block_size):
self.block_size = block_size
# TODO: more copies than necessary, we should use zero-buffer (#193)
self._buffer = b""
def update(self, data):
self._buffer, result = _byte_unpadding_update(
self._buffer, data, self.block_size)
return result
def finalize(self):
result = _byte_unpadding_check(
self._buffer, self.block_size,
lib.Cryptography_check_pkcs7_padding)
self._buffer = None
return result
class ANSIX923(object):
def __init__(self, block_size):
_byte_padding_check(block_size)
self.block_size = block_size
def padder(self):
return _ANSIX923PaddingContext(self.block_size)
def unpadder(self):
return _ANSIX923UnpaddingContext(self.block_size)
@utils.register_interface(PaddingContext)
class _ANSIX923PaddingContext(object):
def __init__(self, block_size):
self.block_size = block_size
# TODO: more copies than necessary, we should use zero-buffer (#193)
self._buffer = b""
def update(self, data):
self._buffer, result = _byte_padding_update(
self._buffer, data, self.block_size)
return result
def _padding(self, size):
return six.int2byte(0) * (size - 1) + six.int2byte(size)
def finalize(self):
result = _byte_padding_pad(
self._buffer, self.block_size, self._padding)
self._buffer = None
return result
@utils.register_interface(PaddingContext)
class _ANSIX923UnpaddingContext(object):
def __init__(self, block_size):
self.block_size = block_size
# TODO: more copies than necessary, we should use zero-buffer (#193)
self._buffer = b""
def update(self, data):
self._buffer, result = _byte_unpadding_update(
self._buffer, data, self.block_size)
return result
def finalize(self):
result = _byte_unpadding_check(
self._buffer, self.block_size,
lib.Cryptography_check_ansix923_padding)
self._buffer = None
return result
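# A minimal PKCS7 round trip with the contexts above; the 128-bit block size
# matches AES and the payload is illustrative.
from cryptography.hazmat.primitives import padding

padder = padding.PKCS7(128).padder()
padded = padder.update(b"short message") + padder.finalize()

unpadder = padding.PKCS7(128).unpadder()
assert unpadder.update(padded) + unpadder.finalize() == b"short message"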

View File

@ -0,0 +1,209 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import abc
import base64
import struct
from enum import Enum
import six
from cryptography import utils
from cryptography.exceptions import UnsupportedAlgorithm
from cryptography.hazmat.primitives.asymmetric import dsa, ec, rsa
def load_pem_private_key(data, password, backend):
return backend.load_pem_private_key(data, password)
def load_pem_public_key(data, backend):
return backend.load_pem_public_key(data)
def load_pem_parameters(data, backend):
return backend.load_pem_parameters(data)
def load_der_private_key(data, password, backend):
return backend.load_der_private_key(data, password)
def load_der_public_key(data, backend):
return backend.load_der_public_key(data)
def load_der_parameters(data, backend):
return backend.load_der_parameters(data)
def load_ssh_public_key(data, backend):
key_parts = data.split(b' ', 2)
if len(key_parts) < 2:
raise ValueError(
'Key is not in the proper format or contains extra data.')
key_type = key_parts[0]
if key_type == b'ssh-rsa':
loader = _load_ssh_rsa_public_key
elif key_type == b'ssh-dss':
loader = _load_ssh_dss_public_key
elif key_type in [
b'ecdsa-sha2-nistp256', b'ecdsa-sha2-nistp384', b'ecdsa-sha2-nistp521',
]:
loader = _load_ssh_ecdsa_public_key
else:
raise UnsupportedAlgorithm('Key type is not supported.')
key_body = key_parts[1]
try:
decoded_data = base64.b64decode(key_body)
except TypeError:
raise ValueError('Key is not in the proper format.')
inner_key_type, rest = _ssh_read_next_string(decoded_data)
if inner_key_type != key_type:
raise ValueError(
'Key header and key body contain different key type values.'
)
return loader(key_type, rest, backend)
def _load_ssh_rsa_public_key(key_type, decoded_data, backend):
e, rest = _ssh_read_next_mpint(decoded_data)
n, rest = _ssh_read_next_mpint(rest)
if rest:
raise ValueError('Key body contains extra bytes.')
return rsa.RSAPublicNumbers(e, n).public_key(backend)
def _load_ssh_dss_public_key(key_type, decoded_data, backend):
p, rest = _ssh_read_next_mpint(decoded_data)
q, rest = _ssh_read_next_mpint(rest)
g, rest = _ssh_read_next_mpint(rest)
y, rest = _ssh_read_next_mpint(rest)
if rest:
raise ValueError('Key body contains extra bytes.')
parameter_numbers = dsa.DSAParameterNumbers(p, q, g)
public_numbers = dsa.DSAPublicNumbers(y, parameter_numbers)
return public_numbers.public_key(backend)
def _load_ssh_ecdsa_public_key(expected_key_type, decoded_data, backend):
curve_name, rest = _ssh_read_next_string(decoded_data)
data, rest = _ssh_read_next_string(rest)
if expected_key_type != b"ecdsa-sha2-" + curve_name:
raise ValueError(
'Key header and key body contain different key type values.'
)
if rest:
raise ValueError('Key body contains extra bytes.')
curve = {
b"nistp256": ec.SECP256R1,
b"nistp384": ec.SECP384R1,
b"nistp521": ec.SECP521R1,
}[curve_name]()
if six.indexbytes(data, 0) != 4:
raise NotImplementedError(
"Compressed elliptic curve points are not supported"
)
numbers = ec.EllipticCurvePublicNumbers.from_encoded_point(curve, data)
return numbers.public_key(backend)
def _ssh_read_next_string(data):
"""
Retrieves the next RFC 4251 string value from the data.
While the RFC calls these strings, in Python they are bytes objects.
"""
if len(data) < 4:
raise ValueError("Key is not in the proper format")
str_len, = struct.unpack('>I', data[:4])
if len(data) < str_len + 4:
raise ValueError("Key is not in the proper format")
return data[4:4 + str_len], data[4 + str_len:]
def _ssh_read_next_mpint(data):
"""
Reads the next mpint from the data.
Currently, all mpints are interpreted as unsigned.
"""
mpint_data, rest = _ssh_read_next_string(data)
return (
utils.int_from_bytes(mpint_data, byteorder='big', signed=False), rest
)
def _ssh_write_string(data):
return struct.pack(">I", len(data)) + data
def _ssh_write_mpint(value):
data = utils.int_to_bytes(value)
if six.indexbytes(data, 0) & 0x80:
data = b"\x00" + data
return _ssh_write_string(data)
class Encoding(Enum):
PEM = "PEM"
DER = "DER"
OpenSSH = "OpenSSH"
class PrivateFormat(Enum):
PKCS8 = "PKCS8"
TraditionalOpenSSL = "TraditionalOpenSSL"
class PublicFormat(Enum):
SubjectPublicKeyInfo = "X.509 subjectPublicKeyInfo with PKCS#1"
PKCS1 = "Raw PKCS#1"
OpenSSH = "OpenSSH"
class ParameterFormat(Enum):
PKCS3 = "PKCS3"
@six.add_metaclass(abc.ABCMeta)
class KeySerializationEncryption(object):
pass
@utils.register_interface(KeySerializationEncryption)
class BestAvailableEncryption(object):
def __init__(self, password):
if not isinstance(password, bytes) or len(password) == 0:
raise ValueError("Password must be 1 or more bytes.")
self.password = password
@utils.register_interface(KeySerializationEncryption)
class NoEncryption(object):
pass
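# A minimal sketch of serializing a private key and loading it back with the
# helpers above; the RSA key size and passphrase are illustrative.
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa

key = rsa.generate_private_key(public_exponent=65537, key_size=2048,
                               backend=default_backend())
pem = key.private_bytes(
    encoding=serialization.Encoding.PEM,
    format=serialization.PrivateFormat.PKCS8,
    encryption_algorithm=serialization.BestAvailableEncryption(b"passphrase"),
)
loaded = serialization.load_pem_private_key(pem, password=b"passphrase",
                                            backend=default_backend())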

View File

@ -0,0 +1,9 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
class InvalidToken(Exception):
pass

View File

@ -0,0 +1,68 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import struct
import six
from cryptography.exceptions import (
UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.backends.interfaces import HMACBackend
from cryptography.hazmat.primitives import constant_time, hmac
from cryptography.hazmat.primitives.hashes import SHA1, SHA256, SHA512
from cryptography.hazmat.primitives.twofactor import InvalidToken
from cryptography.hazmat.primitives.twofactor.utils import _generate_uri
class HOTP(object):
def __init__(self, key, length, algorithm, backend,
enforce_key_length=True):
if not isinstance(backend, HMACBackend):
raise UnsupportedAlgorithm(
"Backend object does not implement HMACBackend.",
_Reasons.BACKEND_MISSING_INTERFACE
)
if len(key) < 16 and enforce_key_length is True:
raise ValueError("Key length has to be at least 128 bits.")
if not isinstance(length, six.integer_types):
raise TypeError("Length parameter must be an integer type.")
if length < 6 or length > 8:
raise ValueError("Length of HOTP has to be between 6 to 8.")
if not isinstance(algorithm, (SHA1, SHA256, SHA512)):
raise TypeError("Algorithm must be SHA1, SHA256 or SHA512.")
self._key = key
self._length = length
self._algorithm = algorithm
self._backend = backend
def generate(self, counter):
truncated_value = self._dynamic_truncate(counter)
hotp = truncated_value % (10 ** self._length)
return "{0:0{1}}".format(hotp, self._length).encode()
def verify(self, hotp, counter):
if not constant_time.bytes_eq(self.generate(counter), hotp):
raise InvalidToken("Supplied HOTP value does not match.")
def _dynamic_truncate(self, counter):
ctx = hmac.HMAC(self._key, self._algorithm, self._backend)
ctx.update(struct.pack(">Q", counter))
hmac_value = ctx.finalize()
offset = six.indexbytes(hmac_value, len(hmac_value) - 1) & 0b1111
p = hmac_value[offset:offset + 4]
return struct.unpack(">I", p)[0] & 0x7fffffff
def get_provisioning_uri(self, account_name, counter, issuer):
return _generate_uri(self, "hotp", account_name, issuer, [
("counter", int(counter)),
])
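# A minimal HOTP sketch: generate a value for counter 0 and verify it; the
# 160-bit random key is illustrative.
import os
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.hashes import SHA1
from cryptography.hazmat.primitives.twofactor.hotp import HOTP

hotp = HOTP(os.urandom(20), 6, SHA1(), backend=default_backend())
value = hotp.generate(0)
hotp.verify(value, 0)  # raises InvalidToken on mismatch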

View File

@ -0,0 +1,40 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography.exceptions import (
UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.backends.interfaces import HMACBackend
from cryptography.hazmat.primitives import constant_time
from cryptography.hazmat.primitives.twofactor import InvalidToken
from cryptography.hazmat.primitives.twofactor.hotp import HOTP
from cryptography.hazmat.primitives.twofactor.utils import _generate_uri
class TOTP(object):
def __init__(self, key, length, algorithm, time_step, backend,
enforce_key_length=True):
if not isinstance(backend, HMACBackend):
raise UnsupportedAlgorithm(
"Backend object does not implement HMACBackend.",
_Reasons.BACKEND_MISSING_INTERFACE
)
self._time_step = time_step
self._hotp = HOTP(key, length, algorithm, backend, enforce_key_length)
def generate(self, time):
counter = int(time / self._time_step)
return self._hotp.generate(counter)
def verify(self, totp, time):
if not constant_time.bytes_eq(self.generate(time), totp):
raise InvalidToken("Supplied TOTP value does not match.")
def get_provisioning_uri(self, account_name, issuer):
return _generate_uri(self._hotp, "totp", account_name, issuer, [
("period", int(self._time_step)),
])
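# A minimal TOTP sketch using a 30-second time step; the key is illustrative.
import os
import time
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.hashes import SHA1
from cryptography.hazmat.primitives.twofactor.totp import TOTP

totp = TOTP(os.urandom(20), 8, SHA1(), 30, backend=default_backend())
now = time.time()
token = totp.generate(now)
totp.verify(token, now)  # raises InvalidToken on mismatch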

View File

@ -0,0 +1,30 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import base64
from six.moves.urllib.parse import quote, urlencode
def _generate_uri(hotp, type_name, account_name, issuer, extra_parameters):
parameters = [
("digits", hotp._length),
("secret", base64.b32encode(hotp._key)),
("algorithm", hotp._algorithm.name.upper()),
]
if issuer is not None:
parameters.append(("issuer", issuer))
parameters.extend(extra_parameters)
uriparts = {
"type": type_name,
"label": ("%s:%s" % (quote(issuer), quote(account_name)) if issuer
else quote(account_name)),
"parameters": urlencode(parameters),
}
return "otpauth://{type}/{label}?{parameters}".format(**uriparts)

View File

@ -0,0 +1,165 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import abc
import binascii
import inspect
import sys
import warnings
# We use a UserWarning subclass, instead of DeprecationWarning, because CPython
# decided deprecation warnings should be invisible by default.
class CryptographyDeprecationWarning(UserWarning):
pass
# Several APIs were deprecated with no specific end-of-life date because of the
# ubiquity of their use. They should not be removed until we agree on when that
# cycle ends.
PersistentlyDeprecated = CryptographyDeprecationWarning
DeprecatedIn21 = CryptographyDeprecationWarning
DeprecatedIn23 = CryptographyDeprecationWarning
def _check_bytes(name, value):
if not isinstance(value, bytes):
raise TypeError("{0} must be bytes".format(name))
def read_only_property(name):
return property(lambda self: getattr(self, name))
def register_interface(iface):
def register_decorator(klass):
verify_interface(iface, klass)
iface.register(klass)
return klass
return register_decorator
def register_interface_if(predicate, iface):
def register_decorator(klass):
if predicate:
verify_interface(iface, klass)
iface.register(klass)
return klass
return register_decorator
if hasattr(int, "from_bytes"):
int_from_bytes = int.from_bytes
else:
def int_from_bytes(data, byteorder, signed=False):
assert byteorder == 'big'
assert not signed
return int(binascii.hexlify(data), 16)
if hasattr(int, "to_bytes"):
def int_to_bytes(integer, length=None):
return integer.to_bytes(
length or (integer.bit_length() + 7) // 8 or 1, 'big'
)
else:
def int_to_bytes(integer, length=None):
hex_string = '%x' % integer
if length is None:
n = len(hex_string)
else:
n = length * 2
return binascii.unhexlify(hex_string.zfill(n + (n & 1)))
class InterfaceNotImplemented(Exception):
pass
if hasattr(inspect, "signature"):
signature = inspect.signature
else:
signature = inspect.getargspec
def verify_interface(iface, klass):
for method in iface.__abstractmethods__:
if not hasattr(klass, method):
raise InterfaceNotImplemented(
"{0} is missing a {1!r} method".format(klass, method)
)
if isinstance(getattr(iface, method), abc.abstractproperty):
# Can't properly verify these yet.
continue
sig = signature(getattr(iface, method))
actual = signature(getattr(klass, method))
if sig != actual:
raise InterfaceNotImplemented(
"{0}.{1}'s signature differs from the expected. Expected: "
"{2!r}. Received: {3!r}".format(
klass, method, sig, actual
)
)
# No longer needed as of 2.2, but retained because we have external consumers
# who use it.
def bit_length(x):
return x.bit_length()
class _DeprecatedValue(object):
def __init__(self, value, message, warning_class):
self.value = value
self.message = message
self.warning_class = warning_class
class _ModuleWithDeprecations(object):
def __init__(self, module):
self.__dict__["_module"] = module
def __getattr__(self, attr):
obj = getattr(self._module, attr)
if isinstance(obj, _DeprecatedValue):
warnings.warn(obj.message, obj.warning_class, stacklevel=2)
obj = obj.value
return obj
def __setattr__(self, attr, value):
setattr(self._module, attr, value)
def __delattr__(self, attr):
obj = getattr(self._module, attr)
if isinstance(obj, _DeprecatedValue):
warnings.warn(obj.message, obj.warning_class, stacklevel=2)
delattr(self._module, attr)
def __dir__(self):
return ["_module"] + dir(self._module)
def deprecated(value, module_name, message, warning_class):
module = sys.modules[module_name]
if not isinstance(module, _ModuleWithDeprecations):
sys.modules[module_name] = _ModuleWithDeprecations(module)
return _DeprecatedValue(value, message, warning_class)
def cached_property(func):
cached_name = "_cached_{0}".format(func)
sentinel = object()
def inner(instance):
cache = getattr(instance, cached_name, sentinel)
if cache is not sentinel:
return cache
result = func(instance)
setattr(instance, cached_name, result)
return result
return property(inner)
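# A quick sketch of the int/bytes helpers above (values are illustrative):
from cryptography import utils

data = utils.int_to_bytes(65537)                     # b"\x01\x00\x01"
assert utils.int_from_bytes(data, "big") == 65537
# Passing an explicit length left-pads the result with zero bytes.
assert utils.int_to_bytes(65537, length=4) == b"\x00\x01\x00\x01"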

View File

@ -0,0 +1,185 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography.x509 import certificate_transparency
from cryptography.x509.base import (
Certificate, CertificateBuilder, CertificateRevocationList,
CertificateRevocationListBuilder,
CertificateSigningRequest, CertificateSigningRequestBuilder,
InvalidVersion, RevokedCertificate, RevokedCertificateBuilder,
Version, load_der_x509_certificate, load_der_x509_crl, load_der_x509_csr,
load_pem_x509_certificate, load_pem_x509_crl, load_pem_x509_csr,
random_serial_number,
)
from cryptography.x509.extensions import (
AccessDescription, AuthorityInformationAccess,
AuthorityKeyIdentifier, BasicConstraints, CRLDistributionPoints,
CRLNumber, CRLReason, CertificateIssuer, CertificatePolicies,
DeltaCRLIndicator, DistributionPoint, DuplicateExtension, ExtendedKeyUsage,
Extension, ExtensionNotFound, ExtensionType, Extensions, FreshestCRL,
GeneralNames, InhibitAnyPolicy, InvalidityDate, IssuerAlternativeName,
KeyUsage, NameConstraints, NoticeReference, OCSPNoCheck, PolicyConstraints,
PolicyInformation, PrecertificateSignedCertificateTimestamps, ReasonFlags,
SubjectAlternativeName, SubjectKeyIdentifier, TLSFeature, TLSFeatureType,
UnrecognizedExtension, UserNotice
)
from cryptography.x509.general_name import (
DNSName, DirectoryName, GeneralName, IPAddress, OtherName, RFC822Name,
RegisteredID, UniformResourceIdentifier, UnsupportedGeneralNameType,
_GENERAL_NAMES
)
from cryptography.x509.name import (
Name, NameAttribute, RelativeDistinguishedName
)
from cryptography.x509.oid import (
AuthorityInformationAccessOID, CRLEntryExtensionOID,
CertificatePoliciesOID, ExtendedKeyUsageOID, ExtensionOID, NameOID,
ObjectIdentifier, SignatureAlgorithmOID, _SIG_OIDS_TO_HASH
)
OID_AUTHORITY_INFORMATION_ACCESS = ExtensionOID.AUTHORITY_INFORMATION_ACCESS
OID_AUTHORITY_KEY_IDENTIFIER = ExtensionOID.AUTHORITY_KEY_IDENTIFIER
OID_BASIC_CONSTRAINTS = ExtensionOID.BASIC_CONSTRAINTS
OID_CERTIFICATE_POLICIES = ExtensionOID.CERTIFICATE_POLICIES
OID_CRL_DISTRIBUTION_POINTS = ExtensionOID.CRL_DISTRIBUTION_POINTS
OID_EXTENDED_KEY_USAGE = ExtensionOID.EXTENDED_KEY_USAGE
OID_FRESHEST_CRL = ExtensionOID.FRESHEST_CRL
OID_INHIBIT_ANY_POLICY = ExtensionOID.INHIBIT_ANY_POLICY
OID_ISSUER_ALTERNATIVE_NAME = ExtensionOID.ISSUER_ALTERNATIVE_NAME
OID_KEY_USAGE = ExtensionOID.KEY_USAGE
OID_NAME_CONSTRAINTS = ExtensionOID.NAME_CONSTRAINTS
OID_OCSP_NO_CHECK = ExtensionOID.OCSP_NO_CHECK
OID_POLICY_CONSTRAINTS = ExtensionOID.POLICY_CONSTRAINTS
OID_POLICY_MAPPINGS = ExtensionOID.POLICY_MAPPINGS
OID_SUBJECT_ALTERNATIVE_NAME = ExtensionOID.SUBJECT_ALTERNATIVE_NAME
OID_SUBJECT_DIRECTORY_ATTRIBUTES = ExtensionOID.SUBJECT_DIRECTORY_ATTRIBUTES
OID_SUBJECT_INFORMATION_ACCESS = ExtensionOID.SUBJECT_INFORMATION_ACCESS
OID_SUBJECT_KEY_IDENTIFIER = ExtensionOID.SUBJECT_KEY_IDENTIFIER
OID_DSA_WITH_SHA1 = SignatureAlgorithmOID.DSA_WITH_SHA1
OID_DSA_WITH_SHA224 = SignatureAlgorithmOID.DSA_WITH_SHA224
OID_DSA_WITH_SHA256 = SignatureAlgorithmOID.DSA_WITH_SHA256
OID_ECDSA_WITH_SHA1 = SignatureAlgorithmOID.ECDSA_WITH_SHA1
OID_ECDSA_WITH_SHA224 = SignatureAlgorithmOID.ECDSA_WITH_SHA224
OID_ECDSA_WITH_SHA256 = SignatureAlgorithmOID.ECDSA_WITH_SHA256
OID_ECDSA_WITH_SHA384 = SignatureAlgorithmOID.ECDSA_WITH_SHA384
OID_ECDSA_WITH_SHA512 = SignatureAlgorithmOID.ECDSA_WITH_SHA512
OID_RSA_WITH_MD5 = SignatureAlgorithmOID.RSA_WITH_MD5
OID_RSA_WITH_SHA1 = SignatureAlgorithmOID.RSA_WITH_SHA1
OID_RSA_WITH_SHA224 = SignatureAlgorithmOID.RSA_WITH_SHA224
OID_RSA_WITH_SHA256 = SignatureAlgorithmOID.RSA_WITH_SHA256
OID_RSA_WITH_SHA384 = SignatureAlgorithmOID.RSA_WITH_SHA384
OID_RSA_WITH_SHA512 = SignatureAlgorithmOID.RSA_WITH_SHA512
OID_RSASSA_PSS = SignatureAlgorithmOID.RSASSA_PSS
OID_COMMON_NAME = NameOID.COMMON_NAME
OID_COUNTRY_NAME = NameOID.COUNTRY_NAME
OID_DOMAIN_COMPONENT = NameOID.DOMAIN_COMPONENT
OID_DN_QUALIFIER = NameOID.DN_QUALIFIER
OID_EMAIL_ADDRESS = NameOID.EMAIL_ADDRESS
OID_GENERATION_QUALIFIER = NameOID.GENERATION_QUALIFIER
OID_GIVEN_NAME = NameOID.GIVEN_NAME
OID_LOCALITY_NAME = NameOID.LOCALITY_NAME
OID_ORGANIZATIONAL_UNIT_NAME = NameOID.ORGANIZATIONAL_UNIT_NAME
OID_ORGANIZATION_NAME = NameOID.ORGANIZATION_NAME
OID_PSEUDONYM = NameOID.PSEUDONYM
OID_SERIAL_NUMBER = NameOID.SERIAL_NUMBER
OID_STATE_OR_PROVINCE_NAME = NameOID.STATE_OR_PROVINCE_NAME
OID_SURNAME = NameOID.SURNAME
OID_TITLE = NameOID.TITLE
OID_CLIENT_AUTH = ExtendedKeyUsageOID.CLIENT_AUTH
OID_CODE_SIGNING = ExtendedKeyUsageOID.CODE_SIGNING
OID_EMAIL_PROTECTION = ExtendedKeyUsageOID.EMAIL_PROTECTION
OID_OCSP_SIGNING = ExtendedKeyUsageOID.OCSP_SIGNING
OID_SERVER_AUTH = ExtendedKeyUsageOID.SERVER_AUTH
OID_TIME_STAMPING = ExtendedKeyUsageOID.TIME_STAMPING
OID_ANY_POLICY = CertificatePoliciesOID.ANY_POLICY
OID_CPS_QUALIFIER = CertificatePoliciesOID.CPS_QUALIFIER
OID_CPS_USER_NOTICE = CertificatePoliciesOID.CPS_USER_NOTICE
OID_CERTIFICATE_ISSUER = CRLEntryExtensionOID.CERTIFICATE_ISSUER
OID_CRL_REASON = CRLEntryExtensionOID.CRL_REASON
OID_INVALIDITY_DATE = CRLEntryExtensionOID.INVALIDITY_DATE
OID_CA_ISSUERS = AuthorityInformationAccessOID.CA_ISSUERS
OID_OCSP = AuthorityInformationAccessOID.OCSP
__all__ = [
"certificate_transparency",
"load_pem_x509_certificate",
"load_der_x509_certificate",
"load_pem_x509_csr",
"load_der_x509_csr",
"load_pem_x509_crl",
"load_der_x509_crl",
"random_serial_number",
"InvalidVersion",
"DeltaCRLIndicator",
"DuplicateExtension",
"ExtensionNotFound",
"UnsupportedGeneralNameType",
"NameAttribute",
"Name",
"RelativeDistinguishedName",
"ObjectIdentifier",
"ExtensionType",
"Extensions",
"Extension",
"ExtendedKeyUsage",
"FreshestCRL",
"TLSFeature",
"TLSFeatureType",
"OCSPNoCheck",
"BasicConstraints",
"CRLNumber",
"KeyUsage",
"AuthorityInformationAccess",
"AccessDescription",
"CertificatePolicies",
"PolicyInformation",
"UserNotice",
"NoticeReference",
"SubjectKeyIdentifier",
"NameConstraints",
"CRLDistributionPoints",
"DistributionPoint",
"ReasonFlags",
"InhibitAnyPolicy",
"SubjectAlternativeName",
"IssuerAlternativeName",
"AuthorityKeyIdentifier",
"GeneralNames",
"GeneralName",
"RFC822Name",
"DNSName",
"UniformResourceIdentifier",
"RegisteredID",
"DirectoryName",
"IPAddress",
"OtherName",
"Certificate",
"CertificateRevocationList",
"CertificateRevocationListBuilder",
"CertificateSigningRequest",
"RevokedCertificate",
"RevokedCertificateBuilder",
"CertificateSigningRequestBuilder",
"CertificateBuilder",
"Version",
"_SIG_OIDS_TO_HASH",
"OID_CA_ISSUERS",
"OID_OCSP",
"_GENERAL_NAMES",
"CertificateIssuer",
"CRLReason",
"InvalidityDate",
"UnrecognizedExtension",
"PolicyConstraints",
"PrecertificateSignedCertificateTimestamps",
]

View File

@ -0,0 +1,743 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import abc
import datetime
import os
from enum import Enum
import six
from cryptography import utils
from cryptography.hazmat.primitives.asymmetric import dsa, ec, rsa
from cryptography.x509.extensions import Extension, ExtensionType
from cryptography.x509.name import Name
_UNIX_EPOCH = datetime.datetime(1970, 1, 1)
def _convert_to_naive_utc_time(time):
"""Normalizes a datetime to a naive datetime in UTC.
time -- datetime to normalize. Assumed to be in UTC if not timezone
aware.
"""
if time.tzinfo is not None:
offset = time.utcoffset()
offset = offset if offset else datetime.timedelta()
return time.replace(tzinfo=None) - offset
else:
return time
class Version(Enum):
v1 = 0
v3 = 2
def load_pem_x509_certificate(data, backend):
return backend.load_pem_x509_certificate(data)
def load_der_x509_certificate(data, backend):
return backend.load_der_x509_certificate(data)
def load_pem_x509_csr(data, backend):
return backend.load_pem_x509_csr(data)
def load_der_x509_csr(data, backend):
return backend.load_der_x509_csr(data)
def load_pem_x509_crl(data, backend):
return backend.load_pem_x509_crl(data)
def load_der_x509_crl(data, backend):
return backend.load_der_x509_crl(data)
class InvalidVersion(Exception):
def __init__(self, msg, parsed_version):
super(InvalidVersion, self).__init__(msg)
self.parsed_version = parsed_version
@six.add_metaclass(abc.ABCMeta)
class Certificate(object):
@abc.abstractmethod
def fingerprint(self, algorithm):
"""
Returns bytes using digest passed.
"""
@abc.abstractproperty
def serial_number(self):
"""
Returns certificate serial number
"""
@abc.abstractproperty
def version(self):
"""
Returns the certificate version
"""
@abc.abstractmethod
def public_key(self):
"""
Returns the public key
"""
@abc.abstractproperty
def not_valid_before(self):
"""
Not before time (represented as UTC datetime)
"""
@abc.abstractproperty
def not_valid_after(self):
"""
Not after time (represented as UTC datetime)
"""
@abc.abstractproperty
def issuer(self):
"""
Returns the issuer name object.
"""
@abc.abstractproperty
def subject(self):
"""
Returns the subject name object.
"""
@abc.abstractproperty
def signature_hash_algorithm(self):
"""
Returns a HashAlgorithm corresponding to the type of the digest signed
in the certificate.
"""
@abc.abstractproperty
def signature_algorithm_oid(self):
"""
Returns the ObjectIdentifier of the signature algorithm.
"""
@abc.abstractproperty
def extensions(self):
"""
Returns an Extensions object.
"""
@abc.abstractproperty
def signature(self):
"""
Returns the signature bytes.
"""
@abc.abstractproperty
def tbs_certificate_bytes(self):
"""
Returns the tbsCertificate payload bytes as defined in RFC 5280.
"""
@abc.abstractmethod
def __eq__(self, other):
"""
Checks equality.
"""
@abc.abstractmethod
def __ne__(self, other):
"""
Checks not equal.
"""
@abc.abstractmethod
def __hash__(self):
"""
Computes a hash.
"""
@abc.abstractmethod
def public_bytes(self, encoding):
"""
Serializes the certificate to PEM or DER format.
"""
@six.add_metaclass(abc.ABCMeta)
class CertificateRevocationList(object):
@abc.abstractmethod
def public_bytes(self, encoding):
"""
Serializes the CRL to PEM or DER format.
"""
@abc.abstractmethod
def fingerprint(self, algorithm):
"""
Returns bytes using digest passed.
"""
@abc.abstractmethod
def get_revoked_certificate_by_serial_number(self, serial_number):
"""
Returns an instance of RevokedCertificate or None if the serial_number
is not in the CRL.
"""
@abc.abstractproperty
def signature_hash_algorithm(self):
"""
Returns a HashAlgorithm corresponding to the type of the digest signed
in the certificate.
"""
@abc.abstractproperty
def signature_algorithm_oid(self):
"""
Returns the ObjectIdentifier of the signature algorithm.
"""
@abc.abstractproperty
def issuer(self):
"""
Returns the X509Name with the issuer of this CRL.
"""
@abc.abstractproperty
def next_update(self):
"""
Returns the date of next update for this CRL.
"""
@abc.abstractproperty
def last_update(self):
"""
Returns the date of last update for this CRL.
"""
@abc.abstractproperty
def extensions(self):
"""
Returns an Extensions object containing a list of CRL extensions.
"""
@abc.abstractproperty
def signature(self):
"""
Returns the signature bytes.
"""
@abc.abstractproperty
def tbs_certlist_bytes(self):
"""
Returns the tbsCertList payload bytes as defined in RFC 5280.
"""
@abc.abstractmethod
def __eq__(self, other):
"""
Checks equality.
"""
@abc.abstractmethod
def __ne__(self, other):
"""
Checks not equal.
"""
@abc.abstractmethod
def is_signature_valid(self, public_key):
"""
Verifies signature of revocation list against given public key.
"""
@six.add_metaclass(abc.ABCMeta)
class CertificateSigningRequest(object):
@abc.abstractmethod
def __eq__(self, other):
"""
Checks equality.
"""
@abc.abstractmethod
def __ne__(self, other):
"""
Checks not equal.
"""
@abc.abstractmethod
def __hash__(self):
"""
Computes a hash.
"""
@abc.abstractmethod
def public_key(self):
"""
Returns the public key
"""
@abc.abstractproperty
def subject(self):
"""
Returns the subject name object.
"""
@abc.abstractproperty
def signature_hash_algorithm(self):
"""
Returns a HashAlgorithm corresponding to the type of the digest signed
in the certificate.
"""
@abc.abstractproperty
def signature_algorithm_oid(self):
"""
Returns the ObjectIdentifier of the signature algorithm.
"""
@abc.abstractproperty
def extensions(self):
"""
Returns the extensions in the signing request.
"""
@abc.abstractmethod
def public_bytes(self, encoding):
"""
Encodes the request to PEM or DER format.
"""
@abc.abstractproperty
def signature(self):
"""
Returns the signature bytes.
"""
@abc.abstractproperty
def tbs_certrequest_bytes(self):
"""
Returns the PKCS#10 CertificationRequestInfo bytes as defined in RFC
2986.
"""
@abc.abstractproperty
def is_signature_valid(self):
"""
Verifies signature of signing request.
"""
@six.add_metaclass(abc.ABCMeta)
class RevokedCertificate(object):
@abc.abstractproperty
def serial_number(self):
"""
Returns the serial number of the revoked certificate.
"""
@abc.abstractproperty
def revocation_date(self):
"""
Returns the date of when this certificate was revoked.
"""
@abc.abstractproperty
def extensions(self):
"""
Returns an Extensions object containing a list of Revoked extensions.
"""
class CertificateSigningRequestBuilder(object):
def __init__(self, subject_name=None, extensions=[]):
"""
Creates an empty X.509 certificate request (v1).
"""
self._subject_name = subject_name
self._extensions = extensions
def subject_name(self, name):
"""
Sets the certificate requestor's distinguished name.
"""
if not isinstance(name, Name):
raise TypeError('Expecting x509.Name object.')
if self._subject_name is not None:
raise ValueError('The subject name may only be set once.')
return CertificateSigningRequestBuilder(name, self._extensions)
def add_extension(self, extension, critical):
"""
Adds an X.509 extension to the certificate request.
"""
if not isinstance(extension, ExtensionType):
raise TypeError("extension must be an ExtensionType")
extension = Extension(extension.oid, critical, extension)
# TODO: This is quadratic in the number of extensions
for e in self._extensions:
if e.oid == extension.oid:
raise ValueError('This extension has already been set.')
return CertificateSigningRequestBuilder(
self._subject_name, self._extensions + [extension]
)
def sign(self, private_key, algorithm, backend):
"""
Signs the request using the requestor's private key.
"""
if self._subject_name is None:
raise ValueError("A CertificateSigningRequest must have a subject")
return backend.create_x509_csr(self, private_key, algorithm)
class CertificateBuilder(object):
def __init__(self, issuer_name=None, subject_name=None,
public_key=None, serial_number=None, not_valid_before=None,
not_valid_after=None, extensions=[]):
self._version = Version.v3
self._issuer_name = issuer_name
self._subject_name = subject_name
self._public_key = public_key
self._serial_number = serial_number
self._not_valid_before = not_valid_before
self._not_valid_after = not_valid_after
self._extensions = extensions
def issuer_name(self, name):
"""
Sets the CA's distinguished name.
"""
if not isinstance(name, Name):
raise TypeError('Expecting x509.Name object.')
if self._issuer_name is not None:
raise ValueError('The issuer name may only be set once.')
return CertificateBuilder(
name, self._subject_name, self._public_key,
self._serial_number, self._not_valid_before,
self._not_valid_after, self._extensions
)
def subject_name(self, name):
"""
Sets the requestor's distinguished name.
"""
if not isinstance(name, Name):
raise TypeError('Expecting x509.Name object.')
if self._subject_name is not None:
raise ValueError('The subject name may only be set once.')
return CertificateBuilder(
self._issuer_name, name, self._public_key,
self._serial_number, self._not_valid_before,
self._not_valid_after, self._extensions
)
def public_key(self, key):
"""
Sets the requestor's public key (as found in the signing request).
"""
if not isinstance(key, (dsa.DSAPublicKey, rsa.RSAPublicKey,
ec.EllipticCurvePublicKey)):
raise TypeError('Expecting one of DSAPublicKey, RSAPublicKey,'
' or EllipticCurvePublicKey.')
if self._public_key is not None:
raise ValueError('The public key may only be set once.')
return CertificateBuilder(
self._issuer_name, self._subject_name, key,
self._serial_number, self._not_valid_before,
self._not_valid_after, self._extensions
)
def serial_number(self, number):
"""
Sets the certificate serial number.
"""
if not isinstance(number, six.integer_types):
raise TypeError('Serial number must be of integral type.')
if self._serial_number is not None:
raise ValueError('The serial number may only be set once.')
if number <= 0:
raise ValueError('The serial number should be positive.')
# ASN.1 integers are always signed, so most significant bit must be
# zero.
if number.bit_length() >= 160: # As defined in RFC 5280
raise ValueError('The serial number should not be more than 159 '
'bits.')
return CertificateBuilder(
self._issuer_name, self._subject_name,
self._public_key, number, self._not_valid_before,
self._not_valid_after, self._extensions
)
def not_valid_before(self, time):
"""
Sets the certificate activation time.
"""
if not isinstance(time, datetime.datetime):
raise TypeError('Expecting datetime object.')
if self._not_valid_before is not None:
raise ValueError('The not valid before may only be set once.')
time = _convert_to_naive_utc_time(time)
if time <= _UNIX_EPOCH:
raise ValueError('The not valid before date must be after the unix'
' epoch (1970 January 1).')
if self._not_valid_after is not None and time > self._not_valid_after:
raise ValueError(
'The not valid before date must be before the not valid after '
'date.'
)
return CertificateBuilder(
self._issuer_name, self._subject_name,
self._public_key, self._serial_number, time,
self._not_valid_after, self._extensions
)
def not_valid_after(self, time):
"""
Sets the certificate expiration time.
"""
if not isinstance(time, datetime.datetime):
raise TypeError('Expecting datetime object.')
if self._not_valid_after is not None:
raise ValueError('The not valid after may only be set once.')
time = _convert_to_naive_utc_time(time)
if time <= _UNIX_EPOCH:
raise ValueError('The not valid after date must be after the unix'
' epoch (1970 January 1).')
if (self._not_valid_before is not None and
time < self._not_valid_before):
raise ValueError(
'The not valid after date must be after the not valid before '
'date.'
)
return CertificateBuilder(
self._issuer_name, self._subject_name,
self._public_key, self._serial_number, self._not_valid_before,
time, self._extensions
)
def add_extension(self, extension, critical):
"""
Adds an X.509 extension to the certificate.
"""
if not isinstance(extension, ExtensionType):
raise TypeError("extension must be an ExtensionType")
extension = Extension(extension.oid, critical, extension)
# TODO: This is quadratic in the number of extensions
for e in self._extensions:
if e.oid == extension.oid:
raise ValueError('This extension has already been set.')
return CertificateBuilder(
self._issuer_name, self._subject_name,
self._public_key, self._serial_number, self._not_valid_before,
self._not_valid_after, self._extensions + [extension]
)
def sign(self, private_key, algorithm, backend):
"""
Signs the certificate using the CA's private key.
"""
if self._subject_name is None:
raise ValueError("A certificate must have a subject name")
if self._issuer_name is None:
raise ValueError("A certificate must have an issuer name")
if self._serial_number is None:
raise ValueError("A certificate must have a serial number")
if self._not_valid_before is None:
raise ValueError("A certificate must have a not valid before time")
if self._not_valid_after is None:
raise ValueError("A certificate must have a not valid after time")
if self._public_key is None:
raise ValueError("A certificate must have a public key")
return backend.create_x509_certificate(self, private_key, algorithm)
class CertificateRevocationListBuilder(object):
def __init__(self, issuer_name=None, last_update=None, next_update=None,
extensions=[], revoked_certificates=[]):
self._issuer_name = issuer_name
self._last_update = last_update
self._next_update = next_update
self._extensions = extensions
self._revoked_certificates = revoked_certificates
def issuer_name(self, issuer_name):
if not isinstance(issuer_name, Name):
raise TypeError('Expecting x509.Name object.')
if self._issuer_name is not None:
raise ValueError('The issuer name may only be set once.')
return CertificateRevocationListBuilder(
issuer_name, self._last_update, self._next_update,
self._extensions, self._revoked_certificates
)
def last_update(self, last_update):
if not isinstance(last_update, datetime.datetime):
raise TypeError('Expecting datetime object.')
if self._last_update is not None:
raise ValueError('Last update may only be set once.')
last_update = _convert_to_naive_utc_time(last_update)
if last_update <= _UNIX_EPOCH:
raise ValueError('The last update date must be after the unix'
' epoch (1970 January 1).')
if self._next_update is not None and last_update > self._next_update:
raise ValueError(
'The last update date must be before the next update date.'
)
return CertificateRevocationListBuilder(
self._issuer_name, last_update, self._next_update,
self._extensions, self._revoked_certificates
)
def next_update(self, next_update):
if not isinstance(next_update, datetime.datetime):
raise TypeError('Expecting datetime object.')
if self._next_update is not None:
raise ValueError('Last update may only be set once.')
next_update = _convert_to_naive_utc_time(next_update)
if next_update <= _UNIX_EPOCH:
raise ValueError('The last update date must be after the unix'
' epoch (1970 January 1).')
if self._last_update is not None and next_update < self._last_update:
raise ValueError(
'The next update date must be after the last update date.'
)
return CertificateRevocationListBuilder(
self._issuer_name, self._last_update, next_update,
self._extensions, self._revoked_certificates
)
def add_extension(self, extension, critical):
"""
Adds an X.509 extension to the certificate revocation list.
"""
if not isinstance(extension, ExtensionType):
raise TypeError("extension must be an ExtensionType")
extension = Extension(extension.oid, critical, extension)
# TODO: This is quadratic in the number of extensions
for e in self._extensions:
if e.oid == extension.oid:
raise ValueError('This extension has already been set.')
return CertificateRevocationListBuilder(
self._issuer_name, self._last_update, self._next_update,
self._extensions + [extension], self._revoked_certificates
)
def add_revoked_certificate(self, revoked_certificate):
"""
Adds a revoked certificate to the CRL.
"""
if not isinstance(revoked_certificate, RevokedCertificate):
raise TypeError("Must be an instance of RevokedCertificate")
return CertificateRevocationListBuilder(
self._issuer_name, self._last_update,
self._next_update, self._extensions,
self._revoked_certificates + [revoked_certificate]
)
def sign(self, private_key, algorithm, backend):
if self._issuer_name is None:
raise ValueError("A CRL must have an issuer name")
if self._last_update is None:
raise ValueError("A CRL must have a last update time")
if self._next_update is None:
raise ValueError("A CRL must have a next update time")
return backend.create_x509_crl(self, private_key, algorithm)
class RevokedCertificateBuilder(object):
def __init__(self, serial_number=None, revocation_date=None,
extensions=[]):
self._serial_number = serial_number
self._revocation_date = revocation_date
self._extensions = extensions
def serial_number(self, number):
if not isinstance(number, six.integer_types):
raise TypeError('Serial number must be of integral type.')
if self._serial_number is not None:
raise ValueError('The serial number may only be set once.')
if number <= 0:
raise ValueError('The serial number should be positive')
# ASN.1 integers are always signed, so most significant bit must be
# zero.
if number.bit_length() >= 160: # As defined in RFC 5280
raise ValueError('The serial number should not be more than 159 '
'bits.')
return RevokedCertificateBuilder(
number, self._revocation_date, self._extensions
)
def revocation_date(self, time):
if not isinstance(time, datetime.datetime):
raise TypeError('Expecting datetime object.')
if self._revocation_date is not None:
raise ValueError('The revocation date may only be set once.')
time = _convert_to_naive_utc_time(time)
if time <= _UNIX_EPOCH:
raise ValueError('The revocation date must be after the unix'
' epoch (1970 January 1).')
return RevokedCertificateBuilder(
self._serial_number, time, self._extensions
)
def add_extension(self, extension, critical):
if not isinstance(extension, ExtensionType):
raise TypeError("extension must be an ExtensionType")
extension = Extension(extension.oid, critical, extension)
# TODO: This is quadratic in the number of extensions
for e in self._extensions:
if e.oid == extension.oid:
raise ValueError('This extension has already been set.')
return RevokedCertificateBuilder(
self._serial_number, self._revocation_date,
self._extensions + [extension]
)
def build(self, backend):
if self._serial_number is None:
raise ValueError("A revoked certificate must have a serial number")
if self._revocation_date is None:
raise ValueError(
"A revoked certificate must have a revocation date"
)
return backend.create_x509_revoked_certificate(self)
def random_serial_number():
return utils.int_from_bytes(os.urandom(20), "big") >> 1
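# A minimal sketch of CertificateBuilder: a self-signed certificate with an
# illustrative common name, key size, and 30-day validity window.
import datetime
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.x509.oid import NameOID

key = rsa.generate_private_key(public_exponent=65537, key_size=2048,
                               backend=default_backend())
name = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, u"example.com")])
now = datetime.datetime.utcnow()
cert = (
    x509.CertificateBuilder()
    .subject_name(name)
    .issuer_name(name)  # self-signed, so issuer == subject
    .public_key(key.public_key())
    .serial_number(x509.random_serial_number())
    .not_valid_before(now)
    .not_valid_after(now + datetime.timedelta(days=30))
    .sign(key, hashes.SHA256(), default_backend())
)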

View File

@ -0,0 +1,46 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import abc
from enum import Enum
import six
class LogEntryType(Enum):
X509_CERTIFICATE = 0
PRE_CERTIFICATE = 1
class Version(Enum):
v1 = 0
@six.add_metaclass(abc.ABCMeta)
class SignedCertificateTimestamp(object):
@abc.abstractproperty
def version(self):
"""
Returns the SCT version.
"""
@abc.abstractproperty
def log_id(self):
"""
Returns an identifier indicating which log this SCT is for.
"""
@abc.abstractproperty
def timestamp(self):
"""
Returns the timestamp for this SCT.
"""
@abc.abstractproperty
def entry_type(self):
"""
Returns whether this is an SCT for a certificate or pre-certificate.
"""

File diff suppressed because it is too large

View File

@ -0,0 +1,345 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import abc
import ipaddress
import warnings
from email.utils import parseaddr
import idna
import six
from six.moves import urllib_parse
from cryptography import utils
from cryptography.x509.name import Name
from cryptography.x509.oid import ObjectIdentifier
_GENERAL_NAMES = {
0: "otherName",
1: "rfc822Name",
2: "dNSName",
3: "x400Address",
4: "directoryName",
5: "ediPartyName",
6: "uniformResourceIdentifier",
7: "iPAddress",
8: "registeredID",
}
class UnsupportedGeneralNameType(Exception):
def __init__(self, msg, type):
super(UnsupportedGeneralNameType, self).__init__(msg)
self.type = type
@six.add_metaclass(abc.ABCMeta)
class GeneralName(object):
@abc.abstractproperty
def value(self):
"""
Return the value of the object
"""
@utils.register_interface(GeneralName)
class RFC822Name(object):
def __init__(self, value):
if isinstance(value, six.text_type):
try:
value.encode("ascii")
except UnicodeEncodeError:
value = self._idna_encode(value)
warnings.warn(
"RFC822Name values should be passed as an A-label string. "
"This means unicode characters should be encoded via "
"idna. Support for passing unicode strings (aka U-label) "
"will be removed in a future version.",
utils.DeprecatedIn21,
stacklevel=2,
)
else:
raise TypeError("value must be string")
name, address = parseaddr(value)
if name or not address:
# parseaddr has found a name (e.g. Name <email>) or the entire
# value is an empty string.
raise ValueError("Invalid rfc822name value")
self._value = value
value = utils.read_only_property("_value")
@classmethod
def _init_without_validation(cls, value):
instance = cls.__new__(cls)
instance._value = value
return instance
def _idna_encode(self, value):
_, address = parseaddr(value)
parts = address.split(u"@")
return parts[0] + "@" + idna.encode(parts[1]).decode("ascii")
def __repr__(self):
return "<RFC822Name(value={0!r})>".format(self.value)
def __eq__(self, other):
if not isinstance(other, RFC822Name):
return NotImplemented
return self.value == other.value
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash(self.value)
def _idna_encode(value):
# Retain prefixes '*.' for common/alt names and '.' for name constraints
for prefix in ['*.', '.']:
if value.startswith(prefix):
value = value[len(prefix):]
return prefix + idna.encode(value).decode("ascii")
return idna.encode(value).decode("ascii")
@utils.register_interface(GeneralName)
class DNSName(object):
def __init__(self, value):
if isinstance(value, six.text_type):
try:
value.encode("ascii")
except UnicodeEncodeError:
value = _idna_encode(value)
warnings.warn(
"DNSName values should be passed as an A-label string. "
"This means unicode characters should be encoded via "
"idna. Support for passing unicode strings (aka U-label) "
"will be removed in a future version.",
utils.DeprecatedIn21,
stacklevel=2,
)
else:
raise TypeError("value must be string")
self._value = value
value = utils.read_only_property("_value")
@classmethod
def _init_without_validation(cls, value):
instance = cls.__new__(cls)
instance._value = value
return instance
def __repr__(self):
return "<DNSName(value={0!r})>".format(self.value)
def __eq__(self, other):
if not isinstance(other, DNSName):
return NotImplemented
return self.value == other.value
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash(self.value)
@utils.register_interface(GeneralName)
class UniformResourceIdentifier(object):
def __init__(self, value):
if isinstance(value, six.text_type):
try:
value.encode("ascii")
except UnicodeEncodeError:
value = self._idna_encode(value)
warnings.warn(
"URI values should be passed as an A-label string. "
"This means unicode characters should be encoded via "
"idna. Support for passing unicode strings (aka U-label) "
" will be removed in a future version.",
utils.DeprecatedIn21,
stacklevel=2,
)
else:
raise TypeError("value must be string")
self._value = value
value = utils.read_only_property("_value")
@classmethod
def _init_without_validation(cls, value):
instance = cls.__new__(cls)
instance._value = value
return instance
def _idna_encode(self, value):
parsed = urllib_parse.urlparse(value)
if parsed.port:
netloc = (
idna.encode(parsed.hostname) +
":{0}".format(parsed.port).encode("ascii")
).decode("ascii")
else:
netloc = idna.encode(parsed.hostname).decode("ascii")
# Note that building a URL in this fashion means it should be
# semantically indistinguishable from the original but is not
# guaranteed to be exactly the same.
return urllib_parse.urlunparse((
parsed.scheme,
netloc,
parsed.path,
parsed.params,
parsed.query,
parsed.fragment
))
def __repr__(self):
return "<UniformResourceIdentifier(value={0!r})>".format(self.value)
def __eq__(self, other):
if not isinstance(other, UniformResourceIdentifier):
return NotImplemented
return self.value == other.value
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash(self.value)
@utils.register_interface(GeneralName)
class DirectoryName(object):
def __init__(self, value):
if not isinstance(value, Name):
raise TypeError("value must be a Name")
self._value = value
value = utils.read_only_property("_value")
def __repr__(self):
return "<DirectoryName(value={0})>".format(self.value)
def __eq__(self, other):
if not isinstance(other, DirectoryName):
return NotImplemented
return self.value == other.value
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash(self.value)
@utils.register_interface(GeneralName)
class RegisteredID(object):
def __init__(self, value):
if not isinstance(value, ObjectIdentifier):
raise TypeError("value must be an ObjectIdentifier")
self._value = value
value = utils.read_only_property("_value")
def __repr__(self):
return "<RegisteredID(value={0})>".format(self.value)
def __eq__(self, other):
if not isinstance(other, RegisteredID):
return NotImplemented
return self.value == other.value
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash(self.value)
@utils.register_interface(GeneralName)
class IPAddress(object):
def __init__(self, value):
if not isinstance(
value,
(
ipaddress.IPv4Address,
ipaddress.IPv6Address,
ipaddress.IPv4Network,
ipaddress.IPv6Network
)
):
raise TypeError(
"value must be an instance of ipaddress.IPv4Address, "
"ipaddress.IPv6Address, ipaddress.IPv4Network, or "
"ipaddress.IPv6Network"
)
self._value = value
value = utils.read_only_property("_value")
def __repr__(self):
return "<IPAddress(value={0})>".format(self.value)
def __eq__(self, other):
if not isinstance(other, IPAddress):
return NotImplemented
return self.value == other.value
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash(self.value)
@utils.register_interface(GeneralName)
class OtherName(object):
def __init__(self, type_id, value):
if not isinstance(type_id, ObjectIdentifier):
raise TypeError("type_id must be an ObjectIdentifier")
if not isinstance(value, bytes):
raise TypeError("value must be a binary string")
self._type_id = type_id
self._value = value
type_id = utils.read_only_property("_type_id")
value = utils.read_only_property("_value")
def __repr__(self):
return "<OtherName(type_id={0}, value={1!r})>".format(
self.type_id, self.value)
def __eq__(self, other):
if not isinstance(other, OtherName):
return NotImplemented
return self.type_id == other.type_id and self.value == other.value
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash((self.type_id, self.value))
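# Illustrative sketch (not from the vendored source above): constructing a few
# of the general-name types defined here, assuming they are re-exported from
# cryptography.x509 as in upstream 2.3.x; these typically feed a
# SubjectAlternativeName extension.
import ipaddress
from cryptography import x509

names = [
    x509.DNSName(u"example.com"),
    x509.RFC822Name(u"admin@example.com"),
    x509.IPAddress(ipaddress.ip_address(u"203.0.113.7")),
]
print([n.value for n in names])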

View File

@ -0,0 +1,190 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from enum import Enum
import six
from cryptography import utils
from cryptography.x509.oid import NameOID, ObjectIdentifier
class _ASN1Type(Enum):
UTF8String = 12
NumericString = 18
PrintableString = 19
T61String = 20
IA5String = 22
UTCTime = 23
GeneralizedTime = 24
VisibleString = 26
UniversalString = 28
BMPString = 30
_ASN1_TYPE_TO_ENUM = dict((i.value, i) for i in _ASN1Type)
_SENTINEL = object()
_NAMEOID_DEFAULT_TYPE = {
NameOID.COUNTRY_NAME: _ASN1Type.PrintableString,
NameOID.JURISDICTION_COUNTRY_NAME: _ASN1Type.PrintableString,
NameOID.SERIAL_NUMBER: _ASN1Type.PrintableString,
NameOID.DN_QUALIFIER: _ASN1Type.PrintableString,
NameOID.EMAIL_ADDRESS: _ASN1Type.IA5String,
NameOID.DOMAIN_COMPONENT: _ASN1Type.IA5String,
}
class NameAttribute(object):
def __init__(self, oid, value, _type=_SENTINEL):
if not isinstance(oid, ObjectIdentifier):
raise TypeError(
"oid argument must be an ObjectIdentifier instance."
)
if not isinstance(value, six.text_type):
raise TypeError(
"value argument must be a text type."
)
if (
oid == NameOID.COUNTRY_NAME or
oid == NameOID.JURISDICTION_COUNTRY_NAME
):
if len(value.encode("utf8")) != 2:
raise ValueError(
"Country name must be a 2 character country code"
)
if len(value) == 0:
raise ValueError("Value cannot be an empty string")
# The appropriate ASN1 string type varies by OID and is defined across
# multiple RFCs including 2459, 3280, and 5280. In general UTF8String
# is preferred (2459), but 3280 and 5280 specify several OIDs with
# alternate types. This means when we see the sentinel value we need
# to look up whether the OID has a non-UTF8 type. If it does, set it
# to that. Otherwise, UTF8!
if _type == _SENTINEL:
_type = _NAMEOID_DEFAULT_TYPE.get(oid, _ASN1Type.UTF8String)
if not isinstance(_type, _ASN1Type):
raise TypeError("_type must be from the _ASN1Type enum")
self._oid = oid
self._value = value
self._type = _type
oid = utils.read_only_property("_oid")
value = utils.read_only_property("_value")
def __eq__(self, other):
if not isinstance(other, NameAttribute):
return NotImplemented
return (
self.oid == other.oid and
self.value == other.value
)
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash((self.oid, self.value))
def __repr__(self):
return "<NameAttribute(oid={0.oid}, value={0.value!r})>".format(self)
class RelativeDistinguishedName(object):
def __init__(self, attributes):
attributes = list(attributes)
if not attributes:
raise ValueError("a relative distinguished name cannot be empty")
if not all(isinstance(x, NameAttribute) for x in attributes):
raise TypeError("attributes must be an iterable of NameAttribute")
# Keep list and frozenset to preserve attribute order where it matters
self._attributes = attributes
self._attribute_set = frozenset(attributes)
if len(self._attribute_set) != len(attributes):
raise ValueError("duplicate attributes are not allowed")
def get_attributes_for_oid(self, oid):
return [i for i in self if i.oid == oid]
def __eq__(self, other):
if not isinstance(other, RelativeDistinguishedName):
return NotImplemented
return self._attribute_set == other._attribute_set
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash(self._attribute_set)
def __iter__(self):
return iter(self._attributes)
def __len__(self):
return len(self._attributes)
def __repr__(self):
return "<RelativeDistinguishedName({0!r})>".format(list(self))
class Name(object):
def __init__(self, attributes):
attributes = list(attributes)
if all(isinstance(x, NameAttribute) for x in attributes):
self._attributes = [
RelativeDistinguishedName([x]) for x in attributes
]
elif all(isinstance(x, RelativeDistinguishedName) for x in attributes):
self._attributes = attributes
else:
raise TypeError(
"attributes must be a list of NameAttribute"
" or a list RelativeDistinguishedName"
)
def get_attributes_for_oid(self, oid):
return [i for i in self if i.oid == oid]
@property
def rdns(self):
return self._attributes
def public_bytes(self, backend):
return backend.x509_name_bytes(self)
def __eq__(self, other):
if not isinstance(other, Name):
return NotImplemented
return self._attributes == other._attributes
def __ne__(self, other):
return not self == other
def __hash__(self):
# TODO: this is relatively expensive, if this looks like a bottleneck
# for you, consider optimizing!
return hash(tuple(self._attributes))
def __iter__(self):
for rdn in self._attributes:
for ava in rdn:
yield ava
def __len__(self):
return sum(len(rdn) for rdn in self._attributes)
def __repr__(self):
return "<Name({0!r})>".format(list(self))

View File

@ -0,0 +1,271 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.hazmat.primitives import hashes
class ObjectIdentifier(object):
def __init__(self, dotted_string):
self._dotted_string = dotted_string
nodes = self._dotted_string.split(".")
intnodes = []
# There must be at least 2 nodes; the first node must be 0..2, and
# if the first node is less than 2, the second node cannot have a
# value outside the range 0..39. All nodes must be integers.
for node in nodes:
try:
intnodes.append(int(node, 0))
except ValueError:
raise ValueError(
"Malformed OID: %s (non-integer nodes)" % (
self._dotted_string))
if len(nodes) < 2:
raise ValueError(
"Malformed OID: %s (insufficient number of nodes)" % (
self._dotted_string))
if intnodes[0] > 2:
raise ValueError(
"Malformed OID: %s (first node outside valid range)" % (
self._dotted_string))
if intnodes[0] < 2 and intnodes[1] >= 40:
raise ValueError(
"Malformed OID: %s (second node outside valid range)" % (
self._dotted_string))
def __eq__(self, other):
if not isinstance(other, ObjectIdentifier):
return NotImplemented
return self.dotted_string == other.dotted_string
def __ne__(self, other):
return not self == other
def __repr__(self):
return "<ObjectIdentifier(oid={0}, name={1})>".format(
self.dotted_string,
self._name
)
def __hash__(self):
return hash(self.dotted_string)
@property
def _name(self):
return _OID_NAMES.get(self, "Unknown OID")
dotted_string = utils.read_only_property("_dotted_string")
class ExtensionOID(object):
SUBJECT_DIRECTORY_ATTRIBUTES = ObjectIdentifier("2.5.29.9")
SUBJECT_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.14")
KEY_USAGE = ObjectIdentifier("2.5.29.15")
SUBJECT_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.17")
ISSUER_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.18")
BASIC_CONSTRAINTS = ObjectIdentifier("2.5.29.19")
NAME_CONSTRAINTS = ObjectIdentifier("2.5.29.30")
CRL_DISTRIBUTION_POINTS = ObjectIdentifier("2.5.29.31")
CERTIFICATE_POLICIES = ObjectIdentifier("2.5.29.32")
POLICY_MAPPINGS = ObjectIdentifier("2.5.29.33")
AUTHORITY_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.35")
POLICY_CONSTRAINTS = ObjectIdentifier("2.5.29.36")
EXTENDED_KEY_USAGE = ObjectIdentifier("2.5.29.37")
FRESHEST_CRL = ObjectIdentifier("2.5.29.46")
INHIBIT_ANY_POLICY = ObjectIdentifier("2.5.29.54")
AUTHORITY_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.1")
SUBJECT_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.11")
OCSP_NO_CHECK = ObjectIdentifier("1.3.6.1.5.5.7.48.1.5")
TLS_FEATURE = ObjectIdentifier("1.3.6.1.5.5.7.1.24")
CRL_NUMBER = ObjectIdentifier("2.5.29.20")
DELTA_CRL_INDICATOR = ObjectIdentifier("2.5.29.27")
PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS = (
ObjectIdentifier("1.3.6.1.4.1.11129.2.4.2")
)
class CRLEntryExtensionOID(object):
CERTIFICATE_ISSUER = ObjectIdentifier("2.5.29.29")
CRL_REASON = ObjectIdentifier("2.5.29.21")
INVALIDITY_DATE = ObjectIdentifier("2.5.29.24")
class NameOID(object):
COMMON_NAME = ObjectIdentifier("2.5.4.3")
COUNTRY_NAME = ObjectIdentifier("2.5.4.6")
LOCALITY_NAME = ObjectIdentifier("2.5.4.7")
STATE_OR_PROVINCE_NAME = ObjectIdentifier("2.5.4.8")
STREET_ADDRESS = ObjectIdentifier("2.5.4.9")
ORGANIZATION_NAME = ObjectIdentifier("2.5.4.10")
ORGANIZATIONAL_UNIT_NAME = ObjectIdentifier("2.5.4.11")
SERIAL_NUMBER = ObjectIdentifier("2.5.4.5")
SURNAME = ObjectIdentifier("2.5.4.4")
GIVEN_NAME = ObjectIdentifier("2.5.4.42")
TITLE = ObjectIdentifier("2.5.4.12")
GENERATION_QUALIFIER = ObjectIdentifier("2.5.4.44")
X500_UNIQUE_IDENTIFIER = ObjectIdentifier("2.5.4.45")
DN_QUALIFIER = ObjectIdentifier("2.5.4.46")
PSEUDONYM = ObjectIdentifier("2.5.4.65")
USER_ID = ObjectIdentifier("0.9.2342.19200300.100.1.1")
DOMAIN_COMPONENT = ObjectIdentifier("0.9.2342.19200300.100.1.25")
EMAIL_ADDRESS = ObjectIdentifier("1.2.840.113549.1.9.1")
JURISDICTION_COUNTRY_NAME = ObjectIdentifier("1.3.6.1.4.1.311.60.2.1.3")
JURISDICTION_LOCALITY_NAME = ObjectIdentifier("1.3.6.1.4.1.311.60.2.1.1")
JURISDICTION_STATE_OR_PROVINCE_NAME = ObjectIdentifier(
"1.3.6.1.4.1.311.60.2.1.2"
)
BUSINESS_CATEGORY = ObjectIdentifier("2.5.4.15")
POSTAL_ADDRESS = ObjectIdentifier("2.5.4.16")
POSTAL_CODE = ObjectIdentifier("2.5.4.17")
class SignatureAlgorithmOID(object):
RSA_WITH_MD5 = ObjectIdentifier("1.2.840.113549.1.1.4")
RSA_WITH_SHA1 = ObjectIdentifier("1.2.840.113549.1.1.5")
# This is an alternate OID for RSA with SHA1 that is occasionally seen
_RSA_WITH_SHA1 = ObjectIdentifier("1.3.14.3.2.29")
RSA_WITH_SHA224 = ObjectIdentifier("1.2.840.113549.1.1.14")
RSA_WITH_SHA256 = ObjectIdentifier("1.2.840.113549.1.1.11")
RSA_WITH_SHA384 = ObjectIdentifier("1.2.840.113549.1.1.12")
RSA_WITH_SHA512 = ObjectIdentifier("1.2.840.113549.1.1.13")
RSASSA_PSS = ObjectIdentifier("1.2.840.113549.1.1.10")
ECDSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10045.4.1")
ECDSA_WITH_SHA224 = ObjectIdentifier("1.2.840.10045.4.3.1")
ECDSA_WITH_SHA256 = ObjectIdentifier("1.2.840.10045.4.3.2")
ECDSA_WITH_SHA384 = ObjectIdentifier("1.2.840.10045.4.3.3")
ECDSA_WITH_SHA512 = ObjectIdentifier("1.2.840.10045.4.3.4")
DSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10040.4.3")
DSA_WITH_SHA224 = ObjectIdentifier("2.16.840.1.101.3.4.3.1")
DSA_WITH_SHA256 = ObjectIdentifier("2.16.840.1.101.3.4.3.2")
_SIG_OIDS_TO_HASH = {
SignatureAlgorithmOID.RSA_WITH_MD5: hashes.MD5(),
SignatureAlgorithmOID.RSA_WITH_SHA1: hashes.SHA1(),
SignatureAlgorithmOID._RSA_WITH_SHA1: hashes.SHA1(),
SignatureAlgorithmOID.RSA_WITH_SHA224: hashes.SHA224(),
SignatureAlgorithmOID.RSA_WITH_SHA256: hashes.SHA256(),
SignatureAlgorithmOID.RSA_WITH_SHA384: hashes.SHA384(),
SignatureAlgorithmOID.RSA_WITH_SHA512: hashes.SHA512(),
SignatureAlgorithmOID.ECDSA_WITH_SHA1: hashes.SHA1(),
SignatureAlgorithmOID.ECDSA_WITH_SHA224: hashes.SHA224(),
SignatureAlgorithmOID.ECDSA_WITH_SHA256: hashes.SHA256(),
SignatureAlgorithmOID.ECDSA_WITH_SHA384: hashes.SHA384(),
SignatureAlgorithmOID.ECDSA_WITH_SHA512: hashes.SHA512(),
SignatureAlgorithmOID.DSA_WITH_SHA1: hashes.SHA1(),
SignatureAlgorithmOID.DSA_WITH_SHA224: hashes.SHA224(),
SignatureAlgorithmOID.DSA_WITH_SHA256: hashes.SHA256()
}
class ExtendedKeyUsageOID(object):
SERVER_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.1")
CLIENT_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.2")
CODE_SIGNING = ObjectIdentifier("1.3.6.1.5.5.7.3.3")
EMAIL_PROTECTION = ObjectIdentifier("1.3.6.1.5.5.7.3.4")
TIME_STAMPING = ObjectIdentifier("1.3.6.1.5.5.7.3.8")
OCSP_SIGNING = ObjectIdentifier("1.3.6.1.5.5.7.3.9")
ANY_EXTENDED_KEY_USAGE = ObjectIdentifier("2.5.29.37.0")
class AuthorityInformationAccessOID(object):
CA_ISSUERS = ObjectIdentifier("1.3.6.1.5.5.7.48.2")
OCSP = ObjectIdentifier("1.3.6.1.5.5.7.48.1")
class CertificatePoliciesOID(object):
CPS_QUALIFIER = ObjectIdentifier("1.3.6.1.5.5.7.2.1")
CPS_USER_NOTICE = ObjectIdentifier("1.3.6.1.5.5.7.2.2")
ANY_POLICY = ObjectIdentifier("2.5.29.32.0")
_OID_NAMES = {
NameOID.COMMON_NAME: "commonName",
NameOID.COUNTRY_NAME: "countryName",
NameOID.LOCALITY_NAME: "localityName",
NameOID.STATE_OR_PROVINCE_NAME: "stateOrProvinceName",
NameOID.STREET_ADDRESS: "streetAddress",
NameOID.ORGANIZATION_NAME: "organizationName",
NameOID.ORGANIZATIONAL_UNIT_NAME: "organizationalUnitName",
NameOID.SERIAL_NUMBER: "serialNumber",
NameOID.SURNAME: "surname",
NameOID.GIVEN_NAME: "givenName",
NameOID.TITLE: "title",
NameOID.GENERATION_QUALIFIER: "generationQualifier",
NameOID.X500_UNIQUE_IDENTIFIER: "x500UniqueIdentifier",
NameOID.DN_QUALIFIER: "dnQualifier",
NameOID.PSEUDONYM: "pseudonym",
NameOID.USER_ID: "userID",
NameOID.DOMAIN_COMPONENT: "domainComponent",
NameOID.EMAIL_ADDRESS: "emailAddress",
NameOID.JURISDICTION_COUNTRY_NAME: "jurisdictionCountryName",
NameOID.JURISDICTION_LOCALITY_NAME: "jurisdictionLocalityName",
NameOID.JURISDICTION_STATE_OR_PROVINCE_NAME: (
"jurisdictionStateOrProvinceName"
),
NameOID.BUSINESS_CATEGORY: "businessCategory",
NameOID.POSTAL_ADDRESS: "postalAddress",
NameOID.POSTAL_CODE: "postalCode",
SignatureAlgorithmOID.RSA_WITH_MD5: "md5WithRSAEncryption",
SignatureAlgorithmOID.RSA_WITH_SHA1: "sha1WithRSAEncryption",
SignatureAlgorithmOID.RSA_WITH_SHA224: "sha224WithRSAEncryption",
SignatureAlgorithmOID.RSA_WITH_SHA256: "sha256WithRSAEncryption",
SignatureAlgorithmOID.RSA_WITH_SHA384: "sha384WithRSAEncryption",
SignatureAlgorithmOID.RSA_WITH_SHA512: "sha512WithRSAEncryption",
SignatureAlgorithmOID.RSASSA_PSS: "RSASSA-PSS",
SignatureAlgorithmOID.ECDSA_WITH_SHA1: "ecdsa-with-SHA1",
SignatureAlgorithmOID.ECDSA_WITH_SHA224: "ecdsa-with-SHA224",
SignatureAlgorithmOID.ECDSA_WITH_SHA256: "ecdsa-with-SHA256",
SignatureAlgorithmOID.ECDSA_WITH_SHA384: "ecdsa-with-SHA384",
SignatureAlgorithmOID.ECDSA_WITH_SHA512: "ecdsa-with-SHA512",
SignatureAlgorithmOID.DSA_WITH_SHA1: "dsa-with-sha1",
SignatureAlgorithmOID.DSA_WITH_SHA224: "dsa-with-sha224",
SignatureAlgorithmOID.DSA_WITH_SHA256: "dsa-with-sha256",
ExtendedKeyUsageOID.SERVER_AUTH: "serverAuth",
ExtendedKeyUsageOID.CLIENT_AUTH: "clientAuth",
ExtendedKeyUsageOID.CODE_SIGNING: "codeSigning",
ExtendedKeyUsageOID.EMAIL_PROTECTION: "emailProtection",
ExtendedKeyUsageOID.TIME_STAMPING: "timeStamping",
ExtendedKeyUsageOID.OCSP_SIGNING: "OCSPSigning",
ExtensionOID.SUBJECT_DIRECTORY_ATTRIBUTES: "subjectDirectoryAttributes",
ExtensionOID.SUBJECT_KEY_IDENTIFIER: "subjectKeyIdentifier",
ExtensionOID.KEY_USAGE: "keyUsage",
ExtensionOID.SUBJECT_ALTERNATIVE_NAME: "subjectAltName",
ExtensionOID.ISSUER_ALTERNATIVE_NAME: "issuerAltName",
ExtensionOID.BASIC_CONSTRAINTS: "basicConstraints",
ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS: (
"signedCertificateTimestampList"
),
CRLEntryExtensionOID.CRL_REASON: "cRLReason",
CRLEntryExtensionOID.INVALIDITY_DATE: "invalidityDate",
CRLEntryExtensionOID.CERTIFICATE_ISSUER: "certificateIssuer",
ExtensionOID.NAME_CONSTRAINTS: "nameConstraints",
ExtensionOID.CRL_DISTRIBUTION_POINTS: "cRLDistributionPoints",
ExtensionOID.CERTIFICATE_POLICIES: "certificatePolicies",
ExtensionOID.POLICY_MAPPINGS: "policyMappings",
ExtensionOID.AUTHORITY_KEY_IDENTIFIER: "authorityKeyIdentifier",
ExtensionOID.POLICY_CONSTRAINTS: "policyConstraints",
ExtensionOID.EXTENDED_KEY_USAGE: "extendedKeyUsage",
ExtensionOID.FRESHEST_CRL: "freshestCRL",
ExtensionOID.INHIBIT_ANY_POLICY: "inhibitAnyPolicy",
ExtensionOID.AUTHORITY_INFORMATION_ACCESS: "authorityInfoAccess",
ExtensionOID.SUBJECT_INFORMATION_ACCESS: "subjectInfoAccess",
ExtensionOID.OCSP_NO_CHECK: "OCSPNoCheck",
ExtensionOID.CRL_NUMBER: "cRLNumber",
ExtensionOID.DELTA_CRL_INDICATOR: "deltaCRLIndicator",
ExtensionOID.TLS_FEATURE: "TLSFeature",
AuthorityInformationAccessOID.OCSP: "OCSP",
AuthorityInformationAccessOID.CA_ISSUERS: "caIssuers",
CertificatePoliciesOID.CPS_QUALIFIER: "id-qt-cps",
CertificatePoliciesOID.CPS_USER_NOTICE: "id-qt-unotice",
}
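# Illustrative sketch (not from the vendored source above): ObjectIdentifier
# equality compares dotted strings, and repr() resolves the friendly name
# through the _OID_NAMES table defined here.
from cryptography.x509.oid import ExtensionOID, ObjectIdentifier

oid = ObjectIdentifier("2.5.29.17")
print(oid == ExtensionOID.SUBJECT_ALTERNATIVE_NAME)   # True
print(repr(oid))   # <ObjectIdentifier(oid=2.5.29.17, name=subjectAltName)>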

View File

@ -5,4 +5,4 @@
# the MIT License: http://www.opensource.org/licenses/mit-license.php
__version__ = '1.0.6'
__version__ = '1.0.7'

View File

@ -187,7 +187,7 @@ def iter_fields(node):
def get_fields(node):
"""Like `iter_fiels` but returns a dict."""
"""Like `iter_fields` but returns a dict."""
return dict(iter_fields(node))

View File

@ -95,7 +95,7 @@ class Cache(object):
**self._get_cache_kw(kw, context))
def set(self, key, value, **kw):
"""Place a value in the cache.
r"""Place a value in the cache.
:param key: the value's key.
:param value: the value.
@ -113,7 +113,7 @@ class Cache(object):
"""
def get(self, key, **kw):
"""Retrieve a value from the cache.
r"""Retrieve a value from the cache.
:param key: the value's key.
:param \**kw: cache configuration arguments. The
@ -125,7 +125,7 @@ class Cache(object):
return self.impl.get(key, **self._get_cache_kw(kw, None))
def invalidate(self, key, **kw):
"""Invalidate a value in the cache.
r"""Invalidate a value in the cache.
:param key: the value's key.
:param \**kw: cache configuration arguments. The
@ -194,7 +194,7 @@ class CacheImpl(object):
"""
def get_or_create(self, key, creation_function, **kw):
"""Retrieve a value from the cache, using the given creation function
r"""Retrieve a value from the cache, using the given creation function
to generate a new value.
This function *must* return a value, either from
@ -212,7 +212,7 @@ class CacheImpl(object):
raise NotImplementedError()
def set(self, key, value, **kw):
"""Place a value in the cache.
r"""Place a value in the cache.
:param key: the value's key.
:param value: the value.
@ -222,7 +222,7 @@ class CacheImpl(object):
raise NotImplementedError()
def get(self, key, **kw):
"""Retrieve a value from the cache.
r"""Retrieve a value from the cache.
:param key: the value's key.
:param \**kw: cache configuration arguments.
@ -231,7 +231,7 @@ class CacheImpl(object):
raise NotImplementedError()
def invalidate(self, key, **kw):
"""Invalidate a value in the cache.
r"""Invalidate a value in the cache.
:param key: the value's key.
:param \**kw: cache configuration arguments.

View File

@ -58,7 +58,7 @@ def cmdline(argv=None):
kw = dict([varsplit(var) for var in options.var])
try:
print(template.render(**kw))
sys.stdout.write(template.render(**kw))
except:
_exit()

View File

@ -68,7 +68,7 @@ class MessageExtractor(object):
else:
continue
# Comments don't apply unless they immediately preceed the message
# Comments don't apply unless they immediately precede the message
if translator_comments and \
translator_comments[-1][0] < node.lineno - 1:
translator_comments = []

View File

@ -258,7 +258,7 @@ class Tag(compat.with_metaclass(_TagMeta, Node)):
def __init__(self, keyword, attributes, expressions,
nonexpressions, required, **kwargs):
"""construct a new Tag instance.
r"""construct a new Tag instance.
this constructor is not called directly, and is only called
by subclasses.

View File

@ -21,7 +21,7 @@ import weakref
class Template(object):
"""Represents a compiled template.
r"""Represents a compiled template.
:class:`.Template` includes a reference to the original
template source (via the :attr:`.source` attribute)

View File

@ -35,7 +35,11 @@ from .times import (
DateFromTicks, TimeFromTicks, TimestampFromTicks)
VERSION = (0, 7, 11, None)
VERSION = (0, 9, 2, None)
if VERSION[3] is not None:
VERSION_STRING = "%d.%d.%d_%s" % VERSION
else:
VERSION_STRING = "%d.%d.%d" % VERSION[:3]
threadsafety = 1
apilevel = "2.0"
paramstyle = "pyformat"
@ -96,12 +100,15 @@ del _orig_conn
def get_client_info(): # for MySQLdb compatibility
return '.'.join(map(str, VERSION))
version = VERSION
if VERSION[3] is None:
version = VERSION[:3]
return '.'.join(map(str, version))
connect = Connection = Connect
# we include a doctored version_info here for MySQLdb compatibility
version_info = (1,2,6,"final",0)
version_info = (1, 3, 12, "final", 0)
NULL = "NULL"
@ -113,7 +120,7 @@ def thread_safe():
def install_as_MySQLdb():
"""
After this function is called, any application that imports MySQLdb or
_mysql will unwittingly actually use
_mysql will unwittingly actually use pymysql.
"""
sys.modules["MySQLdb"] = sys.modules["_mysql"] = sys.modules["pymysql"]

File diff suppressed because it is too large

View File

@ -18,14 +18,14 @@ MULTI_STATEMENTS = 1 << 16
MULTI_RESULTS = 1 << 17
PS_MULTI_RESULTS = 1 << 18
PLUGIN_AUTH = 1 << 19
CONNECT_ATTRS = 1 << 20
PLUGIN_AUTH_LENENC_CLIENT_DATA = 1 << 21
CAPABILITIES = (
LONG_PASSWORD | LONG_FLAG | PROTOCOL_41 | TRANSACTIONS
| SECURE_CONNECTION | MULTI_STATEMENTS | MULTI_RESULTS
| PLUGIN_AUTH | PLUGIN_AUTH_LENENC_CLIENT_DATA)
| SECURE_CONNECTION | MULTI_RESULTS
| PLUGIN_AUTH | PLUGIN_AUTH_LENENC_CLIENT_DATA | CONNECT_ATTRS)
# Not done yet
CONNECT_ATTRS = 1 << 20
HANDLE_EXPIRED_PASSWORDS = 1 << 22
SESSION_TRACK = 1 << 23
DEPRECATE_EOF = 1 << 24

View File

@ -470,3 +470,6 @@ USERNAME = 1466
HOSTNAME = 1467
WRONG_STRING_LENGTH = 1468
ERROR_LAST = 1468
# https://github.com/PyMySQL/PyMySQL/issues/607
CONSTRAINT_FAILED = 4025

View File

@ -90,9 +90,14 @@ if PY2:
value = value.replace('"', '\\"')
return value
def escape_bytes(value, mapping=None):
def escape_bytes_prefixed(value, mapping=None):
assert isinstance(value, (bytes, bytearray))
return b"_binary'%s'" % escape_string(value)
def escape_bytes(value, mapping=None):
assert isinstance(value, (bytes, bytearray))
return b"'%s'" % escape_string(value)
else:
escape_string = _escape_unicode
@ -102,9 +107,12 @@ else:
# We can escape special chars and surrogateescape at once.
_escape_bytes_table = _escape_table + [chr(i) for i in range(0xdc80, 0xdd00)]
def escape_bytes(value, mapping=None):
def escape_bytes_prefixed(value, mapping=None):
return "_binary'%s'" % value.decode('latin1').translate(_escape_bytes_table)
def escape_bytes(value, mapping=None):
return "'%s'" % value.decode('latin1').translate(_escape_bytes_table)
def escape_unicode(value, mapping=None):
return u"'%s'" % _escape_unicode(value)
@ -211,7 +219,7 @@ def convert_timedelta(obj):
m = TIMEDELTA_RE.match(obj)
if not m:
return None
return obj
try:
groups = list(m.groups())
@ -227,7 +235,7 @@ def convert_timedelta(obj):
) * negate
return tdelta
except ValueError:
return None
return obj
TIME_RE = re.compile(r"(\d{1,2}):(\d{1,2}):(\d{1,2})(?:.(\d{1,6}))?")
@ -259,7 +267,7 @@ def convert_time(obj):
m = TIME_RE.match(obj)
if not m:
return None
return obj
try:
groups = list(m.groups())
@ -268,7 +276,7 @@ def convert_time(obj):
return datetime.time(hour=int(hours), minute=int(minutes),
second=int(seconds), microsecond=int(microseconds))
except ValueError:
return None
return obj
def convert_date(obj):
@ -290,7 +298,7 @@ def convert_date(obj):
try:
return datetime.date(*[ int(x) for x in obj.split('-', 2) ])
except ValueError:
return None
return obj
def convert_mysql_timestamp(timestamp):
@ -325,7 +333,7 @@ def convert_mysql_timestamp(timestamp):
try:
return datetime.datetime(year, month, day, hour, minute, second)
except ValueError:
return None
return timestamp
def convert_set(s):
if isinstance(s, (bytes, bytearray)):
@ -373,7 +381,6 @@ encoders = {
set: escape_sequence,
frozenset: escape_sequence,
dict: escape_dict,
bytearray: escape_bytes,
type(None): escape_None,
datetime.date: escape_date,
datetime.datetime: escape_datetime,
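# Illustrative sketch (not from the vendored source above; Python 3 branch):
# the split introduced here keeps escape_bytes() free of the _binary prefix,
# while escape_bytes_prefixed() adds it for callers that need it.
from pymysql.converters import escape_bytes, escape_bytes_prefixed

print(escape_bytes(b"it's"))             # 'it\'s'
print(escape_bytes_prefixed(b"it's"))    # _binary'it\'s'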

View File

@ -12,7 +12,7 @@ from . import err
#: executemany only supports simple bulk insert.
#: You can use it to load a large dataset.
RE_INSERT_VALUES = re.compile(
r"\s*((?:INSERT|REPLACE)\s.+\sVALUES?\s+)" +
r"\s*((?:INSERT|REPLACE)\b.+\bVALUES?\s*)" +
r"(\(\s*(?:%s|%\(.+\)s)\s*(?:,\s*(?:%s|%\(.+\)s)\s*)*\))" +
r"(\s*(?:ON DUPLICATE.*)?);?\s*\Z",
re.IGNORECASE | re.DOTALL)
@ -21,6 +21,12 @@ RE_INSERT_VALUES = re.compile(
class Cursor(object):
"""
This is the object you use to interact with the database.
Do not create an instance of a Cursor yourself. Call
connections.Connection.cursor().
See `Cursor <https://www.python.org/dev/peps/pep-0249/#cursor-objects>`_ in
the specification.
"""
#: Max statement size which :meth:`executemany` generates.
@ -32,10 +38,6 @@ class Cursor(object):
_defer_warnings = False
def __init__(self, connection):
"""
Do not create an instance of a Cursor yourself. Call
connections.Connection.cursor().
"""
self.connection = connection
self.description = None
self.rownumber = 0
@ -95,6 +97,8 @@ class Cursor(object):
return None
if not current_result.has_next:
return None
self._result = None
self._clear_result()
conn.next_result(unbuffered=unbuffered)
self._do_get_result()
return True
@ -260,9 +264,10 @@ class Cursor(object):
disconnected.
"""
conn = self._get_db()
for index, arg in enumerate(args):
q = "SET @_%s_%d=%s" % (procname, index, conn.escape(arg))
self._query(q)
if args:
fmt = '@_{0}_%d=%s'.format(procname)
self._query('SET %s' % ','.join(fmt % (index, conn.escape(arg))
for index, arg in enumerate(args)))
self.nextset()
q = "CALL %s(%s)" % (procname,
@ -319,14 +324,23 @@ class Cursor(object):
def _query(self, q):
conn = self._get_db()
self._last_executed = q
self._clear_result()
conn.query(q)
self._do_get_result()
return self.rowcount
def _clear_result(self):
self.rownumber = 0
self._result = None
self.rowcount = 0
self.description = None
self.lastrowid = None
self._rows = None
def _do_get_result(self):
conn = self._get_db()
self.rownumber = 0
self._result = result = conn._result
self.rowcount = result.affected_rows
@ -432,9 +446,12 @@ class SSCursor(Cursor):
finally:
self.connection = None
__del__ = close
def _query(self, q):
conn = self._get_db()
self._last_executed = q
self._clear_result()
conn.query(q, unbuffered=True)
self._do_get_result()
return self.rowcount
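# Illustrative sketch (not from the vendored source above): the relaxed
# \b/\s* pattern now recognizes VALUES( with no space before the parenthesis,
# so executemany() can still rewrite such statements into one bulk INSERT.
# The table and column names here are made up for the example.
from pymysql.cursors import RE_INSERT_VALUES

m = RE_INSERT_VALUES.match("INSERT INTO tbl (a, b) VALUES(%s, %s)")
print(m is not None)   # True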

View File

@ -78,7 +78,9 @@ _map_error(ProgrammingError, ER.DB_CREATE_EXISTS, ER.SYNTAX_ERROR,
ER.PARSE_ERROR, ER.NO_SUCH_TABLE, ER.WRONG_DB_NAME,
ER.WRONG_TABLE_NAME, ER.FIELD_SPECIFIED_TWICE,
ER.INVALID_GROUP_FUNC_USE, ER.UNSUPPORTED_EXTENSION,
ER.TABLE_MUST_HAVE_COLUMNS, ER.CANT_DO_THIS_DURING_AN_TRANSACTION)
ER.TABLE_MUST_HAVE_COLUMNS, ER.CANT_DO_THIS_DURING_AN_TRANSACTION,
ER.WRONG_DB_NAME, ER.WRONG_COLUMN_NAME,
)
_map_error(DataError, ER.WARN_DATA_TRUNCATED, ER.WARN_NULL_TO_NOTNULL,
ER.WARN_DATA_OUT_OF_RANGE, ER.NO_DEFAULT, ER.PRIMARY_CANT_HAVE_NULL,
ER.DATA_TOO_LONG, ER.DATETIME_FUNCTION_OVERFLOW)
@ -89,7 +91,7 @@ _map_error(NotSupportedError, ER.WARNING_NOT_COMPLETE_ROLLBACK,
ER.NOT_SUPPORTED_YET, ER.FEATURE_DISABLED, ER.UNKNOWN_STORAGE_ENGINE)
_map_error(OperationalError, ER.DBACCESS_DENIED_ERROR, ER.ACCESS_DENIED_ERROR,
ER.CON_COUNT_ERROR, ER.TABLEACCESS_DENIED_ERROR,
ER.COLUMNACCESS_DENIED_ERROR)
ER.COLUMNACCESS_DENIED_ERROR, ER.CONSTRAINT_FAILED, ER.LOCK_DEADLOCK)
del _map_error, ER
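# Illustrative sketch (not from the vendored source above): the new mapping
# routes MariaDB's CONSTRAINT_FAILED (4025) to OperationalError. Assumes
# pymysql.err keeps its module-level error_map dict, as in upstream 0.9.x.
from pymysql import err
from pymysql.constants import ER

print(err.error_map[ER.CONSTRAINT_FAILED] is err.OperationalError)   # True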

View File

@ -7,6 +7,9 @@ else:
class Parser(configparser.RawConfigParser):
def __init__(self, **kwargs):
kwargs['allow_no_value'] = True
configparser.RawConfigParser.__init__(self, **kwargs)
def __remove_quotes(self, value):
quotes = ["'", "\""]

View File

@ -0,0 +1,341 @@
# Python implementation of low level MySQL client-server protocol
# http://dev.mysql.com/doc/internals/en/client-server-protocol.html
from __future__ import print_function
from .charset import MBLENGTH
from ._compat import PY2, range_type
from .constants import FIELD_TYPE, SERVER_STATUS
from . import err
from .util import byte2int
import struct
import sys
DEBUG = False
NULL_COLUMN = 251
UNSIGNED_CHAR_COLUMN = 251
UNSIGNED_SHORT_COLUMN = 252
UNSIGNED_INT24_COLUMN = 253
UNSIGNED_INT64_COLUMN = 254
def dump_packet(data): # pragma: no cover
def printable(data):
if 32 <= byte2int(data) < 127:
if isinstance(data, int):
return chr(data)
return data
return '.'
try:
print("packet length:", len(data))
for i in range(1, 7):
f = sys._getframe(i)
print("call[%d]: %s (line %d)" % (i, f.f_code.co_name, f.f_lineno))
print("-" * 66)
except ValueError:
pass
dump_data = [data[i:i+16] for i in range_type(0, min(len(data), 256), 16)]
for d in dump_data:
print(' '.join("{:02X}".format(byte2int(x)) for x in d) +
' ' * (16 - len(d)) + ' ' * 2 +
''.join(printable(x) for x in d))
print("-" * 66)
print()
class MysqlPacket(object):
"""Representation of a MySQL response packet.
Provides an interface for reading/parsing the packet results.
"""
__slots__ = ('_position', '_data')
def __init__(self, data, encoding):
self._position = 0
self._data = data
def get_all_data(self):
return self._data
def read(self, size):
"""Read the first 'size' bytes in packet and advance cursor past them."""
result = self._data[self._position:(self._position+size)]
if len(result) != size:
error = ('Result length does not match requested length:\n'
'Expected=%s. Actual=%s. Position: %s. Data Length: %s'
% (size, len(result), self._position, len(self._data)))
if DEBUG:
print(error)
self.dump()
raise AssertionError(error)
self._position += size
return result
def read_all(self):
"""Read all remaining data in the packet.
(Subsequent read() will return errors.)
"""
result = self._data[self._position:]
self._position = None # ensure no subsequent read()
return result
def advance(self, length):
"""Advance the cursor in data buffer 'length' bytes."""
new_position = self._position + length
if new_position < 0 or new_position > len(self._data):
raise Exception('Invalid advance amount (%s) for cursor. '
'Position=%s' % (length, new_position))
self._position = new_position
def rewind(self, position=0):
"""Set the position of the data buffer cursor to 'position'."""
if position < 0 or position > len(self._data):
raise Exception("Invalid position to rewind cursor to: %s." % position)
self._position = position
def get_bytes(self, position, length=1):
"""Get 'length' bytes starting at 'position'.
Position is start of payload (first four packet header bytes are not
included) starting at index '0'.
No error checking is done. If requesting outside the end of the buffer,
an empty string (or a string shorter than 'length') may be returned!
"""
return self._data[position:(position+length)]
if PY2:
def read_uint8(self):
result = ord(self._data[self._position])
self._position += 1
return result
else:
def read_uint8(self):
result = self._data[self._position]
self._position += 1
return result
def read_uint16(self):
result = struct.unpack_from('<H', self._data, self._position)[0]
self._position += 2
return result
def read_uint24(self):
low, high = struct.unpack_from('<HB', self._data, self._position)
self._position += 3
return low + (high << 16)
def read_uint32(self):
result = struct.unpack_from('<I', self._data, self._position)[0]
self._position += 4
return result
def read_uint64(self):
result = struct.unpack_from('<Q', self._data, self._position)[0]
self._position += 8
return result
def read_string(self):
end_pos = self._data.find(b'\0', self._position)
if end_pos < 0:
return None
result = self._data[self._position:end_pos]
self._position = end_pos + 1
return result
def read_length_encoded_integer(self):
"""Read a 'Length Coded Binary' number from the data buffer.
Length coded numbers can be anywhere from 1 to 9 bytes depending
on the value of the first byte.
"""
c = self.read_uint8()
if c == NULL_COLUMN:
return None
if c < UNSIGNED_CHAR_COLUMN:
return c
elif c == UNSIGNED_SHORT_COLUMN:
return self.read_uint16()
elif c == UNSIGNED_INT24_COLUMN:
return self.read_uint24()
elif c == UNSIGNED_INT64_COLUMN:
return self.read_uint64()
def read_length_coded_string(self):
"""Read a 'Length Coded String' from the data buffer.
A 'Length Coded String' consists first of a length coded
(unsigned, positive) integer represented in 1-9 bytes followed by
that many bytes of binary data. (For example "cat" would be "3cat".)
"""
length = self.read_length_encoded_integer()
if length is None:
return None
return self.read(length)
def read_struct(self, fmt):
s = struct.Struct(fmt)
result = s.unpack_from(self._data, self._position)
self._position += s.size
return result
def is_ok_packet(self):
# https://dev.mysql.com/doc/internals/en/packet-OK_Packet.html
return self._data[0:1] == b'\0' and len(self._data) >= 7
def is_eof_packet(self):
# http://dev.mysql.com/doc/internals/en/generic-response-packets.html#packet-EOF_Packet
# Caution: \xFE may be LengthEncodedInteger.
# If \xFE is a LengthEncodedInteger header, 8 bytes follow.
return self._data[0:1] == b'\xfe' and len(self._data) < 9
def is_auth_switch_request(self):
# http://dev.mysql.com/doc/internals/en/connection-phase-packets.html#packet-Protocol::AuthSwitchRequest
return self._data[0:1] == b'\xfe'
def is_extra_auth_data(self):
# https://dev.mysql.com/doc/internals/en/successful-authentication.html
return self._data[0:1] == b'\x01'
def is_resultset_packet(self):
field_count = ord(self._data[0:1])
return 1 <= field_count <= 250
def is_load_local_packet(self):
return self._data[0:1] == b'\xfb'
def is_error_packet(self):
return self._data[0:1] == b'\xff'
def check_error(self):
if self.is_error_packet():
self.rewind()
self.advance(1) # field_count == error (we already know that)
errno = self.read_uint16()
if DEBUG: print("errno =", errno)
err.raise_mysql_exception(self._data)
def dump(self):
dump_packet(self._data)
class FieldDescriptorPacket(MysqlPacket):
"""A MysqlPacket that represents a specific column's metadata in the result.
Parsing is automatically done and the results are exported via public
attributes on the class such as: db, table_name, name, length, type_code.
"""
def __init__(self, data, encoding):
MysqlPacket.__init__(self, data, encoding)
self._parse_field_descriptor(encoding)
def _parse_field_descriptor(self, encoding):
"""Parse the 'Field Descriptor' (Metadata) packet.
This is compatible with MySQL 4.1+ (not compatible with MySQL 4.0).
"""
self.catalog = self.read_length_coded_string()
self.db = self.read_length_coded_string()
self.table_name = self.read_length_coded_string().decode(encoding)
self.org_table = self.read_length_coded_string().decode(encoding)
self.name = self.read_length_coded_string().decode(encoding)
self.org_name = self.read_length_coded_string().decode(encoding)
self.charsetnr, self.length, self.type_code, self.flags, self.scale = (
self.read_struct('<xHIBHBxx'))
# 'default' is a length coded binary and is still in the buffer?
# not used for normal result sets...
def description(self):
"""Provides a 7-item tuple compatible with the Python PEP249 DB Spec."""
return (
self.name,
self.type_code,
None, # TODO: display_length; should this be self.length?
self.get_column_length(), # 'internal_size'
self.get_column_length(), # 'precision' # TODO: why!?!?
self.scale,
self.flags % 2 == 0)
def get_column_length(self):
if self.type_code == FIELD_TYPE.VAR_STRING:
mblen = MBLENGTH.get(self.charsetnr, 1)
return self.length // mblen
return self.length
def __str__(self):
return ('%s %r.%r.%r, type=%s, flags=%x'
% (self.__class__, self.db, self.table_name, self.name,
self.type_code, self.flags))
class OKPacketWrapper(object):
"""
OK Packet Wrapper. It uses an existing packet object, and wraps
around it, exposing useful variables while still providing access
to the original packet object's variables and methods.
"""
def __init__(self, from_packet):
if not from_packet.is_ok_packet():
raise ValueError('Cannot create ' + str(self.__class__.__name__) +
' object from invalid packet type')
self.packet = from_packet
self.packet.advance(1)
self.affected_rows = self.packet.read_length_encoded_integer()
self.insert_id = self.packet.read_length_encoded_integer()
self.server_status, self.warning_count = self.read_struct('<HH')
self.message = self.packet.read_all()
self.has_next = self.server_status & SERVER_STATUS.SERVER_MORE_RESULTS_EXISTS
def __getattr__(self, key):
return getattr(self.packet, key)
class EOFPacketWrapper(object):
"""
EOF Packet Wrapper. It uses an existing packet object, and wraps
around it, exposing useful variables while still providing access
to the original packet object's variables and methods.
"""
def __init__(self, from_packet):
if not from_packet.is_eof_packet():
raise ValueError(
"Cannot create '{0}' object from invalid packet type".format(
self.__class__))
self.packet = from_packet
self.warning_count, self.server_status = self.packet.read_struct('<xhh')
if DEBUG: print("server_status=", self.server_status)
self.has_next = self.server_status & SERVER_STATUS.SERVER_MORE_RESULTS_EXISTS
def __getattr__(self, key):
return getattr(self.packet, key)
class LoadLocalPacketWrapper(object):
"""
Load Local Packet Wrapper. It uses an existing packet object, and wraps
around it, exposing useful variables while still providing access
to the original packet object's variables and methods.
"""
def __init__(self, from_packet):
if not from_packet.is_load_local_packet():
raise ValueError(
"Cannot create '{0}' object from invalid packet type".format(
self.__class__))
self.packet = from_packet
self.filename = self.packet.get_all_data()[1:]
if DEBUG: print("filename=", self.filename)
def __getattr__(self, key):
return getattr(self.packet, key)
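# Illustrative sketch (not from the vendored source above): decoding a
# length-encoded integer by hand. The 0xFC marker means the next two bytes
# are a little-endian uint16.
from pymysql.protocol import MysqlPacket

pkt = MysqlPacket(b"\xfc\x2c\x01", "utf8")
print(pkt.read_length_encoded_integer())   # 300 (0x012c)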

View File

@ -0,0 +1,50 @@
# Store all kinds of lookup table.
# # generate rsPoly lookup table.
# from qrcode import base
# def create_bytes(rs_blocks):
# for r in range(len(rs_blocks)):
# dcCount = rs_blocks[r].data_count
# ecCount = rs_blocks[r].total_count - dcCount
# rsPoly = base.Polynomial([1], 0)
# for i in range(ecCount):
# rsPoly = rsPoly * base.Polynomial([1, base.gexp(i)], 0)
# return ecCount, rsPoly
# rsPoly_LUT = {}
# for version in range(1,41):
# for error_correction in range(4):
# rs_blocks_list = base.rs_blocks(version, error_correction)
# ecCount, rsPoly = create_bytes(rs_blocks_list)
# rsPoly_LUT[ecCount]=rsPoly.num
# print(rsPoly_LUT)
# Result. Usage: input: ecCount, output: Polynomial.num
# e.g. rsPoly = base.Polynomial(LUT.rsPoly_LUT[ecCount], 0)
rsPoly_LUT = {
7: [1, 127, 122, 154, 164, 11, 68, 117],
10: [1, 216, 194, 159, 111, 199, 94, 95, 113, 157, 193],
13: [1, 137, 73, 227, 17, 177, 17, 52, 13, 46, 43, 83, 132, 120],
15: [1, 29, 196, 111, 163, 112, 74, 10, 105, 105, 139, 132, 151,
32, 134, 26],
16: [1, 59, 13, 104, 189, 68, 209, 30, 8, 163, 65, 41, 229, 98, 50, 36, 59],
17: [1, 119, 66, 83, 120, 119, 22, 197, 83, 249, 41, 143, 134, 85, 53, 125,
99, 79],
18: [1, 239, 251, 183, 113, 149, 175, 199, 215, 240, 220, 73, 82, 173, 75,
32, 67, 217, 146],
20: [1, 152, 185, 240, 5, 111, 99, 6, 220, 112, 150, 69, 36, 187, 22, 228,
198, 121, 121, 165, 174],
22: [1, 89, 179, 131, 176, 182, 244, 19, 189, 69, 40, 28, 137, 29, 123, 67,
253, 86, 218, 230, 26, 145, 245],
24: [1, 122, 118, 169, 70, 178, 237, 216, 102, 115, 150, 229, 73, 130, 72,
61, 43, 206, 1, 237, 247, 127, 217, 144, 117],
26: [1, 246, 51, 183, 4, 136, 98, 199, 152, 77, 56, 206, 24, 145, 40, 209,
117, 233, 42, 135, 68, 70, 144, 146, 77, 43, 94],
28: [1, 252, 9, 28, 13, 18, 251, 208, 150, 103, 174, 100, 41, 167, 12, 247,
56, 117, 119, 233, 127, 181, 100, 121, 147, 176, 74, 58, 197],
30: [1, 212, 246, 77, 73, 195, 192, 75, 98, 5, 70, 103, 177, 22, 217, 138,
51, 181, 246, 72, 25, 18, 46, 228, 74, 216, 195, 11, 106, 130, 150]
}
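# Illustrative sketch (not from the vendored source above; assumes the updated
# qrcode package from this commit): the table entry for 7 error-correction
# codewords equals the generator polynomial computed the long way, which is
# what util.create_bytes() relies on later in this diff.
from qrcode import LUT, base

rs_poly = base.Polynomial([1], 0)
for i in range(7):
    rs_poly = rs_poly * base.Polynomial([1, base.gexp(i)], 0)
print(rs_poly.num == LUT.rsPoly_LUT[7])   # True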

View File

@ -288,16 +288,13 @@ class Polynomial:
if not num: # pragma: no cover
raise Exception("%s/%s" % (len(num), shift))
offset = 0
for item in num:
if item != 0:
for offset in range(len(num)):
if num[offset] != 0:
break
else:
offset += 1
self.num = [0] * (len(num) - offset + shift)
for i in range(len(num) - offset):
self.num[i] = num[i + offset]
self.num = num[offset:] + [0] * shift
def __getitem__(self, index):
return self.num[index]
@ -324,8 +321,6 @@ class Polynomial:
ratio = glog(self[0]) - glog(other[0])
num = self[:]
num = [
item ^ gexp(glog(other_item) + ratio)
for item, other_item in zip(self, other)]

View File

@ -30,8 +30,12 @@ error_correction = {
}
def main(args=sys.argv[1:]):
parser = optparse.OptionParser(usage=__doc__.strip())
def main(args=None):
if args is None:
args = sys.argv[1:]
from pkg_resources import get_distribution
version = get_distribution('qrcode').version
parser = optparse.OptionParser(usage=__doc__.strip(), version=version)
parser.add_option(
"--factory", help="Full python path to the image factory class to "
"create the image with. You can use the following shortcuts to the "

View File

@ -18,8 +18,8 @@ class PilImage(qrcode.image.base.BaseImage):
kind = "PNG"
def new_image(self, **kwargs):
back_color = kwargs.get("fill_color", "white")
fill_color = kwargs.get("back_color", "black")
back_color = kwargs.get("back_color", "white")
fill_color = kwargs.get("fill_color", "black")
if fill_color.lower() != "black" or back_color.lower() != "white":
if back_color.lower() == "transparent":
@ -29,6 +29,9 @@ class PilImage(qrcode.image.base.BaseImage):
mode = "RGB"
else:
mode = "1"
# L mode (1 mode) color = (r*299 + g*587 + b*114)//1000
if fill_color.lower() == "black": fill_color = 0
if back_color.lower() == "white": back_color = 255
img = Image.new(mode, (self.pixel_size, self.pixel_size), back_color)
self.fill_color = fill_color
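# Illustrative sketch (not from the vendored source above): with the swapped
# defaults fixed, the fill_color/back_color keyword arguments reach
# new_image() with their intended meaning. Requires Pillow; the output file
# name is arbitrary.
import qrcode

qr = qrcode.QRCode(box_size=4, border=2)
qr.add_data("hello")
img = qr.make_image(fill_color="black", back_color="white")
img.save("hello.png")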

View File

@ -23,12 +23,23 @@ def _check_box_size(size):
"Invalid box size (was %s, expected larger than 0)" % size)
def _check_mask_pattern(mask_pattern):
if mask_pattern is None:
return
if not isinstance(mask_pattern, int):
raise TypeError(
"Invalid mask pattern (was %s, expected int)" % type(mask_pattern))
if mask_pattern < 0 or mask_pattern > 7:
raise ValueError(
"Mask pattern should be in range(8) (got %s)" % mask_pattern)
class QRCode:
def __init__(self, version=None,
error_correction=constants.ERROR_CORRECT_M,
box_size=10, border=4,
image_factory=None):
image_factory=None,
mask_pattern=None):
_check_box_size(box_size)
self.version = version and int(version)
self.error_correction = int(error_correction)
@ -36,6 +47,8 @@ class QRCode:
# Spec says border should be at least four boxes wide, but allow for
# any (e.g. for producing printable QR codes).
self.border = int(border)
_check_mask_pattern(mask_pattern)
self.mask_pattern = mask_pattern
self.image_factory = image_factory
if image_factory is not None:
assert issubclass(image_factory, BaseImage)
@ -62,7 +75,8 @@ class QRCode:
self.data_list.append(data)
else:
if optimize:
self.data_list.extend(util.optimal_data_chunks(data))
self.data_list.extend(
util.optimal_data_chunks(data, minimum=optimize))
else:
self.data_list.append(util.QRData(data))
self.data_cache = None
@ -76,7 +90,10 @@ class QRCode:
"""
if fit or (self.version is None):
self.best_fit(start=self.version)
self.makeImpl(False, self.best_mask_pattern())
if self.mask_pattern is None:
self.makeImpl(False, self.best_mask_pattern())
else:
self.makeImpl(False, self.mask_pattern)
def makeImpl(self, test, mask_pattern):
_check_version(self.version)
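# Illustrative sketch (not from the vendored source above): the new optional
# mask_pattern argument pins the mask instead of letting best_mask_pattern()
# choose one; it must be an int in range(8) or None.
import qrcode

qr = qrcode.QRCode(mask_pattern=3)
qr.add_data("hello world")
qr.make(fit=True)
print(qr.version, qr.mask_pattern)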

View File

@ -1,33 +0,0 @@
import six
# {'code': 'N', 'label': 'Name', 'required': True, 'multipart': [
# 'Last Name', 'First Name']},
PROPERTIES = {
'NICKNAME': {'label': 'Nickname'},
'BDAY': {'label': 'Birthday', 'date': True},
'TEL': {'label': 'Phone'},
'EMAIL': {'label': 'E-mail'},
'ADR': {'label': 'Address', 'multipart': [
'PO Box', 'Room Number', 'House Number', 'City', 'Prefecture',
'Zip Code', 'Country']},
'URL': {'label': 'URL'},
'MEMO': {'label': 'Note'},
}
def build_code(data):
notation = []
name = data['N']
if not isinstance(name, six.text_type):
name = ','.join(name)
notation.append('N', name)
for prop in PROPERTIES:
value = data.get(prop['code'])
if not value:
continue
if prop['date']:
value = value.strftime('%Y%m%d')
elif prop['multipart']:
value = ','.join(value)

View File

@ -0,0 +1,42 @@
"""
This file provides zest.releaser entrypoints used when releasing new
qrcode versions.
"""
import os
import re
import datetime
def update_manpage(data):
"""
Update the version in the manpage document.
"""
if data['name'] != 'qrcode':
print('no qrcode')
return
base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
filename = os.path.join(base_dir, 'doc', 'qr.1')
with open(filename, 'r') as f:
lines = f.readlines()
changed = False
for i, line in enumerate(lines):
if not line.startswith('.TH '):
continue
parts = re.split(r'"([^"]*)"', line)
if len(parts) < 5:
continue
changed = parts[3] != data['new_version']
if changed:
# Update version
parts[3] = data['new_version']
# Update date
parts[1] = datetime.datetime.now().strftime('%-d %b %Y')
lines[i] = '"'.join(parts)
break
if changed:
with open(filename, 'w') as f:
for line in lines:
f.write(line)

View File

@ -1,8 +0,0 @@
import string
import qrcode
qr = qrcode.QRCode()
qr.add_data(string.letters*13)
qr.make()
print(qr.version)

View File

@ -4,7 +4,7 @@ import math
import six
from six.moves import xrange
from qrcode import base, exceptions
from qrcode import base, exceptions, LUT
# QR encoding modes.
MODE_NUMBER = 1 << 0
@ -189,51 +189,39 @@ def _lost_point_level1(modules, modules_count):
lost_point = 0
modules_range = xrange(modules_count)
row_range_first = (0, 1)
row_range_last = (-1, 0)
row_range_standard = (-1, 0, 1)
col_range_first = ((0, 1), (1,))
col_range_last = ((-1, 0), (-1,))
col_range_standard = ((-1, 0, 1), (-1, 1))
container = [0] * (modules_count + 1)
for row in modules_range:
if row == 0:
row_range = row_range_first
elif row == modules_count-1:
row_range = row_range_last
else:
row_range = row_range_standard
this_row = modules[row]
previous_color = this_row[0]
length = 0
for col in modules_range:
sameCount = 0
dark = modules[row][col]
if col == 0:
col_range = col_range_first
elif col == modules_count-1:
col_range = col_range_last
if this_row[col] == previous_color:
length += 1
else:
col_range = col_range_standard
if length >= 5:
container[length] += 1
length = 1
previous_color = this_row[col]
if length >= 5:
container[length] += 1
for r in row_range:
for col in modules_range:
previous_color = modules[0][col]
length = 0
for row in modules_range:
if modules[row][col] == previous_color:
length += 1
else:
if length >= 5:
container[length] += 1
length = 1
previous_color = modules[row][col]
if length >= 5:
container[length] += 1
row_offset = row + r
if r != 0:
col_idx = 0
else:
col_idx = 1
for c in col_range[col_idx]:
if dark == modules[row_offset][col + c]:
sameCount += 1
if sameCount > 5:
lost_point += (3 + sameCount - 5)
lost_point += sum(container[each_length] * (each_length - 2)
for each_length in xrange(5, modules_count + 1))
return lost_point
@ -242,68 +230,111 @@ def _lost_point_level2(modules, modules_count):
lost_point = 0
modules_range = xrange(modules_count - 1)
for row in modules_range:
this_row = modules[row]
next_row = modules[row+1]
for col in modules_range:
count = 0
if this_row[col]:
count += 1
if next_row[col]:
count += 1
if this_row[col + 1]:
count += 1
if next_row[col + 1]:
count += 1
if count == 0 or count == 4:
next_row = modules[row + 1]
# use iter() and next() to skip the next four-module block, e.g.
# d a f if top-right a != bottom-right b,
# c b e then both abcd and abef won't lose any point.
modules_range_iter = iter(modules_range)
for col in modules_range_iter:
top_right = this_row[col + 1]
if top_right != next_row[col + 1]:
# skipping a column via next() cuts roughly a third of the runtime.
# the None default means nothing is raised when the iterator is exhausted.
next(modules_range_iter, None)
elif top_right != this_row[col]:
continue
elif top_right != next_row[col]:
continue
else:
lost_point += 3
return lost_point
def _lost_point_level3(modules, modules_count):
modules_range_short = xrange(modules_count-6)
# 1 : 1 : 3 : 1 : 1 ratio (dark:light:dark:light:dark) pattern in
# row/column, preceded or followed by a light area 4 modules wide. From ISO/IEC 18004.
# pattern1: 10111010000
# pattern2: 00001011101
modules_range = xrange(modules_count)
modules_range_short = xrange(modules_count-10)
lost_point = 0
for row in xrange(modules_count):
for row in modules_range:
this_row = modules[row]
for col in modules_range_short:
if (this_row[col]
and not this_row[col + 1]
and this_row[col + 2]
and this_row[col + 3]
modules_range_short_iter = iter(modules_range_short)
col = 0
for col in modules_range_short_iter:
if (
not this_row[col + 1]
and this_row[col + 4]
and not this_row[col + 5]
and this_row[col + 6]):
and this_row[col + 6]
and not this_row[col + 9]
and (
this_row[col + 0]
and this_row[col + 2]
and this_row[col + 3]
and not this_row[col + 7]
and not this_row[col + 8]
and not this_row[col + 10]
or
not this_row[col + 0]
and not this_row[col + 2]
and not this_row[col + 3]
and this_row[col + 7]
and this_row[col + 8]
and this_row[col + 10]
)
):
lost_point += 40
# horspool algorithm.
# if this_row[col + 10] == True, pattern1 shift 4, pattern2 shift 2. So min=2.
# if this_row[col + 10] == False, pattern1 shift 1, pattern2 shift 1. So min=1.
if this_row[col + 10]:
next(modules_range_short_iter, None)
for col in xrange(modules_count):
for row in modules_range_short:
if (modules[row][col]
and not modules[row + 1][col]
and modules[row + 2][col]
and modules[row + 3][col]
for col in modules_range:
modules_range_short_iter = iter(modules_range_short)
row = 0
for row in modules_range_short_iter:
if (
not modules[row + 1][col]
and modules[row + 4][col]
and not modules[row + 5][col]
and modules[row + 6][col]):
and modules[row + 6][col]
and not modules[row + 9][col]
and (
modules[row + 0][col]
and modules[row + 2][col]
and modules[row + 3][col]
and not modules[row + 7][col]
and not modules[row + 8][col]
and not modules[row + 10][col]
or
not modules[row + 0][col]
and not modules[row + 2][col]
and not modules[row + 3][col]
and modules[row + 7][col]
and modules[row + 8][col]
and modules[row + 10][col]
)
):
lost_point += 40
if modules[row + 10][col]:
next(modules_range_short_iter, None)
return lost_point
def _lost_point_level4(modules, modules_count):
modules_range = xrange(modules_count)
dark_count = 0
for row in modules_range:
this_row = modules[row]
for col in modules_range:
if this_row[col]:
dark_count += 1
ratio = abs(100 * dark_count / modules_count / modules_count - 50) / 5
return ratio * 10
dark_count = sum(map(sum, modules))
percent = float(dark_count) / (modules_count**2)
# Every 5% departure from 50%, rating++
rating = int(abs(percent * 100 - 50) / 5)
return rating * 10
def optimal_data_chunks(data, minimum=4):
@ -479,9 +510,12 @@ def create_bytes(buffer, rs_blocks):
offset += dcCount
# Get error correction polynomial.
rsPoly = base.Polynomial([1], 0)
for i in range(ecCount):
rsPoly = rsPoly * base.Polynomial([1, base.gexp(i)], 0)
if ecCount in LUT.rsPoly_LUT:
rsPoly = base.Polynomial(LUT.rsPoly_LUT[ecCount], 0)
else:
rsPoly = base.Polynomial([1], 0)
for i in range(ecCount):
rsPoly = rsPoly * base.Polynomial([1, base.gexp(i)], 0)
rawPoly = base.Polynomial(dcdata[r], len(rsPoly) - 1)

View File

@ -1,6 +1,4 @@
"""Utilities for writing code that runs on Python 2 and 3"""
# Copyright (c) 2010-2015 Benjamin Peterson
# Copyright (c) 2010-2017 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
@ -20,6 +18,8 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""Utilities for writing code that runs on Python 2 and 3"""
from __future__ import absolute_import
import functools
@ -29,7 +29,7 @@ import sys
import types
__author__ = "Benjamin Peterson <benjamin@python.org>"
__version__ = "1.10.0"
__version__ = "1.11.0"
# Useful for very coarse version differentiation.
@ -241,6 +241,7 @@ _moved_attributes = [
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
MovedAttribute("getoutput", "commands", "subprocess"),
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
MovedAttribute("reduce", "__builtin__", "functools"),
@ -262,10 +263,11 @@ _moved_attributes = [
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
MovedModule("html_parser", "HTMLParser", "html.parser"),
MovedModule("http_client", "httplib", "http.client"),
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"),
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
@ -337,10 +339,12 @@ _urllib_parse_moved_attributes = [
MovedAttribute("quote_plus", "urllib", "urllib.parse"),
MovedAttribute("unquote", "urllib", "urllib.parse"),
MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"),
MovedAttribute("urlencode", "urllib", "urllib.parse"),
MovedAttribute("splitquery", "urllib", "urllib.parse"),
MovedAttribute("splittag", "urllib", "urllib.parse"),
MovedAttribute("splituser", "urllib", "urllib.parse"),
MovedAttribute("splitvalue", "urllib", "urllib.parse"),
MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
MovedAttribute("uses_params", "urlparse", "urllib.parse"),
@ -416,6 +420,8 @@ _urllib_request_moved_attributes = [
MovedAttribute("URLopener", "urllib", "urllib.request"),
MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
MovedAttribute("parse_http_list", "urllib2", "urllib.request"),
MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"),
]
for attr in _urllib_request_moved_attributes:
setattr(Module_six_moves_urllib_request, attr.name, attr)
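The additions above register more moved names (getoutput, the splitquery family, unquote_to_bytes, parse_http_list and parse_keqv_list), so the same import works on Python 2 and 3. A brief usage sketch, assuming six 1.11.0 is installed:

# Import moved names through six so one code path runs on Python 2 and 3.
from six.moves import range, getoutput
from six.moves.urllib.parse import urlencode, unquote_to_bytes

print(list(range(3)))               # xrange on py2, range on py3
print(urlencode({"q": "tornado"}))  # urllib vs urllib.parse
print(unquote_to_bytes("a%20b"))    # b'a b'
print(getoutput("echo hello"))      # commands vs subprocess backend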
@ -679,11 +685,15 @@ if PY3:
exec_ = getattr(moves.builtins, "exec")
def reraise(tp, value, tb=None):
if value is None:
value = tp()
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
try:
if value is None:
value = tp()
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
finally:
value = None
tb = None
else:
def exec_(_code_, _globs_=None, _locs_=None):
@ -699,19 +709,28 @@ else:
exec("""exec _code_ in _globs_, _locs_""")
exec_("""def reraise(tp, value, tb=None):
raise tp, value, tb
try:
raise tp, value, tb
finally:
tb = None
""")
if sys.version_info[:2] == (3, 2):
exec_("""def raise_from(value, from_value):
if from_value is None:
raise value
raise value from from_value
try:
if from_value is None:
raise value
raise value from from_value
finally:
value = None
""")
elif sys.version_info[:2] > (3, 2):
exec_("""def raise_from(value, from_value):
raise value from from_value
try:
raise value from from_value
finally:
value = None
""")
else:
def raise_from(value, from_value):
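Both reraise and raise_from now clear their local references in a finally block so the exception and traceback objects cannot keep frames alive through a reference cycle. A small usage sketch of reraise (the exception type here is arbitrary):

# Re-raise an exception captured earlier while preserving its traceback;
# six clears its own locals afterwards to avoid frame reference cycles.
import sys
import six


def parse(value):
    try:
        return int(value)
    except ValueError:
        exc_info = sys.exc_info()
        # ... perhaps log or clean up here ...
        six.reraise(*exc_info)


try:
    parse("not-a-number")
except ValueError as exc:
    print("re-raised with original traceback:", exc)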
@ -802,10 +821,14 @@ def with_metaclass(meta, *bases):
# This requires a bit of explanation: the basic idea is to make a dummy
# metaclass for one level of class instantiation that replaces itself with
# the actual metaclass.
class metaclass(meta):
class metaclass(type):
def __new__(cls, name, this_bases, d):
return meta(name, bases, d)
@classmethod
def __prepare__(cls, name, this_bases):
return meta.__prepare__(name, bases)
return type.__new__(metaclass, 'temporary_class', (), {})
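with_metaclass now derives its dummy metaclass from type and forwards __prepare__, so metaclasses that customize the class namespace behave the same on Python 2 and 3. Typical usage, with an illustrative registry metaclass that is not part of six:

# Declare a class with a metaclass in a way that works on Python 2 and 3.
import six


class Registry(type):
    classes = {}

    def __new__(mcls, name, bases, namespace):
        cls = super(Registry, mcls).__new__(mcls, name, bases, namespace)
        mcls.classes[name] = cls
        return cls


class Plugin(six.with_metaclass(Registry, object)):
    pass


print(Registry.classes)  # {'Plugin': <class '...Plugin'>}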

View File

@ -1,4 +1,3 @@
#!/usr/bin/env python
#
# Copyright 2009 Facebook
#
@ -25,5 +24,5 @@ from __future__ import absolute_import, division, print_function
# is zero for an official release, positive for a development branch,
# or negative for a release candidate or beta (after the base version
# number has been incremented)
version = "4.5.1"
version_info = (4, 5, 1, 0)
version = "5.1"
version_info = (5, 1, 0, 0)
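version_info mirrors the version string in tuple form, with the last component zero for an official release, positive for a development branch, and negative for a release candidate or beta, so code can gate behavior on it. For example (the threshold below is illustrative):

# Gate on the packaged Tornado version.
import tornado

if tornado.version_info >= (5, 1, 0, 0):
    print("Tornado %s emits the 5.1 deprecation warnings" % tornado.version)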

View File

@ -1,5 +1,4 @@
#!/usr/bin/env python
# coding: utf-8
# -*- coding: utf-8 -*-
#
# Copyright 2012 Facebook
#

View File

@ -1,4 +1,3 @@
#!/usr/bin/env python
#
# Copyright 2009 Facebook
#
@ -38,15 +37,14 @@ Example usage for Google OAuth:
class GoogleOAuth2LoginHandler(tornado.web.RequestHandler,
tornado.auth.GoogleOAuth2Mixin):
@tornado.gen.coroutine
def get(self):
async def get(self):
if self.get_argument('code', False):
user = yield self.get_authenticated_user(
user = await self.get_authenticated_user(
redirect_uri='http://your.site.com/auth/google',
code=self.get_argument('code'))
# Save the user with e.g. set_secure_cookie
else:
yield self.authorize_redirect(
await self.authorize_redirect(
redirect_uri='http://your.site.com/auth/google',
client_id=self.settings['google_oauth']['key'],
scope=['profile', 'email'],
@ -74,8 +72,11 @@ import hashlib
import hmac
import time
import uuid
import warnings
from tornado.concurrent import TracebackFuture, return_future, chain_future
from tornado.concurrent import (Future, _non_deprecated_return_future,
future_set_exc_info, chain_future,
future_set_result_unless_cancelled)
from tornado import gen
from tornado import httpclient
from tornado import escape
@ -112,14 +113,19 @@ def _auth_return_future(f):
Note that when using this decorator the ``callback`` parameter
inside the function will actually be a future.
.. deprecated:: 5.1
Will be removed in 6.0.
"""
replacer = ArgReplacer(f, 'callback')
@functools.wraps(f)
def wrapper(*args, **kwargs):
future = TracebackFuture()
future = Future()
callback, args, kwargs = replacer.replace(future, args, kwargs)
if callback is not None:
warnings.warn("callback arguments are deprecated, use the returned Future instead",
DeprecationWarning)
future.add_done_callback(
functools.partial(_auth_future_to_callback, callback))
@ -127,9 +133,9 @@ def _auth_return_future(f):
if future.done():
return False
else:
future.set_exc_info((typ, value, tb))
future_set_exc_info(future, (typ, value, tb))
return True
with ExceptionStackContext(handle_exception):
with ExceptionStackContext(handle_exception, delay_warning=True):
f(*args, **kwargs)
return future
return wrapper
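The rewritten decorator creates a plain Future, warns when a legacy callback keyword is still passed, and chains that callback onto the Future with add_done_callback. The same bridging pattern in isolation, with invented names and a concurrent.futures.Future standing in for tornado's Future:

# Sketch of bridging a legacy callback argument onto a returned Future.
import functools
import warnings
from concurrent.futures import Future


def callback_to_future(fn):
    @functools.wraps(fn)
    def wrapper(*args, **kwargs):
        future = Future()
        callback = kwargs.pop("callback", None)
        if callback is not None:
            warnings.warn("callback arguments are deprecated, use the "
                          "returned Future instead", DeprecationWarning)
            future.add_done_callback(lambda fut: callback(fut.result()))
        fn(future, *args, **kwargs)      # fn resolves the future when done
        return future
    return wrapper


@callback_to_future
def fetch_data(future, key):
    future.set_result({"key": key})      # stand-in for an async operation


result_future = fetch_data("user:1")
print(result_future.result())            # {'key': 'user:1'}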
@ -142,7 +148,7 @@ class OpenIdMixin(object):
* ``_OPENID_ENDPOINT``: the identity provider's URI.
"""
@return_future
@_non_deprecated_return_future
def authenticate_redirect(self, callback_uri=None,
ax_attrs=["name", "email", "language", "username"],
callback=None):
@ -161,6 +167,11 @@ class OpenIdMixin(object):
not strictly necessary as this method is synchronous,
but they are supplied for consistency with
`OAuthMixin.authorize_redirect`.
.. deprecated:: 5.1
The ``callback`` argument and returned awaitable will be removed
in Tornado 6.0; this will be an ordinary synchronous function.
"""
callback_uri = callback_uri or self.request.uri
args = self._openid_args(callback_uri, ax_attrs=ax_attrs)
@ -178,6 +189,11 @@ class OpenIdMixin(object):
is present and `authenticate_redirect` if it is not).
The result of this method will generally be used to set a cookie.
.. deprecated:: 5.1
The ``callback`` argument is deprecated and will be removed in 6.0.
Use the returned awaitable object instead.
"""
# Verify the OpenID response via direct request to the OP
args = dict((k, v[-1]) for k, v in self.request.arguments.items())
@ -185,9 +201,9 @@ class OpenIdMixin(object):
url = self._OPENID_ENDPOINT
if http_client is None:
http_client = self.get_auth_http_client()
http_client.fetch(url, functools.partial(
self._on_authentication_verified, callback),
method="POST", body=urllib_parse.urlencode(args))
fut = http_client.fetch(url, method="POST", body=urllib_parse.urlencode(args))
fut.add_done_callback(functools.partial(
self._on_authentication_verified, callback))
def _openid_args(self, callback_uri, ax_attrs=[], oauth_scope=None):
url = urlparse.urljoin(self.request.full_url(), callback_uri)
@ -237,11 +253,16 @@ class OpenIdMixin(object):
})
return args
def _on_authentication_verified(self, future, response):
if response.error or b"is_valid:true" not in response.body:
def _on_authentication_verified(self, future, response_fut):
try:
response = response_fut.result()
except Exception as e:
future.set_exception(AuthError(
"Invalid OpenID response: %s" % (response.error or
response.body)))
"Error response %s" % e))
return
if b"is_valid:true" not in response.body:
future.set_exception(AuthError(
"Invalid OpenID response: %s" % response.body))
return
# Make sure we got back at least an email from attribute exchange
@ -295,7 +316,7 @@ class OpenIdMixin(object):
claimed_id = self.get_argument("openid.claimed_id", None)
if claimed_id:
user["claimed_id"] = claimed_id
future.set_result(user)
future_set_result_unless_cancelled(future, user)
def get_auth_http_client(self):
"""Returns the `.AsyncHTTPClient` instance to be used for auth requests.
@ -322,48 +343,52 @@ class OAuthMixin(object):
Subclasses must also override the `_oauth_get_user_future` and
`_oauth_consumer_token` methods.
"""
@return_future
@_non_deprecated_return_future
def authorize_redirect(self, callback_uri=None, extra_params=None,
http_client=None, callback=None):
"""Redirects the user to obtain OAuth authorization for this service.
The ``callback_uri`` may be omitted if you have previously
registered a callback URI with the third-party service. For
some services (including Friendfeed), you must use a
previously-registered callback URI and cannot specify a
callback via this method.
registered a callback URI with the third-party service. For
some services, you must use a previously-registered callback
URI and cannot specify a callback via this method.
This method sets a cookie called ``_oauth_request_token`` which is
subsequently used (and cleared) in `get_authenticated_user` for
security purposes.
Note that this method is asynchronous, although it calls
`.RequestHandler.finish` for you so it may not be necessary
to pass a callback or use the `.Future` it returns. However,
if this method is called from a function decorated with
`.gen.coroutine`, you must call it with ``yield`` to keep the
response from being closed prematurely.
This method is asynchronous and must be called with ``await``
or ``yield`` (This is different from other ``auth*_redirect``
methods defined in this module). It calls
`.RequestHandler.finish` for you so you should not write any
other response after it returns.
.. versionchanged:: 3.1
Now returns a `.Future` and takes an optional callback, for
compatibility with `.gen.coroutine`.
.. deprecated:: 5.1
The ``callback`` argument is deprecated and will be removed in 6.0.
Use the returned awaitable object instead.
"""
if callback_uri and getattr(self, "_OAUTH_NO_CALLBACKS", False):
raise Exception("This service does not support oauth_callback")
if http_client is None:
http_client = self.get_auth_http_client()
if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a":
http_client.fetch(
fut = http_client.fetch(
self._oauth_request_token_url(callback_uri=callback_uri,
extra_params=extra_params),
functools.partial(
self._on_request_token,
self._OAUTH_AUTHORIZE_URL,
callback_uri,
callback))
extra_params=extra_params))
fut.add_done_callback(functools.partial(
self._on_request_token,
self._OAUTH_AUTHORIZE_URL,
callback_uri,
callback))
else:
http_client.fetch(
self._oauth_request_token_url(),
fut = http_client.fetch(self._oauth_request_token_url())
fut.add_done_callback(
functools.partial(
self._on_request_token, self._OAUTH_AUTHORIZE_URL,
callback_uri,
@ -380,6 +405,11 @@ class OAuthMixin(object):
requests to this service on behalf of the user. The dictionary will
also contain other fields such as ``name``, depending on the service
used.
.. deprecated:: 5.1
The ``callback`` argument is deprecated and will be removed in 6.0.
Use the returned awaitable object instead.
"""
future = callback
request_key = escape.utf8(self.get_argument("oauth_token"))
@ -390,7 +420,8 @@ class OAuthMixin(object):
"Missing OAuth request token cookie"))
return
self.clear_cookie("_oauth_request_token")
cookie_key, cookie_secret = [base64.b64decode(escape.utf8(i)) for i in request_cookie.split("|")]
cookie_key, cookie_secret = [
base64.b64decode(escape.utf8(i)) for i in request_cookie.split("|")]
if cookie_key != request_key:
future.set_exception(AuthError(
"Request token does not match cookie"))
@ -400,8 +431,8 @@ class OAuthMixin(object):
token["verifier"] = oauth_verifier
if http_client is None:
http_client = self.get_auth_http_client()
http_client.fetch(self._oauth_access_token_url(token),
functools.partial(self._on_access_token, callback))
fut = http_client.fetch(self._oauth_access_token_url(token))
fut.add_done_callback(functools.partial(self._on_access_token, callback))
def _oauth_request_token_url(self, callback_uri=None, extra_params=None):
consumer_token = self._oauth_consumer_token()
@ -429,9 +460,11 @@ class OAuthMixin(object):
return url + "?" + urllib_parse.urlencode(args)
def _on_request_token(self, authorize_url, callback_uri, callback,
response):
if response.error:
raise Exception("Could not get request token: %s" % response.error)
response_fut):
try:
response = response_fut.result()
except Exception as e:
raise Exception("Could not get request token: %s" % e)
request_token = _oauth_parse_response(response.body)
data = (base64.b64encode(escape.utf8(request_token["key"])) + b"|" +
base64.b64encode(escape.utf8(request_token["secret"])))
@ -471,13 +504,17 @@ class OAuthMixin(object):
args["oauth_signature"] = signature
return url + "?" + urllib_parse.urlencode(args)
def _on_access_token(self, future, response):
if response.error:
def _on_access_token(self, future, response_fut):
try:
response = response_fut.result()
except Exception:
future.set_exception(AuthError("Could not fetch access token"))
return
access_token = _oauth_parse_response(response.body)
self._oauth_get_user_future(access_token).add_done_callback(
fut = self._oauth_get_user_future(access_token)
fut = gen.convert_yielded(fut)
fut.add_done_callback(
functools.partial(self._on_oauth_get_user, access_token, future))
def _oauth_consumer_token(self):
@ -487,7 +524,7 @@ class OAuthMixin(object):
"""
raise NotImplementedError()
@return_future
@_non_deprecated_return_future
def _oauth_get_user_future(self, access_token, callback):
"""Subclasses must override this to get basic information about the
user.
@ -502,7 +539,18 @@ class OAuthMixin(object):
For backwards compatibility, the callback-based ``_oauth_get_user``
method is also supported.
.. versionchanged:: 5.1
Subclasses may also define this method with ``async def``.
.. deprecated:: 5.1
The ``_oauth_get_user`` fallback is deprecated and support for it
will be removed in 6.0.
"""
warnings.warn("_oauth_get_user is deprecated, override _oauth_get_user_future instead",
DeprecationWarning)
# By default, call the old-style _oauth_get_user, but new code
# should override this method instead.
self._oauth_get_user(access_token, callback)
@ -519,7 +567,7 @@ class OAuthMixin(object):
future.set_exception(AuthError("Error getting user"))
return
user["access_token"] = access_token
future.set_result(user)
future_set_result_unless_cancelled(future, user)
def _oauth_request_parameters(self, url, access_token, parameters={},
method="GET"):
@ -569,7 +617,7 @@ class OAuth2Mixin(object):
* ``_OAUTH_AUTHORIZE_URL``: The service's authorization url.
* ``_OAUTH_ACCESS_TOKEN_URL``: The service's access token url.
"""
@return_future
@_non_deprecated_return_future
def authorize_redirect(self, redirect_uri=None, client_id=None,
client_secret=None, extra_params=None,
callback=None, scope=None, response_type="code"):
@ -586,6 +634,11 @@ class OAuth2Mixin(object):
not strictly necessary as this method is synchronous,
but they are supplied for consistency with
`OAuthMixin.authorize_redirect`.
.. deprecated:: 5.1
The ``callback`` argument and returned awaitable will be removed
in Tornado 6.0; this will be an ordinary synchronous function.
"""
args = {
"redirect_uri": redirect_uri,
@ -629,16 +682,15 @@ class OAuth2Mixin(object):
class MainHandler(tornado.web.RequestHandler,
tornado.auth.FacebookGraphMixin):
@tornado.web.authenticated
@tornado.gen.coroutine
def get(self):
new_entry = yield self.oauth2_request(
async def get(self):
new_entry = await self.oauth2_request(
"https://graph.facebook.com/me/feed",
post_args={"message": "I am posting from my Tornado application!"},
access_token=self.current_user["access_token"])
if not new_entry:
# Call failed; perhaps missing permission?
yield self.authorize_redirect()
await self.authorize_redirect()
return
self.finish("Posted a message!")
@ -646,6 +698,11 @@ class OAuth2Mixin(object):
:hide:
.. versionadded:: 4.3
.. deprecated:: 5.1
The ``callback`` argument is deprecated and will be removed in 6.0.
Use the returned awaitable object instead.
"""
all_args = {}
if access_token:
@ -657,18 +714,19 @@ class OAuth2Mixin(object):
callback = functools.partial(self._on_oauth2_request, callback)
http = self.get_auth_http_client()
if post_args is not None:
http.fetch(url, method="POST", body=urllib_parse.urlencode(post_args),
callback=callback)
fut = http.fetch(url, method="POST", body=urllib_parse.urlencode(post_args))
else:
http.fetch(url, callback=callback)
fut = http.fetch(url)
fut.add_done_callback(callback)
def _on_oauth2_request(self, future, response):
if response.error:
future.set_exception(AuthError("Error response %s fetching %s" %
(response.error, response.request.url)))
def _on_oauth2_request(self, future, response_fut):
try:
response = response_fut.result()
except Exception as e:
future.set_exception(AuthError("Error response %s" % e))
return
future.set_result(escape.json_decode(response.body))
future_set_result_unless_cancelled(future, escape.json_decode(response.body))
def get_auth_http_client(self):
"""Returns the `.AsyncHTTPClient` instance to be used for auth requests.
@ -698,13 +756,12 @@ class TwitterMixin(OAuthMixin):
class TwitterLoginHandler(tornado.web.RequestHandler,
tornado.auth.TwitterMixin):
@tornado.gen.coroutine
def get(self):
async def get(self):
if self.get_argument("oauth_token", None):
user = yield self.get_authenticated_user()
user = await self.get_authenticated_user()
# Save the user using e.g. set_secure_cookie()
else:
yield self.authorize_redirect()
await self.authorize_redirect()
.. testoutput::
:hide:
@ -721,7 +778,7 @@ class TwitterMixin(OAuthMixin):
_OAUTH_NO_CALLBACKS = False
_TWITTER_BASE_URL = "https://api.twitter.com/1.1"
@return_future
@_non_deprecated_return_future
def authenticate_redirect(self, callback_uri=None, callback=None):
"""Just like `~OAuthMixin.authorize_redirect`, but
auto-redirects if authorized.
@ -732,6 +789,11 @@ class TwitterMixin(OAuthMixin):
.. versionchanged:: 3.1
Now returns a `.Future` and takes an optional callback, for
compatibility with `.gen.coroutine`.
.. deprecated:: 5.1
The ``callback`` argument is deprecated and will be removed in 6.0.
Use the returned awaitable object instead.
"""
http = self.get_auth_http_client()
http.fetch(self._oauth_request_token_url(callback_uri=callback_uri),
@ -764,9 +826,8 @@ class TwitterMixin(OAuthMixin):
class MainHandler(tornado.web.RequestHandler,
tornado.auth.TwitterMixin):
@tornado.web.authenticated
@tornado.gen.coroutine
def get(self):
new_entry = yield self.twitter_request(
async def get(self):
new_entry = await self.twitter_request(
"/statuses/update",
post_args={"status": "Testing Tornado Web Server"},
access_token=self.current_user["access_token"])
@ -779,6 +840,10 @@ class TwitterMixin(OAuthMixin):
.. testoutput::
:hide:
.. deprecated:: 5.1
The ``callback`` argument is deprecated and will be removed in 6.0.
Use the returned awaitable object instead.
"""
if path.startswith('http:') or path.startswith('https:'):
# Raw urls are useful for e.g. search which doesn't follow the
@ -798,20 +863,21 @@ class TwitterMixin(OAuthMixin):
if args:
url += "?" + urllib_parse.urlencode(args)
http = self.get_auth_http_client()
http_callback = functools.partial(self._on_twitter_request, callback)
http_callback = functools.partial(self._on_twitter_request, callback, url)
if post_args is not None:
http.fetch(url, method="POST", body=urllib_parse.urlencode(post_args),
callback=http_callback)
fut = http.fetch(url, method="POST", body=urllib_parse.urlencode(post_args))
else:
http.fetch(url, callback=http_callback)
fut = http.fetch(url)
fut.add_done_callback(http_callback)
def _on_twitter_request(self, future, response):
if response.error:
def _on_twitter_request(self, future, url, response_fut):
try:
response = response_fut.result()
except Exception as e:
future.set_exception(AuthError(
"Error response %s fetching %s" % (response.error,
response.request.url)))
"Error response %s fetching %s" % (e, url)))
return
future.set_result(escape.json_decode(response.body))
future_set_result_unless_cancelled(future, escape.json_decode(response.body))
def _oauth_consumer_token(self):
self.require_setting("twitter_consumer_key", "Twitter OAuth")
@ -848,8 +914,8 @@ class GoogleOAuth2Mixin(OAuth2Mixin):
.. versionadded:: 3.2
"""
_OAUTH_AUTHORIZE_URL = "https://accounts.google.com/o/oauth2/auth"
_OAUTH_ACCESS_TOKEN_URL = "https://accounts.google.com/o/oauth2/token"
_OAUTH_AUTHORIZE_URL = "https://accounts.google.com/o/oauth2/v2/auth"
_OAUTH_ACCESS_TOKEN_URL = "https://www.googleapis.com/oauth2/v4/token"
_OAUTH_USERINFO_URL = "https://www.googleapis.com/oauth2/v1/userinfo"
_OAUTH_NO_CALLBACKS = False
_OAUTH_SETTINGS_KEY = 'google_oauth'
@ -872,19 +938,18 @@ class GoogleOAuth2Mixin(OAuth2Mixin):
class GoogleOAuth2LoginHandler(tornado.web.RequestHandler,
tornado.auth.GoogleOAuth2Mixin):
@tornado.gen.coroutine
def get(self):
async def get(self):
if self.get_argument('code', False):
access = yield self.get_authenticated_user(
access = await self.get_authenticated_user(
redirect_uri='http://your.site.com/auth/google',
code=self.get_argument('code'))
user = yield self.oauth2_request(
user = await self.oauth2_request(
"https://www.googleapis.com/oauth2/v1/userinfo",
access_token=access["access_token"])
# Save the user and access token with
# e.g. set_secure_cookie.
else:
yield self.authorize_redirect(
await self.authorize_redirect(
redirect_uri='http://your.site.com/auth/google',
client_id=self.settings['google_oauth']['key'],
scope=['profile', 'email'],
@ -894,7 +959,11 @@ class GoogleOAuth2Mixin(OAuth2Mixin):
.. testoutput::
:hide:
"""
.. deprecated:: 5.1
The ``callback`` argument is deprecated and will be removed in 6.0.
Use the returned awaitable object instead.
""" # noqa: E501
http = self.get_auth_http_client()
body = urllib_parse.urlencode({
"redirect_uri": redirect_uri,
@ -904,18 +973,22 @@ class GoogleOAuth2Mixin(OAuth2Mixin):
"grant_type": "authorization_code",
})
http.fetch(self._OAUTH_ACCESS_TOKEN_URL,
functools.partial(self._on_access_token, callback),
method="POST", headers={'Content-Type': 'application/x-www-form-urlencoded'}, body=body)
fut = http.fetch(self._OAUTH_ACCESS_TOKEN_URL,
method="POST",
headers={'Content-Type': 'application/x-www-form-urlencoded'},
body=body)
fut.add_done_callback(functools.partial(self._on_access_token, callback))
def _on_access_token(self, future, response):
def _on_access_token(self, future, response_fut):
"""Callback function for the exchange to the access token."""
if response.error:
future.set_exception(AuthError('Google auth error: %s' % str(response)))
try:
response = response_fut.result()
except Exception as e:
future.set_exception(AuthError('Google auth error: %s' % str(e)))
return
args = escape.json_decode(response.body)
future.set_result(args)
future_set_result_unless_cancelled(future, args)
class FacebookGraphMixin(OAuth2Mixin):
@ -936,17 +1009,16 @@ class FacebookGraphMixin(OAuth2Mixin):
class FacebookGraphLoginHandler(tornado.web.RequestHandler,
tornado.auth.FacebookGraphMixin):
@tornado.gen.coroutine
def get(self):
async def get(self):
if self.get_argument("code", False):
user = yield self.get_authenticated_user(
user = await self.get_authenticated_user(
redirect_uri='/auth/facebookgraph/',
client_id=self.settings["facebook_api_key"],
client_secret=self.settings["facebook_secret"],
code=self.get_argument("code"))
# Save the user with e.g. set_secure_cookie
else:
yield self.authorize_redirect(
await self.authorize_redirect(
redirect_uri='/auth/facebookgraph/',
client_id=self.settings["facebook_api_key"],
extra_params={"scope": "read_stream,offline_access"})
@ -963,11 +1035,17 @@ class FacebookGraphMixin(OAuth2Mixin):
Tornado it will change from a string to an integer.
* ``id``, ``name``, ``first_name``, ``last_name``, ``locale``, ``picture``,
``link``, plus any fields named in the ``extra_fields`` argument. These
fields are copied from the Facebook graph API `user object <https://developers.facebook.com/docs/graph-api/reference/user>`_
fields are copied from the Facebook graph API
`user object <https://developers.facebook.com/docs/graph-api/reference/user>`_
.. versionchanged:: 4.5
The ``session_expires`` field was updated to support changes made to the
Facebook API in March 2017.
.. deprecated:: 5.1
The ``callback`` argument is deprecated and will be removed in 6.0.
Use the returned awaitable object instead.
"""
http = self.get_auth_http_client()
args = {
@ -982,14 +1060,17 @@ class FacebookGraphMixin(OAuth2Mixin):
if extra_fields:
fields.update(extra_fields)
http.fetch(self._oauth_request_token_url(**args),
functools.partial(self._on_access_token, redirect_uri, client_id,
client_secret, callback, fields))
fut = http.fetch(self._oauth_request_token_url(**args))
fut.add_done_callback(functools.partial(self._on_access_token, redirect_uri, client_id,
client_secret, callback, fields))
@gen.coroutine
def _on_access_token(self, redirect_uri, client_id, client_secret,
future, fields, response):
if response.error:
future.set_exception(AuthError('Facebook auth error: %s' % str(response)))
future, fields, response_fut):
try:
response = response_fut.result()
except Exception as e:
future.set_exception(AuthError('Facebook auth error: %s' % str(e)))
return
args = escape.json_decode(response.body)
@ -998,10 +1079,8 @@ class FacebookGraphMixin(OAuth2Mixin):
"expires_in": args.get("expires_in")
}
self.facebook_request(
user = yield self.facebook_request(
path="/me",
callback=functools.partial(
self._on_get_user_info, future, session, fields),
access_token=session["access_token"],
appsecret_proof=hmac.new(key=client_secret.encode('utf8'),
msg=session["access_token"].encode('utf8'),
@ -1009,9 +1088,8 @@ class FacebookGraphMixin(OAuth2Mixin):
fields=",".join(fields)
)
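The facebook_request call above now supplies an appsecret_proof, which is simply an HMAC-SHA256 of the access token keyed by the app secret, hex-encoded. Computed on its own (the secret and token below are placeholders):

# Compute Facebook's appsecret_proof: HMAC-SHA256(access_token, key=app_secret).
import hashlib
import hmac

client_secret = "app-secret-placeholder"
access_token = "access-token-placeholder"

appsecret_proof = hmac.new(key=client_secret.encode("utf8"),
                           msg=access_token.encode("utf8"),
                           digestmod=hashlib.sha256).hexdigest()
print(appsecret_proof)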
def _on_get_user_info(self, future, session, fields, user):
if user is None:
future.set_result(None)
future_set_result_unless_cancelled(future, None)
return
fieldmap = {}
@ -1024,7 +1102,7 @@ class FacebookGraphMixin(OAuth2Mixin):
# This should change in Tornado 5.0.
fieldmap.update({"access_token": session["access_token"],
"session_expires": str(session.get("expires_in"))})
future.set_result(fieldmap)
future_set_result_unless_cancelled(future, fieldmap)
@_auth_return_future
def facebook_request(self, path, callback, access_token=None,
@ -1045,14 +1123,13 @@ class FacebookGraphMixin(OAuth2Mixin):
Example usage:
..testcode::
.. testcode::
class MainHandler(tornado.web.RequestHandler,
tornado.auth.FacebookGraphMixin):
@tornado.web.authenticated
@tornado.gen.coroutine
def get(self):
new_entry = yield self.facebook_request(
async def get(self):
new_entry = await self.facebook_request(
"/me/feed",
post_args={"message": "I am posting from my Tornado application!"},
access_token=self.current_user["access_token"])
@ -1075,6 +1152,11 @@ class FacebookGraphMixin(OAuth2Mixin):
.. versionchanged:: 3.1
Added the ability to override ``self._FACEBOOK_BASE_URL``.
.. deprecated:: 5.1
The ``callback`` argument is deprecated and will be removed in 6.0.
Use the returned awaitable object instead.
"""
url = self._FACEBOOK_BASE_URL + path
# Thanks to the _auth_return_future decorator, our "callback"

Some files were not shown because too many files have changed in this diff.