Ausgabe der neuen DB Einträge
This commit is contained in:
parent
bad48e1627
commit
cfbbb9ee3d
2399 changed files with 843193 additions and 43 deletions
|
|
@ -0,0 +1,11 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
"""
|
||||
Hazardous Materials
|
||||
|
||||
This is a "Hazardous Materials" module. You should ONLY use it if you're
|
||||
100% absolutely sure that you know what you're doing because this module
|
||||
is full of land mines, dragons, and dinosaurs with laser guns.
|
||||
"""
|
||||
from __future__ import absolute_import, division, print_function
|
||||
156
venv/lib/python3.9/site-packages/cryptography/hazmat/_der.py
Normal file
156
venv/lib/python3.9/site-packages/cryptography/hazmat/_der.py
Normal file
|
|
@ -0,0 +1,156 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import six
|
||||
|
||||
from cryptography.utils import int_from_bytes, int_to_bytes
|
||||
|
||||
|
||||
# This module contains a lightweight DER encoder and decoder. See X.690 for the
|
||||
# specification. This module intentionally does not implement the more complex
|
||||
# BER encoding, only DER.
|
||||
#
|
||||
# Note this implementation treats an element's constructed bit as part of the
|
||||
# tag. This is fine for DER, where the bit is always computable from the type.
|
||||
|
||||
|
||||
CONSTRUCTED = 0x20
|
||||
CONTEXT_SPECIFIC = 0x80
|
||||
|
||||
INTEGER = 0x02
|
||||
BIT_STRING = 0x03
|
||||
OCTET_STRING = 0x04
|
||||
NULL = 0x05
|
||||
OBJECT_IDENTIFIER = 0x06
|
||||
SEQUENCE = 0x10 | CONSTRUCTED
|
||||
SET = 0x11 | CONSTRUCTED
|
||||
PRINTABLE_STRING = 0x13
|
||||
UTC_TIME = 0x17
|
||||
GENERALIZED_TIME = 0x18
|
||||
|
||||
|
||||
class DERReader(object):
|
||||
def __init__(self, data):
|
||||
self.data = memoryview(data)
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_value, tb):
|
||||
if exc_value is None:
|
||||
self.check_empty()
|
||||
|
||||
def is_empty(self):
|
||||
return len(self.data) == 0
|
||||
|
||||
def check_empty(self):
|
||||
if not self.is_empty():
|
||||
raise ValueError("Invalid DER input: trailing data")
|
||||
|
||||
def read_byte(self):
|
||||
if len(self.data) < 1:
|
||||
raise ValueError("Invalid DER input: insufficient data")
|
||||
ret = six.indexbytes(self.data, 0)
|
||||
self.data = self.data[1:]
|
||||
return ret
|
||||
|
||||
def read_bytes(self, n):
|
||||
if len(self.data) < n:
|
||||
raise ValueError("Invalid DER input: insufficient data")
|
||||
ret = self.data[:n]
|
||||
self.data = self.data[n:]
|
||||
return ret
|
||||
|
||||
def read_any_element(self):
|
||||
tag = self.read_byte()
|
||||
# Tag numbers 31 or higher are stored in multiple bytes. No supported
|
||||
# ASN.1 types use such tags, so reject these.
|
||||
if tag & 0x1F == 0x1F:
|
||||
raise ValueError("Invalid DER input: unexpected high tag number")
|
||||
length_byte = self.read_byte()
|
||||
if length_byte & 0x80 == 0:
|
||||
# If the high bit is clear, the first length byte is the length.
|
||||
length = length_byte
|
||||
else:
|
||||
# If the high bit is set, the first length byte encodes the length
|
||||
# of the length.
|
||||
length_byte &= 0x7F
|
||||
if length_byte == 0:
|
||||
raise ValueError(
|
||||
"Invalid DER input: indefinite length form is not allowed "
|
||||
"in DER"
|
||||
)
|
||||
length = 0
|
||||
for i in range(length_byte):
|
||||
length <<= 8
|
||||
length |= self.read_byte()
|
||||
if length == 0:
|
||||
raise ValueError(
|
||||
"Invalid DER input: length was not minimally-encoded"
|
||||
)
|
||||
if length < 0x80:
|
||||
# If the length could have been encoded in short form, it must
|
||||
# not use long form.
|
||||
raise ValueError(
|
||||
"Invalid DER input: length was not minimally-encoded"
|
||||
)
|
||||
body = self.read_bytes(length)
|
||||
return tag, DERReader(body)
|
||||
|
||||
def read_element(self, expected_tag):
|
||||
tag, body = self.read_any_element()
|
||||
if tag != expected_tag:
|
||||
raise ValueError("Invalid DER input: unexpected tag")
|
||||
return body
|
||||
|
||||
def read_single_element(self, expected_tag):
|
||||
with self:
|
||||
return self.read_element(expected_tag)
|
||||
|
||||
def read_optional_element(self, expected_tag):
|
||||
if len(self.data) > 0 and six.indexbytes(self.data, 0) == expected_tag:
|
||||
return self.read_element(expected_tag)
|
||||
return None
|
||||
|
||||
def as_integer(self):
|
||||
if len(self.data) == 0:
|
||||
raise ValueError("Invalid DER input: empty integer contents")
|
||||
first = six.indexbytes(self.data, 0)
|
||||
if first & 0x80 == 0x80:
|
||||
raise ValueError("Negative DER integers are not supported")
|
||||
# The first 9 bits must not all be zero or all be ones. Otherwise, the
|
||||
# encoding should have been one byte shorter.
|
||||
if len(self.data) > 1:
|
||||
second = six.indexbytes(self.data, 1)
|
||||
if first == 0 and second & 0x80 == 0:
|
||||
raise ValueError(
|
||||
"Invalid DER input: integer not minimally-encoded"
|
||||
)
|
||||
return int_from_bytes(self.data, "big")
|
||||
|
||||
|
||||
def encode_der_integer(x):
|
||||
if not isinstance(x, six.integer_types):
|
||||
raise ValueError("Value must be an integer")
|
||||
if x < 0:
|
||||
raise ValueError("Negative integers are not supported")
|
||||
n = x.bit_length() // 8 + 1
|
||||
return int_to_bytes(x, n)
|
||||
|
||||
|
||||
def encode_der(tag, *children):
|
||||
length = 0
|
||||
for child in children:
|
||||
length += len(child)
|
||||
chunks = [six.int2byte(tag)]
|
||||
if length < 0x80:
|
||||
chunks.append(six.int2byte(length))
|
||||
else:
|
||||
length_bytes = int_to_bytes(length)
|
||||
chunks.append(six.int2byte(0x80 | len(length_bytes)))
|
||||
chunks.append(length_bytes)
|
||||
chunks.extend(children)
|
||||
return b"".join(chunks)
|
||||
77
venv/lib/python3.9/site-packages/cryptography/hazmat/_oid.py
Normal file
77
venv/lib/python3.9/site-packages/cryptography/hazmat/_oid.py
Normal file
|
|
@ -0,0 +1,77 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from cryptography import utils
|
||||
|
||||
|
||||
class ObjectIdentifier(object):
|
||||
def __init__(self, dotted_string):
|
||||
self._dotted_string = dotted_string
|
||||
|
||||
nodes = self._dotted_string.split(".")
|
||||
intnodes = []
|
||||
|
||||
# There must be at least 2 nodes, the first node must be 0..2, and
|
||||
# if less than 2, the second node cannot have a value outside the
|
||||
# range 0..39. All nodes must be integers.
|
||||
for node in nodes:
|
||||
try:
|
||||
node_value = int(node, 10)
|
||||
except ValueError:
|
||||
raise ValueError(
|
||||
"Malformed OID: %s (non-integer nodes)"
|
||||
% (self._dotted_string)
|
||||
)
|
||||
if node_value < 0:
|
||||
raise ValueError(
|
||||
"Malformed OID: %s (negative-integer nodes)"
|
||||
% (self._dotted_string)
|
||||
)
|
||||
intnodes.append(node_value)
|
||||
|
||||
if len(nodes) < 2:
|
||||
raise ValueError(
|
||||
"Malformed OID: %s (insufficient number of nodes)"
|
||||
% (self._dotted_string)
|
||||
)
|
||||
|
||||
if intnodes[0] > 2:
|
||||
raise ValueError(
|
||||
"Malformed OID: %s (first node outside valid range)"
|
||||
% (self._dotted_string)
|
||||
)
|
||||
|
||||
if intnodes[0] < 2 and intnodes[1] >= 40:
|
||||
raise ValueError(
|
||||
"Malformed OID: %s (second node outside valid range)"
|
||||
% (self._dotted_string)
|
||||
)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, ObjectIdentifier):
|
||||
return NotImplemented
|
||||
|
||||
return self.dotted_string == other.dotted_string
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
def __repr__(self):
|
||||
return "<ObjectIdentifier(oid={}, name={})>".format(
|
||||
self.dotted_string, self._name
|
||||
)
|
||||
|
||||
def __hash__(self):
|
||||
return hash(self.dotted_string)
|
||||
|
||||
@property
|
||||
def _name(self):
|
||||
# Lazy import to avoid an import cycle
|
||||
from cryptography.x509.oid import _OID_NAMES
|
||||
|
||||
return _OID_NAMES.get(self, "Unknown OID")
|
||||
|
||||
dotted_string = utils.read_only_property("_dotted_string")
|
||||
|
|
@ -0,0 +1,26 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
|
||||
_default_backend = None
|
||||
|
||||
|
||||
def default_backend():
|
||||
global _default_backend
|
||||
|
||||
if _default_backend is None:
|
||||
from cryptography.hazmat.backends.openssl.backend import backend
|
||||
|
||||
_default_backend = backend
|
||||
|
||||
return _default_backend
|
||||
|
||||
|
||||
def _get_backend(backend):
|
||||
if backend is None:
|
||||
return default_backend()
|
||||
else:
|
||||
return backend
|
||||
|
|
@ -0,0 +1,396 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import abc
|
||||
|
||||
import six
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class CipherBackend(object):
|
||||
@abc.abstractmethod
|
||||
def cipher_supported(self, cipher, mode):
|
||||
"""
|
||||
Return True if the given cipher and mode are supported.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def create_symmetric_encryption_ctx(self, cipher, mode):
|
||||
"""
|
||||
Get a CipherContext that can be used for encryption.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def create_symmetric_decryption_ctx(self, cipher, mode):
|
||||
"""
|
||||
Get a CipherContext that can be used for decryption.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class HashBackend(object):
|
||||
@abc.abstractmethod
|
||||
def hash_supported(self, algorithm):
|
||||
"""
|
||||
Return True if the hash algorithm is supported by this backend.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def create_hash_ctx(self, algorithm):
|
||||
"""
|
||||
Create a HashContext for calculating a message digest.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class HMACBackend(object):
|
||||
@abc.abstractmethod
|
||||
def hmac_supported(self, algorithm):
|
||||
"""
|
||||
Return True if the hash algorithm is supported for HMAC by this
|
||||
backend.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def create_hmac_ctx(self, key, algorithm):
|
||||
"""
|
||||
Create a context for calculating a message authentication code.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class CMACBackend(object):
|
||||
@abc.abstractmethod
|
||||
def cmac_algorithm_supported(self, algorithm):
|
||||
"""
|
||||
Returns True if the block cipher is supported for CMAC by this backend
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def create_cmac_ctx(self, algorithm):
|
||||
"""
|
||||
Create a context for calculating a message authentication code.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class PBKDF2HMACBackend(object):
|
||||
@abc.abstractmethod
|
||||
def pbkdf2_hmac_supported(self, algorithm):
|
||||
"""
|
||||
Return True if the hash algorithm is supported for PBKDF2 by this
|
||||
backend.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def derive_pbkdf2_hmac(
|
||||
self, algorithm, length, salt, iterations, key_material
|
||||
):
|
||||
"""
|
||||
Return length bytes derived from provided PBKDF2 parameters.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class RSABackend(object):
|
||||
@abc.abstractmethod
|
||||
def generate_rsa_private_key(self, public_exponent, key_size):
|
||||
"""
|
||||
Generate an RSAPrivateKey instance with public_exponent and a modulus
|
||||
of key_size bits.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def rsa_padding_supported(self, padding):
|
||||
"""
|
||||
Returns True if the backend supports the given padding options.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def generate_rsa_parameters_supported(self, public_exponent, key_size):
|
||||
"""
|
||||
Returns True if the backend supports the given parameters for key
|
||||
generation.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def load_rsa_private_numbers(self, numbers):
|
||||
"""
|
||||
Returns an RSAPrivateKey provider.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def load_rsa_public_numbers(self, numbers):
|
||||
"""
|
||||
Returns an RSAPublicKey provider.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class DSABackend(object):
|
||||
@abc.abstractmethod
|
||||
def generate_dsa_parameters(self, key_size):
|
||||
"""
|
||||
Generate a DSAParameters instance with a modulus of key_size bits.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def generate_dsa_private_key(self, parameters):
|
||||
"""
|
||||
Generate a DSAPrivateKey instance with parameters as a DSAParameters
|
||||
object.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def generate_dsa_private_key_and_parameters(self, key_size):
|
||||
"""
|
||||
Generate a DSAPrivateKey instance using key size only.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def dsa_hash_supported(self, algorithm):
|
||||
"""
|
||||
Return True if the hash algorithm is supported by the backend for DSA.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def dsa_parameters_supported(self, p, q, g):
|
||||
"""
|
||||
Return True if the parameters are supported by the backend for DSA.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def load_dsa_private_numbers(self, numbers):
|
||||
"""
|
||||
Returns a DSAPrivateKey provider.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def load_dsa_public_numbers(self, numbers):
|
||||
"""
|
||||
Returns a DSAPublicKey provider.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def load_dsa_parameter_numbers(self, numbers):
|
||||
"""
|
||||
Returns a DSAParameters provider.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class EllipticCurveBackend(object):
|
||||
@abc.abstractmethod
|
||||
def elliptic_curve_signature_algorithm_supported(
|
||||
self, signature_algorithm, curve
|
||||
):
|
||||
"""
|
||||
Returns True if the backend supports the named elliptic curve with the
|
||||
specified signature algorithm.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def elliptic_curve_supported(self, curve):
|
||||
"""
|
||||
Returns True if the backend supports the named elliptic curve.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def generate_elliptic_curve_private_key(self, curve):
|
||||
"""
|
||||
Return an object conforming to the EllipticCurvePrivateKey interface.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def load_elliptic_curve_public_numbers(self, numbers):
|
||||
"""
|
||||
Return an EllipticCurvePublicKey provider using the given numbers.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def load_elliptic_curve_private_numbers(self, numbers):
|
||||
"""
|
||||
Return an EllipticCurvePrivateKey provider using the given numbers.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def elliptic_curve_exchange_algorithm_supported(self, algorithm, curve):
|
||||
"""
|
||||
Returns whether the exchange algorithm is supported by this backend.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def derive_elliptic_curve_private_key(self, private_value, curve):
|
||||
"""
|
||||
Compute the private key given the private value and curve.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class PEMSerializationBackend(object):
|
||||
@abc.abstractmethod
|
||||
def load_pem_private_key(self, data, password):
|
||||
"""
|
||||
Loads a private key from PEM encoded data, using the provided password
|
||||
if the data is encrypted.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def load_pem_public_key(self, data):
|
||||
"""
|
||||
Loads a public key from PEM encoded data.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def load_pem_parameters(self, data):
|
||||
"""
|
||||
Load encryption parameters from PEM encoded data.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class DERSerializationBackend(object):
|
||||
@abc.abstractmethod
|
||||
def load_der_private_key(self, data, password):
|
||||
"""
|
||||
Loads a private key from DER encoded data. Uses the provided password
|
||||
if the data is encrypted.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def load_der_public_key(self, data):
|
||||
"""
|
||||
Loads a public key from DER encoded data.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def load_der_parameters(self, data):
|
||||
"""
|
||||
Load encryption parameters from DER encoded data.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class X509Backend(object):
|
||||
@abc.abstractmethod
|
||||
def load_pem_x509_certificate(self, data):
|
||||
"""
|
||||
Load an X.509 certificate from PEM encoded data.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def load_der_x509_certificate(self, data):
|
||||
"""
|
||||
Load an X.509 certificate from DER encoded data.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def load_der_x509_csr(self, data):
|
||||
"""
|
||||
Load an X.509 CSR from DER encoded data.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def load_pem_x509_csr(self, data):
|
||||
"""
|
||||
Load an X.509 CSR from PEM encoded data.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def create_x509_csr(self, builder, private_key, algorithm):
|
||||
"""
|
||||
Create and sign an X.509 CSR from a CSR builder object.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def create_x509_certificate(self, builder, private_key, algorithm):
|
||||
"""
|
||||
Create and sign an X.509 certificate from a CertificateBuilder object.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def create_x509_crl(self, builder, private_key, algorithm):
|
||||
"""
|
||||
Create and sign an X.509 CertificateRevocationList from a
|
||||
CertificateRevocationListBuilder object.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def create_x509_revoked_certificate(self, builder):
|
||||
"""
|
||||
Create a RevokedCertificate object from a RevokedCertificateBuilder
|
||||
object.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def x509_name_bytes(self, name):
|
||||
"""
|
||||
Compute the DER encoded bytes of an X509 Name object.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class DHBackend(object):
|
||||
@abc.abstractmethod
|
||||
def generate_dh_parameters(self, generator, key_size):
|
||||
"""
|
||||
Generate a DHParameters instance with a modulus of key_size bits.
|
||||
Using the given generator. Often 2 or 5.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def generate_dh_private_key(self, parameters):
|
||||
"""
|
||||
Generate a DHPrivateKey instance with parameters as a DHParameters
|
||||
object.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def generate_dh_private_key_and_parameters(self, generator, key_size):
|
||||
"""
|
||||
Generate a DHPrivateKey instance using key size only.
|
||||
Using the given generator. Often 2 or 5.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def load_dh_private_numbers(self, numbers):
|
||||
"""
|
||||
Load a DHPrivateKey from DHPrivateNumbers
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def load_dh_public_numbers(self, numbers):
|
||||
"""
|
||||
Load a DHPublicKey from DHPublicNumbers.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def load_dh_parameter_numbers(self, numbers):
|
||||
"""
|
||||
Load DHParameters from DHParameterNumbers.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def dh_parameters_supported(self, p, g, q=None):
|
||||
"""
|
||||
Returns whether the backend supports DH with these parameter values.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def dh_x942_serialization_supported(self):
|
||||
"""
|
||||
Returns True if the backend supports the serialization of DH objects
|
||||
with subgroup order (q).
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class ScryptBackend(object):
|
||||
@abc.abstractmethod
|
||||
def derive_scrypt(self, key_material, salt, length, n, r, p):
|
||||
"""
|
||||
Return bytes derived from provided Scrypt parameters.
|
||||
"""
|
||||
|
|
@ -0,0 +1,10 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from cryptography.hazmat.backends.openssl.backend import backend
|
||||
|
||||
|
||||
__all__ = ["backend"]
|
||||
|
|
@ -0,0 +1,166 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from cryptography.exceptions import InvalidTag
|
||||
|
||||
|
||||
_ENCRYPT = 1
|
||||
_DECRYPT = 0
|
||||
|
||||
|
||||
def _aead_cipher_name(cipher):
|
||||
from cryptography.hazmat.primitives.ciphers.aead import (
|
||||
AESCCM,
|
||||
AESGCM,
|
||||
ChaCha20Poly1305,
|
||||
)
|
||||
|
||||
if isinstance(cipher, ChaCha20Poly1305):
|
||||
return b"chacha20-poly1305"
|
||||
elif isinstance(cipher, AESCCM):
|
||||
return "aes-{}-ccm".format(len(cipher._key) * 8).encode("ascii")
|
||||
else:
|
||||
assert isinstance(cipher, AESGCM)
|
||||
return "aes-{}-gcm".format(len(cipher._key) * 8).encode("ascii")
|
||||
|
||||
|
||||
def _aead_setup(backend, cipher_name, key, nonce, tag, tag_len, operation):
|
||||
evp_cipher = backend._lib.EVP_get_cipherbyname(cipher_name)
|
||||
backend.openssl_assert(evp_cipher != backend._ffi.NULL)
|
||||
ctx = backend._lib.EVP_CIPHER_CTX_new()
|
||||
ctx = backend._ffi.gc(ctx, backend._lib.EVP_CIPHER_CTX_free)
|
||||
res = backend._lib.EVP_CipherInit_ex(
|
||||
ctx,
|
||||
evp_cipher,
|
||||
backend._ffi.NULL,
|
||||
backend._ffi.NULL,
|
||||
backend._ffi.NULL,
|
||||
int(operation == _ENCRYPT),
|
||||
)
|
||||
backend.openssl_assert(res != 0)
|
||||
res = backend._lib.EVP_CIPHER_CTX_set_key_length(ctx, len(key))
|
||||
backend.openssl_assert(res != 0)
|
||||
res = backend._lib.EVP_CIPHER_CTX_ctrl(
|
||||
ctx,
|
||||
backend._lib.EVP_CTRL_AEAD_SET_IVLEN,
|
||||
len(nonce),
|
||||
backend._ffi.NULL,
|
||||
)
|
||||
backend.openssl_assert(res != 0)
|
||||
if operation == _DECRYPT:
|
||||
res = backend._lib.EVP_CIPHER_CTX_ctrl(
|
||||
ctx, backend._lib.EVP_CTRL_AEAD_SET_TAG, len(tag), tag
|
||||
)
|
||||
backend.openssl_assert(res != 0)
|
||||
elif cipher_name.endswith(b"-ccm"):
|
||||
res = backend._lib.EVP_CIPHER_CTX_ctrl(
|
||||
ctx, backend._lib.EVP_CTRL_AEAD_SET_TAG, tag_len, backend._ffi.NULL
|
||||
)
|
||||
backend.openssl_assert(res != 0)
|
||||
|
||||
nonce_ptr = backend._ffi.from_buffer(nonce)
|
||||
key_ptr = backend._ffi.from_buffer(key)
|
||||
res = backend._lib.EVP_CipherInit_ex(
|
||||
ctx,
|
||||
backend._ffi.NULL,
|
||||
backend._ffi.NULL,
|
||||
key_ptr,
|
||||
nonce_ptr,
|
||||
int(operation == _ENCRYPT),
|
||||
)
|
||||
backend.openssl_assert(res != 0)
|
||||
return ctx
|
||||
|
||||
|
||||
def _set_length(backend, ctx, data_len):
|
||||
intptr = backend._ffi.new("int *")
|
||||
res = backend._lib.EVP_CipherUpdate(
|
||||
ctx, backend._ffi.NULL, intptr, backend._ffi.NULL, data_len
|
||||
)
|
||||
backend.openssl_assert(res != 0)
|
||||
|
||||
|
||||
def _process_aad(backend, ctx, associated_data):
|
||||
outlen = backend._ffi.new("int *")
|
||||
res = backend._lib.EVP_CipherUpdate(
|
||||
ctx, backend._ffi.NULL, outlen, associated_data, len(associated_data)
|
||||
)
|
||||
backend.openssl_assert(res != 0)
|
||||
|
||||
|
||||
def _process_data(backend, ctx, data):
|
||||
outlen = backend._ffi.new("int *")
|
||||
buf = backend._ffi.new("unsigned char[]", len(data))
|
||||
res = backend._lib.EVP_CipherUpdate(ctx, buf, outlen, data, len(data))
|
||||
backend.openssl_assert(res != 0)
|
||||
return backend._ffi.buffer(buf, outlen[0])[:]
|
||||
|
||||
|
||||
def _encrypt(backend, cipher, nonce, data, associated_data, tag_length):
|
||||
from cryptography.hazmat.primitives.ciphers.aead import AESCCM
|
||||
|
||||
cipher_name = _aead_cipher_name(cipher)
|
||||
ctx = _aead_setup(
|
||||
backend, cipher_name, cipher._key, nonce, None, tag_length, _ENCRYPT
|
||||
)
|
||||
# CCM requires us to pass the length of the data before processing anything
|
||||
# However calling this with any other AEAD results in an error
|
||||
if isinstance(cipher, AESCCM):
|
||||
_set_length(backend, ctx, len(data))
|
||||
|
||||
_process_aad(backend, ctx, associated_data)
|
||||
processed_data = _process_data(backend, ctx, data)
|
||||
outlen = backend._ffi.new("int *")
|
||||
res = backend._lib.EVP_CipherFinal_ex(ctx, backend._ffi.NULL, outlen)
|
||||
backend.openssl_assert(res != 0)
|
||||
backend.openssl_assert(outlen[0] == 0)
|
||||
tag_buf = backend._ffi.new("unsigned char[]", tag_length)
|
||||
res = backend._lib.EVP_CIPHER_CTX_ctrl(
|
||||
ctx, backend._lib.EVP_CTRL_AEAD_GET_TAG, tag_length, tag_buf
|
||||
)
|
||||
backend.openssl_assert(res != 0)
|
||||
tag = backend._ffi.buffer(tag_buf)[:]
|
||||
|
||||
return processed_data + tag
|
||||
|
||||
|
||||
def _decrypt(backend, cipher, nonce, data, associated_data, tag_length):
|
||||
from cryptography.hazmat.primitives.ciphers.aead import AESCCM
|
||||
|
||||
if len(data) < tag_length:
|
||||
raise InvalidTag
|
||||
tag = data[-tag_length:]
|
||||
data = data[:-tag_length]
|
||||
cipher_name = _aead_cipher_name(cipher)
|
||||
ctx = _aead_setup(
|
||||
backend, cipher_name, cipher._key, nonce, tag, tag_length, _DECRYPT
|
||||
)
|
||||
# CCM requires us to pass the length of the data before processing anything
|
||||
# However calling this with any other AEAD results in an error
|
||||
if isinstance(cipher, AESCCM):
|
||||
_set_length(backend, ctx, len(data))
|
||||
|
||||
_process_aad(backend, ctx, associated_data)
|
||||
# CCM has a different error path if the tag doesn't match. Errors are
|
||||
# raised in Update and Final is irrelevant.
|
||||
if isinstance(cipher, AESCCM):
|
||||
outlen = backend._ffi.new("int *")
|
||||
buf = backend._ffi.new("unsigned char[]", len(data))
|
||||
res = backend._lib.EVP_CipherUpdate(ctx, buf, outlen, data, len(data))
|
||||
if res != 1:
|
||||
backend._consume_errors()
|
||||
raise InvalidTag
|
||||
|
||||
processed_data = backend._ffi.buffer(buf, outlen[0])[:]
|
||||
else:
|
||||
processed_data = _process_data(backend, ctx, data)
|
||||
outlen = backend._ffi.new("int *")
|
||||
res = backend._lib.EVP_CipherFinal_ex(ctx, backend._ffi.NULL, outlen)
|
||||
if res == 0:
|
||||
backend._consume_errors()
|
||||
raise InvalidTag
|
||||
|
||||
return processed_data
|
||||
File diff suppressed because it is too large
Load diff
|
|
@ -0,0 +1,231 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.exceptions import InvalidTag, UnsupportedAlgorithm, _Reasons
|
||||
from cryptography.hazmat.primitives import ciphers
|
||||
from cryptography.hazmat.primitives.ciphers import modes
|
||||
|
||||
|
||||
@utils.register_interface(ciphers.CipherContext)
|
||||
@utils.register_interface(ciphers.AEADCipherContext)
|
||||
@utils.register_interface(ciphers.AEADEncryptionContext)
|
||||
@utils.register_interface(ciphers.AEADDecryptionContext)
|
||||
class _CipherContext(object):
|
||||
_ENCRYPT = 1
|
||||
_DECRYPT = 0
|
||||
_MAX_CHUNK_SIZE = 2 ** 31 - 1
|
||||
|
||||
def __init__(self, backend, cipher, mode, operation):
|
||||
self._backend = backend
|
||||
self._cipher = cipher
|
||||
self._mode = mode
|
||||
self._operation = operation
|
||||
self._tag = None
|
||||
|
||||
if isinstance(self._cipher, ciphers.BlockCipherAlgorithm):
|
||||
self._block_size_bytes = self._cipher.block_size // 8
|
||||
else:
|
||||
self._block_size_bytes = 1
|
||||
|
||||
ctx = self._backend._lib.EVP_CIPHER_CTX_new()
|
||||
ctx = self._backend._ffi.gc(
|
||||
ctx, self._backend._lib.EVP_CIPHER_CTX_free
|
||||
)
|
||||
|
||||
registry = self._backend._cipher_registry
|
||||
try:
|
||||
adapter = registry[type(cipher), type(mode)]
|
||||
except KeyError:
|
||||
raise UnsupportedAlgorithm(
|
||||
"cipher {} in {} mode is not supported "
|
||||
"by this backend.".format(
|
||||
cipher.name, mode.name if mode else mode
|
||||
),
|
||||
_Reasons.UNSUPPORTED_CIPHER,
|
||||
)
|
||||
|
||||
evp_cipher = adapter(self._backend, cipher, mode)
|
||||
if evp_cipher == self._backend._ffi.NULL:
|
||||
msg = "cipher {0.name} ".format(cipher)
|
||||
if mode is not None:
|
||||
msg += "in {0.name} mode ".format(mode)
|
||||
msg += (
|
||||
"is not supported by this backend (Your version of OpenSSL "
|
||||
"may be too old. Current version: {}.)"
|
||||
).format(self._backend.openssl_version_text())
|
||||
raise UnsupportedAlgorithm(msg, _Reasons.UNSUPPORTED_CIPHER)
|
||||
|
||||
if isinstance(mode, modes.ModeWithInitializationVector):
|
||||
iv_nonce = self._backend._ffi.from_buffer(
|
||||
mode.initialization_vector
|
||||
)
|
||||
elif isinstance(mode, modes.ModeWithTweak):
|
||||
iv_nonce = self._backend._ffi.from_buffer(mode.tweak)
|
||||
elif isinstance(mode, modes.ModeWithNonce):
|
||||
iv_nonce = self._backend._ffi.from_buffer(mode.nonce)
|
||||
elif isinstance(cipher, modes.ModeWithNonce):
|
||||
iv_nonce = self._backend._ffi.from_buffer(cipher.nonce)
|
||||
else:
|
||||
iv_nonce = self._backend._ffi.NULL
|
||||
# begin init with cipher and operation type
|
||||
res = self._backend._lib.EVP_CipherInit_ex(
|
||||
ctx,
|
||||
evp_cipher,
|
||||
self._backend._ffi.NULL,
|
||||
self._backend._ffi.NULL,
|
||||
self._backend._ffi.NULL,
|
||||
operation,
|
||||
)
|
||||
self._backend.openssl_assert(res != 0)
|
||||
# set the key length to handle variable key ciphers
|
||||
res = self._backend._lib.EVP_CIPHER_CTX_set_key_length(
|
||||
ctx, len(cipher.key)
|
||||
)
|
||||
self._backend.openssl_assert(res != 0)
|
||||
if isinstance(mode, modes.GCM):
|
||||
res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
|
||||
ctx,
|
||||
self._backend._lib.EVP_CTRL_AEAD_SET_IVLEN,
|
||||
len(iv_nonce),
|
||||
self._backend._ffi.NULL,
|
||||
)
|
||||
self._backend.openssl_assert(res != 0)
|
||||
if mode.tag is not None:
|
||||
res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
|
||||
ctx,
|
||||
self._backend._lib.EVP_CTRL_AEAD_SET_TAG,
|
||||
len(mode.tag),
|
||||
mode.tag,
|
||||
)
|
||||
self._backend.openssl_assert(res != 0)
|
||||
self._tag = mode.tag
|
||||
|
||||
# pass key/iv
|
||||
res = self._backend._lib.EVP_CipherInit_ex(
|
||||
ctx,
|
||||
self._backend._ffi.NULL,
|
||||
self._backend._ffi.NULL,
|
||||
self._backend._ffi.from_buffer(cipher.key),
|
||||
iv_nonce,
|
||||
operation,
|
||||
)
|
||||
self._backend.openssl_assert(res != 0)
|
||||
# We purposely disable padding here as it's handled higher up in the
|
||||
# API.
|
||||
self._backend._lib.EVP_CIPHER_CTX_set_padding(ctx, 0)
|
||||
self._ctx = ctx
|
||||
|
||||
def update(self, data):
|
||||
buf = bytearray(len(data) + self._block_size_bytes - 1)
|
||||
n = self.update_into(data, buf)
|
||||
return bytes(buf[:n])
|
||||
|
||||
def update_into(self, data, buf):
|
||||
total_data_len = len(data)
|
||||
if len(buf) < (total_data_len + self._block_size_bytes - 1):
|
||||
raise ValueError(
|
||||
"buffer must be at least {} bytes for this "
|
||||
"payload".format(len(data) + self._block_size_bytes - 1)
|
||||
)
|
||||
|
||||
data_processed = 0
|
||||
total_out = 0
|
||||
outlen = self._backend._ffi.new("int *")
|
||||
baseoutbuf = self._backend._ffi.from_buffer(buf)
|
||||
baseinbuf = self._backend._ffi.from_buffer(data)
|
||||
|
||||
while data_processed != total_data_len:
|
||||
outbuf = baseoutbuf + total_out
|
||||
inbuf = baseinbuf + data_processed
|
||||
inlen = min(self._MAX_CHUNK_SIZE, total_data_len - data_processed)
|
||||
|
||||
res = self._backend._lib.EVP_CipherUpdate(
|
||||
self._ctx, outbuf, outlen, inbuf, inlen
|
||||
)
|
||||
self._backend.openssl_assert(res != 0)
|
||||
data_processed += inlen
|
||||
total_out += outlen[0]
|
||||
|
||||
return total_out
|
||||
|
||||
def finalize(self):
|
||||
if (
|
||||
self._operation == self._DECRYPT
|
||||
and isinstance(self._mode, modes.ModeWithAuthenticationTag)
|
||||
and self.tag is None
|
||||
):
|
||||
raise ValueError(
|
||||
"Authentication tag must be provided when decrypting."
|
||||
)
|
||||
|
||||
buf = self._backend._ffi.new("unsigned char[]", self._block_size_bytes)
|
||||
outlen = self._backend._ffi.new("int *")
|
||||
res = self._backend._lib.EVP_CipherFinal_ex(self._ctx, buf, outlen)
|
||||
if res == 0:
|
||||
errors = self._backend._consume_errors()
|
||||
|
||||
if not errors and isinstance(self._mode, modes.GCM):
|
||||
raise InvalidTag
|
||||
|
||||
self._backend.openssl_assert(
|
||||
errors[0]._lib_reason_match(
|
||||
self._backend._lib.ERR_LIB_EVP,
|
||||
self._backend._lib.EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH,
|
||||
),
|
||||
errors=errors,
|
||||
)
|
||||
raise ValueError(
|
||||
"The length of the provided data is not a multiple of "
|
||||
"the block length."
|
||||
)
|
||||
|
||||
if (
|
||||
isinstance(self._mode, modes.GCM)
|
||||
and self._operation == self._ENCRYPT
|
||||
):
|
||||
tag_buf = self._backend._ffi.new(
|
||||
"unsigned char[]", self._block_size_bytes
|
||||
)
|
||||
res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
|
||||
self._ctx,
|
||||
self._backend._lib.EVP_CTRL_AEAD_GET_TAG,
|
||||
self._block_size_bytes,
|
||||
tag_buf,
|
||||
)
|
||||
self._backend.openssl_assert(res != 0)
|
||||
self._tag = self._backend._ffi.buffer(tag_buf)[:]
|
||||
|
||||
res = self._backend._lib.EVP_CIPHER_CTX_cleanup(self._ctx)
|
||||
self._backend.openssl_assert(res == 1)
|
||||
return self._backend._ffi.buffer(buf)[: outlen[0]]
|
||||
|
||||
def finalize_with_tag(self, tag):
|
||||
if len(tag) < self._mode._min_tag_length:
|
||||
raise ValueError(
|
||||
"Authentication tag must be {} bytes or longer.".format(
|
||||
self._mode._min_tag_length
|
||||
)
|
||||
)
|
||||
res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
|
||||
self._ctx, self._backend._lib.EVP_CTRL_AEAD_SET_TAG, len(tag), tag
|
||||
)
|
||||
self._backend.openssl_assert(res != 0)
|
||||
self._tag = tag
|
||||
return self.finalize()
|
||||
|
||||
def authenticate_additional_data(self, data):
|
||||
outlen = self._backend._ffi.new("int *")
|
||||
res = self._backend._lib.EVP_CipherUpdate(
|
||||
self._ctx,
|
||||
self._backend._ffi.NULL,
|
||||
outlen,
|
||||
self._backend._ffi.from_buffer(data),
|
||||
len(data),
|
||||
)
|
||||
self._backend.openssl_assert(res != 0)
|
||||
|
||||
tag = utils.read_only_property("_tag")
|
||||
|
|
@ -0,0 +1,82 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.exceptions import (
|
||||
InvalidSignature,
|
||||
UnsupportedAlgorithm,
|
||||
_Reasons,
|
||||
)
|
||||
from cryptography.hazmat.primitives import constant_time
|
||||
from cryptography.hazmat.primitives.ciphers.modes import CBC
|
||||
|
||||
|
||||
class _CMACContext(object):
|
||||
def __init__(self, backend, algorithm, ctx=None):
|
||||
if not backend.cmac_algorithm_supported(algorithm):
|
||||
raise UnsupportedAlgorithm(
|
||||
"This backend does not support CMAC.",
|
||||
_Reasons.UNSUPPORTED_CIPHER,
|
||||
)
|
||||
|
||||
self._backend = backend
|
||||
self._key = algorithm.key
|
||||
self._algorithm = algorithm
|
||||
self._output_length = algorithm.block_size // 8
|
||||
|
||||
if ctx is None:
|
||||
registry = self._backend._cipher_registry
|
||||
adapter = registry[type(algorithm), CBC]
|
||||
|
||||
evp_cipher = adapter(self._backend, algorithm, CBC)
|
||||
|
||||
ctx = self._backend._lib.CMAC_CTX_new()
|
||||
|
||||
self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
|
||||
ctx = self._backend._ffi.gc(ctx, self._backend._lib.CMAC_CTX_free)
|
||||
|
||||
key_ptr = self._backend._ffi.from_buffer(self._key)
|
||||
res = self._backend._lib.CMAC_Init(
|
||||
ctx,
|
||||
key_ptr,
|
||||
len(self._key),
|
||||
evp_cipher,
|
||||
self._backend._ffi.NULL,
|
||||
)
|
||||
self._backend.openssl_assert(res == 1)
|
||||
|
||||
self._ctx = ctx
|
||||
|
||||
algorithm = utils.read_only_property("_algorithm")
|
||||
|
||||
def update(self, data):
|
||||
res = self._backend._lib.CMAC_Update(self._ctx, data, len(data))
|
||||
self._backend.openssl_assert(res == 1)
|
||||
|
||||
def finalize(self):
|
||||
buf = self._backend._ffi.new("unsigned char[]", self._output_length)
|
||||
length = self._backend._ffi.new("size_t *", self._output_length)
|
||||
res = self._backend._lib.CMAC_Final(self._ctx, buf, length)
|
||||
self._backend.openssl_assert(res == 1)
|
||||
|
||||
self._ctx = None
|
||||
|
||||
return self._backend._ffi.buffer(buf)[:]
|
||||
|
||||
def copy(self):
|
||||
copied_ctx = self._backend._lib.CMAC_CTX_new()
|
||||
copied_ctx = self._backend._ffi.gc(
|
||||
copied_ctx, self._backend._lib.CMAC_CTX_free
|
||||
)
|
||||
res = self._backend._lib.CMAC_CTX_copy(copied_ctx, self._ctx)
|
||||
self._backend.openssl_assert(res == 1)
|
||||
return _CMACContext(self._backend, self._algorithm, ctx=copied_ctx)
|
||||
|
||||
def verify(self, signature):
|
||||
digest = self.finalize()
|
||||
if not constant_time.bytes_eq(digest, signature):
|
||||
raise InvalidSignature("Signature did not match digest.")
|
||||
|
|
@ -0,0 +1,878 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import datetime
|
||||
import ipaddress
|
||||
|
||||
import six
|
||||
|
||||
from cryptography import x509
|
||||
from cryptography.hazmat._der import DERReader, INTEGER, NULL, SEQUENCE
|
||||
from cryptography.x509.extensions import _TLS_FEATURE_TYPE_TO_ENUM
|
||||
from cryptography.x509.name import _ASN1_TYPE_TO_ENUM
|
||||
from cryptography.x509.oid import (
|
||||
CRLEntryExtensionOID,
|
||||
CertificatePoliciesOID,
|
||||
ExtensionOID,
|
||||
OCSPExtensionOID,
|
||||
)
|
||||
|
||||
|
||||
def _obj2txt(backend, obj):
|
||||
# Set to 80 on the recommendation of
|
||||
# https://www.openssl.org/docs/crypto/OBJ_nid2ln.html#return_values
|
||||
#
|
||||
# But OIDs longer than this occur in real life (e.g. Active
|
||||
# Directory makes some very long OIDs). So we need to detect
|
||||
# and properly handle the case where the default buffer is not
|
||||
# big enough.
|
||||
#
|
||||
buf_len = 80
|
||||
buf = backend._ffi.new("char[]", buf_len)
|
||||
|
||||
# 'res' is the number of bytes that *would* be written if the
|
||||
# buffer is large enough. If 'res' > buf_len - 1, we need to
|
||||
# alloc a big-enough buffer and go again.
|
||||
res = backend._lib.OBJ_obj2txt(buf, buf_len, obj, 1)
|
||||
if res > buf_len - 1: # account for terminating null byte
|
||||
buf_len = res + 1
|
||||
buf = backend._ffi.new("char[]", buf_len)
|
||||
res = backend._lib.OBJ_obj2txt(buf, buf_len, obj, 1)
|
||||
backend.openssl_assert(res > 0)
|
||||
return backend._ffi.buffer(buf, res)[:].decode()
|
||||
|
||||
|
||||
def _decode_x509_name_entry(backend, x509_name_entry):
|
||||
obj = backend._lib.X509_NAME_ENTRY_get_object(x509_name_entry)
|
||||
backend.openssl_assert(obj != backend._ffi.NULL)
|
||||
data = backend._lib.X509_NAME_ENTRY_get_data(x509_name_entry)
|
||||
backend.openssl_assert(data != backend._ffi.NULL)
|
||||
value = _asn1_string_to_utf8(backend, data)
|
||||
oid = _obj2txt(backend, obj)
|
||||
type = _ASN1_TYPE_TO_ENUM[data.type]
|
||||
|
||||
return x509.NameAttribute(x509.ObjectIdentifier(oid), value, type)
|
||||
|
||||
|
||||
def _decode_x509_name(backend, x509_name):
|
||||
count = backend._lib.X509_NAME_entry_count(x509_name)
|
||||
attributes = []
|
||||
prev_set_id = -1
|
||||
for x in range(count):
|
||||
entry = backend._lib.X509_NAME_get_entry(x509_name, x)
|
||||
attribute = _decode_x509_name_entry(backend, entry)
|
||||
set_id = backend._lib.Cryptography_X509_NAME_ENTRY_set(entry)
|
||||
if set_id != prev_set_id:
|
||||
attributes.append({attribute})
|
||||
else:
|
||||
# is in the same RDN a previous entry
|
||||
attributes[-1].add(attribute)
|
||||
prev_set_id = set_id
|
||||
|
||||
return x509.Name(x509.RelativeDistinguishedName(rdn) for rdn in attributes)
|
||||
|
||||
|
||||
def _decode_general_names(backend, gns):
|
||||
num = backend._lib.sk_GENERAL_NAME_num(gns)
|
||||
names = []
|
||||
for i in range(num):
|
||||
gn = backend._lib.sk_GENERAL_NAME_value(gns, i)
|
||||
backend.openssl_assert(gn != backend._ffi.NULL)
|
||||
names.append(_decode_general_name(backend, gn))
|
||||
|
||||
return names
|
||||
|
||||
|
||||
def _decode_general_name(backend, gn):
|
||||
if gn.type == backend._lib.GEN_DNS:
|
||||
# Convert to bytes and then decode to utf8. We don't use
|
||||
# asn1_string_to_utf8 here because it doesn't properly convert
|
||||
# utf8 from ia5strings.
|
||||
data = _asn1_string_to_bytes(backend, gn.d.dNSName).decode("utf8")
|
||||
# We don't use the constructor for DNSName so we can bypass validation
|
||||
# This allows us to create DNSName objects that have unicode chars
|
||||
# when a certificate (against the RFC) contains them.
|
||||
return x509.DNSName._init_without_validation(data)
|
||||
elif gn.type == backend._lib.GEN_URI:
|
||||
# Convert to bytes and then decode to utf8. We don't use
|
||||
# asn1_string_to_utf8 here because it doesn't properly convert
|
||||
# utf8 from ia5strings.
|
||||
data = _asn1_string_to_bytes(
|
||||
backend, gn.d.uniformResourceIdentifier
|
||||
).decode("utf8")
|
||||
# We don't use the constructor for URI so we can bypass validation
|
||||
# This allows us to create URI objects that have unicode chars
|
||||
# when a certificate (against the RFC) contains them.
|
||||
return x509.UniformResourceIdentifier._init_without_validation(data)
|
||||
elif gn.type == backend._lib.GEN_RID:
|
||||
oid = _obj2txt(backend, gn.d.registeredID)
|
||||
return x509.RegisteredID(x509.ObjectIdentifier(oid))
|
||||
elif gn.type == backend._lib.GEN_IPADD:
|
||||
data = _asn1_string_to_bytes(backend, gn.d.iPAddress)
|
||||
data_len = len(data)
|
||||
if data_len == 8 or data_len == 32:
|
||||
# This is an IPv4 or IPv6 Network and not a single IP. This
|
||||
# type of data appears in Name Constraints. Unfortunately,
|
||||
# ipaddress doesn't support packed bytes + netmask. Additionally,
|
||||
# IPv6Network can only handle CIDR rather than the full 16 byte
|
||||
# netmask. To handle this we convert the netmask to integer, then
|
||||
# find the first 0 bit, which will be the prefix. If another 1
|
||||
# bit is present after that the netmask is invalid.
|
||||
base = ipaddress.ip_address(data[: data_len // 2])
|
||||
netmask = ipaddress.ip_address(data[data_len // 2 :])
|
||||
bits = bin(int(netmask))[2:]
|
||||
prefix = bits.find("0")
|
||||
# If no 0 bits are found it is a /32 or /128
|
||||
if prefix == -1:
|
||||
prefix = len(bits)
|
||||
|
||||
if "1" in bits[prefix:]:
|
||||
raise ValueError("Invalid netmask")
|
||||
|
||||
ip = ipaddress.ip_network(base.exploded + u"/{}".format(prefix))
|
||||
else:
|
||||
ip = ipaddress.ip_address(data)
|
||||
|
||||
return x509.IPAddress(ip)
|
||||
elif gn.type == backend._lib.GEN_DIRNAME:
|
||||
return x509.DirectoryName(
|
||||
_decode_x509_name(backend, gn.d.directoryName)
|
||||
)
|
||||
elif gn.type == backend._lib.GEN_EMAIL:
|
||||
# Convert to bytes and then decode to utf8. We don't use
|
||||
# asn1_string_to_utf8 here because it doesn't properly convert
|
||||
# utf8 from ia5strings.
|
||||
data = _asn1_string_to_bytes(backend, gn.d.rfc822Name).decode("utf8")
|
||||
# We don't use the constructor for RFC822Name so we can bypass
|
||||
# validation. This allows us to create RFC822Name objects that have
|
||||
# unicode chars when a certificate (against the RFC) contains them.
|
||||
return x509.RFC822Name._init_without_validation(data)
|
||||
elif gn.type == backend._lib.GEN_OTHERNAME:
|
||||
type_id = _obj2txt(backend, gn.d.otherName.type_id)
|
||||
value = _asn1_to_der(backend, gn.d.otherName.value)
|
||||
return x509.OtherName(x509.ObjectIdentifier(type_id), value)
|
||||
else:
|
||||
# x400Address or ediPartyName
|
||||
raise x509.UnsupportedGeneralNameType(
|
||||
"{} is not a supported type".format(
|
||||
x509._GENERAL_NAMES.get(gn.type, gn.type)
|
||||
),
|
||||
gn.type,
|
||||
)
|
||||
|
||||
|
||||
def _decode_ocsp_no_check(backend, ext):
|
||||
return x509.OCSPNoCheck()
|
||||
|
||||
|
||||
def _decode_crl_number(backend, ext):
|
||||
asn1_int = backend._ffi.cast("ASN1_INTEGER *", ext)
|
||||
asn1_int = backend._ffi.gc(asn1_int, backend._lib.ASN1_INTEGER_free)
|
||||
return x509.CRLNumber(_asn1_integer_to_int(backend, asn1_int))
|
||||
|
||||
|
||||
def _decode_delta_crl_indicator(backend, ext):
|
||||
asn1_int = backend._ffi.cast("ASN1_INTEGER *", ext)
|
||||
asn1_int = backend._ffi.gc(asn1_int, backend._lib.ASN1_INTEGER_free)
|
||||
return x509.DeltaCRLIndicator(_asn1_integer_to_int(backend, asn1_int))
|
||||
|
||||
|
||||
class _X509ExtensionParser(object):
|
||||
def __init__(self, backend, ext_count, get_ext, handlers):
|
||||
self.ext_count = ext_count
|
||||
self.get_ext = get_ext
|
||||
self.handlers = handlers
|
||||
self._backend = backend
|
||||
|
||||
def parse(self, x509_obj):
|
||||
extensions = []
|
||||
seen_oids = set()
|
||||
for i in range(self.ext_count(x509_obj)):
|
||||
ext = self.get_ext(x509_obj, i)
|
||||
self._backend.openssl_assert(ext != self._backend._ffi.NULL)
|
||||
crit = self._backend._lib.X509_EXTENSION_get_critical(ext)
|
||||
critical = crit == 1
|
||||
oid = x509.ObjectIdentifier(
|
||||
_obj2txt(
|
||||
self._backend,
|
||||
self._backend._lib.X509_EXTENSION_get_object(ext),
|
||||
)
|
||||
)
|
||||
if oid in seen_oids:
|
||||
raise x509.DuplicateExtension(
|
||||
"Duplicate {} extension found".format(oid), oid
|
||||
)
|
||||
|
||||
# These OIDs are only supported in OpenSSL 1.1.0+ but we want
|
||||
# to support them in all versions of OpenSSL so we decode them
|
||||
# ourselves.
|
||||
if oid == ExtensionOID.TLS_FEATURE:
|
||||
# The extension contents are a SEQUENCE OF INTEGERs.
|
||||
data = self._backend._lib.X509_EXTENSION_get_data(ext)
|
||||
data_bytes = _asn1_string_to_bytes(self._backend, data)
|
||||
features = DERReader(data_bytes).read_single_element(SEQUENCE)
|
||||
parsed = []
|
||||
while not features.is_empty():
|
||||
parsed.append(features.read_element(INTEGER).as_integer())
|
||||
# Map the features to their enum value.
|
||||
value = x509.TLSFeature(
|
||||
[_TLS_FEATURE_TYPE_TO_ENUM[x] for x in parsed]
|
||||
)
|
||||
extensions.append(x509.Extension(oid, critical, value))
|
||||
seen_oids.add(oid)
|
||||
continue
|
||||
elif oid == ExtensionOID.PRECERT_POISON:
|
||||
data = self._backend._lib.X509_EXTENSION_get_data(ext)
|
||||
# The contents of the extension must be an ASN.1 NULL.
|
||||
reader = DERReader(_asn1_string_to_bytes(self._backend, data))
|
||||
reader.read_single_element(NULL).check_empty()
|
||||
extensions.append(
|
||||
x509.Extension(oid, critical, x509.PrecertPoison())
|
||||
)
|
||||
seen_oids.add(oid)
|
||||
continue
|
||||
|
||||
try:
|
||||
handler = self.handlers[oid]
|
||||
except KeyError:
|
||||
# Dump the DER payload into an UnrecognizedExtension object
|
||||
data = self._backend._lib.X509_EXTENSION_get_data(ext)
|
||||
self._backend.openssl_assert(data != self._backend._ffi.NULL)
|
||||
der = self._backend._ffi.buffer(data.data, data.length)[:]
|
||||
unrecognized = x509.UnrecognizedExtension(oid, der)
|
||||
extensions.append(x509.Extension(oid, critical, unrecognized))
|
||||
else:
|
||||
ext_data = self._backend._lib.X509V3_EXT_d2i(ext)
|
||||
if ext_data == self._backend._ffi.NULL:
|
||||
self._backend._consume_errors()
|
||||
raise ValueError(
|
||||
"The {} extension is invalid and can't be "
|
||||
"parsed".format(oid)
|
||||
)
|
||||
|
||||
value = handler(self._backend, ext_data)
|
||||
extensions.append(x509.Extension(oid, critical, value))
|
||||
|
||||
seen_oids.add(oid)
|
||||
|
||||
return x509.Extensions(extensions)
|
||||
|
||||
|
||||
def _decode_certificate_policies(backend, cp):
|
||||
cp = backend._ffi.cast("Cryptography_STACK_OF_POLICYINFO *", cp)
|
||||
cp = backend._ffi.gc(cp, backend._lib.CERTIFICATEPOLICIES_free)
|
||||
|
||||
num = backend._lib.sk_POLICYINFO_num(cp)
|
||||
certificate_policies = []
|
||||
for i in range(num):
|
||||
qualifiers = None
|
||||
pi = backend._lib.sk_POLICYINFO_value(cp, i)
|
||||
oid = x509.ObjectIdentifier(_obj2txt(backend, pi.policyid))
|
||||
if pi.qualifiers != backend._ffi.NULL:
|
||||
qnum = backend._lib.sk_POLICYQUALINFO_num(pi.qualifiers)
|
||||
qualifiers = []
|
||||
for j in range(qnum):
|
||||
pqi = backend._lib.sk_POLICYQUALINFO_value(pi.qualifiers, j)
|
||||
pqualid = x509.ObjectIdentifier(_obj2txt(backend, pqi.pqualid))
|
||||
if pqualid == CertificatePoliciesOID.CPS_QUALIFIER:
|
||||
cpsuri = backend._ffi.buffer(
|
||||
pqi.d.cpsuri.data, pqi.d.cpsuri.length
|
||||
)[:].decode("ascii")
|
||||
qualifiers.append(cpsuri)
|
||||
else:
|
||||
assert pqualid == CertificatePoliciesOID.CPS_USER_NOTICE
|
||||
user_notice = _decode_user_notice(
|
||||
backend, pqi.d.usernotice
|
||||
)
|
||||
qualifiers.append(user_notice)
|
||||
|
||||
certificate_policies.append(x509.PolicyInformation(oid, qualifiers))
|
||||
|
||||
return x509.CertificatePolicies(certificate_policies)
|
||||
|
||||
|
||||
def _decode_user_notice(backend, un):
|
||||
explicit_text = None
|
||||
notice_reference = None
|
||||
|
||||
if un.exptext != backend._ffi.NULL:
|
||||
explicit_text = _asn1_string_to_utf8(backend, un.exptext)
|
||||
|
||||
if un.noticeref != backend._ffi.NULL:
|
||||
organization = _asn1_string_to_utf8(backend, un.noticeref.organization)
|
||||
|
||||
num = backend._lib.sk_ASN1_INTEGER_num(un.noticeref.noticenos)
|
||||
notice_numbers = []
|
||||
for i in range(num):
|
||||
asn1_int = backend._lib.sk_ASN1_INTEGER_value(
|
||||
un.noticeref.noticenos, i
|
||||
)
|
||||
notice_num = _asn1_integer_to_int(backend, asn1_int)
|
||||
notice_numbers.append(notice_num)
|
||||
|
||||
notice_reference = x509.NoticeReference(organization, notice_numbers)
|
||||
|
||||
return x509.UserNotice(notice_reference, explicit_text)
|
||||
|
||||
|
||||
def _decode_basic_constraints(backend, bc_st):
|
||||
basic_constraints = backend._ffi.cast("BASIC_CONSTRAINTS *", bc_st)
|
||||
basic_constraints = backend._ffi.gc(
|
||||
basic_constraints, backend._lib.BASIC_CONSTRAINTS_free
|
||||
)
|
||||
# The byte representation of an ASN.1 boolean true is \xff. OpenSSL
|
||||
# chooses to just map this to its ordinal value, so true is 255 and
|
||||
# false is 0.
|
||||
ca = basic_constraints.ca == 255
|
||||
path_length = _asn1_integer_to_int_or_none(
|
||||
backend, basic_constraints.pathlen
|
||||
)
|
||||
|
||||
return x509.BasicConstraints(ca, path_length)
|
||||
|
||||
|
||||
def _decode_subject_key_identifier(backend, asn1_string):
|
||||
asn1_string = backend._ffi.cast("ASN1_OCTET_STRING *", asn1_string)
|
||||
asn1_string = backend._ffi.gc(
|
||||
asn1_string, backend._lib.ASN1_OCTET_STRING_free
|
||||
)
|
||||
return x509.SubjectKeyIdentifier(
|
||||
backend._ffi.buffer(asn1_string.data, asn1_string.length)[:]
|
||||
)
|
||||
|
||||
|
||||
def _decode_authority_key_identifier(backend, akid):
|
||||
akid = backend._ffi.cast("AUTHORITY_KEYID *", akid)
|
||||
akid = backend._ffi.gc(akid, backend._lib.AUTHORITY_KEYID_free)
|
||||
key_identifier = None
|
||||
authority_cert_issuer = None
|
||||
|
||||
if akid.keyid != backend._ffi.NULL:
|
||||
key_identifier = backend._ffi.buffer(
|
||||
akid.keyid.data, akid.keyid.length
|
||||
)[:]
|
||||
|
||||
if akid.issuer != backend._ffi.NULL:
|
||||
authority_cert_issuer = _decode_general_names(backend, akid.issuer)
|
||||
|
||||
authority_cert_serial_number = _asn1_integer_to_int_or_none(
|
||||
backend, akid.serial
|
||||
)
|
||||
|
||||
return x509.AuthorityKeyIdentifier(
|
||||
key_identifier, authority_cert_issuer, authority_cert_serial_number
|
||||
)
|
||||
|
||||
|
||||
def _decode_information_access(backend, ia):
|
||||
ia = backend._ffi.cast("Cryptography_STACK_OF_ACCESS_DESCRIPTION *", ia)
|
||||
ia = backend._ffi.gc(
|
||||
ia,
|
||||
lambda x: backend._lib.sk_ACCESS_DESCRIPTION_pop_free(
|
||||
x,
|
||||
backend._ffi.addressof(
|
||||
backend._lib._original_lib, "ACCESS_DESCRIPTION_free"
|
||||
),
|
||||
),
|
||||
)
|
||||
num = backend._lib.sk_ACCESS_DESCRIPTION_num(ia)
|
||||
access_descriptions = []
|
||||
for i in range(num):
|
||||
ad = backend._lib.sk_ACCESS_DESCRIPTION_value(ia, i)
|
||||
backend.openssl_assert(ad.method != backend._ffi.NULL)
|
||||
oid = x509.ObjectIdentifier(_obj2txt(backend, ad.method))
|
||||
backend.openssl_assert(ad.location != backend._ffi.NULL)
|
||||
gn = _decode_general_name(backend, ad.location)
|
||||
access_descriptions.append(x509.AccessDescription(oid, gn))
|
||||
|
||||
return access_descriptions
|
||||
|
||||
|
||||
def _decode_authority_information_access(backend, aia):
|
||||
access_descriptions = _decode_information_access(backend, aia)
|
||||
return x509.AuthorityInformationAccess(access_descriptions)
|
||||
|
||||
|
||||
def _decode_subject_information_access(backend, aia):
|
||||
access_descriptions = _decode_information_access(backend, aia)
|
||||
return x509.SubjectInformationAccess(access_descriptions)
|
||||
|
||||
|
||||
def _decode_key_usage(backend, bit_string):
|
||||
bit_string = backend._ffi.cast("ASN1_BIT_STRING *", bit_string)
|
||||
bit_string = backend._ffi.gc(bit_string, backend._lib.ASN1_BIT_STRING_free)
|
||||
get_bit = backend._lib.ASN1_BIT_STRING_get_bit
|
||||
digital_signature = get_bit(bit_string, 0) == 1
|
||||
content_commitment = get_bit(bit_string, 1) == 1
|
||||
key_encipherment = get_bit(bit_string, 2) == 1
|
||||
data_encipherment = get_bit(bit_string, 3) == 1
|
||||
key_agreement = get_bit(bit_string, 4) == 1
|
||||
key_cert_sign = get_bit(bit_string, 5) == 1
|
||||
crl_sign = get_bit(bit_string, 6) == 1
|
||||
encipher_only = get_bit(bit_string, 7) == 1
|
||||
decipher_only = get_bit(bit_string, 8) == 1
|
||||
return x509.KeyUsage(
|
||||
digital_signature,
|
||||
content_commitment,
|
||||
key_encipherment,
|
||||
data_encipherment,
|
||||
key_agreement,
|
||||
key_cert_sign,
|
||||
crl_sign,
|
||||
encipher_only,
|
||||
decipher_only,
|
||||
)
|
||||
|
||||
|
||||
def _decode_general_names_extension(backend, gns):
|
||||
gns = backend._ffi.cast("GENERAL_NAMES *", gns)
|
||||
gns = backend._ffi.gc(gns, backend._lib.GENERAL_NAMES_free)
|
||||
general_names = _decode_general_names(backend, gns)
|
||||
return general_names
|
||||
|
||||
|
||||
def _decode_subject_alt_name(backend, ext):
|
||||
return x509.SubjectAlternativeName(
|
||||
_decode_general_names_extension(backend, ext)
|
||||
)
|
||||
|
||||
|
||||
def _decode_issuer_alt_name(backend, ext):
|
||||
return x509.IssuerAlternativeName(
|
||||
_decode_general_names_extension(backend, ext)
|
||||
)
|
||||
|
||||
|
||||
def _decode_name_constraints(backend, nc):
|
||||
nc = backend._ffi.cast("NAME_CONSTRAINTS *", nc)
|
||||
nc = backend._ffi.gc(nc, backend._lib.NAME_CONSTRAINTS_free)
|
||||
permitted = _decode_general_subtrees(backend, nc.permittedSubtrees)
|
||||
excluded = _decode_general_subtrees(backend, nc.excludedSubtrees)
|
||||
return x509.NameConstraints(
|
||||
permitted_subtrees=permitted, excluded_subtrees=excluded
|
||||
)
|
||||
|
||||
|
||||
def _decode_general_subtrees(backend, stack_subtrees):
|
||||
if stack_subtrees == backend._ffi.NULL:
|
||||
return None
|
||||
|
||||
num = backend._lib.sk_GENERAL_SUBTREE_num(stack_subtrees)
|
||||
subtrees = []
|
||||
|
||||
for i in range(num):
|
||||
obj = backend._lib.sk_GENERAL_SUBTREE_value(stack_subtrees, i)
|
||||
backend.openssl_assert(obj != backend._ffi.NULL)
|
||||
name = _decode_general_name(backend, obj.base)
|
||||
subtrees.append(name)
|
||||
|
||||
return subtrees
|
||||
|
||||
|
||||
def _decode_issuing_dist_point(backend, idp):
|
||||
idp = backend._ffi.cast("ISSUING_DIST_POINT *", idp)
|
||||
idp = backend._ffi.gc(idp, backend._lib.ISSUING_DIST_POINT_free)
|
||||
if idp.distpoint != backend._ffi.NULL:
|
||||
full_name, relative_name = _decode_distpoint(backend, idp.distpoint)
|
||||
else:
|
||||
full_name = None
|
||||
relative_name = None
|
||||
|
||||
only_user = idp.onlyuser == 255
|
||||
only_ca = idp.onlyCA == 255
|
||||
indirect_crl = idp.indirectCRL == 255
|
||||
only_attr = idp.onlyattr == 255
|
||||
if idp.onlysomereasons != backend._ffi.NULL:
|
||||
only_some_reasons = _decode_reasons(backend, idp.onlysomereasons)
|
||||
else:
|
||||
only_some_reasons = None
|
||||
|
||||
return x509.IssuingDistributionPoint(
|
||||
full_name,
|
||||
relative_name,
|
||||
only_user,
|
||||
only_ca,
|
||||
only_some_reasons,
|
||||
indirect_crl,
|
||||
only_attr,
|
||||
)
|
||||
|
||||
|
||||
def _decode_policy_constraints(backend, pc):
|
||||
pc = backend._ffi.cast("POLICY_CONSTRAINTS *", pc)
|
||||
pc = backend._ffi.gc(pc, backend._lib.POLICY_CONSTRAINTS_free)
|
||||
|
||||
require_explicit_policy = _asn1_integer_to_int_or_none(
|
||||
backend, pc.requireExplicitPolicy
|
||||
)
|
||||
inhibit_policy_mapping = _asn1_integer_to_int_or_none(
|
||||
backend, pc.inhibitPolicyMapping
|
||||
)
|
||||
|
||||
return x509.PolicyConstraints(
|
||||
require_explicit_policy, inhibit_policy_mapping
|
||||
)
|
||||
|
||||
|
||||
def _decode_extended_key_usage(backend, sk):
|
||||
sk = backend._ffi.cast("Cryptography_STACK_OF_ASN1_OBJECT *", sk)
|
||||
sk = backend._ffi.gc(sk, backend._lib.sk_ASN1_OBJECT_free)
|
||||
num = backend._lib.sk_ASN1_OBJECT_num(sk)
|
||||
ekus = []
|
||||
|
||||
for i in range(num):
|
||||
obj = backend._lib.sk_ASN1_OBJECT_value(sk, i)
|
||||
backend.openssl_assert(obj != backend._ffi.NULL)
|
||||
oid = x509.ObjectIdentifier(_obj2txt(backend, obj))
|
||||
ekus.append(oid)
|
||||
|
||||
return x509.ExtendedKeyUsage(ekus)
|
||||
|
||||
|
||||
_DISTPOINT_TYPE_FULLNAME = 0
|
||||
_DISTPOINT_TYPE_RELATIVENAME = 1
|
||||
|
||||
|
||||
def _decode_dist_points(backend, cdps):
|
||||
cdps = backend._ffi.cast("Cryptography_STACK_OF_DIST_POINT *", cdps)
|
||||
cdps = backend._ffi.gc(cdps, backend._lib.CRL_DIST_POINTS_free)
|
||||
|
||||
num = backend._lib.sk_DIST_POINT_num(cdps)
|
||||
dist_points = []
|
||||
for i in range(num):
|
||||
full_name = None
|
||||
relative_name = None
|
||||
crl_issuer = None
|
||||
reasons = None
|
||||
cdp = backend._lib.sk_DIST_POINT_value(cdps, i)
|
||||
if cdp.reasons != backend._ffi.NULL:
|
||||
reasons = _decode_reasons(backend, cdp.reasons)
|
||||
|
||||
if cdp.CRLissuer != backend._ffi.NULL:
|
||||
crl_issuer = _decode_general_names(backend, cdp.CRLissuer)
|
||||
|
||||
# Certificates may have a crl_issuer/reasons and no distribution
|
||||
        # point, so make sure it's not null.
|
||||
if cdp.distpoint != backend._ffi.NULL:
|
||||
full_name, relative_name = _decode_distpoint(
|
||||
backend, cdp.distpoint
|
||||
)
|
||||
|
||||
dist_points.append(
|
||||
x509.DistributionPoint(
|
||||
full_name, relative_name, reasons, crl_issuer
|
||||
)
|
||||
)
|
||||
|
||||
return dist_points
|
||||
|
||||
|
||||
# ReasonFlags ::= BIT STRING {
|
||||
# unused (0),
|
||||
# keyCompromise (1),
|
||||
# cACompromise (2),
|
||||
# affiliationChanged (3),
|
||||
# superseded (4),
|
||||
# cessationOfOperation (5),
|
||||
# certificateHold (6),
|
||||
# privilegeWithdrawn (7),
|
||||
# aACompromise (8) }
|
||||
_REASON_BIT_MAPPING = {
|
||||
1: x509.ReasonFlags.key_compromise,
|
||||
2: x509.ReasonFlags.ca_compromise,
|
||||
3: x509.ReasonFlags.affiliation_changed,
|
||||
4: x509.ReasonFlags.superseded,
|
||||
5: x509.ReasonFlags.cessation_of_operation,
|
||||
6: x509.ReasonFlags.certificate_hold,
|
||||
7: x509.ReasonFlags.privilege_withdrawn,
|
||||
8: x509.ReasonFlags.aa_compromise,
|
||||
}
|
||||
|
||||
|
||||
def _decode_reasons(backend, reasons):
|
||||
# We will check each bit from RFC 5280
|
||||
enum_reasons = []
|
||||
for bit_position, reason in six.iteritems(_REASON_BIT_MAPPING):
|
||||
if backend._lib.ASN1_BIT_STRING_get_bit(reasons, bit_position):
|
||||
enum_reasons.append(reason)
|
||||
|
||||
return frozenset(enum_reasons)
|
||||
|
||||
|
||||
def _decode_distpoint(backend, distpoint):
|
||||
if distpoint.type == _DISTPOINT_TYPE_FULLNAME:
|
||||
full_name = _decode_general_names(backend, distpoint.name.fullname)
|
||||
return full_name, None
|
||||
|
||||
# OpenSSL code doesn't test for a specific type for
|
||||
    # relativename; everything that isn't fullname is considered
|
||||
# relativename. Per RFC 5280:
|
||||
#
|
||||
# DistributionPointName ::= CHOICE {
|
||||
# fullName [0] GeneralNames,
|
||||
# nameRelativeToCRLIssuer [1] RelativeDistinguishedName }
|
||||
rns = distpoint.name.relativename
|
||||
rnum = backend._lib.sk_X509_NAME_ENTRY_num(rns)
|
||||
attributes = set()
|
||||
for i in range(rnum):
|
||||
rn = backend._lib.sk_X509_NAME_ENTRY_value(rns, i)
|
||||
backend.openssl_assert(rn != backend._ffi.NULL)
|
||||
attributes.add(_decode_x509_name_entry(backend, rn))
|
||||
|
||||
relative_name = x509.RelativeDistinguishedName(attributes)
|
||||
|
||||
return None, relative_name
|
||||
|
||||
|
||||
def _decode_crl_distribution_points(backend, cdps):
|
||||
dist_points = _decode_dist_points(backend, cdps)
|
||||
return x509.CRLDistributionPoints(dist_points)
|
||||
|
||||
|
||||
def _decode_freshest_crl(backend, cdps):
|
||||
dist_points = _decode_dist_points(backend, cdps)
|
||||
return x509.FreshestCRL(dist_points)
|
||||
|
||||
|
||||
def _decode_inhibit_any_policy(backend, asn1_int):
|
||||
asn1_int = backend._ffi.cast("ASN1_INTEGER *", asn1_int)
|
||||
asn1_int = backend._ffi.gc(asn1_int, backend._lib.ASN1_INTEGER_free)
|
||||
skip_certs = _asn1_integer_to_int(backend, asn1_int)
|
||||
return x509.InhibitAnyPolicy(skip_certs)
|
||||
|
||||
|
||||
def _decode_scts(backend, asn1_scts):
|
||||
from cryptography.hazmat.backends.openssl.x509 import (
|
||||
_SignedCertificateTimestamp,
|
||||
)
|
||||
|
||||
asn1_scts = backend._ffi.cast("Cryptography_STACK_OF_SCT *", asn1_scts)
|
||||
asn1_scts = backend._ffi.gc(asn1_scts, backend._lib.SCT_LIST_free)
|
||||
|
||||
scts = []
|
||||
for i in range(backend._lib.sk_SCT_num(asn1_scts)):
|
||||
sct = backend._lib.sk_SCT_value(asn1_scts, i)
|
||||
|
||||
scts.append(_SignedCertificateTimestamp(backend, asn1_scts, sct))
|
||||
return scts
|
||||
|
||||
|
||||
def _decode_precert_signed_certificate_timestamps(backend, asn1_scts):
|
||||
return x509.PrecertificateSignedCertificateTimestamps(
|
||||
_decode_scts(backend, asn1_scts)
|
||||
)
|
||||
|
||||
|
||||
def _decode_signed_certificate_timestamps(backend, asn1_scts):
|
||||
return x509.SignedCertificateTimestamps(_decode_scts(backend, asn1_scts))
|
||||
|
||||
|
||||
# CRLReason ::= ENUMERATED {
|
||||
# unspecified (0),
|
||||
# keyCompromise (1),
|
||||
# cACompromise (2),
|
||||
# affiliationChanged (3),
|
||||
# superseded (4),
|
||||
# cessationOfOperation (5),
|
||||
# certificateHold (6),
|
||||
# -- value 7 is not used
|
||||
# removeFromCRL (8),
|
||||
# privilegeWithdrawn (9),
|
||||
# aACompromise (10) }
|
||||
_CRL_ENTRY_REASON_CODE_TO_ENUM = {
|
||||
0: x509.ReasonFlags.unspecified,
|
||||
1: x509.ReasonFlags.key_compromise,
|
||||
2: x509.ReasonFlags.ca_compromise,
|
||||
3: x509.ReasonFlags.affiliation_changed,
|
||||
4: x509.ReasonFlags.superseded,
|
||||
5: x509.ReasonFlags.cessation_of_operation,
|
||||
6: x509.ReasonFlags.certificate_hold,
|
||||
8: x509.ReasonFlags.remove_from_crl,
|
||||
9: x509.ReasonFlags.privilege_withdrawn,
|
||||
10: x509.ReasonFlags.aa_compromise,
|
||||
}
|
||||
|
||||
|
||||
_CRL_ENTRY_REASON_ENUM_TO_CODE = {
|
||||
x509.ReasonFlags.unspecified: 0,
|
||||
x509.ReasonFlags.key_compromise: 1,
|
||||
x509.ReasonFlags.ca_compromise: 2,
|
||||
x509.ReasonFlags.affiliation_changed: 3,
|
||||
x509.ReasonFlags.superseded: 4,
|
||||
x509.ReasonFlags.cessation_of_operation: 5,
|
||||
x509.ReasonFlags.certificate_hold: 6,
|
||||
x509.ReasonFlags.remove_from_crl: 8,
|
||||
x509.ReasonFlags.privilege_withdrawn: 9,
|
||||
x509.ReasonFlags.aa_compromise: 10,
|
||||
}
|
||||
|
||||
|
||||
def _decode_crl_reason(backend, enum):
|
||||
enum = backend._ffi.cast("ASN1_ENUMERATED *", enum)
|
||||
enum = backend._ffi.gc(enum, backend._lib.ASN1_ENUMERATED_free)
|
||||
code = backend._lib.ASN1_ENUMERATED_get(enum)
|
||||
|
||||
try:
|
||||
return x509.CRLReason(_CRL_ENTRY_REASON_CODE_TO_ENUM[code])
|
||||
except KeyError:
|
||||
raise ValueError("Unsupported reason code: {}".format(code))
|
||||
|
||||
|
||||
def _decode_invalidity_date(backend, inv_date):
|
||||
generalized_time = backend._ffi.cast("ASN1_GENERALIZEDTIME *", inv_date)
|
||||
generalized_time = backend._ffi.gc(
|
||||
generalized_time, backend._lib.ASN1_GENERALIZEDTIME_free
|
||||
)
|
||||
return x509.InvalidityDate(
|
||||
_parse_asn1_generalized_time(backend, generalized_time)
|
||||
)
|
||||
|
||||
|
||||
def _decode_cert_issuer(backend, gns):
|
||||
gns = backend._ffi.cast("GENERAL_NAMES *", gns)
|
||||
gns = backend._ffi.gc(gns, backend._lib.GENERAL_NAMES_free)
|
||||
general_names = _decode_general_names(backend, gns)
|
||||
return x509.CertificateIssuer(general_names)
|
||||
|
||||
|
||||
def _asn1_to_der(backend, asn1_type):
|
||||
buf = backend._ffi.new("unsigned char **")
|
||||
res = backend._lib.i2d_ASN1_TYPE(asn1_type, buf)
|
||||
backend.openssl_assert(res >= 0)
|
||||
backend.openssl_assert(buf[0] != backend._ffi.NULL)
|
||||
buf = backend._ffi.gc(
|
||||
buf, lambda buffer: backend._lib.OPENSSL_free(buffer[0])
|
||||
)
|
||||
return backend._ffi.buffer(buf[0], res)[:]
|
||||
|
||||
|
||||
def _asn1_integer_to_int(backend, asn1_int):
|
||||
bn = backend._lib.ASN1_INTEGER_to_BN(asn1_int, backend._ffi.NULL)
|
||||
backend.openssl_assert(bn != backend._ffi.NULL)
|
||||
bn = backend._ffi.gc(bn, backend._lib.BN_free)
|
||||
return backend._bn_to_int(bn)
|
||||
|
||||
|
||||
def _asn1_integer_to_int_or_none(backend, asn1_int):
|
||||
if asn1_int == backend._ffi.NULL:
|
||||
return None
|
||||
else:
|
||||
return _asn1_integer_to_int(backend, asn1_int)
|
||||
|
||||
|
||||
def _asn1_string_to_bytes(backend, asn1_string):
|
||||
return backend._ffi.buffer(asn1_string.data, asn1_string.length)[:]
|
||||
|
||||
|
||||
def _asn1_string_to_ascii(backend, asn1_string):
|
||||
return _asn1_string_to_bytes(backend, asn1_string).decode("ascii")
|
||||
|
||||
|
||||
def _asn1_string_to_utf8(backend, asn1_string):
|
||||
buf = backend._ffi.new("unsigned char **")
|
||||
res = backend._lib.ASN1_STRING_to_UTF8(buf, asn1_string)
|
||||
if res == -1:
|
||||
raise ValueError(
|
||||
"Unsupported ASN1 string type. Type: {}".format(asn1_string.type)
|
||||
)
|
||||
|
||||
backend.openssl_assert(buf[0] != backend._ffi.NULL)
|
||||
buf = backend._ffi.gc(
|
||||
buf, lambda buffer: backend._lib.OPENSSL_free(buffer[0])
|
||||
)
|
||||
return backend._ffi.buffer(buf[0], res)[:].decode("utf8")
|
||||
|
||||
|
||||
def _parse_asn1_time(backend, asn1_time):
|
||||
backend.openssl_assert(asn1_time != backend._ffi.NULL)
|
||||
generalized_time = backend._lib.ASN1_TIME_to_generalizedtime(
|
||||
asn1_time, backend._ffi.NULL
|
||||
)
|
||||
if generalized_time == backend._ffi.NULL:
|
||||
raise ValueError(
|
||||
"Couldn't parse ASN.1 time as generalizedtime {!r}".format(
|
||||
_asn1_string_to_bytes(backend, asn1_time)
|
||||
)
|
||||
)
|
||||
|
||||
generalized_time = backend._ffi.gc(
|
||||
generalized_time, backend._lib.ASN1_GENERALIZEDTIME_free
|
||||
)
|
||||
return _parse_asn1_generalized_time(backend, generalized_time)
|
||||
|
||||
|
||||
def _parse_asn1_generalized_time(backend, generalized_time):
    time = _asn1_string_to_ascii(
        backend, backend._ffi.cast("ASN1_STRING *", generalized_time)
    )
    return datetime.datetime.strptime(time, "%Y%m%d%H%M%SZ")
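# Illustrative sketch (not part of the original module): DER GeneralizedTime
# values handled here are ASCII strings of the form YYYYMMDDHHMMSSZ, so the
# strptime call above turns, e.g., "20250102030405Z" into a naive UTC
# datetime:
#
#     import datetime
#
#     datetime.datetime.strptime("20250102030405Z", "%Y%m%d%H%M%SZ")
#     # -> datetime.datetime(2025, 1, 2, 3, 4, 5)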
|
||||
|
||||
|
||||
def _decode_nonce(backend, nonce):
|
||||
nonce = backend._ffi.cast("ASN1_OCTET_STRING *", nonce)
|
||||
nonce = backend._ffi.gc(nonce, backend._lib.ASN1_OCTET_STRING_free)
|
||||
return x509.OCSPNonce(_asn1_string_to_bytes(backend, nonce))
|
||||
|
||||
|
||||
_EXTENSION_HANDLERS_BASE = {
|
||||
ExtensionOID.BASIC_CONSTRAINTS: _decode_basic_constraints,
|
||||
ExtensionOID.SUBJECT_KEY_IDENTIFIER: _decode_subject_key_identifier,
|
||||
ExtensionOID.KEY_USAGE: _decode_key_usage,
|
||||
ExtensionOID.SUBJECT_ALTERNATIVE_NAME: _decode_subject_alt_name,
|
||||
ExtensionOID.EXTENDED_KEY_USAGE: _decode_extended_key_usage,
|
||||
ExtensionOID.AUTHORITY_KEY_IDENTIFIER: _decode_authority_key_identifier,
|
||||
ExtensionOID.AUTHORITY_INFORMATION_ACCESS: (
|
||||
_decode_authority_information_access
|
||||
),
|
||||
ExtensionOID.SUBJECT_INFORMATION_ACCESS: (
|
||||
_decode_subject_information_access
|
||||
),
|
||||
ExtensionOID.CERTIFICATE_POLICIES: _decode_certificate_policies,
|
||||
ExtensionOID.CRL_DISTRIBUTION_POINTS: _decode_crl_distribution_points,
|
||||
ExtensionOID.FRESHEST_CRL: _decode_freshest_crl,
|
||||
ExtensionOID.OCSP_NO_CHECK: _decode_ocsp_no_check,
|
||||
ExtensionOID.INHIBIT_ANY_POLICY: _decode_inhibit_any_policy,
|
||||
ExtensionOID.ISSUER_ALTERNATIVE_NAME: _decode_issuer_alt_name,
|
||||
ExtensionOID.NAME_CONSTRAINTS: _decode_name_constraints,
|
||||
ExtensionOID.POLICY_CONSTRAINTS: _decode_policy_constraints,
|
||||
}
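# Illustrative sketch (not part of the original module): this dict, like the
# related handler tables below, is a plain OID-to-callable dispatch table.
# Extension parsing elsewhere in the backend is assumed to look roughly like:
#
#     handler = _EXTENSION_HANDLERS_BASE.get(ExtensionOID.KEY_USAGE)
#     if handler is not None:
#         value = handler(backend, ext_data)  # e.g. an x509.KeyUsage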
|
||||
_EXTENSION_HANDLERS_SCT = {
|
||||
ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS: (
|
||||
_decode_precert_signed_certificate_timestamps
|
||||
)
|
||||
}
|
||||
|
||||
_REVOKED_EXTENSION_HANDLERS = {
|
||||
CRLEntryExtensionOID.CRL_REASON: _decode_crl_reason,
|
||||
CRLEntryExtensionOID.INVALIDITY_DATE: _decode_invalidity_date,
|
||||
CRLEntryExtensionOID.CERTIFICATE_ISSUER: _decode_cert_issuer,
|
||||
}
|
||||
|
||||
_CRL_EXTENSION_HANDLERS = {
|
||||
ExtensionOID.CRL_NUMBER: _decode_crl_number,
|
||||
ExtensionOID.DELTA_CRL_INDICATOR: _decode_delta_crl_indicator,
|
||||
ExtensionOID.AUTHORITY_KEY_IDENTIFIER: _decode_authority_key_identifier,
|
||||
ExtensionOID.ISSUER_ALTERNATIVE_NAME: _decode_issuer_alt_name,
|
||||
ExtensionOID.AUTHORITY_INFORMATION_ACCESS: (
|
||||
_decode_authority_information_access
|
||||
),
|
||||
ExtensionOID.ISSUING_DISTRIBUTION_POINT: _decode_issuing_dist_point,
|
||||
ExtensionOID.FRESHEST_CRL: _decode_freshest_crl,
|
||||
}
|
||||
|
||||
_OCSP_REQ_EXTENSION_HANDLERS = {
|
||||
OCSPExtensionOID.NONCE: _decode_nonce,
|
||||
}
|
||||
|
||||
_OCSP_BASICRESP_EXTENSION_HANDLERS = {
|
||||
OCSPExtensionOID.NONCE: _decode_nonce,
|
||||
}
|
||||
|
||||
_OCSP_SINGLERESP_EXTENSION_HANDLERS_SCT = {
|
||||
ExtensionOID.SIGNED_CERTIFICATE_TIMESTAMPS: (
|
||||
_decode_signed_certificate_timestamps
|
||||
)
|
||||
}
@ -0,0 +1,271 @@
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
|
||||
from cryptography.hazmat.primitives import serialization
|
||||
from cryptography.hazmat.primitives.asymmetric import dh
|
||||
|
||||
|
||||
def _dh_params_dup(dh_cdata, backend):
    lib = backend._lib
    ffi = backend._ffi

    param_cdata = lib.DHparams_dup(dh_cdata)
    backend.openssl_assert(param_cdata != ffi.NULL)
    param_cdata = ffi.gc(param_cdata, lib.DH_free)
    if lib.CRYPTOGRAPHY_IS_LIBRESSL:
        # LibreSSL's DHparams_dup does not copy q, so copy it explicitly.
        q = ffi.new("BIGNUM **")
        lib.DH_get0_pqg(dh_cdata, ffi.NULL, q, ffi.NULL)
        q_dup = lib.BN_dup(q[0])
        res = lib.DH_set0_pqg(param_cdata, ffi.NULL, q_dup, ffi.NULL)
        backend.openssl_assert(res == 1)

    return param_cdata
|
||||
|
||||
|
||||
def _dh_cdata_to_parameters(dh_cdata, backend):
|
||||
param_cdata = _dh_params_dup(dh_cdata, backend)
|
||||
return _DHParameters(backend, param_cdata)
|
||||
|
||||
|
||||
@utils.register_interface(dh.DHParametersWithSerialization)
|
||||
class _DHParameters(object):
|
||||
def __init__(self, backend, dh_cdata):
|
||||
self._backend = backend
|
||||
self._dh_cdata = dh_cdata
|
||||
|
||||
def parameter_numbers(self):
|
||||
p = self._backend._ffi.new("BIGNUM **")
|
||||
g = self._backend._ffi.new("BIGNUM **")
|
||||
q = self._backend._ffi.new("BIGNUM **")
|
||||
self._backend._lib.DH_get0_pqg(self._dh_cdata, p, q, g)
|
||||
self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
|
||||
self._backend.openssl_assert(g[0] != self._backend._ffi.NULL)
|
||||
if q[0] == self._backend._ffi.NULL:
|
||||
q_val = None
|
||||
else:
|
||||
q_val = self._backend._bn_to_int(q[0])
|
||||
return dh.DHParameterNumbers(
|
||||
p=self._backend._bn_to_int(p[0]),
|
||||
g=self._backend._bn_to_int(g[0]),
|
||||
q=q_val,
|
||||
)
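    # Illustrative sketch (not part of the original module), using only the
    # public API that this class backs:
    #
    #     from cryptography.hazmat.backends import default_backend
    #     from cryptography.hazmat.primitives.asymmetric import dh
    #
    #     params = dh.generate_parameters(
    #         generator=2, key_size=2048, backend=default_backend()
    #     )
    #     nums = params.parameter_numbers()
    #     nums.p, nums.g, nums.q  # q is None for plain PKCS3 parameters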
|
||||
|
||||
def generate_private_key(self):
|
||||
return self._backend.generate_dh_private_key(self)
|
||||
|
||||
def parameter_bytes(self, encoding, format):
|
||||
if format is not serialization.ParameterFormat.PKCS3:
|
||||
raise ValueError("Only PKCS3 serialization is supported")
|
||||
if not self._backend._lib.Cryptography_HAS_EVP_PKEY_DHX:
|
||||
q = self._backend._ffi.new("BIGNUM **")
|
||||
self._backend._lib.DH_get0_pqg(
|
||||
self._dh_cdata,
|
||||
self._backend._ffi.NULL,
|
||||
q,
|
||||
self._backend._ffi.NULL,
|
||||
)
|
||||
if q[0] != self._backend._ffi.NULL:
|
||||
raise UnsupportedAlgorithm(
|
||||
"DH X9.42 serialization is not supported",
|
||||
_Reasons.UNSUPPORTED_SERIALIZATION,
|
||||
)
|
||||
|
||||
return self._backend._parameter_bytes(encoding, format, self._dh_cdata)
|
||||
|
||||
|
||||
def _get_dh_num_bits(backend, dh_cdata):
|
||||
p = backend._ffi.new("BIGNUM **")
|
||||
backend._lib.DH_get0_pqg(dh_cdata, p, backend._ffi.NULL, backend._ffi.NULL)
|
||||
backend.openssl_assert(p[0] != backend._ffi.NULL)
|
||||
return backend._lib.BN_num_bits(p[0])
|
||||
|
||||
|
||||
@utils.register_interface(dh.DHPrivateKeyWithSerialization)
|
||||
class _DHPrivateKey(object):
|
||||
def __init__(self, backend, dh_cdata, evp_pkey):
|
||||
self._backend = backend
|
||||
self._dh_cdata = dh_cdata
|
||||
self._evp_pkey = evp_pkey
|
||||
self._key_size_bytes = self._backend._lib.DH_size(dh_cdata)
|
||||
|
||||
@property
|
||||
def key_size(self):
|
||||
return _get_dh_num_bits(self._backend, self._dh_cdata)
|
||||
|
||||
def private_numbers(self):
|
||||
p = self._backend._ffi.new("BIGNUM **")
|
||||
g = self._backend._ffi.new("BIGNUM **")
|
||||
q = self._backend._ffi.new("BIGNUM **")
|
||||
self._backend._lib.DH_get0_pqg(self._dh_cdata, p, q, g)
|
||||
self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
|
||||
self._backend.openssl_assert(g[0] != self._backend._ffi.NULL)
|
||||
if q[0] == self._backend._ffi.NULL:
|
||||
q_val = None
|
||||
else:
|
||||
q_val = self._backend._bn_to_int(q[0])
|
||||
pub_key = self._backend._ffi.new("BIGNUM **")
|
||||
priv_key = self._backend._ffi.new("BIGNUM **")
|
||||
self._backend._lib.DH_get0_key(self._dh_cdata, pub_key, priv_key)
|
||||
self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL)
|
||||
self._backend.openssl_assert(priv_key[0] != self._backend._ffi.NULL)
|
||||
return dh.DHPrivateNumbers(
|
||||
public_numbers=dh.DHPublicNumbers(
|
||||
parameter_numbers=dh.DHParameterNumbers(
|
||||
p=self._backend._bn_to_int(p[0]),
|
||||
g=self._backend._bn_to_int(g[0]),
|
||||
q=q_val,
|
||||
),
|
||||
y=self._backend._bn_to_int(pub_key[0]),
|
||||
),
|
||||
x=self._backend._bn_to_int(priv_key[0]),
|
||||
)
|
||||
|
||||
    def exchange(self, peer_public_key):
        buf = self._backend._ffi.new("unsigned char[]", self._key_size_bytes)
        pub_key = self._backend._ffi.new("BIGNUM **")
        self._backend._lib.DH_get0_key(
            peer_public_key._dh_cdata, pub_key, self._backend._ffi.NULL
        )
        self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL)
        res = self._backend._lib.DH_compute_key(
            buf, pub_key[0], self._dh_cdata
        )

        if res == -1:
            errors_with_text = self._backend._consume_errors_with_text()
            raise ValueError(
                "Error computing shared key. Public key is likely invalid "
                "for this exchange.",
                errors_with_text,
            )
        else:
            self._backend.openssl_assert(res >= 1)

            key = self._backend._ffi.buffer(buf)[:res]
            pad = self._key_size_bytes - len(key)

            if pad > 0:
                key = (b"\x00" * pad) + key

            return key
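    # Illustrative usage sketch (not part of the original module), via the
    # public API; the shared secret is left-padded with zero bytes to the
    # byte length of the prime, as implemented above:
    #
    #     from cryptography.hazmat.backends import default_backend
    #     from cryptography.hazmat.primitives.asymmetric import dh
    #
    #     params = dh.generate_parameters(
    #         generator=2, key_size=2048, backend=default_backend()
    #     )
    #     key_a = params.generate_private_key()
    #     key_b = params.generate_private_key()
    #     shared_a = key_a.exchange(key_b.public_key())
    #     shared_b = key_b.exchange(key_a.public_key())
    #     assert shared_a == shared_b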
|
||||
|
||||
def public_key(self):
|
||||
dh_cdata = _dh_params_dup(self._dh_cdata, self._backend)
|
||||
pub_key = self._backend._ffi.new("BIGNUM **")
|
||||
self._backend._lib.DH_get0_key(
|
||||
self._dh_cdata, pub_key, self._backend._ffi.NULL
|
||||
)
|
||||
self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL)
|
||||
pub_key_dup = self._backend._lib.BN_dup(pub_key[0])
|
||||
self._backend.openssl_assert(pub_key_dup != self._backend._ffi.NULL)
|
||||
|
||||
res = self._backend._lib.DH_set0_key(
|
||||
dh_cdata, pub_key_dup, self._backend._ffi.NULL
|
||||
)
|
||||
self._backend.openssl_assert(res == 1)
|
||||
evp_pkey = self._backend._dh_cdata_to_evp_pkey(dh_cdata)
|
||||
return _DHPublicKey(self._backend, dh_cdata, evp_pkey)
|
||||
|
||||
def parameters(self):
|
||||
return _dh_cdata_to_parameters(self._dh_cdata, self._backend)
|
||||
|
||||
def private_bytes(self, encoding, format, encryption_algorithm):
|
||||
if format is not serialization.PrivateFormat.PKCS8:
|
||||
raise ValueError(
|
||||
"DH private keys support only PKCS8 serialization"
|
||||
)
|
||||
if not self._backend._lib.Cryptography_HAS_EVP_PKEY_DHX:
|
||||
q = self._backend._ffi.new("BIGNUM **")
|
||||
self._backend._lib.DH_get0_pqg(
|
||||
self._dh_cdata,
|
||||
self._backend._ffi.NULL,
|
||||
q,
|
||||
self._backend._ffi.NULL,
|
||||
)
|
||||
if q[0] != self._backend._ffi.NULL:
|
||||
raise UnsupportedAlgorithm(
|
||||
"DH X9.42 serialization is not supported",
|
||||
_Reasons.UNSUPPORTED_SERIALIZATION,
|
||||
)
|
||||
|
||||
return self._backend._private_key_bytes(
|
||||
encoding,
|
||||
format,
|
||||
encryption_algorithm,
|
||||
self,
|
||||
self._evp_pkey,
|
||||
self._dh_cdata,
|
||||
)
|
||||
|
||||
|
||||
@utils.register_interface(dh.DHPublicKeyWithSerialization)
|
||||
class _DHPublicKey(object):
|
||||
def __init__(self, backend, dh_cdata, evp_pkey):
|
||||
self._backend = backend
|
||||
self._dh_cdata = dh_cdata
|
||||
self._evp_pkey = evp_pkey
|
||||
self._key_size_bits = _get_dh_num_bits(self._backend, self._dh_cdata)
|
||||
|
||||
@property
|
||||
def key_size(self):
|
||||
return self._key_size_bits
|
||||
|
||||
def public_numbers(self):
|
||||
p = self._backend._ffi.new("BIGNUM **")
|
||||
g = self._backend._ffi.new("BIGNUM **")
|
||||
q = self._backend._ffi.new("BIGNUM **")
|
||||
self._backend._lib.DH_get0_pqg(self._dh_cdata, p, q, g)
|
||||
self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
|
||||
self._backend.openssl_assert(g[0] != self._backend._ffi.NULL)
|
||||
if q[0] == self._backend._ffi.NULL:
|
||||
q_val = None
|
||||
else:
|
||||
q_val = self._backend._bn_to_int(q[0])
|
||||
pub_key = self._backend._ffi.new("BIGNUM **")
|
||||
self._backend._lib.DH_get0_key(
|
||||
self._dh_cdata, pub_key, self._backend._ffi.NULL
|
||||
)
|
||||
self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL)
|
||||
return dh.DHPublicNumbers(
|
||||
parameter_numbers=dh.DHParameterNumbers(
|
||||
p=self._backend._bn_to_int(p[0]),
|
||||
g=self._backend._bn_to_int(g[0]),
|
||||
q=q_val,
|
||||
),
|
||||
y=self._backend._bn_to_int(pub_key[0]),
|
||||
)
|
||||
|
||||
def parameters(self):
|
||||
return _dh_cdata_to_parameters(self._dh_cdata, self._backend)
|
||||
|
||||
def public_bytes(self, encoding, format):
|
||||
if format is not serialization.PublicFormat.SubjectPublicKeyInfo:
|
||||
raise ValueError(
|
||||
"DH public keys support only "
|
||||
"SubjectPublicKeyInfo serialization"
|
||||
)
|
||||
|
||||
if not self._backend._lib.Cryptography_HAS_EVP_PKEY_DHX:
|
||||
q = self._backend._ffi.new("BIGNUM **")
|
||||
self._backend._lib.DH_get0_pqg(
|
||||
self._dh_cdata,
|
||||
self._backend._ffi.NULL,
|
||||
q,
|
||||
self._backend._ffi.NULL,
|
||||
)
|
||||
if q[0] != self._backend._ffi.NULL:
|
||||
raise UnsupportedAlgorithm(
|
||||
"DH X9.42 serialization is not supported",
|
||||
_Reasons.UNSUPPORTED_SERIALIZATION,
|
||||
)
|
||||
|
||||
return self._backend._public_key_bytes(
|
||||
encoding, format, self, self._evp_pkey, None
|
||||
)
@ -0,0 +1,263 @@
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.exceptions import InvalidSignature
|
||||
from cryptography.hazmat.backends.openssl.utils import (
|
||||
_calculate_digest_and_algorithm,
|
||||
_check_not_prehashed,
|
||||
_warn_sign_verify_deprecated,
|
||||
)
|
||||
from cryptography.hazmat.primitives import hashes
|
||||
from cryptography.hazmat.primitives.asymmetric import (
|
||||
AsymmetricSignatureContext,
|
||||
AsymmetricVerificationContext,
|
||||
dsa,
|
||||
)
|
||||
|
||||
|
||||
def _dsa_sig_sign(backend, private_key, data):
|
||||
sig_buf_len = backend._lib.DSA_size(private_key._dsa_cdata)
|
||||
sig_buf = backend._ffi.new("unsigned char[]", sig_buf_len)
|
||||
buflen = backend._ffi.new("unsigned int *")
|
||||
|
||||
# The first parameter passed to DSA_sign is unused by OpenSSL but
|
||||
# must be an integer.
|
||||
res = backend._lib.DSA_sign(
|
||||
0, data, len(data), sig_buf, buflen, private_key._dsa_cdata
|
||||
)
|
||||
backend.openssl_assert(res == 1)
|
||||
backend.openssl_assert(buflen[0])
|
||||
|
||||
return backend._ffi.buffer(sig_buf)[: buflen[0]]
|
||||
|
||||
|
||||
def _dsa_sig_verify(backend, public_key, signature, data):
|
||||
# The first parameter passed to DSA_verify is unused by OpenSSL but
|
||||
# must be an integer.
|
||||
res = backend._lib.DSA_verify(
|
||||
0, data, len(data), signature, len(signature), public_key._dsa_cdata
|
||||
)
|
||||
|
||||
if res != 1:
|
||||
backend._consume_errors()
|
||||
raise InvalidSignature
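# Illustrative usage sketch (not part of the original module): the helpers
# above back the public DSA sign/verify API, which hashes the message and
# then produces / checks the DER-encoded (r, s) signature:
#
#     from cryptography.hazmat.backends import default_backend
#     from cryptography.hazmat.primitives import hashes
#     from cryptography.hazmat.primitives.asymmetric import dsa
#
#     private_key = dsa.generate_private_key(
#         key_size=2048, backend=default_backend()
#     )
#     signature = private_key.sign(b"message", hashes.SHA256())
#     private_key.public_key().verify(signature, b"message", hashes.SHA256())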
|
||||
|
||||
|
||||
@utils.register_interface(AsymmetricVerificationContext)
|
||||
class _DSAVerificationContext(object):
|
||||
def __init__(self, backend, public_key, signature, algorithm):
|
||||
self._backend = backend
|
||||
self._public_key = public_key
|
||||
self._signature = signature
|
||||
self._algorithm = algorithm
|
||||
|
||||
self._hash_ctx = hashes.Hash(self._algorithm, self._backend)
|
||||
|
||||
def update(self, data):
|
||||
self._hash_ctx.update(data)
|
||||
|
||||
def verify(self):
|
||||
data_to_verify = self._hash_ctx.finalize()
|
||||
|
||||
_dsa_sig_verify(
|
||||
self._backend, self._public_key, self._signature, data_to_verify
|
||||
)
|
||||
|
||||
|
||||
@utils.register_interface(AsymmetricSignatureContext)
|
||||
class _DSASignatureContext(object):
|
||||
def __init__(self, backend, private_key, algorithm):
|
||||
self._backend = backend
|
||||
self._private_key = private_key
|
||||
self._algorithm = algorithm
|
||||
self._hash_ctx = hashes.Hash(self._algorithm, self._backend)
|
||||
|
||||
def update(self, data):
|
||||
self._hash_ctx.update(data)
|
||||
|
||||
def finalize(self):
|
||||
data_to_sign = self._hash_ctx.finalize()
|
||||
return _dsa_sig_sign(self._backend, self._private_key, data_to_sign)
|
||||
|
||||
|
||||
@utils.register_interface(dsa.DSAParametersWithNumbers)
|
||||
class _DSAParameters(object):
|
||||
def __init__(self, backend, dsa_cdata):
|
||||
self._backend = backend
|
||||
self._dsa_cdata = dsa_cdata
|
||||
|
||||
def parameter_numbers(self):
|
||||
p = self._backend._ffi.new("BIGNUM **")
|
||||
q = self._backend._ffi.new("BIGNUM **")
|
||||
g = self._backend._ffi.new("BIGNUM **")
|
||||
self._backend._lib.DSA_get0_pqg(self._dsa_cdata, p, q, g)
|
||||
self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
|
||||
self._backend.openssl_assert(q[0] != self._backend._ffi.NULL)
|
||||
self._backend.openssl_assert(g[0] != self._backend._ffi.NULL)
|
||||
return dsa.DSAParameterNumbers(
|
||||
p=self._backend._bn_to_int(p[0]),
|
||||
q=self._backend._bn_to_int(q[0]),
|
||||
g=self._backend._bn_to_int(g[0]),
|
||||
)
|
||||
|
||||
def generate_private_key(self):
|
||||
return self._backend.generate_dsa_private_key(self)
|
||||
|
||||
|
||||
@utils.register_interface(dsa.DSAPrivateKeyWithSerialization)
|
||||
class _DSAPrivateKey(object):
|
||||
def __init__(self, backend, dsa_cdata, evp_pkey):
|
||||
self._backend = backend
|
||||
self._dsa_cdata = dsa_cdata
|
||||
self._evp_pkey = evp_pkey
|
||||
|
||||
p = self._backend._ffi.new("BIGNUM **")
|
||||
self._backend._lib.DSA_get0_pqg(
|
||||
dsa_cdata, p, self._backend._ffi.NULL, self._backend._ffi.NULL
|
||||
)
|
||||
self._backend.openssl_assert(p[0] != backend._ffi.NULL)
|
||||
self._key_size = self._backend._lib.BN_num_bits(p[0])
|
||||
|
||||
key_size = utils.read_only_property("_key_size")
|
||||
|
||||
def signer(self, signature_algorithm):
|
||||
_warn_sign_verify_deprecated()
|
||||
_check_not_prehashed(signature_algorithm)
|
||||
return _DSASignatureContext(self._backend, self, signature_algorithm)
|
||||
|
||||
def private_numbers(self):
|
||||
p = self._backend._ffi.new("BIGNUM **")
|
||||
q = self._backend._ffi.new("BIGNUM **")
|
||||
g = self._backend._ffi.new("BIGNUM **")
|
||||
pub_key = self._backend._ffi.new("BIGNUM **")
|
||||
priv_key = self._backend._ffi.new("BIGNUM **")
|
||||
self._backend._lib.DSA_get0_pqg(self._dsa_cdata, p, q, g)
|
||||
self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
|
||||
self._backend.openssl_assert(q[0] != self._backend._ffi.NULL)
|
||||
self._backend.openssl_assert(g[0] != self._backend._ffi.NULL)
|
||||
self._backend._lib.DSA_get0_key(self._dsa_cdata, pub_key, priv_key)
|
||||
self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL)
|
||||
self._backend.openssl_assert(priv_key[0] != self._backend._ffi.NULL)
|
||||
return dsa.DSAPrivateNumbers(
|
||||
public_numbers=dsa.DSAPublicNumbers(
|
||||
parameter_numbers=dsa.DSAParameterNumbers(
|
||||
p=self._backend._bn_to_int(p[0]),
|
||||
q=self._backend._bn_to_int(q[0]),
|
||||
g=self._backend._bn_to_int(g[0]),
|
||||
),
|
||||
y=self._backend._bn_to_int(pub_key[0]),
|
||||
),
|
||||
x=self._backend._bn_to_int(priv_key[0]),
|
||||
)
|
||||
|
||||
def public_key(self):
|
||||
dsa_cdata = self._backend._lib.DSAparams_dup(self._dsa_cdata)
|
||||
self._backend.openssl_assert(dsa_cdata != self._backend._ffi.NULL)
|
||||
dsa_cdata = self._backend._ffi.gc(
|
||||
dsa_cdata, self._backend._lib.DSA_free
|
||||
)
|
||||
pub_key = self._backend._ffi.new("BIGNUM **")
|
||||
self._backend._lib.DSA_get0_key(
|
||||
self._dsa_cdata, pub_key, self._backend._ffi.NULL
|
||||
)
|
||||
self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL)
|
||||
pub_key_dup = self._backend._lib.BN_dup(pub_key[0])
|
||||
res = self._backend._lib.DSA_set0_key(
|
||||
dsa_cdata, pub_key_dup, self._backend._ffi.NULL
|
||||
)
|
||||
self._backend.openssl_assert(res == 1)
|
||||
evp_pkey = self._backend._dsa_cdata_to_evp_pkey(dsa_cdata)
|
||||
return _DSAPublicKey(self._backend, dsa_cdata, evp_pkey)
|
||||
|
||||
def parameters(self):
|
||||
dsa_cdata = self._backend._lib.DSAparams_dup(self._dsa_cdata)
|
||||
self._backend.openssl_assert(dsa_cdata != self._backend._ffi.NULL)
|
||||
dsa_cdata = self._backend._ffi.gc(
|
||||
dsa_cdata, self._backend._lib.DSA_free
|
||||
)
|
||||
return _DSAParameters(self._backend, dsa_cdata)
|
||||
|
||||
def private_bytes(self, encoding, format, encryption_algorithm):
|
||||
return self._backend._private_key_bytes(
|
||||
encoding,
|
||||
format,
|
||||
encryption_algorithm,
|
||||
self,
|
||||
self._evp_pkey,
|
||||
self._dsa_cdata,
|
||||
)
|
||||
|
||||
def sign(self, data, algorithm):
|
||||
data, algorithm = _calculate_digest_and_algorithm(
|
||||
self._backend, data, algorithm
|
||||
)
|
||||
return _dsa_sig_sign(self._backend, self, data)
|
||||
|
||||
|
||||
@utils.register_interface(dsa.DSAPublicKeyWithSerialization)
|
||||
class _DSAPublicKey(object):
|
||||
def __init__(self, backend, dsa_cdata, evp_pkey):
|
||||
self._backend = backend
|
||||
self._dsa_cdata = dsa_cdata
|
||||
self._evp_pkey = evp_pkey
|
||||
p = self._backend._ffi.new("BIGNUM **")
|
||||
self._backend._lib.DSA_get0_pqg(
|
||||
dsa_cdata, p, self._backend._ffi.NULL, self._backend._ffi.NULL
|
||||
)
|
||||
self._backend.openssl_assert(p[0] != backend._ffi.NULL)
|
||||
self._key_size = self._backend._lib.BN_num_bits(p[0])
|
||||
|
||||
key_size = utils.read_only_property("_key_size")
|
||||
|
||||
def verifier(self, signature, signature_algorithm):
|
||||
_warn_sign_verify_deprecated()
|
||||
utils._check_bytes("signature", signature)
|
||||
|
||||
_check_not_prehashed(signature_algorithm)
|
||||
return _DSAVerificationContext(
|
||||
self._backend, self, signature, signature_algorithm
|
||||
)
|
||||
|
||||
def public_numbers(self):
|
||||
p = self._backend._ffi.new("BIGNUM **")
|
||||
q = self._backend._ffi.new("BIGNUM **")
|
||||
g = self._backend._ffi.new("BIGNUM **")
|
||||
pub_key = self._backend._ffi.new("BIGNUM **")
|
||||
self._backend._lib.DSA_get0_pqg(self._dsa_cdata, p, q, g)
|
||||
self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
|
||||
self._backend.openssl_assert(q[0] != self._backend._ffi.NULL)
|
||||
self._backend.openssl_assert(g[0] != self._backend._ffi.NULL)
|
||||
self._backend._lib.DSA_get0_key(
|
||||
self._dsa_cdata, pub_key, self._backend._ffi.NULL
|
||||
)
|
||||
self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL)
|
||||
return dsa.DSAPublicNumbers(
|
||||
parameter_numbers=dsa.DSAParameterNumbers(
|
||||
p=self._backend._bn_to_int(p[0]),
|
||||
q=self._backend._bn_to_int(q[0]),
|
||||
g=self._backend._bn_to_int(g[0]),
|
||||
),
|
||||
y=self._backend._bn_to_int(pub_key[0]),
|
||||
)
|
||||
|
||||
def parameters(self):
|
||||
dsa_cdata = self._backend._lib.DSAparams_dup(self._dsa_cdata)
|
||||
dsa_cdata = self._backend._ffi.gc(
|
||||
dsa_cdata, self._backend._lib.DSA_free
|
||||
)
|
||||
return _DSAParameters(self._backend, dsa_cdata)
|
||||
|
||||
def public_bytes(self, encoding, format):
|
||||
return self._backend._public_key_bytes(
|
||||
encoding, format, self, self._evp_pkey, None
|
||||
)
|
||||
|
||||
def verify(self, signature, data, algorithm):
|
||||
data, algorithm = _calculate_digest_and_algorithm(
|
||||
self._backend, data, algorithm
|
||||
)
|
||||
return _dsa_sig_verify(self._backend, self, signature, data)
@ -0,0 +1,337 @@
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.exceptions import (
|
||||
InvalidSignature,
|
||||
UnsupportedAlgorithm,
|
||||
_Reasons,
|
||||
)
|
||||
from cryptography.hazmat.backends.openssl.utils import (
|
||||
_calculate_digest_and_algorithm,
|
||||
_check_not_prehashed,
|
||||
_warn_sign_verify_deprecated,
|
||||
)
|
||||
from cryptography.hazmat.primitives import hashes, serialization
|
||||
from cryptography.hazmat.primitives.asymmetric import (
|
||||
AsymmetricSignatureContext,
|
||||
AsymmetricVerificationContext,
|
||||
ec,
|
||||
)
|
||||
|
||||
|
||||
def _check_signature_algorithm(signature_algorithm):
|
||||
if not isinstance(signature_algorithm, ec.ECDSA):
|
||||
raise UnsupportedAlgorithm(
|
||||
"Unsupported elliptic curve signature algorithm.",
|
||||
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
|
||||
)
|
||||
|
||||
|
||||
def _ec_key_curve_sn(backend, ec_key):
|
||||
group = backend._lib.EC_KEY_get0_group(ec_key)
|
||||
backend.openssl_assert(group != backend._ffi.NULL)
|
||||
|
||||
nid = backend._lib.EC_GROUP_get_curve_name(group)
|
||||
# The following check is to find EC keys with unnamed curves and raise
|
||||
# an error for now.
|
||||
if nid == backend._lib.NID_undef:
|
||||
raise NotImplementedError(
|
||||
"ECDSA keys with unnamed curves are unsupported " "at this time"
|
||||
)
|
||||
|
||||
# This is like the above check, but it also catches the case where you
|
||||
# explicitly encoded a curve with the same parameters as a named curve.
|
||||
# Don't do that.
|
||||
if (
|
||||
backend._lib.CRYPTOGRAPHY_OPENSSL_102U_OR_GREATER
|
||||
and backend._lib.EC_GROUP_get_asn1_flag(group) == 0
|
||||
):
|
||||
raise NotImplementedError(
|
||||
"ECDSA keys with unnamed curves are unsupported " "at this time"
|
||||
)
|
||||
|
||||
curve_name = backend._lib.OBJ_nid2sn(nid)
|
||||
backend.openssl_assert(curve_name != backend._ffi.NULL)
|
||||
|
||||
sn = backend._ffi.string(curve_name).decode("ascii")
|
||||
return sn
|
||||
|
||||
|
||||
def _mark_asn1_named_ec_curve(backend, ec_cdata):
|
||||
"""
|
||||
Set the named curve flag on the EC_KEY. This causes OpenSSL to
|
||||
serialize EC keys along with their curve OID which makes
|
||||
deserialization easier.
|
||||
"""
|
||||
|
||||
backend._lib.EC_KEY_set_asn1_flag(
|
||||
ec_cdata, backend._lib.OPENSSL_EC_NAMED_CURVE
|
||||
)
|
||||
|
||||
|
||||
def _sn_to_elliptic_curve(backend, sn):
|
||||
try:
|
||||
return ec._CURVE_TYPES[sn]()
|
||||
except KeyError:
|
||||
raise UnsupportedAlgorithm(
|
||||
"{} is not a supported elliptic curve".format(sn),
|
||||
_Reasons.UNSUPPORTED_ELLIPTIC_CURVE,
|
||||
)
|
||||
|
||||
|
||||
def _ecdsa_sig_sign(backend, private_key, data):
|
||||
max_size = backend._lib.ECDSA_size(private_key._ec_key)
|
||||
backend.openssl_assert(max_size > 0)
|
||||
|
||||
sigbuf = backend._ffi.new("unsigned char[]", max_size)
|
||||
siglen_ptr = backend._ffi.new("unsigned int[]", 1)
|
||||
res = backend._lib.ECDSA_sign(
|
||||
0, data, len(data), sigbuf, siglen_ptr, private_key._ec_key
|
||||
)
|
||||
backend.openssl_assert(res == 1)
|
||||
return backend._ffi.buffer(sigbuf)[: siglen_ptr[0]]
|
||||
|
||||
|
||||
def _ecdsa_sig_verify(backend, public_key, signature, data):
|
||||
res = backend._lib.ECDSA_verify(
|
||||
0, data, len(data), signature, len(signature), public_key._ec_key
|
||||
)
|
||||
if res != 1:
|
||||
backend._consume_errors()
|
||||
raise InvalidSignature
|
||||
|
||||
|
||||
@utils.register_interface(AsymmetricSignatureContext)
|
||||
class _ECDSASignatureContext(object):
|
||||
def __init__(self, backend, private_key, algorithm):
|
||||
self._backend = backend
|
||||
self._private_key = private_key
|
||||
self._digest = hashes.Hash(algorithm, backend)
|
||||
|
||||
def update(self, data):
|
||||
self._digest.update(data)
|
||||
|
||||
def finalize(self):
|
||||
digest = self._digest.finalize()
|
||||
|
||||
return _ecdsa_sig_sign(self._backend, self._private_key, digest)
|
||||
|
||||
|
||||
@utils.register_interface(AsymmetricVerificationContext)
|
||||
class _ECDSAVerificationContext(object):
|
||||
def __init__(self, backend, public_key, signature, algorithm):
|
||||
self._backend = backend
|
||||
self._public_key = public_key
|
||||
self._signature = signature
|
||||
self._digest = hashes.Hash(algorithm, backend)
|
||||
|
||||
def update(self, data):
|
||||
self._digest.update(data)
|
||||
|
||||
def verify(self):
|
||||
digest = self._digest.finalize()
|
||||
_ecdsa_sig_verify(
|
||||
self._backend, self._public_key, self._signature, digest
|
||||
)
|
||||
|
||||
|
||||
@utils.register_interface(ec.EllipticCurvePrivateKeyWithSerialization)
|
||||
class _EllipticCurvePrivateKey(object):
|
||||
def __init__(self, backend, ec_key_cdata, evp_pkey):
|
||||
self._backend = backend
|
||||
self._ec_key = ec_key_cdata
|
||||
self._evp_pkey = evp_pkey
|
||||
|
||||
sn = _ec_key_curve_sn(backend, ec_key_cdata)
|
||||
self._curve = _sn_to_elliptic_curve(backend, sn)
|
||||
_mark_asn1_named_ec_curve(backend, ec_key_cdata)
|
||||
|
||||
curve = utils.read_only_property("_curve")
|
||||
|
||||
@property
|
||||
def key_size(self):
|
||||
return self.curve.key_size
|
||||
|
||||
def signer(self, signature_algorithm):
|
||||
_warn_sign_verify_deprecated()
|
||||
_check_signature_algorithm(signature_algorithm)
|
||||
_check_not_prehashed(signature_algorithm.algorithm)
|
||||
return _ECDSASignatureContext(
|
||||
self._backend, self, signature_algorithm.algorithm
|
||||
)
|
||||
|
||||
def exchange(self, algorithm, peer_public_key):
|
||||
if not (
|
||||
self._backend.elliptic_curve_exchange_algorithm_supported(
|
||||
algorithm, self.curve
|
||||
)
|
||||
):
|
||||
raise UnsupportedAlgorithm(
|
||||
"This backend does not support the ECDH algorithm.",
|
||||
_Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
|
||||
)
|
||||
|
||||
if peer_public_key.curve.name != self.curve.name:
|
||||
raise ValueError(
|
||||
"peer_public_key and self are not on the same curve"
|
||||
)
|
||||
|
||||
group = self._backend._lib.EC_KEY_get0_group(self._ec_key)
|
||||
z_len = (self._backend._lib.EC_GROUP_get_degree(group) + 7) // 8
|
||||
self._backend.openssl_assert(z_len > 0)
|
||||
z_buf = self._backend._ffi.new("uint8_t[]", z_len)
|
||||
peer_key = self._backend._lib.EC_KEY_get0_public_key(
|
||||
peer_public_key._ec_key
|
||||
)
|
||||
|
||||
r = self._backend._lib.ECDH_compute_key(
|
||||
z_buf, z_len, peer_key, self._ec_key, self._backend._ffi.NULL
|
||||
)
|
||||
self._backend.openssl_assert(r > 0)
|
||||
return self._backend._ffi.buffer(z_buf)[:z_len]
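    # Illustrative usage sketch (not part of the original module): ECDH via
    # the public API; both sides must use the same named curve, and the
    # shared secret is the x-coordinate of the shared point, sized from
    # EC_GROUP_get_degree as computed above:
    #
    #     from cryptography.hazmat.backends import default_backend
    #     from cryptography.hazmat.primitives.asymmetric import ec
    #
    #     priv_a = ec.generate_private_key(ec.SECP256R1(), default_backend())
    #     priv_b = ec.generate_private_key(ec.SECP256R1(), default_backend())
    #     shared_a = priv_a.exchange(ec.ECDH(), priv_b.public_key())
    #     shared_b = priv_b.exchange(ec.ECDH(), priv_a.public_key())
    #     assert shared_a == shared_b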
|
||||
|
||||
def public_key(self):
|
||||
group = self._backend._lib.EC_KEY_get0_group(self._ec_key)
|
||||
self._backend.openssl_assert(group != self._backend._ffi.NULL)
|
||||
|
||||
curve_nid = self._backend._lib.EC_GROUP_get_curve_name(group)
|
||||
public_ec_key = self._backend._ec_key_new_by_curve_nid(curve_nid)
|
||||
|
||||
point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key)
|
||||
self._backend.openssl_assert(point != self._backend._ffi.NULL)
|
||||
|
||||
res = self._backend._lib.EC_KEY_set_public_key(public_ec_key, point)
|
||||
self._backend.openssl_assert(res == 1)
|
||||
|
||||
evp_pkey = self._backend._ec_cdata_to_evp_pkey(public_ec_key)
|
||||
|
||||
return _EllipticCurvePublicKey(self._backend, public_ec_key, evp_pkey)
|
||||
|
||||
def private_numbers(self):
|
||||
bn = self._backend._lib.EC_KEY_get0_private_key(self._ec_key)
|
||||
private_value = self._backend._bn_to_int(bn)
|
||||
return ec.EllipticCurvePrivateNumbers(
|
||||
private_value=private_value,
|
||||
public_numbers=self.public_key().public_numbers(),
|
||||
)
|
||||
|
||||
def private_bytes(self, encoding, format, encryption_algorithm):
|
||||
return self._backend._private_key_bytes(
|
||||
encoding,
|
||||
format,
|
||||
encryption_algorithm,
|
||||
self,
|
||||
self._evp_pkey,
|
||||
self._ec_key,
|
||||
)
|
||||
|
||||
def sign(self, data, signature_algorithm):
|
||||
_check_signature_algorithm(signature_algorithm)
|
||||
data, algorithm = _calculate_digest_and_algorithm(
|
||||
self._backend, data, signature_algorithm._algorithm
|
||||
)
|
||||
return _ecdsa_sig_sign(self._backend, self, data)
|
||||
|
||||
|
||||
@utils.register_interface(ec.EllipticCurvePublicKeyWithSerialization)
|
||||
class _EllipticCurvePublicKey(object):
|
||||
def __init__(self, backend, ec_key_cdata, evp_pkey):
|
||||
self._backend = backend
|
||||
self._ec_key = ec_key_cdata
|
||||
self._evp_pkey = evp_pkey
|
||||
|
||||
sn = _ec_key_curve_sn(backend, ec_key_cdata)
|
||||
self._curve = _sn_to_elliptic_curve(backend, sn)
|
||||
_mark_asn1_named_ec_curve(backend, ec_key_cdata)
|
||||
|
||||
curve = utils.read_only_property("_curve")
|
||||
|
||||
@property
|
||||
def key_size(self):
|
||||
return self.curve.key_size
|
||||
|
||||
def verifier(self, signature, signature_algorithm):
|
||||
_warn_sign_verify_deprecated()
|
||||
utils._check_bytes("signature", signature)
|
||||
|
||||
_check_signature_algorithm(signature_algorithm)
|
||||
_check_not_prehashed(signature_algorithm.algorithm)
|
||||
return _ECDSAVerificationContext(
|
||||
self._backend, self, signature, signature_algorithm.algorithm
|
||||
)
|
||||
|
||||
def public_numbers(self):
|
||||
get_func, group = self._backend._ec_key_determine_group_get_func(
|
||||
self._ec_key
|
||||
)
|
||||
point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key)
|
||||
self._backend.openssl_assert(point != self._backend._ffi.NULL)
|
||||
|
||||
with self._backend._tmp_bn_ctx() as bn_ctx:
|
||||
bn_x = self._backend._lib.BN_CTX_get(bn_ctx)
|
||||
bn_y = self._backend._lib.BN_CTX_get(bn_ctx)
|
||||
|
||||
res = get_func(group, point, bn_x, bn_y, bn_ctx)
|
||||
self._backend.openssl_assert(res == 1)
|
||||
|
||||
x = self._backend._bn_to_int(bn_x)
|
||||
y = self._backend._bn_to_int(bn_y)
|
||||
|
||||
return ec.EllipticCurvePublicNumbers(x=x, y=y, curve=self._curve)
|
||||
|
||||
def _encode_point(self, format):
|
||||
if format is serialization.PublicFormat.CompressedPoint:
|
||||
conversion = self._backend._lib.POINT_CONVERSION_COMPRESSED
|
||||
else:
|
||||
assert format is serialization.PublicFormat.UncompressedPoint
|
||||
conversion = self._backend._lib.POINT_CONVERSION_UNCOMPRESSED
|
||||
|
||||
group = self._backend._lib.EC_KEY_get0_group(self._ec_key)
|
||||
self._backend.openssl_assert(group != self._backend._ffi.NULL)
|
||||
point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key)
|
||||
self._backend.openssl_assert(point != self._backend._ffi.NULL)
|
||||
with self._backend._tmp_bn_ctx() as bn_ctx:
|
||||
buflen = self._backend._lib.EC_POINT_point2oct(
|
||||
group, point, conversion, self._backend._ffi.NULL, 0, bn_ctx
|
||||
)
|
||||
self._backend.openssl_assert(buflen > 0)
|
||||
buf = self._backend._ffi.new("char[]", buflen)
|
||||
res = self._backend._lib.EC_POINT_point2oct(
|
||||
group, point, conversion, buf, buflen, bn_ctx
|
||||
)
|
||||
self._backend.openssl_assert(buflen == res)
|
||||
|
||||
return self._backend._ffi.buffer(buf)[:]
|
||||
|
||||
def public_bytes(self, encoding, format):
|
||||
|
||||
if (
|
||||
encoding is serialization.Encoding.X962
|
||||
or format is serialization.PublicFormat.CompressedPoint
|
||||
or format is serialization.PublicFormat.UncompressedPoint
|
||||
):
|
||||
if encoding is not serialization.Encoding.X962 or format not in (
|
||||
serialization.PublicFormat.CompressedPoint,
|
||||
serialization.PublicFormat.UncompressedPoint,
|
||||
):
|
||||
raise ValueError(
|
||||
"X962 encoding must be used with CompressedPoint or "
|
||||
"UncompressedPoint format"
|
||||
)
|
||||
|
||||
return self._encode_point(format)
|
||||
else:
|
||||
return self._backend._public_key_bytes(
|
||||
encoding, format, self, self._evp_pkey, None
|
||||
)
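    # Illustrative usage sketch (not part of the original module): X9.62
    # point serialization via the public API, given an elliptic curve public
    # key object `public_key`. A compressed P-256 point is 33 bytes (a
    # 0x02/0x03 prefix plus the 32-byte x-coordinate); uncompressed is 65
    # bytes with a 0x04 prefix:
    #
    #     from cryptography.hazmat.primitives import serialization
    #
    #     data = public_key.public_bytes(
    #         serialization.Encoding.X962,
    #         serialization.PublicFormat.CompressedPoint,
    #     )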
|
||||
|
||||
def verify(self, signature, data, signature_algorithm):
|
||||
_check_signature_algorithm(signature_algorithm)
|
||||
data, algorithm = _calculate_digest_and_algorithm(
|
||||
self._backend, data, signature_algorithm._algorithm
|
||||
)
|
||||
_ecdsa_sig_verify(self._backend, self, signature, data)
@ -0,0 +1,145 @@
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from cryptography import exceptions, utils
|
||||
from cryptography.hazmat.primitives import serialization
|
||||
from cryptography.hazmat.primitives.asymmetric.ed25519 import (
|
||||
Ed25519PrivateKey,
|
||||
Ed25519PublicKey,
|
||||
_ED25519_KEY_SIZE,
|
||||
_ED25519_SIG_SIZE,
|
||||
)
|
||||
|
||||
|
||||
@utils.register_interface(Ed25519PublicKey)
|
||||
class _Ed25519PublicKey(object):
|
||||
def __init__(self, backend, evp_pkey):
|
||||
self._backend = backend
|
||||
self._evp_pkey = evp_pkey
|
||||
|
||||
def public_bytes(self, encoding, format):
|
||||
if (
|
||||
encoding is serialization.Encoding.Raw
|
||||
or format is serialization.PublicFormat.Raw
|
||||
):
|
||||
if (
|
||||
encoding is not serialization.Encoding.Raw
|
||||
or format is not serialization.PublicFormat.Raw
|
||||
):
|
||||
raise ValueError(
|
||||
"When using Raw both encoding and format must be Raw"
|
||||
)
|
||||
|
||||
return self._raw_public_bytes()
|
||||
|
||||
return self._backend._public_key_bytes(
|
||||
encoding, format, self, self._evp_pkey, None
|
||||
)
|
||||
|
||||
def _raw_public_bytes(self):
|
||||
buf = self._backend._ffi.new("unsigned char []", _ED25519_KEY_SIZE)
|
||||
buflen = self._backend._ffi.new("size_t *", _ED25519_KEY_SIZE)
|
||||
res = self._backend._lib.EVP_PKEY_get_raw_public_key(
|
||||
self._evp_pkey, buf, buflen
|
||||
)
|
||||
self._backend.openssl_assert(res == 1)
|
||||
self._backend.openssl_assert(buflen[0] == _ED25519_KEY_SIZE)
|
||||
return self._backend._ffi.buffer(buf, _ED25519_KEY_SIZE)[:]
|
||||
|
||||
def verify(self, signature, data):
|
||||
evp_md_ctx = self._backend._lib.Cryptography_EVP_MD_CTX_new()
|
||||
self._backend.openssl_assert(evp_md_ctx != self._backend._ffi.NULL)
|
||||
evp_md_ctx = self._backend._ffi.gc(
|
||||
evp_md_ctx, self._backend._lib.Cryptography_EVP_MD_CTX_free
|
||||
)
|
||||
res = self._backend._lib.EVP_DigestVerifyInit(
|
||||
evp_md_ctx,
|
||||
self._backend._ffi.NULL,
|
||||
self._backend._ffi.NULL,
|
||||
self._backend._ffi.NULL,
|
||||
self._evp_pkey,
|
||||
)
|
||||
self._backend.openssl_assert(res == 1)
|
||||
res = self._backend._lib.EVP_DigestVerify(
|
||||
evp_md_ctx, signature, len(signature), data, len(data)
|
||||
)
|
||||
if res != 1:
|
||||
self._backend._consume_errors()
|
||||
raise exceptions.InvalidSignature
|
||||
|
||||
|
||||
@utils.register_interface(Ed25519PrivateKey)
|
||||
class _Ed25519PrivateKey(object):
|
||||
def __init__(self, backend, evp_pkey):
|
||||
self._backend = backend
|
||||
self._evp_pkey = evp_pkey
|
||||
|
||||
def public_key(self):
|
||||
buf = self._backend._ffi.new("unsigned char []", _ED25519_KEY_SIZE)
|
||||
buflen = self._backend._ffi.new("size_t *", _ED25519_KEY_SIZE)
|
||||
res = self._backend._lib.EVP_PKEY_get_raw_public_key(
|
||||
self._evp_pkey, buf, buflen
|
||||
)
|
||||
self._backend.openssl_assert(res == 1)
|
||||
self._backend.openssl_assert(buflen[0] == _ED25519_KEY_SIZE)
|
||||
public_bytes = self._backend._ffi.buffer(buf)[:]
|
||||
return self._backend.ed25519_load_public_bytes(public_bytes)
|
||||
|
||||
def sign(self, data):
|
||||
evp_md_ctx = self._backend._lib.Cryptography_EVP_MD_CTX_new()
|
||||
self._backend.openssl_assert(evp_md_ctx != self._backend._ffi.NULL)
|
||||
evp_md_ctx = self._backend._ffi.gc(
|
||||
evp_md_ctx, self._backend._lib.Cryptography_EVP_MD_CTX_free
|
||||
)
|
||||
res = self._backend._lib.EVP_DigestSignInit(
|
||||
evp_md_ctx,
|
||||
self._backend._ffi.NULL,
|
||||
self._backend._ffi.NULL,
|
||||
self._backend._ffi.NULL,
|
||||
self._evp_pkey,
|
||||
)
|
||||
self._backend.openssl_assert(res == 1)
|
||||
buf = self._backend._ffi.new("unsigned char[]", _ED25519_SIG_SIZE)
|
||||
buflen = self._backend._ffi.new("size_t *", len(buf))
|
||||
res = self._backend._lib.EVP_DigestSign(
|
||||
evp_md_ctx, buf, buflen, data, len(data)
|
||||
)
|
||||
self._backend.openssl_assert(res == 1)
|
||||
self._backend.openssl_assert(buflen[0] == _ED25519_SIG_SIZE)
|
||||
return self._backend._ffi.buffer(buf, buflen[0])[:]
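    # Illustrative usage sketch (not part of the original module): Ed25519 is
    # a one-shot signature scheme, so the public API takes the whole message
    # and returns a 64-byte signature:
    #
    #     from cryptography.hazmat.primitives.asymmetric.ed25519 import (
    #         Ed25519PrivateKey,
    #     )
    #
    #     key = Ed25519PrivateKey.generate()
    #     sig = key.sign(b"message")  # len(sig) == 64
    #     key.public_key().verify(sig, b"message")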
|
||||
|
||||
def private_bytes(self, encoding, format, encryption_algorithm):
|
||||
if (
|
||||
encoding is serialization.Encoding.Raw
|
||||
or format is serialization.PublicFormat.Raw
|
||||
):
|
||||
if (
|
||||
format is not serialization.PrivateFormat.Raw
|
||||
or encoding is not serialization.Encoding.Raw
|
||||
or not isinstance(
|
||||
encryption_algorithm, serialization.NoEncryption
|
||||
)
|
||||
):
|
||||
raise ValueError(
|
||||
"When using Raw both encoding and format must be Raw "
|
||||
"and encryption_algorithm must be NoEncryption()"
|
||||
)
|
||||
|
||||
return self._raw_private_bytes()
|
||||
|
||||
return self._backend._private_key_bytes(
|
||||
encoding, format, encryption_algorithm, self, self._evp_pkey, None
|
||||
)
|
||||
|
||||
def _raw_private_bytes(self):
|
||||
buf = self._backend._ffi.new("unsigned char []", _ED25519_KEY_SIZE)
|
||||
buflen = self._backend._ffi.new("size_t *", _ED25519_KEY_SIZE)
|
||||
res = self._backend._lib.EVP_PKEY_get_raw_private_key(
|
||||
self._evp_pkey, buf, buflen
|
||||
)
|
||||
self._backend.openssl_assert(res == 1)
|
||||
self._backend.openssl_assert(buflen[0] == _ED25519_KEY_SIZE)
|
||||
return self._backend._ffi.buffer(buf, _ED25519_KEY_SIZE)[:]
@ -0,0 +1,146 @@
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from cryptography import exceptions, utils
|
||||
from cryptography.hazmat.primitives import serialization
|
||||
from cryptography.hazmat.primitives.asymmetric.ed448 import (
|
||||
Ed448PrivateKey,
|
||||
Ed448PublicKey,
|
||||
)
|
||||
|
||||
_ED448_KEY_SIZE = 57
|
||||
_ED448_SIG_SIZE = 114
|
||||
|
||||
|
||||
@utils.register_interface(Ed448PublicKey)
|
||||
class _Ed448PublicKey(object):
|
||||
def __init__(self, backend, evp_pkey):
|
||||
self._backend = backend
|
||||
self._evp_pkey = evp_pkey
|
||||
|
||||
def public_bytes(self, encoding, format):
|
||||
if (
|
||||
encoding is serialization.Encoding.Raw
|
||||
or format is serialization.PublicFormat.Raw
|
||||
):
|
||||
if (
|
||||
encoding is not serialization.Encoding.Raw
|
||||
or format is not serialization.PublicFormat.Raw
|
||||
):
|
||||
raise ValueError(
|
||||
"When using Raw both encoding and format must be Raw"
|
||||
)
|
||||
|
||||
return self._raw_public_bytes()
|
||||
|
||||
return self._backend._public_key_bytes(
|
||||
encoding, format, self, self._evp_pkey, None
|
||||
)
|
||||
|
||||
def _raw_public_bytes(self):
|
||||
buf = self._backend._ffi.new("unsigned char []", _ED448_KEY_SIZE)
|
||||
buflen = self._backend._ffi.new("size_t *", _ED448_KEY_SIZE)
|
||||
res = self._backend._lib.EVP_PKEY_get_raw_public_key(
|
||||
self._evp_pkey, buf, buflen
|
||||
)
|
||||
self._backend.openssl_assert(res == 1)
|
||||
self._backend.openssl_assert(buflen[0] == _ED448_KEY_SIZE)
|
||||
return self._backend._ffi.buffer(buf, _ED448_KEY_SIZE)[:]
|
||||
|
||||
def verify(self, signature, data):
|
||||
evp_md_ctx = self._backend._lib.Cryptography_EVP_MD_CTX_new()
|
||||
self._backend.openssl_assert(evp_md_ctx != self._backend._ffi.NULL)
|
||||
evp_md_ctx = self._backend._ffi.gc(
|
||||
evp_md_ctx, self._backend._lib.Cryptography_EVP_MD_CTX_free
|
||||
)
|
||||
res = self._backend._lib.EVP_DigestVerifyInit(
|
||||
evp_md_ctx,
|
||||
self._backend._ffi.NULL,
|
||||
self._backend._ffi.NULL,
|
||||
self._backend._ffi.NULL,
|
||||
self._evp_pkey,
|
||||
)
|
||||
self._backend.openssl_assert(res == 1)
|
||||
res = self._backend._lib.EVP_DigestVerify(
|
||||
evp_md_ctx, signature, len(signature), data, len(data)
|
||||
)
|
||||
if res != 1:
|
||||
self._backend._consume_errors()
|
||||
raise exceptions.InvalidSignature
|
||||
|
||||
|
||||
@utils.register_interface(Ed448PrivateKey)
|
||||
class _Ed448PrivateKey(object):
|
||||
def __init__(self, backend, evp_pkey):
|
||||
self._backend = backend
|
||||
self._evp_pkey = evp_pkey
|
||||
|
||||
def public_key(self):
|
||||
buf = self._backend._ffi.new("unsigned char []", _ED448_KEY_SIZE)
|
||||
buflen = self._backend._ffi.new("size_t *", _ED448_KEY_SIZE)
|
||||
res = self._backend._lib.EVP_PKEY_get_raw_public_key(
|
||||
self._evp_pkey, buf, buflen
|
||||
)
|
||||
self._backend.openssl_assert(res == 1)
|
||||
self._backend.openssl_assert(buflen[0] == _ED448_KEY_SIZE)
|
||||
public_bytes = self._backend._ffi.buffer(buf)[:]
|
||||
return self._backend.ed448_load_public_bytes(public_bytes)
|
||||
|
||||
def sign(self, data):
|
||||
evp_md_ctx = self._backend._lib.Cryptography_EVP_MD_CTX_new()
|
||||
self._backend.openssl_assert(evp_md_ctx != self._backend._ffi.NULL)
|
||||
evp_md_ctx = self._backend._ffi.gc(
|
||||
evp_md_ctx, self._backend._lib.Cryptography_EVP_MD_CTX_free
|
||||
)
|
||||
res = self._backend._lib.EVP_DigestSignInit(
|
||||
evp_md_ctx,
|
||||
self._backend._ffi.NULL,
|
||||
self._backend._ffi.NULL,
|
||||
self._backend._ffi.NULL,
|
||||
self._evp_pkey,
|
||||
)
|
||||
self._backend.openssl_assert(res == 1)
|
||||
buf = self._backend._ffi.new("unsigned char[]", _ED448_SIG_SIZE)
|
||||
buflen = self._backend._ffi.new("size_t *", len(buf))
|
||||
res = self._backend._lib.EVP_DigestSign(
|
||||
evp_md_ctx, buf, buflen, data, len(data)
|
||||
)
|
||||
self._backend.openssl_assert(res == 1)
|
||||
self._backend.openssl_assert(buflen[0] == _ED448_SIG_SIZE)
|
||||
return self._backend._ffi.buffer(buf, buflen[0])[:]
|
||||
|
||||
def private_bytes(self, encoding, format, encryption_algorithm):
|
||||
if (
|
||||
encoding is serialization.Encoding.Raw
|
||||
or format is serialization.PublicFormat.Raw
|
||||
):
|
||||
if (
|
||||
format is not serialization.PrivateFormat.Raw
|
||||
or encoding is not serialization.Encoding.Raw
|
||||
or not isinstance(
|
||||
encryption_algorithm, serialization.NoEncryption
|
||||
)
|
||||
):
|
||||
raise ValueError(
|
||||
"When using Raw both encoding and format must be Raw "
|
||||
"and encryption_algorithm must be NoEncryption()"
|
||||
)
|
||||
|
||||
return self._raw_private_bytes()
|
||||
|
||||
return self._backend._private_key_bytes(
|
||||
encoding, format, encryption_algorithm, self, self._evp_pkey, None
|
||||
)
|
||||
|
||||
def _raw_private_bytes(self):
|
||||
buf = self._backend._ffi.new("unsigned char []", _ED448_KEY_SIZE)
|
||||
buflen = self._backend._ffi.new("size_t *", _ED448_KEY_SIZE)
|
||||
res = self._backend._lib.EVP_PKEY_get_raw_private_key(
|
||||
self._evp_pkey, buf, buflen
|
||||
)
|
||||
self._backend.openssl_assert(res == 1)
|
||||
self._backend.openssl_assert(buflen[0] == _ED448_KEY_SIZE)
|
||||
return self._backend._ffi.buffer(buf, _ED448_KEY_SIZE)[:]
|
||||
|
|
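A hedged sketch of how the _Ed448PrivateKey.sign / _Ed448PublicKey.verify paths above are exercised through the public API (assumes an OpenSSL build with Ed448 enabled; the message content is illustrative):

from cryptography.exceptions import InvalidSignature
from cryptography.hazmat.primitives.asymmetric.ed448 import Ed448PrivateKey

private_key = Ed448PrivateKey.generate()
signature = private_key.sign(b"example message")  # 114 bytes (_ED448_SIG_SIZE)
public_key = private_key.public_key()
try:
    public_key.verify(signature, b"example message")
except InvalidSignature:
    print("signature did not verify")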
@ -0,0 +1,657 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import calendar
|
||||
import ipaddress
|
||||
|
||||
import six
|
||||
|
||||
from cryptography import utils, x509
|
||||
from cryptography.hazmat.backends.openssl.decode_asn1 import (
|
||||
_CRL_ENTRY_REASON_ENUM_TO_CODE,
|
||||
_DISTPOINT_TYPE_FULLNAME,
|
||||
_DISTPOINT_TYPE_RELATIVENAME,
|
||||
)
|
||||
from cryptography.x509.name import _ASN1Type
|
||||
from cryptography.x509.oid import (
|
||||
CRLEntryExtensionOID,
|
||||
ExtensionOID,
|
||||
OCSPExtensionOID,
|
||||
)
|
||||
|
||||
|
||||
def _encode_asn1_int(backend, x):
|
||||
"""
|
||||
Converts a python integer to an ASN1_INTEGER. The returned ASN1_INTEGER
|
||||
will not be garbage collected (to support adding them to structs that take
|
||||
ownership of the object). Be sure to register it for GC if it will be
|
||||
discarded after use.
|
||||
|
||||
"""
|
||||
# Convert Python integer to OpenSSL "bignum" in case value exceeds
|
||||
# machine's native integer limits (note: `int_to_bn` doesn't automatically
|
||||
# GC).
|
||||
i = backend._int_to_bn(x)
|
||||
i = backend._ffi.gc(i, backend._lib.BN_free)
|
||||
|
||||
# Wrap in an ASN.1 integer. Don't GC -- as documented.
|
||||
i = backend._lib.BN_to_ASN1_INTEGER(i, backend._ffi.NULL)
|
||||
backend.openssl_assert(i != backend._ffi.NULL)
|
||||
return i
|
||||
|
||||
|
||||
def _encode_asn1_int_gc(backend, x):
|
||||
i = _encode_asn1_int(backend, x)
|
||||
i = backend._ffi.gc(i, backend._lib.ASN1_INTEGER_free)
|
||||
return i
|
||||
|
||||
|
||||
def _encode_asn1_str(backend, data):
|
||||
"""
|
||||
Create an ASN1_OCTET_STRING from a Python byte string.
|
||||
"""
|
||||
s = backend._lib.ASN1_OCTET_STRING_new()
|
||||
res = backend._lib.ASN1_OCTET_STRING_set(s, data, len(data))
|
||||
backend.openssl_assert(res == 1)
|
||||
return s
|
||||
|
||||
|
||||
def _encode_asn1_utf8_str(backend, string):
|
||||
"""
|
||||
Create an ASN1_UTF8STRING from a Python unicode string.
|
||||
This object will be an ASN1_STRING with UTF8 type in OpenSSL and
|
||||
can be decoded with ASN1_STRING_to_UTF8.
|
||||
"""
|
||||
s = backend._lib.ASN1_UTF8STRING_new()
|
||||
res = backend._lib.ASN1_STRING_set(
|
||||
s, string.encode("utf8"), len(string.encode("utf8"))
|
||||
)
|
||||
backend.openssl_assert(res == 1)
|
||||
return s
|
||||
|
||||
|
||||
def _encode_asn1_str_gc(backend, data):
|
||||
s = _encode_asn1_str(backend, data)
|
||||
s = backend._ffi.gc(s, backend._lib.ASN1_OCTET_STRING_free)
|
||||
return s
|
||||
|
||||
|
||||
def _encode_inhibit_any_policy(backend, inhibit_any_policy):
|
||||
return _encode_asn1_int_gc(backend, inhibit_any_policy.skip_certs)
|
||||
|
||||
|
||||
def _encode_name(backend, name):
|
||||
"""
|
||||
The X509_NAME created will not be gc'd. Use _encode_name_gc if needed.
|
||||
"""
|
||||
subject = backend._lib.X509_NAME_new()
|
||||
for rdn in name.rdns:
|
||||
set_flag = 0 # indicate whether to add to last RDN or create new RDN
|
||||
for attribute in rdn:
|
||||
name_entry = _encode_name_entry(backend, attribute)
|
||||
# X509_NAME_add_entry dups the object so we need to gc this copy
|
||||
name_entry = backend._ffi.gc(
|
||||
name_entry, backend._lib.X509_NAME_ENTRY_free
|
||||
)
|
||||
res = backend._lib.X509_NAME_add_entry(
|
||||
subject, name_entry, -1, set_flag
|
||||
)
|
||||
backend.openssl_assert(res == 1)
|
||||
set_flag = -1
|
||||
return subject
|
||||
|
||||
|
||||
def _encode_name_gc(backend, attributes):
|
||||
subject = _encode_name(backend, attributes)
|
||||
subject = backend._ffi.gc(subject, backend._lib.X509_NAME_free)
|
||||
return subject
|
||||
|
||||
|
||||
def _encode_sk_name_entry(backend, attributes):
|
||||
"""
|
||||
The sk_X509_NAME_ENTRY created will not be gc'd.
|
||||
"""
|
||||
stack = backend._lib.sk_X509_NAME_ENTRY_new_null()
|
||||
for attribute in attributes:
|
||||
name_entry = _encode_name_entry(backend, attribute)
|
||||
res = backend._lib.sk_X509_NAME_ENTRY_push(stack, name_entry)
|
||||
backend.openssl_assert(res >= 1)
|
||||
return stack
|
||||
|
||||
|
||||
def _encode_name_entry(backend, attribute):
|
||||
if attribute._type is _ASN1Type.BMPString:
|
||||
value = attribute.value.encode("utf_16_be")
|
||||
elif attribute._type is _ASN1Type.UniversalString:
|
||||
value = attribute.value.encode("utf_32_be")
|
||||
else:
|
||||
value = attribute.value.encode("utf8")
|
||||
|
||||
obj = _txt2obj_gc(backend, attribute.oid.dotted_string)
|
||||
|
||||
name_entry = backend._lib.X509_NAME_ENTRY_create_by_OBJ(
|
||||
backend._ffi.NULL, obj, attribute._type.value, value, len(value)
|
||||
)
|
||||
return name_entry
|
||||
|
||||
|
||||
def _encode_crl_number_delta_crl_indicator(backend, ext):
|
||||
return _encode_asn1_int_gc(backend, ext.crl_number)
|
||||
|
||||
|
||||
def _encode_issuing_dist_point(backend, ext):
|
||||
idp = backend._lib.ISSUING_DIST_POINT_new()
|
||||
backend.openssl_assert(idp != backend._ffi.NULL)
|
||||
idp = backend._ffi.gc(idp, backend._lib.ISSUING_DIST_POINT_free)
|
||||
idp.onlyuser = 255 if ext.only_contains_user_certs else 0
|
||||
idp.onlyCA = 255 if ext.only_contains_ca_certs else 0
|
||||
idp.indirectCRL = 255 if ext.indirect_crl else 0
|
||||
idp.onlyattr = 255 if ext.only_contains_attribute_certs else 0
|
||||
if ext.only_some_reasons:
|
||||
idp.onlysomereasons = _encode_reasonflags(
|
||||
backend, ext.only_some_reasons
|
||||
)
|
||||
|
||||
if ext.full_name:
|
||||
idp.distpoint = _encode_full_name(backend, ext.full_name)
|
||||
|
||||
if ext.relative_name:
|
||||
idp.distpoint = _encode_relative_name(backend, ext.relative_name)
|
||||
|
||||
return idp
|
||||
|
||||
|
||||
def _encode_crl_reason(backend, crl_reason):
|
||||
asn1enum = backend._lib.ASN1_ENUMERATED_new()
|
||||
backend.openssl_assert(asn1enum != backend._ffi.NULL)
|
||||
asn1enum = backend._ffi.gc(asn1enum, backend._lib.ASN1_ENUMERATED_free)
|
||||
res = backend._lib.ASN1_ENUMERATED_set(
|
||||
asn1enum, _CRL_ENTRY_REASON_ENUM_TO_CODE[crl_reason.reason]
|
||||
)
|
||||
backend.openssl_assert(res == 1)
|
||||
|
||||
return asn1enum
|
||||
|
||||
|
||||
def _encode_invalidity_date(backend, invalidity_date):
|
||||
time = backend._lib.ASN1_GENERALIZEDTIME_set(
|
||||
backend._ffi.NULL,
|
||||
calendar.timegm(invalidity_date.invalidity_date.timetuple()),
|
||||
)
|
||||
backend.openssl_assert(time != backend._ffi.NULL)
|
||||
time = backend._ffi.gc(time, backend._lib.ASN1_GENERALIZEDTIME_free)
|
||||
|
||||
return time
|
||||
|
||||
|
||||
def _encode_certificate_policies(backend, certificate_policies):
|
||||
cp = backend._lib.sk_POLICYINFO_new_null()
|
||||
backend.openssl_assert(cp != backend._ffi.NULL)
|
||||
cp = backend._ffi.gc(cp, backend._lib.sk_POLICYINFO_free)
|
||||
for policy_info in certificate_policies:
|
||||
pi = backend._lib.POLICYINFO_new()
|
||||
backend.openssl_assert(pi != backend._ffi.NULL)
|
||||
res = backend._lib.sk_POLICYINFO_push(cp, pi)
|
||||
backend.openssl_assert(res >= 1)
|
||||
oid = _txt2obj(backend, policy_info.policy_identifier.dotted_string)
|
||||
pi.policyid = oid
|
||||
if policy_info.policy_qualifiers:
|
||||
pqis = backend._lib.sk_POLICYQUALINFO_new_null()
|
||||
backend.openssl_assert(pqis != backend._ffi.NULL)
|
||||
for qualifier in policy_info.policy_qualifiers:
|
||||
pqi = backend._lib.POLICYQUALINFO_new()
|
||||
backend.openssl_assert(pqi != backend._ffi.NULL)
|
||||
res = backend._lib.sk_POLICYQUALINFO_push(pqis, pqi)
|
||||
backend.openssl_assert(res >= 1)
|
||||
if isinstance(qualifier, six.text_type):
|
||||
pqi.pqualid = _txt2obj(
|
||||
backend, x509.OID_CPS_QUALIFIER.dotted_string
|
||||
)
|
||||
pqi.d.cpsuri = _encode_asn1_str(
|
||||
backend,
|
||||
qualifier.encode("ascii"),
|
||||
)
|
||||
else:
|
||||
assert isinstance(qualifier, x509.UserNotice)
|
||||
pqi.pqualid = _txt2obj(
|
||||
backend, x509.OID_CPS_USER_NOTICE.dotted_string
|
||||
)
|
||||
un = backend._lib.USERNOTICE_new()
|
||||
backend.openssl_assert(un != backend._ffi.NULL)
|
||||
pqi.d.usernotice = un
|
||||
if qualifier.explicit_text:
|
||||
un.exptext = _encode_asn1_utf8_str(
|
||||
backend, qualifier.explicit_text
|
||||
)
|
||||
|
||||
un.noticeref = _encode_notice_reference(
|
||||
backend, qualifier.notice_reference
|
||||
)
|
||||
|
||||
pi.qualifiers = pqis
|
||||
|
||||
return cp
|
||||
|
||||
|
||||
def _encode_notice_reference(backend, notice):
|
||||
if notice is None:
|
||||
return backend._ffi.NULL
|
||||
else:
|
||||
nr = backend._lib.NOTICEREF_new()
|
||||
backend.openssl_assert(nr != backend._ffi.NULL)
|
||||
# organization is a required field
|
||||
nr.organization = _encode_asn1_utf8_str(backend, notice.organization)
|
||||
|
||||
notice_stack = backend._lib.sk_ASN1_INTEGER_new_null()
|
||||
nr.noticenos = notice_stack
|
||||
for number in notice.notice_numbers:
|
||||
num = _encode_asn1_int(backend, number)
|
||||
res = backend._lib.sk_ASN1_INTEGER_push(notice_stack, num)
|
||||
backend.openssl_assert(res >= 1)
|
||||
|
||||
return nr
|
||||
|
||||
|
||||
def _txt2obj(backend, name):
|
||||
"""
|
||||
Converts a Python string with an ASN.1 object ID in dotted form to a
|
||||
ASN1_OBJECT.
|
||||
"""
|
||||
name = name.encode("ascii")
|
||||
obj = backend._lib.OBJ_txt2obj(name, 1)
|
||||
backend.openssl_assert(obj != backend._ffi.NULL)
|
||||
return obj
|
||||
|
||||
|
||||
def _txt2obj_gc(backend, name):
|
||||
obj = _txt2obj(backend, name)
|
||||
obj = backend._ffi.gc(obj, backend._lib.ASN1_OBJECT_free)
|
||||
return obj
|
||||
|
||||
|
||||
def _encode_ocsp_nocheck(backend, ext):
|
||||
# Doesn't need to be GC'd
|
||||
return backend._lib.ASN1_NULL_new()
|
||||
|
||||
|
||||
def _encode_key_usage(backend, key_usage):
|
||||
set_bit = backend._lib.ASN1_BIT_STRING_set_bit
|
||||
ku = backend._lib.ASN1_BIT_STRING_new()
|
||||
ku = backend._ffi.gc(ku, backend._lib.ASN1_BIT_STRING_free)
|
||||
res = set_bit(ku, 0, key_usage.digital_signature)
|
||||
backend.openssl_assert(res == 1)
|
||||
res = set_bit(ku, 1, key_usage.content_commitment)
|
||||
backend.openssl_assert(res == 1)
|
||||
res = set_bit(ku, 2, key_usage.key_encipherment)
|
||||
backend.openssl_assert(res == 1)
|
||||
res = set_bit(ku, 3, key_usage.data_encipherment)
|
||||
backend.openssl_assert(res == 1)
|
||||
res = set_bit(ku, 4, key_usage.key_agreement)
|
||||
backend.openssl_assert(res == 1)
|
||||
res = set_bit(ku, 5, key_usage.key_cert_sign)
|
||||
backend.openssl_assert(res == 1)
|
||||
res = set_bit(ku, 6, key_usage.crl_sign)
|
||||
backend.openssl_assert(res == 1)
|
||||
if key_usage.key_agreement:
|
||||
res = set_bit(ku, 7, key_usage.encipher_only)
|
||||
backend.openssl_assert(res == 1)
|
||||
res = set_bit(ku, 8, key_usage.decipher_only)
|
||||
backend.openssl_assert(res == 1)
|
||||
else:
|
||||
res = set_bit(ku, 7, 0)
|
||||
backend.openssl_assert(res == 1)
|
||||
res = set_bit(ku, 8, 0)
|
||||
backend.openssl_assert(res == 1)
|
||||
|
||||
return ku
|
||||
|
||||
|
||||
def _encode_authority_key_identifier(backend, authority_keyid):
|
||||
akid = backend._lib.AUTHORITY_KEYID_new()
|
||||
backend.openssl_assert(akid != backend._ffi.NULL)
|
||||
akid = backend._ffi.gc(akid, backend._lib.AUTHORITY_KEYID_free)
|
||||
if authority_keyid.key_identifier is not None:
|
||||
akid.keyid = _encode_asn1_str(
|
||||
backend,
|
||||
authority_keyid.key_identifier,
|
||||
)
|
||||
|
||||
if authority_keyid.authority_cert_issuer is not None:
|
||||
akid.issuer = _encode_general_names(
|
||||
backend, authority_keyid.authority_cert_issuer
|
||||
)
|
||||
|
||||
if authority_keyid.authority_cert_serial_number is not None:
|
||||
akid.serial = _encode_asn1_int(
|
||||
backend, authority_keyid.authority_cert_serial_number
|
||||
)
|
||||
|
||||
return akid
|
||||
|
||||
|
||||
def _encode_basic_constraints(backend, basic_constraints):
|
||||
constraints = backend._lib.BASIC_CONSTRAINTS_new()
|
||||
constraints = backend._ffi.gc(
|
||||
constraints, backend._lib.BASIC_CONSTRAINTS_free
|
||||
)
|
||||
constraints.ca = 255 if basic_constraints.ca else 0
|
||||
if basic_constraints.ca and basic_constraints.path_length is not None:
|
||||
constraints.pathlen = _encode_asn1_int(
|
||||
backend, basic_constraints.path_length
|
||||
)
|
||||
|
||||
return constraints
|
||||
|
||||
|
||||
def _encode_information_access(backend, info_access):
|
||||
aia = backend._lib.sk_ACCESS_DESCRIPTION_new_null()
|
||||
backend.openssl_assert(aia != backend._ffi.NULL)
|
||||
aia = backend._ffi.gc(
|
||||
aia,
|
||||
lambda x: backend._lib.sk_ACCESS_DESCRIPTION_pop_free(
|
||||
x,
|
||||
backend._ffi.addressof(
|
||||
backend._lib._original_lib, "ACCESS_DESCRIPTION_free"
|
||||
),
|
||||
),
|
||||
)
|
||||
for access_description in info_access:
|
||||
ad = backend._lib.ACCESS_DESCRIPTION_new()
|
||||
method = _txt2obj(
|
||||
backend, access_description.access_method.dotted_string
|
||||
)
|
||||
_encode_general_name_preallocated(
|
||||
backend, access_description.access_location, ad.location
|
||||
)
|
||||
ad.method = method
|
||||
res = backend._lib.sk_ACCESS_DESCRIPTION_push(aia, ad)
|
||||
backend.openssl_assert(res >= 1)
|
||||
|
||||
return aia
|
||||
|
||||
|
||||
def _encode_general_names(backend, names):
|
||||
general_names = backend._lib.GENERAL_NAMES_new()
|
||||
backend.openssl_assert(general_names != backend._ffi.NULL)
|
||||
for name in names:
|
||||
gn = _encode_general_name(backend, name)
|
||||
res = backend._lib.sk_GENERAL_NAME_push(general_names, gn)
|
||||
backend.openssl_assert(res != 0)
|
||||
|
||||
return general_names
|
||||
|
||||
|
||||
def _encode_alt_name(backend, san):
|
||||
general_names = _encode_general_names(backend, san)
|
||||
general_names = backend._ffi.gc(
|
||||
general_names, backend._lib.GENERAL_NAMES_free
|
||||
)
|
||||
return general_names
|
||||
|
||||
|
||||
def _encode_subject_key_identifier(backend, ski):
|
||||
return _encode_asn1_str_gc(backend, ski.digest)
|
||||
|
||||
|
||||
def _encode_general_name(backend, name):
|
||||
gn = backend._lib.GENERAL_NAME_new()
|
||||
_encode_general_name_preallocated(backend, name, gn)
|
||||
return gn
|
||||
|
||||
|
||||
def _encode_general_name_preallocated(backend, name, gn):
|
||||
if isinstance(name, x509.DNSName):
|
||||
backend.openssl_assert(gn != backend._ffi.NULL)
|
||||
gn.type = backend._lib.GEN_DNS
|
||||
|
||||
ia5 = backend._lib.ASN1_IA5STRING_new()
|
||||
backend.openssl_assert(ia5 != backend._ffi.NULL)
|
||||
# ia5strings are supposed to be ITU T.50 but to allow round-tripping
|
||||
# of broken certs that encode utf8 we'll encode utf8 here too.
|
||||
value = name.value.encode("utf8")
|
||||
|
||||
res = backend._lib.ASN1_STRING_set(ia5, value, len(value))
|
||||
backend.openssl_assert(res == 1)
|
||||
gn.d.dNSName = ia5
|
||||
elif isinstance(name, x509.RegisteredID):
|
||||
backend.openssl_assert(gn != backend._ffi.NULL)
|
||||
gn.type = backend._lib.GEN_RID
|
||||
obj = backend._lib.OBJ_txt2obj(
|
||||
name.value.dotted_string.encode("ascii"), 1
|
||||
)
|
||||
backend.openssl_assert(obj != backend._ffi.NULL)
|
||||
gn.d.registeredID = obj
|
||||
elif isinstance(name, x509.DirectoryName):
|
||||
backend.openssl_assert(gn != backend._ffi.NULL)
|
||||
dir_name = _encode_name(backend, name.value)
|
||||
gn.type = backend._lib.GEN_DIRNAME
|
||||
gn.d.directoryName = dir_name
|
||||
elif isinstance(name, x509.IPAddress):
|
||||
backend.openssl_assert(gn != backend._ffi.NULL)
|
||||
if isinstance(name.value, ipaddress.IPv4Network):
|
||||
packed = name.value.network_address.packed + utils.int_to_bytes(
|
||||
((1 << 32) - name.value.num_addresses), 4
|
||||
)
|
||||
elif isinstance(name.value, ipaddress.IPv6Network):
|
||||
packed = name.value.network_address.packed + utils.int_to_bytes(
|
||||
(1 << 128) - name.value.num_addresses, 16
|
||||
)
|
||||
else:
|
||||
packed = name.value.packed
|
||||
ipaddr = _encode_asn1_str(backend, packed)
|
||||
gn.type = backend._lib.GEN_IPADD
|
||||
gn.d.iPAddress = ipaddr
|
||||
elif isinstance(name, x509.OtherName):
|
||||
backend.openssl_assert(gn != backend._ffi.NULL)
|
||||
other_name = backend._lib.OTHERNAME_new()
|
||||
backend.openssl_assert(other_name != backend._ffi.NULL)
|
||||
|
||||
type_id = backend._lib.OBJ_txt2obj(
|
||||
name.type_id.dotted_string.encode("ascii"), 1
|
||||
)
|
||||
backend.openssl_assert(type_id != backend._ffi.NULL)
|
||||
data = backend._ffi.new("unsigned char[]", name.value)
|
||||
data_ptr_ptr = backend._ffi.new("unsigned char **")
|
||||
data_ptr_ptr[0] = data
|
||||
value = backend._lib.d2i_ASN1_TYPE(
|
||||
backend._ffi.NULL, data_ptr_ptr, len(name.value)
|
||||
)
|
||||
if value == backend._ffi.NULL:
|
||||
backend._consume_errors()
|
||||
raise ValueError("Invalid ASN.1 data")
|
||||
other_name.type_id = type_id
|
||||
other_name.value = value
|
||||
gn.type = backend._lib.GEN_OTHERNAME
|
||||
gn.d.otherName = other_name
|
||||
elif isinstance(name, x509.RFC822Name):
|
||||
backend.openssl_assert(gn != backend._ffi.NULL)
|
||||
# ia5strings are supposed to be ITU T.50 but to allow round-tripping
|
||||
# of broken certs that encode utf8 we'll encode utf8 here too.
|
||||
data = name.value.encode("utf8")
|
||||
asn1_str = _encode_asn1_str(backend, data)
|
||||
gn.type = backend._lib.GEN_EMAIL
|
||||
gn.d.rfc822Name = asn1_str
|
||||
elif isinstance(name, x509.UniformResourceIdentifier):
|
||||
backend.openssl_assert(gn != backend._ffi.NULL)
|
||||
# ia5strings are supposed to be ITU T.50 but to allow round-tripping
|
||||
# of broken certs that encode utf8 we'll encode utf8 here too.
|
||||
data = name.value.encode("utf8")
|
||||
asn1_str = _encode_asn1_str(backend, data)
|
||||
gn.type = backend._lib.GEN_URI
|
||||
gn.d.uniformResourceIdentifier = asn1_str
|
||||
else:
|
||||
raise ValueError("{} is an unknown GeneralName type".format(name))
|
||||
|
||||
|
||||
def _encode_extended_key_usage(backend, extended_key_usage):
|
||||
eku = backend._lib.sk_ASN1_OBJECT_new_null()
|
||||
eku = backend._ffi.gc(eku, backend._lib.sk_ASN1_OBJECT_free)
|
||||
for oid in extended_key_usage:
|
||||
obj = _txt2obj(backend, oid.dotted_string)
|
||||
res = backend._lib.sk_ASN1_OBJECT_push(eku, obj)
|
||||
backend.openssl_assert(res >= 1)
|
||||
|
||||
return eku
|
||||
|
||||
|
||||
_CRLREASONFLAGS = {
|
||||
x509.ReasonFlags.key_compromise: 1,
|
||||
x509.ReasonFlags.ca_compromise: 2,
|
||||
x509.ReasonFlags.affiliation_changed: 3,
|
||||
x509.ReasonFlags.superseded: 4,
|
||||
x509.ReasonFlags.cessation_of_operation: 5,
|
||||
x509.ReasonFlags.certificate_hold: 6,
|
||||
x509.ReasonFlags.privilege_withdrawn: 7,
|
||||
x509.ReasonFlags.aa_compromise: 8,
|
||||
}
|
||||
|
||||
|
||||
def _encode_reasonflags(backend, reasons):
|
||||
bitmask = backend._lib.ASN1_BIT_STRING_new()
|
||||
backend.openssl_assert(bitmask != backend._ffi.NULL)
|
||||
for reason in reasons:
|
||||
res = backend._lib.ASN1_BIT_STRING_set_bit(
|
||||
bitmask, _CRLREASONFLAGS[reason], 1
|
||||
)
|
||||
backend.openssl_assert(res == 1)
|
||||
|
||||
return bitmask
|
||||
|
||||
|
||||
def _encode_full_name(backend, full_name):
|
||||
dpn = backend._lib.DIST_POINT_NAME_new()
|
||||
backend.openssl_assert(dpn != backend._ffi.NULL)
|
||||
dpn.type = _DISTPOINT_TYPE_FULLNAME
|
||||
dpn.name.fullname = _encode_general_names(backend, full_name)
|
||||
return dpn
|
||||
|
||||
|
||||
def _encode_relative_name(backend, relative_name):
|
||||
dpn = backend._lib.DIST_POINT_NAME_new()
|
||||
backend.openssl_assert(dpn != backend._ffi.NULL)
|
||||
dpn.type = _DISTPOINT_TYPE_RELATIVENAME
|
||||
dpn.name.relativename = _encode_sk_name_entry(backend, relative_name)
|
||||
return dpn
|
||||
|
||||
|
||||
def _encode_cdps_freshest_crl(backend, cdps):
|
||||
cdp = backend._lib.sk_DIST_POINT_new_null()
|
||||
cdp = backend._ffi.gc(cdp, backend._lib.sk_DIST_POINT_free)
|
||||
for point in cdps:
|
||||
dp = backend._lib.DIST_POINT_new()
|
||||
backend.openssl_assert(dp != backend._ffi.NULL)
|
||||
|
||||
if point.reasons:
|
||||
dp.reasons = _encode_reasonflags(backend, point.reasons)
|
||||
|
||||
if point.full_name:
|
||||
dp.distpoint = _encode_full_name(backend, point.full_name)
|
||||
|
||||
if point.relative_name:
|
||||
dp.distpoint = _encode_relative_name(backend, point.relative_name)
|
||||
|
||||
if point.crl_issuer:
|
||||
dp.CRLissuer = _encode_general_names(backend, point.crl_issuer)
|
||||
|
||||
res = backend._lib.sk_DIST_POINT_push(cdp, dp)
|
||||
backend.openssl_assert(res >= 1)
|
||||
|
||||
return cdp
|
||||
|
||||
|
||||
def _encode_name_constraints(backend, name_constraints):
|
||||
nc = backend._lib.NAME_CONSTRAINTS_new()
|
||||
backend.openssl_assert(nc != backend._ffi.NULL)
|
||||
nc = backend._ffi.gc(nc, backend._lib.NAME_CONSTRAINTS_free)
|
||||
permitted = _encode_general_subtree(
|
||||
backend, name_constraints.permitted_subtrees
|
||||
)
|
||||
nc.permittedSubtrees = permitted
|
||||
excluded = _encode_general_subtree(
|
||||
backend, name_constraints.excluded_subtrees
|
||||
)
|
||||
nc.excludedSubtrees = excluded
|
||||
|
||||
return nc
|
||||
|
||||
|
||||
def _encode_policy_constraints(backend, policy_constraints):
|
||||
pc = backend._lib.POLICY_CONSTRAINTS_new()
|
||||
backend.openssl_assert(pc != backend._ffi.NULL)
|
||||
pc = backend._ffi.gc(pc, backend._lib.POLICY_CONSTRAINTS_free)
|
||||
if policy_constraints.require_explicit_policy is not None:
|
||||
pc.requireExplicitPolicy = _encode_asn1_int(
|
||||
backend, policy_constraints.require_explicit_policy
|
||||
)
|
||||
|
||||
if policy_constraints.inhibit_policy_mapping is not None:
|
||||
pc.inhibitPolicyMapping = _encode_asn1_int(
|
||||
backend, policy_constraints.inhibit_policy_mapping
|
||||
)
|
||||
|
||||
return pc
|
||||
|
||||
|
||||
def _encode_general_subtree(backend, subtrees):
|
||||
if subtrees is None:
|
||||
return backend._ffi.NULL
|
||||
else:
|
||||
general_subtrees = backend._lib.sk_GENERAL_SUBTREE_new_null()
|
||||
for name in subtrees:
|
||||
gs = backend._lib.GENERAL_SUBTREE_new()
|
||||
gs.base = _encode_general_name(backend, name)
|
||||
res = backend._lib.sk_GENERAL_SUBTREE_push(general_subtrees, gs)
|
||||
assert res >= 1
|
||||
|
||||
return general_subtrees
|
||||
|
||||
|
||||
def _encode_nonce(backend, nonce):
|
||||
return _encode_asn1_str_gc(backend, nonce.nonce)
|
||||
|
||||
|
||||
_EXTENSION_ENCODE_HANDLERS = {
|
||||
ExtensionOID.BASIC_CONSTRAINTS: _encode_basic_constraints,
|
||||
ExtensionOID.SUBJECT_KEY_IDENTIFIER: _encode_subject_key_identifier,
|
||||
ExtensionOID.KEY_USAGE: _encode_key_usage,
|
||||
ExtensionOID.SUBJECT_ALTERNATIVE_NAME: _encode_alt_name,
|
||||
ExtensionOID.ISSUER_ALTERNATIVE_NAME: _encode_alt_name,
|
||||
ExtensionOID.EXTENDED_KEY_USAGE: _encode_extended_key_usage,
|
||||
ExtensionOID.AUTHORITY_KEY_IDENTIFIER: _encode_authority_key_identifier,
|
||||
ExtensionOID.CERTIFICATE_POLICIES: _encode_certificate_policies,
|
||||
ExtensionOID.AUTHORITY_INFORMATION_ACCESS: _encode_information_access,
|
||||
ExtensionOID.SUBJECT_INFORMATION_ACCESS: _encode_information_access,
|
||||
ExtensionOID.CRL_DISTRIBUTION_POINTS: _encode_cdps_freshest_crl,
|
||||
ExtensionOID.FRESHEST_CRL: _encode_cdps_freshest_crl,
|
||||
ExtensionOID.INHIBIT_ANY_POLICY: _encode_inhibit_any_policy,
|
||||
ExtensionOID.OCSP_NO_CHECK: _encode_ocsp_nocheck,
|
||||
ExtensionOID.NAME_CONSTRAINTS: _encode_name_constraints,
|
||||
ExtensionOID.POLICY_CONSTRAINTS: _encode_policy_constraints,
|
||||
}
|
||||
|
||||
_CRL_EXTENSION_ENCODE_HANDLERS = {
|
||||
ExtensionOID.ISSUER_ALTERNATIVE_NAME: _encode_alt_name,
|
||||
ExtensionOID.AUTHORITY_KEY_IDENTIFIER: _encode_authority_key_identifier,
|
||||
ExtensionOID.AUTHORITY_INFORMATION_ACCESS: _encode_information_access,
|
||||
ExtensionOID.CRL_NUMBER: _encode_crl_number_delta_crl_indicator,
|
||||
ExtensionOID.DELTA_CRL_INDICATOR: _encode_crl_number_delta_crl_indicator,
|
||||
ExtensionOID.ISSUING_DISTRIBUTION_POINT: _encode_issuing_dist_point,
|
||||
ExtensionOID.FRESHEST_CRL: _encode_cdps_freshest_crl,
|
||||
}
|
||||
|
||||
_CRL_ENTRY_EXTENSION_ENCODE_HANDLERS = {
|
||||
CRLEntryExtensionOID.CERTIFICATE_ISSUER: _encode_alt_name,
|
||||
CRLEntryExtensionOID.CRL_REASON: _encode_crl_reason,
|
||||
CRLEntryExtensionOID.INVALIDITY_DATE: _encode_invalidity_date,
|
||||
}
|
||||
|
||||
_OCSP_REQUEST_EXTENSION_ENCODE_HANDLERS = {
|
||||
OCSPExtensionOID.NONCE: _encode_nonce,
|
||||
}
|
||||
|
||||
_OCSP_BASICRESP_EXTENSION_ENCODE_HANDLERS = {
|
||||
OCSPExtensionOID.NONCE: _encode_nonce,
|
||||
}
|
||||
|
|
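The _EXTENSION_ENCODE_HANDLERS table above is what turns extensions added to an x509.CertificateBuilder into OpenSSL structures. A rough, hedged illustration of the public-API side (key material and names are placeholders, not part of this diff):

import datetime
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.x509.oid import NameOID

key = rsa.generate_private_key(
    public_exponent=65537, key_size=2048, backend=default_backend()
)
name = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, u"example.test")])
cert = (
    x509.CertificateBuilder()
    .subject_name(name)
    .issuer_name(name)
    .public_key(key.public_key())
    .serial_number(x509.random_serial_number())
    .not_valid_before(datetime.datetime.utcnow())
    .not_valid_after(datetime.datetime.utcnow() + datetime.timedelta(days=1))
    # These extensions are serialized by _encode_basic_constraints and
    # _encode_alt_name from the handler table above.
    .add_extension(x509.BasicConstraints(ca=True, path_length=None), critical=True)
    .add_extension(
        x509.SubjectAlternativeName([x509.DNSName(u"example.test")]), critical=False
    )
    .sign(key, hashes.SHA256(), default_backend())
)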
@ -0,0 +1,82 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

from __future__ import absolute_import, division, print_function


from cryptography import utils
from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
from cryptography.hazmat.primitives import hashes


@utils.register_interface(hashes.HashContext)
class _HashContext(object):
    def __init__(self, backend, algorithm, ctx=None):
        self._algorithm = algorithm

        self._backend = backend

        if ctx is None:
            ctx = self._backend._lib.Cryptography_EVP_MD_CTX_new()
            ctx = self._backend._ffi.gc(
                ctx, self._backend._lib.Cryptography_EVP_MD_CTX_free
            )
            evp_md = self._backend._evp_md_from_algorithm(algorithm)
            if evp_md == self._backend._ffi.NULL:
                raise UnsupportedAlgorithm(
                    "{} is not a supported hash on this backend.".format(
                        algorithm.name
                    ),
                    _Reasons.UNSUPPORTED_HASH,
                )
            res = self._backend._lib.EVP_DigestInit_ex(
                ctx, evp_md, self._backend._ffi.NULL
            )
            self._backend.openssl_assert(res != 0)

        self._ctx = ctx

    algorithm = utils.read_only_property("_algorithm")

    def copy(self):
        copied_ctx = self._backend._lib.Cryptography_EVP_MD_CTX_new()
        copied_ctx = self._backend._ffi.gc(
            copied_ctx, self._backend._lib.Cryptography_EVP_MD_CTX_free
        )
        res = self._backend._lib.EVP_MD_CTX_copy_ex(copied_ctx, self._ctx)
        self._backend.openssl_assert(res != 0)
        return _HashContext(self._backend, self.algorithm, ctx=copied_ctx)

    def update(self, data):
        data_ptr = self._backend._ffi.from_buffer(data)
        res = self._backend._lib.EVP_DigestUpdate(
            self._ctx, data_ptr, len(data)
        )
        self._backend.openssl_assert(res != 0)

    def finalize(self):
        if isinstance(self.algorithm, hashes.ExtendableOutputFunction):
            # extendable output functions use a different finalize
            return self._finalize_xof()
        else:
            buf = self._backend._ffi.new(
                "unsigned char[]", self._backend._lib.EVP_MAX_MD_SIZE
            )
            outlen = self._backend._ffi.new("unsigned int *")
            res = self._backend._lib.EVP_DigestFinal_ex(self._ctx, buf, outlen)
            self._backend.openssl_assert(res != 0)
            self._backend.openssl_assert(
                outlen[0] == self.algorithm.digest_size
            )
            return self._backend._ffi.buffer(buf)[: outlen[0]]

    def _finalize_xof(self):
        buf = self._backend._ffi.new(
            "unsigned char[]", self.algorithm.digest_size
        )
        res = self._backend._lib.EVP_DigestFinalXOF(
            self._ctx, buf, self.algorithm.digest_size
        )
        self._backend.openssl_assert(res != 0)
        return self._backend._ffi.buffer(buf)[: self.algorithm.digest_size]
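For reference, the _HashContext above backs the public hashes.Hash object; a short, hedged usage sketch showing the copy path as well:

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes

digest = hashes.Hash(hashes.SHA256(), backend=default_backend())
digest.update(b"abc")
copied = digest.copy()          # goes through _HashContext.copy() / EVP_MD_CTX_copy_ex
print(digest.finalize().hex())  # SHA-256 of b"abc"
print(copied.finalize().hex())  # same value, from an independent context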
@ -0,0 +1,78 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

from __future__ import absolute_import, division, print_function


from cryptography import utils
from cryptography.exceptions import (
    InvalidSignature,
    UnsupportedAlgorithm,
    _Reasons,
)
from cryptography.hazmat.primitives import constant_time, hashes


@utils.register_interface(hashes.HashContext)
class _HMACContext(object):
    def __init__(self, backend, key, algorithm, ctx=None):
        self._algorithm = algorithm
        self._backend = backend

        if ctx is None:
            ctx = self._backend._lib.Cryptography_HMAC_CTX_new()
            self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
            ctx = self._backend._ffi.gc(
                ctx, self._backend._lib.Cryptography_HMAC_CTX_free
            )
            evp_md = self._backend._evp_md_from_algorithm(algorithm)
            if evp_md == self._backend._ffi.NULL:
                raise UnsupportedAlgorithm(
                    "{} is not a supported hash on this backend".format(
                        algorithm.name
                    ),
                    _Reasons.UNSUPPORTED_HASH,
                )
            key_ptr = self._backend._ffi.from_buffer(key)
            res = self._backend._lib.HMAC_Init_ex(
                ctx, key_ptr, len(key), evp_md, self._backend._ffi.NULL
            )
            self._backend.openssl_assert(res != 0)

        self._ctx = ctx
        self._key = key

    algorithm = utils.read_only_property("_algorithm")

    def copy(self):
        copied_ctx = self._backend._lib.Cryptography_HMAC_CTX_new()
        self._backend.openssl_assert(copied_ctx != self._backend._ffi.NULL)
        copied_ctx = self._backend._ffi.gc(
            copied_ctx, self._backend._lib.Cryptography_HMAC_CTX_free
        )
        res = self._backend._lib.HMAC_CTX_copy(copied_ctx, self._ctx)
        self._backend.openssl_assert(res != 0)
        return _HMACContext(
            self._backend, self._key, self.algorithm, ctx=copied_ctx
        )

    def update(self, data):
        data_ptr = self._backend._ffi.from_buffer(data)
        res = self._backend._lib.HMAC_Update(self._ctx, data_ptr, len(data))
        self._backend.openssl_assert(res != 0)

    def finalize(self):
        buf = self._backend._ffi.new(
            "unsigned char[]", self._backend._lib.EVP_MAX_MD_SIZE
        )
        outlen = self._backend._ffi.new("unsigned int *")
        res = self._backend._lib.HMAC_Final(self._ctx, buf, outlen)
        self._backend.openssl_assert(res != 0)
        self._backend.openssl_assert(outlen[0] == self.algorithm.digest_size)
        return self._backend._ffi.buffer(buf)[: outlen[0]]

    def verify(self, signature):
        digest = self.finalize()
        if not constant_time.bytes_eq(digest, signature):
            raise InvalidSignature("Signature did not match digest.")
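Likewise, _HMACContext is reached through the public hmac.HMAC class, and verify() relies on the constant-time comparison shown above. A minimal, hedged sketch (the all-zero key is purely illustrative):

from cryptography.exceptions import InvalidSignature
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, hmac

h = hmac.HMAC(b"\x00" * 32, hashes.SHA256(), backend=default_backend())
h.update(b"payload")
tag = h.finalize()

h2 = hmac.HMAC(b"\x00" * 32, hashes.SHA256(), backend=default_backend())
h2.update(b"payload")
try:
    h2.verify(tag)  # constant_time.bytes_eq under the hood
except InvalidSignature:
    print("tag mismatch")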
@ -0,0 +1,401 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import functools
|
||||
|
||||
from cryptography import utils, x509
|
||||
from cryptography.exceptions import UnsupportedAlgorithm
|
||||
from cryptography.hazmat.backends.openssl.decode_asn1 import (
|
||||
_CRL_ENTRY_REASON_CODE_TO_ENUM,
|
||||
_asn1_integer_to_int,
|
||||
_asn1_string_to_bytes,
|
||||
_decode_x509_name,
|
||||
_obj2txt,
|
||||
_parse_asn1_generalized_time,
|
||||
)
|
||||
from cryptography.hazmat.backends.openssl.x509 import _Certificate
|
||||
from cryptography.hazmat.primitives import serialization
|
||||
from cryptography.x509.ocsp import (
|
||||
OCSPCertStatus,
|
||||
OCSPRequest,
|
||||
OCSPResponse,
|
||||
OCSPResponseStatus,
|
||||
_CERT_STATUS_TO_ENUM,
|
||||
_OIDS_TO_HASH,
|
||||
_RESPONSE_STATUS_TO_ENUM,
|
||||
)
|
||||
|
||||
|
||||
def _requires_successful_response(func):
|
||||
@functools.wraps(func)
|
||||
def wrapper(self, *args):
|
||||
if self.response_status != OCSPResponseStatus.SUCCESSFUL:
|
||||
raise ValueError(
|
||||
"OCSP response status is not successful so the property "
|
||||
"has no value"
|
||||
)
|
||||
else:
|
||||
return func(self, *args)
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
def _issuer_key_hash(backend, cert_id):
|
||||
key_hash = backend._ffi.new("ASN1_OCTET_STRING **")
|
||||
res = backend._lib.OCSP_id_get0_info(
|
||||
backend._ffi.NULL,
|
||||
backend._ffi.NULL,
|
||||
key_hash,
|
||||
backend._ffi.NULL,
|
||||
cert_id,
|
||||
)
|
||||
backend.openssl_assert(res == 1)
|
||||
backend.openssl_assert(key_hash[0] != backend._ffi.NULL)
|
||||
return _asn1_string_to_bytes(backend, key_hash[0])
|
||||
|
||||
|
||||
def _issuer_name_hash(backend, cert_id):
|
||||
name_hash = backend._ffi.new("ASN1_OCTET_STRING **")
|
||||
res = backend._lib.OCSP_id_get0_info(
|
||||
name_hash,
|
||||
backend._ffi.NULL,
|
||||
backend._ffi.NULL,
|
||||
backend._ffi.NULL,
|
||||
cert_id,
|
||||
)
|
||||
backend.openssl_assert(res == 1)
|
||||
backend.openssl_assert(name_hash[0] != backend._ffi.NULL)
|
||||
return _asn1_string_to_bytes(backend, name_hash[0])
|
||||
|
||||
|
||||
def _serial_number(backend, cert_id):
|
||||
num = backend._ffi.new("ASN1_INTEGER **")
|
||||
res = backend._lib.OCSP_id_get0_info(
|
||||
backend._ffi.NULL, backend._ffi.NULL, backend._ffi.NULL, num, cert_id
|
||||
)
|
||||
backend.openssl_assert(res == 1)
|
||||
backend.openssl_assert(num[0] != backend._ffi.NULL)
|
||||
return _asn1_integer_to_int(backend, num[0])
|
||||
|
||||
|
||||
def _hash_algorithm(backend, cert_id):
|
||||
asn1obj = backend._ffi.new("ASN1_OBJECT **")
|
||||
res = backend._lib.OCSP_id_get0_info(
|
||||
backend._ffi.NULL,
|
||||
asn1obj,
|
||||
backend._ffi.NULL,
|
||||
backend._ffi.NULL,
|
||||
cert_id,
|
||||
)
|
||||
backend.openssl_assert(res == 1)
|
||||
backend.openssl_assert(asn1obj[0] != backend._ffi.NULL)
|
||||
oid = _obj2txt(backend, asn1obj[0])
|
||||
try:
|
||||
return _OIDS_TO_HASH[oid]
|
||||
except KeyError:
|
||||
raise UnsupportedAlgorithm(
|
||||
"Signature algorithm OID: {} not recognized".format(oid)
|
||||
)
|
||||
|
||||
|
||||
@utils.register_interface(OCSPResponse)
|
||||
class _OCSPResponse(object):
|
||||
def __init__(self, backend, ocsp_response):
|
||||
self._backend = backend
|
||||
self._ocsp_response = ocsp_response
|
||||
status = self._backend._lib.OCSP_response_status(self._ocsp_response)
|
||||
self._backend.openssl_assert(status in _RESPONSE_STATUS_TO_ENUM)
|
||||
self._status = _RESPONSE_STATUS_TO_ENUM[status]
|
||||
if self._status is OCSPResponseStatus.SUCCESSFUL:
|
||||
basic = self._backend._lib.OCSP_response_get1_basic(
|
||||
self._ocsp_response
|
||||
)
|
||||
self._backend.openssl_assert(basic != self._backend._ffi.NULL)
|
||||
self._basic = self._backend._ffi.gc(
|
||||
basic, self._backend._lib.OCSP_BASICRESP_free
|
||||
)
|
||||
num_resp = self._backend._lib.OCSP_resp_count(self._basic)
|
||||
if num_resp != 1:
|
||||
raise ValueError(
|
||||
"OCSP response contains more than one SINGLERESP structure"
|
||||
", which this library does not support. "
|
||||
"{} found".format(num_resp)
|
||||
)
|
||||
self._single = self._backend._lib.OCSP_resp_get0(self._basic, 0)
|
||||
self._backend.openssl_assert(
|
||||
self._single != self._backend._ffi.NULL
|
||||
)
|
||||
self._cert_id = self._backend._lib.OCSP_SINGLERESP_get0_id(
|
||||
self._single
|
||||
)
|
||||
self._backend.openssl_assert(
|
||||
self._cert_id != self._backend._ffi.NULL
|
||||
)
|
||||
|
||||
response_status = utils.read_only_property("_status")
|
||||
|
||||
@property
|
||||
@_requires_successful_response
|
||||
def signature_algorithm_oid(self):
|
||||
alg = self._backend._lib.OCSP_resp_get0_tbs_sigalg(self._basic)
|
||||
self._backend.openssl_assert(alg != self._backend._ffi.NULL)
|
||||
oid = _obj2txt(self._backend, alg.algorithm)
|
||||
return x509.ObjectIdentifier(oid)
|
||||
|
||||
@property
|
||||
@_requires_successful_response
|
||||
def signature_hash_algorithm(self):
|
||||
oid = self.signature_algorithm_oid
|
||||
try:
|
||||
return x509._SIG_OIDS_TO_HASH[oid]
|
||||
except KeyError:
|
||||
raise UnsupportedAlgorithm(
|
||||
"Signature algorithm OID:{} not recognized".format(oid)
|
||||
)
|
||||
|
||||
@property
|
||||
@_requires_successful_response
|
||||
def signature(self):
|
||||
sig = self._backend._lib.OCSP_resp_get0_signature(self._basic)
|
||||
self._backend.openssl_assert(sig != self._backend._ffi.NULL)
|
||||
return _asn1_string_to_bytes(self._backend, sig)
|
||||
|
||||
@property
|
||||
@_requires_successful_response
|
||||
def tbs_response_bytes(self):
|
||||
respdata = self._backend._lib.OCSP_resp_get0_respdata(self._basic)
|
||||
self._backend.openssl_assert(respdata != self._backend._ffi.NULL)
|
||||
pp = self._backend._ffi.new("unsigned char **")
|
||||
res = self._backend._lib.i2d_OCSP_RESPDATA(respdata, pp)
|
||||
self._backend.openssl_assert(pp[0] != self._backend._ffi.NULL)
|
||||
pp = self._backend._ffi.gc(
|
||||
pp, lambda pointer: self._backend._lib.OPENSSL_free(pointer[0])
|
||||
)
|
||||
self._backend.openssl_assert(res > 0)
|
||||
return self._backend._ffi.buffer(pp[0], res)[:]
|
||||
|
||||
@property
|
||||
@_requires_successful_response
|
||||
def certificates(self):
|
||||
sk_x509 = self._backend._lib.OCSP_resp_get0_certs(self._basic)
|
||||
num = self._backend._lib.sk_X509_num(sk_x509)
|
||||
certs = []
|
||||
for i in range(num):
|
||||
x509 = self._backend._lib.sk_X509_value(sk_x509, i)
|
||||
self._backend.openssl_assert(x509 != self._backend._ffi.NULL)
|
||||
cert = _Certificate(self._backend, x509)
|
||||
# We need to keep the OCSP response that the certificate came from
|
||||
# alive until the Certificate object itself goes out of scope, so
|
||||
# we give it a private reference.
|
||||
cert._ocsp_resp = self
|
||||
certs.append(cert)
|
||||
|
||||
return certs
|
||||
|
||||
@property
|
||||
@_requires_successful_response
|
||||
def responder_key_hash(self):
|
||||
_, asn1_string = self._responder_key_name()
|
||||
if asn1_string == self._backend._ffi.NULL:
|
||||
return None
|
||||
else:
|
||||
return _asn1_string_to_bytes(self._backend, asn1_string)
|
||||
|
||||
@property
|
||||
@_requires_successful_response
|
||||
def responder_name(self):
|
||||
x509_name, _ = self._responder_key_name()
|
||||
if x509_name == self._backend._ffi.NULL:
|
||||
return None
|
||||
else:
|
||||
return _decode_x509_name(self._backend, x509_name)
|
||||
|
||||
def _responder_key_name(self):
|
||||
asn1_string = self._backend._ffi.new("ASN1_OCTET_STRING **")
|
||||
x509_name = self._backend._ffi.new("X509_NAME **")
|
||||
res = self._backend._lib.OCSP_resp_get0_id(
|
||||
self._basic, asn1_string, x509_name
|
||||
)
|
||||
self._backend.openssl_assert(res == 1)
|
||||
return x509_name[0], asn1_string[0]
|
||||
|
||||
@property
|
||||
@_requires_successful_response
|
||||
def produced_at(self):
|
||||
produced_at = self._backend._lib.OCSP_resp_get0_produced_at(
|
||||
self._basic
|
||||
)
|
||||
return _parse_asn1_generalized_time(self._backend, produced_at)
|
||||
|
||||
@property
|
||||
@_requires_successful_response
|
||||
def certificate_status(self):
|
||||
status = self._backend._lib.OCSP_single_get0_status(
|
||||
self._single,
|
||||
self._backend._ffi.NULL,
|
||||
self._backend._ffi.NULL,
|
||||
self._backend._ffi.NULL,
|
||||
self._backend._ffi.NULL,
|
||||
)
|
||||
self._backend.openssl_assert(status in _CERT_STATUS_TO_ENUM)
|
||||
return _CERT_STATUS_TO_ENUM[status]
|
||||
|
||||
@property
|
||||
@_requires_successful_response
|
||||
def revocation_time(self):
|
||||
if self.certificate_status is not OCSPCertStatus.REVOKED:
|
||||
return None
|
||||
|
||||
asn1_time = self._backend._ffi.new("ASN1_GENERALIZEDTIME **")
|
||||
self._backend._lib.OCSP_single_get0_status(
|
||||
self._single,
|
||||
self._backend._ffi.NULL,
|
||||
asn1_time,
|
||||
self._backend._ffi.NULL,
|
||||
self._backend._ffi.NULL,
|
||||
)
|
||||
self._backend.openssl_assert(asn1_time[0] != self._backend._ffi.NULL)
|
||||
return _parse_asn1_generalized_time(self._backend, asn1_time[0])
|
||||
|
||||
@property
|
||||
@_requires_successful_response
|
||||
def revocation_reason(self):
|
||||
if self.certificate_status is not OCSPCertStatus.REVOKED:
|
||||
return None
|
||||
|
||||
reason_ptr = self._backend._ffi.new("int *")
|
||||
self._backend._lib.OCSP_single_get0_status(
|
||||
self._single,
|
||||
reason_ptr,
|
||||
self._backend._ffi.NULL,
|
||||
self._backend._ffi.NULL,
|
||||
self._backend._ffi.NULL,
|
||||
)
|
||||
# If no reason is encoded OpenSSL returns -1
|
||||
if reason_ptr[0] == -1:
|
||||
return None
|
||||
else:
|
||||
self._backend.openssl_assert(
|
||||
reason_ptr[0] in _CRL_ENTRY_REASON_CODE_TO_ENUM
|
||||
)
|
||||
return _CRL_ENTRY_REASON_CODE_TO_ENUM[reason_ptr[0]]
|
||||
|
||||
@property
|
||||
@_requires_successful_response
|
||||
def this_update(self):
|
||||
asn1_time = self._backend._ffi.new("ASN1_GENERALIZEDTIME **")
|
||||
self._backend._lib.OCSP_single_get0_status(
|
||||
self._single,
|
||||
self._backend._ffi.NULL,
|
||||
self._backend._ffi.NULL,
|
||||
asn1_time,
|
||||
self._backend._ffi.NULL,
|
||||
)
|
||||
self._backend.openssl_assert(asn1_time[0] != self._backend._ffi.NULL)
|
||||
return _parse_asn1_generalized_time(self._backend, asn1_time[0])
|
||||
|
||||
@property
|
||||
@_requires_successful_response
|
||||
def next_update(self):
|
||||
asn1_time = self._backend._ffi.new("ASN1_GENERALIZEDTIME **")
|
||||
self._backend._lib.OCSP_single_get0_status(
|
||||
self._single,
|
||||
self._backend._ffi.NULL,
|
||||
self._backend._ffi.NULL,
|
||||
self._backend._ffi.NULL,
|
||||
asn1_time,
|
||||
)
|
||||
if asn1_time[0] != self._backend._ffi.NULL:
|
||||
return _parse_asn1_generalized_time(self._backend, asn1_time[0])
|
||||
else:
|
||||
return None
|
||||
|
||||
@property
|
||||
@_requires_successful_response
|
||||
def issuer_key_hash(self):
|
||||
return _issuer_key_hash(self._backend, self._cert_id)
|
||||
|
||||
@property
|
||||
@_requires_successful_response
|
||||
def issuer_name_hash(self):
|
||||
return _issuer_name_hash(self._backend, self._cert_id)
|
||||
|
||||
@property
|
||||
@_requires_successful_response
|
||||
def hash_algorithm(self):
|
||||
return _hash_algorithm(self._backend, self._cert_id)
|
||||
|
||||
@property
|
||||
@_requires_successful_response
|
||||
def serial_number(self):
|
||||
return _serial_number(self._backend, self._cert_id)
|
||||
|
||||
@utils.cached_property
|
||||
@_requires_successful_response
|
||||
def extensions(self):
|
||||
return self._backend._ocsp_basicresp_ext_parser.parse(self._basic)
|
||||
|
||||
@utils.cached_property
|
||||
@_requires_successful_response
|
||||
def single_extensions(self):
|
||||
return self._backend._ocsp_singleresp_ext_parser.parse(self._single)
|
||||
|
||||
def public_bytes(self, encoding):
|
||||
if encoding is not serialization.Encoding.DER:
|
||||
raise ValueError("The only allowed encoding value is Encoding.DER")
|
||||
|
||||
bio = self._backend._create_mem_bio_gc()
|
||||
res = self._backend._lib.i2d_OCSP_RESPONSE_bio(
|
||||
bio, self._ocsp_response
|
||||
)
|
||||
self._backend.openssl_assert(res > 0)
|
||||
return self._backend._read_mem_bio(bio)
|
||||
|
||||
|
||||
@utils.register_interface(OCSPRequest)
|
||||
class _OCSPRequest(object):
|
||||
def __init__(self, backend, ocsp_request):
|
||||
if backend._lib.OCSP_request_onereq_count(ocsp_request) > 1:
|
||||
raise NotImplementedError(
|
||||
"OCSP request contains more than one request"
|
||||
)
|
||||
self._backend = backend
|
||||
self._ocsp_request = ocsp_request
|
||||
self._request = self._backend._lib.OCSP_request_onereq_get0(
|
||||
self._ocsp_request, 0
|
||||
)
|
||||
self._backend.openssl_assert(self._request != self._backend._ffi.NULL)
|
||||
self._cert_id = self._backend._lib.OCSP_onereq_get0_id(self._request)
|
||||
self._backend.openssl_assert(self._cert_id != self._backend._ffi.NULL)
|
||||
|
||||
@property
|
||||
def issuer_key_hash(self):
|
||||
return _issuer_key_hash(self._backend, self._cert_id)
|
||||
|
||||
@property
|
||||
def issuer_name_hash(self):
|
||||
return _issuer_name_hash(self._backend, self._cert_id)
|
||||
|
||||
@property
|
||||
def serial_number(self):
|
||||
return _serial_number(self._backend, self._cert_id)
|
||||
|
||||
@property
|
||||
def hash_algorithm(self):
|
||||
return _hash_algorithm(self._backend, self._cert_id)
|
||||
|
||||
@utils.cached_property
|
||||
def extensions(self):
|
||||
return self._backend._ocsp_req_ext_parser.parse(self._ocsp_request)
|
||||
|
||||
def public_bytes(self, encoding):
|
||||
if encoding is not serialization.Encoding.DER:
|
||||
raise ValueError("The only allowed encoding value is Encoding.DER")
|
||||
|
||||
bio = self._backend._create_mem_bio_gc()
|
||||
res = self._backend._lib.i2d_OCSP_REQUEST_bio(bio, self._ocsp_request)
|
||||
self._backend.openssl_assert(res > 0)
|
||||
return self._backend._read_mem_bio(bio)
|
||||
|
|
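The _OCSPResponse wrapper above is normally obtained by parsing DER bytes through the public loader. A hedged sketch (der_bytes stands in for a real DER-encoded OCSP response fetched elsewhere):

from cryptography.x509 import ocsp

ocsp_resp = ocsp.load_der_ocsp_response(der_bytes)
if ocsp_resp.response_status == ocsp.OCSPResponseStatus.SUCCESSFUL:
    # These properties are guarded by @_requires_successful_response above.
    print(ocsp_resp.certificate_status)
    print(ocsp_resp.this_update, ocsp_resp.next_update)
else:
    print("responder returned", ocsp_resp.response_status)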
@ -0,0 +1,65 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

from __future__ import absolute_import, division, print_function


from cryptography.exceptions import InvalidSignature
from cryptography.hazmat.primitives import constant_time


_POLY1305_TAG_SIZE = 16
_POLY1305_KEY_SIZE = 32


class _Poly1305Context(object):
    def __init__(self, backend, key):
        self._backend = backend

        key_ptr = self._backend._ffi.from_buffer(key)
        # This function copies the key into OpenSSL-owned memory so we don't
        # need to retain it ourselves
        evp_pkey = self._backend._lib.EVP_PKEY_new_raw_private_key(
            self._backend._lib.NID_poly1305,
            self._backend._ffi.NULL,
            key_ptr,
            len(key),
        )
        self._backend.openssl_assert(evp_pkey != self._backend._ffi.NULL)
        self._evp_pkey = self._backend._ffi.gc(
            evp_pkey, self._backend._lib.EVP_PKEY_free
        )
        ctx = self._backend._lib.Cryptography_EVP_MD_CTX_new()
        self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
        self._ctx = self._backend._ffi.gc(
            ctx, self._backend._lib.Cryptography_EVP_MD_CTX_free
        )
        res = self._backend._lib.EVP_DigestSignInit(
            self._ctx,
            self._backend._ffi.NULL,
            self._backend._ffi.NULL,
            self._backend._ffi.NULL,
            self._evp_pkey,
        )
        self._backend.openssl_assert(res == 1)

    def update(self, data):
        data_ptr = self._backend._ffi.from_buffer(data)
        res = self._backend._lib.EVP_DigestSignUpdate(
            self._ctx, data_ptr, len(data)
        )
        self._backend.openssl_assert(res != 0)

    def finalize(self):
        buf = self._backend._ffi.new("unsigned char[]", _POLY1305_TAG_SIZE)
        outlen = self._backend._ffi.new("size_t *")
        res = self._backend._lib.EVP_DigestSignFinal(self._ctx, buf, outlen)
        self._backend.openssl_assert(res != 0)
        self._backend.openssl_assert(outlen[0] == _POLY1305_TAG_SIZE)
        return self._backend._ffi.buffer(buf)[: outlen[0]]

    def verify(self, tag):
        mac = self.finalize()
        if not constant_time.bytes_eq(mac, tag):
            raise InvalidSignature("Value did not match computed tag.")
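The _Poly1305Context above is exposed through cryptography.hazmat.primitives.poly1305.Poly1305 and needs OpenSSL 1.1.1 or newer. A minimal, hedged sketch; the all-zero key is illustration only — real Poly1305 keys are 32 random bytes and must never be reused across messages:

from cryptography.exceptions import InvalidSignature
from cryptography.hazmat.primitives import poly1305

key = b"\x00" * 32              # _POLY1305_KEY_SIZE; illustration only
p = poly1305.Poly1305(key)
p.update(b"message to authenticate")
tag = p.finalize()              # 16-byte (_POLY1305_TAG_SIZE) tag

p2 = poly1305.Poly1305(key)
p2.update(b"message to authenticate")
p2.verify(tag)                  # raises InvalidSignature on mismatch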
@ -0,0 +1,465 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.exceptions import (
|
||||
InvalidSignature,
|
||||
UnsupportedAlgorithm,
|
||||
_Reasons,
|
||||
)
|
||||
from cryptography.hazmat.backends.openssl.utils import (
|
||||
_calculate_digest_and_algorithm,
|
||||
_check_not_prehashed,
|
||||
_warn_sign_verify_deprecated,
|
||||
)
|
||||
from cryptography.hazmat.primitives import hashes
|
||||
from cryptography.hazmat.primitives.asymmetric import (
|
||||
AsymmetricSignatureContext,
|
||||
AsymmetricVerificationContext,
|
||||
rsa,
|
||||
)
|
||||
from cryptography.hazmat.primitives.asymmetric.padding import (
|
||||
AsymmetricPadding,
|
||||
MGF1,
|
||||
OAEP,
|
||||
PKCS1v15,
|
||||
PSS,
|
||||
calculate_max_pss_salt_length,
|
||||
)
|
||||
from cryptography.hazmat.primitives.asymmetric.rsa import (
|
||||
RSAPrivateKeyWithSerialization,
|
||||
RSAPublicKeyWithSerialization,
|
||||
)
|
||||
|
||||
|
||||
def _get_rsa_pss_salt_length(pss, key, hash_algorithm):
|
||||
salt = pss._salt_length
|
||||
|
||||
if salt is MGF1.MAX_LENGTH or salt is PSS.MAX_LENGTH:
|
||||
return calculate_max_pss_salt_length(key, hash_algorithm)
|
||||
else:
|
||||
return salt
|
||||
|
||||
|
||||
def _enc_dec_rsa(backend, key, data, padding):
|
||||
if not isinstance(padding, AsymmetricPadding):
|
||||
raise TypeError("Padding must be an instance of AsymmetricPadding.")
|
||||
|
||||
if isinstance(padding, PKCS1v15):
|
||||
padding_enum = backend._lib.RSA_PKCS1_PADDING
|
||||
elif isinstance(padding, OAEP):
|
||||
padding_enum = backend._lib.RSA_PKCS1_OAEP_PADDING
|
||||
|
||||
if not isinstance(padding._mgf, MGF1):
|
||||
raise UnsupportedAlgorithm(
|
||||
"Only MGF1 is supported by this backend.",
|
||||
_Reasons.UNSUPPORTED_MGF,
|
||||
)
|
||||
|
||||
if not backend.rsa_padding_supported(padding):
|
||||
raise UnsupportedAlgorithm(
|
||||
"This combination of padding and hash algorithm is not "
|
||||
"supported by this backend.",
|
||||
_Reasons.UNSUPPORTED_PADDING,
|
||||
)
|
||||
|
||||
else:
|
||||
raise UnsupportedAlgorithm(
|
||||
"{} is not supported by this backend.".format(padding.name),
|
||||
_Reasons.UNSUPPORTED_PADDING,
|
||||
)
|
||||
|
||||
return _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum, padding)
|
||||
|
||||
|
||||
def _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum, padding):
|
||||
if isinstance(key, _RSAPublicKey):
|
||||
init = backend._lib.EVP_PKEY_encrypt_init
|
||||
crypt = backend._lib.EVP_PKEY_encrypt
|
||||
else:
|
||||
init = backend._lib.EVP_PKEY_decrypt_init
|
||||
crypt = backend._lib.EVP_PKEY_decrypt
|
||||
|
||||
pkey_ctx = backend._lib.EVP_PKEY_CTX_new(key._evp_pkey, backend._ffi.NULL)
|
||||
backend.openssl_assert(pkey_ctx != backend._ffi.NULL)
|
||||
pkey_ctx = backend._ffi.gc(pkey_ctx, backend._lib.EVP_PKEY_CTX_free)
|
||||
res = init(pkey_ctx)
|
||||
backend.openssl_assert(res == 1)
|
||||
res = backend._lib.EVP_PKEY_CTX_set_rsa_padding(pkey_ctx, padding_enum)
|
||||
backend.openssl_assert(res > 0)
|
||||
buf_size = backend._lib.EVP_PKEY_size(key._evp_pkey)
|
||||
backend.openssl_assert(buf_size > 0)
|
||||
if isinstance(padding, OAEP) and backend._lib.Cryptography_HAS_RSA_OAEP_MD:
|
||||
mgf1_md = backend._evp_md_non_null_from_algorithm(
|
||||
padding._mgf._algorithm
|
||||
)
|
||||
res = backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md(pkey_ctx, mgf1_md)
|
||||
backend.openssl_assert(res > 0)
|
||||
oaep_md = backend._evp_md_non_null_from_algorithm(padding._algorithm)
|
||||
res = backend._lib.EVP_PKEY_CTX_set_rsa_oaep_md(pkey_ctx, oaep_md)
|
||||
backend.openssl_assert(res > 0)
|
||||
|
||||
if (
|
||||
isinstance(padding, OAEP)
|
||||
and padding._label is not None
|
||||
and len(padding._label) > 0
|
||||
):
|
||||
# set0_rsa_oaep_label takes ownership of the char * so we need to
|
||||
# copy it into some new memory
|
||||
labelptr = backend._lib.OPENSSL_malloc(len(padding._label))
|
||||
backend.openssl_assert(labelptr != backend._ffi.NULL)
|
||||
backend._ffi.memmove(labelptr, padding._label, len(padding._label))
|
||||
res = backend._lib.EVP_PKEY_CTX_set0_rsa_oaep_label(
|
||||
pkey_ctx, labelptr, len(padding._label)
|
||||
)
|
||||
backend.openssl_assert(res == 1)
|
||||
|
||||
outlen = backend._ffi.new("size_t *", buf_size)
|
||||
buf = backend._ffi.new("unsigned char[]", buf_size)
|
||||
# Everything from this line onwards is written with the goal of being as
|
||||
# constant-time as is practical given the constraints of Python and our
|
||||
# API. See Bleichenbacher's '98 attack on RSA, and its many many variants.
|
||||
# As such, you should not attempt to change this (particularly to "clean it
|
||||
# up") without understanding why it was written this way (see
|
||||
# Chesterton's Fence), and without measuring to verify you have not
|
||||
# introduced observable time differences.
|
||||
res = crypt(pkey_ctx, buf, outlen, data, len(data))
|
||||
resbuf = backend._ffi.buffer(buf)[: outlen[0]]
|
||||
backend._lib.ERR_clear_error()
|
||||
if res <= 0:
|
||||
raise ValueError("Encryption/decryption failed.")
|
||||
return resbuf
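A hedged sketch of the public-API path that ends in _enc_dec_rsa_pkey_ctx above, using OAEP with SHA-256 (key size and message are illustrative):

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding, rsa

private_key = rsa.generate_private_key(
    public_exponent=65537, key_size=2048, backend=default_backend()
)
oaep = padding.OAEP(
    mgf=padding.MGF1(algorithm=hashes.SHA256()),
    algorithm=hashes.SHA256(),
    label=None,
)
ciphertext = private_key.public_key().encrypt(b"secret", oaep)
plaintext = private_key.decrypt(ciphertext, oaep)  # raises ValueError on failure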
|
||||
|
||||
|
||||
def _rsa_sig_determine_padding(backend, key, padding, algorithm):
|
||||
if not isinstance(padding, AsymmetricPadding):
|
||||
raise TypeError("Expected provider of AsymmetricPadding.")
|
||||
|
||||
pkey_size = backend._lib.EVP_PKEY_size(key._evp_pkey)
|
||||
backend.openssl_assert(pkey_size > 0)
|
||||
|
||||
if isinstance(padding, PKCS1v15):
|
||||
padding_enum = backend._lib.RSA_PKCS1_PADDING
|
||||
elif isinstance(padding, PSS):
|
||||
if not isinstance(padding._mgf, MGF1):
|
||||
raise UnsupportedAlgorithm(
|
||||
"Only MGF1 is supported by this backend.",
|
||||
_Reasons.UNSUPPORTED_MGF,
|
||||
)
|
||||
|
||||
# Size of key in bytes - 2 is the maximum
|
||||
# PSS signature length (salt length is checked later)
|
||||
if pkey_size - algorithm.digest_size - 2 < 0:
|
||||
raise ValueError(
|
||||
"Digest too large for key size. Use a larger "
|
||||
"key or different digest."
|
||||
)
|
||||
|
||||
padding_enum = backend._lib.RSA_PKCS1_PSS_PADDING
|
||||
else:
|
||||
raise UnsupportedAlgorithm(
|
||||
"{} is not supported by this backend.".format(padding.name),
|
||||
_Reasons.UNSUPPORTED_PADDING,
|
||||
)
|
||||
|
||||
return padding_enum
|
||||
|
||||
|
||||
def _rsa_sig_setup(backend, padding, algorithm, key, data, init_func):
|
||||
padding_enum = _rsa_sig_determine_padding(backend, key, padding, algorithm)
|
||||
evp_md = backend._evp_md_non_null_from_algorithm(algorithm)
|
||||
pkey_ctx = backend._lib.EVP_PKEY_CTX_new(key._evp_pkey, backend._ffi.NULL)
|
||||
backend.openssl_assert(pkey_ctx != backend._ffi.NULL)
|
||||
pkey_ctx = backend._ffi.gc(pkey_ctx, backend._lib.EVP_PKEY_CTX_free)
|
||||
res = init_func(pkey_ctx)
|
||||
backend.openssl_assert(res == 1)
|
||||
res = backend._lib.EVP_PKEY_CTX_set_signature_md(pkey_ctx, evp_md)
|
||||
if res == 0:
|
||||
backend._consume_errors()
|
||||
raise UnsupportedAlgorithm(
|
||||
"{} is not supported by this backend for RSA signing.".format(
|
||||
algorithm.name
|
||||
),
|
||||
_Reasons.UNSUPPORTED_HASH,
|
||||
)
|
||||
res = backend._lib.EVP_PKEY_CTX_set_rsa_padding(pkey_ctx, padding_enum)
|
||||
backend.openssl_assert(res > 0)
|
||||
if isinstance(padding, PSS):
|
||||
res = backend._lib.EVP_PKEY_CTX_set_rsa_pss_saltlen(
|
||||
pkey_ctx, _get_rsa_pss_salt_length(padding, key, algorithm)
|
||||
)
|
||||
backend.openssl_assert(res > 0)
|
||||
|
||||
mgf1_md = backend._evp_md_non_null_from_algorithm(
|
||||
padding._mgf._algorithm
|
||||
)
|
||||
res = backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md(pkey_ctx, mgf1_md)
|
||||
backend.openssl_assert(res > 0)
|
||||
|
||||
return pkey_ctx
|
||||
|
||||
|
||||
def _rsa_sig_sign(backend, padding, algorithm, private_key, data):
|
||||
pkey_ctx = _rsa_sig_setup(
|
||||
backend,
|
||||
padding,
|
||||
algorithm,
|
||||
private_key,
|
||||
data,
|
||||
backend._lib.EVP_PKEY_sign_init,
|
||||
)
|
||||
buflen = backend._ffi.new("size_t *")
|
||||
res = backend._lib.EVP_PKEY_sign(
|
||||
pkey_ctx, backend._ffi.NULL, buflen, data, len(data)
|
||||
)
|
||||
backend.openssl_assert(res == 1)
|
||||
buf = backend._ffi.new("unsigned char[]", buflen[0])
|
||||
res = backend._lib.EVP_PKEY_sign(pkey_ctx, buf, buflen, data, len(data))
|
||||
if res != 1:
|
||||
errors = backend._consume_errors_with_text()
|
||||
raise ValueError(
|
||||
"Digest or salt length too long for key size. Use a larger key "
|
||||
"or shorter salt length if you are specifying a PSS salt",
|
||||
errors,
|
||||
)
|
||||
|
||||
return backend._ffi.buffer(buf)[:]
|
||||
|
||||
|
||||
def _rsa_sig_verify(backend, padding, algorithm, public_key, signature, data):
|
||||
pkey_ctx = _rsa_sig_setup(
|
||||
backend,
|
||||
padding,
|
||||
algorithm,
|
||||
public_key,
|
||||
data,
|
||||
backend._lib.EVP_PKEY_verify_init,
|
||||
)
|
||||
res = backend._lib.EVP_PKEY_verify(
|
||||
pkey_ctx, signature, len(signature), data, len(data)
|
||||
)
|
||||
# The previous call can return negative numbers in the event of an
|
||||
# error. This is not a signature failure but we need to fail if it
|
||||
# occurs.
|
||||
backend.openssl_assert(res >= 0)
|
||||
if res == 0:
|
||||
backend._consume_errors()
|
||||
raise InvalidSignature
|
||||
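# Hedged usage sketch of the public API that lands in _rsa_sig_sign and
# _rsa_sig_verify above; only public cryptography names are used and the
# function name itself is hypothetical. verify() raises InvalidSignature on
# mismatch and returns None on success.
def _example_rsa_pss_round_trip(private_key, message):
    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.asymmetric import padding as asym_padding

    pss = asym_padding.PSS(
        mgf=asym_padding.MGF1(hashes.SHA256()),
        salt_length=asym_padding.PSS.MAX_LENGTH,
    )
    signature = private_key.sign(message, pss, hashes.SHA256())
    private_key.public_key().verify(signature, message, pss, hashes.SHA256())
    return signature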
|
||||
|
||||
@utils.register_interface(AsymmetricSignatureContext)
|
||||
class _RSASignatureContext(object):
|
||||
def __init__(self, backend, private_key, padding, algorithm):
|
||||
self._backend = backend
|
||||
self._private_key = private_key
|
||||
|
||||
# We now call _rsa_sig_determine_padding in _rsa_sig_setup. However
|
||||
# we need to make a pointless call to it here so we maintain the
|
||||
# API of erroring on init with this context if the values are invalid.
|
||||
_rsa_sig_determine_padding(backend, private_key, padding, algorithm)
|
||||
self._padding = padding
|
||||
self._algorithm = algorithm
|
||||
self._hash_ctx = hashes.Hash(self._algorithm, self._backend)
|
||||
|
||||
def update(self, data):
|
||||
self._hash_ctx.update(data)
|
||||
|
||||
def finalize(self):
|
||||
return _rsa_sig_sign(
|
||||
self._backend,
|
||||
self._padding,
|
||||
self._algorithm,
|
||||
self._private_key,
|
||||
self._hash_ctx.finalize(),
|
||||
)
|
||||
|
||||
|
||||
@utils.register_interface(AsymmetricVerificationContext)
|
||||
class _RSAVerificationContext(object):
|
||||
def __init__(self, backend, public_key, signature, padding, algorithm):
|
||||
self._backend = backend
|
||||
self._public_key = public_key
|
||||
self._signature = signature
|
||||
self._padding = padding
|
||||
# We now call _rsa_sig_determine_padding in _rsa_sig_setup. However
|
||||
# we need to make a pointless call to it here so we maintain the
|
||||
# API of erroring on init with this context if the values are invalid.
|
||||
_rsa_sig_determine_padding(backend, public_key, padding, algorithm)
|
||||
|
||||
|
||||
self._algorithm = algorithm
|
||||
self._hash_ctx = hashes.Hash(self._algorithm, self._backend)
|
||||
|
||||
def update(self, data):
|
||||
self._hash_ctx.update(data)
|
||||
|
||||
def verify(self):
|
||||
return _rsa_sig_verify(
|
||||
self._backend,
|
||||
self._padding,
|
||||
self._algorithm,
|
||||
self._public_key,
|
||||
self._signature,
|
||||
self._hash_ctx.finalize(),
|
||||
)
|
||||
|
||||
|
||||
@utils.register_interface(RSAPrivateKeyWithSerialization)
|
||||
class _RSAPrivateKey(object):
|
||||
def __init__(self, backend, rsa_cdata, evp_pkey):
|
||||
res = backend._lib.RSA_check_key(rsa_cdata)
|
||||
if res != 1:
|
||||
errors = backend._consume_errors_with_text()
|
||||
raise ValueError("Invalid private key", errors)
|
||||
|
||||
# Blinding is on by default in many versions of OpenSSL, but let's
|
||||
# just be conservative here.
|
||||
res = backend._lib.RSA_blinding_on(rsa_cdata, backend._ffi.NULL)
|
||||
backend.openssl_assert(res == 1)
|
||||
|
||||
self._backend = backend
|
||||
self._rsa_cdata = rsa_cdata
|
||||
self._evp_pkey = evp_pkey
|
||||
|
||||
n = self._backend._ffi.new("BIGNUM **")
|
||||
self._backend._lib.RSA_get0_key(
|
||||
self._rsa_cdata,
|
||||
n,
|
||||
self._backend._ffi.NULL,
|
||||
self._backend._ffi.NULL,
|
||||
)
|
||||
self._backend.openssl_assert(n[0] != self._backend._ffi.NULL)
|
||||
self._key_size = self._backend._lib.BN_num_bits(n[0])
|
||||
|
||||
key_size = utils.read_only_property("_key_size")
|
||||
|
||||
def signer(self, padding, algorithm):
|
||||
_warn_sign_verify_deprecated()
|
||||
_check_not_prehashed(algorithm)
|
||||
return _RSASignatureContext(self._backend, self, padding, algorithm)
|
||||
|
||||
def decrypt(self, ciphertext, padding):
|
||||
key_size_bytes = (self.key_size + 7) // 8
|
||||
if key_size_bytes != len(ciphertext):
|
||||
raise ValueError("Ciphertext length must be equal to key size.")
|
||||
|
||||
return _enc_dec_rsa(self._backend, self, ciphertext, padding)
|
||||
|
||||
def public_key(self):
|
||||
ctx = self._backend._lib.RSAPublicKey_dup(self._rsa_cdata)
|
||||
self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
|
||||
ctx = self._backend._ffi.gc(ctx, self._backend._lib.RSA_free)
|
||||
evp_pkey = self._backend._rsa_cdata_to_evp_pkey(ctx)
|
||||
return _RSAPublicKey(self._backend, ctx, evp_pkey)
|
||||
|
||||
def private_numbers(self):
|
||||
n = self._backend._ffi.new("BIGNUM **")
|
||||
e = self._backend._ffi.new("BIGNUM **")
|
||||
d = self._backend._ffi.new("BIGNUM **")
|
||||
p = self._backend._ffi.new("BIGNUM **")
|
||||
q = self._backend._ffi.new("BIGNUM **")
|
||||
dmp1 = self._backend._ffi.new("BIGNUM **")
|
||||
dmq1 = self._backend._ffi.new("BIGNUM **")
|
||||
iqmp = self._backend._ffi.new("BIGNUM **")
|
||||
self._backend._lib.RSA_get0_key(self._rsa_cdata, n, e, d)
|
||||
self._backend.openssl_assert(n[0] != self._backend._ffi.NULL)
|
||||
self._backend.openssl_assert(e[0] != self._backend._ffi.NULL)
|
||||
self._backend.openssl_assert(d[0] != self._backend._ffi.NULL)
|
||||
self._backend._lib.RSA_get0_factors(self._rsa_cdata, p, q)
|
||||
self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
|
||||
self._backend.openssl_assert(q[0] != self._backend._ffi.NULL)
|
||||
self._backend._lib.RSA_get0_crt_params(
|
||||
self._rsa_cdata, dmp1, dmq1, iqmp
|
||||
)
|
||||
self._backend.openssl_assert(dmp1[0] != self._backend._ffi.NULL)
|
||||
self._backend.openssl_assert(dmq1[0] != self._backend._ffi.NULL)
|
||||
self._backend.openssl_assert(iqmp[0] != self._backend._ffi.NULL)
|
||||
return rsa.RSAPrivateNumbers(
|
||||
p=self._backend._bn_to_int(p[0]),
|
||||
q=self._backend._bn_to_int(q[0]),
|
||||
d=self._backend._bn_to_int(d[0]),
|
||||
dmp1=self._backend._bn_to_int(dmp1[0]),
|
||||
dmq1=self._backend._bn_to_int(dmq1[0]),
|
||||
iqmp=self._backend._bn_to_int(iqmp[0]),
|
||||
public_numbers=rsa.RSAPublicNumbers(
|
||||
e=self._backend._bn_to_int(e[0]),
|
||||
n=self._backend._bn_to_int(n[0]),
|
||||
),
|
||||
)
|
||||
|
||||
def private_bytes(self, encoding, format, encryption_algorithm):
|
||||
return self._backend._private_key_bytes(
|
||||
encoding,
|
||||
format,
|
||||
encryption_algorithm,
|
||||
self,
|
||||
self._evp_pkey,
|
||||
self._rsa_cdata,
|
||||
)
|
||||
|
||||
def sign(self, data, padding, algorithm):
|
||||
data, algorithm = _calculate_digest_and_algorithm(
|
||||
self._backend, data, algorithm
|
||||
)
|
||||
return _rsa_sig_sign(self._backend, padding, algorithm, self, data)
|
||||
|
||||
|
||||
@utils.register_interface(RSAPublicKeyWithSerialization)
|
||||
class _RSAPublicKey(object):
|
||||
def __init__(self, backend, rsa_cdata, evp_pkey):
|
||||
self._backend = backend
|
||||
self._rsa_cdata = rsa_cdata
|
||||
self._evp_pkey = evp_pkey
|
||||
|
||||
n = self._backend._ffi.new("BIGNUM **")
|
||||
self._backend._lib.RSA_get0_key(
|
||||
self._rsa_cdata,
|
||||
n,
|
||||
self._backend._ffi.NULL,
|
||||
self._backend._ffi.NULL,
|
||||
)
|
||||
self._backend.openssl_assert(n[0] != self._backend._ffi.NULL)
|
||||
self._key_size = self._backend._lib.BN_num_bits(n[0])
|
||||
|
||||
key_size = utils.read_only_property("_key_size")
|
||||
|
||||
def verifier(self, signature, padding, algorithm):
|
||||
_warn_sign_verify_deprecated()
|
||||
utils._check_bytes("signature", signature)
|
||||
|
||||
_check_not_prehashed(algorithm)
|
||||
return _RSAVerificationContext(
|
||||
self._backend, self, signature, padding, algorithm
|
||||
)
|
||||
|
||||
def encrypt(self, plaintext, padding):
|
||||
return _enc_dec_rsa(self._backend, self, plaintext, padding)
|
||||
|
||||
def public_numbers(self):
|
||||
n = self._backend._ffi.new("BIGNUM **")
|
||||
e = self._backend._ffi.new("BIGNUM **")
|
||||
self._backend._lib.RSA_get0_key(
|
||||
self._rsa_cdata, n, e, self._backend._ffi.NULL
|
||||
)
|
||||
self._backend.openssl_assert(n[0] != self._backend._ffi.NULL)
|
||||
self._backend.openssl_assert(e[0] != self._backend._ffi.NULL)
|
||||
return rsa.RSAPublicNumbers(
|
||||
e=self._backend._bn_to_int(e[0]),
|
||||
n=self._backend._bn_to_int(n[0]),
|
||||
)
|
||||
|
||||
def public_bytes(self, encoding, format):
|
||||
return self._backend._public_key_bytes(
|
||||
encoding, format, self, self._evp_pkey, self._rsa_cdata
|
||||
)
|
||||
|
||||
def verify(self, signature, data, padding, algorithm):
|
||||
data, algorithm = _calculate_digest_and_algorithm(
|
||||
self._backend, data, algorithm
|
||||
)
|
||||
return _rsa_sig_verify(
|
||||
self._backend, padding, algorithm, self, signature, data
|
||||
)
|
||||
|
|
@@ -0,0 +1,65 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import warnings
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.hazmat.primitives import hashes
|
||||
from cryptography.hazmat.primitives.asymmetric.utils import Prehashed
|
||||
|
||||
|
||||
def _evp_pkey_derive(backend, evp_pkey, peer_public_key):
|
||||
ctx = backend._lib.EVP_PKEY_CTX_new(evp_pkey, backend._ffi.NULL)
|
||||
backend.openssl_assert(ctx != backend._ffi.NULL)
|
||||
ctx = backend._ffi.gc(ctx, backend._lib.EVP_PKEY_CTX_free)
|
||||
res = backend._lib.EVP_PKEY_derive_init(ctx)
|
||||
backend.openssl_assert(res == 1)
|
||||
res = backend._lib.EVP_PKEY_derive_set_peer(ctx, peer_public_key._evp_pkey)
|
||||
backend.openssl_assert(res == 1)
|
||||
keylen = backend._ffi.new("size_t *")
|
||||
res = backend._lib.EVP_PKEY_derive(ctx, backend._ffi.NULL, keylen)
|
||||
backend.openssl_assert(res == 1)
|
||||
backend.openssl_assert(keylen[0] > 0)
|
||||
buf = backend._ffi.new("unsigned char[]", keylen[0])
|
||||
res = backend._lib.EVP_PKEY_derive(ctx, buf, keylen)
|
||||
if res != 1:
|
||||
raise ValueError("Null shared key derived from public/private pair.")
|
||||
|
||||
return backend._ffi.buffer(buf, keylen[0])[:]
|
||||
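# Hedged usage sketch: the public X25519/X448 exchange() API funnels into
# _evp_pkey_derive above. Only public cryptography names are used and the
# function name is hypothetical.
def _example_x25519_exchange():
    from cryptography.hazmat.primitives.asymmetric.x25519 import (
        X25519PrivateKey,
    )

    ours = X25519PrivateKey.generate()
    theirs = X25519PrivateKey.generate().public_key()
    # exchange() returns the 32-byte shared secret computed via EVP_PKEY_derive.
    return ours.exchange(theirs)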
|
||||
|
||||
def _calculate_digest_and_algorithm(backend, data, algorithm):
|
||||
if not isinstance(algorithm, Prehashed):
|
||||
hash_ctx = hashes.Hash(algorithm, backend)
|
||||
hash_ctx.update(data)
|
||||
data = hash_ctx.finalize()
|
||||
else:
|
||||
algorithm = algorithm._algorithm
|
||||
|
||||
if len(data) != algorithm.digest_size:
|
||||
raise ValueError(
|
||||
"The provided data must be the same length as the hash "
|
||||
"algorithm's digest size."
|
||||
)
|
||||
|
||||
return (data, algorithm)
|
||||
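# Hedged sketch of the two call patterns _calculate_digest_and_algorithm
# above normalizes, using only public cryptography APIs (the function name
# is hypothetical): passing a hash algorithm makes the library hash the
# message itself, while Prehashed means the caller supplies a digest of
# exactly algorithm.digest_size bytes.
def _example_prehashed_sign(private_key, message):
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.asymmetric import padding as asym_padding
    from cryptography.hazmat.primitives.asymmetric.utils import Prehashed

    digest_ctx = hashes.Hash(hashes.SHA256(), default_backend())
    digest_ctx.update(message)
    digest = digest_ctx.finalize()

    pkcs1 = asym_padding.PKCS1v15()
    # Both calls produce the same PKCS#1 v1.5 signature: one hashes
    # internally, the other hands over the precomputed digest.
    sig_a = private_key.sign(message, pkcs1, hashes.SHA256())
    sig_b = private_key.sign(digest, pkcs1, Prehashed(hashes.SHA256()))
    assert sig_a == sig_b
    return sig_a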
|
||||
|
||||
def _check_not_prehashed(signature_algorithm):
|
||||
if isinstance(signature_algorithm, Prehashed):
|
||||
raise TypeError(
|
||||
"Prehashed is only supported in the sign and verify methods. "
|
||||
"It cannot be used with signer or verifier."
|
||||
)
|
||||
|
||||
|
||||
def _warn_sign_verify_deprecated():
|
||||
warnings.warn(
|
||||
"signer and verifier have been deprecated. Please use sign "
|
||||
"and verify instead.",
|
||||
utils.PersistentlyDeprecated2017,
|
||||
stacklevel=3,
|
||||
)
|
||||
|
|
@@ -0,0 +1,123 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.hazmat.backends.openssl.utils import _evp_pkey_derive
|
||||
from cryptography.hazmat.primitives import serialization
|
||||
from cryptography.hazmat.primitives.asymmetric.x25519 import (
|
||||
X25519PrivateKey,
|
||||
X25519PublicKey,
|
||||
)
|
||||
|
||||
|
||||
_X25519_KEY_SIZE = 32
|
||||
|
||||
|
||||
@utils.register_interface(X25519PublicKey)
|
||||
class _X25519PublicKey(object):
|
||||
def __init__(self, backend, evp_pkey):
|
||||
self._backend = backend
|
||||
self._evp_pkey = evp_pkey
|
||||
|
||||
def public_bytes(self, encoding, format):
|
||||
if (
|
||||
encoding is serialization.Encoding.Raw
|
||||
or format is serialization.PublicFormat.Raw
|
||||
):
|
||||
if (
|
||||
encoding is not serialization.Encoding.Raw
|
||||
or format is not serialization.PublicFormat.Raw
|
||||
):
|
||||
raise ValueError(
|
||||
"When using Raw both encoding and format must be Raw"
|
||||
)
|
||||
|
||||
return self._raw_public_bytes()
|
||||
|
||||
return self._backend._public_key_bytes(
|
||||
encoding, format, self, self._evp_pkey, None
|
||||
)
|
||||
|
||||
def _raw_public_bytes(self):
|
||||
ucharpp = self._backend._ffi.new("unsigned char **")
|
||||
res = self._backend._lib.EVP_PKEY_get1_tls_encodedpoint(
|
||||
self._evp_pkey, ucharpp
|
||||
)
|
||||
self._backend.openssl_assert(res == 32)
|
||||
self._backend.openssl_assert(ucharpp[0] != self._backend._ffi.NULL)
|
||||
data = self._backend._ffi.gc(
|
||||
ucharpp[0], self._backend._lib.OPENSSL_free
|
||||
)
|
||||
return self._backend._ffi.buffer(data, res)[:]
|
||||
|
||||
|
||||
@utils.register_interface(X25519PrivateKey)
|
||||
class _X25519PrivateKey(object):
|
||||
def __init__(self, backend, evp_pkey):
|
||||
self._backend = backend
|
||||
self._evp_pkey = evp_pkey
|
||||
|
||||
def public_key(self):
|
||||
bio = self._backend._create_mem_bio_gc()
|
||||
res = self._backend._lib.i2d_PUBKEY_bio(bio, self._evp_pkey)
|
||||
self._backend.openssl_assert(res == 1)
|
||||
evp_pkey = self._backend._lib.d2i_PUBKEY_bio(
|
||||
bio, self._backend._ffi.NULL
|
||||
)
|
||||
self._backend.openssl_assert(evp_pkey != self._backend._ffi.NULL)
|
||||
evp_pkey = self._backend._ffi.gc(
|
||||
evp_pkey, self._backend._lib.EVP_PKEY_free
|
||||
)
|
||||
return _X25519PublicKey(self._backend, evp_pkey)
|
||||
|
||||
def exchange(self, peer_public_key):
|
||||
if not isinstance(peer_public_key, X25519PublicKey):
|
||||
raise TypeError("peer_public_key must be X25519PublicKey.")
|
||||
|
||||
return _evp_pkey_derive(self._backend, self._evp_pkey, peer_public_key)
|
||||
|
||||
def private_bytes(self, encoding, format, encryption_algorithm):
|
||||
if (
|
||||
encoding is serialization.Encoding.Raw
|
||||
            or format is serialization.PrivateFormat.Raw
|
||||
):
|
||||
if (
|
||||
format is not serialization.PrivateFormat.Raw
|
||||
or encoding is not serialization.Encoding.Raw
|
||||
or not isinstance(
|
||||
encryption_algorithm, serialization.NoEncryption
|
||||
)
|
||||
):
|
||||
raise ValueError(
|
||||
"When using Raw both encoding and format must be Raw "
|
||||
"and encryption_algorithm must be NoEncryption()"
|
||||
)
|
||||
|
||||
return self._raw_private_bytes()
|
||||
|
||||
return self._backend._private_key_bytes(
|
||||
encoding, format, encryption_algorithm, self, self._evp_pkey, None
|
||||
)
|
||||
|
||||
def _raw_private_bytes(self):
|
||||
# When we drop support for CRYPTOGRAPHY_OPENSSL_LESS_THAN_111 we can
|
||||
# switch this to EVP_PKEY_new_raw_private_key
|
||||
# The trick we use here is serializing to a PKCS8 key and just
|
||||
# using the last 32 bytes, which is the key itself.
|
||||
bio = self._backend._create_mem_bio_gc()
|
||||
res = self._backend._lib.i2d_PKCS8PrivateKey_bio(
|
||||
bio,
|
||||
self._evp_pkey,
|
||||
self._backend._ffi.NULL,
|
||||
self._backend._ffi.NULL,
|
||||
0,
|
||||
self._backend._ffi.NULL,
|
||||
self._backend._ffi.NULL,
|
||||
)
|
||||
self._backend.openssl_assert(res == 1)
|
||||
pkcs8 = self._backend._read_mem_bio(bio)
|
||||
self._backend.openssl_assert(len(pkcs8) == 48)
|
||||
return pkcs8[-_X25519_KEY_SIZE:]
|
||||
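# Illustration of the layout assumption behind the slice in
# _raw_private_bytes above: an unencrypted X25519 PKCS#8 blob is a fixed
# 16-byte DER prefix (SEQUENCE, version 0, the id-X25519
# AlgorithmIdentifier, and the OCTET STRING headers) followed by the 32 raw
# key bytes, 48 bytes in total. The helper below is a hypothetical sketch,
# not used by the backend.
_X25519_PKCS8_PREFIX = bytes.fromhex("302e020100300506032b656e04220420")


def _example_strip_pkcs8(pkcs8_bytes):
    assert pkcs8_bytes.startswith(_X25519_PKCS8_PREFIX)
    assert len(pkcs8_bytes) == 48
    return pkcs8_bytes[len(_X25519_PKCS8_PREFIX):]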
|
|
@@ -0,0 +1,107 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.hazmat.backends.openssl.utils import _evp_pkey_derive
|
||||
from cryptography.hazmat.primitives import serialization
|
||||
from cryptography.hazmat.primitives.asymmetric.x448 import (
|
||||
X448PrivateKey,
|
||||
X448PublicKey,
|
||||
)
|
||||
|
||||
_X448_KEY_SIZE = 56
|
||||
|
||||
|
||||
@utils.register_interface(X448PublicKey)
|
||||
class _X448PublicKey(object):
|
||||
def __init__(self, backend, evp_pkey):
|
||||
self._backend = backend
|
||||
self._evp_pkey = evp_pkey
|
||||
|
||||
def public_bytes(self, encoding, format):
|
||||
if (
|
||||
encoding is serialization.Encoding.Raw
|
||||
or format is serialization.PublicFormat.Raw
|
||||
):
|
||||
if (
|
||||
encoding is not serialization.Encoding.Raw
|
||||
or format is not serialization.PublicFormat.Raw
|
||||
):
|
||||
raise ValueError(
|
||||
"When using Raw both encoding and format must be Raw"
|
||||
)
|
||||
|
||||
return self._raw_public_bytes()
|
||||
|
||||
return self._backend._public_key_bytes(
|
||||
encoding, format, self, self._evp_pkey, None
|
||||
)
|
||||
|
||||
def _raw_public_bytes(self):
|
||||
buf = self._backend._ffi.new("unsigned char []", _X448_KEY_SIZE)
|
||||
buflen = self._backend._ffi.new("size_t *", _X448_KEY_SIZE)
|
||||
res = self._backend._lib.EVP_PKEY_get_raw_public_key(
|
||||
self._evp_pkey, buf, buflen
|
||||
)
|
||||
self._backend.openssl_assert(res == 1)
|
||||
self._backend.openssl_assert(buflen[0] == _X448_KEY_SIZE)
|
||||
return self._backend._ffi.buffer(buf, _X448_KEY_SIZE)[:]
|
||||
|
||||
|
||||
@utils.register_interface(X448PrivateKey)
|
||||
class _X448PrivateKey(object):
|
||||
def __init__(self, backend, evp_pkey):
|
||||
self._backend = backend
|
||||
self._evp_pkey = evp_pkey
|
||||
|
||||
def public_key(self):
|
||||
buf = self._backend._ffi.new("unsigned char []", _X448_KEY_SIZE)
|
||||
buflen = self._backend._ffi.new("size_t *", _X448_KEY_SIZE)
|
||||
res = self._backend._lib.EVP_PKEY_get_raw_public_key(
|
||||
self._evp_pkey, buf, buflen
|
||||
)
|
||||
self._backend.openssl_assert(res == 1)
|
||||
self._backend.openssl_assert(buflen[0] == _X448_KEY_SIZE)
|
||||
return self._backend.x448_load_public_bytes(buf)
|
||||
|
||||
def exchange(self, peer_public_key):
|
||||
if not isinstance(peer_public_key, X448PublicKey):
|
||||
raise TypeError("peer_public_key must be X448PublicKey.")
|
||||
|
||||
return _evp_pkey_derive(self._backend, self._evp_pkey, peer_public_key)
|
||||
|
||||
def private_bytes(self, encoding, format, encryption_algorithm):
|
||||
if (
|
||||
encoding is serialization.Encoding.Raw
|
||||
            or format is serialization.PrivateFormat.Raw
|
||||
):
|
||||
if (
|
||||
format is not serialization.PrivateFormat.Raw
|
||||
or encoding is not serialization.Encoding.Raw
|
||||
or not isinstance(
|
||||
encryption_algorithm, serialization.NoEncryption
|
||||
)
|
||||
):
|
||||
raise ValueError(
|
||||
"When using Raw both encoding and format must be Raw "
|
||||
"and encryption_algorithm must be NoEncryption()"
|
||||
)
|
||||
|
||||
return self._raw_private_bytes()
|
||||
|
||||
return self._backend._private_key_bytes(
|
||||
encoding, format, encryption_algorithm, self, self._evp_pkey, None
|
||||
)
|
||||
|
||||
def _raw_private_bytes(self):
|
||||
buf = self._backend._ffi.new("unsigned char []", _X448_KEY_SIZE)
|
||||
buflen = self._backend._ffi.new("size_t *", _X448_KEY_SIZE)
|
||||
res = self._backend._lib.EVP_PKEY_get_raw_private_key(
|
||||
self._evp_pkey, buf, buflen
|
||||
)
|
||||
self._backend.openssl_assert(res == 1)
|
||||
self._backend.openssl_assert(buflen[0] == _X448_KEY_SIZE)
|
||||
return self._backend._ffi.buffer(buf, _X448_KEY_SIZE)[:]
|
||||
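# Hedged round-trip sketch of the Raw serialization path implemented above,
# using only public cryptography APIs (the function name is hypothetical):
# Raw private bytes are exactly _X448_KEY_SIZE (56) bytes and load back
# losslessly via from_private_bytes().
def _example_x448_raw_round_trip():
    from cryptography.hazmat.primitives import serialization as ser
    from cryptography.hazmat.primitives.asymmetric.x448 import X448PrivateKey

    key = X448PrivateKey.generate()
    raw = key.private_bytes(
        ser.Encoding.Raw, ser.PrivateFormat.Raw, ser.NoEncryption()
    )
    assert len(raw) == 56
    return X448PrivateKey.from_private_bytes(raw)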
|
|
@@ -0,0 +1,587 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import datetime
|
||||
import operator
|
||||
|
||||
from cryptography import utils, x509
|
||||
from cryptography.exceptions import UnsupportedAlgorithm
|
||||
from cryptography.hazmat.backends.openssl.decode_asn1 import (
|
||||
_asn1_integer_to_int,
|
||||
_asn1_string_to_bytes,
|
||||
_decode_x509_name,
|
||||
_obj2txt,
|
||||
_parse_asn1_time,
|
||||
)
|
||||
from cryptography.hazmat.backends.openssl.encode_asn1 import (
|
||||
_encode_asn1_int_gc,
|
||||
_txt2obj_gc,
|
||||
)
|
||||
from cryptography.hazmat.primitives import hashes, serialization
|
||||
from cryptography.hazmat.primitives.asymmetric import dsa, ec, rsa
|
||||
from cryptography.x509.name import _ASN1Type
|
||||
|
||||
|
||||
@utils.register_interface(x509.Certificate)
|
||||
class _Certificate(object):
|
||||
def __init__(self, backend, x509_cert):
|
||||
self._backend = backend
|
||||
self._x509 = x509_cert
|
||||
|
||||
version = self._backend._lib.X509_get_version(self._x509)
|
||||
if version == 0:
|
||||
self._version = x509.Version.v1
|
||||
elif version == 2:
|
||||
self._version = x509.Version.v3
|
||||
else:
|
||||
raise x509.InvalidVersion(
|
||||
"{} is not a valid X509 version".format(version), version
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return "<Certificate(subject={}, ...)>".format(self.subject)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, x509.Certificate):
|
||||
return NotImplemented
|
||||
|
||||
res = self._backend._lib.X509_cmp(self._x509, other._x509)
|
||||
return res == 0
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
def __hash__(self):
|
||||
return hash(self.public_bytes(serialization.Encoding.DER))
|
||||
|
||||
def __deepcopy__(self, memo):
|
||||
return self
|
||||
|
||||
def fingerprint(self, algorithm):
|
||||
h = hashes.Hash(algorithm, self._backend)
|
||||
h.update(self.public_bytes(serialization.Encoding.DER))
|
||||
return h.finalize()
|
||||
|
||||
version = utils.read_only_property("_version")
|
||||
|
||||
@property
|
||||
def serial_number(self):
|
||||
asn1_int = self._backend._lib.X509_get_serialNumber(self._x509)
|
||||
self._backend.openssl_assert(asn1_int != self._backend._ffi.NULL)
|
||||
return _asn1_integer_to_int(self._backend, asn1_int)
|
||||
|
||||
def public_key(self):
|
||||
pkey = self._backend._lib.X509_get_pubkey(self._x509)
|
||||
if pkey == self._backend._ffi.NULL:
|
||||
# Remove errors from the stack.
|
||||
self._backend._consume_errors()
|
||||
raise ValueError("Certificate public key is of an unknown type")
|
||||
|
||||
pkey = self._backend._ffi.gc(pkey, self._backend._lib.EVP_PKEY_free)
|
||||
|
||||
return self._backend._evp_pkey_to_public_key(pkey)
|
||||
|
||||
@property
|
||||
def not_valid_before(self):
|
||||
asn1_time = self._backend._lib.X509_getm_notBefore(self._x509)
|
||||
return _parse_asn1_time(self._backend, asn1_time)
|
||||
|
||||
@property
|
||||
def not_valid_after(self):
|
||||
asn1_time = self._backend._lib.X509_getm_notAfter(self._x509)
|
||||
return _parse_asn1_time(self._backend, asn1_time)
|
||||
|
||||
@property
|
||||
def issuer(self):
|
||||
issuer = self._backend._lib.X509_get_issuer_name(self._x509)
|
||||
self._backend.openssl_assert(issuer != self._backend._ffi.NULL)
|
||||
return _decode_x509_name(self._backend, issuer)
|
||||
|
||||
@property
|
||||
def subject(self):
|
||||
subject = self._backend._lib.X509_get_subject_name(self._x509)
|
||||
self._backend.openssl_assert(subject != self._backend._ffi.NULL)
|
||||
return _decode_x509_name(self._backend, subject)
|
||||
|
||||
@property
|
||||
def signature_hash_algorithm(self):
|
||||
oid = self.signature_algorithm_oid
|
||||
try:
|
||||
return x509._SIG_OIDS_TO_HASH[oid]
|
||||
except KeyError:
|
||||
raise UnsupportedAlgorithm(
|
||||
"Signature algorithm OID:{} not recognized".format(oid)
|
||||
)
|
||||
|
||||
@property
|
||||
def signature_algorithm_oid(self):
|
||||
alg = self._backend._ffi.new("X509_ALGOR **")
|
||||
self._backend._lib.X509_get0_signature(
|
||||
self._backend._ffi.NULL, alg, self._x509
|
||||
)
|
||||
self._backend.openssl_assert(alg[0] != self._backend._ffi.NULL)
|
||||
oid = _obj2txt(self._backend, alg[0].algorithm)
|
||||
return x509.ObjectIdentifier(oid)
|
||||
|
||||
@utils.cached_property
|
||||
def extensions(self):
|
||||
return self._backend._certificate_extension_parser.parse(self._x509)
|
||||
|
||||
@property
|
||||
def signature(self):
|
||||
sig = self._backend._ffi.new("ASN1_BIT_STRING **")
|
||||
self._backend._lib.X509_get0_signature(
|
||||
sig, self._backend._ffi.NULL, self._x509
|
||||
)
|
||||
self._backend.openssl_assert(sig[0] != self._backend._ffi.NULL)
|
||||
return _asn1_string_to_bytes(self._backend, sig[0])
|
||||
|
||||
@property
|
||||
def tbs_certificate_bytes(self):
|
||||
pp = self._backend._ffi.new("unsigned char **")
|
||||
res = self._backend._lib.i2d_re_X509_tbs(self._x509, pp)
|
||||
self._backend.openssl_assert(res > 0)
|
||||
pp = self._backend._ffi.gc(
|
||||
pp, lambda pointer: self._backend._lib.OPENSSL_free(pointer[0])
|
||||
)
|
||||
return self._backend._ffi.buffer(pp[0], res)[:]
|
||||
|
||||
def public_bytes(self, encoding):
|
||||
bio = self._backend._create_mem_bio_gc()
|
||||
if encoding is serialization.Encoding.PEM:
|
||||
res = self._backend._lib.PEM_write_bio_X509(bio, self._x509)
|
||||
elif encoding is serialization.Encoding.DER:
|
||||
res = self._backend._lib.i2d_X509_bio(bio, self._x509)
|
||||
else:
|
||||
raise TypeError("encoding must be an item from the Encoding enum")
|
||||
|
||||
self._backend.openssl_assert(res == 1)
|
||||
return self._backend._read_mem_bio(bio)
|
||||
|
||||
|
||||
@utils.register_interface(x509.RevokedCertificate)
|
||||
class _RevokedCertificate(object):
|
||||
def __init__(self, backend, crl, x509_revoked):
|
||||
self._backend = backend
|
||||
# The X509_REVOKED_value is a X509_REVOKED * that has
|
||||
# no reference counting. This means when X509_CRL_free is
|
||||
# called then the CRL and all X509_REVOKED * are freed. Since
|
||||
# you can retain a reference to a single revoked certificate
|
||||
# and let the CRL fall out of scope we need to retain a
|
||||
# private reference to the CRL inside the RevokedCertificate
|
||||
# object to prevent the gc from being called inappropriately.
|
||||
self._crl = crl
|
||||
self._x509_revoked = x509_revoked
|
||||
|
||||
@property
|
||||
def serial_number(self):
|
||||
asn1_int = self._backend._lib.X509_REVOKED_get0_serialNumber(
|
||||
self._x509_revoked
|
||||
)
|
||||
self._backend.openssl_assert(asn1_int != self._backend._ffi.NULL)
|
||||
return _asn1_integer_to_int(self._backend, asn1_int)
|
||||
|
||||
@property
|
||||
def revocation_date(self):
|
||||
return _parse_asn1_time(
|
||||
self._backend,
|
||||
self._backend._lib.X509_REVOKED_get0_revocationDate(
|
||||
self._x509_revoked
|
||||
),
|
||||
)
|
||||
|
||||
@utils.cached_property
|
||||
def extensions(self):
|
||||
return self._backend._revoked_cert_extension_parser.parse(
|
||||
self._x509_revoked
|
||||
)
|
||||
|
||||
|
||||
@utils.register_interface(x509.CertificateRevocationList)
|
||||
class _CertificateRevocationList(object):
|
||||
def __init__(self, backend, x509_crl):
|
||||
self._backend = backend
|
||||
self._x509_crl = x509_crl
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, x509.CertificateRevocationList):
|
||||
return NotImplemented
|
||||
|
||||
res = self._backend._lib.X509_CRL_cmp(self._x509_crl, other._x509_crl)
|
||||
return res == 0
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
def fingerprint(self, algorithm):
|
||||
h = hashes.Hash(algorithm, self._backend)
|
||||
bio = self._backend._create_mem_bio_gc()
|
||||
res = self._backend._lib.i2d_X509_CRL_bio(bio, self._x509_crl)
|
||||
self._backend.openssl_assert(res == 1)
|
||||
der = self._backend._read_mem_bio(bio)
|
||||
h.update(der)
|
||||
return h.finalize()
|
||||
|
||||
@utils.cached_property
|
||||
def _sorted_crl(self):
|
||||
# X509_CRL_get0_by_serial sorts in place, which breaks a variety of
|
||||
# things we don't want to break (like iteration and the signature).
|
||||
# Let's dupe it and sort that instead.
|
||||
dup = self._backend._lib.X509_CRL_dup(self._x509_crl)
|
||||
self._backend.openssl_assert(dup != self._backend._ffi.NULL)
|
||||
dup = self._backend._ffi.gc(dup, self._backend._lib.X509_CRL_free)
|
||||
return dup
|
||||
|
||||
def get_revoked_certificate_by_serial_number(self, serial_number):
|
||||
revoked = self._backend._ffi.new("X509_REVOKED **")
|
||||
asn1_int = _encode_asn1_int_gc(self._backend, serial_number)
|
||||
res = self._backend._lib.X509_CRL_get0_by_serial(
|
||||
self._sorted_crl, revoked, asn1_int
|
||||
)
|
||||
if res == 0:
|
||||
return None
|
||||
else:
|
||||
self._backend.openssl_assert(revoked[0] != self._backend._ffi.NULL)
|
||||
return _RevokedCertificate(
|
||||
self._backend, self._sorted_crl, revoked[0]
|
||||
)
|
||||
|
||||
@property
|
||||
def signature_hash_algorithm(self):
|
||||
oid = self.signature_algorithm_oid
|
||||
try:
|
||||
return x509._SIG_OIDS_TO_HASH[oid]
|
||||
except KeyError:
|
||||
raise UnsupportedAlgorithm(
|
||||
"Signature algorithm OID:{} not recognized".format(oid)
|
||||
)
|
||||
|
||||
@property
|
||||
def signature_algorithm_oid(self):
|
||||
alg = self._backend._ffi.new("X509_ALGOR **")
|
||||
self._backend._lib.X509_CRL_get0_signature(
|
||||
self._x509_crl, self._backend._ffi.NULL, alg
|
||||
)
|
||||
self._backend.openssl_assert(alg[0] != self._backend._ffi.NULL)
|
||||
oid = _obj2txt(self._backend, alg[0].algorithm)
|
||||
return x509.ObjectIdentifier(oid)
|
||||
|
||||
@property
|
||||
def issuer(self):
|
||||
issuer = self._backend._lib.X509_CRL_get_issuer(self._x509_crl)
|
||||
self._backend.openssl_assert(issuer != self._backend._ffi.NULL)
|
||||
return _decode_x509_name(self._backend, issuer)
|
||||
|
||||
@property
|
||||
def next_update(self):
|
||||
nu = self._backend._lib.X509_CRL_get_nextUpdate(self._x509_crl)
|
||||
self._backend.openssl_assert(nu != self._backend._ffi.NULL)
|
||||
return _parse_asn1_time(self._backend, nu)
|
||||
|
||||
@property
|
||||
def last_update(self):
|
||||
lu = self._backend._lib.X509_CRL_get_lastUpdate(self._x509_crl)
|
||||
self._backend.openssl_assert(lu != self._backend._ffi.NULL)
|
||||
return _parse_asn1_time(self._backend, lu)
|
||||
|
||||
@property
|
||||
def signature(self):
|
||||
sig = self._backend._ffi.new("ASN1_BIT_STRING **")
|
||||
self._backend._lib.X509_CRL_get0_signature(
|
||||
self._x509_crl, sig, self._backend._ffi.NULL
|
||||
)
|
||||
self._backend.openssl_assert(sig[0] != self._backend._ffi.NULL)
|
||||
return _asn1_string_to_bytes(self._backend, sig[0])
|
||||
|
||||
@property
|
||||
def tbs_certlist_bytes(self):
|
||||
pp = self._backend._ffi.new("unsigned char **")
|
||||
res = self._backend._lib.i2d_re_X509_CRL_tbs(self._x509_crl, pp)
|
||||
self._backend.openssl_assert(res > 0)
|
||||
pp = self._backend._ffi.gc(
|
||||
pp, lambda pointer: self._backend._lib.OPENSSL_free(pointer[0])
|
||||
)
|
||||
return self._backend._ffi.buffer(pp[0], res)[:]
|
||||
|
||||
def public_bytes(self, encoding):
|
||||
bio = self._backend._create_mem_bio_gc()
|
||||
if encoding is serialization.Encoding.PEM:
|
||||
res = self._backend._lib.PEM_write_bio_X509_CRL(
|
||||
bio, self._x509_crl
|
||||
)
|
||||
elif encoding is serialization.Encoding.DER:
|
||||
res = self._backend._lib.i2d_X509_CRL_bio(bio, self._x509_crl)
|
||||
else:
|
||||
raise TypeError("encoding must be an item from the Encoding enum")
|
||||
|
||||
self._backend.openssl_assert(res == 1)
|
||||
return self._backend._read_mem_bio(bio)
|
||||
|
||||
def _revoked_cert(self, idx):
|
||||
revoked = self._backend._lib.X509_CRL_get_REVOKED(self._x509_crl)
|
||||
r = self._backend._lib.sk_X509_REVOKED_value(revoked, idx)
|
||||
self._backend.openssl_assert(r != self._backend._ffi.NULL)
|
||||
return _RevokedCertificate(self._backend, self, r)
|
||||
|
||||
def __iter__(self):
|
||||
for i in range(len(self)):
|
||||
yield self._revoked_cert(i)
|
||||
|
||||
def __getitem__(self, idx):
|
||||
if isinstance(idx, slice):
|
||||
start, stop, step = idx.indices(len(self))
|
||||
return [self._revoked_cert(i) for i in range(start, stop, step)]
|
||||
else:
|
||||
idx = operator.index(idx)
|
||||
if idx < 0:
|
||||
idx += len(self)
|
||||
if not 0 <= idx < len(self):
|
||||
raise IndexError
|
||||
return self._revoked_cert(idx)
|
||||
|
||||
def __len__(self):
|
||||
revoked = self._backend._lib.X509_CRL_get_REVOKED(self._x509_crl)
|
||||
if revoked == self._backend._ffi.NULL:
|
||||
return 0
|
||||
else:
|
||||
return self._backend._lib.sk_X509_REVOKED_num(revoked)
|
||||
|
||||
@utils.cached_property
|
||||
def extensions(self):
|
||||
return self._backend._crl_extension_parser.parse(self._x509_crl)
|
||||
|
||||
def is_signature_valid(self, public_key):
|
||||
if not isinstance(
|
||||
public_key,
|
||||
(dsa.DSAPublicKey, rsa.RSAPublicKey, ec.EllipticCurvePublicKey),
|
||||
):
|
||||
raise TypeError(
|
||||
"Expecting one of DSAPublicKey, RSAPublicKey,"
|
||||
" or EllipticCurvePublicKey."
|
||||
)
|
||||
res = self._backend._lib.X509_CRL_verify(
|
||||
self._x509_crl, public_key._evp_pkey
|
||||
)
|
||||
|
||||
if res != 1:
|
||||
self._backend._consume_errors()
|
||||
return False
|
||||
|
||||
return True
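# Hedged usage sketch (hypothetical function name, public cryptography APIs
# only) of the serial lookup implemented by
# get_revoked_certificate_by_serial_number above: it returns a
# RevokedCertificate backed by the duplicated, sorted CRL, or None when the
# serial is not in the CRL.
def _example_lookup_revoked_serial(pem_crl_bytes, serial_number):
    from cryptography import x509
    from cryptography.hazmat.backends import default_backend

    crl = x509.load_pem_x509_crl(pem_crl_bytes, default_backend())
    return crl.get_revoked_certificate_by_serial_number(serial_number)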
|
||||
|
||||
|
||||
@utils.register_interface(x509.CertificateSigningRequest)
|
||||
class _CertificateSigningRequest(object):
|
||||
def __init__(self, backend, x509_req):
|
||||
self._backend = backend
|
||||
self._x509_req = x509_req
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, _CertificateSigningRequest):
|
||||
return NotImplemented
|
||||
|
||||
self_bytes = self.public_bytes(serialization.Encoding.DER)
|
||||
other_bytes = other.public_bytes(serialization.Encoding.DER)
|
||||
return self_bytes == other_bytes
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
def __hash__(self):
|
||||
return hash(self.public_bytes(serialization.Encoding.DER))
|
||||
|
||||
def public_key(self):
|
||||
pkey = self._backend._lib.X509_REQ_get_pubkey(self._x509_req)
|
||||
self._backend.openssl_assert(pkey != self._backend._ffi.NULL)
|
||||
pkey = self._backend._ffi.gc(pkey, self._backend._lib.EVP_PKEY_free)
|
||||
return self._backend._evp_pkey_to_public_key(pkey)
|
||||
|
||||
@property
|
||||
def subject(self):
|
||||
subject = self._backend._lib.X509_REQ_get_subject_name(self._x509_req)
|
||||
self._backend.openssl_assert(subject != self._backend._ffi.NULL)
|
||||
return _decode_x509_name(self._backend, subject)
|
||||
|
||||
@property
|
||||
def signature_hash_algorithm(self):
|
||||
oid = self.signature_algorithm_oid
|
||||
try:
|
||||
return x509._SIG_OIDS_TO_HASH[oid]
|
||||
except KeyError:
|
||||
raise UnsupportedAlgorithm(
|
||||
"Signature algorithm OID:{} not recognized".format(oid)
|
||||
)
|
||||
|
||||
@property
|
||||
def signature_algorithm_oid(self):
|
||||
alg = self._backend._ffi.new("X509_ALGOR **")
|
||||
self._backend._lib.X509_REQ_get0_signature(
|
||||
self._x509_req, self._backend._ffi.NULL, alg
|
||||
)
|
||||
self._backend.openssl_assert(alg[0] != self._backend._ffi.NULL)
|
||||
oid = _obj2txt(self._backend, alg[0].algorithm)
|
||||
return x509.ObjectIdentifier(oid)
|
||||
|
||||
@utils.cached_property
|
||||
def extensions(self):
|
||||
x509_exts = self._backend._lib.X509_REQ_get_extensions(self._x509_req)
|
||||
x509_exts = self._backend._ffi.gc(
|
||||
x509_exts,
|
||||
lambda x: self._backend._lib.sk_X509_EXTENSION_pop_free(
|
||||
x,
|
||||
self._backend._ffi.addressof(
|
||||
self._backend._lib._original_lib, "X509_EXTENSION_free"
|
||||
),
|
||||
),
|
||||
)
|
||||
return self._backend._csr_extension_parser.parse(x509_exts)
|
||||
|
||||
def public_bytes(self, encoding):
|
||||
bio = self._backend._create_mem_bio_gc()
|
||||
if encoding is serialization.Encoding.PEM:
|
||||
res = self._backend._lib.PEM_write_bio_X509_REQ(
|
||||
bio, self._x509_req
|
||||
)
|
||||
elif encoding is serialization.Encoding.DER:
|
||||
res = self._backend._lib.i2d_X509_REQ_bio(bio, self._x509_req)
|
||||
else:
|
||||
raise TypeError("encoding must be an item from the Encoding enum")
|
||||
|
||||
self._backend.openssl_assert(res == 1)
|
||||
return self._backend._read_mem_bio(bio)
|
||||
|
||||
@property
|
||||
def tbs_certrequest_bytes(self):
|
||||
pp = self._backend._ffi.new("unsigned char **")
|
||||
res = self._backend._lib.i2d_re_X509_REQ_tbs(self._x509_req, pp)
|
||||
self._backend.openssl_assert(res > 0)
|
||||
pp = self._backend._ffi.gc(
|
||||
pp, lambda pointer: self._backend._lib.OPENSSL_free(pointer[0])
|
||||
)
|
||||
return self._backend._ffi.buffer(pp[0], res)[:]
|
||||
|
||||
@property
|
||||
def signature(self):
|
||||
sig = self._backend._ffi.new("ASN1_BIT_STRING **")
|
||||
self._backend._lib.X509_REQ_get0_signature(
|
||||
self._x509_req, sig, self._backend._ffi.NULL
|
||||
)
|
||||
self._backend.openssl_assert(sig[0] != self._backend._ffi.NULL)
|
||||
return _asn1_string_to_bytes(self._backend, sig[0])
|
||||
|
||||
@property
|
||||
def is_signature_valid(self):
|
||||
pkey = self._backend._lib.X509_REQ_get_pubkey(self._x509_req)
|
||||
self._backend.openssl_assert(pkey != self._backend._ffi.NULL)
|
||||
pkey = self._backend._ffi.gc(pkey, self._backend._lib.EVP_PKEY_free)
|
||||
res = self._backend._lib.X509_REQ_verify(self._x509_req, pkey)
|
||||
|
||||
if res != 1:
|
||||
self._backend._consume_errors()
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def get_attribute_for_oid(self, oid):
|
||||
obj = _txt2obj_gc(self._backend, oid.dotted_string)
|
||||
pos = self._backend._lib.X509_REQ_get_attr_by_OBJ(
|
||||
self._x509_req, obj, -1
|
||||
)
|
||||
if pos == -1:
|
||||
raise x509.AttributeNotFound(
|
||||
"No {} attribute was found".format(oid), oid
|
||||
)
|
||||
|
||||
attr = self._backend._lib.X509_REQ_get_attr(self._x509_req, pos)
|
||||
self._backend.openssl_assert(attr != self._backend._ffi.NULL)
|
||||
# We don't support multiple valued attributes for now.
|
||||
self._backend.openssl_assert(
|
||||
self._backend._lib.X509_ATTRIBUTE_count(attr) == 1
|
||||
)
|
||||
asn1_type = self._backend._lib.X509_ATTRIBUTE_get0_type(attr, 0)
|
||||
self._backend.openssl_assert(asn1_type != self._backend._ffi.NULL)
|
||||
# We need this to ensure that our C type cast is safe.
|
||||
# Also this should always be a sane string type, but we'll see if
|
||||
# that is true in the real world...
|
||||
if asn1_type.type not in (
|
||||
_ASN1Type.UTF8String.value,
|
||||
_ASN1Type.PrintableString.value,
|
||||
_ASN1Type.IA5String.value,
|
||||
):
|
||||
raise ValueError(
|
||||
"OID {} has a disallowed ASN.1 type: {}".format(
|
||||
oid, asn1_type.type
|
||||
)
|
||||
)
|
||||
|
||||
data = self._backend._lib.X509_ATTRIBUTE_get0_data(
|
||||
attr, 0, asn1_type.type, self._backend._ffi.NULL
|
||||
)
|
||||
self._backend.openssl_assert(data != self._backend._ffi.NULL)
|
||||
# This cast is safe iff we assert on the type above to ensure
|
||||
# that it is always a type of ASN1_STRING
|
||||
data = self._backend._ffi.cast("ASN1_STRING *", data)
|
||||
return _asn1_string_to_bytes(self._backend, data)
|
||||
|
||||
|
||||
@utils.register_interface(
|
||||
x509.certificate_transparency.SignedCertificateTimestamp
|
||||
)
|
||||
class _SignedCertificateTimestamp(object):
|
||||
def __init__(self, backend, sct_list, sct):
|
||||
self._backend = backend
|
||||
# Keep the SCT_LIST that this SCT came from alive.
|
||||
self._sct_list = sct_list
|
||||
self._sct = sct
|
||||
|
||||
@property
|
||||
def version(self):
|
||||
version = self._backend._lib.SCT_get_version(self._sct)
|
||||
assert version == self._backend._lib.SCT_VERSION_V1
|
||||
return x509.certificate_transparency.Version.v1
|
||||
|
||||
@property
|
||||
def log_id(self):
|
||||
out = self._backend._ffi.new("unsigned char **")
|
||||
log_id_length = self._backend._lib.SCT_get0_log_id(self._sct, out)
|
||||
assert log_id_length >= 0
|
||||
return self._backend._ffi.buffer(out[0], log_id_length)[:]
|
||||
|
||||
@property
|
||||
def timestamp(self):
|
||||
timestamp = self._backend._lib.SCT_get_timestamp(self._sct)
|
||||
milliseconds = timestamp % 1000
|
||||
return datetime.datetime.utcfromtimestamp(timestamp // 1000).replace(
|
||||
microsecond=milliseconds * 1000
|
||||
)
|
||||
|
||||
@property
|
||||
def entry_type(self):
|
||||
entry_type = self._backend._lib.SCT_get_log_entry_type(self._sct)
|
||||
# We currently only support loading SCTs from the X.509 extension, so
|
||||
# we only have precerts.
|
||||
assert entry_type == self._backend._lib.CT_LOG_ENTRY_TYPE_PRECERT
|
||||
return x509.certificate_transparency.LogEntryType.PRE_CERTIFICATE
|
||||
|
||||
@property
|
||||
def _signature(self):
|
||||
ptrptr = self._backend._ffi.new("unsigned char **")
|
||||
res = self._backend._lib.SCT_get0_signature(self._sct, ptrptr)
|
||||
self._backend.openssl_assert(res > 0)
|
||||
self._backend.openssl_assert(ptrptr[0] != self._backend._ffi.NULL)
|
||||
return self._backend._ffi.buffer(ptrptr[0], res)[:]
|
||||
|
||||
def __hash__(self):
|
||||
return hash(self._signature)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, _SignedCertificateTimestamp):
|
||||
return NotImplemented
|
||||
|
||||
return self._signature == other._signature
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
|
@@ -0,0 +1,5 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

from __future__ import absolute_import, division, print_function
BIN
venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_padding.abi3.so
Executable file
Binary file not shown.
|
|
@@ -0,0 +1,5 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

from __future__ import absolute_import, division, print_function
|
|
@@ -0,0 +1,345 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
|
||||
def cryptography_has_ec2m():
|
||||
return [
|
||||
"EC_POINT_set_affine_coordinates_GF2m",
|
||||
"EC_POINT_get_affine_coordinates_GF2m",
|
||||
"EC_POINT_set_compressed_coordinates_GF2m",
|
||||
]
|
||||
|
||||
|
||||
def cryptography_has_rsa_oaep_md():
|
||||
return [
|
||||
"EVP_PKEY_CTX_set_rsa_oaep_md",
|
||||
]
|
||||
|
||||
|
||||
def cryptography_has_rsa_oaep_label():
|
||||
return [
|
||||
"EVP_PKEY_CTX_set0_rsa_oaep_label",
|
||||
]
|
||||
|
||||
|
||||
def cryptography_has_ssl3_method():
|
||||
return [
|
||||
"SSLv3_method",
|
||||
"SSLv3_client_method",
|
||||
"SSLv3_server_method",
|
||||
]
|
||||
|
||||
|
||||
def cryptography_has_102_verification():
|
||||
return [
|
||||
"X509_V_ERR_SUITE_B_INVALID_VERSION",
|
||||
"X509_V_ERR_SUITE_B_INVALID_ALGORITHM",
|
||||
"X509_V_ERR_SUITE_B_INVALID_CURVE",
|
||||
"X509_V_ERR_SUITE_B_INVALID_SIGNATURE_ALGORITHM",
|
||||
"X509_V_ERR_SUITE_B_LOS_NOT_ALLOWED",
|
||||
"X509_V_ERR_SUITE_B_CANNOT_SIGN_P_384_WITH_P_256",
|
||||
"X509_V_FLAG_SUITEB_128_LOS_ONLY",
|
||||
"X509_V_FLAG_SUITEB_192_LOS",
|
||||
"X509_V_FLAG_SUITEB_128_LOS",
|
||||
]
|
||||
|
||||
|
||||
def cryptography_has_110_verification_params():
|
||||
return ["X509_CHECK_FLAG_NEVER_CHECK_SUBJECT"]
|
||||
|
||||
|
||||
def cryptography_has_set_cert_cb():
|
||||
return [
|
||||
"SSL_CTX_set_cert_cb",
|
||||
"SSL_set_cert_cb",
|
||||
]
|
||||
|
||||
|
||||
def cryptography_has_ssl_st():
|
||||
return [
|
||||
"SSL_ST_BEFORE",
|
||||
"SSL_ST_OK",
|
||||
"SSL_ST_INIT",
|
||||
"SSL_ST_RENEGOTIATE",
|
||||
]
|
||||
|
||||
|
||||
def cryptography_has_tls_st():
|
||||
return [
|
||||
"TLS_ST_BEFORE",
|
||||
"TLS_ST_OK",
|
||||
]
|
||||
|
||||
|
||||
def cryptography_has_locking_callbacks():
|
||||
return [
|
||||
"Cryptography_setup_ssl_threads",
|
||||
]
|
||||
|
||||
|
||||
def cryptography_has_scrypt():
|
||||
return [
|
||||
"EVP_PBE_scrypt",
|
||||
]
|
||||
|
||||
|
||||
def cryptography_has_evp_pkey_dhx():
|
||||
return [
|
||||
"EVP_PKEY_DHX",
|
||||
]
|
||||
|
||||
|
||||
def cryptography_has_mem_functions():
|
||||
return [
|
||||
"Cryptography_CRYPTO_set_mem_functions",
|
||||
]
|
||||
|
||||
|
||||
def cryptography_has_sct():
|
||||
return [
|
||||
"SCT_get_version",
|
||||
"SCT_get_log_entry_type",
|
||||
"SCT_get0_log_id",
|
||||
"SCT_get0_signature",
|
||||
"SCT_get_timestamp",
|
||||
"SCT_set_source",
|
||||
"sk_SCT_num",
|
||||
"sk_SCT_value",
|
||||
"SCT_LIST_free",
|
||||
"sk_SCT_push",
|
||||
"sk_SCT_new_null",
|
||||
"SCT_new",
|
||||
"SCT_set1_log_id",
|
||||
"SCT_set_timestamp",
|
||||
"SCT_set_version",
|
||||
"SCT_set_log_entry_type",
|
||||
]
|
||||
|
||||
|
||||
def cryptography_has_x509_store_ctx_get_issuer():
|
||||
return [
|
||||
"X509_STORE_get_get_issuer",
|
||||
"X509_STORE_set_get_issuer",
|
||||
]
|
||||
|
||||
|
||||
def cryptography_has_x25519():
|
||||
return [
|
||||
"EVP_PKEY_X25519",
|
||||
"NID_X25519",
|
||||
]
|
||||
|
||||
|
||||
def cryptography_has_x448():
|
||||
return [
|
||||
"EVP_PKEY_X448",
|
||||
"NID_X448",
|
||||
]
|
||||
|
||||
|
||||
def cryptography_has_ed448():
|
||||
return [
|
||||
"EVP_PKEY_ED448",
|
||||
"NID_ED448",
|
||||
]
|
||||
|
||||
|
||||
def cryptography_has_ed25519():
|
||||
return [
|
||||
"NID_ED25519",
|
||||
"EVP_PKEY_ED25519",
|
||||
]
|
||||
|
||||
|
||||
def cryptography_has_poly1305():
|
||||
return [
|
||||
"NID_poly1305",
|
||||
"EVP_PKEY_POLY1305",
|
||||
]
|
||||
|
||||
|
||||
def cryptography_has_oneshot_evp_digest_sign_verify():
|
||||
return [
|
||||
"EVP_DigestSign",
|
||||
"EVP_DigestVerify",
|
||||
]
|
||||
|
||||
|
||||
def cryptography_has_evp_digestfinal_xof():
|
||||
return [
|
||||
"EVP_DigestFinalXOF",
|
||||
]
|
||||
|
||||
|
||||
def cryptography_has_evp_pkey_get_set_tls_encodedpoint():
|
||||
return [
|
||||
"EVP_PKEY_get1_tls_encodedpoint",
|
||||
"EVP_PKEY_set1_tls_encodedpoint",
|
||||
]
|
||||
|
||||
|
||||
def cryptography_has_fips():
|
||||
return [
|
||||
"FIPS_mode_set",
|
||||
"FIPS_mode",
|
||||
]
|
||||
|
||||
|
||||
def cryptography_has_ssl_sigalgs():
|
||||
return [
|
||||
"SSL_CTX_set1_sigalgs_list",
|
||||
"SSL_get_sigalgs",
|
||||
]
|
||||
|
||||
|
||||
def cryptography_has_psk():
|
||||
return [
|
||||
"SSL_CTX_use_psk_identity_hint",
|
||||
"SSL_CTX_set_psk_server_callback",
|
||||
"SSL_CTX_set_psk_client_callback",
|
||||
]
|
||||
|
||||
|
||||
def cryptography_has_custom_ext():
|
||||
return [
|
||||
"SSL_CTX_add_client_custom_ext",
|
||||
"SSL_CTX_add_server_custom_ext",
|
||||
"SSL_extension_supported",
|
||||
]
|
||||
|
||||
|
||||
def cryptography_has_openssl_cleanup():
|
||||
return [
|
||||
"OPENSSL_cleanup",
|
||||
]
|
||||
|
||||
|
||||
def cryptography_has_cipher_details():
|
||||
return [
|
||||
"SSL_CIPHER_is_aead",
|
||||
"SSL_CIPHER_get_cipher_nid",
|
||||
"SSL_CIPHER_get_digest_nid",
|
||||
"SSL_CIPHER_get_kx_nid",
|
||||
"SSL_CIPHER_get_auth_nid",
|
||||
]
|
||||
|
||||
|
||||
def cryptography_has_tlsv13():
|
||||
return [
|
||||
"SSL_OP_NO_TLSv1_3",
|
||||
"SSL_VERIFY_POST_HANDSHAKE",
|
||||
"SSL_CTX_set_ciphersuites",
|
||||
"SSL_verify_client_post_handshake",
|
||||
"SSL_CTX_set_post_handshake_auth",
|
||||
"SSL_set_post_handshake_auth",
|
||||
"SSL_SESSION_get_max_early_data",
|
||||
"SSL_write_early_data",
|
||||
"SSL_read_early_data",
|
||||
"SSL_CTX_set_max_early_data",
|
||||
]
|
||||
|
||||
|
||||
def cryptography_has_keylog():
|
||||
return [
|
||||
"SSL_CTX_set_keylog_callback",
|
||||
"SSL_CTX_get_keylog_callback",
|
||||
]
|
||||
|
||||
|
||||
def cryptography_has_raw_key():
|
||||
return [
|
||||
"EVP_PKEY_new_raw_private_key",
|
||||
"EVP_PKEY_new_raw_public_key",
|
||||
"EVP_PKEY_get_raw_private_key",
|
||||
"EVP_PKEY_get_raw_public_key",
|
||||
]
|
||||
|
||||
|
||||
def cryptography_has_engine():
|
||||
return [
|
||||
"ENGINE_by_id",
|
||||
"ENGINE_init",
|
||||
"ENGINE_finish",
|
||||
"ENGINE_get_default_RAND",
|
||||
"ENGINE_set_default_RAND",
|
||||
"ENGINE_unregister_RAND",
|
||||
"ENGINE_ctrl_cmd",
|
||||
"ENGINE_free",
|
||||
"ENGINE_get_name",
|
||||
"Cryptography_add_osrandom_engine",
|
||||
"ENGINE_ctrl_cmd_string",
|
||||
"ENGINE_load_builtin_engines",
|
||||
"ENGINE_load_private_key",
|
||||
"ENGINE_load_public_key",
|
||||
]
|
||||
|
||||
|
||||
def cryptography_has_verified_chain():
|
||||
return [
|
||||
"SSL_get0_verified_chain",
|
||||
]
|
||||
|
||||
|
||||
def cryptography_has_srtp():
|
||||
return [
|
||||
"SSL_CTX_set_tlsext_use_srtp",
|
||||
"SSL_set_tlsext_use_srtp",
|
||||
"SSL_get_selected_srtp_profile",
|
||||
]
|
||||
|
||||
|
||||
# This is a mapping of
|
||||
# {condition: function-returning-names-dependent-on-that-condition} so we can
|
||||
# loop over them and delete unsupported names at runtime. It will be removed
|
||||
# when cffi supports #if in cdef. We use functions instead of just a dict of
|
||||
# lists so we can use coverage to measure which are used.
|
||||
CONDITIONAL_NAMES = {
|
||||
"Cryptography_HAS_EC2M": cryptography_has_ec2m,
|
||||
"Cryptography_HAS_RSA_OAEP_MD": cryptography_has_rsa_oaep_md,
|
||||
"Cryptography_HAS_RSA_OAEP_LABEL": cryptography_has_rsa_oaep_label,
|
||||
"Cryptography_HAS_SSL3_METHOD": cryptography_has_ssl3_method,
|
||||
"Cryptography_HAS_102_VERIFICATION": cryptography_has_102_verification,
|
||||
"Cryptography_HAS_110_VERIFICATION_PARAMS": (
|
||||
cryptography_has_110_verification_params
|
||||
),
|
||||
"Cryptography_HAS_SET_CERT_CB": cryptography_has_set_cert_cb,
|
||||
"Cryptography_HAS_SSL_ST": cryptography_has_ssl_st,
|
||||
"Cryptography_HAS_TLS_ST": cryptography_has_tls_st,
|
||||
"Cryptography_HAS_LOCKING_CALLBACKS": cryptography_has_locking_callbacks,
|
||||
"Cryptography_HAS_SCRYPT": cryptography_has_scrypt,
|
||||
"Cryptography_HAS_EVP_PKEY_DHX": cryptography_has_evp_pkey_dhx,
|
||||
"Cryptography_HAS_MEM_FUNCTIONS": cryptography_has_mem_functions,
|
||||
"Cryptography_HAS_SCT": cryptography_has_sct,
|
||||
"Cryptography_HAS_X509_STORE_CTX_GET_ISSUER": (
|
||||
cryptography_has_x509_store_ctx_get_issuer
|
||||
),
|
||||
"Cryptography_HAS_X25519": cryptography_has_x25519,
|
||||
"Cryptography_HAS_X448": cryptography_has_x448,
|
||||
"Cryptography_HAS_ED448": cryptography_has_ed448,
|
||||
"Cryptography_HAS_ED25519": cryptography_has_ed25519,
|
||||
"Cryptography_HAS_POLY1305": cryptography_has_poly1305,
|
||||
"Cryptography_HAS_ONESHOT_EVP_DIGEST_SIGN_VERIFY": (
|
||||
cryptography_has_oneshot_evp_digest_sign_verify
|
||||
),
|
||||
"Cryptography_HAS_EVP_PKEY_get_set_tls_encodedpoint": (
|
||||
cryptography_has_evp_pkey_get_set_tls_encodedpoint
|
||||
),
|
||||
"Cryptography_HAS_FIPS": cryptography_has_fips,
|
||||
"Cryptography_HAS_SIGALGS": cryptography_has_ssl_sigalgs,
|
||||
"Cryptography_HAS_PSK": cryptography_has_psk,
|
||||
"Cryptography_HAS_CUSTOM_EXT": cryptography_has_custom_ext,
|
||||
"Cryptography_HAS_OPENSSL_CLEANUP": cryptography_has_openssl_cleanup,
|
||||
"Cryptography_HAS_CIPHER_DETAILS": cryptography_has_cipher_details,
|
||||
"Cryptography_HAS_TLSv1_3": cryptography_has_tlsv13,
|
||||
"Cryptography_HAS_KEYLOG": cryptography_has_keylog,
|
||||
"Cryptography_HAS_RAW_KEY": cryptography_has_raw_key,
|
||||
"Cryptography_HAS_EVP_DIGESTFINAL_XOF": (
|
||||
cryptography_has_evp_digestfinal_xof
|
||||
),
|
||||
"Cryptography_HAS_ENGINE": cryptography_has_engine,
|
||||
"Cryptography_HAS_VERIFIED_CHAIN": cryptography_has_verified_chain,
|
||||
"Cryptography_HAS_SRTP": cryptography_has_srtp,
|
||||
}
|
||||
|
|
@@ -0,0 +1,222 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import collections
|
||||
import os
|
||||
import threading
|
||||
import types
|
||||
import warnings
|
||||
|
||||
import cryptography
|
||||
from cryptography import utils
|
||||
from cryptography.exceptions import InternalError
|
||||
from cryptography.hazmat.bindings._openssl import ffi, lib
|
||||
from cryptography.hazmat.bindings.openssl._conditional import CONDITIONAL_NAMES
|
||||
|
||||
_OpenSSLErrorWithText = collections.namedtuple(
|
||||
"_OpenSSLErrorWithText", ["code", "lib", "func", "reason", "reason_text"]
|
||||
)
|
||||
|
||||
|
||||
class _OpenSSLError(object):
|
||||
def __init__(self, code, lib, func, reason):
|
||||
self._code = code
|
||||
self._lib = lib
|
||||
self._func = func
|
||||
self._reason = reason
|
||||
|
||||
def _lib_reason_match(self, lib, reason):
|
||||
return lib == self.lib and reason == self.reason
|
||||
|
||||
code = utils.read_only_property("_code")
|
||||
lib = utils.read_only_property("_lib")
|
||||
func = utils.read_only_property("_func")
|
||||
reason = utils.read_only_property("_reason")
|
||||
|
||||
|
||||
def _consume_errors(lib):
|
||||
errors = []
|
||||
while True:
|
||||
code = lib.ERR_get_error()
|
||||
if code == 0:
|
||||
break
|
||||
|
||||
err_lib = lib.ERR_GET_LIB(code)
|
||||
err_func = lib.ERR_GET_FUNC(code)
|
||||
err_reason = lib.ERR_GET_REASON(code)
|
||||
|
||||
errors.append(_OpenSSLError(code, err_lib, err_func, err_reason))
|
||||
|
||||
return errors
|
||||
|
||||
|
||||
def _errors_with_text(errors):
|
||||
errors_with_text = []
|
||||
for err in errors:
|
||||
buf = ffi.new("char[]", 256)
|
||||
lib.ERR_error_string_n(err.code, buf, len(buf))
|
||||
err_text_reason = ffi.string(buf)
|
||||
|
||||
errors_with_text.append(
|
||||
_OpenSSLErrorWithText(
|
||||
err.code, err.lib, err.func, err.reason, err_text_reason
|
||||
)
|
||||
)
|
||||
|
||||
return errors_with_text
|
||||
|
||||
|
||||
def _consume_errors_with_text(lib):
|
||||
return _errors_with_text(_consume_errors(lib))
|
||||
|
||||
|
||||
def _openssl_assert(lib, ok, errors=None):
|
||||
if not ok:
|
||||
if errors is None:
|
||||
errors = _consume_errors(lib)
|
||||
errors_with_text = _errors_with_text(errors)
|
||||
|
||||
raise InternalError(
|
||||
"Unknown OpenSSL error. This error is commonly encountered when "
|
||||
"another library is not cleaning up the OpenSSL error stack. If "
|
||||
"you are using cryptography with another library that uses "
|
||||
"OpenSSL try disabling it before reporting a bug. Otherwise "
|
||||
"please file an issue at https://github.com/pyca/cryptography/"
|
||||
"issues with information on how to reproduce "
|
||||
"this. ({0!r})".format(errors_with_text),
|
||||
errors_with_text,
|
||||
)
|
||||
|
||||
|
||||
def build_conditional_library(lib, conditional_names):
|
||||
conditional_lib = types.ModuleType("lib")
|
||||
conditional_lib._original_lib = lib
|
||||
excluded_names = set()
|
||||
for condition, names_cb in conditional_names.items():
|
||||
if not getattr(lib, condition):
|
||||
excluded_names.update(names_cb())
|
||||
|
||||
for attr in dir(lib):
|
||||
if attr not in excluded_names:
|
||||
setattr(conditional_lib, attr, getattr(lib, attr))
|
||||
|
||||
return conditional_lib
|
||||
|
||||
|
||||
class Binding(object):
|
||||
"""
|
||||
OpenSSL API wrapper.
|
||||
"""
|
||||
|
||||
lib = None
|
||||
ffi = ffi
|
||||
_lib_loaded = False
|
||||
_init_lock = threading.Lock()
|
||||
_lock_init_lock = threading.Lock()
|
||||
|
||||
def __init__(self):
|
||||
self._ensure_ffi_initialized()
|
||||
|
||||
@classmethod
|
||||
def _register_osrandom_engine(cls):
|
||||
# Clear any errors extant in the queue before we start. In many
|
||||
# scenarios other things may be interacting with OpenSSL in the same
|
||||
# process space and it has proven untenable to assume that they will
|
||||
# reliably clear the error queue. Once we clear it here we will
|
||||
# error on any subsequent unexpected item in the stack.
|
||||
cls.lib.ERR_clear_error()
|
||||
if cls.lib.CRYPTOGRAPHY_NEEDS_OSRANDOM_ENGINE:
|
||||
result = cls.lib.Cryptography_add_osrandom_engine()
|
||||
_openssl_assert(cls.lib, result in (1, 2))
|
||||
|
||||
@classmethod
|
||||
def _ensure_ffi_initialized(cls):
|
||||
with cls._init_lock:
|
||||
if not cls._lib_loaded:
|
||||
cls.lib = build_conditional_library(lib, CONDITIONAL_NAMES)
|
||||
cls._lib_loaded = True
|
||||
# initialize the SSL library
|
||||
cls.lib.SSL_library_init()
|
||||
# adds all ciphers/digests for EVP
|
||||
cls.lib.OpenSSL_add_all_algorithms()
|
||||
# loads error strings for libcrypto and libssl functions
|
||||
cls.lib.SSL_load_error_strings()
|
||||
cls._register_osrandom_engine()
|
||||
|
||||
@classmethod
|
||||
def init_static_locks(cls):
|
||||
with cls._lock_init_lock:
|
||||
cls._ensure_ffi_initialized()
|
||||
# Use Python's implementation if available, importing _ssl triggers
|
||||
# the setup for this.
|
||||
__import__("_ssl")
|
||||
|
||||
if (
|
||||
not cls.lib.Cryptography_HAS_LOCKING_CALLBACKS
|
||||
or cls.lib.CRYPTO_get_locking_callback() != cls.ffi.NULL
|
||||
):
|
||||
return
|
||||
|
||||
# If nothing else has setup a locking callback already, we set up
|
||||
# our own
|
||||
res = lib.Cryptography_setup_ssl_threads()
|
||||
_openssl_assert(cls.lib, res == 1)
|
||||
|
||||
|
||||
def _verify_openssl_version(lib):
|
||||
if (
|
||||
lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_110
|
||||
and not lib.CRYPTOGRAPHY_IS_LIBRESSL
|
||||
):
|
||||
if os.environ.get("CRYPTOGRAPHY_ALLOW_OPENSSL_102"):
|
||||
warnings.warn(
|
||||
"OpenSSL version 1.0.2 is no longer supported by the OpenSSL "
|
||||
"project, please upgrade. The next version of cryptography "
|
||||
"will completely remove support for it.",
|
||||
utils.CryptographyDeprecationWarning,
|
||||
)
|
||||
else:
|
||||
raise RuntimeError(
|
||||
"You are linking against OpenSSL 1.0.2, which is no longer "
|
||||
"supported by the OpenSSL project. To use this version of "
|
||||
"cryptography you need to upgrade to a newer version of "
|
||||
"OpenSSL. For this version only you can also set the "
|
||||
"environment variable CRYPTOGRAPHY_ALLOW_OPENSSL_102 to "
|
||||
"allow OpenSSL 1.0.2."
|
||||
)
|
||||
|
||||
|
||||
def _verify_package_version(version):
|
||||
# Occasionally we run into situations where the version of the Python
|
||||
# package does not match the version of the shared object that is loaded.
|
||||
# This may occur in environments where multiple versions of cryptography
|
||||
# are installed and available in the python path. To avoid errors cropping
|
||||
# up later this code checks that the currently imported package and the
|
||||
# shared object that were loaded have the same version and raise an
|
||||
# ImportError if they do not
|
||||
so_package_version = ffi.string(lib.CRYPTOGRAPHY_PACKAGE_VERSION)
|
||||
if version.encode("ascii") != so_package_version:
|
||||
raise ImportError(
|
||||
"The version of cryptography does not match the loaded "
|
||||
"shared object. This can happen if you have multiple copies of "
|
||||
"cryptography installed in your Python path. Please try creating "
|
||||
"a new virtual environment to resolve this issue. "
|
||||
"Loaded python version: {}, shared object version: {}".format(
|
||||
version, so_package_version
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
_verify_package_version(cryptography.__version__)
|
||||
|
||||
# OpenSSL is not thread safe until the locks are initialized. We call this
|
||||
# method in module scope so that it executes with the import lock. On
|
||||
# Pythons < 3.4 this import lock is a global lock, which can prevent a race
|
||||
# condition registering the OpenSSL locks. On Python 3.4+ the import lock
|
||||
# is per module so this approach will not work.
|
||||
Binding.init_static_locks()
|
||||
|
||||
_verify_openssl_version(Binding.lib)
@@ -0,0 +1,5 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

from __future__ import absolute_import, division, print_function

@@ -0,0 +1,40 @@
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import abc
|
||||
|
||||
import six
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class AsymmetricSignatureContext(object):
|
||||
@abc.abstractmethod
|
||||
def update(self, data):
|
||||
"""
|
||||
Processes the provided bytes and returns nothing.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def finalize(self):
|
||||
"""
|
||||
Returns the signature as bytes.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class AsymmetricVerificationContext(object):
|
||||
@abc.abstractmethod
|
||||
def update(self, data):
|
||||
"""
|
||||
Processes the provided bytes and returns nothing.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def verify(self):
|
||||
"""
|
||||
Raises an exception if the bytes provided to update do not match the
|
||||
signature or the signature does not match the public key.
|
||||
"""
@@ -0,0 +1,216 @@
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import abc
|
||||
|
||||
import six
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.hazmat.backends import _get_backend
|
||||
|
||||
|
||||
def generate_parameters(generator, key_size, backend=None):
|
||||
backend = _get_backend(backend)
|
||||
return backend.generate_dh_parameters(generator, key_size)
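
# Illustrative sketch (not part of the upstream module): a hypothetical helper
# showing how the public API defined in this file is typically used. Parameter
# generation at this size is slow; the snippet only clarifies the call flow.
def _example_dh_exchange():
    parameters = generate_parameters(generator=2, key_size=2048)
    private_key = parameters.generate_private_key()
    peer_key = parameters.generate_private_key()
    # Both sides derive the same shared secret from the other's public key.
    return private_key.exchange(peer_key.public_key())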
|
||||
|
||||
|
||||
class DHPrivateNumbers(object):
|
||||
def __init__(self, x, public_numbers):
|
||||
if not isinstance(x, six.integer_types):
|
||||
raise TypeError("x must be an integer.")
|
||||
|
||||
if not isinstance(public_numbers, DHPublicNumbers):
|
||||
raise TypeError(
|
||||
"public_numbers must be an instance of " "DHPublicNumbers."
|
||||
)
|
||||
|
||||
self._x = x
|
||||
self._public_numbers = public_numbers
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, DHPrivateNumbers):
|
||||
return NotImplemented
|
||||
|
||||
return (
|
||||
self._x == other._x
|
||||
and self._public_numbers == other._public_numbers
|
||||
)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
def private_key(self, backend=None):
|
||||
backend = _get_backend(backend)
|
||||
return backend.load_dh_private_numbers(self)
|
||||
|
||||
public_numbers = utils.read_only_property("_public_numbers")
|
||||
x = utils.read_only_property("_x")
|
||||
|
||||
|
||||
class DHPublicNumbers(object):
|
||||
def __init__(self, y, parameter_numbers):
|
||||
if not isinstance(y, six.integer_types):
|
||||
raise TypeError("y must be an integer.")
|
||||
|
||||
if not isinstance(parameter_numbers, DHParameterNumbers):
|
||||
raise TypeError(
|
||||
"parameters must be an instance of DHParameterNumbers."
|
||||
)
|
||||
|
||||
self._y = y
|
||||
self._parameter_numbers = parameter_numbers
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, DHPublicNumbers):
|
||||
return NotImplemented
|
||||
|
||||
return (
|
||||
self._y == other._y
|
||||
and self._parameter_numbers == other._parameter_numbers
|
||||
)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
def public_key(self, backend=None):
|
||||
backend = _get_backend(backend)
|
||||
return backend.load_dh_public_numbers(self)
|
||||
|
||||
y = utils.read_only_property("_y")
|
||||
parameter_numbers = utils.read_only_property("_parameter_numbers")
|
||||
|
||||
|
||||
class DHParameterNumbers(object):
|
||||
def __init__(self, p, g, q=None):
|
||||
if not isinstance(p, six.integer_types) or not isinstance(
|
||||
g, six.integer_types
|
||||
):
|
||||
raise TypeError("p and g must be integers")
|
||||
if q is not None and not isinstance(q, six.integer_types):
|
||||
raise TypeError("q must be integer or None")
|
||||
|
||||
if g < 2:
|
||||
raise ValueError("DH generator must be 2 or greater")
|
||||
|
||||
self._p = p
|
||||
self._g = g
|
||||
self._q = q
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, DHParameterNumbers):
|
||||
return NotImplemented
|
||||
|
||||
return (
|
||||
self._p == other._p and self._g == other._g and self._q == other._q
|
||||
)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
def parameters(self, backend=None):
|
||||
backend = _get_backend(backend)
|
||||
return backend.load_dh_parameter_numbers(self)
|
||||
|
||||
p = utils.read_only_property("_p")
|
||||
g = utils.read_only_property("_g")
|
||||
q = utils.read_only_property("_q")
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class DHParameters(object):
|
||||
@abc.abstractmethod
|
||||
def generate_private_key(self):
|
||||
"""
|
||||
Generates and returns a DHPrivateKey.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def parameter_bytes(self, encoding, format):
|
||||
"""
|
||||
Returns the parameters serialized as bytes.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def parameter_numbers(self):
|
||||
"""
|
||||
Returns a DHParameterNumbers.
|
||||
"""
|
||||
|
||||
|
||||
DHParametersWithSerialization = DHParameters
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class DHPrivateKey(object):
|
||||
@abc.abstractproperty
|
||||
def key_size(self):
|
||||
"""
|
||||
The bit length of the prime modulus.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def public_key(self):
|
||||
"""
|
||||
The DHPublicKey associated with this private key.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def parameters(self):
|
||||
"""
|
||||
The DHParameters object associated with this private key.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def exchange(self, peer_public_key):
|
||||
"""
|
||||
Given peer's DHPublicKey, carry out the key exchange and
|
||||
return shared key as bytes.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class DHPrivateKeyWithSerialization(DHPrivateKey):
|
||||
@abc.abstractmethod
|
||||
def private_numbers(self):
|
||||
"""
|
||||
Returns a DHPrivateNumbers.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def private_bytes(self, encoding, format, encryption_algorithm):
|
||||
"""
|
||||
Returns the key serialized as bytes.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class DHPublicKey(object):
|
||||
@abc.abstractproperty
|
||||
def key_size(self):
|
||||
"""
|
||||
The bit length of the prime modulus.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def parameters(self):
|
||||
"""
|
||||
The DHParameters object associated with this public key.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def public_numbers(self):
|
||||
"""
|
||||
Returns a DHPublicNumbers.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def public_bytes(self, encoding, format):
|
||||
"""
|
||||
Returns the key serialized as bytes.
|
||||
"""
|
||||
|
||||
|
||||
DHPublicKeyWithSerialization = DHPublicKey
@@ -0,0 +1,261 @@
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import abc
|
||||
|
||||
import six
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.hazmat.backends import _get_backend
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class DSAParameters(object):
|
||||
@abc.abstractmethod
|
||||
def generate_private_key(self):
|
||||
"""
|
||||
Generates and returns a DSAPrivateKey.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class DSAParametersWithNumbers(DSAParameters):
|
||||
@abc.abstractmethod
|
||||
def parameter_numbers(self):
|
||||
"""
|
||||
Returns a DSAParameterNumbers.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class DSAPrivateKey(object):
|
||||
@abc.abstractproperty
|
||||
def key_size(self):
|
||||
"""
|
||||
The bit length of the prime modulus.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def public_key(self):
|
||||
"""
|
||||
The DSAPublicKey associated with this private key.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def parameters(self):
|
||||
"""
|
||||
The DSAParameters object associated with this private key.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def signer(self, signature_algorithm):
|
||||
"""
|
||||
Returns an AsymmetricSignatureContext used for signing data.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def sign(self, data, algorithm):
|
||||
"""
|
||||
Signs the data.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class DSAPrivateKeyWithSerialization(DSAPrivateKey):
|
||||
@abc.abstractmethod
|
||||
def private_numbers(self):
|
||||
"""
|
||||
Returns a DSAPrivateNumbers.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def private_bytes(self, encoding, format, encryption_algorithm):
|
||||
"""
|
||||
Returns the key serialized as bytes.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class DSAPublicKey(object):
|
||||
@abc.abstractproperty
|
||||
def key_size(self):
|
||||
"""
|
||||
The bit length of the prime modulus.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def parameters(self):
|
||||
"""
|
||||
The DSAParameters object associated with this public key.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def verifier(self, signature, signature_algorithm):
|
||||
"""
|
||||
Returns an AsymmetricVerificationContext used for verifying data.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def public_numbers(self):
|
||||
"""
|
||||
Returns a DSAPublicNumbers.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def public_bytes(self, encoding, format):
|
||||
"""
|
||||
Returns the key serialized as bytes.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def verify(self, signature, data, algorithm):
|
||||
"""
|
||||
Verifies the signature of the data.
|
||||
"""
|
||||
|
||||
|
||||
DSAPublicKeyWithSerialization = DSAPublicKey
|
||||
|
||||
|
||||
def generate_parameters(key_size, backend=None):
|
||||
backend = _get_backend(backend)
|
||||
return backend.generate_dsa_parameters(key_size)
|
||||
|
||||
|
||||
def generate_private_key(key_size, backend=None):
|
||||
backend = _get_backend(backend)
|
||||
return backend.generate_dsa_private_key_and_parameters(key_size)
|
||||
|
||||
|
||||
def _check_dsa_parameters(parameters):
|
||||
if parameters.p.bit_length() not in [1024, 2048, 3072, 4096]:
|
||||
raise ValueError(
|
||||
"p must be exactly 1024, 2048, 3072, or 4096 bits long"
|
||||
)
|
||||
if parameters.q.bit_length() not in [160, 224, 256]:
|
||||
raise ValueError("q must be exactly 160, 224, or 256 bits long")
|
||||
|
||||
if not (1 < parameters.g < parameters.p):
|
||||
raise ValueError("g, p don't satisfy 1 < g < p.")
|
||||
|
||||
|
||||
def _check_dsa_private_numbers(numbers):
|
||||
parameters = numbers.public_numbers.parameter_numbers
|
||||
_check_dsa_parameters(parameters)
|
||||
if numbers.x <= 0 or numbers.x >= parameters.q:
|
||||
raise ValueError("x must be > 0 and < q.")
|
||||
|
||||
if numbers.public_numbers.y != pow(parameters.g, numbers.x, parameters.p):
|
||||
raise ValueError("y must be equal to (g ** x % p).")
|
||||
|
||||
|
||||
class DSAParameterNumbers(object):
|
||||
def __init__(self, p, q, g):
|
||||
if (
|
||||
not isinstance(p, six.integer_types)
|
||||
or not isinstance(q, six.integer_types)
|
||||
or not isinstance(g, six.integer_types)
|
||||
):
|
||||
raise TypeError(
|
||||
"DSAParameterNumbers p, q, and g arguments must be integers."
|
||||
)
|
||||
|
||||
self._p = p
|
||||
self._q = q
|
||||
self._g = g
|
||||
|
||||
p = utils.read_only_property("_p")
|
||||
q = utils.read_only_property("_q")
|
||||
g = utils.read_only_property("_g")
|
||||
|
||||
def parameters(self, backend=None):
|
||||
backend = _get_backend(backend)
|
||||
return backend.load_dsa_parameter_numbers(self)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, DSAParameterNumbers):
|
||||
return NotImplemented
|
||||
|
||||
return self.p == other.p and self.q == other.q and self.g == other.g
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
def __repr__(self):
|
||||
return (
|
||||
"<DSAParameterNumbers(p={self.p}, q={self.q}, "
|
||||
"g={self.g})>".format(self=self)
|
||||
)
|
||||
|
||||
|
||||
class DSAPublicNumbers(object):
|
||||
def __init__(self, y, parameter_numbers):
|
||||
if not isinstance(y, six.integer_types):
|
||||
raise TypeError("DSAPublicNumbers y argument must be an integer.")
|
||||
|
||||
if not isinstance(parameter_numbers, DSAParameterNumbers):
|
||||
raise TypeError(
|
||||
"parameter_numbers must be a DSAParameterNumbers instance."
|
||||
)
|
||||
|
||||
self._y = y
|
||||
self._parameter_numbers = parameter_numbers
|
||||
|
||||
y = utils.read_only_property("_y")
|
||||
parameter_numbers = utils.read_only_property("_parameter_numbers")
|
||||
|
||||
def public_key(self, backend=None):
|
||||
backend = _get_backend(backend)
|
||||
return backend.load_dsa_public_numbers(self)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, DSAPublicNumbers):
|
||||
return NotImplemented
|
||||
|
||||
return (
|
||||
self.y == other.y
|
||||
and self.parameter_numbers == other.parameter_numbers
|
||||
)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
def __repr__(self):
|
||||
return (
|
||||
"<DSAPublicNumbers(y={self.y}, "
|
||||
"parameter_numbers={self.parameter_numbers})>".format(self=self)
|
||||
)
|
||||
|
||||
|
||||
class DSAPrivateNumbers(object):
|
||||
def __init__(self, x, public_numbers):
|
||||
if not isinstance(x, six.integer_types):
|
||||
raise TypeError("DSAPrivateNumbers x argument must be an integer.")
|
||||
|
||||
if not isinstance(public_numbers, DSAPublicNumbers):
|
||||
raise TypeError(
|
||||
"public_numbers must be a DSAPublicNumbers instance."
|
||||
)
|
||||
self._public_numbers = public_numbers
|
||||
self._x = x
|
||||
|
||||
x = utils.read_only_property("_x")
|
||||
public_numbers = utils.read_only_property("_public_numbers")
|
||||
|
||||
def private_key(self, backend=None):
|
||||
backend = _get_backend(backend)
|
||||
return backend.load_dsa_private_numbers(self)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, DSAPrivateNumbers):
|
||||
return NotImplemented
|
||||
|
||||
return (
|
||||
self.x == other.x and self.public_numbers == other.public_numbers
|
||||
)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
@@ -0,0 +1,502 @@
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import abc
|
||||
import warnings
|
||||
|
||||
import six
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.hazmat._oid import ObjectIdentifier
|
||||
from cryptography.hazmat.backends import _get_backend
|
||||
|
||||
|
||||
class EllipticCurveOID(object):
|
||||
SECP192R1 = ObjectIdentifier("1.2.840.10045.3.1.1")
|
||||
SECP224R1 = ObjectIdentifier("1.3.132.0.33")
|
||||
SECP256K1 = ObjectIdentifier("1.3.132.0.10")
|
||||
SECP256R1 = ObjectIdentifier("1.2.840.10045.3.1.7")
|
||||
SECP384R1 = ObjectIdentifier("1.3.132.0.34")
|
||||
SECP521R1 = ObjectIdentifier("1.3.132.0.35")
|
||||
BRAINPOOLP256R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.7")
|
||||
BRAINPOOLP384R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.11")
|
||||
BRAINPOOLP512R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.13")
|
||||
SECT163K1 = ObjectIdentifier("1.3.132.0.1")
|
||||
SECT163R2 = ObjectIdentifier("1.3.132.0.15")
|
||||
SECT233K1 = ObjectIdentifier("1.3.132.0.26")
|
||||
SECT233R1 = ObjectIdentifier("1.3.132.0.27")
|
||||
SECT283K1 = ObjectIdentifier("1.3.132.0.16")
|
||||
SECT283R1 = ObjectIdentifier("1.3.132.0.17")
|
||||
SECT409K1 = ObjectIdentifier("1.3.132.0.36")
|
||||
SECT409R1 = ObjectIdentifier("1.3.132.0.37")
|
||||
SECT571K1 = ObjectIdentifier("1.3.132.0.38")
|
||||
SECT571R1 = ObjectIdentifier("1.3.132.0.39")
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class EllipticCurve(object):
|
||||
@abc.abstractproperty
|
||||
def name(self):
|
||||
"""
|
||||
The name of the curve. e.g. secp256r1.
|
||||
"""
|
||||
|
||||
@abc.abstractproperty
|
||||
def key_size(self):
|
||||
"""
|
||||
Bit size of a secret scalar for the curve.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class EllipticCurveSignatureAlgorithm(object):
|
||||
@abc.abstractproperty
|
||||
def algorithm(self):
|
||||
"""
|
||||
The digest algorithm used with this signature.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class EllipticCurvePrivateKey(object):
|
||||
@abc.abstractmethod
|
||||
def signer(self, signature_algorithm):
|
||||
"""
|
||||
Returns an AsymmetricSignatureContext used for signing data.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def exchange(self, algorithm, peer_public_key):
|
||||
"""
|
||||
Performs a key exchange operation using the provided algorithm with the
|
||||
provided peer's public key.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def public_key(self):
|
||||
"""
|
||||
The EllipticCurvePublicKey for this private key.
|
||||
"""
|
||||
|
||||
@abc.abstractproperty
|
||||
def curve(self):
|
||||
"""
|
||||
The EllipticCurve that this key is on.
|
||||
"""
|
||||
|
||||
@abc.abstractproperty
|
||||
def key_size(self):
|
||||
"""
|
||||
Bit size of a secret scalar for the curve.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def sign(self, data, signature_algorithm):
|
||||
"""
|
||||
Signs the data.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class EllipticCurvePrivateKeyWithSerialization(EllipticCurvePrivateKey):
|
||||
@abc.abstractmethod
|
||||
def private_numbers(self):
|
||||
"""
|
||||
Returns an EllipticCurvePrivateNumbers.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def private_bytes(self, encoding, format, encryption_algorithm):
|
||||
"""
|
||||
Returns the key serialized as bytes.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class EllipticCurvePublicKey(object):
|
||||
@abc.abstractmethod
|
||||
def verifier(self, signature, signature_algorithm):
|
||||
"""
|
||||
Returns an AsymmetricVerificationContext used for verifying data.
|
||||
"""
|
||||
|
||||
@abc.abstractproperty
|
||||
def curve(self):
|
||||
"""
|
||||
The EllipticCurve that this key is on.
|
||||
"""
|
||||
|
||||
@abc.abstractproperty
|
||||
def key_size(self):
|
||||
"""
|
||||
Bit size of a secret scalar for the curve.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def public_numbers(self):
|
||||
"""
|
||||
Returns an EllipticCurvePublicNumbers.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def public_bytes(self, encoding, format):
|
||||
"""
|
||||
Returns the key serialized as bytes.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def verify(self, signature, data, signature_algorithm):
|
||||
"""
|
||||
Verifies the signature of the data.
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def from_encoded_point(cls, curve, data):
|
||||
utils._check_bytes("data", data)
|
||||
|
||||
if not isinstance(curve, EllipticCurve):
|
||||
raise TypeError("curve must be an EllipticCurve instance")
|
||||
|
||||
if len(data) == 0:
|
||||
raise ValueError("data must not be an empty byte string")
|
||||
|
||||
if six.indexbytes(data, 0) not in [0x02, 0x03, 0x04]:
|
||||
raise ValueError("Unsupported elliptic curve point type")
|
||||
|
||||
from cryptography.hazmat.backends.openssl.backend import backend
|
||||
|
||||
return backend.load_elliptic_curve_public_bytes(curve, data)
|
||||
|
||||
|
||||
EllipticCurvePublicKeyWithSerialization = EllipticCurvePublicKey
|
||||
|
||||
|
||||
@utils.register_interface(EllipticCurve)
|
||||
class SECT571R1(object):
|
||||
name = "sect571r1"
|
||||
key_size = 570
|
||||
|
||||
|
||||
@utils.register_interface(EllipticCurve)
|
||||
class SECT409R1(object):
|
||||
name = "sect409r1"
|
||||
key_size = 409
|
||||
|
||||
|
||||
@utils.register_interface(EllipticCurve)
|
||||
class SECT283R1(object):
|
||||
name = "sect283r1"
|
||||
key_size = 283
|
||||
|
||||
|
||||
@utils.register_interface(EllipticCurve)
|
||||
class SECT233R1(object):
|
||||
name = "sect233r1"
|
||||
key_size = 233
|
||||
|
||||
|
||||
@utils.register_interface(EllipticCurve)
|
||||
class SECT163R2(object):
|
||||
name = "sect163r2"
|
||||
key_size = 163
|
||||
|
||||
|
||||
@utils.register_interface(EllipticCurve)
|
||||
class SECT571K1(object):
|
||||
name = "sect571k1"
|
||||
key_size = 571
|
||||
|
||||
|
||||
@utils.register_interface(EllipticCurve)
|
||||
class SECT409K1(object):
|
||||
name = "sect409k1"
|
||||
key_size = 409
|
||||
|
||||
|
||||
@utils.register_interface(EllipticCurve)
|
||||
class SECT283K1(object):
|
||||
name = "sect283k1"
|
||||
key_size = 283
|
||||
|
||||
|
||||
@utils.register_interface(EllipticCurve)
|
||||
class SECT233K1(object):
|
||||
name = "sect233k1"
|
||||
key_size = 233
|
||||
|
||||
|
||||
@utils.register_interface(EllipticCurve)
|
||||
class SECT163K1(object):
|
||||
name = "sect163k1"
|
||||
key_size = 163
|
||||
|
||||
|
||||
@utils.register_interface(EllipticCurve)
|
||||
class SECP521R1(object):
|
||||
name = "secp521r1"
|
||||
key_size = 521
|
||||
|
||||
|
||||
@utils.register_interface(EllipticCurve)
|
||||
class SECP384R1(object):
|
||||
name = "secp384r1"
|
||||
key_size = 384
|
||||
|
||||
|
||||
@utils.register_interface(EllipticCurve)
|
||||
class SECP256R1(object):
|
||||
name = "secp256r1"
|
||||
key_size = 256
|
||||
|
||||
|
||||
@utils.register_interface(EllipticCurve)
|
||||
class SECP256K1(object):
|
||||
name = "secp256k1"
|
||||
key_size = 256
|
||||
|
||||
|
||||
@utils.register_interface(EllipticCurve)
|
||||
class SECP224R1(object):
|
||||
name = "secp224r1"
|
||||
key_size = 224
|
||||
|
||||
|
||||
@utils.register_interface(EllipticCurve)
|
||||
class SECP192R1(object):
|
||||
name = "secp192r1"
|
||||
key_size = 192
|
||||
|
||||
|
||||
@utils.register_interface(EllipticCurve)
|
||||
class BrainpoolP256R1(object):
|
||||
name = "brainpoolP256r1"
|
||||
key_size = 256
|
||||
|
||||
|
||||
@utils.register_interface(EllipticCurve)
|
||||
class BrainpoolP384R1(object):
|
||||
name = "brainpoolP384r1"
|
||||
key_size = 384
|
||||
|
||||
|
||||
@utils.register_interface(EllipticCurve)
|
||||
class BrainpoolP512R1(object):
|
||||
name = "brainpoolP512r1"
|
||||
key_size = 512
|
||||
|
||||
|
||||
_CURVE_TYPES = {
|
||||
"prime192v1": SECP192R1,
|
||||
"prime256v1": SECP256R1,
|
||||
"secp192r1": SECP192R1,
|
||||
"secp224r1": SECP224R1,
|
||||
"secp256r1": SECP256R1,
|
||||
"secp384r1": SECP384R1,
|
||||
"secp521r1": SECP521R1,
|
||||
"secp256k1": SECP256K1,
|
||||
"sect163k1": SECT163K1,
|
||||
"sect233k1": SECT233K1,
|
||||
"sect283k1": SECT283K1,
|
||||
"sect409k1": SECT409K1,
|
||||
"sect571k1": SECT571K1,
|
||||
"sect163r2": SECT163R2,
|
||||
"sect233r1": SECT233R1,
|
||||
"sect283r1": SECT283R1,
|
||||
"sect409r1": SECT409R1,
|
||||
"sect571r1": SECT571R1,
|
||||
"brainpoolP256r1": BrainpoolP256R1,
|
||||
"brainpoolP384r1": BrainpoolP384R1,
|
||||
"brainpoolP512r1": BrainpoolP512R1,
|
||||
}
|
||||
|
||||
|
||||
@utils.register_interface(EllipticCurveSignatureAlgorithm)
|
||||
class ECDSA(object):
|
||||
def __init__(self, algorithm):
|
||||
self._algorithm = algorithm
|
||||
|
||||
algorithm = utils.read_only_property("_algorithm")
|
||||
|
||||
|
||||
def generate_private_key(curve, backend=None):
|
||||
backend = _get_backend(backend)
|
||||
return backend.generate_elliptic_curve_private_key(curve)
|
||||
|
||||
|
||||
def derive_private_key(private_value, curve, backend=None):
|
||||
backend = _get_backend(backend)
|
||||
if not isinstance(private_value, six.integer_types):
|
||||
raise TypeError("private_value must be an integer type.")
|
||||
|
||||
if private_value <= 0:
|
||||
raise ValueError("private_value must be a positive integer.")
|
||||
|
||||
if not isinstance(curve, EllipticCurve):
|
||||
raise TypeError("curve must provide the EllipticCurve interface.")
|
||||
|
||||
return backend.derive_elliptic_curve_private_key(private_value, curve)
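
# Illustrative sketch (not part of the upstream module): a hypothetical helper
# showing how derive_private_key rebuilds a key from a known secret scalar.
# The private value below is an arbitrary demonstration constant.
def _example_derive_ec_key():
    private_key = derive_private_key(0xDEADBEEF, SECP256R1())
    return private_key.public_key().public_numbers()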
|
||||
|
||||
|
||||
class EllipticCurvePublicNumbers(object):
|
||||
def __init__(self, x, y, curve):
|
||||
if not isinstance(x, six.integer_types) or not isinstance(
|
||||
y, six.integer_types
|
||||
):
|
||||
raise TypeError("x and y must be integers.")
|
||||
|
||||
if not isinstance(curve, EllipticCurve):
|
||||
raise TypeError("curve must provide the EllipticCurve interface.")
|
||||
|
||||
self._y = y
|
||||
self._x = x
|
||||
self._curve = curve
|
||||
|
||||
def public_key(self, backend=None):
|
||||
backend = _get_backend(backend)
|
||||
return backend.load_elliptic_curve_public_numbers(self)
|
||||
|
||||
def encode_point(self):
|
||||
warnings.warn(
|
||||
"encode_point has been deprecated on EllipticCurvePublicNumbers"
|
||||
" and will be removed in a future version. Please use "
|
||||
"EllipticCurvePublicKey.public_bytes to obtain both "
|
||||
"compressed and uncompressed point encoding.",
|
||||
utils.PersistentlyDeprecated2019,
|
||||
stacklevel=2,
|
||||
)
|
||||
# key_size is in bits. Convert to bytes and round up
|
||||
byte_length = (self.curve.key_size + 7) // 8
|
||||
return (
|
||||
b"\x04"
|
||||
+ utils.int_to_bytes(self.x, byte_length)
|
||||
+ utils.int_to_bytes(self.y, byte_length)
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def from_encoded_point(cls, curve, data):
|
||||
if not isinstance(curve, EllipticCurve):
|
||||
raise TypeError("curve must be an EllipticCurve instance")
|
||||
|
||||
warnings.warn(
|
||||
"Support for unsafe construction of public numbers from "
|
||||
"encoded data will be removed in a future version. "
|
||||
"Please use EllipticCurvePublicKey.from_encoded_point",
|
||||
utils.PersistentlyDeprecated2019,
|
||||
stacklevel=2,
|
||||
)
|
||||
|
||||
if data.startswith(b"\x04"):
|
||||
# key_size is in bits. Convert to bytes and round up
|
||||
byte_length = (curve.key_size + 7) // 8
|
||||
if len(data) == 2 * byte_length + 1:
|
||||
x = utils.int_from_bytes(data[1 : byte_length + 1], "big")
|
||||
y = utils.int_from_bytes(data[byte_length + 1 :], "big")
|
||||
return cls(x, y, curve)
|
||||
else:
|
||||
raise ValueError("Invalid elliptic curve point data length")
|
||||
else:
|
||||
raise ValueError("Unsupported elliptic curve point type")
|
||||
|
||||
curve = utils.read_only_property("_curve")
|
||||
x = utils.read_only_property("_x")
|
||||
y = utils.read_only_property("_y")
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, EllipticCurvePublicNumbers):
|
||||
return NotImplemented
|
||||
|
||||
return (
|
||||
self.x == other.x
|
||||
and self.y == other.y
|
||||
and self.curve.name == other.curve.name
|
||||
and self.curve.key_size == other.curve.key_size
|
||||
)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
def __hash__(self):
|
||||
return hash((self.x, self.y, self.curve.name, self.curve.key_size))
|
||||
|
||||
def __repr__(self):
|
||||
return (
|
||||
"<EllipticCurvePublicNumbers(curve={0.curve.name}, x={0.x}, "
|
||||
"y={0.y}>".format(self)
|
||||
)
|
||||
|
||||
|
||||
class EllipticCurvePrivateNumbers(object):
|
||||
def __init__(self, private_value, public_numbers):
|
||||
if not isinstance(private_value, six.integer_types):
|
||||
raise TypeError("private_value must be an integer.")
|
||||
|
||||
if not isinstance(public_numbers, EllipticCurvePublicNumbers):
|
||||
raise TypeError(
|
||||
"public_numbers must be an EllipticCurvePublicNumbers "
|
||||
"instance."
|
||||
)
|
||||
|
||||
self._private_value = private_value
|
||||
self._public_numbers = public_numbers
|
||||
|
||||
def private_key(self, backend=None):
|
||||
backend = _get_backend(backend)
|
||||
return backend.load_elliptic_curve_private_numbers(self)
|
||||
|
||||
private_value = utils.read_only_property("_private_value")
|
||||
public_numbers = utils.read_only_property("_public_numbers")
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, EllipticCurvePrivateNumbers):
|
||||
return NotImplemented
|
||||
|
||||
return (
|
||||
self.private_value == other.private_value
|
||||
and self.public_numbers == other.public_numbers
|
||||
)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
def __hash__(self):
|
||||
return hash((self.private_value, self.public_numbers))
|
||||
|
||||
|
||||
class ECDH(object):
|
||||
pass
|
||||
|
||||
|
||||
_OID_TO_CURVE = {
|
||||
EllipticCurveOID.SECP192R1: SECP192R1,
|
||||
EllipticCurveOID.SECP224R1: SECP224R1,
|
||||
EllipticCurveOID.SECP256K1: SECP256K1,
|
||||
EllipticCurveOID.SECP256R1: SECP256R1,
|
||||
EllipticCurveOID.SECP384R1: SECP384R1,
|
||||
EllipticCurveOID.SECP521R1: SECP521R1,
|
||||
EllipticCurveOID.BRAINPOOLP256R1: BrainpoolP256R1,
|
||||
EllipticCurveOID.BRAINPOOLP384R1: BrainpoolP384R1,
|
||||
EllipticCurveOID.BRAINPOOLP512R1: BrainpoolP512R1,
|
||||
EllipticCurveOID.SECT163K1: SECT163K1,
|
||||
EllipticCurveOID.SECT163R2: SECT163R2,
|
||||
EllipticCurveOID.SECT233K1: SECT233K1,
|
||||
EllipticCurveOID.SECT233R1: SECT233R1,
|
||||
EllipticCurveOID.SECT283K1: SECT283K1,
|
||||
EllipticCurveOID.SECT283R1: SECT283R1,
|
||||
EllipticCurveOID.SECT409K1: SECT409K1,
|
||||
EllipticCurveOID.SECT409R1: SECT409R1,
|
||||
EllipticCurveOID.SECT571K1: SECT571K1,
|
||||
EllipticCurveOID.SECT571R1: SECT571R1,
|
||||
}
|
||||
|
||||
|
||||
def get_curve_for_oid(oid):
|
||||
try:
|
||||
return _OID_TO_CURVE[oid]
|
||||
except KeyError:
|
||||
raise LookupError(
|
||||
"The provided object identifier has no matching elliptic "
|
||||
"curve class"
|
||||
)
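
# Illustrative sketch (not part of the upstream module): resolving a curve
# class from its ASN.1 object identifier via the mapping above.
def _example_curve_lookup():
    curve_cls = get_curve_for_oid(EllipticCurveOID.SECP256R1)
    assert curve_cls is SECP256R1
    return curve_cls().name  # "secp256r1"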
@@ -0,0 +1,87 @@
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import abc
|
||||
|
||||
import six
|
||||
|
||||
from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
|
||||
|
||||
|
||||
_ED25519_KEY_SIZE = 32
|
||||
_ED25519_SIG_SIZE = 64
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class Ed25519PublicKey(object):
|
||||
@classmethod
|
||||
def from_public_bytes(cls, data):
|
||||
from cryptography.hazmat.backends.openssl.backend import backend
|
||||
|
||||
if not backend.ed25519_supported():
|
||||
raise UnsupportedAlgorithm(
|
||||
"ed25519 is not supported by this version of OpenSSL.",
|
||||
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
|
||||
)
|
||||
|
||||
return backend.ed25519_load_public_bytes(data)
|
||||
|
||||
@abc.abstractmethod
|
||||
def public_bytes(self, encoding, format):
|
||||
"""
|
||||
The serialized bytes of the public key.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def verify(self, signature, data):
|
||||
"""
|
||||
Verify the signature.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class Ed25519PrivateKey(object):
|
||||
@classmethod
|
||||
def generate(cls):
|
||||
from cryptography.hazmat.backends.openssl.backend import backend
|
||||
|
||||
if not backend.ed25519_supported():
|
||||
raise UnsupportedAlgorithm(
|
||||
"ed25519 is not supported by this version of OpenSSL.",
|
||||
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
|
||||
)
|
||||
|
||||
return backend.ed25519_generate_key()
|
||||
|
||||
@classmethod
|
||||
def from_private_bytes(cls, data):
|
||||
from cryptography.hazmat.backends.openssl.backend import backend
|
||||
|
||||
if not backend.ed25519_supported():
|
||||
raise UnsupportedAlgorithm(
|
||||
"ed25519 is not supported by this version of OpenSSL.",
|
||||
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
|
||||
)
|
||||
|
||||
return backend.ed25519_load_private_bytes(data)
|
||||
|
||||
@abc.abstractmethod
|
||||
def public_key(self):
|
||||
"""
|
||||
The Ed25519PublicKey derived from the private key.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def private_bytes(self, encoding, format, encryption_algorithm):
|
||||
"""
|
||||
The serialized bytes of the private key.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def sign(self, data):
|
||||
"""
|
||||
Signs the data.
|
||||
"""
@@ -0,0 +1,82 @@
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import abc
|
||||
|
||||
import six
|
||||
|
||||
from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class Ed448PublicKey(object):
|
||||
@classmethod
|
||||
def from_public_bytes(cls, data):
|
||||
from cryptography.hazmat.backends.openssl.backend import backend
|
||||
|
||||
if not backend.ed448_supported():
|
||||
raise UnsupportedAlgorithm(
|
||||
"ed448 is not supported by this version of OpenSSL.",
|
||||
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
|
||||
)
|
||||
|
||||
return backend.ed448_load_public_bytes(data)
|
||||
|
||||
@abc.abstractmethod
|
||||
def public_bytes(self, encoding, format):
|
||||
"""
|
||||
The serialized bytes of the public key.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def verify(self, signature, data):
|
||||
"""
|
||||
Verify the signature.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class Ed448PrivateKey(object):
|
||||
@classmethod
|
||||
def generate(cls):
|
||||
from cryptography.hazmat.backends.openssl.backend import backend
|
||||
|
||||
if not backend.ed448_supported():
|
||||
raise UnsupportedAlgorithm(
|
||||
"ed448 is not supported by this version of OpenSSL.",
|
||||
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
|
||||
)
|
||||
return backend.ed448_generate_key()
|
||||
|
||||
@classmethod
|
||||
def from_private_bytes(cls, data):
|
||||
from cryptography.hazmat.backends.openssl.backend import backend
|
||||
|
||||
if not backend.ed448_supported():
|
||||
raise UnsupportedAlgorithm(
|
||||
"ed448 is not supported by this version of OpenSSL.",
|
||||
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
|
||||
)
|
||||
|
||||
return backend.ed448_load_private_bytes(data)
|
||||
|
||||
@abc.abstractmethod
|
||||
def public_key(self):
|
||||
"""
|
||||
The Ed448PublicKey derived from the private key.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def sign(self, data):
|
||||
"""
|
||||
Signs the data.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def private_bytes(self, encoding, format, encryption_algorithm):
|
||||
"""
|
||||
The serialized bytes of the private key.
|
||||
"""
@@ -0,0 +1,80 @@
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import abc
|
||||
|
||||
import six
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.hazmat.primitives import hashes
|
||||
from cryptography.hazmat.primitives.asymmetric import rsa
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class AsymmetricPadding(object):
|
||||
@abc.abstractproperty
|
||||
def name(self):
|
||||
"""
|
||||
A string naming this padding (e.g. "PSS", "PKCS1").
|
||||
"""
|
||||
|
||||
|
||||
@utils.register_interface(AsymmetricPadding)
|
||||
class PKCS1v15(object):
|
||||
name = "EMSA-PKCS1-v1_5"
|
||||
|
||||
|
||||
@utils.register_interface(AsymmetricPadding)
|
||||
class PSS(object):
|
||||
MAX_LENGTH = object()
|
||||
name = "EMSA-PSS"
|
||||
|
||||
def __init__(self, mgf, salt_length):
|
||||
self._mgf = mgf
|
||||
|
||||
if (
|
||||
not isinstance(salt_length, six.integer_types)
|
||||
and salt_length is not self.MAX_LENGTH
|
||||
):
|
||||
raise TypeError("salt_length must be an integer.")
|
||||
|
||||
if salt_length is not self.MAX_LENGTH and salt_length < 0:
|
||||
raise ValueError("salt_length must be zero or greater.")
|
||||
|
||||
self._salt_length = salt_length
|
||||
|
||||
|
||||
@utils.register_interface(AsymmetricPadding)
|
||||
class OAEP(object):
|
||||
name = "EME-OAEP"
|
||||
|
||||
def __init__(self, mgf, algorithm, label):
|
||||
if not isinstance(algorithm, hashes.HashAlgorithm):
|
||||
raise TypeError("Expected instance of hashes.HashAlgorithm.")
|
||||
|
||||
self._mgf = mgf
|
||||
self._algorithm = algorithm
|
||||
self._label = label
|
||||
|
||||
|
||||
class MGF1(object):
|
||||
MAX_LENGTH = object()
|
||||
|
||||
def __init__(self, algorithm):
|
||||
if not isinstance(algorithm, hashes.HashAlgorithm):
|
||||
raise TypeError("Expected instance of hashes.HashAlgorithm.")
|
||||
|
||||
self._algorithm = algorithm
|
||||
|
||||
|
||||
def calculate_max_pss_salt_length(key, hash_algorithm):
|
||||
if not isinstance(key, (rsa.RSAPrivateKey, rsa.RSAPublicKey)):
|
||||
raise TypeError("key must be an RSA public or private key")
|
||||
# bit length - 1 per RFC 3447
|
||||
emlen = (key.key_size + 6) // 8
|
||||
salt_length = emlen - hash_algorithm.digest_size - 2
|
||||
assert salt_length >= 0
|
||||
return salt_length
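
# Illustrative sketch (not part of the upstream module): for a 2048-bit RSA key
# and SHA-256, the computation above gives emlen = (2048 + 6) // 8 = 256 and a
# maximum salt length of 256 - 32 - 2 = 222 bytes.
def _example_max_pss_salt_length():
    emlen = (2048 + 6) // 8
    return emlen - 32 - 2  # 222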
@@ -0,0 +1,374 @@
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import abc
|
||||
|
||||
try:
|
||||
# Only available in math in 3.5+
|
||||
from math import gcd
|
||||
except ImportError:
|
||||
from fractions import gcd
|
||||
|
||||
import six
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
|
||||
from cryptography.hazmat.backends import _get_backend
|
||||
from cryptography.hazmat.backends.interfaces import RSABackend
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class RSAPrivateKey(object):
|
||||
@abc.abstractmethod
|
||||
def signer(self, padding, algorithm):
|
||||
"""
|
||||
Returns an AsymmetricSignatureContext used for signing data.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def decrypt(self, ciphertext, padding):
|
||||
"""
|
||||
Decrypts the provided ciphertext.
|
||||
"""
|
||||
|
||||
@abc.abstractproperty
|
||||
def key_size(self):
|
||||
"""
|
||||
The bit length of the public modulus.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def public_key(self):
|
||||
"""
|
||||
The RSAPublicKey associated with this private key.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def sign(self, data, padding, algorithm):
|
||||
"""
|
||||
Signs the data.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class RSAPrivateKeyWithSerialization(RSAPrivateKey):
|
||||
@abc.abstractmethod
|
||||
def private_numbers(self):
|
||||
"""
|
||||
Returns an RSAPrivateNumbers.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def private_bytes(self, encoding, format, encryption_algorithm):
|
||||
"""
|
||||
Returns the key serialized as bytes.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class RSAPublicKey(object):
|
||||
@abc.abstractmethod
|
||||
def verifier(self, signature, padding, algorithm):
|
||||
"""
|
||||
Returns an AsymmetricVerificationContext used for verifying signatures.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def encrypt(self, plaintext, padding):
|
||||
"""
|
||||
Encrypts the given plaintext.
|
||||
"""
|
||||
|
||||
@abc.abstractproperty
|
||||
def key_size(self):
|
||||
"""
|
||||
The bit length of the public modulus.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def public_numbers(self):
|
||||
"""
|
||||
Returns an RSAPublicNumbers
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def public_bytes(self, encoding, format):
|
||||
"""
|
||||
Returns the key serialized as bytes.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def verify(self, signature, data, padding, algorithm):
|
||||
"""
|
||||
Verifies the signature of the data.
|
||||
"""
|
||||
|
||||
|
||||
RSAPublicKeyWithSerialization = RSAPublicKey
|
||||
|
||||
|
||||
def generate_private_key(public_exponent, key_size, backend=None):
|
||||
backend = _get_backend(backend)
|
||||
if not isinstance(backend, RSABackend):
|
||||
raise UnsupportedAlgorithm(
|
||||
"Backend object does not implement RSABackend.",
|
||||
_Reasons.BACKEND_MISSING_INTERFACE,
|
||||
)
|
||||
|
||||
_verify_rsa_parameters(public_exponent, key_size)
|
||||
return backend.generate_rsa_private_key(public_exponent, key_size)
|
||||
|
||||
|
||||
def _verify_rsa_parameters(public_exponent, key_size):
|
||||
if public_exponent not in (3, 65537):
|
||||
raise ValueError(
|
||||
"public_exponent must be either 3 (for legacy compatibility) or "
|
||||
"65537. Almost everyone should choose 65537 here!"
|
||||
)
|
||||
|
||||
if key_size < 512:
|
||||
raise ValueError("key_size must be at least 512-bits.")
|
||||
|
||||
|
||||
def _check_private_key_components(
|
||||
p, q, private_exponent, dmp1, dmq1, iqmp, public_exponent, modulus
|
||||
):
|
||||
if modulus < 3:
|
||||
raise ValueError("modulus must be >= 3.")
|
||||
|
||||
if p >= modulus:
|
||||
raise ValueError("p must be < modulus.")
|
||||
|
||||
if q >= modulus:
|
||||
raise ValueError("q must be < modulus.")
|
||||
|
||||
if dmp1 >= modulus:
|
||||
raise ValueError("dmp1 must be < modulus.")
|
||||
|
||||
if dmq1 >= modulus:
|
||||
raise ValueError("dmq1 must be < modulus.")
|
||||
|
||||
if iqmp >= modulus:
|
||||
raise ValueError("iqmp must be < modulus.")
|
||||
|
||||
if private_exponent >= modulus:
|
||||
raise ValueError("private_exponent must be < modulus.")
|
||||
|
||||
if public_exponent < 3 or public_exponent >= modulus:
|
||||
raise ValueError("public_exponent must be >= 3 and < modulus.")
|
||||
|
||||
if public_exponent & 1 == 0:
|
||||
raise ValueError("public_exponent must be odd.")
|
||||
|
||||
if dmp1 & 1 == 0:
|
||||
raise ValueError("dmp1 must be odd.")
|
||||
|
||||
if dmq1 & 1 == 0:
|
||||
raise ValueError("dmq1 must be odd.")
|
||||
|
||||
if p * q != modulus:
|
||||
raise ValueError("p*q must equal modulus.")
|
||||
|
||||
|
||||
def _check_public_key_components(e, n):
|
||||
if n < 3:
|
||||
raise ValueError("n must be >= 3.")
|
||||
|
||||
if e < 3 or e >= n:
|
||||
raise ValueError("e must be >= 3 and < n.")
|
||||
|
||||
if e & 1 == 0:
|
||||
raise ValueError("e must be odd.")
|
||||
|
||||
|
||||
def _modinv(e, m):
|
||||
"""
|
||||
Modular Multiplicative Inverse. Returns x such that: (x*e) mod m == 1
|
||||
"""
|
||||
x1, x2 = 1, 0
|
||||
a, b = e, m
|
||||
while b > 0:
|
||||
q, r = divmod(a, b)
|
||||
xn = x1 - q * x2
|
||||
a, b, x1, x2 = b, r, x2, xn
|
||||
return x1 % m
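
# Illustrative sketch (not part of the upstream module): a small worked example
# of the extended-Euclid loop above. The inverse of 3 modulo 7 is 5, because
# (3 * 5) % 7 == 1.
def _example_modinv():
    inverse = _modinv(3, 7)
    assert inverse == 5
    assert (3 * inverse) % 7 == 1
    return inverse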
|
||||
|
||||
|
||||
def rsa_crt_iqmp(p, q):
|
||||
"""
|
||||
Compute the CRT (q ** -1) % p value from RSA primes p and q.
|
||||
"""
|
||||
return _modinv(q, p)
|
||||
|
||||
|
||||
def rsa_crt_dmp1(private_exponent, p):
|
||||
"""
|
||||
Compute the CRT private_exponent % (p - 1) value from the RSA
|
||||
private_exponent (d) and p.
|
||||
"""
|
||||
return private_exponent % (p - 1)
|
||||
|
||||
|
||||
def rsa_crt_dmq1(private_exponent, q):
|
||||
"""
|
||||
Compute the CRT private_exponent % (q - 1) value from the RSA
|
||||
private_exponent (d) and q.
|
||||
"""
|
||||
return private_exponent % (q - 1)
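
# Illustrative sketch (not part of the upstream module): the three CRT helpers
# above applied to the textbook toy key p=61, q=53, e=17, d=2753 (n=3233).
def _example_rsa_crt_values():
    p, q, d = 61, 53, 2753
    dmp1 = rsa_crt_dmp1(d, p)  # 2753 % 60 == 53
    dmq1 = rsa_crt_dmq1(d, q)  # 2753 % 52 == 49
    iqmp = rsa_crt_iqmp(p, q)  # inverse of 53 modulo 61 == 38
    return dmp1, dmq1, iqmp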
|
||||
|
||||
|
||||
# Controls the number of iterations rsa_recover_prime_factors will perform
|
||||
# to obtain the prime factors. Each iteration increments by 2 so the actual
|
||||
# maximum attempts is half this number.
|
||||
_MAX_RECOVERY_ATTEMPTS = 1000
|
||||
|
||||
|
||||
def rsa_recover_prime_factors(n, e, d):
|
||||
"""
|
||||
Compute factors p and q from the private exponent d. We assume that n has
|
||||
no more than two factors. This function is adapted from code in PyCrypto.
|
||||
"""
|
||||
# See 8.2.2(i) in Handbook of Applied Cryptography.
|
||||
ktot = d * e - 1
|
||||
# The quantity d*e-1 is a multiple of phi(n), even,
|
||||
# and can be represented as t*2^s.
|
||||
t = ktot
|
||||
while t % 2 == 0:
|
||||
t = t // 2
|
||||
# Cycle through all multiplicative inverses in Zn.
|
||||
# The algorithm is non-deterministic, but there is a 50% chance
|
||||
# any candidate a leads to successful factoring.
|
||||
# See "Digitalized Signatures and Public Key Functions as Intractable
|
||||
# as Factorization", M. Rabin, 1979
|
||||
spotted = False
|
||||
a = 2
|
||||
while not spotted and a < _MAX_RECOVERY_ATTEMPTS:
|
||||
k = t
|
||||
# Cycle through all values a^{t*2^i}=a^k
|
||||
while k < ktot:
|
||||
cand = pow(a, k, n)
|
||||
# Check if a^k is a non-trivial root of unity (mod n)
|
||||
if cand != 1 and cand != (n - 1) and pow(cand, 2, n) == 1:
|
||||
# We have found a number such that (cand-1)(cand+1)=0 (mod n).
|
||||
# Either of the terms divides n.
|
||||
p = gcd(cand + 1, n)
|
||||
spotted = True
|
||||
break
|
||||
k *= 2
|
||||
# This value was not any good... let's try another!
|
||||
a += 2
|
||||
if not spotted:
|
||||
raise ValueError("Unable to compute factors p and q from exponent d.")
|
||||
# Found !
|
||||
q, r = divmod(n, p)
|
||||
assert r == 0
|
||||
p, q = sorted((p, q), reverse=True)
|
||||
return (p, q)
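
# Illustrative sketch (not part of the upstream module): recovering the same
# toy factors from (n, e, d). Real keys use primes of 1024 bits or more.
def _example_recover_prime_factors():
    p, q = rsa_recover_prime_factors(n=3233, e=17, d=2753)
    assert {p, q} == {61, 53}
    return p, q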
|
||||
|
||||
|
||||
class RSAPrivateNumbers(object):
|
||||
def __init__(self, p, q, d, dmp1, dmq1, iqmp, public_numbers):
|
||||
if (
|
||||
not isinstance(p, six.integer_types)
|
||||
or not isinstance(q, six.integer_types)
|
||||
or not isinstance(d, six.integer_types)
|
||||
or not isinstance(dmp1, six.integer_types)
|
||||
or not isinstance(dmq1, six.integer_types)
|
||||
or not isinstance(iqmp, six.integer_types)
|
||||
):
|
||||
raise TypeError(
|
||||
"RSAPrivateNumbers p, q, d, dmp1, dmq1, iqmp arguments must"
|
||||
" all be an integers."
|
||||
)
|
||||
|
||||
if not isinstance(public_numbers, RSAPublicNumbers):
|
||||
raise TypeError(
|
||||
"RSAPrivateNumbers public_numbers must be an RSAPublicNumbers"
|
||||
" instance."
|
||||
)
|
||||
|
||||
self._p = p
|
||||
self._q = q
|
||||
self._d = d
|
||||
self._dmp1 = dmp1
|
||||
self._dmq1 = dmq1
|
||||
self._iqmp = iqmp
|
||||
self._public_numbers = public_numbers
|
||||
|
||||
p = utils.read_only_property("_p")
|
||||
q = utils.read_only_property("_q")
|
||||
d = utils.read_only_property("_d")
|
||||
dmp1 = utils.read_only_property("_dmp1")
|
||||
dmq1 = utils.read_only_property("_dmq1")
|
||||
iqmp = utils.read_only_property("_iqmp")
|
||||
public_numbers = utils.read_only_property("_public_numbers")
|
||||
|
||||
def private_key(self, backend=None):
|
||||
backend = _get_backend(backend)
|
||||
return backend.load_rsa_private_numbers(self)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, RSAPrivateNumbers):
|
||||
return NotImplemented
|
||||
|
||||
return (
|
||||
self.p == other.p
|
||||
and self.q == other.q
|
||||
and self.d == other.d
|
||||
and self.dmp1 == other.dmp1
|
||||
and self.dmq1 == other.dmq1
|
||||
and self.iqmp == other.iqmp
|
||||
and self.public_numbers == other.public_numbers
|
||||
)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
def __hash__(self):
|
||||
return hash(
|
||||
(
|
||||
self.p,
|
||||
self.q,
|
||||
self.d,
|
||||
self.dmp1,
|
||||
self.dmq1,
|
||||
self.iqmp,
|
||||
self.public_numbers,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
class RSAPublicNumbers(object):
|
||||
def __init__(self, e, n):
|
||||
if not isinstance(e, six.integer_types) or not isinstance(
|
||||
n, six.integer_types
|
||||
):
|
||||
raise TypeError("RSAPublicNumbers arguments must be integers.")
|
||||
|
||||
self._e = e
|
||||
self._n = n
|
||||
|
||||
e = utils.read_only_property("_e")
|
||||
n = utils.read_only_property("_n")
|
||||
|
||||
def public_key(self, backend=None):
|
||||
backend = _get_backend(backend)
|
||||
return backend.load_rsa_public_numbers(self)
|
||||
|
||||
def __repr__(self):
|
||||
return "<RSAPublicNumbers(e={0.e}, n={0.n})>".format(self)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, RSAPublicNumbers):
|
||||
return NotImplemented
|
||||
|
||||
return self.e == other.e and self.n == other.n
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
def __hash__(self):
|
||||
return hash((self.e, self.n))
@@ -0,0 +1,41 @@
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.hazmat._der import (
|
||||
DERReader,
|
||||
INTEGER,
|
||||
SEQUENCE,
|
||||
encode_der,
|
||||
encode_der_integer,
|
||||
)
|
||||
from cryptography.hazmat.primitives import hashes
|
||||
|
||||
|
||||
def decode_dss_signature(signature):
|
||||
with DERReader(signature).read_single_element(SEQUENCE) as seq:
|
||||
r = seq.read_element(INTEGER).as_integer()
|
||||
s = seq.read_element(INTEGER).as_integer()
|
||||
return r, s
|
||||
|
||||
|
||||
def encode_dss_signature(r, s):
|
||||
return encode_der(
|
||||
SEQUENCE,
|
||||
encode_der(INTEGER, encode_der_integer(r)),
|
||||
encode_der(INTEGER, encode_der_integer(s)),
|
||||
)
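
# Illustrative sketch (not part of the upstream module): the DER round trip
# provided by the two helpers above, using arbitrary demonstration integers.
def _example_dss_signature_roundtrip():
    sig = encode_dss_signature(12345, 67890)
    assert decode_dss_signature(sig) == (12345, 67890)
    return sig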
|
||||
|
||||
|
||||
class Prehashed(object):
|
||||
def __init__(self, algorithm):
|
||||
if not isinstance(algorithm, hashes.HashAlgorithm):
|
||||
raise TypeError("Expected instance of HashAlgorithm.")
|
||||
|
||||
self._algorithm = algorithm
|
||||
self._digest_size = algorithm.digest_size
|
||||
|
||||
digest_size = utils.read_only_property("_digest_size")
@@ -0,0 +1,76 @@
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import abc
|
||||
|
||||
import six
|
||||
|
||||
from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class X25519PublicKey(object):
|
||||
@classmethod
|
||||
def from_public_bytes(cls, data):
|
||||
from cryptography.hazmat.backends.openssl.backend import backend
|
||||
|
||||
if not backend.x25519_supported():
|
||||
raise UnsupportedAlgorithm(
|
||||
"X25519 is not supported by this version of OpenSSL.",
|
||||
_Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
|
||||
)
|
||||
|
||||
return backend.x25519_load_public_bytes(data)
|
||||
|
||||
@abc.abstractmethod
|
||||
def public_bytes(self, encoding, format):
|
||||
"""
|
||||
The serialized bytes of the public key.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class X25519PrivateKey(object):
|
||||
@classmethod
|
||||
def generate(cls):
|
||||
from cryptography.hazmat.backends.openssl.backend import backend
|
||||
|
||||
if not backend.x25519_supported():
|
||||
raise UnsupportedAlgorithm(
|
||||
"X25519 is not supported by this version of OpenSSL.",
|
||||
_Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
|
||||
)
|
||||
return backend.x25519_generate_key()
|
||||
|
||||
@classmethod
|
||||
def from_private_bytes(cls, data):
|
||||
from cryptography.hazmat.backends.openssl.backend import backend
|
||||
|
||||
if not backend.x25519_supported():
|
||||
raise UnsupportedAlgorithm(
|
||||
"X25519 is not supported by this version of OpenSSL.",
|
||||
_Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
|
||||
)
|
||||
|
||||
return backend.x25519_load_private_bytes(data)
|
||||
|
||||
@abc.abstractmethod
|
||||
def public_key(self):
|
||||
"""
|
||||
The serialized bytes of the public key.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def private_bytes(self, encoding, format, encryption_algorithm):
|
||||
"""
|
||||
The serialized bytes of the private key.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def exchange(self, peer_public_key):
|
||||
"""
|
||||
Performs a key exchange operation using the provided peer's public key.
|
||||
"""
|
||||
|
|
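Editor's note: a minimal sketch of the X25519 key-exchange API above (not part of the committed file); it assumes an OpenSSL build with X25519 support.

    # Illustrative only; both sides derive the same shared secret.
    from cryptography.hazmat.primitives.asymmetric.x25519 import X25519PrivateKey

    alice = X25519PrivateKey.generate()
    bob = X25519PrivateKey.generate()

    shared_alice = alice.exchange(bob.public_key())
    shared_bob = bob.exchange(alice.public_key())
    assert shared_alice == shared_bob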
@ -0,0 +1,76 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

from __future__ import absolute_import, division, print_function

import abc

import six

from cryptography.exceptions import UnsupportedAlgorithm, _Reasons


@six.add_metaclass(abc.ABCMeta)
class X448PublicKey(object):
    @classmethod
    def from_public_bytes(cls, data):
        from cryptography.hazmat.backends.openssl.backend import backend

        if not backend.x448_supported():
            raise UnsupportedAlgorithm(
                "X448 is not supported by this version of OpenSSL.",
                _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
            )

        return backend.x448_load_public_bytes(data)

    @abc.abstractmethod
    def public_bytes(self, encoding, format):
        """
        The serialized bytes of the public key.
        """


@six.add_metaclass(abc.ABCMeta)
class X448PrivateKey(object):
    @classmethod
    def generate(cls):
        from cryptography.hazmat.backends.openssl.backend import backend

        if not backend.x448_supported():
            raise UnsupportedAlgorithm(
                "X448 is not supported by this version of OpenSSL.",
                _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
            )
        return backend.x448_generate_key()

    @classmethod
    def from_private_bytes(cls, data):
        from cryptography.hazmat.backends.openssl.backend import backend

        if not backend.x448_supported():
            raise UnsupportedAlgorithm(
                "X448 is not supported by this version of OpenSSL.",
                _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
            )

        return backend.x448_load_private_bytes(data)

    @abc.abstractmethod
    def public_key(self):
        """
        The serialized bytes of the public key.
        """

    @abc.abstractmethod
    def private_bytes(self, encoding, format, encryption_algorithm):
        """
        The serialized bytes of the private key.
        """

    @abc.abstractmethod
    def exchange(self, peer_public_key):
        """
        Performs a key exchange operation using the provided peer's public key.
        """
@ -0,0 +1,26 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

from __future__ import absolute_import, division, print_function

from cryptography.hazmat.primitives.ciphers.base import (
    AEADCipherContext,
    AEADDecryptionContext,
    AEADEncryptionContext,
    BlockCipherAlgorithm,
    Cipher,
    CipherAlgorithm,
    CipherContext,
)


__all__ = [
    "Cipher",
    "CipherAlgorithm",
    "BlockCipherAlgorithm",
    "CipherContext",
    "AEADCipherContext",
    "AEADDecryptionContext",
    "AEADEncryptionContext",
]
@ -0,0 +1,174 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

from __future__ import absolute_import, division, print_function

import os

from cryptography import exceptions, utils
from cryptography.hazmat.backends.openssl import aead
from cryptography.hazmat.backends.openssl.backend import backend


class ChaCha20Poly1305(object):
    _MAX_SIZE = 2 ** 32

    def __init__(self, key):
        if not backend.aead_cipher_supported(self):
            raise exceptions.UnsupportedAlgorithm(
                "ChaCha20Poly1305 is not supported by this version of OpenSSL",
                exceptions._Reasons.UNSUPPORTED_CIPHER,
            )
        utils._check_byteslike("key", key)

        if len(key) != 32:
            raise ValueError("ChaCha20Poly1305 key must be 32 bytes.")

        self._key = key

    @classmethod
    def generate_key(cls):
        return os.urandom(32)

    def encrypt(self, nonce, data, associated_data):
        if associated_data is None:
            associated_data = b""

        if len(data) > self._MAX_SIZE or len(associated_data) > self._MAX_SIZE:
            # This is OverflowError to match what cffi would raise
            raise OverflowError(
                "Data or associated data too long. Max 2**32 bytes"
            )

        self._check_params(nonce, data, associated_data)
        return aead._encrypt(backend, self, nonce, data, associated_data, 16)

    def decrypt(self, nonce, data, associated_data):
        if associated_data is None:
            associated_data = b""

        self._check_params(nonce, data, associated_data)
        return aead._decrypt(backend, self, nonce, data, associated_data, 16)

    def _check_params(self, nonce, data, associated_data):
        utils._check_byteslike("nonce", nonce)
        utils._check_bytes("data", data)
        utils._check_bytes("associated_data", associated_data)
        if len(nonce) != 12:
            raise ValueError("Nonce must be 12 bytes")


class AESCCM(object):
    _MAX_SIZE = 2 ** 32

    def __init__(self, key, tag_length=16):
        utils._check_byteslike("key", key)
        if len(key) not in (16, 24, 32):
            raise ValueError("AESCCM key must be 128, 192, or 256 bits.")

        self._key = key
        if not isinstance(tag_length, int):
            raise TypeError("tag_length must be an integer")

        if tag_length not in (4, 6, 8, 10, 12, 14, 16):
            raise ValueError("Invalid tag_length")

        self._tag_length = tag_length

    @classmethod
    def generate_key(cls, bit_length):
        if not isinstance(bit_length, int):
            raise TypeError("bit_length must be an integer")

        if bit_length not in (128, 192, 256):
            raise ValueError("bit_length must be 128, 192, or 256")

        return os.urandom(bit_length // 8)

    def encrypt(self, nonce, data, associated_data):
        if associated_data is None:
            associated_data = b""

        if len(data) > self._MAX_SIZE or len(associated_data) > self._MAX_SIZE:
            # This is OverflowError to match what cffi would raise
            raise OverflowError(
                "Data or associated data too long. Max 2**32 bytes"
            )

        self._check_params(nonce, data, associated_data)
        self._validate_lengths(nonce, len(data))
        return aead._encrypt(
            backend, self, nonce, data, associated_data, self._tag_length
        )

    def decrypt(self, nonce, data, associated_data):
        if associated_data is None:
            associated_data = b""

        self._check_params(nonce, data, associated_data)
        return aead._decrypt(
            backend, self, nonce, data, associated_data, self._tag_length
        )

    def _validate_lengths(self, nonce, data_len):
        # For information about computing this, see
        # https://tools.ietf.org/html/rfc3610#section-2.1
        l_val = 15 - len(nonce)
        if 2 ** (8 * l_val) < data_len:
            raise ValueError("Data too long for nonce")

    def _check_params(self, nonce, data, associated_data):
        utils._check_byteslike("nonce", nonce)
        utils._check_bytes("data", data)
        utils._check_bytes("associated_data", associated_data)
        if not 7 <= len(nonce) <= 13:
            raise ValueError("Nonce must be between 7 and 13 bytes")


class AESGCM(object):
    _MAX_SIZE = 2 ** 32

    def __init__(self, key):
        utils._check_byteslike("key", key)
        if len(key) not in (16, 24, 32):
            raise ValueError("AESGCM key must be 128, 192, or 256 bits.")

        self._key = key

    @classmethod
    def generate_key(cls, bit_length):
        if not isinstance(bit_length, int):
            raise TypeError("bit_length must be an integer")

        if bit_length not in (128, 192, 256):
            raise ValueError("bit_length must be 128, 192, or 256")

        return os.urandom(bit_length // 8)

    def encrypt(self, nonce, data, associated_data):
        if associated_data is None:
            associated_data = b""

        if len(data) > self._MAX_SIZE or len(associated_data) > self._MAX_SIZE:
            # This is OverflowError to match what cffi would raise
            raise OverflowError(
                "Data or associated data too long. Max 2**32 bytes"
            )

        self._check_params(nonce, data, associated_data)
        return aead._encrypt(backend, self, nonce, data, associated_data, 16)

    def decrypt(self, nonce, data, associated_data):
        if associated_data is None:
            associated_data = b""

        self._check_params(nonce, data, associated_data)
        return aead._decrypt(backend, self, nonce, data, associated_data, 16)

    def _check_params(self, nonce, data, associated_data):
        utils._check_byteslike("nonce", nonce)
        utils._check_bytes("data", data)
        utils._check_bytes("associated_data", associated_data)
        if len(nonce) == 0:
            raise ValueError("Nonce must be at least 1 byte")
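Editor's note: a minimal sketch of the AEAD API above (not part of the committed file); the 12-byte nonce must never be reused with the same key.

    # Illustrative only.
    import os
    from cryptography.hazmat.primitives.ciphers.aead import ChaCha20Poly1305

    key = ChaCha20Poly1305.generate_key()
    chacha = ChaCha20Poly1305(key)
    nonce = os.urandom(12)
    ct = chacha.encrypt(nonce, b"secret message", b"associated data")
    assert chacha.decrypt(nonce, ct, b"associated data") == b"secret message"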
@ -0,0 +1,170 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

from __future__ import absolute_import, division, print_function

from cryptography import utils
from cryptography.hazmat.primitives.ciphers import (
    BlockCipherAlgorithm,
    CipherAlgorithm,
)
from cryptography.hazmat.primitives.ciphers.modes import ModeWithNonce


def _verify_key_size(algorithm, key):
    # Verify that the key is instance of bytes
    utils._check_byteslike("key", key)

    # Verify that the key size matches the expected key size
    if len(key) * 8 not in algorithm.key_sizes:
        raise ValueError(
            "Invalid key size ({}) for {}.".format(
                len(key) * 8, algorithm.name
            )
        )
    return key


@utils.register_interface(BlockCipherAlgorithm)
@utils.register_interface(CipherAlgorithm)
class AES(object):
    name = "AES"
    block_size = 128
    # 512 added to support AES-256-XTS, which uses 512-bit keys
    key_sizes = frozenset([128, 192, 256, 512])

    def __init__(self, key):
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self):
        return len(self.key) * 8


@utils.register_interface(BlockCipherAlgorithm)
@utils.register_interface(CipherAlgorithm)
class Camellia(object):
    name = "camellia"
    block_size = 128
    key_sizes = frozenset([128, 192, 256])

    def __init__(self, key):
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self):
        return len(self.key) * 8


@utils.register_interface(BlockCipherAlgorithm)
@utils.register_interface(CipherAlgorithm)
class TripleDES(object):
    name = "3DES"
    block_size = 64
    key_sizes = frozenset([64, 128, 192])

    def __init__(self, key):
        if len(key) == 8:
            key += key + key
        elif len(key) == 16:
            key += key[:8]
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self):
        return len(self.key) * 8


@utils.register_interface(BlockCipherAlgorithm)
@utils.register_interface(CipherAlgorithm)
class Blowfish(object):
    name = "Blowfish"
    block_size = 64
    key_sizes = frozenset(range(32, 449, 8))

    def __init__(self, key):
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self):
        return len(self.key) * 8


@utils.register_interface(BlockCipherAlgorithm)
@utils.register_interface(CipherAlgorithm)
class CAST5(object):
    name = "CAST5"
    block_size = 64
    key_sizes = frozenset(range(40, 129, 8))

    def __init__(self, key):
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self):
        return len(self.key) * 8


@utils.register_interface(CipherAlgorithm)
class ARC4(object):
    name = "RC4"
    key_sizes = frozenset([40, 56, 64, 80, 128, 160, 192, 256])

    def __init__(self, key):
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self):
        return len(self.key) * 8


@utils.register_interface(CipherAlgorithm)
class IDEA(object):
    name = "IDEA"
    block_size = 64
    key_sizes = frozenset([128])

    def __init__(self, key):
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self):
        return len(self.key) * 8


@utils.register_interface(BlockCipherAlgorithm)
@utils.register_interface(CipherAlgorithm)
class SEED(object):
    name = "SEED"
    block_size = 128
    key_sizes = frozenset([128])

    def __init__(self, key):
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self):
        return len(self.key) * 8


@utils.register_interface(CipherAlgorithm)
@utils.register_interface(ModeWithNonce)
class ChaCha20(object):
    name = "ChaCha20"
    key_sizes = frozenset([256])

    def __init__(self, key, nonce):
        self.key = _verify_key_size(self, key)
        utils._check_byteslike("nonce", nonce)

        if len(nonce) != 16:
            raise ValueError("nonce must be 128-bits (16 bytes)")

        self._nonce = nonce

    nonce = utils.read_only_property("_nonce")

    @property
    def key_size(self):
        return len(self.key) * 8
@ -0,0 +1,241 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

from __future__ import absolute_import, division, print_function

import abc

import six

from cryptography import utils
from cryptography.exceptions import (
    AlreadyFinalized,
    AlreadyUpdated,
    NotYetFinalized,
    UnsupportedAlgorithm,
    _Reasons,
)
from cryptography.hazmat.backends import _get_backend
from cryptography.hazmat.backends.interfaces import CipherBackend
from cryptography.hazmat.primitives.ciphers import modes


@six.add_metaclass(abc.ABCMeta)
class CipherAlgorithm(object):
    @abc.abstractproperty
    def name(self):
        """
        A string naming this mode (e.g. "AES", "Camellia").
        """

    @abc.abstractproperty
    def key_size(self):
        """
        The size of the key being used as an integer in bits (e.g. 128, 256).
        """


@six.add_metaclass(abc.ABCMeta)
class BlockCipherAlgorithm(object):
    @abc.abstractproperty
    def block_size(self):
        """
        The size of a block as an integer in bits (e.g. 64, 128).
        """


@six.add_metaclass(abc.ABCMeta)
class CipherContext(object):
    @abc.abstractmethod
    def update(self, data):
        """
        Processes the provided bytes through the cipher and returns the results
        as bytes.
        """

    @abc.abstractmethod
    def update_into(self, data, buf):
        """
        Processes the provided bytes and writes the resulting data into the
        provided buffer. Returns the number of bytes written.
        """

    @abc.abstractmethod
    def finalize(self):
        """
        Returns the results of processing the final block as bytes.
        """


@six.add_metaclass(abc.ABCMeta)
class AEADCipherContext(object):
    @abc.abstractmethod
    def authenticate_additional_data(self, data):
        """
        Authenticates the provided bytes.
        """


@six.add_metaclass(abc.ABCMeta)
class AEADDecryptionContext(object):
    @abc.abstractmethod
    def finalize_with_tag(self, tag):
        """
        Returns the results of processing the final block as bytes and allows
        delayed passing of the authentication tag.
        """


@six.add_metaclass(abc.ABCMeta)
class AEADEncryptionContext(object):
    @abc.abstractproperty
    def tag(self):
        """
        Returns tag bytes. This is only available after encryption is
        finalized.
        """


class Cipher(object):
    def __init__(self, algorithm, mode, backend=None):
        backend = _get_backend(backend)
        if not isinstance(backend, CipherBackend):
            raise UnsupportedAlgorithm(
                "Backend object does not implement CipherBackend.",
                _Reasons.BACKEND_MISSING_INTERFACE,
            )

        if not isinstance(algorithm, CipherAlgorithm):
            raise TypeError("Expected interface of CipherAlgorithm.")

        if mode is not None:
            mode.validate_for_algorithm(algorithm)

        self.algorithm = algorithm
        self.mode = mode
        self._backend = backend

    def encryptor(self):
        if isinstance(self.mode, modes.ModeWithAuthenticationTag):
            if self.mode.tag is not None:
                raise ValueError(
                    "Authentication tag must be None when encrypting."
                )
        ctx = self._backend.create_symmetric_encryption_ctx(
            self.algorithm, self.mode
        )
        return self._wrap_ctx(ctx, encrypt=True)

    def decryptor(self):
        ctx = self._backend.create_symmetric_decryption_ctx(
            self.algorithm, self.mode
        )
        return self._wrap_ctx(ctx, encrypt=False)

    def _wrap_ctx(self, ctx, encrypt):
        if isinstance(self.mode, modes.ModeWithAuthenticationTag):
            if encrypt:
                return _AEADEncryptionContext(ctx)
            else:
                return _AEADCipherContext(ctx)
        else:
            return _CipherContext(ctx)


@utils.register_interface(CipherContext)
class _CipherContext(object):
    def __init__(self, ctx):
        self._ctx = ctx

    def update(self, data):
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        return self._ctx.update(data)

    def update_into(self, data, buf):
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        return self._ctx.update_into(data, buf)

    def finalize(self):
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        data = self._ctx.finalize()
        self._ctx = None
        return data


@utils.register_interface(AEADCipherContext)
@utils.register_interface(CipherContext)
@utils.register_interface(AEADDecryptionContext)
class _AEADCipherContext(object):
    def __init__(self, ctx):
        self._ctx = ctx
        self._bytes_processed = 0
        self._aad_bytes_processed = 0
        self._tag = None
        self._updated = False

    def _check_limit(self, data_size):
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        self._updated = True
        self._bytes_processed += data_size
        if self._bytes_processed > self._ctx._mode._MAX_ENCRYPTED_BYTES:
            raise ValueError(
                "{} has a maximum encrypted byte limit of {}".format(
                    self._ctx._mode.name, self._ctx._mode._MAX_ENCRYPTED_BYTES
                )
            )

    def update(self, data):
        self._check_limit(len(data))
        return self._ctx.update(data)

    def update_into(self, data, buf):
        self._check_limit(len(data))
        return self._ctx.update_into(data, buf)

    def finalize(self):
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        data = self._ctx.finalize()
        self._tag = self._ctx.tag
        self._ctx = None
        return data

    def finalize_with_tag(self, tag):
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        data = self._ctx.finalize_with_tag(tag)
        self._tag = self._ctx.tag
        self._ctx = None
        return data

    def authenticate_additional_data(self, data):
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        if self._updated:
            raise AlreadyUpdated("Update has been called on this context.")

        self._aad_bytes_processed += len(data)
        if self._aad_bytes_processed > self._ctx._mode._MAX_AAD_BYTES:
            raise ValueError(
                "{} has a maximum AAD byte limit of {}".format(
                    self._ctx._mode.name, self._ctx._mode._MAX_AAD_BYTES
                )
            )

        self._ctx.authenticate_additional_data(data)


@utils.register_interface(AEADEncryptionContext)
class _AEADEncryptionContext(_AEADCipherContext):
    @property
    def tag(self):
        if self._ctx is not None:
            raise NotYetFinalized(
                "You must finalize encryption before " "getting the tag."
            )
        return self._tag
@ -0,0 +1,223 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

from __future__ import absolute_import, division, print_function

import abc

import six

from cryptography import utils


@six.add_metaclass(abc.ABCMeta)
class Mode(object):
    @abc.abstractproperty
    def name(self):
        """
        A string naming this mode (e.g. "ECB", "CBC").
        """

    @abc.abstractmethod
    def validate_for_algorithm(self, algorithm):
        """
        Checks that all the necessary invariants of this (mode, algorithm)
        combination are met.
        """


@six.add_metaclass(abc.ABCMeta)
class ModeWithInitializationVector(object):
    @abc.abstractproperty
    def initialization_vector(self):
        """
        The value of the initialization vector for this mode as bytes.
        """


@six.add_metaclass(abc.ABCMeta)
class ModeWithTweak(object):
    @abc.abstractproperty
    def tweak(self):
        """
        The value of the tweak for this mode as bytes.
        """


@six.add_metaclass(abc.ABCMeta)
class ModeWithNonce(object):
    @abc.abstractproperty
    def nonce(self):
        """
        The value of the nonce for this mode as bytes.
        """


@six.add_metaclass(abc.ABCMeta)
class ModeWithAuthenticationTag(object):
    @abc.abstractproperty
    def tag(self):
        """
        The value of the tag supplied to the constructor of this mode.
        """


def _check_aes_key_length(self, algorithm):
    if algorithm.key_size > 256 and algorithm.name == "AES":
        raise ValueError(
            "Only 128, 192, and 256 bit keys are allowed for this AES mode"
        )


def _check_iv_length(self, algorithm):
    if len(self.initialization_vector) * 8 != algorithm.block_size:
        raise ValueError(
            "Invalid IV size ({}) for {}.".format(
                len(self.initialization_vector), self.name
            )
        )


def _check_iv_and_key_length(self, algorithm):
    _check_aes_key_length(self, algorithm)
    _check_iv_length(self, algorithm)


@utils.register_interface(Mode)
@utils.register_interface(ModeWithInitializationVector)
class CBC(object):
    name = "CBC"

    def __init__(self, initialization_vector):
        utils._check_byteslike("initialization_vector", initialization_vector)
        self._initialization_vector = initialization_vector

    initialization_vector = utils.read_only_property("_initialization_vector")
    validate_for_algorithm = _check_iv_and_key_length


@utils.register_interface(Mode)
@utils.register_interface(ModeWithTweak)
class XTS(object):
    name = "XTS"

    def __init__(self, tweak):
        utils._check_byteslike("tweak", tweak)

        if len(tweak) != 16:
            raise ValueError("tweak must be 128-bits (16 bytes)")

        self._tweak = tweak

    tweak = utils.read_only_property("_tweak")

    def validate_for_algorithm(self, algorithm):
        if algorithm.key_size not in (256, 512):
            raise ValueError(
                "The XTS specification requires a 256-bit key for AES-128-XTS"
                " and 512-bit key for AES-256-XTS"
            )


@utils.register_interface(Mode)
class ECB(object):
    name = "ECB"

    validate_for_algorithm = _check_aes_key_length


@utils.register_interface(Mode)
@utils.register_interface(ModeWithInitializationVector)
class OFB(object):
    name = "OFB"

    def __init__(self, initialization_vector):
        utils._check_byteslike("initialization_vector", initialization_vector)
        self._initialization_vector = initialization_vector

    initialization_vector = utils.read_only_property("_initialization_vector")
    validate_for_algorithm = _check_iv_and_key_length


@utils.register_interface(Mode)
@utils.register_interface(ModeWithInitializationVector)
class CFB(object):
    name = "CFB"

    def __init__(self, initialization_vector):
        utils._check_byteslike("initialization_vector", initialization_vector)
        self._initialization_vector = initialization_vector

    initialization_vector = utils.read_only_property("_initialization_vector")
    validate_for_algorithm = _check_iv_and_key_length


@utils.register_interface(Mode)
@utils.register_interface(ModeWithInitializationVector)
class CFB8(object):
    name = "CFB8"

    def __init__(self, initialization_vector):
        utils._check_byteslike("initialization_vector", initialization_vector)
        self._initialization_vector = initialization_vector

    initialization_vector = utils.read_only_property("_initialization_vector")
    validate_for_algorithm = _check_iv_and_key_length


@utils.register_interface(Mode)
@utils.register_interface(ModeWithNonce)
class CTR(object):
    name = "CTR"

    def __init__(self, nonce):
        utils._check_byteslike("nonce", nonce)
        self._nonce = nonce

    nonce = utils.read_only_property("_nonce")

    def validate_for_algorithm(self, algorithm):
        _check_aes_key_length(self, algorithm)
        if len(self.nonce) * 8 != algorithm.block_size:
            raise ValueError(
                "Invalid nonce size ({}) for {}.".format(
                    len(self.nonce), self.name
                )
            )


@utils.register_interface(Mode)
@utils.register_interface(ModeWithInitializationVector)
@utils.register_interface(ModeWithAuthenticationTag)
class GCM(object):
    name = "GCM"
    _MAX_ENCRYPTED_BYTES = (2 ** 39 - 256) // 8
    _MAX_AAD_BYTES = (2 ** 64) // 8

    def __init__(self, initialization_vector, tag=None, min_tag_length=16):
        # len(initialization_vector) must in [1, 2 ** 64), but it's impossible
        # to actually construct a bytes object that large, so we don't check
        # for it
        utils._check_byteslike("initialization_vector", initialization_vector)
        if len(initialization_vector) == 0:
            raise ValueError("initialization_vector must be at least 1 byte")
        self._initialization_vector = initialization_vector
        if tag is not None:
            utils._check_bytes("tag", tag)
            if min_tag_length < 4:
                raise ValueError("min_tag_length must be >= 4")
            if len(tag) < min_tag_length:
                raise ValueError(
                    "Authentication tag must be {} bytes or longer.".format(
                        min_tag_length
                    )
                )
        self._tag = tag
        self._min_tag_length = min_tag_length

    tag = utils.read_only_property("_tag")
    initialization_vector = utils.read_only_property("_initialization_vector")

    def validate_for_algorithm(self, algorithm):
        _check_aes_key_length(self, algorithm)
@ -0,0 +1,64 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

from __future__ import absolute_import, division, print_function

from cryptography import utils
from cryptography.exceptions import (
    AlreadyFinalized,
    UnsupportedAlgorithm,
    _Reasons,
)
from cryptography.hazmat.backends import _get_backend
from cryptography.hazmat.backends.interfaces import CMACBackend
from cryptography.hazmat.primitives import ciphers


class CMAC(object):
    def __init__(self, algorithm, backend=None, ctx=None):
        backend = _get_backend(backend)
        if not isinstance(backend, CMACBackend):
            raise UnsupportedAlgorithm(
                "Backend object does not implement CMACBackend.",
                _Reasons.BACKEND_MISSING_INTERFACE,
            )

        if not isinstance(algorithm, ciphers.BlockCipherAlgorithm):
            raise TypeError("Expected instance of BlockCipherAlgorithm.")
        self._algorithm = algorithm

        self._backend = backend
        if ctx is None:
            self._ctx = self._backend.create_cmac_ctx(self._algorithm)
        else:
            self._ctx = ctx

    def update(self, data):
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")

        utils._check_bytes("data", data)
        self._ctx.update(data)

    def finalize(self):
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        digest = self._ctx.finalize()
        self._ctx = None
        return digest

    def verify(self, signature):
        utils._check_bytes("signature", signature)
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")

        ctx, self._ctx = self._ctx, None
        ctx.verify(signature)

    def copy(self):
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        return CMAC(
            self._algorithm, backend=self._backend, ctx=self._ctx.copy()
        )
@ -0,0 +1,14 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

from __future__ import absolute_import, division, print_function

import hmac


def bytes_eq(a, b):
    if not isinstance(a, bytes) or not isinstance(b, bytes):
        raise TypeError("a and b must be bytes.")

    return hmac.compare_digest(a, b)
@ -0,0 +1,259 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

from __future__ import absolute_import, division, print_function

import abc

import six

from cryptography import utils
from cryptography.exceptions import (
    AlreadyFinalized,
    UnsupportedAlgorithm,
    _Reasons,
)
from cryptography.hazmat.backends import _get_backend
from cryptography.hazmat.backends.interfaces import HashBackend


@six.add_metaclass(abc.ABCMeta)
class HashAlgorithm(object):
    @abc.abstractproperty
    def name(self):
        """
        A string naming this algorithm (e.g. "sha256", "md5").
        """

    @abc.abstractproperty
    def digest_size(self):
        """
        The size of the resulting digest in bytes.
        """


@six.add_metaclass(abc.ABCMeta)
class HashContext(object):
    @abc.abstractproperty
    def algorithm(self):
        """
        A HashAlgorithm that will be used by this context.
        """

    @abc.abstractmethod
    def update(self, data):
        """
        Processes the provided bytes through the hash.
        """

    @abc.abstractmethod
    def finalize(self):
        """
        Finalizes the hash context and returns the hash digest as bytes.
        """

    @abc.abstractmethod
    def copy(self):
        """
        Return a HashContext that is a copy of the current context.
        """


@six.add_metaclass(abc.ABCMeta)
class ExtendableOutputFunction(object):
    """
    An interface for extendable output functions.
    """


@utils.register_interface(HashContext)
class Hash(object):
    def __init__(self, algorithm, backend=None, ctx=None):
        backend = _get_backend(backend)
        if not isinstance(backend, HashBackend):
            raise UnsupportedAlgorithm(
                "Backend object does not implement HashBackend.",
                _Reasons.BACKEND_MISSING_INTERFACE,
            )

        if not isinstance(algorithm, HashAlgorithm):
            raise TypeError("Expected instance of hashes.HashAlgorithm.")
        self._algorithm = algorithm

        self._backend = backend

        if ctx is None:
            self._ctx = self._backend.create_hash_ctx(self.algorithm)
        else:
            self._ctx = ctx

    algorithm = utils.read_only_property("_algorithm")

    def update(self, data):
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        utils._check_byteslike("data", data)
        self._ctx.update(data)

    def copy(self):
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        return Hash(
            self.algorithm, backend=self._backend, ctx=self._ctx.copy()
        )

    def finalize(self):
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        digest = self._ctx.finalize()
        self._ctx = None
        return digest


@utils.register_interface(HashAlgorithm)
class SHA1(object):
    name = "sha1"
    digest_size = 20
    block_size = 64


@utils.register_interface(HashAlgorithm)
class SHA512_224(object):  # noqa: N801
    name = "sha512-224"
    digest_size = 28
    block_size = 128


@utils.register_interface(HashAlgorithm)
class SHA512_256(object):  # noqa: N801
    name = "sha512-256"
    digest_size = 32
    block_size = 128


@utils.register_interface(HashAlgorithm)
class SHA224(object):
    name = "sha224"
    digest_size = 28
    block_size = 64


@utils.register_interface(HashAlgorithm)
class SHA256(object):
    name = "sha256"
    digest_size = 32
    block_size = 64


@utils.register_interface(HashAlgorithm)
class SHA384(object):
    name = "sha384"
    digest_size = 48
    block_size = 128


@utils.register_interface(HashAlgorithm)
class SHA512(object):
    name = "sha512"
    digest_size = 64
    block_size = 128


@utils.register_interface(HashAlgorithm)
class SHA3_224(object):  # noqa: N801
    name = "sha3-224"
    digest_size = 28


@utils.register_interface(HashAlgorithm)
class SHA3_256(object):  # noqa: N801
    name = "sha3-256"
    digest_size = 32


@utils.register_interface(HashAlgorithm)
class SHA3_384(object):  # noqa: N801
    name = "sha3-384"
    digest_size = 48


@utils.register_interface(HashAlgorithm)
class SHA3_512(object):  # noqa: N801
    name = "sha3-512"
    digest_size = 64


@utils.register_interface(HashAlgorithm)
@utils.register_interface(ExtendableOutputFunction)
class SHAKE128(object):
    name = "shake128"

    def __init__(self, digest_size):
        if not isinstance(digest_size, six.integer_types):
            raise TypeError("digest_size must be an integer")

        if digest_size < 1:
            raise ValueError("digest_size must be a positive integer")

        self._digest_size = digest_size

    digest_size = utils.read_only_property("_digest_size")


@utils.register_interface(HashAlgorithm)
@utils.register_interface(ExtendableOutputFunction)
class SHAKE256(object):
    name = "shake256"

    def __init__(self, digest_size):
        if not isinstance(digest_size, six.integer_types):
            raise TypeError("digest_size must be an integer")

        if digest_size < 1:
            raise ValueError("digest_size must be a positive integer")

        self._digest_size = digest_size

    digest_size = utils.read_only_property("_digest_size")


@utils.register_interface(HashAlgorithm)
class MD5(object):
    name = "md5"
    digest_size = 16
    block_size = 64


@utils.register_interface(HashAlgorithm)
class BLAKE2b(object):
    name = "blake2b"
    _max_digest_size = 64
    _min_digest_size = 1
    block_size = 128

    def __init__(self, digest_size):

        if digest_size != 64:
            raise ValueError("Digest size must be 64")

        self._digest_size = digest_size

    digest_size = utils.read_only_property("_digest_size")


@utils.register_interface(HashAlgorithm)
class BLAKE2s(object):
    name = "blake2s"
    block_size = 64
    _max_digest_size = 32
    _min_digest_size = 1

    def __init__(self, digest_size):

        if digest_size != 32:
            raise ValueError("Digest size must be 32")

        self._digest_size = digest_size

    digest_size = utils.read_only_property("_digest_size")
@ -0,0 +1,70 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

from __future__ import absolute_import, division, print_function

from cryptography import utils
from cryptography.exceptions import (
    AlreadyFinalized,
    UnsupportedAlgorithm,
    _Reasons,
)
from cryptography.hazmat.backends import _get_backend
from cryptography.hazmat.backends.interfaces import HMACBackend
from cryptography.hazmat.primitives import hashes


@utils.register_interface(hashes.HashContext)
class HMAC(object):
    def __init__(self, key, algorithm, backend=None, ctx=None):
        backend = _get_backend(backend)
        if not isinstance(backend, HMACBackend):
            raise UnsupportedAlgorithm(
                "Backend object does not implement HMACBackend.",
                _Reasons.BACKEND_MISSING_INTERFACE,
            )

        if not isinstance(algorithm, hashes.HashAlgorithm):
            raise TypeError("Expected instance of hashes.HashAlgorithm.")
        self._algorithm = algorithm

        self._backend = backend
        self._key = key
        if ctx is None:
            self._ctx = self._backend.create_hmac_ctx(key, self.algorithm)
        else:
            self._ctx = ctx

    algorithm = utils.read_only_property("_algorithm")

    def update(self, data):
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        utils._check_byteslike("data", data)
        self._ctx.update(data)

    def copy(self):
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        return HMAC(
            self._key,
            self.algorithm,
            backend=self._backend,
            ctx=self._ctx.copy(),
        )

    def finalize(self):
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        digest = self._ctx.finalize()
        self._ctx = None
        return digest

    def verify(self, signature):
        utils._check_bytes("signature", signature)
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")

        ctx, self._ctx = self._ctx, None
        ctx.verify(signature)
@ -0,0 +1,26 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

from __future__ import absolute_import, division, print_function

import abc

import six


@six.add_metaclass(abc.ABCMeta)
class KeyDerivationFunction(object):
    @abc.abstractmethod
    def derive(self, key_material):
        """
        Deterministically generates and returns a new key based on the existing
        key material.
        """

    @abc.abstractmethod
    def verify(self, key_material, expected_key):
        """
        Checks whether the key generated by the key material matches the
        expected derived key. Raises an exception if they do not match.
        """
@ -0,0 +1,131 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

from __future__ import absolute_import, division, print_function

import struct

from cryptography import utils
from cryptography.exceptions import (
    AlreadyFinalized,
    InvalidKey,
    UnsupportedAlgorithm,
    _Reasons,
)
from cryptography.hazmat.backends import _get_backend
from cryptography.hazmat.backends.interfaces import HMACBackend
from cryptography.hazmat.backends.interfaces import HashBackend
from cryptography.hazmat.primitives import constant_time, hashes, hmac
from cryptography.hazmat.primitives.kdf import KeyDerivationFunction


def _int_to_u32be(n):
    return struct.pack(">I", n)


def _common_args_checks(algorithm, length, otherinfo):
    max_length = algorithm.digest_size * (2 ** 32 - 1)
    if length > max_length:
        raise ValueError(
            "Can not derive keys larger than {} bits.".format(max_length)
        )
    if otherinfo is not None:
        utils._check_bytes("otherinfo", otherinfo)


def _concatkdf_derive(key_material, length, auxfn, otherinfo):
    utils._check_byteslike("key_material", key_material)
    output = [b""]
    outlen = 0
    counter = 1

    while length > outlen:
        h = auxfn()
        h.update(_int_to_u32be(counter))
        h.update(key_material)
        h.update(otherinfo)
        output.append(h.finalize())
        outlen += len(output[-1])
        counter += 1

    return b"".join(output)[:length]


@utils.register_interface(KeyDerivationFunction)
class ConcatKDFHash(object):
    def __init__(self, algorithm, length, otherinfo, backend=None):
        backend = _get_backend(backend)

        _common_args_checks(algorithm, length, otherinfo)
        self._algorithm = algorithm
        self._length = length
        self._otherinfo = otherinfo
        if self._otherinfo is None:
            self._otherinfo = b""

        if not isinstance(backend, HashBackend):
            raise UnsupportedAlgorithm(
                "Backend object does not implement HashBackend.",
                _Reasons.BACKEND_MISSING_INTERFACE,
            )
        self._backend = backend
        self._used = False

    def _hash(self):
        return hashes.Hash(self._algorithm, self._backend)

    def derive(self, key_material):
        if self._used:
            raise AlreadyFinalized
        self._used = True
        return _concatkdf_derive(
            key_material, self._length, self._hash, self._otherinfo
        )

    def verify(self, key_material, expected_key):
        if not constant_time.bytes_eq(self.derive(key_material), expected_key):
            raise InvalidKey


@utils.register_interface(KeyDerivationFunction)
class ConcatKDFHMAC(object):
    def __init__(self, algorithm, length, salt, otherinfo, backend=None):
        backend = _get_backend(backend)

        _common_args_checks(algorithm, length, otherinfo)
        self._algorithm = algorithm
        self._length = length
        self._otherinfo = otherinfo
        if self._otherinfo is None:
            self._otherinfo = b""

        if salt is None:
            salt = b"\x00" * algorithm.block_size
        else:
            utils._check_bytes("salt", salt)

        self._salt = salt

        if not isinstance(backend, HMACBackend):
            raise UnsupportedAlgorithm(
                "Backend object does not implement HMACBackend.",
                _Reasons.BACKEND_MISSING_INTERFACE,
            )
        self._backend = backend
        self._used = False

    def _hmac(self):
        return hmac.HMAC(self._salt, self._algorithm, self._backend)

    def derive(self, key_material):
        if self._used:
            raise AlreadyFinalized
        self._used = True
        return _concatkdf_derive(
            key_material, self._length, self._hmac, self._otherinfo
        )

    def verify(self, key_material, expected_key):
        if not constant_time.bytes_eq(self.derive(key_material), expected_key):
            raise InvalidKey
@ -0,0 +1,115 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

from __future__ import absolute_import, division, print_function

import six

from cryptography import utils
from cryptography.exceptions import (
    AlreadyFinalized,
    InvalidKey,
    UnsupportedAlgorithm,
    _Reasons,
)
from cryptography.hazmat.backends import _get_backend
from cryptography.hazmat.backends.interfaces import HMACBackend
from cryptography.hazmat.primitives import constant_time, hmac
from cryptography.hazmat.primitives.kdf import KeyDerivationFunction


@utils.register_interface(KeyDerivationFunction)
class HKDF(object):
    def __init__(self, algorithm, length, salt, info, backend=None):
        backend = _get_backend(backend)
        if not isinstance(backend, HMACBackend):
            raise UnsupportedAlgorithm(
                "Backend object does not implement HMACBackend.",
                _Reasons.BACKEND_MISSING_INTERFACE,
            )

        self._algorithm = algorithm

        if salt is None:
            salt = b"\x00" * self._algorithm.digest_size
        else:
            utils._check_bytes("salt", salt)

        self._salt = salt

        self._backend = backend

        self._hkdf_expand = HKDFExpand(self._algorithm, length, info, backend)

    def _extract(self, key_material):
        h = hmac.HMAC(self._salt, self._algorithm, backend=self._backend)
        h.update(key_material)
        return h.finalize()

    def derive(self, key_material):
        utils._check_byteslike("key_material", key_material)
        return self._hkdf_expand.derive(self._extract(key_material))

    def verify(self, key_material, expected_key):
        if not constant_time.bytes_eq(self.derive(key_material), expected_key):
            raise InvalidKey


@utils.register_interface(KeyDerivationFunction)
class HKDFExpand(object):
    def __init__(self, algorithm, length, info, backend=None):
        backend = _get_backend(backend)
        if not isinstance(backend, HMACBackend):
            raise UnsupportedAlgorithm(
                "Backend object does not implement HMACBackend.",
                _Reasons.BACKEND_MISSING_INTERFACE,
            )

        self._algorithm = algorithm

        self._backend = backend

        max_length = 255 * algorithm.digest_size

        if length > max_length:
            raise ValueError(
                "Can not derive keys larger than {} octets.".format(max_length)
            )

        self._length = length

        if info is None:
            info = b""
        else:
            utils._check_bytes("info", info)

        self._info = info

        self._used = False

    def _expand(self, key_material):
        output = [b""]
        counter = 1

        while self._algorithm.digest_size * (len(output) - 1) < self._length:
            h = hmac.HMAC(key_material, self._algorithm, backend=self._backend)
            h.update(output[-1])
            h.update(self._info)
            h.update(six.int2byte(counter))
            output.append(h.finalize())
            counter += 1

        return b"".join(output)[: self._length]

    def derive(self, key_material):
        utils._check_byteslike("key_material", key_material)
        if self._used:
            raise AlreadyFinalized

        self._used = True
        return self._expand(key_material)

    def verify(self, key_material, expected_key):
        if not constant_time.bytes_eq(self.derive(key_material), expected_key):
            raise InvalidKey
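Editor's note: a minimal sketch of the HKDF extract-and-expand API above (not part of the committed file); each HKDF instance can only be used once, so verification uses a second instance with the same parameters.

    # Illustrative only.
    import os
    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.kdf.hkdf import HKDF

    salt = os.urandom(16)
    hkdf = HKDF(algorithm=hashes.SHA256(), length=32, salt=salt, info=b"handshake data")
    key = hkdf.derive(b"input keying material")
    HKDF(algorithm=hashes.SHA256(), length=32, salt=salt, info=b"handshake data").verify(
        b"input keying material", key
    )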
@ -0,0 +1,162 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from enum import Enum
|
||||
|
||||
from six.moves import range
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.exceptions import (
|
||||
AlreadyFinalized,
|
||||
InvalidKey,
|
||||
UnsupportedAlgorithm,
|
||||
_Reasons,
|
||||
)
|
||||
from cryptography.hazmat.backends import _get_backend
|
||||
from cryptography.hazmat.backends.interfaces import HMACBackend
|
||||
from cryptography.hazmat.primitives import constant_time, hashes, hmac
|
||||
from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
|
||||
|
||||
|
||||
class Mode(Enum):
|
||||
CounterMode = "ctr"
|
||||
|
||||
|
||||
class CounterLocation(Enum):
|
||||
BeforeFixed = "before_fixed"
|
||||
AfterFixed = "after_fixed"
|
||||
|
||||
|
||||
@utils.register_interface(KeyDerivationFunction)
|
||||
class KBKDFHMAC(object):
|
||||
def __init__(
|
||||
self,
|
||||
algorithm,
|
||||
mode,
|
||||
length,
|
||||
rlen,
|
||||
llen,
|
||||
location,
|
||||
label,
|
||||
context,
|
||||
fixed,
|
||||
backend=None,
|
||||
):
|
||||
backend = _get_backend(backend)
|
||||
if not isinstance(backend, HMACBackend):
|
||||
raise UnsupportedAlgorithm(
|
||||
"Backend object does not implement HMACBackend.",
|
||||
_Reasons.BACKEND_MISSING_INTERFACE,
|
||||
)
|
||||
|
||||
if not isinstance(algorithm, hashes.HashAlgorithm):
|
||||
raise UnsupportedAlgorithm(
|
||||
"Algorithm supplied is not a supported hash algorithm.",
|
||||
_Reasons.UNSUPPORTED_HASH,
|
||||
)
|
||||
|
||||
if not backend.hmac_supported(algorithm):
|
||||
raise UnsupportedAlgorithm(
|
||||
"Algorithm supplied is not a supported hmac algorithm.",
|
||||
_Reasons.UNSUPPORTED_HASH,
|
||||
)
|
||||
|
||||
if not isinstance(mode, Mode):
|
||||
raise TypeError("mode must be of type Mode")
|
||||
|
||||
if not isinstance(location, CounterLocation):
|
||||
raise TypeError("location must be of type CounterLocation")
|
||||
|
||||
if (label or context) and fixed:
|
||||
raise ValueError(
|
||||
"When supplying fixed data, " "label and context are ignored."
|
||||
)
|
||||
|
||||
if rlen is None or not self._valid_byte_length(rlen):
|
||||
raise ValueError("rlen must be between 1 and 4")
|
||||
|
||||
if llen is None and fixed is None:
|
||||
raise ValueError("Please specify an llen")
|
||||
|
||||
if llen is not None and not isinstance(llen, int):
|
||||
raise TypeError("llen must be an integer")
|
||||
|
||||
if label is None:
|
||||
label = b""
|
||||
|
||||
if context is None:
|
||||
context = b""
|
||||
|
||||
utils._check_bytes("label", label)
|
||||
utils._check_bytes("context", context)
|
||||
self._algorithm = algorithm
|
||||
self._mode = mode
|
||||
self._length = length
|
||||
self._rlen = rlen
|
||||
self._llen = llen
|
||||
self._location = location
|
||||
self._label = label
|
||||
self._context = context
|
||||
self._backend = backend
|
||||
self._used = False
|
||||
self._fixed_data = fixed
|
||||
|
||||
def _valid_byte_length(self, value):
|
||||
if not isinstance(value, int):
|
||||
raise TypeError("value must be of type int")
|
||||
|
||||
value_bin = utils.int_to_bytes(1, value)
|
||||
if not 1 <= len(value_bin) <= 4:
|
||||
return False
|
||||
return True
|
||||
|
||||
def derive(self, key_material):
|
||||
if self._used:
|
||||
raise AlreadyFinalized
|
||||
|
||||
utils._check_byteslike("key_material", key_material)
|
||||
self._used = True
|
||||
|
||||
# inverse floor division (equivalent to ceiling)
|
||||
rounds = -(-self._length // self._algorithm.digest_size)
|
||||
|
||||
output = [b""]
|
||||
|
||||
# For counter mode, the number of iterations shall not be
|
||||
# larger than 2^r-1, where r <= 32 is the binary length of the counter
|
||||
# This ensures that the counter values used as an input to the
|
||||
# PRF will not repeat during a particular call to the KDF function.
|
||||
r_bin = utils.int_to_bytes(1, self._rlen)
|
||||
if rounds > pow(2, len(r_bin) * 8) - 1:
|
||||
raise ValueError("There are too many iterations.")
|
||||
|
||||
for i in range(1, rounds + 1):
|
||||
h = hmac.HMAC(key_material, self._algorithm, backend=self._backend)
|
||||
|
||||
counter = utils.int_to_bytes(i, self._rlen)
|
||||
if self._location == CounterLocation.BeforeFixed:
|
||||
h.update(counter)
|
||||
|
||||
h.update(self._generate_fixed_input())
|
||||
|
||||
if self._location == CounterLocation.AfterFixed:
|
||||
h.update(counter)
|
||||
|
||||
output.append(h.finalize())
|
||||
|
||||
return b"".join(output)[: self._length]
|
||||
|
||||
def _generate_fixed_input(self):
|
||||
if self._fixed_data and isinstance(self._fixed_data, bytes):
|
||||
return self._fixed_data
|
||||
|
||||
l_val = utils.int_to_bytes(self._length * 8, self._llen)
|
||||
|
||||
return b"".join([self._label, b"\x00", self._context, l_val])
|
||||
|
||||
def verify(self, key_material, expected_key):
|
||||
if not constant_time.bytes_eq(self.derive(key_material), expected_key):
|
||||
raise InvalidKey
|
||||
|
|
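A minimal usage sketch for the KBKDFHMAC class above (not part of the upstream diff): it derives a 32-byte key in counter mode with the default backend; the label and context values are illustrative placeholders.

from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.kbkdf import (
    CounterLocation, KBKDFHMAC, Mode,
)

kdf = KBKDFHMAC(
    algorithm=hashes.SHA256(),
    mode=Mode.CounterMode,          # counter mode as defined in this module
    length=32,
    rlen=4,
    llen=4,
    location=CounterLocation.BeforeFixed,
    label=b"KBKDF HMAC Label",      # illustrative label
    context=b"KBKDF HMAC Context",  # illustrative context
    fixed=None,
)
key = kdf.derive(b"input key material")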
@@ -0,0 +1,62 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.exceptions import (
|
||||
AlreadyFinalized,
|
||||
InvalidKey,
|
||||
UnsupportedAlgorithm,
|
||||
_Reasons,
|
||||
)
|
||||
from cryptography.hazmat.backends import _get_backend
|
||||
from cryptography.hazmat.backends.interfaces import PBKDF2HMACBackend
|
||||
from cryptography.hazmat.primitives import constant_time
|
||||
from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
|
||||
|
||||
|
||||
@utils.register_interface(KeyDerivationFunction)
|
||||
class PBKDF2HMAC(object):
|
||||
def __init__(self, algorithm, length, salt, iterations, backend=None):
|
||||
backend = _get_backend(backend)
|
||||
if not isinstance(backend, PBKDF2HMACBackend):
|
||||
raise UnsupportedAlgorithm(
|
||||
"Backend object does not implement PBKDF2HMACBackend.",
|
||||
_Reasons.BACKEND_MISSING_INTERFACE,
|
||||
)
|
||||
|
||||
if not backend.pbkdf2_hmac_supported(algorithm):
|
||||
raise UnsupportedAlgorithm(
|
||||
"{} is not supported for PBKDF2 by this backend.".format(
|
||||
algorithm.name
|
||||
),
|
||||
_Reasons.UNSUPPORTED_HASH,
|
||||
)
|
||||
self._used = False
|
||||
self._algorithm = algorithm
|
||||
self._length = length
|
||||
utils._check_bytes("salt", salt)
|
||||
self._salt = salt
|
||||
self._iterations = iterations
|
||||
self._backend = backend
|
||||
|
||||
def derive(self, key_material):
|
||||
if self._used:
|
||||
raise AlreadyFinalized("PBKDF2 instances can only be used once.")
|
||||
self._used = True
|
||||
|
||||
utils._check_byteslike("key_material", key_material)
|
||||
return self._backend.derive_pbkdf2_hmac(
|
||||
self._algorithm,
|
||||
self._length,
|
||||
self._salt,
|
||||
self._iterations,
|
||||
key_material,
|
||||
)
|
||||
|
||||
def verify(self, key_material, expected_key):
|
||||
derived_key = self.derive(key_material)
|
||||
if not constant_time.bytes_eq(derived_key, expected_key):
|
||||
raise InvalidKey("Keys do not match.")
|
||||
|
|
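A minimal usage sketch for PBKDF2HMAC (not part of the upstream diff): because derive() marks the instance as used, verification needs a fresh instance with the same parameters; the salt and iteration count shown are illustrative.

import os

from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC

salt = os.urandom(16)
kdf = PBKDF2HMAC(algorithm=hashes.SHA256(), length=32, salt=salt, iterations=390000)
key = kdf.derive(b"my great password")

# a second instance is required because PBKDF2HMAC objects are single-use
PBKDF2HMAC(
    algorithm=hashes.SHA256(), length=32, salt=salt, iterations=390000
).verify(b"my great password", key)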
@@ -0,0 +1,68 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import sys
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.exceptions import (
|
||||
AlreadyFinalized,
|
||||
InvalidKey,
|
||||
UnsupportedAlgorithm,
|
||||
_Reasons,
|
||||
)
|
||||
from cryptography.hazmat.backends import _get_backend
|
||||
from cryptography.hazmat.backends.interfaces import ScryptBackend
|
||||
from cryptography.hazmat.primitives import constant_time
|
||||
from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
|
||||
|
||||
|
||||
# This is used by the scrypt tests to skip tests that require more memory
|
||||
# than the MEM_LIMIT
|
||||
_MEM_LIMIT = sys.maxsize // 2
|
||||
|
||||
|
||||
@utils.register_interface(KeyDerivationFunction)
|
||||
class Scrypt(object):
|
||||
def __init__(self, salt, length, n, r, p, backend=None):
|
||||
backend = _get_backend(backend)
|
||||
if not isinstance(backend, ScryptBackend):
|
||||
raise UnsupportedAlgorithm(
|
||||
"Backend object does not implement ScryptBackend.",
|
||||
_Reasons.BACKEND_MISSING_INTERFACE,
|
||||
)
|
||||
|
||||
self._length = length
|
||||
utils._check_bytes("salt", salt)
|
||||
if n < 2 or (n & (n - 1)) != 0:
|
||||
raise ValueError("n must be greater than 1 and be a power of 2.")
|
||||
|
||||
if r < 1:
|
||||
raise ValueError("r must be greater than or equal to 1.")
|
||||
|
||||
if p < 1:
|
||||
raise ValueError("p must be greater than or equal to 1.")
|
||||
|
||||
self._used = False
|
||||
self._salt = salt
|
||||
self._n = n
|
||||
self._r = r
|
||||
self._p = p
|
||||
self._backend = backend
|
||||
|
||||
def derive(self, key_material):
|
||||
if self._used:
|
||||
raise AlreadyFinalized("Scrypt instances can only be used once.")
|
||||
self._used = True
|
||||
|
||||
utils._check_byteslike("key_material", key_material)
|
||||
return self._backend.derive_scrypt(
|
||||
key_material, self._salt, self._length, self._n, self._r, self._p
|
||||
)
|
||||
|
||||
def verify(self, key_material, expected_key):
|
||||
derived_key = self.derive(key_material)
|
||||
if not constant_time.bytes_eq(derived_key, expected_key):
|
||||
raise InvalidKey("Keys do not match.")
|
||||
|
|
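A minimal usage sketch for Scrypt (not part of the upstream diff): n must be a power of two greater than one and r and p must be at least 1, as enforced by the constructor above; the cost parameters shown are illustrative.

import os

from cryptography.hazmat.primitives.kdf.scrypt import Scrypt

salt = os.urandom(16)
kdf = Scrypt(salt=salt, length=32, n=2 ** 14, r=8, p=1)
key = kdf.derive(b"my great password")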
@@ -0,0 +1,74 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import struct
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.exceptions import (
|
||||
AlreadyFinalized,
|
||||
InvalidKey,
|
||||
UnsupportedAlgorithm,
|
||||
_Reasons,
|
||||
)
|
||||
from cryptography.hazmat.backends import _get_backend
|
||||
from cryptography.hazmat.backends.interfaces import HashBackend
|
||||
from cryptography.hazmat.primitives import constant_time, hashes
|
||||
from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
|
||||
|
||||
|
||||
def _int_to_u32be(n):
|
||||
return struct.pack(">I", n)
|
||||
|
||||
|
||||
@utils.register_interface(KeyDerivationFunction)
|
||||
class X963KDF(object):
|
||||
def __init__(self, algorithm, length, sharedinfo, backend=None):
|
||||
backend = _get_backend(backend)
|
||||
|
||||
max_len = algorithm.digest_size * (2 ** 32 - 1)
|
||||
if length > max_len:
|
||||
raise ValueError(
|
||||
"Can not derive keys larger than {} bits.".format(max_len)
|
||||
)
|
||||
if sharedinfo is not None:
|
||||
utils._check_bytes("sharedinfo", sharedinfo)
|
||||
|
||||
self._algorithm = algorithm
|
||||
self._length = length
|
||||
self._sharedinfo = sharedinfo
|
||||
|
||||
if not isinstance(backend, HashBackend):
|
||||
raise UnsupportedAlgorithm(
|
||||
"Backend object does not implement HashBackend.",
|
||||
_Reasons.BACKEND_MISSING_INTERFACE,
|
||||
)
|
||||
self._backend = backend
|
||||
self._used = False
|
||||
|
||||
def derive(self, key_material):
|
||||
if self._used:
|
||||
raise AlreadyFinalized
|
||||
self._used = True
|
||||
utils._check_byteslike("key_material", key_material)
|
||||
output = [b""]
|
||||
outlen = 0
|
||||
counter = 1
|
||||
|
||||
while self._length > outlen:
|
||||
h = hashes.Hash(self._algorithm, self._backend)
|
||||
h.update(key_material)
|
||||
h.update(_int_to_u32be(counter))
|
||||
if self._sharedinfo is not None:
|
||||
h.update(self._sharedinfo)
|
||||
output.append(h.finalize())
|
||||
outlen += len(output[-1])
|
||||
counter += 1
|
||||
|
||||
return b"".join(output)[: self._length]
|
||||
|
||||
def verify(self, key_material, expected_key):
|
||||
if not constant_time.bytes_eq(self.derive(key_material), expected_key):
|
||||
raise InvalidKey
|
||||
|
|
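A minimal usage sketch for X963KDF (not part of the upstream diff): the key material would normally be a shared secret such as an ECDH output, and sharedinfo is optional; both values here are illustrative.

from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.x963kdf import X963KDF

xkdf = X963KDF(algorithm=hashes.SHA256(), length=32, sharedinfo=b"ANSI X9.63 Example")
key = xkdf.derive(b"input key material")  # e.g. an ECDH shared secret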
@@ -0,0 +1,161 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import struct
|
||||
|
||||
from cryptography.hazmat.backends import _get_backend
|
||||
from cryptography.hazmat.primitives.ciphers import Cipher
|
||||
from cryptography.hazmat.primitives.ciphers.algorithms import AES
|
||||
from cryptography.hazmat.primitives.ciphers.modes import ECB
|
||||
from cryptography.hazmat.primitives.constant_time import bytes_eq
|
||||
|
||||
|
||||
def _wrap_core(wrapping_key, a, r, backend):
|
||||
# RFC 3394 Key Wrap - 2.2.1 (index method)
|
||||
encryptor = Cipher(AES(wrapping_key), ECB(), backend).encryptor()
|
||||
n = len(r)
|
||||
for j in range(6):
|
||||
for i in range(n):
|
||||
# every encryption operation is a discrete 16 byte chunk (because
|
||||
# AES has a 128-bit block size) and since we're using ECB it is
|
||||
# safe to reuse the encryptor for the entire operation
|
||||
b = encryptor.update(a + r[i])
|
||||
# pack/unpack are safe as these are always 64-bit chunks
|
||||
a = struct.pack(
|
||||
">Q", struct.unpack(">Q", b[:8])[0] ^ ((n * j) + i + 1)
|
||||
)
|
||||
r[i] = b[-8:]
|
||||
|
||||
assert encryptor.finalize() == b""
|
||||
|
||||
return a + b"".join(r)
|
||||
|
||||
|
||||
def aes_key_wrap(wrapping_key, key_to_wrap, backend=None):
|
||||
backend = _get_backend(backend)
|
||||
if len(wrapping_key) not in [16, 24, 32]:
|
||||
raise ValueError("The wrapping key must be a valid AES key length")
|
||||
|
||||
if len(key_to_wrap) < 16:
|
||||
raise ValueError("The key to wrap must be at least 16 bytes")
|
||||
|
||||
if len(key_to_wrap) % 8 != 0:
|
||||
raise ValueError("The key to wrap must be a multiple of 8 bytes")
|
||||
|
||||
a = b"\xa6\xa6\xa6\xa6\xa6\xa6\xa6\xa6"
|
||||
r = [key_to_wrap[i : i + 8] for i in range(0, len(key_to_wrap), 8)]
|
||||
return _wrap_core(wrapping_key, a, r, backend)
|
||||
|
||||
|
||||
def _unwrap_core(wrapping_key, a, r, backend):
|
||||
# Implement RFC 3394 Key Unwrap - 2.2.2 (index method)
|
||||
decryptor = Cipher(AES(wrapping_key), ECB(), backend).decryptor()
|
||||
n = len(r)
|
||||
for j in reversed(range(6)):
|
||||
for i in reversed(range(n)):
|
||||
# pack/unpack are safe as these are always 64-bit chunks
|
||||
atr = (
|
||||
struct.pack(
|
||||
">Q", struct.unpack(">Q", a)[0] ^ ((n * j) + i + 1)
|
||||
)
|
||||
+ r[i]
|
||||
)
|
||||
# every decryption operation is a discrete 16 byte chunk so
|
||||
# it is safe to reuse the decryptor for the entire operation
|
||||
b = decryptor.update(atr)
|
||||
a = b[:8]
|
||||
r[i] = b[-8:]
|
||||
|
||||
assert decryptor.finalize() == b""
|
||||
return a, r
|
||||
|
||||
|
||||
def aes_key_wrap_with_padding(wrapping_key, key_to_wrap, backend=None):
|
||||
backend = _get_backend(backend)
|
||||
if len(wrapping_key) not in [16, 24, 32]:
|
||||
raise ValueError("The wrapping key must be a valid AES key length")
|
||||
|
||||
aiv = b"\xA6\x59\x59\xA6" + struct.pack(">i", len(key_to_wrap))
|
||||
# pad the key to wrap if necessary
|
||||
pad = (8 - (len(key_to_wrap) % 8)) % 8
|
||||
key_to_wrap = key_to_wrap + b"\x00" * pad
|
||||
if len(key_to_wrap) == 8:
|
||||
# RFC 5649 - 4.1 - exactly 8 octets after padding
|
||||
encryptor = Cipher(AES(wrapping_key), ECB(), backend).encryptor()
|
||||
b = encryptor.update(aiv + key_to_wrap)
|
||||
assert encryptor.finalize() == b""
|
||||
return b
|
||||
else:
|
||||
r = [key_to_wrap[i : i + 8] for i in range(0, len(key_to_wrap), 8)]
|
||||
return _wrap_core(wrapping_key, aiv, r, backend)
|
||||
|
||||
|
||||
def aes_key_unwrap_with_padding(wrapping_key, wrapped_key, backend=None):
|
||||
backend = _get_backend(backend)
|
||||
if len(wrapped_key) < 16:
|
||||
raise InvalidUnwrap("Must be at least 16 bytes")
|
||||
|
||||
if len(wrapping_key) not in [16, 24, 32]:
|
||||
raise ValueError("The wrapping key must be a valid AES key length")
|
||||
|
||||
if len(wrapped_key) == 16:
|
||||
# RFC 5649 - 4.2 - exactly two 64-bit blocks
|
||||
decryptor = Cipher(AES(wrapping_key), ECB(), backend).decryptor()
|
||||
b = decryptor.update(wrapped_key)
|
||||
assert decryptor.finalize() == b""
|
||||
a = b[:8]
|
||||
data = b[8:]
|
||||
n = 1
|
||||
else:
|
||||
r = [wrapped_key[i : i + 8] for i in range(0, len(wrapped_key), 8)]
|
||||
encrypted_aiv = r.pop(0)
|
||||
n = len(r)
|
||||
a, r = _unwrap_core(wrapping_key, encrypted_aiv, r, backend)
|
||||
data = b"".join(r)
|
||||
|
||||
# 1) Check that MSB(32,A) = A65959A6.
|
||||
# 2) Check that 8*(n-1) < LSB(32,A) <= 8*n. If so, let
|
||||
# MLI = LSB(32,A).
|
||||
# 3) Let b = (8*n)-MLI, and then check that the rightmost b octets of
|
||||
# the output data are zero.
|
||||
(mli,) = struct.unpack(">I", a[4:])
|
||||
b = (8 * n) - mli
|
||||
if (
|
||||
not bytes_eq(a[:4], b"\xa6\x59\x59\xa6")
|
||||
or not 8 * (n - 1) < mli <= 8 * n
|
||||
or (b != 0 and not bytes_eq(data[-b:], b"\x00" * b))
|
||||
):
|
||||
raise InvalidUnwrap()
|
||||
|
||||
if b == 0:
|
||||
return data
|
||||
else:
|
||||
return data[:-b]
|
||||
|
||||
|
||||
def aes_key_unwrap(wrapping_key, wrapped_key, backend=None):
|
||||
backend = _get_backend(backend)
|
||||
if len(wrapped_key) < 24:
|
||||
raise InvalidUnwrap("Must be at least 24 bytes")
|
||||
|
||||
if len(wrapped_key) % 8 != 0:
|
||||
raise InvalidUnwrap("The wrapped key must be a multiple of 8 bytes")
|
||||
|
||||
if len(wrapping_key) not in [16, 24, 32]:
|
||||
raise ValueError("The wrapping key must be a valid AES key length")
|
||||
|
||||
aiv = b"\xa6\xa6\xa6\xa6\xa6\xa6\xa6\xa6"
|
||||
r = [wrapped_key[i : i + 8] for i in range(0, len(wrapped_key), 8)]
|
||||
a = r.pop(0)
|
||||
a, r = _unwrap_core(wrapping_key, a, r, backend)
|
||||
if not bytes_eq(a, aiv):
|
||||
raise InvalidUnwrap()
|
||||
|
||||
return b"".join(r)
|
||||
|
||||
|
||||
class InvalidUnwrap(Exception):
|
||||
pass
|
||||
|
|
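A minimal usage sketch for the RFC 3394 helpers above (not part of the upstream diff): the key to wrap must be at least 16 bytes and a multiple of 8 unless the padded variants are used.

import os

from cryptography.hazmat.primitives.keywrap import aes_key_unwrap, aes_key_wrap

kek = os.urandom(32)          # key-encryption key, 16/24/32 bytes
key_to_wrap = os.urandom(32)  # >= 16 bytes and a multiple of 8
wrapped = aes_key_wrap(kek, key_to_wrap)
assert aes_key_unwrap(kek, wrapped) == key_to_wrap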
@@ -0,0 +1,208 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import abc
|
||||
|
||||
import six
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.exceptions import AlreadyFinalized
|
||||
from cryptography.hazmat.bindings._padding import lib
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class PaddingContext(object):
|
||||
@abc.abstractmethod
|
||||
def update(self, data):
|
||||
"""
|
||||
Pads the provided bytes and returns any available data as bytes.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def finalize(self):
|
||||
"""
|
||||
Finalize the padding, returns bytes.
|
||||
"""
|
||||
|
||||
|
||||
def _byte_padding_check(block_size):
|
||||
if not (0 <= block_size <= 2040):
|
||||
raise ValueError("block_size must be in range(0, 2041).")
|
||||
|
||||
if block_size % 8 != 0:
|
||||
raise ValueError("block_size must be a multiple of 8.")
|
||||
|
||||
|
||||
def _byte_padding_update(buffer_, data, block_size):
|
||||
if buffer_ is None:
|
||||
raise AlreadyFinalized("Context was already finalized.")
|
||||
|
||||
utils._check_byteslike("data", data)
|
||||
|
||||
buffer_ += bytes(data)
|
||||
|
||||
finished_blocks = len(buffer_) // (block_size // 8)
|
||||
|
||||
result = buffer_[: finished_blocks * (block_size // 8)]
|
||||
buffer_ = buffer_[finished_blocks * (block_size // 8) :]
|
||||
|
||||
return buffer_, result
|
||||
|
||||
|
||||
def _byte_padding_pad(buffer_, block_size, paddingfn):
|
||||
if buffer_ is None:
|
||||
raise AlreadyFinalized("Context was already finalized.")
|
||||
|
||||
pad_size = block_size // 8 - len(buffer_)
|
||||
return buffer_ + paddingfn(pad_size)
|
||||
|
||||
|
||||
def _byte_unpadding_update(buffer_, data, block_size):
|
||||
if buffer_ is None:
|
||||
raise AlreadyFinalized("Context was already finalized.")
|
||||
|
||||
utils._check_byteslike("data", data)
|
||||
|
||||
buffer_ += bytes(data)
|
||||
|
||||
finished_blocks = max(len(buffer_) // (block_size // 8) - 1, 0)
|
||||
|
||||
result = buffer_[: finished_blocks * (block_size // 8)]
|
||||
buffer_ = buffer_[finished_blocks * (block_size // 8) :]
|
||||
|
||||
return buffer_, result
|
||||
|
||||
|
||||
def _byte_unpadding_check(buffer_, block_size, checkfn):
|
||||
if buffer_ is None:
|
||||
raise AlreadyFinalized("Context was already finalized.")
|
||||
|
||||
if len(buffer_) != block_size // 8:
|
||||
raise ValueError("Invalid padding bytes.")
|
||||
|
||||
valid = checkfn(buffer_, block_size // 8)
|
||||
|
||||
if not valid:
|
||||
raise ValueError("Invalid padding bytes.")
|
||||
|
||||
pad_size = six.indexbytes(buffer_, -1)
|
||||
return buffer_[:-pad_size]
|
||||
|
||||
|
||||
class PKCS7(object):
|
||||
def __init__(self, block_size):
|
||||
_byte_padding_check(block_size)
|
||||
self.block_size = block_size
|
||||
|
||||
def padder(self):
|
||||
return _PKCS7PaddingContext(self.block_size)
|
||||
|
||||
def unpadder(self):
|
||||
return _PKCS7UnpaddingContext(self.block_size)
|
||||
|
||||
|
||||
@utils.register_interface(PaddingContext)
|
||||
class _PKCS7PaddingContext(object):
|
||||
def __init__(self, block_size):
|
||||
self.block_size = block_size
|
||||
# TODO: more copies than necessary, we should use zero-buffer (#193)
|
||||
self._buffer = b""
|
||||
|
||||
def update(self, data):
|
||||
self._buffer, result = _byte_padding_update(
|
||||
self._buffer, data, self.block_size
|
||||
)
|
||||
return result
|
||||
|
||||
def _padding(self, size):
|
||||
return six.int2byte(size) * size
|
||||
|
||||
def finalize(self):
|
||||
result = _byte_padding_pad(
|
||||
self._buffer, self.block_size, self._padding
|
||||
)
|
||||
self._buffer = None
|
||||
return result
|
||||
|
||||
|
||||
@utils.register_interface(PaddingContext)
|
||||
class _PKCS7UnpaddingContext(object):
|
||||
def __init__(self, block_size):
|
||||
self.block_size = block_size
|
||||
# TODO: more copies than necessary, we should use zero-buffer (#193)
|
||||
self._buffer = b""
|
||||
|
||||
def update(self, data):
|
||||
self._buffer, result = _byte_unpadding_update(
|
||||
self._buffer, data, self.block_size
|
||||
)
|
||||
return result
|
||||
|
||||
def finalize(self):
|
||||
result = _byte_unpadding_check(
|
||||
self._buffer, self.block_size, lib.Cryptography_check_pkcs7_padding
|
||||
)
|
||||
self._buffer = None
|
||||
return result
|
||||
|
||||
|
||||
class ANSIX923(object):
|
||||
def __init__(self, block_size):
|
||||
_byte_padding_check(block_size)
|
||||
self.block_size = block_size
|
||||
|
||||
def padder(self):
|
||||
return _ANSIX923PaddingContext(self.block_size)
|
||||
|
||||
def unpadder(self):
|
||||
return _ANSIX923UnpaddingContext(self.block_size)
|
||||
|
||||
|
||||
@utils.register_interface(PaddingContext)
|
||||
class _ANSIX923PaddingContext(object):
|
||||
def __init__(self, block_size):
|
||||
self.block_size = block_size
|
||||
# TODO: more copies than necessary, we should use zero-buffer (#193)
|
||||
self._buffer = b""
|
||||
|
||||
def update(self, data):
|
||||
self._buffer, result = _byte_padding_update(
|
||||
self._buffer, data, self.block_size
|
||||
)
|
||||
return result
|
||||
|
||||
def _padding(self, size):
|
||||
return six.int2byte(0) * (size - 1) + six.int2byte(size)
|
||||
|
||||
def finalize(self):
|
||||
result = _byte_padding_pad(
|
||||
self._buffer, self.block_size, self._padding
|
||||
)
|
||||
self._buffer = None
|
||||
return result
|
||||
|
||||
|
||||
@utils.register_interface(PaddingContext)
|
||||
class _ANSIX923UnpaddingContext(object):
|
||||
def __init__(self, block_size):
|
||||
self.block_size = block_size
|
||||
# TODO: more copies than necessary, we should use zero-buffer (#193)
|
||||
self._buffer = b""
|
||||
|
||||
def update(self, data):
|
||||
self._buffer, result = _byte_unpadding_update(
|
||||
self._buffer, data, self.block_size
|
||||
)
|
||||
return result
|
||||
|
||||
def finalize(self):
|
||||
result = _byte_unpadding_check(
|
||||
self._buffer,
|
||||
self.block_size,
|
||||
lib.Cryptography_check_ansix923_padding,
|
||||
)
|
||||
self._buffer = None
|
||||
return result
|
||||
|
|
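A minimal usage sketch for the PKCS7 padding context above (not part of the upstream diff); block_size is given in bits, so 128 matches the AES block size, and ANSIX923 is used the same way.

from cryptography.hazmat.primitives import padding

padder = padding.PKCS7(128).padder()
padded = padder.update(b"1111111111") + padder.finalize()

unpadder = padding.PKCS7(128).unpadder()
assert unpadder.update(padded) + unpadder.finalize() == b"1111111111"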
@@ -0,0 +1,58 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.exceptions import (
|
||||
AlreadyFinalized,
|
||||
UnsupportedAlgorithm,
|
||||
_Reasons,
|
||||
)
|
||||
|
||||
|
||||
class Poly1305(object):
|
||||
def __init__(self, key):
|
||||
from cryptography.hazmat.backends.openssl.backend import backend
|
||||
|
||||
if not backend.poly1305_supported():
|
||||
raise UnsupportedAlgorithm(
|
||||
"poly1305 is not supported by this version of OpenSSL.",
|
||||
_Reasons.UNSUPPORTED_MAC,
|
||||
)
|
||||
self._ctx = backend.create_poly1305_ctx(key)
|
||||
|
||||
def update(self, data):
|
||||
if self._ctx is None:
|
||||
raise AlreadyFinalized("Context was already finalized.")
|
||||
utils._check_byteslike("data", data)
|
||||
self._ctx.update(data)
|
||||
|
||||
def finalize(self):
|
||||
if self._ctx is None:
|
||||
raise AlreadyFinalized("Context was already finalized.")
|
||||
mac = self._ctx.finalize()
|
||||
self._ctx = None
|
||||
return mac
|
||||
|
||||
def verify(self, tag):
|
||||
utils._check_bytes("tag", tag)
|
||||
if self._ctx is None:
|
||||
raise AlreadyFinalized("Context was already finalized.")
|
||||
|
||||
ctx, self._ctx = self._ctx, None
|
||||
ctx.verify(tag)
|
||||
|
||||
@classmethod
|
||||
def generate_tag(cls, key, data):
|
||||
p = Poly1305(key)
|
||||
p.update(data)
|
||||
return p.finalize()
|
||||
|
||||
@classmethod
|
||||
def verify_tag(cls, key, data, tag):
|
||||
p = Poly1305(key)
|
||||
p.update(data)
|
||||
p.verify(tag)
|
||||
|
|
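A minimal usage sketch for Poly1305 (not part of the upstream diff): it requires an OpenSSL build with poly1305 support, the 32-byte key must never be reused for a second message, and verification raises on mismatch.

import os

from cryptography.hazmat.primitives import poly1305

key = os.urandom(32)  # one-time key
tag = poly1305.Poly1305.generate_tag(key, b"message to authenticate")
poly1305.Poly1305.verify_tag(key, b"message to authenticate", tag)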
@@ -0,0 +1,44 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from cryptography.hazmat.primitives.serialization.base import (
|
||||
BestAvailableEncryption,
|
||||
Encoding,
|
||||
KeySerializationEncryption,
|
||||
NoEncryption,
|
||||
ParameterFormat,
|
||||
PrivateFormat,
|
||||
PublicFormat,
|
||||
load_der_parameters,
|
||||
load_der_private_key,
|
||||
load_der_public_key,
|
||||
load_pem_parameters,
|
||||
load_pem_private_key,
|
||||
load_pem_public_key,
|
||||
)
|
||||
from cryptography.hazmat.primitives.serialization.ssh import (
|
||||
load_ssh_private_key,
|
||||
load_ssh_public_key,
|
||||
)
|
||||
|
||||
|
||||
__all__ = [
|
||||
"load_der_parameters",
|
||||
"load_der_private_key",
|
||||
"load_der_public_key",
|
||||
"load_pem_parameters",
|
||||
"load_pem_private_key",
|
||||
"load_pem_public_key",
|
||||
"load_ssh_private_key",
|
||||
"load_ssh_public_key",
|
||||
"Encoding",
|
||||
"PrivateFormat",
|
||||
"PublicFormat",
|
||||
"ParameterFormat",
|
||||
"KeySerializationEncryption",
|
||||
"BestAvailableEncryption",
|
||||
"NoEncryption",
|
||||
]
|
||||
|
|
@@ -0,0 +1,91 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import abc
|
||||
from enum import Enum
|
||||
|
||||
import six
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.hazmat.backends import _get_backend
|
||||
|
||||
|
||||
def load_pem_private_key(data, password, backend=None):
|
||||
backend = _get_backend(backend)
|
||||
return backend.load_pem_private_key(data, password)
|
||||
|
||||
|
||||
def load_pem_public_key(data, backend=None):
|
||||
backend = _get_backend(backend)
|
||||
return backend.load_pem_public_key(data)
|
||||
|
||||
|
||||
def load_pem_parameters(data, backend=None):
|
||||
backend = _get_backend(backend)
|
||||
return backend.load_pem_parameters(data)
|
||||
|
||||
|
||||
def load_der_private_key(data, password, backend=None):
|
||||
backend = _get_backend(backend)
|
||||
return backend.load_der_private_key(data, password)
|
||||
|
||||
|
||||
def load_der_public_key(data, backend=None):
|
||||
backend = _get_backend(backend)
|
||||
return backend.load_der_public_key(data)
|
||||
|
||||
|
||||
def load_der_parameters(data, backend=None):
|
||||
backend = _get_backend(backend)
|
||||
return backend.load_der_parameters(data)
|
||||
|
||||
|
||||
class Encoding(Enum):
|
||||
PEM = "PEM"
|
||||
DER = "DER"
|
||||
OpenSSH = "OpenSSH"
|
||||
Raw = "Raw"
|
||||
X962 = "ANSI X9.62"
|
||||
SMIME = "S/MIME"
|
||||
|
||||
|
||||
class PrivateFormat(Enum):
|
||||
PKCS8 = "PKCS8"
|
||||
TraditionalOpenSSL = "TraditionalOpenSSL"
|
||||
Raw = "Raw"
|
||||
OpenSSH = "OpenSSH"
|
||||
|
||||
|
||||
class PublicFormat(Enum):
|
||||
SubjectPublicKeyInfo = "X.509 subjectPublicKeyInfo with PKCS#1"
|
||||
PKCS1 = "Raw PKCS#1"
|
||||
OpenSSH = "OpenSSH"
|
||||
Raw = "Raw"
|
||||
CompressedPoint = "X9.62 Compressed Point"
|
||||
UncompressedPoint = "X9.62 Uncompressed Point"
|
||||
|
||||
|
||||
class ParameterFormat(Enum):
|
||||
PKCS3 = "PKCS3"
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class KeySerializationEncryption(object):
|
||||
pass
|
||||
|
||||
|
||||
@utils.register_interface(KeySerializationEncryption)
|
||||
class BestAvailableEncryption(object):
|
||||
def __init__(self, password):
|
||||
if not isinstance(password, bytes) or len(password) == 0:
|
||||
raise ValueError("Password must be 1 or more bytes.")
|
||||
|
||||
self.password = password
|
||||
|
||||
|
||||
@utils.register_interface(KeySerializationEncryption)
|
||||
class NoEncryption(object):
|
||||
pass
|
||||
|
|
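A minimal usage sketch for the loaders above (not part of the upstream diff); the file name is a hypothetical placeholder and password is None for an unencrypted key.

from cryptography.hazmat.primitives.serialization import load_pem_private_key

with open("key.pem", "rb") as f:  # hypothetical path
    private_key = load_pem_private_key(f.read(), password=None)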
@@ -0,0 +1,50 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from cryptography import x509
|
||||
from cryptography.hazmat.backends import _get_backend
|
||||
from cryptography.hazmat.primitives import serialization
|
||||
from cryptography.hazmat.primitives.asymmetric import dsa, ec, rsa
|
||||
|
||||
|
||||
def load_key_and_certificates(data, password, backend=None):
|
||||
backend = _get_backend(backend)
|
||||
return backend.load_key_and_certificates_from_pkcs12(data, password)
|
||||
|
||||
|
||||
def serialize_key_and_certificates(name, key, cert, cas, encryption_algorithm):
|
||||
if key is not None and not isinstance(
|
||||
key,
|
||||
(
|
||||
rsa.RSAPrivateKeyWithSerialization,
|
||||
dsa.DSAPrivateKeyWithSerialization,
|
||||
ec.EllipticCurvePrivateKeyWithSerialization,
|
||||
),
|
||||
):
|
||||
raise TypeError("Key must be RSA, DSA, or EllipticCurve private key.")
|
||||
if cert is not None and not isinstance(cert, x509.Certificate):
|
||||
raise TypeError("cert must be a certificate")
|
||||
|
||||
if cas is not None:
|
||||
cas = list(cas)
|
||||
if not all(isinstance(val, x509.Certificate) for val in cas):
|
||||
raise TypeError("all values in cas must be certificates")
|
||||
|
||||
if not isinstance(
|
||||
encryption_algorithm, serialization.KeySerializationEncryption
|
||||
):
|
||||
raise TypeError(
|
||||
"Key encryption algorithm must be a "
|
||||
"KeySerializationEncryption instance"
|
||||
)
|
||||
|
||||
if key is None and cert is None and not cas:
|
||||
raise ValueError("You must supply at least one of key, cert, or cas")
|
||||
|
||||
backend = _get_backend(None)
|
||||
return backend.serialize_key_and_certificates_to_pkcs12(
|
||||
name, key, cert, cas, encryption_algorithm
|
||||
)
|
||||
|
|
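A minimal usage sketch for load_key_and_certificates (not part of the upstream diff); the file name and password are hypothetical placeholders, and the password should be None for an unencrypted bundle.

from cryptography.hazmat.primitives.serialization import pkcs12

with open("bundle.p12", "rb") as f:  # hypothetical path
    key, cert, additional_certs = pkcs12.load_key_and_certificates(
        f.read(), b"p12 password"    # hypothetical password
    )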
@@ -0,0 +1,132 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from enum import Enum
|
||||
|
||||
from cryptography import x509
|
||||
from cryptography.hazmat.backends import _get_backend
|
||||
from cryptography.hazmat.primitives import hashes, serialization
|
||||
from cryptography.hazmat.primitives.asymmetric import ec, rsa
|
||||
from cryptography.utils import _check_byteslike
|
||||
|
||||
|
||||
def load_pem_pkcs7_certificates(data):
|
||||
backend = _get_backend(None)
|
||||
return backend.load_pem_pkcs7_certificates(data)
|
||||
|
||||
|
||||
def load_der_pkcs7_certificates(data):
|
||||
backend = _get_backend(None)
|
||||
return backend.load_der_pkcs7_certificates(data)
|
||||
|
||||
|
||||
class PKCS7SignatureBuilder(object):
|
||||
def __init__(self, data=None, signers=[], additional_certs=[]):
|
||||
self._data = data
|
||||
self._signers = signers
|
||||
self._additional_certs = additional_certs
|
||||
|
||||
def set_data(self, data):
|
||||
_check_byteslike("data", data)
|
||||
if self._data is not None:
|
||||
raise ValueError("data may only be set once")
|
||||
|
||||
return PKCS7SignatureBuilder(data, self._signers)
|
||||
|
||||
def add_signer(self, certificate, private_key, hash_algorithm):
|
||||
if not isinstance(
|
||||
hash_algorithm,
|
||||
(
|
||||
hashes.SHA1,
|
||||
hashes.SHA224,
|
||||
hashes.SHA256,
|
||||
hashes.SHA384,
|
||||
hashes.SHA512,
|
||||
),
|
||||
):
|
||||
raise TypeError(
|
||||
"hash_algorithm must be one of hashes.SHA1, SHA224, "
|
||||
"SHA256, SHA384, or SHA512"
|
||||
)
|
||||
if not isinstance(certificate, x509.Certificate):
|
||||
raise TypeError("certificate must be a x509.Certificate")
|
||||
|
||||
if not isinstance(
|
||||
private_key, (rsa.RSAPrivateKey, ec.EllipticCurvePrivateKey)
|
||||
):
|
||||
raise TypeError("Only RSA & EC keys are supported at this time.")
|
||||
|
||||
return PKCS7SignatureBuilder(
|
||||
self._data,
|
||||
self._signers + [(certificate, private_key, hash_algorithm)],
|
||||
)
|
||||
|
||||
def add_certificate(self, certificate):
|
||||
if not isinstance(certificate, x509.Certificate):
|
||||
raise TypeError("certificate must be a x509.Certificate")
|
||||
|
||||
return PKCS7SignatureBuilder(
|
||||
self._data, self._signers, self._additional_certs + [certificate]
|
||||
)
|
||||
|
||||
def sign(self, encoding, options, backend=None):
|
||||
if len(self._signers) == 0:
|
||||
raise ValueError("Must have at least one signer")
|
||||
if self._data is None:
|
||||
raise ValueError("You must add data to sign")
|
||||
options = list(options)
|
||||
if not all(isinstance(x, PKCS7Options) for x in options):
|
||||
raise ValueError("options must be from the PKCS7Options enum")
|
||||
if encoding not in (
|
||||
serialization.Encoding.PEM,
|
||||
serialization.Encoding.DER,
|
||||
serialization.Encoding.SMIME,
|
||||
):
|
||||
raise ValueError(
|
||||
"Must be PEM, DER, or SMIME from the Encoding enum"
|
||||
)
|
||||
|
||||
# Text is a meaningless option unless it is accompanied by
|
||||
# DetachedSignature
|
||||
if (
|
||||
PKCS7Options.Text in options
|
||||
and PKCS7Options.DetachedSignature not in options
|
||||
):
|
||||
raise ValueError(
|
||||
"When passing the Text option you must also pass "
|
||||
"DetachedSignature"
|
||||
)
|
||||
|
||||
if PKCS7Options.Text in options and encoding in (
|
||||
serialization.Encoding.DER,
|
||||
serialization.Encoding.PEM,
|
||||
):
|
||||
raise ValueError(
|
||||
"The Text option is only available for SMIME serialization"
|
||||
)
|
||||
|
||||
# No attributes implies no capabilities so we'll error if you try to
|
||||
# pass both.
|
||||
if (
|
||||
PKCS7Options.NoAttributes in options
|
||||
and PKCS7Options.NoCapabilities in options
|
||||
):
|
||||
raise ValueError(
|
||||
"NoAttributes is a superset of NoCapabilities. Do not pass "
|
||||
"both values."
|
||||
)
|
||||
|
||||
backend = _get_backend(backend)
|
||||
return backend.pkcs7_sign(self, encoding, options)
|
||||
|
||||
|
||||
class PKCS7Options(Enum):
|
||||
Text = "Add text/plain MIME type"
|
||||
Binary = "Don't translate input data into canonical MIME format"
|
||||
DetachedSignature = "Don't embed data in the PKCS7 structure"
|
||||
NoCapabilities = "Don't embed SMIME capabilities"
|
||||
NoAttributes = "Don't embed authenticatedAttributes"
|
||||
NoCerts = "Don't embed signer certificate"
|
||||
|
|
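A minimal usage sketch for PKCS7SignatureBuilder (not part of the upstream diff): cert and private_key are assumed placeholders for an x509.Certificate and its matching RSA or EC key loaded elsewhere, and the options respect the constraints checked in sign() above.

from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.serialization import pkcs7

# cert and private_key are assumed to have been loaded elsewhere
signature = (
    pkcs7.PKCS7SignatureBuilder()
    .set_data(b"data to sign")
    .add_signer(cert, private_key, hashes.SHA256())
    .sign(serialization.Encoding.SMIME, [pkcs7.PKCS7Options.DetachedSignature])
)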
@@ -0,0 +1,683 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import binascii
|
||||
import os
|
||||
import re
|
||||
import struct
|
||||
|
||||
import six
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.exceptions import UnsupportedAlgorithm
|
||||
from cryptography.hazmat.backends import _get_backend
|
||||
from cryptography.hazmat.primitives.asymmetric import dsa, ec, ed25519, rsa
|
||||
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
|
||||
from cryptography.hazmat.primitives.serialization import (
|
||||
Encoding,
|
||||
NoEncryption,
|
||||
PrivateFormat,
|
||||
PublicFormat,
|
||||
)
|
||||
|
||||
try:
|
||||
from bcrypt import kdf as _bcrypt_kdf
|
||||
|
||||
_bcrypt_supported = True
|
||||
except ImportError:
|
||||
_bcrypt_supported = False
|
||||
|
||||
def _bcrypt_kdf(*args, **kwargs):
|
||||
raise UnsupportedAlgorithm("Need bcrypt module")
|
||||
|
||||
|
||||
try:
|
||||
from base64 import encodebytes as _base64_encode
|
||||
except ImportError:
|
||||
from base64 import encodestring as _base64_encode
|
||||
|
||||
_SSH_ED25519 = b"ssh-ed25519"
|
||||
_SSH_RSA = b"ssh-rsa"
|
||||
_SSH_DSA = b"ssh-dss"
|
||||
_ECDSA_NISTP256 = b"ecdsa-sha2-nistp256"
|
||||
_ECDSA_NISTP384 = b"ecdsa-sha2-nistp384"
|
||||
_ECDSA_NISTP521 = b"ecdsa-sha2-nistp521"
|
||||
_CERT_SUFFIX = b"-cert-v01@openssh.com"
|
||||
|
||||
_SSH_PUBKEY_RC = re.compile(br"\A(\S+)[ \t]+(\S+)")
|
||||
_SK_MAGIC = b"openssh-key-v1\0"
|
||||
_SK_START = b"-----BEGIN OPENSSH PRIVATE KEY-----"
|
||||
_SK_END = b"-----END OPENSSH PRIVATE KEY-----"
|
||||
_BCRYPT = b"bcrypt"
|
||||
_NONE = b"none"
|
||||
_DEFAULT_CIPHER = b"aes256-ctr"
|
||||
_DEFAULT_ROUNDS = 16
|
||||
_MAX_PASSWORD = 72
|
||||
|
||||
# re is the only way to work on bytes-like data
|
||||
_PEM_RC = re.compile(_SK_START + b"(.*?)" + _SK_END, re.DOTALL)
|
||||
|
||||
# padding for max blocksize
|
||||
_PADDING = memoryview(bytearray(range(1, 1 + 16)))
|
||||
|
||||
# ciphers that are actually used in key wrapping
|
||||
_SSH_CIPHERS = {
|
||||
b"aes256-ctr": (algorithms.AES, 32, modes.CTR, 16),
|
||||
b"aes256-cbc": (algorithms.AES, 32, modes.CBC, 16),
|
||||
}
|
||||
|
||||
# map local curve name to key type
|
||||
_ECDSA_KEY_TYPE = {
|
||||
"secp256r1": _ECDSA_NISTP256,
|
||||
"secp384r1": _ECDSA_NISTP384,
|
||||
"secp521r1": _ECDSA_NISTP521,
|
||||
}
|
||||
|
||||
_U32 = struct.Struct(b">I")
|
||||
_U64 = struct.Struct(b">Q")
|
||||
|
||||
|
||||
def _ecdsa_key_type(public_key):
|
||||
"""Return SSH key_type and curve_name for private key."""
|
||||
curve = public_key.curve
|
||||
if curve.name not in _ECDSA_KEY_TYPE:
|
||||
raise ValueError(
|
||||
"Unsupported curve for ssh private key: %r" % curve.name
|
||||
)
|
||||
return _ECDSA_KEY_TYPE[curve.name]
|
||||
|
||||
|
||||
def _ssh_pem_encode(data, prefix=_SK_START + b"\n", suffix=_SK_END + b"\n"):
|
||||
return b"".join([prefix, _base64_encode(data), suffix])
|
||||
|
||||
|
||||
def _check_block_size(data, block_len):
|
||||
"""Require data to be full blocks"""
|
||||
if not data or len(data) % block_len != 0:
|
||||
raise ValueError("Corrupt data: missing padding")
|
||||
|
||||
|
||||
def _check_empty(data):
|
||||
"""All data should have been parsed."""
|
||||
if data:
|
||||
raise ValueError("Corrupt data: unparsed data")
|
||||
|
||||
|
||||
def _init_cipher(ciphername, password, salt, rounds, backend):
|
||||
"""Generate key + iv and return cipher."""
|
||||
if not password:
|
||||
raise ValueError("Key is password-protected.")
|
||||
|
||||
algo, key_len, mode, iv_len = _SSH_CIPHERS[ciphername]
|
||||
seed = _bcrypt_kdf(password, salt, key_len + iv_len, rounds, True)
|
||||
return Cipher(algo(seed[:key_len]), mode(seed[key_len:]), backend)
|
||||
|
||||
|
||||
def _get_u32(data):
|
||||
"""Uint32"""
|
||||
if len(data) < 4:
|
||||
raise ValueError("Invalid data")
|
||||
return _U32.unpack(data[:4])[0], data[4:]
|
||||
|
||||
|
||||
def _get_u64(data):
|
||||
"""Uint64"""
|
||||
if len(data) < 8:
|
||||
raise ValueError("Invalid data")
|
||||
return _U64.unpack(data[:8])[0], data[8:]
|
||||
|
||||
|
||||
def _get_sshstr(data):
|
||||
"""Bytes with u32 length prefix"""
|
||||
n, data = _get_u32(data)
|
||||
if n > len(data):
|
||||
raise ValueError("Invalid data")
|
||||
return data[:n], data[n:]
|
||||
|
||||
|
||||
def _get_mpint(data):
|
||||
"""Big integer."""
|
||||
val, data = _get_sshstr(data)
|
||||
if val and six.indexbytes(val, 0) > 0x7F:
|
||||
raise ValueError("Invalid data")
|
||||
return utils.int_from_bytes(val, "big"), data
|
||||
|
||||
|
||||
def _to_mpint(val):
|
||||
"""Storage format for signed bigint."""
|
||||
if val < 0:
|
||||
raise ValueError("negative mpint not allowed")
|
||||
if not val:
|
||||
return b""
|
||||
nbytes = (val.bit_length() + 8) // 8
|
||||
return utils.int_to_bytes(val, nbytes)
|
||||
|
||||
|
||||
class _FragList(object):
|
||||
"""Build recursive structure without data copy."""
|
||||
|
||||
def __init__(self, init=None):
|
||||
self.flist = []
|
||||
if init:
|
||||
self.flist.extend(init)
|
||||
|
||||
def put_raw(self, val):
|
||||
"""Add plain bytes"""
|
||||
self.flist.append(val)
|
||||
|
||||
def put_u32(self, val):
|
||||
"""Big-endian uint32"""
|
||||
self.flist.append(_U32.pack(val))
|
||||
|
||||
def put_sshstr(self, val):
|
||||
"""Bytes prefixed with u32 length"""
|
||||
if isinstance(val, (bytes, memoryview, bytearray)):
|
||||
self.put_u32(len(val))
|
||||
self.flist.append(val)
|
||||
else:
|
||||
self.put_u32(val.size())
|
||||
self.flist.extend(val.flist)
|
||||
|
||||
def put_mpint(self, val):
|
||||
"""Big-endian bigint prefixed with u32 length"""
|
||||
self.put_sshstr(_to_mpint(val))
|
||||
|
||||
def size(self):
|
||||
"""Current number of bytes"""
|
||||
return sum(map(len, self.flist))
|
||||
|
||||
def render(self, dstbuf, pos=0):
|
||||
"""Write into bytearray"""
|
||||
for frag in self.flist:
|
||||
flen = len(frag)
|
||||
start, pos = pos, pos + flen
|
||||
dstbuf[start:pos] = frag
|
||||
return pos
|
||||
|
||||
def tobytes(self):
|
||||
"""Return as bytes"""
|
||||
buf = memoryview(bytearray(self.size()))
|
||||
self.render(buf)
|
||||
return buf.tobytes()
|
||||
|
||||
|
||||
class _SSHFormatRSA(object):
|
||||
"""Format for RSA keys.
|
||||
|
||||
Public:
|
||||
mpint e, n
|
||||
Private:
|
||||
mpint n, e, d, iqmp, p, q
|
||||
"""
|
||||
|
||||
def get_public(self, data):
|
||||
"""RSA public fields"""
|
||||
e, data = _get_mpint(data)
|
||||
n, data = _get_mpint(data)
|
||||
return (e, n), data
|
||||
|
||||
def load_public(self, key_type, data, backend):
|
||||
"""Make RSA public key from data."""
|
||||
(e, n), data = self.get_public(data)
|
||||
public_numbers = rsa.RSAPublicNumbers(e, n)
|
||||
public_key = public_numbers.public_key(backend)
|
||||
return public_key, data
|
||||
|
||||
def load_private(self, data, pubfields, backend):
|
||||
"""Make RSA private key from data."""
|
||||
n, data = _get_mpint(data)
|
||||
e, data = _get_mpint(data)
|
||||
d, data = _get_mpint(data)
|
||||
iqmp, data = _get_mpint(data)
|
||||
p, data = _get_mpint(data)
|
||||
q, data = _get_mpint(data)
|
||||
|
||||
if (e, n) != pubfields:
|
||||
raise ValueError("Corrupt data: rsa field mismatch")
|
||||
dmp1 = rsa.rsa_crt_dmp1(d, p)
|
||||
dmq1 = rsa.rsa_crt_dmq1(d, q)
|
||||
public_numbers = rsa.RSAPublicNumbers(e, n)
|
||||
private_numbers = rsa.RSAPrivateNumbers(
|
||||
p, q, d, dmp1, dmq1, iqmp, public_numbers
|
||||
)
|
||||
private_key = private_numbers.private_key(backend)
|
||||
return private_key, data
|
||||
|
||||
def encode_public(self, public_key, f_pub):
|
||||
"""Write RSA public key"""
|
||||
pubn = public_key.public_numbers()
|
||||
f_pub.put_mpint(pubn.e)
|
||||
f_pub.put_mpint(pubn.n)
|
||||
|
||||
def encode_private(self, private_key, f_priv):
|
||||
"""Write RSA private key"""
|
||||
private_numbers = private_key.private_numbers()
|
||||
public_numbers = private_numbers.public_numbers
|
||||
|
||||
f_priv.put_mpint(public_numbers.n)
|
||||
f_priv.put_mpint(public_numbers.e)
|
||||
|
||||
f_priv.put_mpint(private_numbers.d)
|
||||
f_priv.put_mpint(private_numbers.iqmp)
|
||||
f_priv.put_mpint(private_numbers.p)
|
||||
f_priv.put_mpint(private_numbers.q)
|
||||
|
||||
|
||||
class _SSHFormatDSA(object):
|
||||
"""Format for DSA keys.
|
||||
|
||||
Public:
|
||||
mpint p, q, g, y
|
||||
Private:
|
||||
mpint p, q, g, y, x
|
||||
"""
|
||||
|
||||
def get_public(self, data):
|
||||
"""DSA public fields"""
|
||||
p, data = _get_mpint(data)
|
||||
q, data = _get_mpint(data)
|
||||
g, data = _get_mpint(data)
|
||||
y, data = _get_mpint(data)
|
||||
return (p, q, g, y), data
|
||||
|
||||
def load_public(self, key_type, data, backend):
|
||||
"""Make DSA public key from data."""
|
||||
(p, q, g, y), data = self.get_public(data)
|
||||
parameter_numbers = dsa.DSAParameterNumbers(p, q, g)
|
||||
public_numbers = dsa.DSAPublicNumbers(y, parameter_numbers)
|
||||
self._validate(public_numbers)
|
||||
public_key = public_numbers.public_key(backend)
|
||||
return public_key, data
|
||||
|
||||
def load_private(self, data, pubfields, backend):
|
||||
"""Make DSA private key from data."""
|
||||
(p, q, g, y), data = self.get_public(data)
|
||||
x, data = _get_mpint(data)
|
||||
|
||||
if (p, q, g, y) != pubfields:
|
||||
raise ValueError("Corrupt data: dsa field mismatch")
|
||||
parameter_numbers = dsa.DSAParameterNumbers(p, q, g)
|
||||
public_numbers = dsa.DSAPublicNumbers(y, parameter_numbers)
|
||||
self._validate(public_numbers)
|
||||
private_numbers = dsa.DSAPrivateNumbers(x, public_numbers)
|
||||
private_key = private_numbers.private_key(backend)
|
||||
return private_key, data
|
||||
|
||||
def encode_public(self, public_key, f_pub):
|
||||
"""Write DSA public key"""
|
||||
public_numbers = public_key.public_numbers()
|
||||
parameter_numbers = public_numbers.parameter_numbers
|
||||
self._validate(public_numbers)
|
||||
|
||||
f_pub.put_mpint(parameter_numbers.p)
|
||||
f_pub.put_mpint(parameter_numbers.q)
|
||||
f_pub.put_mpint(parameter_numbers.g)
|
||||
f_pub.put_mpint(public_numbers.y)
|
||||
|
||||
def encode_private(self, private_key, f_priv):
|
||||
"""Write DSA private key"""
|
||||
self.encode_public(private_key.public_key(), f_priv)
|
||||
f_priv.put_mpint(private_key.private_numbers().x)
|
||||
|
||||
def _validate(self, public_numbers):
|
||||
parameter_numbers = public_numbers.parameter_numbers
|
||||
if parameter_numbers.p.bit_length() != 1024:
|
||||
raise ValueError("SSH supports only 1024 bit DSA keys")
|
||||
|
||||
|
||||
class _SSHFormatECDSA(object):
|
||||
"""Format for ECDSA keys.
|
||||
|
||||
Public:
|
||||
str curve
|
||||
bytes point
|
||||
Private:
|
||||
str curve
|
||||
bytes point
|
||||
mpint secret
|
||||
"""
|
||||
|
||||
def __init__(self, ssh_curve_name, curve):
|
||||
self.ssh_curve_name = ssh_curve_name
|
||||
self.curve = curve
|
||||
|
||||
def get_public(self, data):
|
||||
"""ECDSA public fields"""
|
||||
curve, data = _get_sshstr(data)
|
||||
point, data = _get_sshstr(data)
|
||||
if curve != self.ssh_curve_name:
|
||||
raise ValueError("Curve name mismatch")
|
||||
if six.indexbytes(point, 0) != 4:
|
||||
raise NotImplementedError("Need uncompressed point")
|
||||
return (curve, point), data
|
||||
|
||||
def load_public(self, key_type, data, backend):
|
||||
"""Make ECDSA public key from data."""
|
||||
(curve_name, point), data = self.get_public(data)
|
||||
public_key = ec.EllipticCurvePublicKey.from_encoded_point(
|
||||
self.curve, point.tobytes()
|
||||
)
|
||||
return public_key, data
|
||||
|
||||
def load_private(self, data, pubfields, backend):
|
||||
"""Make ECDSA private key from data."""
|
||||
(curve_name, point), data = self.get_public(data)
|
||||
secret, data = _get_mpint(data)
|
||||
|
||||
if (curve_name, point) != pubfields:
|
||||
raise ValueError("Corrupt data: ecdsa field mismatch")
|
||||
private_key = ec.derive_private_key(secret, self.curve, backend)
|
||||
return private_key, data
|
||||
|
||||
def encode_public(self, public_key, f_pub):
|
||||
"""Write ECDSA public key"""
|
||||
point = public_key.public_bytes(
|
||||
Encoding.X962, PublicFormat.UncompressedPoint
|
||||
)
|
||||
f_pub.put_sshstr(self.ssh_curve_name)
|
||||
f_pub.put_sshstr(point)
|
||||
|
||||
def encode_private(self, private_key, f_priv):
|
||||
"""Write ECDSA private key"""
|
||||
public_key = private_key.public_key()
|
||||
private_numbers = private_key.private_numbers()
|
||||
|
||||
self.encode_public(public_key, f_priv)
|
||||
f_priv.put_mpint(private_numbers.private_value)
|
||||
|
||||
|
||||
class _SSHFormatEd25519(object):
|
||||
"""Format for Ed25519 keys.
|
||||
|
||||
Public:
|
||||
bytes point
|
||||
Private:
|
||||
bytes point
|
||||
bytes secret_and_point
|
||||
"""
|
||||
|
||||
def get_public(self, data):
|
||||
"""Ed25519 public fields"""
|
||||
point, data = _get_sshstr(data)
|
||||
return (point,), data
|
||||
|
||||
def load_public(self, key_type, data, backend):
|
||||
"""Make Ed25519 public key from data."""
|
||||
(point,), data = self.get_public(data)
|
||||
public_key = ed25519.Ed25519PublicKey.from_public_bytes(
|
||||
point.tobytes()
|
||||
)
|
||||
return public_key, data
|
||||
|
||||
def load_private(self, data, pubfields, backend):
|
||||
"""Make Ed25519 private key from data."""
|
||||
(point,), data = self.get_public(data)
|
||||
keypair, data = _get_sshstr(data)
|
||||
|
||||
secret = keypair[:32]
|
||||
point2 = keypair[32:]
|
||||
if point != point2 or (point,) != pubfields:
|
||||
raise ValueError("Corrupt data: ed25519 field mismatch")
|
||||
private_key = ed25519.Ed25519PrivateKey.from_private_bytes(secret)
|
||||
return private_key, data
|
||||
|
||||
def encode_public(self, public_key, f_pub):
|
||||
"""Write Ed25519 public key"""
|
||||
raw_public_key = public_key.public_bytes(
|
||||
Encoding.Raw, PublicFormat.Raw
|
||||
)
|
||||
f_pub.put_sshstr(raw_public_key)
|
||||
|
||||
def encode_private(self, private_key, f_priv):
|
||||
"""Write Ed25519 private key"""
|
||||
public_key = private_key.public_key()
|
||||
raw_private_key = private_key.private_bytes(
|
||||
Encoding.Raw, PrivateFormat.Raw, NoEncryption()
|
||||
)
|
||||
raw_public_key = public_key.public_bytes(
|
||||
Encoding.Raw, PublicFormat.Raw
|
||||
)
|
||||
f_keypair = _FragList([raw_private_key, raw_public_key])
|
||||
|
||||
self.encode_public(public_key, f_priv)
|
||||
f_priv.put_sshstr(f_keypair)
|
||||
|
||||
|
||||
_KEY_FORMATS = {
|
||||
_SSH_RSA: _SSHFormatRSA(),
|
||||
_SSH_DSA: _SSHFormatDSA(),
|
||||
_SSH_ED25519: _SSHFormatEd25519(),
|
||||
_ECDSA_NISTP256: _SSHFormatECDSA(b"nistp256", ec.SECP256R1()),
|
||||
_ECDSA_NISTP384: _SSHFormatECDSA(b"nistp384", ec.SECP384R1()),
|
||||
_ECDSA_NISTP521: _SSHFormatECDSA(b"nistp521", ec.SECP521R1()),
|
||||
}
|
||||
|
||||
|
||||
def _lookup_kformat(key_type):
|
||||
"""Return valid format or throw error"""
|
||||
if not isinstance(key_type, bytes):
|
||||
key_type = memoryview(key_type).tobytes()
|
||||
if key_type in _KEY_FORMATS:
|
||||
return _KEY_FORMATS[key_type]
|
||||
raise UnsupportedAlgorithm("Unsupported key type: %r" % key_type)
|
||||
|
||||
|
||||
def load_ssh_private_key(data, password, backend=None):
|
||||
"""Load private key from OpenSSH custom encoding."""
|
||||
utils._check_byteslike("data", data)
|
||||
backend = _get_backend(backend)
|
||||
if password is not None:
|
||||
utils._check_bytes("password", password)
|
||||
|
||||
m = _PEM_RC.search(data)
|
||||
if not m:
|
||||
raise ValueError("Not OpenSSH private key format")
|
||||
p1 = m.start(1)
|
||||
p2 = m.end(1)
|
||||
data = binascii.a2b_base64(memoryview(data)[p1:p2])
|
||||
if not data.startswith(_SK_MAGIC):
|
||||
raise ValueError("Not OpenSSH private key format")
|
||||
data = memoryview(data)[len(_SK_MAGIC) :]
|
||||
|
||||
# parse header
|
||||
ciphername, data = _get_sshstr(data)
|
||||
kdfname, data = _get_sshstr(data)
|
||||
kdfoptions, data = _get_sshstr(data)
|
||||
nkeys, data = _get_u32(data)
|
||||
if nkeys != 1:
|
||||
raise ValueError("Only one key supported")
|
||||
|
||||
# load public key data
|
||||
pubdata, data = _get_sshstr(data)
|
||||
pub_key_type, pubdata = _get_sshstr(pubdata)
|
||||
kformat = _lookup_kformat(pub_key_type)
|
||||
pubfields, pubdata = kformat.get_public(pubdata)
|
||||
_check_empty(pubdata)
|
||||
|
||||
# load secret data
|
||||
edata, data = _get_sshstr(data)
|
||||
_check_empty(data)
|
||||
|
||||
if (ciphername, kdfname) != (_NONE, _NONE):
|
||||
ciphername = ciphername.tobytes()
|
||||
if ciphername not in _SSH_CIPHERS:
|
||||
raise UnsupportedAlgorithm("Unsupported cipher: %r" % ciphername)
|
||||
if kdfname != _BCRYPT:
|
||||
raise UnsupportedAlgorithm("Unsupported KDF: %r" % kdfname)
|
||||
blklen = _SSH_CIPHERS[ciphername][3]
|
||||
_check_block_size(edata, blklen)
|
||||
salt, kbuf = _get_sshstr(kdfoptions)
|
||||
rounds, kbuf = _get_u32(kbuf)
|
||||
_check_empty(kbuf)
|
||||
ciph = _init_cipher(
|
||||
ciphername, password, salt.tobytes(), rounds, backend
|
||||
)
|
||||
edata = memoryview(ciph.decryptor().update(edata))
|
||||
else:
|
||||
blklen = 8
|
||||
_check_block_size(edata, blklen)
|
||||
ck1, edata = _get_u32(edata)
|
||||
ck2, edata = _get_u32(edata)
|
||||
if ck1 != ck2:
|
||||
raise ValueError("Corrupt data: broken checksum")
|
||||
|
||||
# load per-key struct
|
||||
key_type, edata = _get_sshstr(edata)
|
||||
if key_type != pub_key_type:
|
||||
raise ValueError("Corrupt data: key type mismatch")
|
||||
private_key, edata = kformat.load_private(edata, pubfields, backend)
|
||||
comment, edata = _get_sshstr(edata)
|
||||
|
||||
# yes, SSH does padding check *after* all other parsing is done.
|
||||
# need to follow as it writes zero-byte padding too.
|
||||
if edata != _PADDING[: len(edata)]:
|
||||
raise ValueError("Corrupt data: invalid padding")
|
||||
|
||||
return private_key
|
||||
|
||||
|
||||
def serialize_ssh_private_key(private_key, password=None):
|
||||
"""Serialize private key with OpenSSH custom encoding."""
|
||||
if password is not None:
|
||||
utils._check_bytes("password", password)
|
||||
if password and len(password) > _MAX_PASSWORD:
|
||||
raise ValueError(
|
||||
"Passwords longer than 72 bytes are not supported by "
|
||||
"OpenSSH private key format"
|
||||
)
|
||||
|
||||
if isinstance(private_key, ec.EllipticCurvePrivateKey):
|
||||
key_type = _ecdsa_key_type(private_key.public_key())
|
||||
elif isinstance(private_key, rsa.RSAPrivateKey):
|
||||
key_type = _SSH_RSA
|
||||
elif isinstance(private_key, dsa.DSAPrivateKey):
|
||||
key_type = _SSH_DSA
|
||||
elif isinstance(private_key, ed25519.Ed25519PrivateKey):
|
||||
key_type = _SSH_ED25519
|
||||
else:
|
||||
raise ValueError("Unsupported key type")
|
||||
kformat = _lookup_kformat(key_type)
|
||||
|
||||
# setup parameters
|
||||
f_kdfoptions = _FragList()
|
||||
if password:
|
||||
ciphername = _DEFAULT_CIPHER
|
||||
blklen = _SSH_CIPHERS[ciphername][3]
|
||||
kdfname = _BCRYPT
|
||||
rounds = _DEFAULT_ROUNDS
|
||||
salt = os.urandom(16)
|
||||
f_kdfoptions.put_sshstr(salt)
|
||||
f_kdfoptions.put_u32(rounds)
|
||||
backend = _get_backend(None)
|
||||
ciph = _init_cipher(ciphername, password, salt, rounds, backend)
|
||||
else:
|
||||
ciphername = kdfname = _NONE
|
||||
blklen = 8
|
||||
ciph = None
|
||||
nkeys = 1
|
||||
checkval = os.urandom(4)
|
||||
comment = b""
|
||||
|
||||
# encode public and private parts together
|
||||
f_public_key = _FragList()
|
||||
f_public_key.put_sshstr(key_type)
|
||||
kformat.encode_public(private_key.public_key(), f_public_key)
|
||||
|
||||
f_secrets = _FragList([checkval, checkval])
|
||||
f_secrets.put_sshstr(key_type)
|
||||
kformat.encode_private(private_key, f_secrets)
|
||||
f_secrets.put_sshstr(comment)
|
||||
f_secrets.put_raw(_PADDING[: blklen - (f_secrets.size() % blklen)])
|
||||
|
||||
# top-level structure
|
||||
f_main = _FragList()
|
||||
f_main.put_raw(_SK_MAGIC)
|
||||
f_main.put_sshstr(ciphername)
|
||||
f_main.put_sshstr(kdfname)
|
||||
f_main.put_sshstr(f_kdfoptions)
|
||||
f_main.put_u32(nkeys)
|
||||
f_main.put_sshstr(f_public_key)
|
||||
f_main.put_sshstr(f_secrets)
|
||||
|
||||
# copy result into bytearray
|
||||
slen = f_secrets.size()
|
||||
mlen = f_main.size()
|
||||
buf = memoryview(bytearray(mlen + blklen))
|
||||
f_main.render(buf)
|
||||
ofs = mlen - slen
|
||||
|
||||
# encrypt in-place
|
||||
if ciph is not None:
|
||||
ciph.encryptor().update_into(buf[ofs:mlen], buf[ofs:])
|
||||
|
||||
txt = _ssh_pem_encode(buf[:mlen])
|
||||
buf[ofs:mlen] = bytearray(slen)
|
||||
return txt
|
||||
|
||||
|
||||
def load_ssh_public_key(data, backend=None):
|
||||
"""Load public key from OpenSSH one-line format."""
|
||||
backend = _get_backend(backend)
|
||||
utils._check_byteslike("data", data)
|
||||
|
||||
m = _SSH_PUBKEY_RC.match(data)
|
||||
if not m:
|
||||
raise ValueError("Invalid line format")
|
||||
key_type = orig_key_type = m.group(1)
|
||||
key_body = m.group(2)
|
||||
with_cert = False
|
||||
if _CERT_SUFFIX == key_type[-len(_CERT_SUFFIX) :]:
|
||||
with_cert = True
|
||||
key_type = key_type[: -len(_CERT_SUFFIX)]
|
||||
kformat = _lookup_kformat(key_type)
|
||||
|
||||
try:
|
||||
data = memoryview(binascii.a2b_base64(key_body))
|
||||
except (TypeError, binascii.Error):
|
||||
raise ValueError("Invalid key format")
|
||||
|
||||
inner_key_type, data = _get_sshstr(data)
|
||||
if inner_key_type != orig_key_type:
|
||||
raise ValueError("Invalid key format")
|
||||
if with_cert:
|
||||
nonce, data = _get_sshstr(data)
|
||||
public_key, data = kformat.load_public(key_type, data, backend)
|
||||
if with_cert:
|
||||
serial, data = _get_u64(data)
|
||||
cctype, data = _get_u32(data)
|
||||
key_id, data = _get_sshstr(data)
|
||||
principals, data = _get_sshstr(data)
|
||||
valid_after, data = _get_u64(data)
|
||||
valid_before, data = _get_u64(data)
|
||||
crit_options, data = _get_sshstr(data)
|
||||
extensions, data = _get_sshstr(data)
|
||||
reserved, data = _get_sshstr(data)
|
||||
sig_key, data = _get_sshstr(data)
|
||||
signature, data = _get_sshstr(data)
|
||||
_check_empty(data)
|
||||
return public_key
|
||||
|
||||
|
||||
def serialize_ssh_public_key(public_key):
|
||||
"""One-line public key format for OpenSSH"""
|
||||
if isinstance(public_key, ec.EllipticCurvePublicKey):
|
||||
key_type = _ecdsa_key_type(public_key)
|
||||
elif isinstance(public_key, rsa.RSAPublicKey):
|
||||
key_type = _SSH_RSA
|
||||
elif isinstance(public_key, dsa.DSAPublicKey):
|
||||
key_type = _SSH_DSA
|
||||
elif isinstance(public_key, ed25519.Ed25519PublicKey):
|
||||
key_type = _SSH_ED25519
|
||||
else:
|
||||
raise ValueError("Unsupported key type")
|
||||
kformat = _lookup_kformat(key_type)
|
||||
|
||||
f_pub = _FragList()
|
||||
f_pub.put_sshstr(key_type)
|
||||
kformat.encode_public(public_key, f_pub)
|
||||
|
||||
pub = binascii.b2a_base64(f_pub.tobytes()).strip()
|
||||
return b"".join([key_type, b" ", pub])
|
||||
|
|
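A minimal usage sketch for the OpenSSH loaders above (not part of the upstream diff); the file names are hypothetical placeholders, and an encrypted private key additionally requires the bcrypt module.

from cryptography.hazmat.primitives.serialization import (
    load_ssh_private_key,
    load_ssh_public_key,
)

with open("id_ed25519", "rb") as f:      # hypothetical path
    private_key = load_ssh_private_key(f.read(), password=None)

with open("id_ed25519.pub", "rb") as f:  # hypothetical path
    public_key = load_ssh_public_key(f.read())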
@@ -0,0 +1,9 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
|
||||
class InvalidToken(Exception):
|
||||
pass
|
||||
|
|
@@ -0,0 +1,69 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

from __future__ import absolute_import, division, print_function

import struct

import six

from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
from cryptography.hazmat.backends import _get_backend
from cryptography.hazmat.backends.interfaces import HMACBackend
from cryptography.hazmat.primitives import constant_time, hmac
from cryptography.hazmat.primitives.hashes import SHA1, SHA256, SHA512
from cryptography.hazmat.primitives.twofactor import InvalidToken
from cryptography.hazmat.primitives.twofactor.utils import _generate_uri


class HOTP(object):
    def __init__(
        self, key, length, algorithm, backend=None, enforce_key_length=True
    ):
        backend = _get_backend(backend)
        if not isinstance(backend, HMACBackend):
            raise UnsupportedAlgorithm(
                "Backend object does not implement HMACBackend.",
                _Reasons.BACKEND_MISSING_INTERFACE,
            )

        if len(key) < 16 and enforce_key_length is True:
            raise ValueError("Key length has to be at least 128 bits.")

        if not isinstance(length, six.integer_types):
            raise TypeError("Length parameter must be an integer type.")

        if length < 6 or length > 8:
            raise ValueError("Length of HOTP has to be between 6 to 8.")

        if not isinstance(algorithm, (SHA1, SHA256, SHA512)):
            raise TypeError("Algorithm must be SHA1, SHA256 or SHA512.")

        self._key = key
        self._length = length
        self._algorithm = algorithm
        self._backend = backend

    def generate(self, counter):
        truncated_value = self._dynamic_truncate(counter)
        hotp = truncated_value % (10 ** self._length)
        return "{0:0{1}}".format(hotp, self._length).encode()

    def verify(self, hotp, counter):
        if not constant_time.bytes_eq(self.generate(counter), hotp):
            raise InvalidToken("Supplied HOTP value does not match.")

    def _dynamic_truncate(self, counter):
        ctx = hmac.HMAC(self._key, self._algorithm, self._backend)
        ctx.update(struct.pack(">Q", counter))
        hmac_value = ctx.finalize()

        offset = six.indexbytes(hmac_value, len(hmac_value) - 1) & 0b1111
        p = hmac_value[offset : offset + 4]
        return struct.unpack(">I", p)[0] & 0x7FFFFFFF

    def get_provisioning_uri(self, account_name, counter, issuer):
        return _generate_uri(
            self, "hotp", account_name, issuer, [("counter", int(counter))]
        )
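A minimal sketch of how the HOTP class above is typically used (RFC 4226 counter-based codes); the key length and counter value are illustrative:

# HOTP usage sketch: generate and verify a 6-digit counter-based code.
import os

from cryptography.hazmat.primitives.hashes import SHA1
from cryptography.hazmat.primitives.twofactor.hotp import HOTP

key = os.urandom(20)          # shared secret; at least 128 bits when enforce_key_length is True
hotp = HOTP(key, 6, SHA1())   # 6 digits, SHA1 as in the RFC 4226 test vectors
token = hotp.generate(0)      # bytes such as b"123456" for counter value 0
hotp.verify(token, 0)         # raises InvalidToken if the value does not match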
@ -0,0 +1,51 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

from __future__ import absolute_import, division, print_function

from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
from cryptography.hazmat.backends import _get_backend
from cryptography.hazmat.backends.interfaces import HMACBackend
from cryptography.hazmat.primitives import constant_time
from cryptography.hazmat.primitives.twofactor import InvalidToken
from cryptography.hazmat.primitives.twofactor.hotp import HOTP
from cryptography.hazmat.primitives.twofactor.utils import _generate_uri


class TOTP(object):
    def __init__(
        self,
        key,
        length,
        algorithm,
        time_step,
        backend=None,
        enforce_key_length=True,
    ):
        backend = _get_backend(backend)
        if not isinstance(backend, HMACBackend):
            raise UnsupportedAlgorithm(
                "Backend object does not implement HMACBackend.",
                _Reasons.BACKEND_MISSING_INTERFACE,
            )

        self._time_step = time_step
        self._hotp = HOTP(key, length, algorithm, backend, enforce_key_length)

    def generate(self, time):
        counter = int(time / self._time_step)
        return self._hotp.generate(counter)

    def verify(self, totp, time):
        if not constant_time.bytes_eq(self.generate(time), totp):
            raise InvalidToken("Supplied TOTP value does not match.")

    def get_provisioning_uri(self, account_name, issuer):
        return _generate_uri(
            self._hotp,
            "totp",
            account_name,
            issuer,
            [("period", int(self._time_step))],
        )
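A minimal sketch of the TOTP class above (RFC 6238 time-based codes built on HOTP); the 30-second step and key size are illustrative defaults:

# TOTP usage sketch: the counter is derived from the current time.
import os
import time

from cryptography.hazmat.primitives.hashes import SHA1
from cryptography.hazmat.primitives.twofactor.totp import TOTP

key = os.urandom(20)
totp = TOTP(key, 6, SHA1(), 30)   # 6 digits, 30-second time step
now = time.time()
token = totp.generate(now)        # counter is int(now / 30) under the hood
totp.verify(token, now)           # raises InvalidToken on mismatch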
@ -0,0 +1,33 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

from __future__ import absolute_import, division, print_function

import base64

from six.moves.urllib.parse import quote, urlencode


def _generate_uri(hotp, type_name, account_name, issuer, extra_parameters):
    parameters = [
        ("digits", hotp._length),
        ("secret", base64.b32encode(hotp._key)),
        ("algorithm", hotp._algorithm.name.upper()),
    ]

    if issuer is not None:
        parameters.append(("issuer", issuer))

    parameters.extend(extra_parameters)

    uriparts = {
        "type": type_name,
        "label": (
            "%s:%s" % (quote(issuer), quote(account_name))
            if issuer
            else quote(account_name)
        ),
        "parameters": urlencode(parameters),
    }
    return "otpauth://{type}/{label}?{parameters}".format(**uriparts)