diff --git a/packages/wakatime/__about__.py b/packages/wakatime/__about__.py
index 6ee2c58..2823160 100644
--- a/packages/wakatime/__about__.py
+++ b/packages/wakatime/__about__.py
@@ -1,7 +1,7 @@
 __title__ = 'wakatime'
 __description__ = 'Common interface to the WakaTime api.'
 __url__ = 'https://github.com/wakatime/wakatime'
-__version_info__ = ('13', '0', '3')
+__version_info__ = ('13', '0', '4')
 __version__ = '.'.join(__version_info__)
 __author__ = 'Alan Hamlett'
 __author_email__ = 'alan@wakatime.com'
diff --git a/packages/wakatime/api.py b/packages/wakatime/api.py
index ecff0a6..b06efe6 100644
--- a/packages/wakatime/api.py
+++ b/packages/wakatime/api.py
@@ -19,7 +19,6 @@
 from .compat import u, is_py3, json
 from .constants import API_ERROR, AUTH_ERROR, SUCCESS, UNKNOWN_ERROR
 from .offlinequeue import Queue
-from .packages.requests.exceptions import RequestException
 from .session_cache import SessionCache
 from .utils import get_hostname, get_user_agent
 from .packages import tzlocal
@@ -38,6 +37,9 @@
 except ImportError:  # pragma: nocover
     sys.exit(UNKNOWN_ERROR)
 
+from .packages.requests.exceptions import RequestException
+
+
 def send_heartbeats(heartbeats, args, configs, use_ntlm_proxy=False):
     """Send heartbeats to WakaTime API.
 
diff --git a/packages/wakatime/compat.py b/packages/wakatime/compat.py
index 18dc141..0d6c461 100644
--- a/packages/wakatime/compat.py
+++ b/packages/wakatime/compat.py
@@ -18,6 +18,7 @@
 import sys
 
 is_py2 = (sys.version_info[0] == 2)
+is_py26 = (sys.version_info[0] == 2 and sys.version_info[1] == 6)
 is_py3 = (sys.version_info[0] == 3)
 is_win = platform.system() == 'Windows'
 
diff --git a/packages/wakatime/main.py b/packages/wakatime/main.py
index 4cbcb88..ab65873 100644
--- a/packages/wakatime/main.py
+++ b/packages/wakatime/main.py
@@ -21,6 +21,11 @@
 pwd = os.path.dirname(os.path.abspath(__file__))
 sys.path.insert(0, os.path.dirname(pwd))
 sys.path.insert(0, os.path.join(pwd, 'packages'))
+from .compat import is_py26
+
+if not is_py26:
+    sys.path.insert(0, os.path.join(pwd, 'packages', 'py27'))
+
 from .__about__ import __version__
 from .api import send_heartbeats, get_time_today
 from .arguments import parse_arguments
@@ -28,8 +33,10 @@
 from .compat import u, json
 from .constants import SUCCESS, UNKNOWN_ERROR, HEARTBEATS_PER_REQUEST
 from .logger import setup_logging
+
 
 log = logging.getLogger('WakaTime')
+
 
 from .heartbeat import Heartbeat
 from .offlinequeue import Queue
diff --git a/packages/wakatime/packages/OpenSSL/SSL.py b/packages/wakatime/packages/OpenSSL/SSL.py
new file mode 100644
index 0000000..a228b73
--- /dev/null
+++ b/packages/wakatime/packages/OpenSSL/SSL.py
@@ -0,0 +1,2521 @@
+import os
+import socket
+import warnings
+from sys import platform
+from functools import wraps, partial
+from itertools import count, chain
+from weakref import WeakValueDictionary
+from errno import errorcode
+
+from six import (
+    binary_type as _binary_type, integer_types as integer_types, int2byte,
+    indexbytes)
+
+from OpenSSL._util import (
+    UNSPECIFIED as _UNSPECIFIED,
+    exception_from_error_queue as _exception_from_error_queue,
+    ffi as _ffi,
+    from_buffer as _from_buffer,
+    lib as _lib,
+    make_assert as _make_assert,
+    native as _native,
+    path_string as _path_string,
+    text_to_bytes_and_warn as _text_to_bytes_and_warn,
+    no_zero_allocator as _no_zero_allocator,
+)
+
+from OpenSSL.crypto import (
+    FILETYPE_PEM, _PassphraseHelper, PKey, X509Name, X509, X509Store)
+
+__all__ = [
+    'OPENSSL_VERSION_NUMBER',
+    'SSLEAY_VERSION',
+    'SSLEAY_CFLAGS',
+    'SSLEAY_PLATFORM',
+
'SSLEAY_DIR', + 'SSLEAY_BUILT_ON', + 'SENT_SHUTDOWN', + 'RECEIVED_SHUTDOWN', + 'SSLv2_METHOD', + 'SSLv3_METHOD', + 'SSLv23_METHOD', + 'TLSv1_METHOD', + 'TLSv1_1_METHOD', + 'TLSv1_2_METHOD', + 'OP_NO_SSLv2', + 'OP_NO_SSLv3', + 'OP_NO_TLSv1', + 'OP_NO_TLSv1_1', + 'OP_NO_TLSv1_2', + 'OP_NO_TLSv1_3', + 'MODE_RELEASE_BUFFERS', + 'OP_SINGLE_DH_USE', + 'OP_SINGLE_ECDH_USE', + 'OP_EPHEMERAL_RSA', + 'OP_MICROSOFT_SESS_ID_BUG', + 'OP_NETSCAPE_CHALLENGE_BUG', + 'OP_NETSCAPE_REUSE_CIPHER_CHANGE_BUG', + 'OP_SSLREF2_REUSE_CERT_TYPE_BUG', + 'OP_MICROSOFT_BIG_SSLV3_BUFFER', + 'OP_MSIE_SSLV2_RSA_PADDING', + 'OP_SSLEAY_080_CLIENT_DH_BUG', + 'OP_TLS_D5_BUG', + 'OP_TLS_BLOCK_PADDING_BUG', + 'OP_DONT_INSERT_EMPTY_FRAGMENTS', + 'OP_CIPHER_SERVER_PREFERENCE', + 'OP_TLS_ROLLBACK_BUG', + 'OP_PKCS1_CHECK_1', + 'OP_PKCS1_CHECK_2', + 'OP_NETSCAPE_CA_DN_BUG', + 'OP_NETSCAPE_DEMO_CIPHER_CHANGE_BUG', + 'OP_NO_COMPRESSION', + 'OP_NO_QUERY_MTU', + 'OP_COOKIE_EXCHANGE', + 'OP_NO_TICKET', + 'OP_ALL', + 'VERIFY_PEER', + 'VERIFY_FAIL_IF_NO_PEER_CERT', + 'VERIFY_CLIENT_ONCE', + 'VERIFY_NONE', + 'SESS_CACHE_OFF', + 'SESS_CACHE_CLIENT', + 'SESS_CACHE_SERVER', + 'SESS_CACHE_BOTH', + 'SESS_CACHE_NO_AUTO_CLEAR', + 'SESS_CACHE_NO_INTERNAL_LOOKUP', + 'SESS_CACHE_NO_INTERNAL_STORE', + 'SESS_CACHE_NO_INTERNAL', + 'SSL_ST_CONNECT', + 'SSL_ST_ACCEPT', + 'SSL_ST_MASK', + 'SSL_CB_LOOP', + 'SSL_CB_EXIT', + 'SSL_CB_READ', + 'SSL_CB_WRITE', + 'SSL_CB_ALERT', + 'SSL_CB_READ_ALERT', + 'SSL_CB_WRITE_ALERT', + 'SSL_CB_ACCEPT_LOOP', + 'SSL_CB_ACCEPT_EXIT', + 'SSL_CB_CONNECT_LOOP', + 'SSL_CB_CONNECT_EXIT', + 'SSL_CB_HANDSHAKE_START', + 'SSL_CB_HANDSHAKE_DONE', + 'Error', + 'WantReadError', + 'WantWriteError', + 'WantX509LookupError', + 'ZeroReturnError', + 'SysCallError', + 'SSLeay_version', + 'Session', + 'Context', + 'Connection' +] + +try: + _buffer = buffer +except NameError: + class _buffer(object): + pass + +OPENSSL_VERSION_NUMBER = _lib.OPENSSL_VERSION_NUMBER +SSLEAY_VERSION = _lib.SSLEAY_VERSION +SSLEAY_CFLAGS = _lib.SSLEAY_CFLAGS +SSLEAY_PLATFORM = _lib.SSLEAY_PLATFORM +SSLEAY_DIR = _lib.SSLEAY_DIR +SSLEAY_BUILT_ON = _lib.SSLEAY_BUILT_ON + +SENT_SHUTDOWN = _lib.SSL_SENT_SHUTDOWN +RECEIVED_SHUTDOWN = _lib.SSL_RECEIVED_SHUTDOWN + +SSLv2_METHOD = 1 +SSLv3_METHOD = 2 +SSLv23_METHOD = 3 +TLSv1_METHOD = 4 +TLSv1_1_METHOD = 5 +TLSv1_2_METHOD = 6 + +OP_NO_SSLv2 = _lib.SSL_OP_NO_SSLv2 +OP_NO_SSLv3 = _lib.SSL_OP_NO_SSLv3 +OP_NO_TLSv1 = _lib.SSL_OP_NO_TLSv1 +OP_NO_TLSv1_1 = _lib.SSL_OP_NO_TLSv1_1 +OP_NO_TLSv1_2 = _lib.SSL_OP_NO_TLSv1_2 +try: + OP_NO_TLSv1_3 = _lib.SSL_OP_NO_TLSv1_3 +except AttributeError: + pass + +MODE_RELEASE_BUFFERS = _lib.SSL_MODE_RELEASE_BUFFERS + +OP_SINGLE_DH_USE = _lib.SSL_OP_SINGLE_DH_USE +OP_SINGLE_ECDH_USE = _lib.SSL_OP_SINGLE_ECDH_USE +OP_EPHEMERAL_RSA = _lib.SSL_OP_EPHEMERAL_RSA +OP_MICROSOFT_SESS_ID_BUG = _lib.SSL_OP_MICROSOFT_SESS_ID_BUG +OP_NETSCAPE_CHALLENGE_BUG = _lib.SSL_OP_NETSCAPE_CHALLENGE_BUG +OP_NETSCAPE_REUSE_CIPHER_CHANGE_BUG = ( + _lib.SSL_OP_NETSCAPE_REUSE_CIPHER_CHANGE_BUG +) +OP_SSLREF2_REUSE_CERT_TYPE_BUG = _lib.SSL_OP_SSLREF2_REUSE_CERT_TYPE_BUG +OP_MICROSOFT_BIG_SSLV3_BUFFER = _lib.SSL_OP_MICROSOFT_BIG_SSLV3_BUFFER +OP_MSIE_SSLV2_RSA_PADDING = _lib.SSL_OP_MSIE_SSLV2_RSA_PADDING +OP_SSLEAY_080_CLIENT_DH_BUG = _lib.SSL_OP_SSLEAY_080_CLIENT_DH_BUG +OP_TLS_D5_BUG = _lib.SSL_OP_TLS_D5_BUG +OP_TLS_BLOCK_PADDING_BUG = _lib.SSL_OP_TLS_BLOCK_PADDING_BUG +OP_DONT_INSERT_EMPTY_FRAGMENTS = _lib.SSL_OP_DONT_INSERT_EMPTY_FRAGMENTS +OP_CIPHER_SERVER_PREFERENCE = _lib.SSL_OP_CIPHER_SERVER_PREFERENCE 
+OP_TLS_ROLLBACK_BUG = _lib.SSL_OP_TLS_ROLLBACK_BUG +OP_PKCS1_CHECK_1 = _lib.SSL_OP_PKCS1_CHECK_1 +OP_PKCS1_CHECK_2 = _lib.SSL_OP_PKCS1_CHECK_2 +OP_NETSCAPE_CA_DN_BUG = _lib.SSL_OP_NETSCAPE_CA_DN_BUG +OP_NETSCAPE_DEMO_CIPHER_CHANGE_BUG = ( + _lib.SSL_OP_NETSCAPE_DEMO_CIPHER_CHANGE_BUG +) +OP_NO_COMPRESSION = _lib.SSL_OP_NO_COMPRESSION + +OP_NO_QUERY_MTU = _lib.SSL_OP_NO_QUERY_MTU +OP_COOKIE_EXCHANGE = _lib.SSL_OP_COOKIE_EXCHANGE +OP_NO_TICKET = _lib.SSL_OP_NO_TICKET + +OP_ALL = _lib.SSL_OP_ALL + +VERIFY_PEER = _lib.SSL_VERIFY_PEER +VERIFY_FAIL_IF_NO_PEER_CERT = _lib.SSL_VERIFY_FAIL_IF_NO_PEER_CERT +VERIFY_CLIENT_ONCE = _lib.SSL_VERIFY_CLIENT_ONCE +VERIFY_NONE = _lib.SSL_VERIFY_NONE + +SESS_CACHE_OFF = _lib.SSL_SESS_CACHE_OFF +SESS_CACHE_CLIENT = _lib.SSL_SESS_CACHE_CLIENT +SESS_CACHE_SERVER = _lib.SSL_SESS_CACHE_SERVER +SESS_CACHE_BOTH = _lib.SSL_SESS_CACHE_BOTH +SESS_CACHE_NO_AUTO_CLEAR = _lib.SSL_SESS_CACHE_NO_AUTO_CLEAR +SESS_CACHE_NO_INTERNAL_LOOKUP = _lib.SSL_SESS_CACHE_NO_INTERNAL_LOOKUP +SESS_CACHE_NO_INTERNAL_STORE = _lib.SSL_SESS_CACHE_NO_INTERNAL_STORE +SESS_CACHE_NO_INTERNAL = _lib.SSL_SESS_CACHE_NO_INTERNAL + +SSL_ST_CONNECT = _lib.SSL_ST_CONNECT +SSL_ST_ACCEPT = _lib.SSL_ST_ACCEPT +SSL_ST_MASK = _lib.SSL_ST_MASK +if _lib.Cryptography_HAS_SSL_ST: + SSL_ST_INIT = _lib.SSL_ST_INIT + SSL_ST_BEFORE = _lib.SSL_ST_BEFORE + SSL_ST_OK = _lib.SSL_ST_OK + SSL_ST_RENEGOTIATE = _lib.SSL_ST_RENEGOTIATE + __all__.extend([ + 'SSL_ST_INIT', + 'SSL_ST_BEFORE', + 'SSL_ST_OK', + 'SSL_ST_RENEGOTIATE', + ]) + +SSL_CB_LOOP = _lib.SSL_CB_LOOP +SSL_CB_EXIT = _lib.SSL_CB_EXIT +SSL_CB_READ = _lib.SSL_CB_READ +SSL_CB_WRITE = _lib.SSL_CB_WRITE +SSL_CB_ALERT = _lib.SSL_CB_ALERT +SSL_CB_READ_ALERT = _lib.SSL_CB_READ_ALERT +SSL_CB_WRITE_ALERT = _lib.SSL_CB_WRITE_ALERT +SSL_CB_ACCEPT_LOOP = _lib.SSL_CB_ACCEPT_LOOP +SSL_CB_ACCEPT_EXIT = _lib.SSL_CB_ACCEPT_EXIT +SSL_CB_CONNECT_LOOP = _lib.SSL_CB_CONNECT_LOOP +SSL_CB_CONNECT_EXIT = _lib.SSL_CB_CONNECT_EXIT +SSL_CB_HANDSHAKE_START = _lib.SSL_CB_HANDSHAKE_START +SSL_CB_HANDSHAKE_DONE = _lib.SSL_CB_HANDSHAKE_DONE + +# Taken from https://golang.org/src/crypto/x509/root_linux.go +_CERTIFICATE_FILE_LOCATIONS = [ + "/etc/ssl/certs/ca-certificates.crt", # Debian/Ubuntu/Gentoo etc. + "/etc/pki/tls/certs/ca-bundle.crt", # Fedora/RHEL 6 + "/etc/ssl/ca-bundle.pem", # OpenSUSE + "/etc/pki/tls/cacert.pem", # OpenELEC + "/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem", # CentOS/RHEL 7 +] + +_CERTIFICATE_PATH_LOCATIONS = [ + "/etc/ssl/certs", # SLES10/SLES11 +] + +# These values are compared to output from cffi's ffi.string so they must be +# byte strings. +_CRYPTOGRAPHY_MANYLINUX1_CA_DIR = b"/opt/pyca/cryptography/openssl/certs" +_CRYPTOGRAPHY_MANYLINUX1_CA_FILE = b"/opt/pyca/cryptography/openssl/cert.pem" + + +class Error(Exception): + """ + An error occurred in an `OpenSSL.SSL` API. + """ + + +_raise_current_error = partial(_exception_from_error_queue, Error) +_openssl_assert = _make_assert(Error) + + +class WantReadError(Error): + pass + + +class WantWriteError(Error): + pass + + +class WantX509LookupError(Error): + pass + + +class ZeroReturnError(Error): + pass + + +class SysCallError(Error): + pass + + +class _CallbackExceptionHelper(object): + """ + A base class for wrapper classes that allow for intelligent exception + handling in OpenSSL callbacks. + + :ivar list _problems: Any exceptions that occurred while executing in a + context where they could not be raised in the normal way. 
Typically + this is because OpenSSL has called into some Python code and requires a + return value. The exceptions are saved to be raised later when it is + possible to do so. + """ + + def __init__(self): + self._problems = [] + + def raise_if_problem(self): + """ + Raise an exception from the OpenSSL error queue or that was previously + captured whe running a callback. + """ + if self._problems: + try: + _raise_current_error() + except Error: + pass + raise self._problems.pop(0) + + +class _VerifyHelper(_CallbackExceptionHelper): + """ + Wrap a callback such that it can be used as a certificate verification + callback. + """ + + def __init__(self, callback): + _CallbackExceptionHelper.__init__(self) + + @wraps(callback) + def wrapper(ok, store_ctx): + x509 = _lib.X509_STORE_CTX_get_current_cert(store_ctx) + _lib.X509_up_ref(x509) + cert = X509._from_raw_x509_ptr(x509) + error_number = _lib.X509_STORE_CTX_get_error(store_ctx) + error_depth = _lib.X509_STORE_CTX_get_error_depth(store_ctx) + + index = _lib.SSL_get_ex_data_X509_STORE_CTX_idx() + ssl = _lib.X509_STORE_CTX_get_ex_data(store_ctx, index) + connection = Connection._reverse_mapping[ssl] + + try: + result = callback( + connection, cert, error_number, error_depth, ok + ) + except Exception as e: + self._problems.append(e) + return 0 + else: + if result: + _lib.X509_STORE_CTX_set_error(store_ctx, _lib.X509_V_OK) + return 1 + else: + return 0 + + self.callback = _ffi.callback( + "int (*)(int, X509_STORE_CTX *)", wrapper) + + +class _NpnAdvertiseHelper(_CallbackExceptionHelper): + """ + Wrap a callback such that it can be used as an NPN advertisement callback. + """ + + def __init__(self, callback): + _CallbackExceptionHelper.__init__(self) + + @wraps(callback) + def wrapper(ssl, out, outlen, arg): + try: + conn = Connection._reverse_mapping[ssl] + protos = callback(conn) + + # Join the protocols into a Python bytestring, length-prefixing + # each element. + protostr = b''.join( + chain.from_iterable((int2byte(len(p)), p) for p in protos) + ) + + # Save our callback arguments on the connection object. This is + # done to make sure that they don't get freed before OpenSSL + # uses them. Then, return them appropriately in the output + # parameters. + conn._npn_advertise_callback_args = [ + _ffi.new("unsigned int *", len(protostr)), + _ffi.new("unsigned char[]", protostr), + ] + outlen[0] = conn._npn_advertise_callback_args[0][0] + out[0] = conn._npn_advertise_callback_args[1] + return 0 + except Exception as e: + self._problems.append(e) + return 2 # SSL_TLSEXT_ERR_ALERT_FATAL + + self.callback = _ffi.callback( + "int (*)(SSL *, const unsigned char **, unsigned int *, void *)", + wrapper + ) + + +class _NpnSelectHelper(_CallbackExceptionHelper): + """ + Wrap a callback such that it can be used as an NPN selection callback. + """ + + def __init__(self, callback): + _CallbackExceptionHelper.__init__(self) + + @wraps(callback) + def wrapper(ssl, out, outlen, in_, inlen, arg): + try: + conn = Connection._reverse_mapping[ssl] + + # The string passed to us is actually made up of multiple + # length-prefixed bytestrings. We need to split that into a + # list. + instr = _ffi.buffer(in_, inlen)[:] + protolist = [] + while instr: + length = indexbytes(instr, 0) + proto = instr[1:length + 1] + protolist.append(proto) + instr = instr[length + 1:] + + # Call the callback + outstr = callback(conn, protolist) + + # Save our callback arguments on the connection object. This is + # done to make sure that they don't get freed before OpenSSL + # uses them. 
Then, return them appropriately in the output + # parameters. + conn._npn_select_callback_args = [ + _ffi.new("unsigned char *", len(outstr)), + _ffi.new("unsigned char[]", outstr), + ] + outlen[0] = conn._npn_select_callback_args[0][0] + out[0] = conn._npn_select_callback_args[1] + return 0 + except Exception as e: + self._problems.append(e) + return 2 # SSL_TLSEXT_ERR_ALERT_FATAL + + self.callback = _ffi.callback( + ("int (*)(SSL *, unsigned char **, unsigned char *, " + "const unsigned char *, unsigned int, void *)"), + wrapper + ) + + +NO_OVERLAPPING_PROTOCOLS = object() + + +class _ALPNSelectHelper(_CallbackExceptionHelper): + """ + Wrap a callback such that it can be used as an ALPN selection callback. + """ + + def __init__(self, callback): + _CallbackExceptionHelper.__init__(self) + + @wraps(callback) + def wrapper(ssl, out, outlen, in_, inlen, arg): + try: + conn = Connection._reverse_mapping[ssl] + + # The string passed to us is made up of multiple + # length-prefixed bytestrings. We need to split that into a + # list. + instr = _ffi.buffer(in_, inlen)[:] + protolist = [] + while instr: + encoded_len = indexbytes(instr, 0) + proto = instr[1:encoded_len + 1] + protolist.append(proto) + instr = instr[encoded_len + 1:] + + # Call the callback + outbytes = callback(conn, protolist) + any_accepted = True + if outbytes is NO_OVERLAPPING_PROTOCOLS: + outbytes = b'' + any_accepted = False + elif not isinstance(outbytes, _binary_type): + raise TypeError( + "ALPN callback must return a bytestring or the " + "special NO_OVERLAPPING_PROTOCOLS sentinel value." + ) + + # Save our callback arguments on the connection object to make + # sure that they don't get freed before OpenSSL can use them. + # Then, return them in the appropriate output parameters. + conn._alpn_select_callback_args = [ + _ffi.new("unsigned char *", len(outbytes)), + _ffi.new("unsigned char[]", outbytes), + ] + outlen[0] = conn._alpn_select_callback_args[0][0] + out[0] = conn._alpn_select_callback_args[1] + if not any_accepted: + return _lib.SSL_TLSEXT_ERR_NOACK + return _lib.SSL_TLSEXT_ERR_OK + except Exception as e: + self._problems.append(e) + return _lib.SSL_TLSEXT_ERR_ALERT_FATAL + + self.callback = _ffi.callback( + ("int (*)(SSL *, unsigned char **, unsigned char *, " + "const unsigned char *, unsigned int, void *)"), + wrapper + ) + + +class _OCSPServerCallbackHelper(_CallbackExceptionHelper): + """ + Wrap a callback such that it can be used as an OCSP callback for the server + side. + + Annoyingly, OpenSSL defines one OCSP callback but uses it in two different + ways. For servers, that callback is expected to retrieve some OCSP data and + hand it to OpenSSL, and may return only SSL_TLSEXT_ERR_OK, + SSL_TLSEXT_ERR_FATAL, and SSL_TLSEXT_ERR_NOACK. For clients, that callback + is expected to check the OCSP data, and returns a negative value on error, + 0 if the response is not acceptable, or positive if it is. These are + mutually exclusive return code behaviours, and they mean that we need two + helpers so that we always return an appropriate error code if the user's + code throws an exception. + + Given that we have to have two helpers anyway, these helpers are a bit more + helpery than most: specifically, they hide a few more of the OpenSSL + functions so that the user has an easier time writing these callbacks. + + This helper implements the server side. 
+ """ + + def __init__(self, callback): + _CallbackExceptionHelper.__init__(self) + + @wraps(callback) + def wrapper(ssl, cdata): + try: + conn = Connection._reverse_mapping[ssl] + + # Extract the data if any was provided. + if cdata != _ffi.NULL: + data = _ffi.from_handle(cdata) + else: + data = None + + # Call the callback. + ocsp_data = callback(conn, data) + + if not isinstance(ocsp_data, _binary_type): + raise TypeError("OCSP callback must return a bytestring.") + + # If the OCSP data was provided, we will pass it to OpenSSL. + # However, we have an early exit here: if no OCSP data was + # provided we will just exit out and tell OpenSSL that there + # is nothing to do. + if not ocsp_data: + return 3 # SSL_TLSEXT_ERR_NOACK + + # OpenSSL takes ownership of this data and expects it to have + # been allocated by OPENSSL_malloc. + ocsp_data_length = len(ocsp_data) + data_ptr = _lib.OPENSSL_malloc(ocsp_data_length) + _ffi.buffer(data_ptr, ocsp_data_length)[:] = ocsp_data + + _lib.SSL_set_tlsext_status_ocsp_resp( + ssl, data_ptr, ocsp_data_length + ) + + return 0 + except Exception as e: + self._problems.append(e) + return 2 # SSL_TLSEXT_ERR_ALERT_FATAL + + self.callback = _ffi.callback("int (*)(SSL *, void *)", wrapper) + + +class _OCSPClientCallbackHelper(_CallbackExceptionHelper): + """ + Wrap a callback such that it can be used as an OCSP callback for the client + side. + + Annoyingly, OpenSSL defines one OCSP callback but uses it in two different + ways. For servers, that callback is expected to retrieve some OCSP data and + hand it to OpenSSL, and may return only SSL_TLSEXT_ERR_OK, + SSL_TLSEXT_ERR_FATAL, and SSL_TLSEXT_ERR_NOACK. For clients, that callback + is expected to check the OCSP data, and returns a negative value on error, + 0 if the response is not acceptable, or positive if it is. These are + mutually exclusive return code behaviours, and they mean that we need two + helpers so that we always return an appropriate error code if the user's + code throws an exception. + + Given that we have to have two helpers anyway, these helpers are a bit more + helpery than most: specifically, they hide a few more of the OpenSSL + functions so that the user has an easier time writing these callbacks. + + This helper implements the client side. + """ + + def __init__(self, callback): + _CallbackExceptionHelper.__init__(self) + + @wraps(callback) + def wrapper(ssl, cdata): + try: + conn = Connection._reverse_mapping[ssl] + + # Extract the data if any was provided. + if cdata != _ffi.NULL: + data = _ffi.from_handle(cdata) + else: + data = None + + # Get the OCSP data. + ocsp_ptr = _ffi.new("unsigned char **") + ocsp_len = _lib.SSL_get_tlsext_status_ocsp_resp(ssl, ocsp_ptr) + if ocsp_len < 0: + # No OCSP data. + ocsp_data = b'' + else: + # Copy the OCSP data, then pass it to the callback. + ocsp_data = _ffi.buffer(ocsp_ptr[0], ocsp_len)[:] + + valid = callback(conn, ocsp_data, data) + + # Return 1 on success or 0 on error. + return int(bool(valid)) + + except Exception as e: + self._problems.append(e) + # Return negative value if an exception is hit. 
+ return -1 + + self.callback = _ffi.callback("int (*)(SSL *, void *)", wrapper) + + +def _asFileDescriptor(obj): + fd = None + if not isinstance(obj, integer_types): + meth = getattr(obj, "fileno", None) + if meth is not None: + obj = meth() + + if isinstance(obj, integer_types): + fd = obj + + if not isinstance(fd, integer_types): + raise TypeError("argument must be an int, or have a fileno() method.") + elif fd < 0: + raise ValueError( + "file descriptor cannot be a negative integer (%i)" % (fd,)) + + return fd + + +def SSLeay_version(type): + """ + Return a string describing the version of OpenSSL in use. + + :param type: One of the :const:`SSLEAY_` constants defined in this module. + """ + return _ffi.string(_lib.SSLeay_version(type)) + + +def _warn_npn(): + warnings.warn("NPN is deprecated. Protocols should switch to using ALPN.", + DeprecationWarning, stacklevel=3) + + +def _make_requires(flag, error): + """ + Builds a decorator that ensures that functions that rely on OpenSSL + functions that are not present in this build raise NotImplementedError, + rather than AttributeError coming out of cryptography. + + :param flag: A cryptography flag that guards the functions, e.g. + ``Cryptography_HAS_NEXTPROTONEG``. + :param error: The string to be used in the exception if the flag is false. + """ + def _requires_decorator(func): + if not flag: + @wraps(func) + def explode(*args, **kwargs): + raise NotImplementedError(error) + return explode + else: + return func + + return _requires_decorator + + +_requires_npn = _make_requires( + _lib.Cryptography_HAS_NEXTPROTONEG, "NPN not available" +) + + +_requires_alpn = _make_requires( + _lib.Cryptography_HAS_ALPN, "ALPN not available" +) + + +class Session(object): + """ + A class representing an SSL session. A session defines certain connection + parameters which may be re-used to speed up the setup of subsequent + connections. + + .. versionadded:: 0.14 + """ + pass + + +class Context(object): + """ + :class:`OpenSSL.SSL.Context` instances define the parameters for setting + up new SSL connections. + + :param method: One of SSLv2_METHOD, SSLv3_METHOD, SSLv23_METHOD, or + TLSv1_METHOD. + """ + _methods = { + SSLv2_METHOD: "SSLv2_method", + SSLv3_METHOD: "SSLv3_method", + SSLv23_METHOD: "SSLv23_method", + TLSv1_METHOD: "TLSv1_method", + TLSv1_1_METHOD: "TLSv1_1_method", + TLSv1_2_METHOD: "TLSv1_2_method", + } + _methods = dict( + (identifier, getattr(_lib, name)) + for (identifier, name) in _methods.items() + if getattr(_lib, name, None) is not None) + + def __init__(self, method): + if not isinstance(method, integer_types): + raise TypeError("method must be an integer") + + try: + method_func = self._methods[method] + except KeyError: + raise ValueError("No such protocol") + + method_obj = method_func() + _openssl_assert(method_obj != _ffi.NULL) + + context = _lib.SSL_CTX_new(method_obj) + _openssl_assert(context != _ffi.NULL) + context = _ffi.gc(context, _lib.SSL_CTX_free) + + # If SSL_CTX_set_ecdh_auto is available then set it so the ECDH curve + # will be auto-selected. This function was added in 1.0.2 and made a + # noop in 1.1.0+ (where it is set automatically). 
+ try: + res = _lib.SSL_CTX_set_ecdh_auto(context, 1) + _openssl_assert(res == 1) + except AttributeError: + pass + + self._context = context + self._passphrase_helper = None + self._passphrase_callback = None + self._passphrase_userdata = None + self._verify_helper = None + self._verify_callback = None + self._info_callback = None + self._tlsext_servername_callback = None + self._app_data = None + self._npn_advertise_helper = None + self._npn_advertise_callback = None + self._npn_select_helper = None + self._npn_select_callback = None + self._alpn_select_helper = None + self._alpn_select_callback = None + self._ocsp_helper = None + self._ocsp_callback = None + self._ocsp_data = None + + self.set_mode(_lib.SSL_MODE_ENABLE_PARTIAL_WRITE) + + def load_verify_locations(self, cafile, capath=None): + """ + Let SSL know where we can find trusted certificates for the certificate + chain. Note that the certificates have to be in PEM format. + + If capath is passed, it must be a directory prepared using the + ``c_rehash`` tool included with OpenSSL. Either, but not both, of + *pemfile* or *capath* may be :data:`None`. + + :param cafile: In which file we can find the certificates (``bytes`` or + ``unicode``). + :param capath: In which directory we can find the certificates + (``bytes`` or ``unicode``). + + :return: None + """ + if cafile is None: + cafile = _ffi.NULL + else: + cafile = _path_string(cafile) + + if capath is None: + capath = _ffi.NULL + else: + capath = _path_string(capath) + + load_result = _lib.SSL_CTX_load_verify_locations( + self._context, cafile, capath + ) + if not load_result: + _raise_current_error() + + def _wrap_callback(self, callback): + @wraps(callback) + def wrapper(size, verify, userdata): + return callback(size, verify, self._passphrase_userdata) + return _PassphraseHelper( + FILETYPE_PEM, wrapper, more_args=True, truncate=True) + + def set_passwd_cb(self, callback, userdata=None): + """ + Set the passphrase callback. This function will be called + when a private key with a passphrase is loaded. + + :param callback: The Python callback to use. This must accept three + positional arguments. First, an integer giving the maximum length + of the passphrase it may return. If the returned passphrase is + longer than this, it will be truncated. Second, a boolean value + which will be true if the user should be prompted for the + passphrase twice and the callback should verify that the two values + supplied are equal. Third, the value given as the *userdata* + parameter to :meth:`set_passwd_cb`. The *callback* must return + a byte string. If an error occurs, *callback* should return a false + value (e.g. an empty string). + :param userdata: (optional) A Python object which will be given as + argument to the callback + :return: None + """ + if not callable(callback): + raise TypeError("callback must be callable") + + self._passphrase_helper = self._wrap_callback(callback) + self._passphrase_callback = self._passphrase_helper.callback + _lib.SSL_CTX_set_default_passwd_cb( + self._context, self._passphrase_callback) + self._passphrase_userdata = userdata + + def set_default_verify_paths(self): + """ + Specify that the platform provided CA certificates are to be used for + verification purposes. This method has some caveats related to the + binary wheels that cryptography (pyOpenSSL's primary dependency) ships: + + * macOS will only load certificates using this method if the user has + the ``openssl@1.1`` `Homebrew `_ formula installed + in the default location. 
+ * Windows will not work. + * manylinux1 cryptography wheels will work on most common Linux + distributions in pyOpenSSL 17.1.0 and above. pyOpenSSL detects the + manylinux1 wheel and attempts to load roots via a fallback path. + + :return: None + """ + # SSL_CTX_set_default_verify_paths will attempt to load certs from + # both a cafile and capath that are set at compile time. However, + # it will first check environment variables and, if present, load + # those paths instead + set_result = _lib.SSL_CTX_set_default_verify_paths(self._context) + _openssl_assert(set_result == 1) + # After attempting to set default_verify_paths we need to know whether + # to go down the fallback path. + # First we'll check to see if any env vars have been set. If so, + # we won't try to do anything else because the user has set the path + # themselves. + dir_env_var = _ffi.string( + _lib.X509_get_default_cert_dir_env() + ).decode("ascii") + file_env_var = _ffi.string( + _lib.X509_get_default_cert_file_env() + ).decode("ascii") + if not self._check_env_vars_set(dir_env_var, file_env_var): + default_dir = _ffi.string(_lib.X509_get_default_cert_dir()) + default_file = _ffi.string(_lib.X509_get_default_cert_file()) + # Now we check to see if the default_dir and default_file are set + # to the exact values we use in our manylinux1 builds. If they are + # then we know to load the fallbacks + if ( + default_dir == _CRYPTOGRAPHY_MANYLINUX1_CA_DIR and + default_file == _CRYPTOGRAPHY_MANYLINUX1_CA_FILE + ): + # This is manylinux1, let's load our fallback paths + self._fallback_default_verify_paths( + _CERTIFICATE_FILE_LOCATIONS, + _CERTIFICATE_PATH_LOCATIONS + ) + + def _check_env_vars_set(self, dir_env_var, file_env_var): + """ + Check to see if the default cert dir/file environment vars are present. + + :return: bool + """ + return ( + os.environ.get(file_env_var) is not None or + os.environ.get(dir_env_var) is not None + ) + + def _fallback_default_verify_paths(self, file_path, dir_path): + """ + Default verify paths are based on the compiled version of OpenSSL. + However, when pyca/cryptography is compiled as a manylinux1 wheel + that compiled location can potentially be wrong. So, like Go, we + will try a predefined set of paths and attempt to load roots + from there. + + :return: None + """ + for cafile in file_path: + if os.path.isfile(cafile): + self.load_verify_locations(cafile) + break + + for capath in dir_path: + if os.path.isdir(capath): + self.load_verify_locations(None, capath) + break + + def use_certificate_chain_file(self, certfile): + """ + Load a certificate chain from a file. + + :param certfile: The name of the certificate chain file (``bytes`` or + ``unicode``). Must be PEM encoded. + + :return: None + """ + certfile = _path_string(certfile) + + result = _lib.SSL_CTX_use_certificate_chain_file( + self._context, certfile + ) + if not result: + _raise_current_error() + + def use_certificate_file(self, certfile, filetype=FILETYPE_PEM): + """ + Load a certificate from a file + + :param certfile: The name of the certificate file (``bytes`` or + ``unicode``). + :param filetype: (optional) The encoding of the file, which is either + :const:`FILETYPE_PEM` or :const:`FILETYPE_ASN1`. The default is + :const:`FILETYPE_PEM`. 
+ + :return: None + """ + certfile = _path_string(certfile) + if not isinstance(filetype, integer_types): + raise TypeError("filetype must be an integer") + + use_result = _lib.SSL_CTX_use_certificate_file( + self._context, certfile, filetype + ) + if not use_result: + _raise_current_error() + + def use_certificate(self, cert): + """ + Load a certificate from a X509 object + + :param cert: The X509 object + :return: None + """ + if not isinstance(cert, X509): + raise TypeError("cert must be an X509 instance") + + use_result = _lib.SSL_CTX_use_certificate(self._context, cert._x509) + if not use_result: + _raise_current_error() + + def add_extra_chain_cert(self, certobj): + """ + Add certificate to chain + + :param certobj: The X509 certificate object to add to the chain + :return: None + """ + if not isinstance(certobj, X509): + raise TypeError("certobj must be an X509 instance") + + copy = _lib.X509_dup(certobj._x509) + add_result = _lib.SSL_CTX_add_extra_chain_cert(self._context, copy) + if not add_result: + # TODO: This is untested. + _lib.X509_free(copy) + _raise_current_error() + + def _raise_passphrase_exception(self): + if self._passphrase_helper is not None: + self._passphrase_helper.raise_if_problem(Error) + + _raise_current_error() + + def use_privatekey_file(self, keyfile, filetype=_UNSPECIFIED): + """ + Load a private key from a file + + :param keyfile: The name of the key file (``bytes`` or ``unicode``) + :param filetype: (optional) The encoding of the file, which is either + :const:`FILETYPE_PEM` or :const:`FILETYPE_ASN1`. The default is + :const:`FILETYPE_PEM`. + + :return: None + """ + keyfile = _path_string(keyfile) + + if filetype is _UNSPECIFIED: + filetype = FILETYPE_PEM + elif not isinstance(filetype, integer_types): + raise TypeError("filetype must be an integer") + + use_result = _lib.SSL_CTX_use_PrivateKey_file( + self._context, keyfile, filetype) + if not use_result: + self._raise_passphrase_exception() + + def use_privatekey(self, pkey): + """ + Load a private key from a PKey object + + :param pkey: The PKey object + :return: None + """ + if not isinstance(pkey, PKey): + raise TypeError("pkey must be a PKey instance") + + use_result = _lib.SSL_CTX_use_PrivateKey(self._context, pkey._pkey) + if not use_result: + self._raise_passphrase_exception() + + def check_privatekey(self): + """ + Check if the private key (loaded with :meth:`use_privatekey`) matches + the certificate (loaded with :meth:`use_certificate`) + + :return: :data:`None` (raises :exc:`Error` if something's wrong) + """ + if not _lib.SSL_CTX_check_private_key(self._context): + _raise_current_error() + + def load_client_ca(self, cafile): + """ + Load the trusted certificates that will be sent to the client. Does + not actually imply any of the certificates are trusted; that must be + configured separately. + + :param bytes cafile: The path to a certificates file in PEM format. + :return: None + """ + ca_list = _lib.SSL_load_client_CA_file( + _text_to_bytes_and_warn("cafile", cafile) + ) + _openssl_assert(ca_list != _ffi.NULL) + _lib.SSL_CTX_set_client_CA_list(self._context, ca_list) + + def set_session_id(self, buf): + """ + Set the session id to *buf* within which a session can be reused for + this Context object. This is needed when doing session resumption, + because there is no way for a stored session to know which Context + object it is associated with. + + :param bytes buf: The session id. 
+ + :returns: None + """ + buf = _text_to_bytes_and_warn("buf", buf) + _openssl_assert( + _lib.SSL_CTX_set_session_id_context( + self._context, + buf, + len(buf), + ) == 1 + ) + + def set_session_cache_mode(self, mode): + """ + Set the behavior of the session cache used by all connections using + this Context. The previously set mode is returned. See + :const:`SESS_CACHE_*` for details about particular modes. + + :param mode: One or more of the SESS_CACHE_* flags (combine using + bitwise or) + :returns: The previously set caching mode. + + .. versionadded:: 0.14 + """ + if not isinstance(mode, integer_types): + raise TypeError("mode must be an integer") + + return _lib.SSL_CTX_set_session_cache_mode(self._context, mode) + + def get_session_cache_mode(self): + """ + Get the current session cache mode. + + :returns: The currently used cache mode. + + .. versionadded:: 0.14 + """ + return _lib.SSL_CTX_get_session_cache_mode(self._context) + + def set_verify(self, mode, callback): + """ + et the verification flags for this Context object to *mode* and specify + that *callback* should be used for verification callbacks. + + :param mode: The verify mode, this should be one of + :const:`VERIFY_NONE` and :const:`VERIFY_PEER`. If + :const:`VERIFY_PEER` is used, *mode* can be OR:ed with + :const:`VERIFY_FAIL_IF_NO_PEER_CERT` and + :const:`VERIFY_CLIENT_ONCE` to further control the behaviour. + :param callback: The Python callback to use. This should take five + arguments: A Connection object, an X509 object, and three integer + variables, which are in turn potential error number, error depth + and return code. *callback* should return True if verification + passes and False otherwise. + :return: None + + See SSL_CTX_set_verify(3SSL) for further details. + """ + if not isinstance(mode, integer_types): + raise TypeError("mode must be an integer") + + if not callable(callback): + raise TypeError("callback must be callable") + + self._verify_helper = _VerifyHelper(callback) + self._verify_callback = self._verify_helper.callback + _lib.SSL_CTX_set_verify(self._context, mode, self._verify_callback) + + def set_verify_depth(self, depth): + """ + Set the maximum depth for the certificate chain verification that shall + be allowed for this Context object. + + :param depth: An integer specifying the verify depth + :return: None + """ + if not isinstance(depth, integer_types): + raise TypeError("depth must be an integer") + + _lib.SSL_CTX_set_verify_depth(self._context, depth) + + def get_verify_mode(self): + """ + Retrieve the Context object's verify mode, as set by + :meth:`set_verify`. + + :return: The verify mode + """ + return _lib.SSL_CTX_get_verify_mode(self._context) + + def get_verify_depth(self): + """ + Retrieve the Context object's verify depth, as set by + :meth:`set_verify_depth`. + + :return: The verify depth + """ + return _lib.SSL_CTX_get_verify_depth(self._context) + + def load_tmp_dh(self, dhfile): + """ + Load parameters for Ephemeral Diffie-Hellman + + :param dhfile: The file to load EDH parameters from (``bytes`` or + ``unicode``). + + :return: None + """ + dhfile = _path_string(dhfile) + + bio = _lib.BIO_new_file(dhfile, b"r") + if bio == _ffi.NULL: + _raise_current_error() + bio = _ffi.gc(bio, _lib.BIO_free) + + dh = _lib.PEM_read_bio_DHparams(bio, _ffi.NULL, _ffi.NULL, _ffi.NULL) + dh = _ffi.gc(dh, _lib.DH_free) + _lib.SSL_CTX_set_tmp_dh(self._context, dh) + + def set_tmp_ecdh(self, curve): + """ + Select a curve to use for ECDHE key exchange. 
+ + :param curve: A curve object to use as returned by either + :meth:`OpenSSL.crypto.get_elliptic_curve` or + :meth:`OpenSSL.crypto.get_elliptic_curves`. + + :return: None + """ + _lib.SSL_CTX_set_tmp_ecdh(self._context, curve._to_EC_KEY()) + + def set_cipher_list(self, cipher_list): + """ + Set the list of ciphers to be used in this context. + + See the OpenSSL manual for more information (e.g. + :manpage:`ciphers(1)`). + + :param bytes cipher_list: An OpenSSL cipher string. + :return: None + """ + cipher_list = _text_to_bytes_and_warn("cipher_list", cipher_list) + + if not isinstance(cipher_list, bytes): + raise TypeError("cipher_list must be a byte string.") + + _openssl_assert( + _lib.SSL_CTX_set_cipher_list(self._context, cipher_list) == 1 + ) + # In OpenSSL 1.1.1 setting the cipher list will always return TLS 1.3 + # ciphers even if you pass an invalid cipher. Applications (like + # Twisted) have tests that depend on an error being raised if an + # invalid cipher string is passed, but without the following check + # for the TLS 1.3 specific cipher suites it would never error. + tmpconn = Connection(self, None) + if ( + tmpconn.get_cipher_list() == [ + 'TLS_AES_256_GCM_SHA384', + 'TLS_CHACHA20_POLY1305_SHA256', + 'TLS_AES_128_GCM_SHA256' + ] + ): + raise Error( + [ + ( + 'SSL routines', + 'SSL_CTX_set_cipher_list', + 'no cipher match', + ), + ], + ) + + def set_client_ca_list(self, certificate_authorities): + """ + Set the list of preferred client certificate signers for this server + context. + + This list of certificate authorities will be sent to the client when + the server requests a client certificate. + + :param certificate_authorities: a sequence of X509Names. + :return: None + + .. versionadded:: 0.10 + """ + name_stack = _lib.sk_X509_NAME_new_null() + _openssl_assert(name_stack != _ffi.NULL) + + try: + for ca_name in certificate_authorities: + if not isinstance(ca_name, X509Name): + raise TypeError( + "client CAs must be X509Name objects, not %s " + "objects" % ( + type(ca_name).__name__, + ) + ) + copy = _lib.X509_NAME_dup(ca_name._name) + _openssl_assert(copy != _ffi.NULL) + push_result = _lib.sk_X509_NAME_push(name_stack, copy) + if not push_result: + _lib.X509_NAME_free(copy) + _raise_current_error() + except Exception: + _lib.sk_X509_NAME_free(name_stack) + raise + + _lib.SSL_CTX_set_client_CA_list(self._context, name_stack) + + def add_client_ca(self, certificate_authority): + """ + Add the CA certificate to the list of preferred signers for this + context. + + The list of certificate authorities will be sent to the client when the + server requests a client certificate. + + :param certificate_authority: certificate authority's X509 certificate. + :return: None + + .. versionadded:: 0.10 + """ + if not isinstance(certificate_authority, X509): + raise TypeError("certificate_authority must be an X509 instance") + + add_result = _lib.SSL_CTX_add_client_CA( + self._context, certificate_authority._x509) + _openssl_assert(add_result == 1) + + def set_timeout(self, timeout): + """ + Set the timeout for newly created sessions for this Context object to + *timeout*. The default value is 300 seconds. See the OpenSSL manual + for more information (e.g. :manpage:`SSL_CTX_set_timeout(3)`). 
+ + :param timeout: The timeout in (whole) seconds + :return: The previous session timeout + """ + if not isinstance(timeout, integer_types): + raise TypeError("timeout must be an integer") + + return _lib.SSL_CTX_set_timeout(self._context, timeout) + + def get_timeout(self): + """ + Retrieve session timeout, as set by :meth:`set_timeout`. The default + is 300 seconds. + + :return: The session timeout + """ + return _lib.SSL_CTX_get_timeout(self._context) + + def set_info_callback(self, callback): + """ + Set the information callback to *callback*. This function will be + called from time to time during SSL handshakes. + + :param callback: The Python callback to use. This should take three + arguments: a Connection object and two integers. The first integer + specifies where in the SSL handshake the function was called, and + the other the return code from a (possibly failed) internal + function call. + :return: None + """ + @wraps(callback) + def wrapper(ssl, where, return_code): + callback(Connection._reverse_mapping[ssl], where, return_code) + self._info_callback = _ffi.callback( + "void (*)(const SSL *, int, int)", wrapper) + _lib.SSL_CTX_set_info_callback(self._context, self._info_callback) + + def get_app_data(self): + """ + Get the application data (supplied via :meth:`set_app_data()`) + + :return: The application data + """ + return self._app_data + + def set_app_data(self, data): + """ + Set the application data (will be returned from get_app_data()) + + :param data: Any Python object + :return: None + """ + self._app_data = data + + def get_cert_store(self): + """ + Get the certificate store for the context. This can be used to add + "trusted" certificates without using the + :meth:`load_verify_locations` method. + + :return: A X509Store object or None if it does not have one. + """ + store = _lib.SSL_CTX_get_cert_store(self._context) + if store == _ffi.NULL: + # TODO: This is untested. + return None + + pystore = X509Store.__new__(X509Store) + pystore._store = store + return pystore + + def set_options(self, options): + """ + Add options. Options set before are not cleared! + This method should be used with the :const:`OP_*` constants. + + :param options: The options to add. + :return: The new option bitmask. + """ + if not isinstance(options, integer_types): + raise TypeError("options must be an integer") + + return _lib.SSL_CTX_set_options(self._context, options) + + def set_mode(self, mode): + """ + Add modes via bitmask. Modes set before are not cleared! This method + should be used with the :const:`MODE_*` constants. + + :param mode: The mode to add. + :return: The new mode bitmask. + """ + if not isinstance(mode, integer_types): + raise TypeError("mode must be an integer") + + return _lib.SSL_CTX_set_mode(self._context, mode) + + def set_tlsext_servername_callback(self, callback): + """ + Specify a callback function to be called when clients specify a server + name. + + :param callback: The callback function. It will be invoked with one + argument, the Connection instance. + + .. versionadded:: 0.13 + """ + @wraps(callback) + def wrapper(ssl, alert, arg): + callback(Connection._reverse_mapping[ssl]) + return 0 + + self._tlsext_servername_callback = _ffi.callback( + "int (*)(SSL *, int *, void *)", wrapper) + _lib.SSL_CTX_set_tlsext_servername_callback( + self._context, self._tlsext_servername_callback) + + def set_tlsext_use_srtp(self, profiles): + """ + Enable support for negotiating SRTP keying material. 
+ + :param bytes profiles: A colon delimited list of protection profile + names, like ``b'SRTP_AES128_CM_SHA1_80:SRTP_AES128_CM_SHA1_32'``. + :return: None + """ + if not isinstance(profiles, bytes): + raise TypeError("profiles must be a byte string.") + + _openssl_assert( + _lib.SSL_CTX_set_tlsext_use_srtp(self._context, profiles) == 0 + ) + + @_requires_npn + def set_npn_advertise_callback(self, callback): + """ + Specify a callback function that will be called when offering `Next + Protocol Negotiation + `_ as a server. + + :param callback: The callback function. It will be invoked with one + argument, the :class:`Connection` instance. It should return a + list of bytestrings representing the advertised protocols, like + ``[b'http/1.1', b'spdy/2']``. + + .. versionadded:: 0.15 + """ + _warn_npn() + self._npn_advertise_helper = _NpnAdvertiseHelper(callback) + self._npn_advertise_callback = self._npn_advertise_helper.callback + _lib.SSL_CTX_set_next_protos_advertised_cb( + self._context, self._npn_advertise_callback, _ffi.NULL) + + @_requires_npn + def set_npn_select_callback(self, callback): + """ + Specify a callback function that will be called when a server offers + Next Protocol Negotiation options. + + :param callback: The callback function. It will be invoked with two + arguments: the Connection, and a list of offered protocols as + bytestrings, e.g. ``[b'http/1.1', b'spdy/2']``. It should return + one of those bytestrings, the chosen protocol. + + .. versionadded:: 0.15 + """ + _warn_npn() + self._npn_select_helper = _NpnSelectHelper(callback) + self._npn_select_callback = self._npn_select_helper.callback + _lib.SSL_CTX_set_next_proto_select_cb( + self._context, self._npn_select_callback, _ffi.NULL) + + @_requires_alpn + def set_alpn_protos(self, protos): + """ + Specify the protocols that the client is prepared to speak after the + TLS connection has been negotiated using Application Layer Protocol + Negotiation. + + :param protos: A list of the protocols to be offered to the server. + This list should be a Python list of bytestrings representing the + protocols to offer, e.g. ``[b'http/1.1', b'spdy/2']``. + """ + # Take the list of protocols and join them together, prefixing them + # with their lengths. + protostr = b''.join( + chain.from_iterable((int2byte(len(p)), p) for p in protos) + ) + + # Build a C string from the list. We don't need to save this off + # because OpenSSL immediately copies the data out. + input_str = _ffi.new("unsigned char[]", protostr) + _lib.SSL_CTX_set_alpn_protos(self._context, input_str, len(protostr)) + + @_requires_alpn + def set_alpn_select_callback(self, callback): + """ + Specify a callback function that will be called on the server when a + client offers protocols using ALPN. + + :param callback: The callback function. It will be invoked with two + arguments: the Connection, and a list of offered protocols as + bytestrings, e.g ``[b'http/1.1', b'spdy/2']``. It can return + one of those bytestrings to indicate the chosen protocol, the + empty bytestring to terminate the TLS connection, or the + :py:obj:`NO_OVERLAPPING_PROTOCOLS` to indicate that no offered + protocol was selected, but that the connection should not be + aborted. 
+ """ + self._alpn_select_helper = _ALPNSelectHelper(callback) + self._alpn_select_callback = self._alpn_select_helper.callback + _lib.SSL_CTX_set_alpn_select_cb( + self._context, self._alpn_select_callback, _ffi.NULL) + + def _set_ocsp_callback(self, helper, data): + """ + This internal helper does the common work for + ``set_ocsp_server_callback`` and ``set_ocsp_client_callback``, which is + almost all of it. + """ + self._ocsp_helper = helper + self._ocsp_callback = helper.callback + if data is None: + self._ocsp_data = _ffi.NULL + else: + self._ocsp_data = _ffi.new_handle(data) + + rc = _lib.SSL_CTX_set_tlsext_status_cb( + self._context, self._ocsp_callback + ) + _openssl_assert(rc == 1) + rc = _lib.SSL_CTX_set_tlsext_status_arg(self._context, self._ocsp_data) + _openssl_assert(rc == 1) + + def set_ocsp_server_callback(self, callback, data=None): + """ + Set a callback to provide OCSP data to be stapled to the TLS handshake + on the server side. + + :param callback: The callback function. It will be invoked with two + arguments: the Connection, and the optional arbitrary data you have + provided. The callback must return a bytestring that contains the + OCSP data to staple to the handshake. If no OCSP data is available + for this connection, return the empty bytestring. + :param data: Some opaque data that will be passed into the callback + function when called. This can be used to avoid needing to do + complex data lookups or to keep track of what context is being + used. This parameter is optional. + """ + helper = _OCSPServerCallbackHelper(callback) + self._set_ocsp_callback(helper, data) + + def set_ocsp_client_callback(self, callback, data=None): + """ + Set a callback to validate OCSP data stapled to the TLS handshake on + the client side. + + :param callback: The callback function. It will be invoked with three + arguments: the Connection, a bytestring containing the stapled OCSP + assertion, and the optional arbitrary data you have provided. The + callback must return a boolean that indicates the result of + validating the OCSP data: ``True`` if the OCSP data is valid and + the certificate can be trusted, or ``False`` if either the OCSP + data is invalid or the certificate has been revoked. + :param data: Some opaque data that will be passed into the callback + function when called. This can be used to avoid needing to do + complex data lookups or to keep track of what context is being + used. This parameter is optional. + """ + helper = _OCSPClientCallbackHelper(callback) + self._set_ocsp_callback(helper, data) + + +class Connection(object): + """ + """ + _reverse_mapping = WeakValueDictionary() + + def __init__(self, context, socket=None): + """ + Create a new Connection object, using the given OpenSSL.SSL.Context + instance and socket. + + :param context: An SSL Context to use for this connection + :param socket: The socket to use for transport layer + """ + if not isinstance(context, Context): + raise TypeError("context must be a Context instance") + + ssl = _lib.SSL_new(context._context) + self._ssl = _ffi.gc(ssl, _lib.SSL_free) + # We set SSL_MODE_AUTO_RETRY to handle situations where OpenSSL returns + # an SSL_ERROR_WANT_READ when processing a non-application data packet + # even though there is still data on the underlying transport. + # See https://github.com/openssl/openssl/issues/6234 for more details. 
+ _lib.SSL_set_mode(self._ssl, _lib.SSL_MODE_AUTO_RETRY) + self._context = context + self._app_data = None + + # References to strings used for Next Protocol Negotiation. OpenSSL's + # header files suggest that these might get copied at some point, but + # doesn't specify when, so we store them here to make sure they don't + # get freed before OpenSSL uses them. + self._npn_advertise_callback_args = None + self._npn_select_callback_args = None + + # References to strings used for Application Layer Protocol + # Negotiation. These strings get copied at some point but it's well + # after the callback returns, so we have to hang them somewhere to + # avoid them getting freed. + self._alpn_select_callback_args = None + + self._reverse_mapping[self._ssl] = self + + if socket is None: + self._socket = None + # Don't set up any gc for these, SSL_free will take care of them. + self._into_ssl = _lib.BIO_new(_lib.BIO_s_mem()) + _openssl_assert(self._into_ssl != _ffi.NULL) + + self._from_ssl = _lib.BIO_new(_lib.BIO_s_mem()) + _openssl_assert(self._from_ssl != _ffi.NULL) + + _lib.SSL_set_bio(self._ssl, self._into_ssl, self._from_ssl) + else: + self._into_ssl = None + self._from_ssl = None + self._socket = socket + set_result = _lib.SSL_set_fd( + self._ssl, _asFileDescriptor(self._socket)) + _openssl_assert(set_result == 1) + + def __getattr__(self, name): + """ + Look up attributes on the wrapped socket object if they are not found + on the Connection object. + """ + if self._socket is None: + raise AttributeError("'%s' object has no attribute '%s'" % ( + self.__class__.__name__, name + )) + else: + return getattr(self._socket, name) + + def _raise_ssl_error(self, ssl, result): + if self._context._verify_helper is not None: + self._context._verify_helper.raise_if_problem() + if self._context._npn_advertise_helper is not None: + self._context._npn_advertise_helper.raise_if_problem() + if self._context._npn_select_helper is not None: + self._context._npn_select_helper.raise_if_problem() + if self._context._alpn_select_helper is not None: + self._context._alpn_select_helper.raise_if_problem() + if self._context._ocsp_helper is not None: + self._context._ocsp_helper.raise_if_problem() + + error = _lib.SSL_get_error(ssl, result) + if error == _lib.SSL_ERROR_WANT_READ: + raise WantReadError() + elif error == _lib.SSL_ERROR_WANT_WRITE: + raise WantWriteError() + elif error == _lib.SSL_ERROR_ZERO_RETURN: + raise ZeroReturnError() + elif error == _lib.SSL_ERROR_WANT_X509_LOOKUP: + # TODO: This is untested. + raise WantX509LookupError() + elif error == _lib.SSL_ERROR_SYSCALL: + if _lib.ERR_peek_error() == 0: + if result < 0: + if platform == "win32": + errno = _ffi.getwinerror()[0] + else: + errno = _ffi.errno + + if errno != 0: + raise SysCallError(errno, errorcode.get(errno)) + raise SysCallError(-1, "Unexpected EOF") + else: + # TODO: This is untested. + _raise_current_error() + elif error == _lib.SSL_ERROR_NONE: + pass + else: + _raise_current_error() + + def get_context(self): + """ + Retrieve the :class:`Context` object associated with this + :class:`Connection`. + """ + return self._context + + def set_context(self, context): + """ + Switch this connection to a new session context. + + :param context: A :class:`Context` instance giving the new session + context to use. 
+ """ + if not isinstance(context, Context): + raise TypeError("context must be a Context instance") + + _lib.SSL_set_SSL_CTX(self._ssl, context._context) + self._context = context + + def get_servername(self): + """ + Retrieve the servername extension value if provided in the client hello + message, or None if there wasn't one. + + :return: A byte string giving the server name or :data:`None`. + + .. versionadded:: 0.13 + """ + name = _lib.SSL_get_servername( + self._ssl, _lib.TLSEXT_NAMETYPE_host_name + ) + if name == _ffi.NULL: + return None + + return _ffi.string(name) + + def set_tlsext_host_name(self, name): + """ + Set the value of the servername extension to send in the client hello. + + :param name: A byte string giving the name. + + .. versionadded:: 0.13 + """ + if not isinstance(name, bytes): + raise TypeError("name must be a byte string") + elif b"\0" in name: + raise TypeError("name must not contain NUL byte") + + # XXX I guess this can fail sometimes? + _lib.SSL_set_tlsext_host_name(self._ssl, name) + + def pending(self): + """ + Get the number of bytes that can be safely read from the SSL buffer + (**not** the underlying transport buffer). + + :return: The number of bytes available in the receive buffer. + """ + return _lib.SSL_pending(self._ssl) + + def send(self, buf, flags=0): + """ + Send data on the connection. NOTE: If you get one of the WantRead, + WantWrite or WantX509Lookup exceptions on this, you have to call the + method again with the SAME buffer. + + :param buf: The string, buffer or memoryview to send + :param flags: (optional) Included for compatibility with the socket + API, the value is ignored + :return: The number of bytes written + """ + # Backward compatibility + buf = _text_to_bytes_and_warn("buf", buf) + + with _from_buffer(buf) as data: + # check len(buf) instead of len(data) for testability + if len(buf) > 2147483647: + raise ValueError( + "Cannot send more than 2**31-1 bytes at once." + ) + + result = _lib.SSL_write(self._ssl, data, len(data)) + self._raise_ssl_error(self._ssl, result) + + return result + + write = send + + def sendall(self, buf, flags=0): + """ + Send "all" data on the connection. This calls send() repeatedly until + all data is sent. If an error occurs, it's impossible to tell how much + data has been sent. + + :param buf: The string, buffer or memoryview to send + :param flags: (optional) Included for compatibility with the socket + API, the value is ignored + :return: The number of bytes written + """ + buf = _text_to_bytes_and_warn("buf", buf) + + with _from_buffer(buf) as data: + + left_to_send = len(buf) + total_sent = 0 + + while left_to_send: + # SSL_write's num arg is an int, + # so we cannot send more than 2**31-1 bytes at once. + result = _lib.SSL_write( + self._ssl, + data + total_sent, + min(left_to_send, 2147483647) + ) + self._raise_ssl_error(self._ssl, result) + total_sent += result + left_to_send -= result + + return total_sent + + def recv(self, bufsiz, flags=None): + """ + Receive data on the connection. + + :param bufsiz: The maximum number of bytes to read + :param flags: (optional) The only supported flag is ``MSG_PEEK``, + all other flags are ignored. 
+ :return: The string read from the Connection + """ + buf = _no_zero_allocator("char[]", bufsiz) + if flags is not None and flags & socket.MSG_PEEK: + result = _lib.SSL_peek(self._ssl, buf, bufsiz) + else: + result = _lib.SSL_read(self._ssl, buf, bufsiz) + self._raise_ssl_error(self._ssl, result) + return _ffi.buffer(buf, result)[:] + read = recv + + def recv_into(self, buffer, nbytes=None, flags=None): + """ + Receive data on the connection and copy it directly into the provided + buffer, rather than creating a new string. + + :param buffer: The buffer to copy into. + :param nbytes: (optional) The maximum number of bytes to read into the + buffer. If not present, defaults to the size of the buffer. If + larger than the size of the buffer, is reduced to the size of the + buffer. + :param flags: (optional) The only supported flag is ``MSG_PEEK``, + all other flags are ignored. + :return: The number of bytes read into the buffer. + """ + if nbytes is None: + nbytes = len(buffer) + else: + nbytes = min(nbytes, len(buffer)) + + # We need to create a temporary buffer. This is annoying, it would be + # better if we could pass memoryviews straight into the SSL_read call, + # but right now we can't. Revisit this if CFFI gets that ability. + buf = _no_zero_allocator("char[]", nbytes) + if flags is not None and flags & socket.MSG_PEEK: + result = _lib.SSL_peek(self._ssl, buf, nbytes) + else: + result = _lib.SSL_read(self._ssl, buf, nbytes) + self._raise_ssl_error(self._ssl, result) + + # This strange line is all to avoid a memory copy. The buffer protocol + # should allow us to assign a CFFI buffer to the LHS of this line, but + # on CPython 3.3+ that segfaults. As a workaround, we can temporarily + # wrap it in a memoryview. + buffer[:result] = memoryview(_ffi.buffer(buf, result)) + + return result + + def _handle_bio_errors(self, bio, result): + if _lib.BIO_should_retry(bio): + if _lib.BIO_should_read(bio): + raise WantReadError() + elif _lib.BIO_should_write(bio): + # TODO: This is untested. + raise WantWriteError() + elif _lib.BIO_should_io_special(bio): + # TODO: This is untested. I think io_special means the socket + # BIO has a not-yet connected socket. + raise ValueError("BIO_should_io_special") + else: + # TODO: This is untested. + raise ValueError("unknown bio failure") + else: + # TODO: This is untested. + _raise_current_error() + + def bio_read(self, bufsiz): + """ + If the Connection was created with a memory BIO, this method can be + used to read bytes from the write end of that memory BIO. Many + Connection methods will add bytes which must be read in this manner or + the buffer will eventually fill up and the Connection will be able to + take no further actions. + + :param bufsiz: The maximum number of bytes to read + :return: The string read. + """ + if self._from_ssl is None: + raise TypeError("Connection sock was not None") + + if not isinstance(bufsiz, integer_types): + raise TypeError("bufsiz must be an integer") + + buf = _no_zero_allocator("char[]", bufsiz) + result = _lib.BIO_read(self._from_ssl, buf, bufsiz) + if result <= 0: + self._handle_bio_errors(self._from_ssl, result) + + return _ffi.buffer(buf, result)[:] + + def bio_write(self, buf): + """ + If the Connection was created with a memory BIO, this method can be + used to add bytes to the read end of that memory BIO. The Connection + can then read the bytes (for example, in response to a call to + :meth:`recv`). + + :param buf: The string to put into the memory BIO. 
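# --- Illustrative sketch (not part of the patched module): the memory-BIO mode shown
# --- above. With sock=None the Connection exchanges TLS records through
# --- bio_write()/bio_read() instead of a socket, so this runs with no peer at all.
from OpenSSL import SSL

ctx = SSL.Context(SSL.SSLv23_METHOD)
conn = SSL.Connection(ctx, None)      # memory-BIO mode
conn.set_connect_state()

try:
    conn.do_handshake()               # no peer yet, so the handshake cannot complete
except SSL.WantReadError:
    pass

client_hello = conn.bio_read(4096)    # TLS records the client wants to transmit
print(len(client_hello), conn.want_read())
# Bytes received from the peer would be fed back in with conn.bio_write(data).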
+ :return: The number of bytes written + """ + buf = _text_to_bytes_and_warn("buf", buf) + + if self._into_ssl is None: + raise TypeError("Connection sock was not None") + + with _from_buffer(buf) as data: + result = _lib.BIO_write(self._into_ssl, data, len(data)) + if result <= 0: + self._handle_bio_errors(self._into_ssl, result) + return result + + def renegotiate(self): + """ + Renegotiate the session. + + :return: True if the renegotiation can be started, False otherwise + :rtype: bool + """ + if not self.renegotiate_pending(): + _openssl_assert(_lib.SSL_renegotiate(self._ssl) == 1) + return True + return False + + def do_handshake(self): + """ + Perform an SSL handshake (usually called after :meth:`renegotiate` or + one of :meth:`set_accept_state` or :meth:`set_connect_state`). This can + raise the same exceptions as :meth:`send` and :meth:`recv`. + + :return: None. + """ + result = _lib.SSL_do_handshake(self._ssl) + self._raise_ssl_error(self._ssl, result) + + def renegotiate_pending(self): + """ + Check if there's a renegotiation in progress, it will return False once + a renegotiation is finished. + + :return: Whether there's a renegotiation in progress + :rtype: bool + """ + return _lib.SSL_renegotiate_pending(self._ssl) == 1 + + def total_renegotiations(self): + """ + Find out the total number of renegotiations. + + :return: The number of renegotiations. + :rtype: int + """ + return _lib.SSL_total_renegotiations(self._ssl) + + def connect(self, addr): + """ + Call the :meth:`connect` method of the underlying socket and set up SSL + on the socket, using the :class:`Context` object supplied to this + :class:`Connection` object at creation. + + :param addr: A remote address + :return: What the socket's connect method returns + """ + _lib.SSL_set_connect_state(self._ssl) + return self._socket.connect(addr) + + def connect_ex(self, addr): + """ + Call the :meth:`connect_ex` method of the underlying socket and set up + SSL on the socket, using the Context object supplied to this Connection + object at creation. Note that if the :meth:`connect_ex` method of the + socket doesn't return 0, SSL won't be initialized. + + :param addr: A remove address + :return: What the socket's connect_ex method returns + """ + connect_ex = self._socket.connect_ex + self.set_connect_state() + return connect_ex(addr) + + def accept(self): + """ + Call the :meth:`accept` method of the underlying socket and set up SSL + on the returned socket, using the Context object supplied to this + :class:`Connection` object at creation. + + :return: A *(conn, addr)* pair where *conn* is the new + :class:`Connection` object created, and *address* is as returned by + the socket's :meth:`accept`. + """ + client, addr = self._socket.accept() + conn = Connection(self._context, client) + conn.set_accept_state() + return (conn, addr) + + def bio_shutdown(self): + """ + If the Connection was created with a memory BIO, this method can be + used to indicate that *end of file* has been reached on the read end of + that memory BIO. + + :return: None + """ + if self._from_ssl is None: + raise TypeError("Connection sock was not None") + + _lib.BIO_set_mem_eof_return(self._into_ssl, 0) + + def shutdown(self): + """ + Send the shutdown message to the Connection. + + :return: True if the shutdown completed successfully (i.e. both sides + have sent closure alerts), False otherwise (in which case you + call :meth:`recv` or :meth:`send` when the connection becomes + readable/writeable). 
+ """ + result = _lib.SSL_shutdown(self._ssl) + if result < 0: + self._raise_ssl_error(self._ssl, result) + elif result > 0: + return True + else: + return False + + def get_cipher_list(self): + """ + Retrieve the list of ciphers used by the Connection object. + + :return: A list of native cipher strings. + """ + ciphers = [] + for i in count(): + result = _lib.SSL_get_cipher_list(self._ssl, i) + if result == _ffi.NULL: + break + ciphers.append(_native(_ffi.string(result))) + return ciphers + + def get_client_ca_list(self): + """ + Get CAs whose certificates are suggested for client authentication. + + :return: If this is a server connection, the list of certificate + authorities that will be sent or has been sent to the client, as + controlled by this :class:`Connection`'s :class:`Context`. + + If this is a client connection, the list will be empty until the + connection with the server is established. + + .. versionadded:: 0.10 + """ + ca_names = _lib.SSL_get_client_CA_list(self._ssl) + if ca_names == _ffi.NULL: + # TODO: This is untested. + return [] + + result = [] + for i in range(_lib.sk_X509_NAME_num(ca_names)): + name = _lib.sk_X509_NAME_value(ca_names, i) + copy = _lib.X509_NAME_dup(name) + _openssl_assert(copy != _ffi.NULL) + + pyname = X509Name.__new__(X509Name) + pyname._name = _ffi.gc(copy, _lib.X509_NAME_free) + result.append(pyname) + return result + + def makefile(self, *args, **kwargs): + """ + The makefile() method is not implemented, since there is no dup + semantics for SSL connections + + :raise: NotImplementedError + """ + raise NotImplementedError( + "Cannot make file object of OpenSSL.SSL.Connection") + + def get_app_data(self): + """ + Retrieve application data as set by :meth:`set_app_data`. + + :return: The application data + """ + return self._app_data + + def set_app_data(self, data): + """ + Set application data + + :param data: The application data + :return: None + """ + self._app_data = data + + def get_shutdown(self): + """ + Get the shutdown state of the Connection. + + :return: The shutdown state, a bitvector of SENT_SHUTDOWN, + RECEIVED_SHUTDOWN. + """ + return _lib.SSL_get_shutdown(self._ssl) + + def set_shutdown(self, state): + """ + Set the shutdown state of the Connection. + + :param state: bitvector of SENT_SHUTDOWN, RECEIVED_SHUTDOWN. + :return: None + """ + if not isinstance(state, integer_types): + raise TypeError("state must be an integer") + + _lib.SSL_set_shutdown(self._ssl, state) + + def get_state_string(self): + """ + Retrieve a verbose string detailing the state of the Connection. + + :return: A string representing the state + :rtype: bytes + """ + return _ffi.string(_lib.SSL_state_string_long(self._ssl)) + + def server_random(self): + """ + Retrieve the random value used with the server hello message. + + :return: A string representing the state + """ + session = _lib.SSL_get_session(self._ssl) + if session == _ffi.NULL: + return None + length = _lib.SSL_get_server_random(self._ssl, _ffi.NULL, 0) + assert length > 0 + outp = _no_zero_allocator("unsigned char[]", length) + _lib.SSL_get_server_random(self._ssl, outp, length) + return _ffi.buffer(outp, length)[:] + + def client_random(self): + """ + Retrieve the random value used with the client hello message. 
+ + :return: A string representing the state + """ + session = _lib.SSL_get_session(self._ssl) + if session == _ffi.NULL: + return None + + length = _lib.SSL_get_client_random(self._ssl, _ffi.NULL, 0) + assert length > 0 + outp = _no_zero_allocator("unsigned char[]", length) + _lib.SSL_get_client_random(self._ssl, outp, length) + return _ffi.buffer(outp, length)[:] + + def master_key(self): + """ + Retrieve the value of the master key for this session. + + :return: A string representing the state + """ + session = _lib.SSL_get_session(self._ssl) + if session == _ffi.NULL: + return None + + length = _lib.SSL_SESSION_get_master_key(session, _ffi.NULL, 0) + assert length > 0 + outp = _no_zero_allocator("unsigned char[]", length) + _lib.SSL_SESSION_get_master_key(session, outp, length) + return _ffi.buffer(outp, length)[:] + + def export_keying_material(self, label, olen, context=None): + """ + Obtain keying material for application use. + + :param: label - a disambiguating label string as described in RFC 5705 + :param: olen - the length of the exported key material in bytes + :param: context - a per-association context value + :return: the exported key material bytes or None + """ + outp = _no_zero_allocator("unsigned char[]", olen) + context_buf = _ffi.NULL + context_len = 0 + use_context = 0 + if context is not None: + context_buf = context + context_len = len(context) + use_context = 1 + success = _lib.SSL_export_keying_material(self._ssl, outp, olen, + label, len(label), + context_buf, context_len, + use_context) + _openssl_assert(success == 1) + return _ffi.buffer(outp, olen)[:] + + def sock_shutdown(self, *args, **kwargs): + """ + Call the :meth:`shutdown` method of the underlying socket. + See :manpage:`shutdown(2)`. + + :return: What the socket's shutdown() method returns + """ + return self._socket.shutdown(*args, **kwargs) + + def get_certificate(self): + """ + Retrieve the local certificate (if any) + + :return: The local certificate + """ + cert = _lib.SSL_get_certificate(self._ssl) + if cert != _ffi.NULL: + _lib.X509_up_ref(cert) + return X509._from_raw_x509_ptr(cert) + return None + + def get_peer_certificate(self): + """ + Retrieve the other side's certificate (if any) + + :return: The peer's certificate + """ + cert = _lib.SSL_get_peer_certificate(self._ssl) + if cert != _ffi.NULL: + return X509._from_raw_x509_ptr(cert) + return None + + def get_peer_cert_chain(self): + """ + Retrieve the other side's certificate (if any) + + :return: A list of X509 instances giving the peer's certificate chain, + or None if it does not have one. + """ + cert_stack = _lib.SSL_get_peer_cert_chain(self._ssl) + if cert_stack == _ffi.NULL: + return None + + result = [] + for i in range(_lib.sk_X509_num(cert_stack)): + # TODO could incref instead of dup here + cert = _lib.X509_dup(_lib.sk_X509_value(cert_stack, i)) + pycert = X509._from_raw_x509_ptr(cert) + result.append(pycert) + return result + + def want_read(self): + """ + Checks if more data has to be read from the transport layer to complete + an operation. + + :return: True iff more data has to be read + """ + return _lib.SSL_want_read(self._ssl) + + def want_write(self): + """ + Checks if there is data to write to the transport layer to complete an + operation. + + :return: True iff there is data to write + """ + return _lib.SSL_want_write(self._ssl) + + def set_accept_state(self): + """ + Set the connection to work in server mode. The handshake will be + handled automatically by read/write. 
+ + :return: None + """ + _lib.SSL_set_accept_state(self._ssl) + + def set_connect_state(self): + """ + Set the connection to work in client mode. The handshake will be + handled automatically by read/write. + + :return: None + """ + _lib.SSL_set_connect_state(self._ssl) + + def get_session(self): + """ + Returns the Session currently used. + + :return: An instance of :class:`OpenSSL.SSL.Session` or + :obj:`None` if no session exists. + + .. versionadded:: 0.14 + """ + session = _lib.SSL_get1_session(self._ssl) + if session == _ffi.NULL: + return None + + pysession = Session.__new__(Session) + pysession._session = _ffi.gc(session, _lib.SSL_SESSION_free) + return pysession + + def set_session(self, session): + """ + Set the session to be used when the TLS/SSL connection is established. + + :param session: A Session instance representing the session to use. + :returns: None + + .. versionadded:: 0.14 + """ + if not isinstance(session, Session): + raise TypeError("session must be a Session instance") + + result = _lib.SSL_set_session(self._ssl, session._session) + if not result: + _raise_current_error() + + def _get_finished_message(self, function): + """ + Helper to implement :meth:`get_finished` and + :meth:`get_peer_finished`. + + :param function: Either :data:`SSL_get_finished`: or + :data:`SSL_get_peer_finished`. + + :return: :data:`None` if the desired message has not yet been + received, otherwise the contents of the message. + :rtype: :class:`bytes` or :class:`NoneType` + """ + # The OpenSSL documentation says nothing about what might happen if the + # count argument given is zero. Specifically, it doesn't say whether + # the output buffer may be NULL in that case or not. Inspection of the + # implementation reveals that it calls memcpy() unconditionally. + # Section 7.1.4, paragraph 1 of the C standard suggests that + # memcpy(NULL, source, 0) is not guaranteed to produce defined (let + # alone desirable) behavior (though it probably does on just about + # every implementation...) + # + # Allocate a tiny buffer to pass in (instead of just passing NULL as + # one might expect) for the initial call so as to be safe against this + # potentially undefined behavior. + empty = _ffi.new("char[]", 0) + size = function(self._ssl, empty, 0) + if size == 0: + # No Finished message so far. + return None + + buf = _no_zero_allocator("char[]", size) + function(self._ssl, buf, size) + return _ffi.buffer(buf, size)[:] + + def get_finished(self): + """ + Obtain the latest TLS Finished message that we sent. + + :return: The contents of the message or :obj:`None` if the TLS + handshake has not yet completed. + :rtype: :class:`bytes` or :class:`NoneType` + + .. versionadded:: 0.15 + """ + return self._get_finished_message(_lib.SSL_get_finished) + + def get_peer_finished(self): + """ + Obtain the latest TLS Finished message that we received from the peer. + + :return: The contents of the message or :obj:`None` if the TLS + handshake has not yet completed. + :rtype: :class:`bytes` or :class:`NoneType` + + .. versionadded:: 0.15 + """ + return self._get_finished_message(_lib.SSL_get_peer_finished) + + def get_cipher_name(self): + """ + Obtain the name of the currently used cipher. + + :returns: The name of the currently used cipher or :obj:`None` + if no connection has been established. + :rtype: :class:`unicode` or :class:`NoneType` + + .. 
versionadded:: 0.15 + """ + cipher = _lib.SSL_get_current_cipher(self._ssl) + if cipher == _ffi.NULL: + return None + else: + name = _ffi.string(_lib.SSL_CIPHER_get_name(cipher)) + return name.decode("utf-8") + + def get_cipher_bits(self): + """ + Obtain the number of secret bits of the currently used cipher. + + :returns: The number of secret bits of the currently used cipher + or :obj:`None` if no connection has been established. + :rtype: :class:`int` or :class:`NoneType` + + .. versionadded:: 0.15 + """ + cipher = _lib.SSL_get_current_cipher(self._ssl) + if cipher == _ffi.NULL: + return None + else: + return _lib.SSL_CIPHER_get_bits(cipher, _ffi.NULL) + + def get_cipher_version(self): + """ + Obtain the protocol version of the currently used cipher. + + :returns: The protocol name of the currently used cipher + or :obj:`None` if no connection has been established. + :rtype: :class:`unicode` or :class:`NoneType` + + .. versionadded:: 0.15 + """ + cipher = _lib.SSL_get_current_cipher(self._ssl) + if cipher == _ffi.NULL: + return None + else: + version = _ffi.string(_lib.SSL_CIPHER_get_version(cipher)) + return version.decode("utf-8") + + def get_protocol_version_name(self): + """ + Retrieve the protocol version of the current connection. + + :returns: The TLS version of the current connection, for example + the value for TLS 1.2 would be ``TLSv1.2``or ``Unknown`` + for connections that were not successfully established. + :rtype: :class:`unicode` + """ + version = _ffi.string(_lib.SSL_get_version(self._ssl)) + return version.decode("utf-8") + + def get_protocol_version(self): + """ + Retrieve the SSL or TLS protocol version of the current connection. + + :returns: The TLS version of the current connection. For example, + it will return ``0x769`` for connections made over TLS version 1. + :rtype: :class:`int` + """ + version = _lib.SSL_version(self._ssl) + return version + + @_requires_npn + def get_next_proto_negotiated(self): + """ + Get the protocol that was negotiated by NPN. + + :returns: A bytestring of the protocol name. If no protocol has been + negotiated yet, returns an empty string. + + .. versionadded:: 0.15 + """ + _warn_npn() + data = _ffi.new("unsigned char **") + data_len = _ffi.new("unsigned int *") + + _lib.SSL_get0_next_proto_negotiated(self._ssl, data, data_len) + + return _ffi.buffer(data[0], data_len[0])[:] + + @_requires_alpn + def set_alpn_protos(self, protos): + """ + Specify the client's ALPN protocol list. + + These protocols are offered to the server during protocol negotiation. + + :param protos: A list of the protocols to be offered to the server. + This list should be a Python list of bytestrings representing the + protocols to offer, e.g. ``[b'http/1.1', b'spdy/2']``. + """ + # Take the list of protocols and join them together, prefixing them + # with their lengths. + protostr = b''.join( + chain.from_iterable((int2byte(len(p)), p) for p in protos) + ) + + # Build a C string from the list. We don't need to save this off + # because OpenSSL immediately copies the data out. + input_str = _ffi.new("unsigned char[]", protostr) + _lib.SSL_set_alpn_protos(self._ssl, input_str, len(protostr)) + + @_requires_alpn + def get_alpn_proto_negotiated(self): + """ + Get the protocol that was negotiated by ALPN. + + :returns: A bytestring of the protocol name. If no protocol has been + negotiated yet, returns an empty string. 
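# --- Illustrative sketch (not part of the patched module): drive a full handshake
# --- entirely in memory with two Connections, then use the cipher/protocol
# --- introspection methods above. The throwaway self-signed certificate is only
# --- for illustration.
from OpenSSL import SSL, crypto

key = crypto.PKey()
key.generate_key(crypto.TYPE_RSA, 2048)
cert = crypto.X509()
cert.get_subject().CN = "in-memory-test"
cert.set_serial_number(1)
cert.gmtime_adj_notBefore(0)
cert.gmtime_adj_notAfter(3600)
cert.set_issuer(cert.get_subject())
cert.set_pubkey(key)
cert.sign(key, "sha256")

server_ctx = SSL.Context(SSL.SSLv23_METHOD)
server_ctx.use_privatekey(key)
server_ctx.use_certificate(cert)
client_ctx = SSL.Context(SSL.SSLv23_METHOD)

server = SSL.Connection(server_ctx, None)
server.set_accept_state()
client = SSL.Connection(client_ctx, None)
client.set_connect_state()

def pump(src, dst):
    # Move any pending outgoing TLS records from one memory BIO to the other.
    try:
        dst.bio_write(src.bio_read(65536))
    except SSL.WantReadError:
        pass

for _ in range(10):                       # a few round trips are enough to finish
    for end in (client, server):
        try:
            end.do_handshake()
        except SSL.WantReadError:
            pass
    pump(client, server)
    pump(server, client)

print(client.get_cipher_name(), client.get_cipher_bits(), client.get_protocol_version_name())
print(client.get_peer_certificate().get_subject().CN)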
+ """ + data = _ffi.new("unsigned char **") + data_len = _ffi.new("unsigned int *") + + _lib.SSL_get0_alpn_selected(self._ssl, data, data_len) + + if not data_len: + return b'' + + return _ffi.buffer(data[0], data_len[0])[:] + + def request_ocsp(self): + """ + Called to request that the server sends stapled OCSP data, if + available. If this is not called on the client side then the server + will not send OCSP data. Should be used in conjunction with + :meth:`Context.set_ocsp_client_callback`. + """ + rc = _lib.SSL_set_tlsext_status_type( + self._ssl, _lib.TLSEXT_STATUSTYPE_ocsp + ) + _openssl_assert(rc == 1) + + +# This is similar to the initialization calls at the end of OpenSSL/crypto.py +# but is exercised mostly by the Context initializer. +_lib.SSL_library_init() diff --git a/packages/wakatime/packages/OpenSSL/__init__.py b/packages/wakatime/packages/OpenSSL/__init__.py new file mode 100644 index 0000000..810d00d --- /dev/null +++ b/packages/wakatime/packages/OpenSSL/__init__.py @@ -0,0 +1,20 @@ +# Copyright (C) AB Strakt +# See LICENSE for details. + +""" +pyOpenSSL - A simple wrapper around the OpenSSL library +""" + +from OpenSSL import crypto, SSL +from OpenSSL.version import ( + __author__, __copyright__, __email__, __license__, __summary__, __title__, + __uri__, __version__, +) + + +__all__ = [ + "SSL", "crypto", + + "__author__", "__copyright__", "__email__", "__license__", "__summary__", + "__title__", "__uri__", "__version__", +] diff --git a/packages/wakatime/packages/OpenSSL/_util.py b/packages/wakatime/packages/OpenSSL/_util.py new file mode 100644 index 0000000..d8e3f66 --- /dev/null +++ b/packages/wakatime/packages/OpenSSL/_util.py @@ -0,0 +1,161 @@ +import sys +import warnings + +from six import PY2, binary_type, text_type + +from cryptography.hazmat.bindings.openssl.binding import Binding + + +binding = Binding() +binding.init_static_locks() +ffi = binding.ffi +lib = binding.lib + + +# This is a special CFFI allocator that does not bother to zero its memory +# after allocation. This has vastly better performance on large allocations and +# so should be used whenever we don't need the memory zeroed out. +no_zero_allocator = ffi.new_allocator(should_clear_after_alloc=False) + + +def text(charp): + """ + Get a native string type representing of the given CFFI ``char*`` object. + + :param charp: A C-style string represented using CFFI. + + :return: :class:`str` + """ + if not charp: + return "" + return native(ffi.string(charp)) + + +def exception_from_error_queue(exception_type): + """ + Convert an OpenSSL library failure into a Python exception. + + When a call to the native OpenSSL library fails, this is usually signalled + by the return value, and an error code is stored in an error queue + associated with the current thread. The err library provides functions to + obtain these error codes and textual error messages. + """ + errors = [] + + while True: + error = lib.ERR_get_error() + if error == 0: + break + errors.append(( + text(lib.ERR_lib_error_string(error)), + text(lib.ERR_func_error_string(error)), + text(lib.ERR_reason_error_string(error)))) + + raise exception_type(errors) + + +def make_assert(error): + """ + Create an assert function that uses :func:`exception_from_error_queue` to + raise an exception wrapped by *error*. + """ + def openssl_assert(ok): + """ + If *ok* is not True, retrieve the error from OpenSSL and raise it. 
+ """ + if ok is not True: + exception_from_error_queue(error) + + return openssl_assert + + +def native(s): + """ + Convert :py:class:`bytes` or :py:class:`unicode` to the native + :py:class:`str` type, using UTF-8 encoding if conversion is necessary. + + :raise UnicodeError: The input string is not UTF-8 decodeable. + + :raise TypeError: The input is neither :py:class:`bytes` nor + :py:class:`unicode`. + """ + if not isinstance(s, (binary_type, text_type)): + raise TypeError("%r is neither bytes nor unicode" % s) + if PY2: + if isinstance(s, text_type): + return s.encode("utf-8") + else: + if isinstance(s, binary_type): + return s.decode("utf-8") + return s + + +def path_string(s): + """ + Convert a Python string to a :py:class:`bytes` string identifying the same + path and which can be passed into an OpenSSL API accepting a filename. + + :param s: An instance of :py:class:`bytes` or :py:class:`unicode`. + + :return: An instance of :py:class:`bytes`. + """ + if isinstance(s, binary_type): + return s + elif isinstance(s, text_type): + return s.encode(sys.getfilesystemencoding()) + else: + raise TypeError("Path must be represented as bytes or unicode string") + + +if PY2: + def byte_string(s): + return s +else: + def byte_string(s): + return s.encode("charmap") + + +# A marker object to observe whether some optional arguments are passed any +# value or not. +UNSPECIFIED = object() + +_TEXT_WARNING = ( + text_type.__name__ + " for {0} is no longer accepted, use bytes" +) + + +def text_to_bytes_and_warn(label, obj): + """ + If ``obj`` is text, emit a warning that it should be bytes instead and try + to convert it to bytes automatically. + + :param str label: The name of the parameter from which ``obj`` was taken + (so a developer can easily find the source of the problem and correct + it). + + :return: If ``obj`` is the text string type, a ``bytes`` object giving the + UTF-8 encoding of that text is returned. Otherwise, ``obj`` itself is + returned. 
+ """ + if isinstance(obj, text_type): + warnings.warn( + _TEXT_WARNING.format(label), + category=DeprecationWarning, + stacklevel=3 + ) + return obj.encode('utf-8') + return obj + + +try: + # newer versions of cffi free the buffer deterministically + with ffi.from_buffer(b""): + pass + from_buffer = ffi.from_buffer +except AttributeError: + # cffi < 0.12 frees the buffer with refcounting gc + from contextlib import contextmanager + + @contextmanager + def from_buffer(*args): + yield ffi.from_buffer(*args) diff --git a/packages/wakatime/packages/OpenSSL/crypto.py b/packages/wakatime/packages/OpenSSL/crypto.py new file mode 100644 index 0000000..5a734cc --- /dev/null +++ b/packages/wakatime/packages/OpenSSL/crypto.py @@ -0,0 +1,3064 @@ +import datetime + +from base64 import b16encode +from functools import partial +from operator import __eq__, __ne__, __lt__, __le__, __gt__, __ge__ + +from six import ( + integer_types as _integer_types, + text_type as _text_type, + PY3 as _PY3) + +from cryptography import x509 +from cryptography.hazmat.primitives.asymmetric import dsa, rsa + +from OpenSSL._util import ( + ffi as _ffi, + lib as _lib, + exception_from_error_queue as _exception_from_error_queue, + byte_string as _byte_string, + native as _native, + UNSPECIFIED as _UNSPECIFIED, + text_to_bytes_and_warn as _text_to_bytes_and_warn, + make_assert as _make_assert, +) + +__all__ = [ + 'FILETYPE_PEM', + 'FILETYPE_ASN1', + 'FILETYPE_TEXT', + 'TYPE_RSA', + 'TYPE_DSA', + 'Error', + 'PKey', + 'get_elliptic_curves', + 'get_elliptic_curve', + 'X509Name', + 'X509Extension', + 'X509Req', + 'X509', + 'X509StoreFlags', + 'X509Store', + 'X509StoreContextError', + 'X509StoreContext', + 'load_certificate', + 'dump_certificate', + 'dump_publickey', + 'dump_privatekey', + 'Revoked', + 'CRL', + 'PKCS7', + 'PKCS12', + 'NetscapeSPKI', + 'load_publickey', + 'load_privatekey', + 'dump_certificate_request', + 'load_certificate_request', + 'sign', + 'verify', + 'dump_crl', + 'load_crl', + 'load_pkcs7_data', + 'load_pkcs12' +] + +FILETYPE_PEM = _lib.SSL_FILETYPE_PEM +FILETYPE_ASN1 = _lib.SSL_FILETYPE_ASN1 + +# TODO This was an API mistake. OpenSSL has no such constant. +FILETYPE_TEXT = 2 ** 16 - 1 + +TYPE_RSA = _lib.EVP_PKEY_RSA +TYPE_DSA = _lib.EVP_PKEY_DSA +TYPE_DH = _lib.EVP_PKEY_DH +TYPE_EC = _lib.EVP_PKEY_EC + + +class Error(Exception): + """ + An error occurred in an `OpenSSL.crypto` API. + """ + + +_raise_current_error = partial(_exception_from_error_queue, Error) +_openssl_assert = _make_assert(Error) + + +def _get_backend(): + """ + Importing the backend from cryptography has the side effect of activating + the osrandom engine. This mutates the global state of OpenSSL in the + process and causes issues for various programs that use subinterpreters or + embed Python. By putting the import in this function we can avoid + triggering this side effect unless _get_backend is called. + """ + from cryptography.hazmat.backends.openssl.backend import backend + return backend + + +def _untested_error(where): + """ + An OpenSSL API failed somehow. Additionally, the failure which was + encountered isn't one that's exercised by the test suite so future behavior + of pyOpenSSL is now somewhat less predictable. + """ + raise RuntimeError("Unknown %s failure" % (where,)) + + +def _new_mem_buf(buffer=None): + """ + Allocate a new OpenSSL memory BIO. + + Arrange for the garbage collector to clean it up automatically. + + :param buffer: None or some bytes to use to put into the BIO so that they + can be read out. 
+ """ + if buffer is None: + bio = _lib.BIO_new(_lib.BIO_s_mem()) + free = _lib.BIO_free + else: + data = _ffi.new("char[]", buffer) + bio = _lib.BIO_new_mem_buf(data, len(buffer)) + + # Keep the memory alive as long as the bio is alive! + def free(bio, ref=data): + return _lib.BIO_free(bio) + + _openssl_assert(bio != _ffi.NULL) + + bio = _ffi.gc(bio, free) + return bio + + +def _bio_to_string(bio): + """ + Copy the contents of an OpenSSL BIO object into a Python byte string. + """ + result_buffer = _ffi.new('char**') + buffer_length = _lib.BIO_get_mem_data(bio, result_buffer) + return _ffi.buffer(result_buffer[0], buffer_length)[:] + + +def _set_asn1_time(boundary, when): + """ + The the time value of an ASN1 time object. + + @param boundary: An ASN1_TIME pointer (or an object safely + castable to that type) which will have its value set. + @param when: A string representation of the desired time value. + + @raise TypeError: If C{when} is not a L{bytes} string. + @raise ValueError: If C{when} does not represent a time in the required + format. + @raise RuntimeError: If the time value cannot be set for some other + (unspecified) reason. + """ + if not isinstance(when, bytes): + raise TypeError("when must be a byte string") + + set_result = _lib.ASN1_TIME_set_string(boundary, when) + if set_result == 0: + raise ValueError("Invalid string") + + +def _get_asn1_time(timestamp): + """ + Retrieve the time value of an ASN1 time object. + + @param timestamp: An ASN1_GENERALIZEDTIME* (or an object safely castable to + that type) from which the time value will be retrieved. + + @return: The time value from C{timestamp} as a L{bytes} string in a certain + format. Or C{None} if the object contains no time value. + """ + string_timestamp = _ffi.cast('ASN1_STRING*', timestamp) + if _lib.ASN1_STRING_length(string_timestamp) == 0: + return None + elif ( + _lib.ASN1_STRING_type(string_timestamp) == _lib.V_ASN1_GENERALIZEDTIME + ): + return _ffi.string(_lib.ASN1_STRING_data(string_timestamp)) + else: + generalized_timestamp = _ffi.new("ASN1_GENERALIZEDTIME**") + _lib.ASN1_TIME_to_generalizedtime(timestamp, generalized_timestamp) + if generalized_timestamp[0] == _ffi.NULL: + # This may happen: + # - if timestamp was not an ASN1_TIME + # - if allocating memory for the ASN1_GENERALIZEDTIME failed + # - if a copy of the time data from timestamp cannot be made for + # the newly allocated ASN1_GENERALIZEDTIME + # + # These are difficult to test. cffi enforces the ASN1_TIME type. + # Memory allocation failures are a pain to trigger + # deterministically. + _untested_error("ASN1_TIME_to_generalizedtime") + else: + string_timestamp = _ffi.cast( + "ASN1_STRING*", generalized_timestamp[0]) + string_data = _lib.ASN1_STRING_data(string_timestamp) + string_result = _ffi.string(string_data) + _lib.ASN1_GENERALIZEDTIME_free(generalized_timestamp[0]) + return string_result + + +class _X509NameInvalidator(object): + def __init__(self): + self._names = [] + + def add(self, name): + self._names.append(name) + + def clear(self): + for name in self._names: + # Breaks the object, but also prevents UAF! + del name._name + + +class PKey(object): + """ + A class representing an DSA or RSA public key or key pair. + """ + _only_public = False + _initialized = True + + def __init__(self): + pkey = _lib.EVP_PKEY_new() + self._pkey = _ffi.gc(pkey, _lib.EVP_PKEY_free) + self._initialized = False + + def to_cryptography_key(self): + """ + Export as a ``cryptography`` key. + + :rtype: One of ``cryptography``'s `key interfaces`_. + + .. 
_key interfaces: https://cryptography.io/en/latest/hazmat/\ + primitives/asymmetric/rsa/#key-interfaces + + .. versionadded:: 16.1.0 + """ + backend = _get_backend() + if self._only_public: + return backend._evp_pkey_to_public_key(self._pkey) + else: + return backend._evp_pkey_to_private_key(self._pkey) + + @classmethod + def from_cryptography_key(cls, crypto_key): + """ + Construct based on a ``cryptography`` *crypto_key*. + + :param crypto_key: A ``cryptography`` key. + :type crypto_key: One of ``cryptography``'s `key interfaces`_. + + :rtype: PKey + + .. versionadded:: 16.1.0 + """ + pkey = cls() + if not isinstance(crypto_key, (rsa.RSAPublicKey, rsa.RSAPrivateKey, + dsa.DSAPublicKey, dsa.DSAPrivateKey)): + raise TypeError("Unsupported key type") + + pkey._pkey = crypto_key._evp_pkey + if isinstance(crypto_key, (rsa.RSAPublicKey, dsa.DSAPublicKey)): + pkey._only_public = True + pkey._initialized = True + return pkey + + def generate_key(self, type, bits): + """ + Generate a key pair of the given type, with the given number of bits. + + This generates a key "into" the this object. + + :param type: The key type. + :type type: :py:data:`TYPE_RSA` or :py:data:`TYPE_DSA` + :param bits: The number of bits. + :type bits: :py:data:`int` ``>= 0`` + :raises TypeError: If :py:data:`type` or :py:data:`bits` isn't + of the appropriate type. + :raises ValueError: If the number of bits isn't an integer of + the appropriate size. + :return: ``None`` + """ + if not isinstance(type, int): + raise TypeError("type must be an integer") + + if not isinstance(bits, int): + raise TypeError("bits must be an integer") + + if type == TYPE_RSA: + if bits <= 0: + raise ValueError("Invalid number of bits") + + # TODO Check error return + exponent = _lib.BN_new() + exponent = _ffi.gc(exponent, _lib.BN_free) + _lib.BN_set_word(exponent, _lib.RSA_F4) + + rsa = _lib.RSA_new() + + result = _lib.RSA_generate_key_ex(rsa, bits, exponent, _ffi.NULL) + _openssl_assert(result == 1) + + result = _lib.EVP_PKEY_assign_RSA(self._pkey, rsa) + _openssl_assert(result == 1) + + elif type == TYPE_DSA: + dsa = _lib.DSA_new() + _openssl_assert(dsa != _ffi.NULL) + + dsa = _ffi.gc(dsa, _lib.DSA_free) + res = _lib.DSA_generate_parameters_ex( + dsa, bits, _ffi.NULL, 0, _ffi.NULL, _ffi.NULL, _ffi.NULL + ) + _openssl_assert(res == 1) + + _openssl_assert(_lib.DSA_generate_key(dsa) == 1) + _openssl_assert(_lib.EVP_PKEY_set1_DSA(self._pkey, dsa) == 1) + else: + raise Error("No such key type") + + self._initialized = True + + def check(self): + """ + Check the consistency of an RSA private key. + + This is the Python equivalent of OpenSSL's ``RSA_check_key``. + + :return: ``True`` if key is consistent. + + :raise OpenSSL.crypto.Error: if the key is inconsistent. + + :raise TypeError: if the key is of a type which cannot be checked. + Only RSA keys can currently be checked. + """ + if self._only_public: + raise TypeError("public key only") + + if _lib.EVP_PKEY_type(self.type()) != _lib.EVP_PKEY_RSA: + raise TypeError("key type unsupported") + + rsa = _lib.EVP_PKEY_get1_RSA(self._pkey) + rsa = _ffi.gc(rsa, _lib.RSA_free) + result = _lib.RSA_check_key(rsa) + if result: + return True + _raise_current_error() + + def type(self): + """ + Returns the type of the key + + :return: The type of the key. + """ + return _lib.EVP_PKEY_id(self._pkey) + + def bits(self): + """ + Returns the number of bits of the key + + :return: The number of bits of the key. 
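# --- Illustrative sketch (not part of the patched module): generate a key pair with
# --- the PKey API above, sanity-check it, and round-trip it through PEM with the
# --- module-level dump/load helpers.
from OpenSSL import crypto

key = crypto.PKey()
key.generate_key(crypto.TYPE_RSA, 2048)
assert key.check()                          # RSA consistency check
print(key.type() == crypto.TYPE_RSA, key.bits())

pem = crypto.dump_privatekey(crypto.FILETYPE_PEM, key)
again = crypto.load_privatekey(crypto.FILETYPE_PEM, pem)
print(again.bits() == key.bits())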
+ """ + return _lib.EVP_PKEY_bits(self._pkey) + + +class _EllipticCurve(object): + """ + A representation of a supported elliptic curve. + + @cvar _curves: :py:obj:`None` until an attempt is made to load the curves. + Thereafter, a :py:type:`set` containing :py:type:`_EllipticCurve` + instances each of which represents one curve supported by the system. + @type _curves: :py:type:`NoneType` or :py:type:`set` + """ + _curves = None + + if _PY3: + # This only necessary on Python 3. Morever, it is broken on Python 2. + def __ne__(self, other): + """ + Implement cooperation with the right-hand side argument of ``!=``. + + Python 3 seems to have dropped this cooperation in this very narrow + circumstance. + """ + if isinstance(other, _EllipticCurve): + return super(_EllipticCurve, self).__ne__(other) + return NotImplemented + + @classmethod + def _load_elliptic_curves(cls, lib): + """ + Get the curves supported by OpenSSL. + + :param lib: The OpenSSL library binding object. + + :return: A :py:type:`set` of ``cls`` instances giving the names of the + elliptic curves the underlying library supports. + """ + num_curves = lib.EC_get_builtin_curves(_ffi.NULL, 0) + builtin_curves = _ffi.new('EC_builtin_curve[]', num_curves) + # The return value on this call should be num_curves again. We + # could check it to make sure but if it *isn't* then.. what could + # we do? Abort the whole process, I suppose...? -exarkun + lib.EC_get_builtin_curves(builtin_curves, num_curves) + return set( + cls.from_nid(lib, c.nid) + for c in builtin_curves) + + @classmethod + def _get_elliptic_curves(cls, lib): + """ + Get, cache, and return the curves supported by OpenSSL. + + :param lib: The OpenSSL library binding object. + + :return: A :py:type:`set` of ``cls`` instances giving the names of the + elliptic curves the underlying library supports. + """ + if cls._curves is None: + cls._curves = cls._load_elliptic_curves(lib) + return cls._curves + + @classmethod + def from_nid(cls, lib, nid): + """ + Instantiate a new :py:class:`_EllipticCurve` associated with the given + OpenSSL NID. + + :param lib: The OpenSSL library binding object. + + :param nid: The OpenSSL NID the resulting curve object will represent. + This must be a curve NID (and not, for example, a hash NID) or + subsequent operations will fail in unpredictable ways. + :type nid: :py:class:`int` + + :return: The curve object. + """ + return cls(lib, nid, _ffi.string(lib.OBJ_nid2sn(nid)).decode("ascii")) + + def __init__(self, lib, nid, name): + """ + :param _lib: The :py:mod:`cryptography` binding instance used to + interface with OpenSSL. + + :param _nid: The OpenSSL NID identifying the curve this object + represents. + :type _nid: :py:class:`int` + + :param name: The OpenSSL short name identifying the curve this object + represents. + :type name: :py:class:`unicode` + """ + self._lib = lib + self._nid = nid + self.name = name + + def __repr__(self): + return "" % (self.name,) + + def _to_EC_KEY(self): + """ + Create a new OpenSSL EC_KEY structure initialized to use this curve. + + The structure is automatically garbage collected when the Python object + is garbage collected. + """ + key = self._lib.EC_KEY_new_by_curve_name(self._nid) + return _ffi.gc(key, _lib.EC_KEY_free) + + +def get_elliptic_curves(): + """ + Return a set of objects representing the elliptic curves supported in the + OpenSSL build in use. + + The curve objects have a :py:class:`unicode` ``name`` attribute by which + they identify themselves. 
+ + The curve objects are useful as values for the argument accepted by + :py:meth:`Context.set_tmp_ecdh` to specify which elliptical curve should be + used for ECDHE key exchange. + """ + return _EllipticCurve._get_elliptic_curves(_lib) + + +def get_elliptic_curve(name): + """ + Return a single curve object selected by name. + + See :py:func:`get_elliptic_curves` for information about curve objects. + + :param name: The OpenSSL short name identifying the curve object to + retrieve. + :type name: :py:class:`unicode` + + If the named curve is not supported then :py:class:`ValueError` is raised. + """ + for curve in get_elliptic_curves(): + if curve.name == name: + return curve + raise ValueError("unknown curve name", name) + + +class X509Name(object): + """ + An X.509 Distinguished Name. + + :ivar countryName: The country of the entity. + :ivar C: Alias for :py:attr:`countryName`. + + :ivar stateOrProvinceName: The state or province of the entity. + :ivar ST: Alias for :py:attr:`stateOrProvinceName`. + + :ivar localityName: The locality of the entity. + :ivar L: Alias for :py:attr:`localityName`. + + :ivar organizationName: The organization name of the entity. + :ivar O: Alias for :py:attr:`organizationName`. + + :ivar organizationalUnitName: The organizational unit of the entity. + :ivar OU: Alias for :py:attr:`organizationalUnitName` + + :ivar commonName: The common name of the entity. + :ivar CN: Alias for :py:attr:`commonName`. + + :ivar emailAddress: The e-mail address of the entity. + """ + + def __init__(self, name): + """ + Create a new X509Name, copying the given X509Name instance. + + :param name: The name to copy. + :type name: :py:class:`X509Name` + """ + name = _lib.X509_NAME_dup(name._name) + self._name = _ffi.gc(name, _lib.X509_NAME_free) + + def __setattr__(self, name, value): + if name.startswith('_'): + return super(X509Name, self).__setattr__(name, value) + + # Note: we really do not want str subclasses here, so we do not use + # isinstance. + if type(name) is not str: + raise TypeError("attribute name must be string, not '%.200s'" % ( + type(value).__name__,)) + + nid = _lib.OBJ_txt2nid(_byte_string(name)) + if nid == _lib.NID_undef: + try: + _raise_current_error() + except Error: + pass + raise AttributeError("No such attribute") + + # If there's an old entry for this NID, remove it + for i in range(_lib.X509_NAME_entry_count(self._name)): + ent = _lib.X509_NAME_get_entry(self._name, i) + ent_obj = _lib.X509_NAME_ENTRY_get_object(ent) + ent_nid = _lib.OBJ_obj2nid(ent_obj) + if nid == ent_nid: + ent = _lib.X509_NAME_delete_entry(self._name, i) + _lib.X509_NAME_ENTRY_free(ent) + break + + if isinstance(value, _text_type): + value = value.encode('utf-8') + + add_result = _lib.X509_NAME_add_entry_by_NID( + self._name, nid, _lib.MBSTRING_UTF8, value, -1, -1, 0) + if not add_result: + _raise_current_error() + + def __getattr__(self, name): + """ + Find attribute. An X509Name object has the following attributes: + countryName (alias C), stateOrProvince (alias ST), locality (alias L), + organization (alias O), organizationalUnit (alias OU), commonName + (alias CN) and more... + """ + nid = _lib.OBJ_txt2nid(_byte_string(name)) + if nid == _lib.NID_undef: + # This is a bit weird. OBJ_txt2nid indicated failure, but it seems + # a lower level function, a2d_ASN1_OBJECT, also feels the need to + # push something onto the error queue. If we don't clean that up + # now, someone else will bump into it later and be quite confused. + # See lp#314814. 
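# --- Illustrative sketch (not part of the patched module): the curve helpers defined
# --- above. "prime256v1" is present in typical OpenSSL builds, but availability
# --- depends on the local library.
from OpenSSL import crypto

names = sorted(curve.name for curve in crypto.get_elliptic_curves())
print(len(names), names[:3])

curve = crypto.get_elliptic_curve("prime256v1")
print(curve.name)                     # the object can be passed to Context.set_tmp_ecdh()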
+ try: + _raise_current_error() + except Error: + pass + return super(X509Name, self).__getattr__(name) + + entry_index = _lib.X509_NAME_get_index_by_NID(self._name, nid, -1) + if entry_index == -1: + return None + + entry = _lib.X509_NAME_get_entry(self._name, entry_index) + data = _lib.X509_NAME_ENTRY_get_data(entry) + + result_buffer = _ffi.new("unsigned char**") + data_length = _lib.ASN1_STRING_to_UTF8(result_buffer, data) + _openssl_assert(data_length >= 0) + + try: + result = _ffi.buffer( + result_buffer[0], data_length + )[:].decode('utf-8') + finally: + # XXX untested + _lib.OPENSSL_free(result_buffer[0]) + return result + + def _cmp(op): + def f(self, other): + if not isinstance(other, X509Name): + return NotImplemented + result = _lib.X509_NAME_cmp(self._name, other._name) + return op(result, 0) + return f + + __eq__ = _cmp(__eq__) + __ne__ = _cmp(__ne__) + + __lt__ = _cmp(__lt__) + __le__ = _cmp(__le__) + + __gt__ = _cmp(__gt__) + __ge__ = _cmp(__ge__) + + def __repr__(self): + """ + String representation of an X509Name + """ + result_buffer = _ffi.new("char[]", 512) + format_result = _lib.X509_NAME_oneline( + self._name, result_buffer, len(result_buffer)) + _openssl_assert(format_result != _ffi.NULL) + + return "" % ( + _native(_ffi.string(result_buffer)),) + + def hash(self): + """ + Return an integer representation of the first four bytes of the + MD5 digest of the DER representation of the name. + + This is the Python equivalent of OpenSSL's ``X509_NAME_hash``. + + :return: The (integer) hash of this name. + :rtype: :py:class:`int` + """ + return _lib.X509_NAME_hash(self._name) + + def der(self): + """ + Return the DER encoding of this name. + + :return: The DER encoded form of this name. + :rtype: :py:class:`bytes` + """ + result_buffer = _ffi.new('unsigned char**') + encode_result = _lib.i2d_X509_NAME(self._name, result_buffer) + _openssl_assert(encode_result >= 0) + + string_result = _ffi.buffer(result_buffer[0], encode_result)[:] + _lib.OPENSSL_free(result_buffer[0]) + return string_result + + def get_components(self): + """ + Returns the components of this name, as a sequence of 2-tuples. + + :return: The components of this name. + :rtype: :py:class:`list` of ``name, value`` tuples. + """ + result = [] + for i in range(_lib.X509_NAME_entry_count(self._name)): + ent = _lib.X509_NAME_get_entry(self._name, i) + + fname = _lib.X509_NAME_ENTRY_get_object(ent) + fval = _lib.X509_NAME_ENTRY_get_data(ent) + + nid = _lib.OBJ_obj2nid(fname) + name = _lib.OBJ_nid2sn(nid) + + # ffi.string does not handle strings containing NULL bytes + # (which may have been generated by old, broken software) + value = _ffi.buffer(_lib.ASN1_STRING_data(fval), + _lib.ASN1_STRING_length(fval))[:] + result.append((_ffi.string(name), value)) + + return result + + +class X509Extension(object): + """ + An X.509 v3 certificate extension. + """ + + def __init__(self, type_name, critical, value, subject=None, issuer=None): + """ + Initializes an X509 extension. + + :param type_name: The name of the type of extension_ to create. + :type type_name: :py:data:`bytes` + + :param bool critical: A flag indicating whether this is a critical + extension. + + :param value: The value of the extension. + :type value: :py:data:`bytes` + + :param subject: Optional X509 certificate to use as subject. + :type subject: :py:class:`X509` + + :param issuer: Optional X509 certificate to use as issuer. + :type issuer: :py:class:`X509` + + .. 
_extension: https://www.openssl.org/docs/manmaster/man5/ + x509v3_config.html#STANDARD-EXTENSIONS + """ + ctx = _ffi.new("X509V3_CTX*") + + # A context is necessary for any extension which uses the r2i + # conversion method. That is, X509V3_EXT_nconf may segfault if passed + # a NULL ctx. Start off by initializing most of the fields to NULL. + _lib.X509V3_set_ctx(ctx, _ffi.NULL, _ffi.NULL, _ffi.NULL, _ffi.NULL, 0) + + # We have no configuration database - but perhaps we should (some + # extensions may require it). + _lib.X509V3_set_ctx_nodb(ctx) + + # Initialize the subject and issuer, if appropriate. ctx is a local, + # and as far as I can tell none of the X509V3_* APIs invoked here steal + # any references, so no need to mess with reference counts or + # duplicates. + if issuer is not None: + if not isinstance(issuer, X509): + raise TypeError("issuer must be an X509 instance") + ctx.issuer_cert = issuer._x509 + if subject is not None: + if not isinstance(subject, X509): + raise TypeError("subject must be an X509 instance") + ctx.subject_cert = subject._x509 + + if critical: + # There are other OpenSSL APIs which would let us pass in critical + # separately, but they're harder to use, and since value is already + # a pile of crappy junk smuggling a ton of utterly important + # structured data, what's the point of trying to avoid nasty stuff + # with strings? (However, X509V3_EXT_i2d in particular seems like + # it would be a better API to invoke. I do not know where to get + # the ext_struc it desires for its last parameter, though.) + value = b"critical," + value + + extension = _lib.X509V3_EXT_nconf(_ffi.NULL, ctx, type_name, value) + if extension == _ffi.NULL: + _raise_current_error() + self._extension = _ffi.gc(extension, _lib.X509_EXTENSION_free) + + @property + def _nid(self): + return _lib.OBJ_obj2nid( + _lib.X509_EXTENSION_get_object(self._extension) + ) + + _prefixes = { + _lib.GEN_EMAIL: "email", + _lib.GEN_DNS: "DNS", + _lib.GEN_URI: "URI", + } + + def _subjectAltNameString(self): + names = _ffi.cast( + "GENERAL_NAMES*", _lib.X509V3_EXT_d2i(self._extension) + ) + + names = _ffi.gc(names, _lib.GENERAL_NAMES_free) + parts = [] + for i in range(_lib.sk_GENERAL_NAME_num(names)): + name = _lib.sk_GENERAL_NAME_value(names, i) + try: + label = self._prefixes[name.type] + except KeyError: + bio = _new_mem_buf() + _lib.GENERAL_NAME_print(bio, name) + parts.append(_native(_bio_to_string(bio))) + else: + value = _native( + _ffi.buffer(name.d.ia5.data, name.d.ia5.length)[:]) + parts.append(label + ":" + value) + return ", ".join(parts) + + def __str__(self): + """ + :return: a nice text representation of the extension + """ + if _lib.NID_subject_alt_name == self._nid: + return self._subjectAltNameString() + + bio = _new_mem_buf() + print_result = _lib.X509V3_EXT_print(bio, self._extension, 0, 0) + _openssl_assert(print_result != 0) + + return _native(_bio_to_string(bio)) + + def get_critical(self): + """ + Returns the critical field of this X.509 extension. + + :return: The critical field. + """ + return _lib.X509_EXTENSION_get_critical(self._extension) + + def get_short_name(self): + """ + Returns the short type name of this X.509 extension. + + The result is a byte string such as :py:const:`b"basicConstraints"`. + + :return: The short type name. + :rtype: :py:data:`bytes` + + .. 
versionadded:: 0.12 + """ + obj = _lib.X509_EXTENSION_get_object(self._extension) + nid = _lib.OBJ_obj2nid(obj) + return _ffi.string(_lib.OBJ_nid2sn(nid)) + + def get_data(self): + """ + Returns the data of the X509 extension, encoded as ASN.1. + + :return: The ASN.1 encoded data of this X509 extension. + :rtype: :py:data:`bytes` + + .. versionadded:: 0.12 + """ + octet_result = _lib.X509_EXTENSION_get_data(self._extension) + string_result = _ffi.cast('ASN1_STRING*', octet_result) + char_result = _lib.ASN1_STRING_data(string_result) + result_length = _lib.ASN1_STRING_length(string_result) + return _ffi.buffer(char_result, result_length)[:] + + +class X509Req(object): + """ + An X.509 certificate signing requests. + """ + + def __init__(self): + req = _lib.X509_REQ_new() + self._req = _ffi.gc(req, _lib.X509_REQ_free) + # Default to version 0. + self.set_version(0) + + def to_cryptography(self): + """ + Export as a ``cryptography`` certificate signing request. + + :rtype: ``cryptography.x509.CertificateSigningRequest`` + + .. versionadded:: 17.1.0 + """ + from cryptography.hazmat.backends.openssl.x509 import ( + _CertificateSigningRequest + ) + backend = _get_backend() + return _CertificateSigningRequest(backend, self._req) + + @classmethod + def from_cryptography(cls, crypto_req): + """ + Construct based on a ``cryptography`` *crypto_req*. + + :param crypto_req: A ``cryptography`` X.509 certificate signing request + :type crypto_req: ``cryptography.x509.CertificateSigningRequest`` + + :rtype: X509Req + + .. versionadded:: 17.1.0 + """ + if not isinstance(crypto_req, x509.CertificateSigningRequest): + raise TypeError("Must be a certificate signing request") + + req = cls() + req._req = crypto_req._x509_req + return req + + def set_pubkey(self, pkey): + """ + Set the public key of the certificate signing request. + + :param pkey: The public key to use. + :type pkey: :py:class:`PKey` + + :return: ``None`` + """ + set_result = _lib.X509_REQ_set_pubkey(self._req, pkey._pkey) + _openssl_assert(set_result == 1) + + def get_pubkey(self): + """ + Get the public key of the certificate signing request. + + :return: The public key. + :rtype: :py:class:`PKey` + """ + pkey = PKey.__new__(PKey) + pkey._pkey = _lib.X509_REQ_get_pubkey(self._req) + _openssl_assert(pkey._pkey != _ffi.NULL) + pkey._pkey = _ffi.gc(pkey._pkey, _lib.EVP_PKEY_free) + pkey._only_public = True + return pkey + + def set_version(self, version): + """ + Set the version subfield (RFC 2459, section 4.1.2.1) of the certificate + request. + + :param int version: The version number. + :return: ``None`` + """ + set_result = _lib.X509_REQ_set_version(self._req, version) + _openssl_assert(set_result == 1) + + def get_version(self): + """ + Get the version subfield (RFC 2459, section 4.1.2.1) of the certificate + request. + + :return: The value of the version subfield. + :rtype: :py:class:`int` + """ + return _lib.X509_REQ_get_version(self._req) + + def get_subject(self): + """ + Return the subject of this certificate signing request. + + This creates a new :class:`X509Name` that wraps the underlying subject + name field on the certificate signing request. Modifying it will modify + the underlying signing request, and will have the effect of modifying + any other :class:`X509Name` that refers to this subject. + + :return: The subject of this certificate signing request. 
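# --- Illustrative sketch (not part of the patched module): build a subjectAltName
# --- extension from its textual form with the X509Extension API above and inspect it.
# --- The DNS names are only placeholders.
from OpenSSL import crypto

ext = crypto.X509Extension(b"subjectAltName", False,
                           b"DNS:example.internal,DNS:www.example.internal")
print(ext.get_short_name())           # b'subjectAltName'
print(str(ext))                       # "DNS:example.internal, DNS:www.example.internal"
print(ext.get_critical())             # 0 (not critical)
print(ext.get_data()[:2])             # DER-encoded ASN.1 payload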
+ :rtype: :class:`X509Name` + """ + name = X509Name.__new__(X509Name) + name._name = _lib.X509_REQ_get_subject_name(self._req) + _openssl_assert(name._name != _ffi.NULL) + + # The name is owned by the X509Req structure. As long as the X509Name + # Python object is alive, keep the X509Req Python object alive. + name._owner = self + + return name + + def add_extensions(self, extensions): + """ + Add extensions to the certificate signing request. + + :param extensions: The X.509 extensions to add. + :type extensions: iterable of :py:class:`X509Extension` + :return: ``None`` + """ + stack = _lib.sk_X509_EXTENSION_new_null() + _openssl_assert(stack != _ffi.NULL) + + stack = _ffi.gc(stack, _lib.sk_X509_EXTENSION_free) + + for ext in extensions: + if not isinstance(ext, X509Extension): + raise ValueError("One of the elements is not an X509Extension") + + # TODO push can fail (here and elsewhere) + _lib.sk_X509_EXTENSION_push(stack, ext._extension) + + add_result = _lib.X509_REQ_add_extensions(self._req, stack) + _openssl_assert(add_result == 1) + + def get_extensions(self): + """ + Get X.509 extensions in the certificate signing request. + + :return: The X.509 extensions in this request. + :rtype: :py:class:`list` of :py:class:`X509Extension` objects. + + .. versionadded:: 0.15 + """ + exts = [] + native_exts_obj = _lib.X509_REQ_get_extensions(self._req) + for i in range(_lib.sk_X509_EXTENSION_num(native_exts_obj)): + ext = X509Extension.__new__(X509Extension) + ext._extension = _lib.sk_X509_EXTENSION_value(native_exts_obj, i) + exts.append(ext) + return exts + + def sign(self, pkey, digest): + """ + Sign the certificate signing request with this key and digest type. + + :param pkey: The key pair to sign with. + :type pkey: :py:class:`PKey` + :param digest: The name of the message digest to use for the signature, + e.g. :py:data:`b"sha256"`. + :type digest: :py:class:`bytes` + :return: ``None`` + """ + if pkey._only_public: + raise ValueError("Key has only public part") + + if not pkey._initialized: + raise ValueError("Key is uninitialized") + + digest_obj = _lib.EVP_get_digestbyname(_byte_string(digest)) + if digest_obj == _ffi.NULL: + raise ValueError("No such digest method") + + sign_result = _lib.X509_REQ_sign(self._req, pkey._pkey, digest_obj) + _openssl_assert(sign_result > 0) + + def verify(self, pkey): + """ + Verifies the signature on this certificate signing request. + + :param PKey key: A public key. + + :return: ``True`` if the signature is correct. + :rtype: bool + + :raises OpenSSL.crypto.Error: If the signature is invalid or there is a + problem verifying the signature. + """ + if not isinstance(pkey, PKey): + raise TypeError("pkey must be a PKey instance") + + result = _lib.X509_REQ_verify(self._req, pkey._pkey) + if result <= 0: + _raise_current_error() + + return result + + +class X509(object): + """ + An X.509 certificate. + """ + def __init__(self): + x509 = _lib.X509_new() + _openssl_assert(x509 != _ffi.NULL) + self._x509 = _ffi.gc(x509, _lib.X509_free) + + self._issuer_invalidator = _X509NameInvalidator() + self._subject_invalidator = _X509NameInvalidator() + + @classmethod + def _from_raw_x509_ptr(cls, x509): + cert = cls.__new__(cls) + cert._x509 = _ffi.gc(x509, _lib.X509_free) + cert._issuer_invalidator = _X509NameInvalidator() + cert._subject_invalidator = _X509NameInvalidator() + return cert + + def to_cryptography(self): + """ + Export as a ``cryptography`` certificate. + + :rtype: ``cryptography.x509.Certificate`` + + .. 
versionadded:: 17.1.0 + """ + from cryptography.hazmat.backends.openssl.x509 import _Certificate + backend = _get_backend() + return _Certificate(backend, self._x509) + + @classmethod + def from_cryptography(cls, crypto_cert): + """ + Construct based on a ``cryptography`` *crypto_cert*. + + :param crypto_key: A ``cryptography`` X.509 certificate. + :type crypto_key: ``cryptography.x509.Certificate`` + + :rtype: X509 + + .. versionadded:: 17.1.0 + """ + if not isinstance(crypto_cert, x509.Certificate): + raise TypeError("Must be a certificate") + + cert = cls() + cert._x509 = crypto_cert._x509 + return cert + + def set_version(self, version): + """ + Set the version number of the certificate. Note that the + version value is zero-based, eg. a value of 0 is V1. + + :param version: The version number of the certificate. + :type version: :py:class:`int` + + :return: ``None`` + """ + if not isinstance(version, int): + raise TypeError("version must be an integer") + + _lib.X509_set_version(self._x509, version) + + def get_version(self): + """ + Return the version number of the certificate. + + :return: The version number of the certificate. + :rtype: :py:class:`int` + """ + return _lib.X509_get_version(self._x509) + + def get_pubkey(self): + """ + Get the public key of the certificate. + + :return: The public key. + :rtype: :py:class:`PKey` + """ + pkey = PKey.__new__(PKey) + pkey._pkey = _lib.X509_get_pubkey(self._x509) + if pkey._pkey == _ffi.NULL: + _raise_current_error() + pkey._pkey = _ffi.gc(pkey._pkey, _lib.EVP_PKEY_free) + pkey._only_public = True + return pkey + + def set_pubkey(self, pkey): + """ + Set the public key of the certificate. + + :param pkey: The public key. + :type pkey: :py:class:`PKey` + + :return: :py:data:`None` + """ + if not isinstance(pkey, PKey): + raise TypeError("pkey must be a PKey instance") + + set_result = _lib.X509_set_pubkey(self._x509, pkey._pkey) + _openssl_assert(set_result == 1) + + def sign(self, pkey, digest): + """ + Sign the certificate with this key and digest type. + + :param pkey: The key to sign with. + :type pkey: :py:class:`PKey` + + :param digest: The name of the message digest to use. + :type digest: :py:class:`bytes` + + :return: :py:data:`None` + """ + if not isinstance(pkey, PKey): + raise TypeError("pkey must be a PKey instance") + + if pkey._only_public: + raise ValueError("Key only has public part") + + if not pkey._initialized: + raise ValueError("Key is uninitialized") + + evp_md = _lib.EVP_get_digestbyname(_byte_string(digest)) + if evp_md == _ffi.NULL: + raise ValueError("No such digest method") + + sign_result = _lib.X509_sign(self._x509, pkey._pkey, evp_md) + _openssl_assert(sign_result > 0) + + def get_signature_algorithm(self): + """ + Return the signature algorithm used in the certificate. + + :return: The name of the algorithm. + :rtype: :py:class:`bytes` + + :raises ValueError: If the signature algorithm is undefined. + + .. versionadded:: 0.13 + """ + algor = _lib.X509_get0_tbs_sigalg(self._x509) + nid = _lib.OBJ_obj2nid(algor.algorithm) + if nid == _lib.NID_undef: + raise ValueError("Undefined signature algorithm") + return _ffi.string(_lib.OBJ_nid2ln(nid)) + + def digest(self, digest_name): + """ + Return the digest of the X509 object. + + :param digest_name: The name of the digest algorithm to use. + :type digest_name: :py:class:`bytes` + + :return: The digest of the object, formatted as + :py:const:`b":"`-delimited hex pairs. 
+ :rtype: :py:class:`bytes` + """ + digest = _lib.EVP_get_digestbyname(_byte_string(digest_name)) + if digest == _ffi.NULL: + raise ValueError("No such digest method") + + result_buffer = _ffi.new("unsigned char[]", _lib.EVP_MAX_MD_SIZE) + result_length = _ffi.new("unsigned int[]", 1) + result_length[0] = len(result_buffer) + + digest_result = _lib.X509_digest( + self._x509, digest, result_buffer, result_length) + _openssl_assert(digest_result == 1) + + return b":".join([ + b16encode(ch).upper() for ch + in _ffi.buffer(result_buffer, result_length[0])]) + + def subject_name_hash(self): + """ + Return the hash of the X509 subject. + + :return: The hash of the subject. + :rtype: :py:class:`bytes` + """ + return _lib.X509_subject_name_hash(self._x509) + + def set_serial_number(self, serial): + """ + Set the serial number of the certificate. + + :param serial: The new serial number. + :type serial: :py:class:`int` + + :return: :py:data`None` + """ + if not isinstance(serial, _integer_types): + raise TypeError("serial must be an integer") + + hex_serial = hex(serial)[2:] + if not isinstance(hex_serial, bytes): + hex_serial = hex_serial.encode('ascii') + + bignum_serial = _ffi.new("BIGNUM**") + + # BN_hex2bn stores the result in &bignum. Unless it doesn't feel like + # it. If bignum is still NULL after this call, then the return value + # is actually the result. I hope. -exarkun + small_serial = _lib.BN_hex2bn(bignum_serial, hex_serial) + + if bignum_serial[0] == _ffi.NULL: + set_result = _lib.ASN1_INTEGER_set( + _lib.X509_get_serialNumber(self._x509), small_serial) + if set_result: + # TODO Not tested + _raise_current_error() + else: + asn1_serial = _lib.BN_to_ASN1_INTEGER(bignum_serial[0], _ffi.NULL) + _lib.BN_free(bignum_serial[0]) + if asn1_serial == _ffi.NULL: + # TODO Not tested + _raise_current_error() + asn1_serial = _ffi.gc(asn1_serial, _lib.ASN1_INTEGER_free) + set_result = _lib.X509_set_serialNumber(self._x509, asn1_serial) + _openssl_assert(set_result == 1) + + def get_serial_number(self): + """ + Return the serial number of this certificate. + + :return: The serial number. + :rtype: int + """ + asn1_serial = _lib.X509_get_serialNumber(self._x509) + bignum_serial = _lib.ASN1_INTEGER_to_BN(asn1_serial, _ffi.NULL) + try: + hex_serial = _lib.BN_bn2hex(bignum_serial) + try: + hexstring_serial = _ffi.string(hex_serial) + serial = int(hexstring_serial, 16) + return serial + finally: + _lib.OPENSSL_free(hex_serial) + finally: + _lib.BN_free(bignum_serial) + + def gmtime_adj_notAfter(self, amount): + """ + Adjust the time stamp on which the certificate stops being valid. + + :param int amount: The number of seconds by which to adjust the + timestamp. + :return: ``None`` + """ + if not isinstance(amount, int): + raise TypeError("amount must be an integer") + + notAfter = _lib.X509_get_notAfter(self._x509) + _lib.X509_gmtime_adj(notAfter, amount) + + def gmtime_adj_notBefore(self, amount): + """ + Adjust the timestamp on which the certificate starts being valid. + + :param amount: The number of seconds by which to adjust the timestamp. + :return: ``None`` + """ + if not isinstance(amount, int): + raise TypeError("amount must be an integer") + + notBefore = _lib.X509_get_notBefore(self._x509) + _lib.X509_gmtime_adj(notBefore, amount) + + def has_expired(self): + """ + Check whether the certificate has expired. + + :return: ``True`` if the certificate has expired, ``False`` otherwise. 
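# Sketch of reading certificate metadata with the accessors above; assumes
# `from OpenSSL import crypto` and that `cert` is an X509 instance obtained
# elsewhere (e.g. from load_certificate(), defined later in this module).
print(cert.get_version(), cert.get_serial_number())
print(cert.get_signature_algorithm())    # e.g. b'sha256WithRSAEncryption'
print(cert.digest("sha256"))             # b":"-delimited fingerprint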
+ :rtype: bool + """ + time_string = _native(self.get_notAfter()) + not_after = datetime.datetime.strptime(time_string, "%Y%m%d%H%M%SZ") + + return not_after < datetime.datetime.utcnow() + + def _get_boundary_time(self, which): + return _get_asn1_time(which(self._x509)) + + def get_notBefore(self): + """ + Get the timestamp at which the certificate starts being valid. + + The timestamp is formatted as an ASN.1 TIME:: + + YYYYMMDDhhmmssZ + + :return: A timestamp string, or ``None`` if there is none. + :rtype: bytes or NoneType + """ + return self._get_boundary_time(_lib.X509_get_notBefore) + + def _set_boundary_time(self, which, when): + return _set_asn1_time(which(self._x509), when) + + def set_notBefore(self, when): + """ + Set the timestamp at which the certificate starts being valid. + + The timestamp is formatted as an ASN.1 TIME:: + + YYYYMMDDhhmmssZ + + :param bytes when: A timestamp string. + :return: ``None`` + """ + return self._set_boundary_time(_lib.X509_get_notBefore, when) + + def get_notAfter(self): + """ + Get the timestamp at which the certificate stops being valid. + + The timestamp is formatted as an ASN.1 TIME:: + + YYYYMMDDhhmmssZ + + :return: A timestamp string, or ``None`` if there is none. + :rtype: bytes or NoneType + """ + return self._get_boundary_time(_lib.X509_get_notAfter) + + def set_notAfter(self, when): + """ + Set the timestamp at which the certificate stops being valid. + + The timestamp is formatted as an ASN.1 TIME:: + + YYYYMMDDhhmmssZ + + :param bytes when: A timestamp string. + :return: ``None`` + """ + return self._set_boundary_time(_lib.X509_get_notAfter, when) + + def _get_name(self, which): + name = X509Name.__new__(X509Name) + name._name = which(self._x509) + _openssl_assert(name._name != _ffi.NULL) + + # The name is owned by the X509 structure. As long as the X509Name + # Python object is alive, keep the X509 Python object alive. + name._owner = self + + return name + + def _set_name(self, which, name): + if not isinstance(name, X509Name): + raise TypeError("name must be an X509Name") + set_result = which(self._x509, name._name) + _openssl_assert(set_result == 1) + + def get_issuer(self): + """ + Return the issuer of this certificate. + + This creates a new :class:`X509Name` that wraps the underlying issuer + name field on the certificate. Modifying it will modify the underlying + certificate, and will have the effect of modifying any other + :class:`X509Name` that refers to this issuer. + + :return: The issuer of this certificate. + :rtype: :class:`X509Name` + """ + name = self._get_name(_lib.X509_get_issuer_name) + self._issuer_invalidator.add(name) + return name + + def set_issuer(self, issuer): + """ + Set the issuer of this certificate. + + :param issuer: The issuer. + :type issuer: :py:class:`X509Name` + + :return: ``None`` + """ + self._set_name(_lib.X509_set_issuer_name, issuer) + self._issuer_invalidator.clear() + + def get_subject(self): + """ + Return the subject of this certificate. + + This creates a new :class:`X509Name` that wraps the underlying subject + name field on the certificate. Modifying it will modify the underlying + certificate, and will have the effect of modifying any other + :class:`X509Name` that refers to this subject. + + :return: The subject of this certificate. + :rtype: :class:`X509Name` + """ + name = self._get_name(_lib.X509_get_subject_name) + self._subject_invalidator.add(name) + return name + + def set_subject(self, subject): + """ + Set the subject of this certificate. + + :param subject: The subject. 
+ :type subject: :py:class:`X509Name` + + :return: ``None`` + """ + self._set_name(_lib.X509_set_subject_name, subject) + self._subject_invalidator.clear() + + def get_extension_count(self): + """ + Get the number of extensions on this certificate. + + :return: The number of extensions. + :rtype: :py:class:`int` + + .. versionadded:: 0.12 + """ + return _lib.X509_get_ext_count(self._x509) + + def add_extensions(self, extensions): + """ + Add extensions to the certificate. + + :param extensions: The extensions to add. + :type extensions: An iterable of :py:class:`X509Extension` objects. + :return: ``None`` + """ + for ext in extensions: + if not isinstance(ext, X509Extension): + raise ValueError("One of the elements is not an X509Extension") + + add_result = _lib.X509_add_ext(self._x509, ext._extension, -1) + if not add_result: + _raise_current_error() + + def get_extension(self, index): + """ + Get a specific extension of the certificate by index. + + Extensions on a certificate are kept in order. The index + parameter selects which extension will be returned. + + :param int index: The index of the extension to retrieve. + :return: The extension at the specified index. + :rtype: :py:class:`X509Extension` + :raises IndexError: If the extension index was out of bounds. + + .. versionadded:: 0.12 + """ + ext = X509Extension.__new__(X509Extension) + ext._extension = _lib.X509_get_ext(self._x509, index) + if ext._extension == _ffi.NULL: + raise IndexError("extension index out of bounds") + + extension = _lib.X509_EXTENSION_dup(ext._extension) + ext._extension = _ffi.gc(extension, _lib.X509_EXTENSION_free) + return ext + + +class X509StoreFlags(object): + """ + Flags for X509 verification, used to change the behavior of + :class:`X509Store`. + + See `OpenSSL Verification Flags`_ for details. + + .. _OpenSSL Verification Flags: + https://www.openssl.org/docs/manmaster/man3/X509_VERIFY_PARAM_set_flags.html + """ + CRL_CHECK = _lib.X509_V_FLAG_CRL_CHECK + CRL_CHECK_ALL = _lib.X509_V_FLAG_CRL_CHECK_ALL + IGNORE_CRITICAL = _lib.X509_V_FLAG_IGNORE_CRITICAL + X509_STRICT = _lib.X509_V_FLAG_X509_STRICT + ALLOW_PROXY_CERTS = _lib.X509_V_FLAG_ALLOW_PROXY_CERTS + POLICY_CHECK = _lib.X509_V_FLAG_POLICY_CHECK + EXPLICIT_POLICY = _lib.X509_V_FLAG_EXPLICIT_POLICY + INHIBIT_MAP = _lib.X509_V_FLAG_INHIBIT_MAP + NOTIFY_POLICY = _lib.X509_V_FLAG_NOTIFY_POLICY + CHECK_SS_SIGNATURE = _lib.X509_V_FLAG_CHECK_SS_SIGNATURE + CB_ISSUER_CHECK = _lib.X509_V_FLAG_CB_ISSUER_CHECK + + +class X509Store(object): + """ + An X.509 store. + + An X.509 store is used to describe a context in which to verify a + certificate. A description of a context may include a set of certificates + to trust, a set of certificate revocation lists, verification flags and + more. + + An X.509 store, being only a description, cannot be used by itself to + verify a certificate. To carry out the actual verification process, see + :class:`X509StoreContext`. + """ + + def __init__(self): + store = _lib.X509_STORE_new() + self._store = _ffi.gc(store, _lib.X509_STORE_free) + + def add_cert(self, cert): + """ + Adds a trusted certificate to this store. + + Adding a certificate with this method adds this certificate as a + *trusted* certificate. + + :param X509 cert: The certificate to add to this store. + + :raises TypeError: If the certificate is not an :class:`X509`. + + :raises OpenSSL.crypto.Error: If OpenSSL was unhappy with your + certificate. + + :return: ``None`` if the certificate was added successfully. 
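# Sketch of issuing a self-signed certificate with the X509 API above,
# reusing `key` and the crypto import from the CSR sketch earlier; all
# values are illustrative, and X509Extension is defined earlier in this
# module.
cert = crypto.X509()
cert.set_version(2)                      # zero-based, so 2 means X.509 v3
cert.set_serial_number(1000)
cert.get_subject().CN = "example.com"
cert.set_issuer(cert.get_subject())      # self-signed: issuer == subject
cert.gmtime_adj_notBefore(0)
cert.gmtime_adj_notAfter(365 * 24 * 60 * 60)
cert.set_pubkey(key)
cert.add_extensions([
    crypto.X509Extension(b"basicConstraints", True, b"CA:TRUE"),
])
cert.sign(key, "sha256")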
+ """ + if not isinstance(cert, X509): + raise TypeError() + + # As of OpenSSL 1.1.0i adding the same cert to the store more than + # once doesn't cause an error. Accordingly, this code now silences + # the error for OpenSSL < 1.1.0i as well. + if _lib.X509_STORE_add_cert(self._store, cert._x509) == 0: + code = _lib.ERR_peek_error() + err_reason = _lib.ERR_GET_REASON(code) + _openssl_assert( + err_reason == _lib.X509_R_CERT_ALREADY_IN_HASH_TABLE + ) + _lib.ERR_clear_error() + + def add_crl(self, crl): + """ + Add a certificate revocation list to this store. + + The certificate revocation lists added to a store will only be used if + the associated flags are configured to check certificate revocation + lists. + + .. versionadded:: 16.1.0 + + :param CRL crl: The certificate revocation list to add to this store. + :return: ``None`` if the certificate revocation list was added + successfully. + """ + _openssl_assert(_lib.X509_STORE_add_crl(self._store, crl._crl) != 0) + + def set_flags(self, flags): + """ + Set verification flags to this store. + + Verification flags can be combined by oring them together. + + .. note:: + + Setting a verification flag sometimes requires clients to add + additional information to the store, otherwise a suitable error will + be raised. + + For example, in setting flags to enable CRL checking a + suitable CRL must be added to the store otherwise an error will be + raised. + + .. versionadded:: 16.1.0 + + :param int flags: The verification flags to set on this store. + See :class:`X509StoreFlags` for available constants. + :return: ``None`` if the verification flags were successfully set. + """ + _openssl_assert(_lib.X509_STORE_set_flags(self._store, flags) != 0) + + def set_time(self, vfy_time): + """ + Set the time against which the certificates are verified. + + Normally the current time is used. + + .. note:: + + For example, you can determine if a certificate was valid at a given + time. + + .. versionadded:: 17.0.0 + + :param datetime vfy_time: The verification time to set on this store. + :return: ``None`` if the verification time was successfully set. + """ + param = _lib.X509_VERIFY_PARAM_new() + param = _ffi.gc(param, _lib.X509_VERIFY_PARAM_free) + + _lib.X509_VERIFY_PARAM_set_time(param, int(vfy_time.strftime('%s'))) + _openssl_assert(_lib.X509_STORE_set1_param(self._store, param) != 0) + + +class X509StoreContextError(Exception): + """ + An exception raised when an error occurred while verifying a certificate + using `OpenSSL.X509StoreContext.verify_certificate`. + + :ivar certificate: The certificate which caused verificate failure. + :type certificate: :class:`X509` + """ + + def __init__(self, message, certificate): + super(X509StoreContextError, self).__init__(message) + self.certificate = certificate + + +class X509StoreContext(object): + """ + An X.509 store context. + + An X.509 store context is used to carry out the actual verification process + of a certificate in a described context. For describing such a context, see + :class:`X509Store`. + + :ivar _store_ctx: The underlying X509_STORE_CTX structure used by this + instance. It is dynamically allocated and automatically garbage + collected. + :ivar _store: See the ``store`` ``__init__`` parameter. + :ivar _cert: See the ``certificate`` ``__init__`` parameter. + :param X509Store store: The certificates which will be trusted for the + purposes of any verifications. + :param X509 certificate: The certificate to be verified. 
+ """ + + def __init__(self, store, certificate): + store_ctx = _lib.X509_STORE_CTX_new() + self._store_ctx = _ffi.gc(store_ctx, _lib.X509_STORE_CTX_free) + self._store = store + self._cert = certificate + # Make the store context available for use after instantiating this + # class by initializing it now. Per testing, subsequent calls to + # :meth:`_init` have no adverse affect. + self._init() + + def _init(self): + """ + Set up the store context for a subsequent verification operation. + + Calling this method more than once without first calling + :meth:`_cleanup` will leak memory. + """ + ret = _lib.X509_STORE_CTX_init( + self._store_ctx, self._store._store, self._cert._x509, _ffi.NULL + ) + if ret <= 0: + _raise_current_error() + + def _cleanup(self): + """ + Internally cleans up the store context. + + The store context can then be reused with a new call to :meth:`_init`. + """ + _lib.X509_STORE_CTX_cleanup(self._store_ctx) + + def _exception_from_context(self): + """ + Convert an OpenSSL native context error failure into a Python + exception. + + When a call to native OpenSSL X509_verify_cert fails, additional + information about the failure can be obtained from the store context. + """ + errors = [ + _lib.X509_STORE_CTX_get_error(self._store_ctx), + _lib.X509_STORE_CTX_get_error_depth(self._store_ctx), + _native(_ffi.string(_lib.X509_verify_cert_error_string( + _lib.X509_STORE_CTX_get_error(self._store_ctx)))), + ] + # A context error should always be associated with a certificate, so we + # expect this call to never return :class:`None`. + _x509 = _lib.X509_STORE_CTX_get_current_cert(self._store_ctx) + _cert = _lib.X509_dup(_x509) + pycert = X509._from_raw_x509_ptr(_cert) + return X509StoreContextError(errors, pycert) + + def set_store(self, store): + """ + Set the context's X.509 store. + + .. versionadded:: 0.15 + + :param X509Store store: The store description which will be used for + the purposes of any *future* verifications. + """ + self._store = store + + def verify_certificate(self): + """ + Verify a certificate in a context. + + .. versionadded:: 0.15 + + :raises X509StoreContextError: If an error occurred when validating a + certificate in the context. Sets ``certificate`` attribute to + indicate which certificate caused the error. + """ + # Always re-initialize the store context in case + # :meth:`verify_certificate` is called multiple times. + # + # :meth:`_init` is called in :meth:`__init__` so _cleanup is called + # before _init to ensure memory is not leaked. + self._cleanup() + self._init() + ret = _lib.X509_verify_cert(self._store_ctx) + self._cleanup() + if ret <= 0: + raise self._exception_from_context() + + +def load_certificate(type, buffer): + """ + Load a certificate (X509) from the string *buffer* encoded with the + type *type*. 
+ + :param type: The file type (one of FILETYPE_PEM, FILETYPE_ASN1) + + :param bytes buffer: The buffer the certificate is stored in + + :return: The X509 object + """ + if isinstance(buffer, _text_type): + buffer = buffer.encode("ascii") + + bio = _new_mem_buf(buffer) + + if type == FILETYPE_PEM: + x509 = _lib.PEM_read_bio_X509(bio, _ffi.NULL, _ffi.NULL, _ffi.NULL) + elif type == FILETYPE_ASN1: + x509 = _lib.d2i_X509_bio(bio, _ffi.NULL) + else: + raise ValueError( + "type argument must be FILETYPE_PEM or FILETYPE_ASN1") + + if x509 == _ffi.NULL: + _raise_current_error() + + return X509._from_raw_x509_ptr(x509) + + +def dump_certificate(type, cert): + """ + Dump the certificate *cert* into a buffer string encoded with the type + *type*. + + :param type: The file type (one of FILETYPE_PEM, FILETYPE_ASN1, or + FILETYPE_TEXT) + :param cert: The certificate to dump + :return: The buffer with the dumped certificate in + """ + bio = _new_mem_buf() + + if type == FILETYPE_PEM: + result_code = _lib.PEM_write_bio_X509(bio, cert._x509) + elif type == FILETYPE_ASN1: + result_code = _lib.i2d_X509_bio(bio, cert._x509) + elif type == FILETYPE_TEXT: + result_code = _lib.X509_print_ex(bio, cert._x509, 0, 0) + else: + raise ValueError( + "type argument must be FILETYPE_PEM, FILETYPE_ASN1, or " + "FILETYPE_TEXT") + + assert result_code == 1 + return _bio_to_string(bio) + + +def dump_publickey(type, pkey): + """ + Dump a public key to a buffer. + + :param type: The file type (one of :data:`FILETYPE_PEM` or + :data:`FILETYPE_ASN1`). + :param PKey pkey: The public key to dump + :return: The buffer with the dumped key in it. + :rtype: bytes + """ + bio = _new_mem_buf() + if type == FILETYPE_PEM: + write_bio = _lib.PEM_write_bio_PUBKEY + elif type == FILETYPE_ASN1: + write_bio = _lib.i2d_PUBKEY_bio + else: + raise ValueError("type argument must be FILETYPE_PEM or FILETYPE_ASN1") + + result_code = write_bio(bio, pkey._pkey) + if result_code != 1: # pragma: no cover + _raise_current_error() + + return _bio_to_string(bio) + + +def dump_privatekey(type, pkey, cipher=None, passphrase=None): + """ + Dump the private key *pkey* into a buffer string encoded with the type + *type*. Optionally (if *type* is :const:`FILETYPE_PEM`) encrypting it + using *cipher* and *passphrase*. + + :param type: The file type (one of :const:`FILETYPE_PEM`, + :const:`FILETYPE_ASN1`, or :const:`FILETYPE_TEXT`) + :param PKey pkey: The PKey to dump + :param cipher: (optional) if encrypted PEM format, the cipher to use + :param passphrase: (optional) if encrypted PEM format, this can be either + the passphrase to use, or a callback for providing the passphrase. 
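# Sketch of loading a PEM certificate and re-serializing it with the
# load_certificate() / dump_certificate() / dump_publickey() helpers above;
# "cert.pem" is a hypothetical path and `crypto` is OpenSSL.crypto.
with open("cert.pem", "rb") as f:
    cert = crypto.load_certificate(crypto.FILETYPE_PEM, f.read())

print(crypto.dump_certificate(crypto.FILETYPE_TEXT, cert).decode("utf-8"))
der_bytes = crypto.dump_certificate(crypto.FILETYPE_ASN1, cert)
pub_pem = crypto.dump_publickey(crypto.FILETYPE_PEM, cert.get_pubkey())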
+ + :return: The buffer with the dumped key in + :rtype: bytes + """ + bio = _new_mem_buf() + + if not isinstance(pkey, PKey): + raise TypeError("pkey must be a PKey") + + if cipher is not None: + if passphrase is None: + raise TypeError( + "if a value is given for cipher " + "one must also be given for passphrase") + cipher_obj = _lib.EVP_get_cipherbyname(_byte_string(cipher)) + if cipher_obj == _ffi.NULL: + raise ValueError("Invalid cipher name") + else: + cipher_obj = _ffi.NULL + + helper = _PassphraseHelper(type, passphrase) + if type == FILETYPE_PEM: + result_code = _lib.PEM_write_bio_PrivateKey( + bio, pkey._pkey, cipher_obj, _ffi.NULL, 0, + helper.callback, helper.callback_args) + helper.raise_if_problem() + elif type == FILETYPE_ASN1: + result_code = _lib.i2d_PrivateKey_bio(bio, pkey._pkey) + elif type == FILETYPE_TEXT: + if _lib.EVP_PKEY_id(pkey._pkey) != _lib.EVP_PKEY_RSA: + raise TypeError("Only RSA keys are supported for FILETYPE_TEXT") + + rsa = _ffi.gc( + _lib.EVP_PKEY_get1_RSA(pkey._pkey), + _lib.RSA_free + ) + result_code = _lib.RSA_print(bio, rsa, 0) + else: + raise ValueError( + "type argument must be FILETYPE_PEM, FILETYPE_ASN1, or " + "FILETYPE_TEXT") + + _openssl_assert(result_code != 0) + + return _bio_to_string(bio) + + +class Revoked(object): + """ + A certificate revocation. + """ + # https://www.openssl.org/docs/manmaster/man5/x509v3_config.html#CRL-distribution-points + # which differs from crl_reasons of crypto/x509v3/v3_enum.c that matches + # OCSP_crl_reason_str. We use the latter, just like the command line + # program. + _crl_reasons = [ + b"unspecified", + b"keyCompromise", + b"CACompromise", + b"affiliationChanged", + b"superseded", + b"cessationOfOperation", + b"certificateHold", + # b"removeFromCRL", + ] + + def __init__(self): + revoked = _lib.X509_REVOKED_new() + self._revoked = _ffi.gc(revoked, _lib.X509_REVOKED_free) + + def set_serial(self, hex_str): + """ + Set the serial number. + + The serial number is formatted as a hexadecimal number encoded in + ASCII. + + :param bytes hex_str: The new serial number. + + :return: ``None`` + """ + bignum_serial = _ffi.gc(_lib.BN_new(), _lib.BN_free) + bignum_ptr = _ffi.new("BIGNUM**") + bignum_ptr[0] = bignum_serial + bn_result = _lib.BN_hex2bn(bignum_ptr, hex_str) + if not bn_result: + raise ValueError("bad hex string") + + asn1_serial = _ffi.gc( + _lib.BN_to_ASN1_INTEGER(bignum_serial, _ffi.NULL), + _lib.ASN1_INTEGER_free) + _lib.X509_REVOKED_set_serialNumber(self._revoked, asn1_serial) + + def get_serial(self): + """ + Get the serial number. + + The serial number is formatted as a hexadecimal number encoded in + ASCII. + + :return: The serial number. + :rtype: bytes + """ + bio = _new_mem_buf() + + asn1_int = _lib.X509_REVOKED_get0_serialNumber(self._revoked) + _openssl_assert(asn1_int != _ffi.NULL) + result = _lib.i2a_ASN1_INTEGER(bio, asn1_int) + _openssl_assert(result >= 0) + return _bio_to_string(bio) + + def _delete_reason(self): + for i in range(_lib.X509_REVOKED_get_ext_count(self._revoked)): + ext = _lib.X509_REVOKED_get_ext(self._revoked, i) + obj = _lib.X509_EXTENSION_get_object(ext) + if _lib.OBJ_obj2nid(obj) == _lib.NID_crl_reason: + _lib.X509_EXTENSION_free(ext) + _lib.X509_REVOKED_delete_ext(self._revoked, i) + break + + def set_reason(self, reason): + """ + Set the reason of this revocation. + + If :data:`reason` is ``None``, delete the reason instead. + + :param reason: The reason string. + :type reason: :class:`bytes` or :class:`NoneType` + + :return: ``None`` + + .. 
seealso:: + + :meth:`all_reasons`, which gives you a list of all supported + reasons which you might pass to this method. + """ + if reason is None: + self._delete_reason() + elif not isinstance(reason, bytes): + raise TypeError("reason must be None or a byte string") + else: + reason = reason.lower().replace(b' ', b'') + reason_code = [r.lower() for r in self._crl_reasons].index(reason) + + new_reason_ext = _lib.ASN1_ENUMERATED_new() + _openssl_assert(new_reason_ext != _ffi.NULL) + new_reason_ext = _ffi.gc(new_reason_ext, _lib.ASN1_ENUMERATED_free) + + set_result = _lib.ASN1_ENUMERATED_set(new_reason_ext, reason_code) + _openssl_assert(set_result != _ffi.NULL) + + self._delete_reason() + add_result = _lib.X509_REVOKED_add1_ext_i2d( + self._revoked, _lib.NID_crl_reason, new_reason_ext, 0, 0) + _openssl_assert(add_result == 1) + + def get_reason(self): + """ + Get the reason of this revocation. + + :return: The reason, or ``None`` if there is none. + :rtype: bytes or NoneType + + .. seealso:: + + :meth:`all_reasons`, which gives you a list of all supported + reasons this method might return. + """ + for i in range(_lib.X509_REVOKED_get_ext_count(self._revoked)): + ext = _lib.X509_REVOKED_get_ext(self._revoked, i) + obj = _lib.X509_EXTENSION_get_object(ext) + if _lib.OBJ_obj2nid(obj) == _lib.NID_crl_reason: + bio = _new_mem_buf() + + print_result = _lib.X509V3_EXT_print(bio, ext, 0, 0) + if not print_result: + print_result = _lib.M_ASN1_OCTET_STRING_print( + bio, _lib.X509_EXTENSION_get_data(ext) + ) + _openssl_assert(print_result != 0) + + return _bio_to_string(bio) + + def all_reasons(self): + """ + Return a list of all the supported reason strings. + + This list is a copy; modifying it does not change the supported reason + strings. + + :return: A list of reason strings. + :rtype: :class:`list` of :class:`bytes` + """ + return self._crl_reasons[:] + + def set_rev_date(self, when): + """ + Set the revocation timestamp. + + :param bytes when: The timestamp of the revocation, + as ASN.1 TIME. + :return: ``None`` + """ + dt = _lib.X509_REVOKED_get0_revocationDate(self._revoked) + return _set_asn1_time(dt, when) + + def get_rev_date(self): + """ + Get the revocation timestamp. + + :return: The timestamp of the revocation, as ASN.1 TIME. + :rtype: bytes + """ + dt = _lib.X509_REVOKED_get0_revocationDate(self._revoked) + return _get_asn1_time(dt) + + +class CRL(object): + """ + A certificate revocation list. + """ + + def __init__(self): + crl = _lib.X509_CRL_new() + self._crl = _ffi.gc(crl, _lib.X509_CRL_free) + + def to_cryptography(self): + """ + Export as a ``cryptography`` CRL. + + :rtype: ``cryptography.x509.CertificateRevocationList`` + + .. versionadded:: 17.1.0 + """ + from cryptography.hazmat.backends.openssl.x509 import ( + _CertificateRevocationList + ) + backend = _get_backend() + return _CertificateRevocationList(backend, self._crl) + + @classmethod + def from_cryptography(cls, crypto_crl): + """ + Construct based on a ``cryptography`` *crypto_crl*. + + :param crypto_crl: A ``cryptography`` certificate revocation list + :type crypto_crl: ``cryptography.x509.CertificateRevocationList`` + + :rtype: CRL + + .. versionadded:: 17.1.0 + """ + if not isinstance(crypto_crl, x509.CertificateRevocationList): + raise TypeError("Must be a certificate revocation list") + + crl = cls() + crl._crl = crypto_crl._x509_crl + return crl + + def get_revoked(self): + """ + Return the revocations in this certificate revocation list. + + These revocations will be provided by value, not by reference. 
+ That means it's okay to mutate them: it won't affect this CRL. + + :return: The revocations in this CRL. + :rtype: :class:`tuple` of :class:`Revocation` + """ + results = [] + revoked_stack = _lib.X509_CRL_get_REVOKED(self._crl) + for i in range(_lib.sk_X509_REVOKED_num(revoked_stack)): + revoked = _lib.sk_X509_REVOKED_value(revoked_stack, i) + revoked_copy = _lib.Cryptography_X509_REVOKED_dup(revoked) + pyrev = Revoked.__new__(Revoked) + pyrev._revoked = _ffi.gc(revoked_copy, _lib.X509_REVOKED_free) + results.append(pyrev) + if results: + return tuple(results) + + def add_revoked(self, revoked): + """ + Add a revoked (by value not reference) to the CRL structure + + This revocation will be added by value, not by reference. That + means it's okay to mutate it after adding: it won't affect + this CRL. + + :param Revoked revoked: The new revocation. + :return: ``None`` + """ + copy = _lib.Cryptography_X509_REVOKED_dup(revoked._revoked) + _openssl_assert(copy != _ffi.NULL) + + add_result = _lib.X509_CRL_add0_revoked(self._crl, copy) + _openssl_assert(add_result != 0) + + def get_issuer(self): + """ + Get the CRL's issuer. + + .. versionadded:: 16.1.0 + + :rtype: X509Name + """ + _issuer = _lib.X509_NAME_dup(_lib.X509_CRL_get_issuer(self._crl)) + _openssl_assert(_issuer != _ffi.NULL) + _issuer = _ffi.gc(_issuer, _lib.X509_NAME_free) + issuer = X509Name.__new__(X509Name) + issuer._name = _issuer + return issuer + + def set_version(self, version): + """ + Set the CRL version. + + .. versionadded:: 16.1.0 + + :param int version: The version of the CRL. + :return: ``None`` + """ + _openssl_assert(_lib.X509_CRL_set_version(self._crl, version) != 0) + + def _set_boundary_time(self, which, when): + return _set_asn1_time(which(self._crl), when) + + def set_lastUpdate(self, when): + """ + Set when the CRL was last updated. + + The timestamp is formatted as an ASN.1 TIME:: + + YYYYMMDDhhmmssZ + + .. versionadded:: 16.1.0 + + :param bytes when: A timestamp string. + :return: ``None`` + """ + return self._set_boundary_time(_lib.X509_CRL_get_lastUpdate, when) + + def set_nextUpdate(self, when): + """ + Set when the CRL will next be udpated. + + The timestamp is formatted as an ASN.1 TIME:: + + YYYYMMDDhhmmssZ + + .. versionadded:: 16.1.0 + + :param bytes when: A timestamp string. + :return: ``None`` + """ + return self._set_boundary_time(_lib.X509_CRL_get_nextUpdate, when) + + def sign(self, issuer_cert, issuer_key, digest): + """ + Sign the CRL. + + Signing a CRL enables clients to associate the CRL itself with an + issuer. Before a CRL is meaningful to other OpenSSL functions, it must + be signed by an issuer. + + This method implicitly sets the issuer's name based on the issuer + certificate and private key used to sign the CRL. + + .. versionadded:: 16.1.0 + + :param X509 issuer_cert: The issuer's certificate. + :param PKey issuer_key: The issuer's private key. + :param bytes digest: The digest method to sign the CRL with. + """ + digest_obj = _lib.EVP_get_digestbyname(digest) + _openssl_assert(digest_obj != _ffi.NULL) + _lib.X509_CRL_set_issuer_name( + self._crl, _lib.X509_get_subject_name(issuer_cert._x509)) + _lib.X509_CRL_sort(self._crl) + result = _lib.X509_CRL_sign(self._crl, issuer_key._pkey, digest_obj) + _openssl_assert(result != 0) + + def export(self, cert, key, type=FILETYPE_PEM, days=100, + digest=_UNSPECIFIED): + """ + Export the CRL as a string. + + :param X509 cert: The certificate used to sign the CRL. + :param PKey key: The key used to sign the CRL. 
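# Sketch of assembling and exporting a small CRL with the Revoked / CRL API
# above; `ca_cert` / `ca_key` are assumed to be a matching X509 / PKey pair
# and `crypto` is OpenSSL.crypto.  Note that export() takes the digest name
# as bytes.
rev = crypto.Revoked()
rev.set_serial(b"03AB")                  # hex-encoded serial number
rev.set_rev_date(b"20200101000000Z")     # ASN.1 TIME
rev.set_reason(b"keyCompromise")

crl = crypto.CRL()
crl.add_revoked(rev)
crl_pem = crl.export(ca_cert, ca_key, type=crypto.FILETYPE_PEM,
                     days=30, digest=b"sha256")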
+ :param int type: The export format, either :data:`FILETYPE_PEM`, + :data:`FILETYPE_ASN1`, or :data:`FILETYPE_TEXT`. + :param int days: The number of days until the next update of this CRL. + :param bytes digest: The name of the message digest to use (eg + ``b"sha256"``). + :rtype: bytes + """ + + if not isinstance(cert, X509): + raise TypeError("cert must be an X509 instance") + if not isinstance(key, PKey): + raise TypeError("key must be a PKey instance") + if not isinstance(type, int): + raise TypeError("type must be an integer") + + if digest is _UNSPECIFIED: + raise TypeError("digest must be provided") + + digest_obj = _lib.EVP_get_digestbyname(digest) + if digest_obj == _ffi.NULL: + raise ValueError("No such digest method") + + bio = _lib.BIO_new(_lib.BIO_s_mem()) + _openssl_assert(bio != _ffi.NULL) + + # A scratch time object to give different values to different CRL + # fields + sometime = _lib.ASN1_TIME_new() + _openssl_assert(sometime != _ffi.NULL) + + _lib.X509_gmtime_adj(sometime, 0) + _lib.X509_CRL_set_lastUpdate(self._crl, sometime) + + _lib.X509_gmtime_adj(sometime, days * 24 * 60 * 60) + _lib.X509_CRL_set_nextUpdate(self._crl, sometime) + + _lib.X509_CRL_set_issuer_name( + self._crl, _lib.X509_get_subject_name(cert._x509) + ) + + sign_result = _lib.X509_CRL_sign(self._crl, key._pkey, digest_obj) + if not sign_result: + _raise_current_error() + + return dump_crl(type, self) + + +class PKCS7(object): + def type_is_signed(self): + """ + Check if this NID_pkcs7_signed object + + :return: True if the PKCS7 is of type signed + """ + return bool(_lib.PKCS7_type_is_signed(self._pkcs7)) + + def type_is_enveloped(self): + """ + Check if this NID_pkcs7_enveloped object + + :returns: True if the PKCS7 is of type enveloped + """ + return bool(_lib.PKCS7_type_is_enveloped(self._pkcs7)) + + def type_is_signedAndEnveloped(self): + """ + Check if this NID_pkcs7_signedAndEnveloped object + + :returns: True if the PKCS7 is of type signedAndEnveloped + """ + return bool(_lib.PKCS7_type_is_signedAndEnveloped(self._pkcs7)) + + def type_is_data(self): + """ + Check if this NID_pkcs7_data object + + :return: True if the PKCS7 is of type data + """ + return bool(_lib.PKCS7_type_is_data(self._pkcs7)) + + def get_type_name(self): + """ + Returns the type name of the PKCS7 structure + + :return: A string with the typename + """ + nid = _lib.OBJ_obj2nid(self._pkcs7.type) + string_type = _lib.OBJ_nid2sn(nid) + return _ffi.string(string_type) + + +class PKCS12(object): + """ + A PKCS #12 archive. + """ + + def __init__(self): + self._pkey = None + self._cert = None + self._cacerts = None + self._friendlyname = None + + def get_certificate(self): + """ + Get the certificate in the PKCS #12 structure. + + :return: The certificate, or :py:const:`None` if there is none. + :rtype: :py:class:`X509` or :py:const:`None` + """ + return self._cert + + def set_certificate(self, cert): + """ + Set the certificate in the PKCS #12 structure. + + :param cert: The new certificate, or :py:const:`None` to unset it. + :type cert: :py:class:`X509` or :py:const:`None` + + :return: ``None`` + """ + if not isinstance(cert, X509): + raise TypeError("cert must be an X509 instance") + self._cert = cert + + def get_privatekey(self): + """ + Get the private key in the PKCS #12 structure. + + :return: The private key, or :py:const:`None` if there is none. + :rtype: :py:class:`PKey` + """ + return self._pkey + + def set_privatekey(self, pkey): + """ + Set the certificate portion of the PKCS #12 structure. 
+ + :param pkey: The new private key, or :py:const:`None` to unset it. + :type pkey: :py:class:`PKey` or :py:const:`None` + + :return: ``None`` + """ + if not isinstance(pkey, PKey): + raise TypeError("pkey must be a PKey instance") + self._pkey = pkey + + def get_ca_certificates(self): + """ + Get the CA certificates in the PKCS #12 structure. + + :return: A tuple with the CA certificates in the chain, or + :py:const:`None` if there are none. + :rtype: :py:class:`tuple` of :py:class:`X509` or :py:const:`None` + """ + if self._cacerts is not None: + return tuple(self._cacerts) + + def set_ca_certificates(self, cacerts): + """ + Replace or set the CA certificates within the PKCS12 object. + + :param cacerts: The new CA certificates, or :py:const:`None` to unset + them. + :type cacerts: An iterable of :py:class:`X509` or :py:const:`None` + + :return: ``None`` + """ + if cacerts is None: + self._cacerts = None + else: + cacerts = list(cacerts) + for cert in cacerts: + if not isinstance(cert, X509): + raise TypeError( + "iterable must only contain X509 instances" + ) + self._cacerts = cacerts + + def set_friendlyname(self, name): + """ + Set the friendly name in the PKCS #12 structure. + + :param name: The new friendly name, or :py:const:`None` to unset. + :type name: :py:class:`bytes` or :py:const:`None` + + :return: ``None`` + """ + if name is None: + self._friendlyname = None + elif not isinstance(name, bytes): + raise TypeError( + "name must be a byte string or None (not %r)" % (name,) + ) + self._friendlyname = name + + def get_friendlyname(self): + """ + Get the friendly name in the PKCS# 12 structure. + + :returns: The friendly name, or :py:const:`None` if there is none. + :rtype: :py:class:`bytes` or :py:const:`None` + """ + return self._friendlyname + + def export(self, passphrase=None, iter=2048, maciter=1): + """ + Dump a PKCS12 object as a string. + + For more information, see the :c:func:`PKCS12_create` man page. + + :param passphrase: The passphrase used to encrypt the structure. Unlike + some other passphrase arguments, this *must* be a string, not a + callback. + :type passphrase: :py:data:`bytes` + + :param iter: Number of times to repeat the encryption step. + :type iter: :py:data:`int` + + :param maciter: Number of times to repeat the MAC step. + :type maciter: :py:data:`int` + + :return: The string representation of the PKCS #12 structure. + :rtype: + """ + passphrase = _text_to_bytes_and_warn("passphrase", passphrase) + + if self._cacerts is None: + cacerts = _ffi.NULL + else: + cacerts = _lib.sk_X509_new_null() + cacerts = _ffi.gc(cacerts, _lib.sk_X509_free) + for cert in self._cacerts: + _lib.sk_X509_push(cacerts, cert._x509) + + if passphrase is None: + passphrase = _ffi.NULL + + friendlyname = self._friendlyname + if friendlyname is None: + friendlyname = _ffi.NULL + + if self._pkey is None: + pkey = _ffi.NULL + else: + pkey = self._pkey._pkey + + if self._cert is None: + cert = _ffi.NULL + else: + cert = self._cert._x509 + + pkcs12 = _lib.PKCS12_create( + passphrase, friendlyname, pkey, cert, cacerts, + _lib.NID_pbe_WithSHA1And3_Key_TripleDES_CBC, + _lib.NID_pbe_WithSHA1And3_Key_TripleDES_CBC, + iter, maciter, 0) + if pkcs12 == _ffi.NULL: + _raise_current_error() + pkcs12 = _ffi.gc(pkcs12, _lib.PKCS12_free) + + bio = _new_mem_buf() + _lib.i2d_PKCS12_bio(bio, pkcs12) + return _bio_to_string(bio) + + +class NetscapeSPKI(object): + """ + A Netscape SPKI object. 
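# Sketch of bundling a matching key and certificate into a PKCS #12 blob
# with the PKCS12 API above and reading it back via load_pkcs12() (defined
# later in this module); `key` / `cert` are assumed PKey / X509 objects and
# `crypto` is OpenSSL.crypto.
p12 = crypto.PKCS12()
p12.set_privatekey(key)
p12.set_certificate(cert)
p12.set_friendlyname(b"example")
blob = p12.export(passphrase=b"secret")

p12_again = crypto.load_pkcs12(blob, passphrase=b"secret")
print(p12_again.get_friendlyname())      # b'example'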
+ """ + + def __init__(self): + spki = _lib.NETSCAPE_SPKI_new() + self._spki = _ffi.gc(spki, _lib.NETSCAPE_SPKI_free) + + def sign(self, pkey, digest): + """ + Sign the certificate request with this key and digest type. + + :param pkey: The private key to sign with. + :type pkey: :py:class:`PKey` + + :param digest: The message digest to use. + :type digest: :py:class:`bytes` + + :return: ``None`` + """ + if pkey._only_public: + raise ValueError("Key has only public part") + + if not pkey._initialized: + raise ValueError("Key is uninitialized") + + digest_obj = _lib.EVP_get_digestbyname(_byte_string(digest)) + if digest_obj == _ffi.NULL: + raise ValueError("No such digest method") + + sign_result = _lib.NETSCAPE_SPKI_sign( + self._spki, pkey._pkey, digest_obj + ) + _openssl_assert(sign_result > 0) + + def verify(self, key): + """ + Verifies a signature on a certificate request. + + :param PKey key: The public key that signature is supposedly from. + + :return: ``True`` if the signature is correct. + :rtype: bool + + :raises OpenSSL.crypto.Error: If the signature is invalid, or there was + a problem verifying the signature. + """ + answer = _lib.NETSCAPE_SPKI_verify(self._spki, key._pkey) + if answer <= 0: + _raise_current_error() + return True + + def b64_encode(self): + """ + Generate a base64 encoded representation of this SPKI object. + + :return: The base64 encoded string. + :rtype: :py:class:`bytes` + """ + encoded = _lib.NETSCAPE_SPKI_b64_encode(self._spki) + result = _ffi.string(encoded) + _lib.OPENSSL_free(encoded) + return result + + def get_pubkey(self): + """ + Get the public key of this certificate. + + :return: The public key. + :rtype: :py:class:`PKey` + """ + pkey = PKey.__new__(PKey) + pkey._pkey = _lib.NETSCAPE_SPKI_get_pubkey(self._spki) + _openssl_assert(pkey._pkey != _ffi.NULL) + pkey._pkey = _ffi.gc(pkey._pkey, _lib.EVP_PKEY_free) + pkey._only_public = True + return pkey + + def set_pubkey(self, pkey): + """ + Set the public key of the certificate + + :param pkey: The public key + :return: ``None`` + """ + set_result = _lib.NETSCAPE_SPKI_set_pubkey(self._spki, pkey._pkey) + _openssl_assert(set_result == 1) + + +class _PassphraseHelper(object): + def __init__(self, type, passphrase, more_args=False, truncate=False): + if type != FILETYPE_PEM and passphrase is not None: + raise ValueError( + "only FILETYPE_PEM key format supports encryption" + ) + self._passphrase = passphrase + self._more_args = more_args + self._truncate = truncate + self._problems = [] + + @property + def callback(self): + if self._passphrase is None: + return _ffi.NULL + elif isinstance(self._passphrase, bytes): + return _ffi.NULL + elif callable(self._passphrase): + return _ffi.callback("pem_password_cb", self._read_passphrase) + else: + raise TypeError( + "Last argument must be a byte string or a callable." + ) + + @property + def callback_args(self): + if self._passphrase is None: + return _ffi.NULL + elif isinstance(self._passphrase, bytes): + return self._passphrase + elif callable(self._passphrase): + return _ffi.NULL + else: + raise TypeError( + "Last argument must be a byte string or a callable." 
+ ) + + def raise_if_problem(self, exceptionType=Error): + if self._problems: + + # Flush the OpenSSL error queue + try: + _exception_from_error_queue(exceptionType) + except exceptionType: + pass + + raise self._problems.pop(0) + + def _read_passphrase(self, buf, size, rwflag, userdata): + try: + if self._more_args: + result = self._passphrase(size, rwflag, userdata) + else: + result = self._passphrase(rwflag) + if not isinstance(result, bytes): + raise ValueError("String expected") + if len(result) > size: + if self._truncate: + result = result[:size] + else: + raise ValueError( + "passphrase returned by callback is too long" + ) + for i in range(len(result)): + buf[i] = result[i:i + 1] + return len(result) + except Exception as e: + self._problems.append(e) + return 0 + + +def load_publickey(type, buffer): + """ + Load a public key from a buffer. + + :param type: The file type (one of :data:`FILETYPE_PEM`, + :data:`FILETYPE_ASN1`). + :param buffer: The buffer the key is stored in. + :type buffer: A Python string object, either unicode or bytestring. + :return: The PKey object. + :rtype: :class:`PKey` + """ + if isinstance(buffer, _text_type): + buffer = buffer.encode("ascii") + + bio = _new_mem_buf(buffer) + + if type == FILETYPE_PEM: + evp_pkey = _lib.PEM_read_bio_PUBKEY( + bio, _ffi.NULL, _ffi.NULL, _ffi.NULL) + elif type == FILETYPE_ASN1: + evp_pkey = _lib.d2i_PUBKEY_bio(bio, _ffi.NULL) + else: + raise ValueError("type argument must be FILETYPE_PEM or FILETYPE_ASN1") + + if evp_pkey == _ffi.NULL: + _raise_current_error() + + pkey = PKey.__new__(PKey) + pkey._pkey = _ffi.gc(evp_pkey, _lib.EVP_PKEY_free) + pkey._only_public = True + return pkey + + +def load_privatekey(type, buffer, passphrase=None): + """ + Load a private key (PKey) from the string *buffer* encoded with the type + *type*. + + :param type: The file type (one of FILETYPE_PEM, FILETYPE_ASN1) + :param buffer: The buffer the key is stored in + :param passphrase: (optional) if encrypted PEM format, this can be + either the passphrase to use, or a callback for + providing the passphrase. + + :return: The PKey object + """ + if isinstance(buffer, _text_type): + buffer = buffer.encode("ascii") + + bio = _new_mem_buf(buffer) + + helper = _PassphraseHelper(type, passphrase) + if type == FILETYPE_PEM: + evp_pkey = _lib.PEM_read_bio_PrivateKey( + bio, _ffi.NULL, helper.callback, helper.callback_args) + helper.raise_if_problem() + elif type == FILETYPE_ASN1: + evp_pkey = _lib.d2i_PrivateKey_bio(bio, _ffi.NULL) + else: + raise ValueError("type argument must be FILETYPE_PEM or FILETYPE_ASN1") + + if evp_pkey == _ffi.NULL: + _raise_current_error() + + pkey = PKey.__new__(PKey) + pkey._pkey = _ffi.gc(evp_pkey, _lib.EVP_PKEY_free) + return pkey + + +def dump_certificate_request(type, req): + """ + Dump the certificate request *req* into a buffer string encoded with the + type *type*. 
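# Sketch of encrypting a private key on dump and decrypting it on load,
# using dump_privatekey() (above) together with load_privatekey(); the
# passphrase may be bytes or a callback handled by _PassphraseHelper.
# `key` is an assumed PKey instance and `crypto` is OpenSSL.crypto.
pem = crypto.dump_privatekey(crypto.FILETYPE_PEM, key,
                             cipher="aes-256-cbc", passphrase=b"secret")

key_again = crypto.load_privatekey(crypto.FILETYPE_PEM, pem,
                                   passphrase=b"secret")
key_again = crypto.load_privatekey(crypto.FILETYPE_PEM, pem,
                                   passphrase=lambda rwflag: b"secret")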
+ + :param type: The file type (one of FILETYPE_PEM, FILETYPE_ASN1) + :param req: The certificate request to dump + :return: The buffer with the dumped certificate request in + """ + bio = _new_mem_buf() + + if type == FILETYPE_PEM: + result_code = _lib.PEM_write_bio_X509_REQ(bio, req._req) + elif type == FILETYPE_ASN1: + result_code = _lib.i2d_X509_REQ_bio(bio, req._req) + elif type == FILETYPE_TEXT: + result_code = _lib.X509_REQ_print_ex(bio, req._req, 0, 0) + else: + raise ValueError( + "type argument must be FILETYPE_PEM, FILETYPE_ASN1, or " + "FILETYPE_TEXT" + ) + + _openssl_assert(result_code != 0) + + return _bio_to_string(bio) + + +def load_certificate_request(type, buffer): + """ + Load a certificate request (X509Req) from the string *buffer* encoded with + the type *type*. + + :param type: The file type (one of FILETYPE_PEM, FILETYPE_ASN1) + :param buffer: The buffer the certificate request is stored in + :return: The X509Req object + """ + if isinstance(buffer, _text_type): + buffer = buffer.encode("ascii") + + bio = _new_mem_buf(buffer) + + if type == FILETYPE_PEM: + req = _lib.PEM_read_bio_X509_REQ(bio, _ffi.NULL, _ffi.NULL, _ffi.NULL) + elif type == FILETYPE_ASN1: + req = _lib.d2i_X509_REQ_bio(bio, _ffi.NULL) + else: + raise ValueError("type argument must be FILETYPE_PEM or FILETYPE_ASN1") + + _openssl_assert(req != _ffi.NULL) + + x509req = X509Req.__new__(X509Req) + x509req._req = _ffi.gc(req, _lib.X509_REQ_free) + return x509req + + +def sign(pkey, data, digest): + """ + Sign a data string using the given key and message digest. + + :param pkey: PKey to sign with + :param data: data to be signed + :param digest: message digest to use + :return: signature + + .. versionadded:: 0.11 + """ + data = _text_to_bytes_and_warn("data", data) + + digest_obj = _lib.EVP_get_digestbyname(_byte_string(digest)) + if digest_obj == _ffi.NULL: + raise ValueError("No such digest method") + + md_ctx = _lib.Cryptography_EVP_MD_CTX_new() + md_ctx = _ffi.gc(md_ctx, _lib.Cryptography_EVP_MD_CTX_free) + + _lib.EVP_SignInit(md_ctx, digest_obj) + _lib.EVP_SignUpdate(md_ctx, data, len(data)) + + length = _lib.EVP_PKEY_size(pkey._pkey) + _openssl_assert(length > 0) + signature_buffer = _ffi.new("unsigned char[]", length) + signature_length = _ffi.new("unsigned int *") + final_result = _lib.EVP_SignFinal( + md_ctx, signature_buffer, signature_length, pkey._pkey) + _openssl_assert(final_result == 1) + + return _ffi.buffer(signature_buffer, signature_length[0])[:] + + +def verify(cert, signature, data, digest): + """ + Verify the signature for a data string. + + :param cert: signing certificate (X509 object) corresponding to the + private key which generated the signature. + :param signature: signature returned by sign function + :param data: data to be verified + :param digest: message digest to use + :return: ``None`` if the signature is correct, raise exception otherwise. + + .. 
versionadded:: 0.11 + """ + data = _text_to_bytes_and_warn("data", data) + + digest_obj = _lib.EVP_get_digestbyname(_byte_string(digest)) + if digest_obj == _ffi.NULL: + raise ValueError("No such digest method") + + pkey = _lib.X509_get_pubkey(cert._x509) + _openssl_assert(pkey != _ffi.NULL) + pkey = _ffi.gc(pkey, _lib.EVP_PKEY_free) + + md_ctx = _lib.Cryptography_EVP_MD_CTX_new() + md_ctx = _ffi.gc(md_ctx, _lib.Cryptography_EVP_MD_CTX_free) + + _lib.EVP_VerifyInit(md_ctx, digest_obj) + _lib.EVP_VerifyUpdate(md_ctx, data, len(data)) + verify_result = _lib.EVP_VerifyFinal( + md_ctx, signature, len(signature), pkey + ) + + if verify_result != 1: + _raise_current_error() + + +def dump_crl(type, crl): + """ + Dump a certificate revocation list to a buffer. + + :param type: The file type (one of ``FILETYPE_PEM``, ``FILETYPE_ASN1``, or + ``FILETYPE_TEXT``). + :param CRL crl: The CRL to dump. + + :return: The buffer with the CRL. + :rtype: bytes + """ + bio = _new_mem_buf() + + if type == FILETYPE_PEM: + ret = _lib.PEM_write_bio_X509_CRL(bio, crl._crl) + elif type == FILETYPE_ASN1: + ret = _lib.i2d_X509_CRL_bio(bio, crl._crl) + elif type == FILETYPE_TEXT: + ret = _lib.X509_CRL_print(bio, crl._crl) + else: + raise ValueError( + "type argument must be FILETYPE_PEM, FILETYPE_ASN1, or " + "FILETYPE_TEXT") + + assert ret == 1 + return _bio_to_string(bio) + + +def load_crl(type, buffer): + """ + Load Certificate Revocation List (CRL) data from a string *buffer*. + *buffer* encoded with the type *type*. + + :param type: The file type (one of FILETYPE_PEM, FILETYPE_ASN1) + :param buffer: The buffer the CRL is stored in + + :return: The PKey object + """ + if isinstance(buffer, _text_type): + buffer = buffer.encode("ascii") + + bio = _new_mem_buf(buffer) + + if type == FILETYPE_PEM: + crl = _lib.PEM_read_bio_X509_CRL(bio, _ffi.NULL, _ffi.NULL, _ffi.NULL) + elif type == FILETYPE_ASN1: + crl = _lib.d2i_X509_CRL_bio(bio, _ffi.NULL) + else: + raise ValueError("type argument must be FILETYPE_PEM or FILETYPE_ASN1") + + if crl == _ffi.NULL: + _raise_current_error() + + result = CRL.__new__(CRL) + result._crl = _ffi.gc(crl, _lib.X509_CRL_free) + return result + + +def load_pkcs7_data(type, buffer): + """ + Load pkcs7 data from the string *buffer* encoded with the type + *type*. + + :param type: The file type (one of FILETYPE_PEM or FILETYPE_ASN1) + :param buffer: The buffer with the pkcs7 data. + :return: The PKCS7 object + """ + if isinstance(buffer, _text_type): + buffer = buffer.encode("ascii") + + bio = _new_mem_buf(buffer) + + if type == FILETYPE_PEM: + pkcs7 = _lib.PEM_read_bio_PKCS7(bio, _ffi.NULL, _ffi.NULL, _ffi.NULL) + elif type == FILETYPE_ASN1: + pkcs7 = _lib.d2i_PKCS7_bio(bio, _ffi.NULL) + else: + raise ValueError("type argument must be FILETYPE_PEM or FILETYPE_ASN1") + + if pkcs7 == _ffi.NULL: + _raise_current_error() + + pypkcs7 = PKCS7.__new__(PKCS7) + pypkcs7._pkcs7 = _ffi.gc(pkcs7, _lib.PKCS7_free) + return pypkcs7 + + +def load_pkcs12(buffer, passphrase=None): + """ + Load pkcs12 data from the string *buffer*. If the pkcs12 structure is + encrypted, a *passphrase* must be included. The MAC is always + checked and thus required. + + See also the man page for the C function :py:func:`PKCS12_parse`. 
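# Sketch of creating and checking a detached signature with the
# module-level sign() / verify() helpers above; `key` / `cert` are assumed
# to be a matching PKey / X509 pair, the payload must be bytes, and
# `crypto` is OpenSSL.crypto.
signature = crypto.sign(key, b"some payload", "sha256")
crypto.verify(cert, signature, b"some payload", "sha256")   # raises on mismatch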
+ + :param buffer: The buffer the certificate is stored in + :param passphrase: (Optional) The password to decrypt the PKCS12 lump + :returns: The PKCS12 object + """ + passphrase = _text_to_bytes_and_warn("passphrase", passphrase) + + if isinstance(buffer, _text_type): + buffer = buffer.encode("ascii") + + bio = _new_mem_buf(buffer) + + # Use null passphrase if passphrase is None or empty string. With PKCS#12 + # password based encryption no password and a zero length password are two + # different things, but OpenSSL implementation will try both to figure out + # which one works. + if not passphrase: + passphrase = _ffi.NULL + + p12 = _lib.d2i_PKCS12_bio(bio, _ffi.NULL) + if p12 == _ffi.NULL: + _raise_current_error() + p12 = _ffi.gc(p12, _lib.PKCS12_free) + + pkey = _ffi.new("EVP_PKEY**") + cert = _ffi.new("X509**") + cacerts = _ffi.new("Cryptography_STACK_OF_X509**") + + parse_result = _lib.PKCS12_parse(p12, passphrase, pkey, cert, cacerts) + if not parse_result: + _raise_current_error() + + cacerts = _ffi.gc(cacerts[0], _lib.sk_X509_free) + + # openssl 1.0.0 sometimes leaves an X509_check_private_key error in the + # queue for no particular reason. This error isn't interesting to anyone + # outside this function. It's not even interesting to us. Get rid of it. + try: + _raise_current_error() + except Error: + pass + + if pkey[0] == _ffi.NULL: + pykey = None + else: + pykey = PKey.__new__(PKey) + pykey._pkey = _ffi.gc(pkey[0], _lib.EVP_PKEY_free) + + if cert[0] == _ffi.NULL: + pycert = None + friendlyname = None + else: + pycert = X509._from_raw_x509_ptr(cert[0]) + + friendlyname_length = _ffi.new("int*") + friendlyname_buffer = _lib.X509_alias_get0( + cert[0], friendlyname_length + ) + friendlyname = _ffi.buffer( + friendlyname_buffer, friendlyname_length[0] + )[:] + if friendlyname_buffer == _ffi.NULL: + friendlyname = None + + pycacerts = [] + for i in range(_lib.sk_X509_num(cacerts)): + x509 = _lib.sk_X509_value(cacerts, i) + pycacert = X509._from_raw_x509_ptr(x509) + pycacerts.append(pycacert) + if not pycacerts: + pycacerts = None + + pkcs12 = PKCS12.__new__(PKCS12) + pkcs12._pkey = pykey + pkcs12._cert = pycert + pkcs12._cacerts = pycacerts + pkcs12._friendlyname = friendlyname + return pkcs12 + + +# There are no direct unit tests for this initialization. It is tested +# indirectly since it is necessary for functions like dump_privatekey when +# using encryption. +# +# Thus OpenSSL.test.test_crypto.FunctionTests.test_dump_privatekey_passphrase +# and some other similar tests may fail without this (though they may not if +# the Python runtime has already done some initialization of the underlying +# OpenSSL library (and is linked against the same one that cryptography is +# using)). +_lib.OpenSSL_add_all_algorithms() + +# This is similar but exercised mainly by exception_from_error_queue. It calls +# both ERR_load_crypto_strings() and ERR_load_SSL_strings(). +_lib.SSL_load_error_strings() + + +# Set the default string mask to match OpenSSL upstream (since 2005) and +# RFC5280 recommendations. +_lib.ASN1_STRING_set_default_mask_asc(b'utf8only') diff --git a/packages/wakatime/packages/OpenSSL/debug.py b/packages/wakatime/packages/OpenSSL/debug.py new file mode 100644 index 0000000..0d37bf5 --- /dev/null +++ b/packages/wakatime/packages/OpenSSL/debug.py @@ -0,0 +1,42 @@ +from __future__ import print_function + +import ssl +import sys + +import OpenSSL.SSL +import cffi +import cryptography + +from . 
import version + + +_env_info = u"""\ +pyOpenSSL: {pyopenssl} +cryptography: {cryptography} +cffi: {cffi} +cryptography's compiled against OpenSSL: {crypto_openssl_compile} +cryptography's linked OpenSSL: {crypto_openssl_link} +Python's OpenSSL: {python_openssl} +Python executable: {python} +Python version: {python_version} +Platform: {platform} +sys.path: {sys_path}""".format( + pyopenssl=version.__version__, + crypto_openssl_compile=OpenSSL._util.ffi.string( + OpenSSL._util.lib.OPENSSL_VERSION_TEXT, + ).decode("ascii"), + crypto_openssl_link=OpenSSL.SSL.SSLeay_version( + OpenSSL.SSL.SSLEAY_VERSION + ).decode("ascii"), + python_openssl=getattr(ssl, "OPENSSL_VERSION", "n/a"), + cryptography=cryptography.__version__, + cffi=cffi.__version__, + python=sys.executable, + python_version=sys.version, + platform=sys.platform, + sys_path=sys.path, +) + + +if __name__ == "__main__": + print(_env_info) diff --git a/packages/wakatime/packages/OpenSSL/rand.py b/packages/wakatime/packages/OpenSSL/rand.py new file mode 100644 index 0000000..d2c1767 --- /dev/null +++ b/packages/wakatime/packages/OpenSSL/rand.py @@ -0,0 +1,40 @@ +""" +PRNG management routines, thin wrappers. +""" + +from OpenSSL._util import lib as _lib + + +def add(buffer, entropy): + """ + Mix bytes from *buffer* into the PRNG state. + + The *entropy* argument is (the lower bound of) an estimate of how much + randomness is contained in *buffer*, measured in bytes. + + For more information, see e.g. :rfc:`1750`. + + This function is only relevant if you are forking Python processes and + need to reseed the CSPRNG after fork. + + :param buffer: Buffer with random data. + :param entropy: The entropy (in bytes) measurement of the buffer. + + :return: :obj:`None` + """ + if not isinstance(buffer, bytes): + raise TypeError("buffer must be a byte string") + + if not isinstance(entropy, int): + raise TypeError("entropy must be an integer") + + _lib.RAND_add(buffer, len(buffer), entropy) + + +def status(): + """ + Check whether the PRNG has been seeded with enough data. + + :return: 1 if the PRNG is seeded enough, 0 otherwise.
+ """ + return _lib.RAND_status() diff --git a/packages/wakatime/packages/OpenSSL/tsafe.py b/packages/wakatime/packages/OpenSSL/tsafe.py new file mode 100644 index 0000000..f1c6f67 --- /dev/null +++ b/packages/wakatime/packages/OpenSSL/tsafe.py @@ -0,0 +1,31 @@ +import warnings +from threading import RLock as _RLock + +from OpenSSL import SSL as _ssl + + +warnings.warn( + "OpenSSL.tsafe is deprecated and will be removed", + DeprecationWarning, stacklevel=3 +) + + +class Connection: + def __init__(self, *args): + self._ssl_conn = _ssl.Connection(*args) + self._lock = _RLock() + + for f in ('get_context', 'pending', 'send', 'write', 'recv', 'read', + 'renegotiate', 'bind', 'listen', 'connect', 'accept', + 'setblocking', 'fileno', 'shutdown', 'close', 'get_cipher_list', + 'getpeername', 'getsockname', 'getsockopt', 'setsockopt', + 'makefile', 'get_app_data', 'set_app_data', 'state_string', + 'sock_shutdown', 'get_peer_certificate', 'get_peer_cert_chain', + 'want_read', 'want_write', 'set_connect_state', + 'set_accept_state', 'connect_ex', 'sendall'): + exec("""def %s(self, *args): + self._lock.acquire() + try: + return self._ssl_conn.%s(*args) + finally: + self._lock.release()\n""" % (f, f)) diff --git a/packages/wakatime/packages/OpenSSL/version.py b/packages/wakatime/packages/OpenSSL/version.py new file mode 100644 index 0000000..1e9696b --- /dev/null +++ b/packages/wakatime/packages/OpenSSL/version.py @@ -0,0 +1,22 @@ +# Copyright (C) AB Strakt +# Copyright (C) Jean-Paul Calderone +# See LICENSE for details. + +""" +pyOpenSSL - A simple wrapper around the OpenSSL library +""" + +__all__ = [ + "__author__", "__copyright__", "__email__", "__license__", "__summary__", + "__title__", "__uri__", "__version__", +] + +__version__ = "19.1.0" + +__title__ = "pyOpenSSL" +__uri__ = "https://pyopenssl.org/" +__summary__ = "Python wrapper module around the OpenSSL library" +__author__ = "The pyOpenSSL developers" +__email__ = "cryptography-dev@python.org" +__license__ = "Apache License, Version 2.0" +__copyright__ = "Copyright 2001-2017 {0}".format(__author__) diff --git a/packages/wakatime/packages/ipaddress.py b/packages/wakatime/packages/ipaddress.py new file mode 100644 index 0000000..3e6f9e4 --- /dev/null +++ b/packages/wakatime/packages/ipaddress.py @@ -0,0 +1,2420 @@ +# Copyright 2007 Google Inc. +# Licensed to PSF under a Contributor Agreement. + +"""A fast, lightweight IPv4/IPv6 manipulation library in Python. + +This library is used to create/poke/manipulate IPv4 and IPv6 addresses +and networks. 
+ +""" + +from __future__ import unicode_literals + + +import itertools +import struct + +__version__ = '1.0.23' + +# Compatibility functions +_compat_int_types = (int,) +try: + _compat_int_types = (int, long) +except NameError: + pass +try: + _compat_str = unicode +except NameError: + _compat_str = str + assert bytes != str +if b'\0'[0] == 0: # Python 3 semantics + def _compat_bytes_to_byte_vals(byt): + return byt +else: + def _compat_bytes_to_byte_vals(byt): + return [struct.unpack(b'!B', b)[0] for b in byt] +try: + _compat_int_from_byte_vals = int.from_bytes +except AttributeError: + def _compat_int_from_byte_vals(bytvals, endianess): + assert endianess == 'big' + res = 0 + for bv in bytvals: + assert isinstance(bv, _compat_int_types) + res = (res << 8) + bv + return res + + +def _compat_to_bytes(intval, length, endianess): + assert isinstance(intval, _compat_int_types) + assert endianess == 'big' + if length == 4: + if intval < 0 or intval >= 2 ** 32: + raise struct.error("integer out of range for 'I' format code") + return struct.pack(b'!I', intval) + elif length == 16: + if intval < 0 or intval >= 2 ** 128: + raise struct.error("integer out of range for 'QQ' format code") + return struct.pack(b'!QQ', intval >> 64, intval & 0xffffffffffffffff) + else: + raise NotImplementedError() + + +if hasattr(int, 'bit_length'): + # Not int.bit_length , since that won't work in 2.7 where long exists + def _compat_bit_length(i): + return i.bit_length() +else: + def _compat_bit_length(i): + for res in itertools.count(): + if i >> res == 0: + return res + + +def _compat_range(start, end, step=1): + assert step > 0 + i = start + while i < end: + yield i + i += step + + +class _TotalOrderingMixin(object): + __slots__ = () + + # Helper that derives the other comparison operations from + # __lt__ and __eq__ + # We avoid functools.total_ordering because it doesn't handle + # NotImplemented correctly yet (http://bugs.python.org/issue10042) + def __eq__(self, other): + raise NotImplementedError + + def __ne__(self, other): + equal = self.__eq__(other) + if equal is NotImplemented: + return NotImplemented + return not equal + + def __lt__(self, other): + raise NotImplementedError + + def __le__(self, other): + less = self.__lt__(other) + if less is NotImplemented or not less: + return self.__eq__(other) + return less + + def __gt__(self, other): + less = self.__lt__(other) + if less is NotImplemented: + return NotImplemented + equal = self.__eq__(other) + if equal is NotImplemented: + return NotImplemented + return not (less or equal) + + def __ge__(self, other): + less = self.__lt__(other) + if less is NotImplemented: + return NotImplemented + return not less + + +IPV4LENGTH = 32 +IPV6LENGTH = 128 + + +class AddressValueError(ValueError): + """A Value Error related to the address.""" + + +class NetmaskValueError(ValueError): + """A Value Error related to the netmask.""" + + +def ip_address(address): + """Take an IP string/int and return an object of the correct type. + + Args: + address: A string or integer, the IP address. Either IPv4 or + IPv6 addresses may be supplied; integers less than 2**32 will + be considered to be IPv4 by default. + + Returns: + An IPv4Address or IPv6Address object. 
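As a quick sanity check of the ip_address() factory documented above, a minimal sketch; it assumes the module is importable as plain `ipaddress` (this vendored backport and the standard-library module expose the same API, and text/unicode input is expected):

import ipaddress

addr = ipaddress.ip_address(u'192.0.2.1')    # dispatches to IPv4Address
same = ipaddress.ip_address(3221225985)      # integers < 2**32 parse as IPv4
v6 = ipaddress.ip_address(u'2001:db8::1')    # dispatches to IPv6Address

assert addr == same
assert addr.version == 4 and v6.version == 6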
+ + Raises: + ValueError: if the *address* passed isn't either a v4 or a v6 + address + + """ + try: + return IPv4Address(address) + except (AddressValueError, NetmaskValueError): + pass + + try: + return IPv6Address(address) + except (AddressValueError, NetmaskValueError): + pass + + if isinstance(address, bytes): + raise AddressValueError( + '%r does not appear to be an IPv4 or IPv6 address. ' + 'Did you pass in a bytes (str in Python 2) instead of' + ' a unicode object?' % address) + + raise ValueError('%r does not appear to be an IPv4 or IPv6 address' % + address) + + +def ip_network(address, strict=True): + """Take an IP string/int and return an object of the correct type. + + Args: + address: A string or integer, the IP network. Either IPv4 or + IPv6 networks may be supplied; integers less than 2**32 will + be considered to be IPv4 by default. + + Returns: + An IPv4Network or IPv6Network object. + + Raises: + ValueError: if the string passed isn't either a v4 or a v6 + address. Or if the network has host bits set. + + """ + try: + return IPv4Network(address, strict) + except (AddressValueError, NetmaskValueError): + pass + + try: + return IPv6Network(address, strict) + except (AddressValueError, NetmaskValueError): + pass + + if isinstance(address, bytes): + raise AddressValueError( + '%r does not appear to be an IPv4 or IPv6 network. ' + 'Did you pass in a bytes (str in Python 2) instead of' + ' a unicode object?' % address) + + raise ValueError('%r does not appear to be an IPv4 or IPv6 network' % + address) + + +def ip_interface(address): + """Take an IP string/int and return an object of the correct type. + + Args: + address: A string or integer, the IP address. Either IPv4 or + IPv6 addresses may be supplied; integers less than 2**32 will + be considered to be IPv4 by default. + + Returns: + An IPv4Interface or IPv6Interface object. + + Raises: + ValueError: if the string passed isn't either a v4 or a v6 + address. + + Notes: + The IPv?Interface classes describe an Address on a particular + Network, so they're basically a combination of both the Address + and Network classes. + + """ + try: + return IPv4Interface(address) + except (AddressValueError, NetmaskValueError): + pass + + try: + return IPv6Interface(address) + except (AddressValueError, NetmaskValueError): + pass + + raise ValueError('%r does not appear to be an IPv4 or IPv6 interface' % + address) + + +def v4_int_to_packed(address): + """Represent an address as 4 packed bytes in network (big-endian) order. + + Args: + address: An integer representation of an IPv4 IP address. + + Returns: + The integer address packed as 4 bytes in network (big-endian) order. + + Raises: + ValueError: If the integer is negative or too large to be an + IPv4 IP address. + + """ + try: + return _compat_to_bytes(address, 4, 'big') + except (struct.error, OverflowError): + raise ValueError("Address negative or too large for IPv4") + + +def v6_int_to_packed(address): + """Represent an address as 16 packed bytes in network (big-endian) order. + + Args: + address: An integer representation of an IPv6 IP address. + + Returns: + The integer address packed as 16 bytes in network (big-endian) order. 
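The network and interface factories, together with the packed-bytes helpers, behave as documented above; a small sketch under the same `import ipaddress` assumption:

import ipaddress

net = ipaddress.ip_network(u'192.0.2.0/24')                  # host bits clear
lax = ipaddress.ip_network(u'192.0.2.1/24', strict=False)    # host bits masked off
iface = ipaddress.ip_interface(u'192.0.2.1/24')              # keeps the host part

assert lax == net
assert iface.ip == ipaddress.ip_address(u'192.0.2.1')
assert iface.network == net
assert ipaddress.v4_int_to_packed(int(iface.ip)) == b'\xc0\x00\x02\x01'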
+ + """ + try: + return _compat_to_bytes(address, 16, 'big') + except (struct.error, OverflowError): + raise ValueError("Address negative or too large for IPv6") + + +def _split_optional_netmask(address): + """Helper to split the netmask and raise AddressValueError if needed""" + addr = _compat_str(address).split('/') + if len(addr) > 2: + raise AddressValueError("Only one '/' permitted in %r" % address) + return addr + + +def _find_address_range(addresses): + """Find a sequence of sorted deduplicated IPv#Address. + + Args: + addresses: a list of IPv#Address objects. + + Yields: + A tuple containing the first and last IP addresses in the sequence. + + """ + it = iter(addresses) + first = last = next(it) + for ip in it: + if ip._ip != last._ip + 1: + yield first, last + first = ip + last = ip + yield first, last + + +def _count_righthand_zero_bits(number, bits): + """Count the number of zero bits on the right hand side. + + Args: + number: an integer. + bits: maximum number of bits to count. + + Returns: + The number of zero bits on the right hand side of the number. + + """ + if number == 0: + return bits + return min(bits, _compat_bit_length(~number & (number - 1))) + + +def summarize_address_range(first, last): + """Summarize a network range given the first and last IP addresses. + + Example: + >>> list(summarize_address_range(IPv4Address('192.0.2.0'), + ... IPv4Address('192.0.2.130'))) + ... #doctest: +NORMALIZE_WHITESPACE + [IPv4Network('192.0.2.0/25'), IPv4Network('192.0.2.128/31'), + IPv4Network('192.0.2.130/32')] + + Args: + first: the first IPv4Address or IPv6Address in the range. + last: the last IPv4Address or IPv6Address in the range. + + Returns: + An iterator of the summarized IPv(4|6) network objects. + + Raise: + TypeError: + If the first and last objects are not IP addresses. + If the first and last objects are not the same version. + ValueError: + If the last object is not greater than the first. + If the version of the first address is not 4 or 6. + + """ + if (not (isinstance(first, _BaseAddress) and + isinstance(last, _BaseAddress))): + raise TypeError('first and last must be IP addresses, not networks') + if first.version != last.version: + raise TypeError("%s and %s are not of the same version" % ( + first, last)) + if first > last: + raise ValueError('last IP address must be greater than first') + + if first.version == 4: + ip = IPv4Network + elif first.version == 6: + ip = IPv6Network + else: + raise ValueError('unknown IP version') + + ip_bits = first._max_prefixlen + first_int = first._ip + last_int = last._ip + while first_int <= last_int: + nbits = min(_count_righthand_zero_bits(first_int, ip_bits), + _compat_bit_length(last_int - first_int + 1) - 1) + net = ip((first_int, ip_bits - nbits)) + yield net + first_int += 1 << nbits + if first_int - 1 == ip._ALL_ONES: + break + + +def _collapse_addresses_internal(addresses): + """Loops through the addresses, collapsing concurrent netblocks. + + Example: + + ip1 = IPv4Network('192.0.2.0/26') + ip2 = IPv4Network('192.0.2.64/26') + ip3 = IPv4Network('192.0.2.128/26') + ip4 = IPv4Network('192.0.2.192/26') + + _collapse_addresses_internal([ip1, ip2, ip3, ip4]) -> + [IPv4Network('192.0.2.0/24')] + + This shouldn't be called directly; it is called via + collapse_addresses([]). + + Args: + addresses: A list of IPv4Network's or IPv6Network's + + Returns: + A list of IPv4Network's or IPv6Network's depending on what we were + passed. 
+ + """ + # First merge + to_merge = list(addresses) + subnets = {} + while to_merge: + net = to_merge.pop() + supernet = net.supernet() + existing = subnets.get(supernet) + if existing is None: + subnets[supernet] = net + elif existing != net: + # Merge consecutive subnets + del subnets[supernet] + to_merge.append(supernet) + # Then iterate over resulting networks, skipping subsumed subnets + last = None + for net in sorted(subnets.values()): + if last is not None: + # Since they are sorted, + # last.network_address <= net.network_address is a given. + if last.broadcast_address >= net.broadcast_address: + continue + yield net + last = net + + +def collapse_addresses(addresses): + """Collapse a list of IP objects. + + Example: + collapse_addresses([IPv4Network('192.0.2.0/25'), + IPv4Network('192.0.2.128/25')]) -> + [IPv4Network('192.0.2.0/24')] + + Args: + addresses: An iterator of IPv4Network or IPv6Network objects. + + Returns: + An iterator of the collapsed IPv(4|6)Network objects. + + Raises: + TypeError: If passed a list of mixed version objects. + + """ + addrs = [] + ips = [] + nets = [] + + # split IP addresses and networks + for ip in addresses: + if isinstance(ip, _BaseAddress): + if ips and ips[-1]._version != ip._version: + raise TypeError("%s and %s are not of the same version" % ( + ip, ips[-1])) + ips.append(ip) + elif ip._prefixlen == ip._max_prefixlen: + if ips and ips[-1]._version != ip._version: + raise TypeError("%s and %s are not of the same version" % ( + ip, ips[-1])) + try: + ips.append(ip.ip) + except AttributeError: + ips.append(ip.network_address) + else: + if nets and nets[-1]._version != ip._version: + raise TypeError("%s and %s are not of the same version" % ( + ip, nets[-1])) + nets.append(ip) + + # sort and dedup + ips = sorted(set(ips)) + + # find consecutive address ranges in the sorted sequence and summarize them + if ips: + for first, last in _find_address_range(ips): + addrs.extend(summarize_address_range(first, last)) + + return _collapse_addresses_internal(addrs + nets) + + +def get_mixed_type_key(obj): + """Return a key suitable for sorting between networks and addresses. + + Address and Network objects are not sortable by default; they're + fundamentally different so the expression + + IPv4Address('192.0.2.0') <= IPv4Network('192.0.2.0/24') + + doesn't make any sense. There are some times however, where you may wish + to have ipaddress sort these for you anyway. If you need to do this, you + can use this function as the key= argument to sorted(). + + Args: + obj: either a Network or Address object. + Returns: + appropriate key. 
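Putting collapse_addresses() and get_mixed_type_key() together, a brief sketch (again assuming `import ipaddress` resolves to this backport or the stdlib module):

import ipaddress

nets = [ipaddress.ip_network(u'192.0.2.0/25'),
        ipaddress.ip_network(u'192.0.2.128/25')]
assert list(ipaddress.collapse_addresses(nets)) == [
    ipaddress.ip_network(u'192.0.2.0/24')]

# Addresses and networks do not compare directly, but they can be sorted
# together by passing get_mixed_type_key as the key function.
mixed = [ipaddress.ip_address(u'192.0.2.1'),
         ipaddress.ip_network(u'192.0.2.0/24')]
ordered = sorted(mixed, key=ipaddress.get_mixed_type_key)
assert ordered[0] == ipaddress.ip_network(u'192.0.2.0/24')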
+ + """ + if isinstance(obj, _BaseNetwork): + return obj._get_networks_key() + elif isinstance(obj, _BaseAddress): + return obj._get_address_key() + return NotImplemented + + +class _IPAddressBase(_TotalOrderingMixin): + + """The mother class.""" + + __slots__ = () + + @property + def exploded(self): + """Return the longhand version of the IP address as a string.""" + return self._explode_shorthand_ip_string() + + @property + def compressed(self): + """Return the shorthand version of the IP address as a string.""" + return _compat_str(self) + + @property + def reverse_pointer(self): + """The name of the reverse DNS pointer for the IP address, e.g.: + >>> ipaddress.ip_address("127.0.0.1").reverse_pointer + '1.0.0.127.in-addr.arpa' + >>> ipaddress.ip_address("2001:db8::1").reverse_pointer + '1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa' + + """ + return self._reverse_pointer() + + @property + def version(self): + msg = '%200s has no version specified' % (type(self),) + raise NotImplementedError(msg) + + def _check_int_address(self, address): + if address < 0: + msg = "%d (< 0) is not permitted as an IPv%d address" + raise AddressValueError(msg % (address, self._version)) + if address > self._ALL_ONES: + msg = "%d (>= 2**%d) is not permitted as an IPv%d address" + raise AddressValueError(msg % (address, self._max_prefixlen, + self._version)) + + def _check_packed_address(self, address, expected_len): + address_len = len(address) + if address_len != expected_len: + msg = ( + '%r (len %d != %d) is not permitted as an IPv%d address. ' + 'Did you pass in a bytes (str in Python 2) instead of' + ' a unicode object?') + raise AddressValueError(msg % (address, address_len, + expected_len, self._version)) + + @classmethod + def _ip_int_from_prefix(cls, prefixlen): + """Turn the prefix length into a bitwise netmask + + Args: + prefixlen: An integer, the prefix length. + + Returns: + An integer. + + """ + return cls._ALL_ONES ^ (cls._ALL_ONES >> prefixlen) + + @classmethod + def _prefix_from_ip_int(cls, ip_int): + """Return prefix length from the bitwise netmask. + + Args: + ip_int: An integer, the netmask in expanded bitwise format + + Returns: + An integer, the prefix length. + + Raises: + ValueError: If the input intermingles zeroes & ones + """ + trailing_zeroes = _count_righthand_zero_bits(ip_int, + cls._max_prefixlen) + prefixlen = cls._max_prefixlen - trailing_zeroes + leading_ones = ip_int >> trailing_zeroes + all_ones = (1 << prefixlen) - 1 + if leading_ones != all_ones: + byteslen = cls._max_prefixlen // 8 + details = _compat_to_bytes(ip_int, byteslen, 'big') + msg = 'Netmask pattern %r mixes zeroes & ones' + raise ValueError(msg % details) + return prefixlen + + @classmethod + def _report_invalid_netmask(cls, netmask_str): + msg = '%r is not a valid netmask' % netmask_str + raise NetmaskValueError(msg) + + @classmethod + def _prefix_from_prefix_string(cls, prefixlen_str): + """Return prefix length from a numeric string + + Args: + prefixlen_str: The string to be converted + + Returns: + An integer, the prefix length. 
+ + Raises: + NetmaskValueError: If the input is not a valid netmask + """ + # int allows a leading +/- as well as surrounding whitespace, + # so we ensure that isn't the case + if not _BaseV4._DECIMAL_DIGITS.issuperset(prefixlen_str): + cls._report_invalid_netmask(prefixlen_str) + try: + prefixlen = int(prefixlen_str) + except ValueError: + cls._report_invalid_netmask(prefixlen_str) + if not (0 <= prefixlen <= cls._max_prefixlen): + cls._report_invalid_netmask(prefixlen_str) + return prefixlen + + @classmethod + def _prefix_from_ip_string(cls, ip_str): + """Turn a netmask/hostmask string into a prefix length + + Args: + ip_str: The netmask/hostmask to be converted + + Returns: + An integer, the prefix length. + + Raises: + NetmaskValueError: If the input is not a valid netmask/hostmask + """ + # Parse the netmask/hostmask like an IP address. + try: + ip_int = cls._ip_int_from_string(ip_str) + except AddressValueError: + cls._report_invalid_netmask(ip_str) + + # Try matching a netmask (this would be /1*0*/ as a bitwise regexp). + # Note that the two ambiguous cases (all-ones and all-zeroes) are + # treated as netmasks. + try: + return cls._prefix_from_ip_int(ip_int) + except ValueError: + pass + + # Invert the bits, and try matching a /0+1+/ hostmask instead. + ip_int ^= cls._ALL_ONES + try: + return cls._prefix_from_ip_int(ip_int) + except ValueError: + cls._report_invalid_netmask(ip_str) + + def __reduce__(self): + return self.__class__, (_compat_str(self),) + + +class _BaseAddress(_IPAddressBase): + + """A generic IP object. + + This IP class contains the version independent methods which are + used by single IP addresses. + """ + + __slots__ = () + + def __int__(self): + return self._ip + + def __eq__(self, other): + try: + return (self._ip == other._ip and + self._version == other._version) + except AttributeError: + return NotImplemented + + def __lt__(self, other): + if not isinstance(other, _IPAddressBase): + return NotImplemented + if not isinstance(other, _BaseAddress): + raise TypeError('%s and %s are not of the same type' % ( + self, other)) + if self._version != other._version: + raise TypeError('%s and %s are not of the same version' % ( + self, other)) + if self._ip != other._ip: + return self._ip < other._ip + return False + + # Shorthand for Integer addition and subtraction. This is not + # meant to ever support addition/subtraction of addresses. + def __add__(self, other): + if not isinstance(other, _compat_int_types): + return NotImplemented + return self.__class__(int(self) + other) + + def __sub__(self, other): + if not isinstance(other, _compat_int_types): + return NotImplemented + return self.__class__(int(self) - other) + + def __repr__(self): + return '%s(%r)' % (self.__class__.__name__, _compat_str(self)) + + def __str__(self): + return _compat_str(self._string_from_ip_int(self._ip)) + + def __hash__(self): + return hash(hex(int(self._ip))) + + def _get_address_key(self): + return (self._version, self) + + def __reduce__(self): + return self.__class__, (self._ip,) + + +class _BaseNetwork(_IPAddressBase): + + """A generic IP network object. + + This IP class contains the version independent methods which are + used by networks. + + """ + def __init__(self, address): + self._cache = {} + + def __repr__(self): + return '%s(%r)' % (self.__class__.__name__, _compat_str(self)) + + def __str__(self): + return '%s/%d' % (self.network_address, self.prefixlen) + + def hosts(self): + """Generate Iterator over usable hosts in a network. 
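The integer shorthand on _BaseAddress works as described: adding or subtracting an int offsets the address, while two addresses are never added together. A tiny sketch, assuming plain `import ipaddress`:

import ipaddress

base = ipaddress.ip_address(u'192.0.2.10')
assert base + 5 == ipaddress.ip_address(u'192.0.2.15')
assert base - 10 == ipaddress.ip_address(u'192.0.2.0')
assert int(base) == 3221225994   # the underlying 32-bit value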
+ + This is like __iter__ except it doesn't return the network + or broadcast addresses. + + """ + network = int(self.network_address) + broadcast = int(self.broadcast_address) + for x in _compat_range(network + 1, broadcast): + yield self._address_class(x) + + def __iter__(self): + network = int(self.network_address) + broadcast = int(self.broadcast_address) + for x in _compat_range(network, broadcast + 1): + yield self._address_class(x) + + def __getitem__(self, n): + network = int(self.network_address) + broadcast = int(self.broadcast_address) + if n >= 0: + if network + n > broadcast: + raise IndexError('address out of range') + return self._address_class(network + n) + else: + n += 1 + if broadcast + n < network: + raise IndexError('address out of range') + return self._address_class(broadcast + n) + + def __lt__(self, other): + if not isinstance(other, _IPAddressBase): + return NotImplemented + if not isinstance(other, _BaseNetwork): + raise TypeError('%s and %s are not of the same type' % ( + self, other)) + if self._version != other._version: + raise TypeError('%s and %s are not of the same version' % ( + self, other)) + if self.network_address != other.network_address: + return self.network_address < other.network_address + if self.netmask != other.netmask: + return self.netmask < other.netmask + return False + + def __eq__(self, other): + try: + return (self._version == other._version and + self.network_address == other.network_address and + int(self.netmask) == int(other.netmask)) + except AttributeError: + return NotImplemented + + def __hash__(self): + return hash(int(self.network_address) ^ int(self.netmask)) + + def __contains__(self, other): + # always false if one is v4 and the other is v6. + if self._version != other._version: + return False + # dealing with another network. + if isinstance(other, _BaseNetwork): + return False + # dealing with another address + else: + # address + return (int(self.network_address) <= int(other._ip) <= + int(self.broadcast_address)) + + def overlaps(self, other): + """Tell if self is partly contained in other.""" + return self.network_address in other or ( + self.broadcast_address in other or ( + other.network_address in self or ( + other.broadcast_address in self))) + + @property + def broadcast_address(self): + x = self._cache.get('broadcast_address') + if x is None: + x = self._address_class(int(self.network_address) | + int(self.hostmask)) + self._cache['broadcast_address'] = x + return x + + @property + def hostmask(self): + x = self._cache.get('hostmask') + if x is None: + x = self._address_class(int(self.netmask) ^ self._ALL_ONES) + self._cache['hostmask'] = x + return x + + @property + def with_prefixlen(self): + return '%s/%d' % (self.network_address, self._prefixlen) + + @property + def with_netmask(self): + return '%s/%s' % (self.network_address, self.netmask) + + @property + def with_hostmask(self): + return '%s/%s' % (self.network_address, self.hostmask) + + @property + def num_addresses(self): + """Number of hosts in the current subnet.""" + return int(self.broadcast_address) - int(self.network_address) + 1 + + @property + def _address_class(self): + # Returning bare address objects (rather than interfaces) allows for + # more consistent behaviour across the network address, broadcast + # address and individual host addresses. 
+ msg = '%200s has no associated address class' % (type(self),) + raise NotImplementedError(msg) + + @property + def prefixlen(self): + return self._prefixlen + + def address_exclude(self, other): + """Remove an address from a larger block. + + For example: + + addr1 = ip_network('192.0.2.0/28') + addr2 = ip_network('192.0.2.1/32') + list(addr1.address_exclude(addr2)) = + [IPv4Network('192.0.2.0/32'), IPv4Network('192.0.2.2/31'), + IPv4Network('192.0.2.4/30'), IPv4Network('192.0.2.8/29')] + + or IPv6: + + addr1 = ip_network('2001:db8::1/32') + addr2 = ip_network('2001:db8::1/128') + list(addr1.address_exclude(addr2)) = + [ip_network('2001:db8::1/128'), + ip_network('2001:db8::2/127'), + ip_network('2001:db8::4/126'), + ip_network('2001:db8::8/125'), + ... + ip_network('2001:db8:8000::/33')] + + Args: + other: An IPv4Network or IPv6Network object of the same type. + + Returns: + An iterator of the IPv(4|6)Network objects which is self + minus other. + + Raises: + TypeError: If self and other are of differing address + versions, or if other is not a network object. + ValueError: If other is not completely contained by self. + + """ + if not self._version == other._version: + raise TypeError("%s and %s are not of the same version" % ( + self, other)) + + if not isinstance(other, _BaseNetwork): + raise TypeError("%s is not a network object" % other) + + if not other.subnet_of(self): + raise ValueError('%s not contained in %s' % (other, self)) + if other == self: + return + + # Make sure we're comparing the network of other. + other = other.__class__('%s/%s' % (other.network_address, + other.prefixlen)) + + s1, s2 = self.subnets() + while s1 != other and s2 != other: + if other.subnet_of(s1): + yield s2 + s1, s2 = s1.subnets() + elif other.subnet_of(s2): + yield s1 + s1, s2 = s2.subnets() + else: + # If we got here, there's a bug somewhere. + raise AssertionError('Error performing exclusion: ' + 's1: %s s2: %s other: %s' % + (s1, s2, other)) + if s1 == other: + yield s2 + elif s2 == other: + yield s1 + else: + # If we got here, there's a bug somewhere. + raise AssertionError('Error performing exclusion: ' + 's1: %s s2: %s other: %s' % + (s1, s2, other)) + + def compare_networks(self, other): + """Compare two IP objects. + + This is only concerned about the comparison of the integer + representation of the network addresses. This means that the + host bits aren't considered at all in this method. If you want + to compare host bits, you can easily enough do a + 'HostA._ip < HostB._ip' + + Args: + other: An IP object. + + Returns: + If the IP versions of self and other are the same, returns: + + -1 if self < other: + eg: IPv4Network('192.0.2.0/25') < IPv4Network('192.0.2.128/25') + IPv6Network('2001:db8::1000/124') < + IPv6Network('2001:db8::2000/124') + 0 if self == other + eg: IPv4Network('192.0.2.0/24') == IPv4Network('192.0.2.0/24') + IPv6Network('2001:db8::1000/124') == + IPv6Network('2001:db8::1000/124') + 1 if self > other + eg: IPv4Network('192.0.2.128/25') > IPv4Network('192.0.2.0/25') + IPv6Network('2001:db8::2000/124') > + IPv6Network('2001:db8::1000/124') + + Raises: + TypeError if the IP versions are different. + + """ + # does this need to raise a ValueError? 
+ if self._version != other._version: + raise TypeError('%s and %s are not of the same type' % ( + self, other)) + # self._version == other._version below here: + if self.network_address < other.network_address: + return -1 + if self.network_address > other.network_address: + return 1 + # self.network_address == other.network_address below here: + if self.netmask < other.netmask: + return -1 + if self.netmask > other.netmask: + return 1 + return 0 + + def _get_networks_key(self): + """Network-only key function. + + Returns an object that identifies this address' network and + netmask. This function is a suitable "key" argument for sorted() + and list.sort(). + + """ + return (self._version, self.network_address, self.netmask) + + def subnets(self, prefixlen_diff=1, new_prefix=None): + """The subnets which join to make the current subnet. + + In the case that self contains only one IP + (self._prefixlen == 32 for IPv4 or self._prefixlen == 128 + for IPv6), yield an iterator with just ourself. + + Args: + prefixlen_diff: An integer, the amount the prefix length + should be increased by. This should not be set if + new_prefix is also set. + new_prefix: The desired new prefix length. This must be a + larger number (smaller prefix) than the existing prefix. + This should not be set if prefixlen_diff is also set. + + Returns: + An iterator of IPv(4|6) objects. + + Raises: + ValueError: The prefixlen_diff is too small or too large. + OR + prefixlen_diff and new_prefix are both set or new_prefix + is a smaller number than the current prefix (smaller + number means a larger network) + + """ + if self._prefixlen == self._max_prefixlen: + yield self + return + + if new_prefix is not None: + if new_prefix < self._prefixlen: + raise ValueError('new prefix must be longer') + if prefixlen_diff != 1: + raise ValueError('cannot set prefixlen_diff and new_prefix') + prefixlen_diff = new_prefix - self._prefixlen + + if prefixlen_diff < 0: + raise ValueError('prefix length diff must be > 0') + new_prefixlen = self._prefixlen + prefixlen_diff + + if new_prefixlen > self._max_prefixlen: + raise ValueError( + 'prefix length diff %d is invalid for netblock %s' % ( + new_prefixlen, self)) + + start = int(self.network_address) + end = int(self.broadcast_address) + 1 + step = (int(self.hostmask) + 1) >> prefixlen_diff + for new_addr in _compat_range(start, end, step): + current = self.__class__((new_addr, new_prefixlen)) + yield current + + def supernet(self, prefixlen_diff=1, new_prefix=None): + """The supernet containing the current network. + + Args: + prefixlen_diff: An integer, the amount the prefix length of + the network should be decreased by. For example, given a + /24 network and a prefixlen_diff of 3, a supernet with a + /21 netmask is returned. + + Returns: + An IPv4 network object. + + Raises: + ValueError: If self.prefixlen - prefixlen_diff < 0. I.e., you have + a negative prefix length. 
+ OR + If prefixlen_diff and new_prefix are both set or new_prefix is a + larger number than the current prefix (larger number means a + smaller network) + + """ + if self._prefixlen == 0: + return self + + if new_prefix is not None: + if new_prefix > self._prefixlen: + raise ValueError('new prefix must be shorter') + if prefixlen_diff != 1: + raise ValueError('cannot set prefixlen_diff and new_prefix') + prefixlen_diff = self._prefixlen - new_prefix + + new_prefixlen = self.prefixlen - prefixlen_diff + if new_prefixlen < 0: + raise ValueError( + 'current prefixlen is %d, cannot have a prefixlen_diff of %d' % + (self.prefixlen, prefixlen_diff)) + return self.__class__(( + int(self.network_address) & (int(self.netmask) << prefixlen_diff), + new_prefixlen)) + + @property + def is_multicast(self): + """Test if the address is reserved for multicast use. + + Returns: + A boolean, True if the address is a multicast address. + See RFC 2373 2.7 for details. + + """ + return (self.network_address.is_multicast and + self.broadcast_address.is_multicast) + + @staticmethod + def _is_subnet_of(a, b): + try: + # Always false if one is v4 and the other is v6. + if a._version != b._version: + raise TypeError( + "%s and %s are not of the same version" % (a, b)) + return (b.network_address <= a.network_address and + b.broadcast_address >= a.broadcast_address) + except AttributeError: + raise TypeError("Unable to test subnet containment " + "between %s and %s" % (a, b)) + + def subnet_of(self, other): + """Return True if this network is a subnet of other.""" + return self._is_subnet_of(self, other) + + def supernet_of(self, other): + """Return True if this network is a supernet of other.""" + return self._is_subnet_of(other, self) + + @property + def is_reserved(self): + """Test if the address is otherwise IETF reserved. + + Returns: + A boolean, True if the address is within one of the + reserved IPv6 Network ranges. + + """ + return (self.network_address.is_reserved and + self.broadcast_address.is_reserved) + + @property + def is_link_local(self): + """Test if the address is reserved for link-local. + + Returns: + A boolean, True if the address is reserved per RFC 4291. + + """ + return (self.network_address.is_link_local and + self.broadcast_address.is_link_local) + + @property + def is_private(self): + """Test if this address is allocated for private networks. + + Returns: + A boolean, True if the address is reserved per + iana-ipv4-special-registry or iana-ipv6-special-registry. + + """ + return (self.network_address.is_private and + self.broadcast_address.is_private) + + @property + def is_global(self): + """Test if this address is allocated for public networks. + + Returns: + A boolean, True if the address is not reserved per + iana-ipv4-special-registry or iana-ipv6-special-registry. + + """ + return not self.is_private + + @property + def is_unspecified(self): + """Test if the address is unspecified. + + Returns: + A boolean, True if this is the unspecified address as defined in + RFC 2373 2.5.2. + + """ + return (self.network_address.is_unspecified and + self.broadcast_address.is_unspecified) + + @property + def is_loopback(self): + """Test if the address is a loopback address. + + Returns: + A boolean, True if the address is a loopback address as defined in + RFC 2373 2.5.3. + + """ + return (self.network_address.is_loopback and + self.broadcast_address.is_loopback) + + +class _BaseV4(object): + + """Base IPv4 object. 
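For the subnetting helpers defined on _BaseNetwork above, a short sketch of subnets(), supernet() and the subnet_of()/supernet_of() containment checks, under the same `import ipaddress` assumption:

import ipaddress

net = ipaddress.ip_network(u'192.0.2.0/24')

halves = list(net.subnets())                  # default prefixlen_diff=1
assert halves == [ipaddress.ip_network(u'192.0.2.0/25'),
                  ipaddress.ip_network(u'192.0.2.128/25')]
assert len(list(net.subnets(new_prefix=26))) == 4

assert halves[0].subnet_of(net)
assert net.supernet_of(halves[1])
assert halves[0].supernet(new_prefix=24) == net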
+ + The following methods are used by IPv4 objects in both single IP + addresses and networks. + + """ + + __slots__ = () + _version = 4 + # Equivalent to 255.255.255.255 or 32 bits of 1's. + _ALL_ONES = (2 ** IPV4LENGTH) - 1 + _DECIMAL_DIGITS = frozenset('0123456789') + + # the valid octets for host and netmasks. only useful for IPv4. + _valid_mask_octets = frozenset([255, 254, 252, 248, 240, 224, 192, 128, 0]) + + _max_prefixlen = IPV4LENGTH + # There are only a handful of valid v4 netmasks, so we cache them all + # when constructed (see _make_netmask()). + _netmask_cache = {} + + def _explode_shorthand_ip_string(self): + return _compat_str(self) + + @classmethod + def _make_netmask(cls, arg): + """Make a (netmask, prefix_len) tuple from the given argument. + + Argument can be: + - an integer (the prefix length) + - a string representing the prefix length (e.g. "24") + - a string representing the prefix netmask (e.g. "255.255.255.0") + """ + if arg not in cls._netmask_cache: + if isinstance(arg, _compat_int_types): + prefixlen = arg + else: + try: + # Check for a netmask in prefix length form + prefixlen = cls._prefix_from_prefix_string(arg) + except NetmaskValueError: + # Check for a netmask or hostmask in dotted-quad form. + # This may raise NetmaskValueError. + prefixlen = cls._prefix_from_ip_string(arg) + netmask = IPv4Address(cls._ip_int_from_prefix(prefixlen)) + cls._netmask_cache[arg] = netmask, prefixlen + return cls._netmask_cache[arg] + + @classmethod + def _ip_int_from_string(cls, ip_str): + """Turn the given IP string into an integer for comparison. + + Args: + ip_str: A string, the IP ip_str. + + Returns: + The IP ip_str as an integer. + + Raises: + AddressValueError: if ip_str isn't a valid IPv4 Address. + + """ + if not ip_str: + raise AddressValueError('Address cannot be empty') + + octets = ip_str.split('.') + if len(octets) != 4: + raise AddressValueError("Expected 4 octets in %r" % ip_str) + + try: + return _compat_int_from_byte_vals( + map(cls._parse_octet, octets), 'big') + except ValueError as exc: + raise AddressValueError("%s in %r" % (exc, ip_str)) + + @classmethod + def _parse_octet(cls, octet_str): + """Convert a decimal octet into an integer. + + Args: + octet_str: A string, the number to parse. + + Returns: + The octet as an integer. + + Raises: + ValueError: if the octet isn't strictly a decimal from [0..255]. + + """ + if not octet_str: + raise ValueError("Empty octet not permitted") + # Whitelist the characters, since int() allows a lot of bizarre stuff. + if not cls._DECIMAL_DIGITS.issuperset(octet_str): + msg = "Only decimal digits permitted in %r" + raise ValueError(msg % octet_str) + # We do the length check second, since the invalid character error + # is likely to be more informative for the user + if len(octet_str) > 3: + msg = "At most 3 characters permitted in %r" + raise ValueError(msg % octet_str) + # Convert to integer (we know digits are legal) + octet_int = int(octet_str, 10) + # Any octets that look like they *might* be written in octal, + # and which don't look exactly the same in both octal and + # decimal are rejected as ambiguous + if octet_int > 7 and octet_str[0] == '0': + msg = "Ambiguous (octal/decimal) value in %r not permitted" + raise ValueError(msg % octet_str) + if octet_int > 255: + raise ValueError("Octet %d (> 255) not permitted" % octet_int) + return octet_int + + @classmethod + def _string_from_ip_int(cls, ip_int): + """Turns a 32-bit integer into dotted decimal notation. + + Args: + ip_int: An integer, the IP address. 
+ + Returns: + The IP address as a string in dotted decimal notation. + + """ + return '.'.join(_compat_str(struct.unpack(b'!B', b)[0] + if isinstance(b, bytes) + else b) + for b in _compat_to_bytes(ip_int, 4, 'big')) + + def _is_hostmask(self, ip_str): + """Test if the IP string is a hostmask (rather than a netmask). + + Args: + ip_str: A string, the potential hostmask. + + Returns: + A boolean, True if the IP string is a hostmask. + + """ + bits = ip_str.split('.') + try: + parts = [x for x in map(int, bits) if x in self._valid_mask_octets] + except ValueError: + return False + if len(parts) != len(bits): + return False + if parts[0] < parts[-1]: + return True + return False + + def _reverse_pointer(self): + """Return the reverse DNS pointer name for the IPv4 address. + + This implements the method described in RFC1035 3.5. + + """ + reverse_octets = _compat_str(self).split('.')[::-1] + return '.'.join(reverse_octets) + '.in-addr.arpa' + + @property + def max_prefixlen(self): + return self._max_prefixlen + + @property + def version(self): + return self._version + + +class IPv4Address(_BaseV4, _BaseAddress): + + """Represent and manipulate single IPv4 Addresses.""" + + __slots__ = ('_ip', '__weakref__') + + def __init__(self, address): + + """ + Args: + address: A string or integer representing the IP + + Additionally, an integer can be passed, so + IPv4Address('192.0.2.1') == IPv4Address(3221225985). + or, more generally + IPv4Address(int(IPv4Address('192.0.2.1'))) == + IPv4Address('192.0.2.1') + + Raises: + AddressValueError: If ipaddress isn't a valid IPv4 address. + + """ + # Efficient constructor from integer. + if isinstance(address, _compat_int_types): + self._check_int_address(address) + self._ip = address + return + + # Constructing from a packed address + if isinstance(address, bytes): + self._check_packed_address(address, 4) + bvs = _compat_bytes_to_byte_vals(address) + self._ip = _compat_int_from_byte_vals(bvs, 'big') + return + + # Assume input argument to be string or any object representation + # which converts into a formatted IP string. + addr_str = _compat_str(address) + if '/' in addr_str: + raise AddressValueError("Unexpected '/' in %r" % address) + self._ip = self._ip_int_from_string(addr_str) + + @property + def packed(self): + """The binary representation of this address.""" + return v4_int_to_packed(self._ip) + + @property + def is_reserved(self): + """Test if the address is otherwise IETF reserved. + + Returns: + A boolean, True if the address is within the + reserved IPv4 Network range. + + """ + return self in self._constants._reserved_network + + @property + def is_private(self): + """Test if this address is allocated for private networks. + + Returns: + A boolean, True if the address is reserved per + iana-ipv4-special-registry. + + """ + return any(self in net for net in self._constants._private_networks) + + @property + def is_global(self): + return ( + self not in self._constants._public_network and + not self.is_private) + + @property + def is_multicast(self): + """Test if the address is reserved for multicast use. + + Returns: + A boolean, True if the address is multicast. + See RFC 3171 for details. + + """ + return self in self._constants._multicast_network + + @property + def is_unspecified(self): + """Test if the address is unspecified. + + Returns: + A boolean, True if this is the unspecified address as defined in + RFC 5735 3. 
+ + """ + return self == self._constants._unspecified_address + + @property + def is_loopback(self): + """Test if the address is a loopback address. + + Returns: + A boolean, True if the address is a loopback per RFC 3330. + + """ + return self in self._constants._loopback_network + + @property + def is_link_local(self): + """Test if the address is reserved for link-local. + + Returns: + A boolean, True if the address is link-local per RFC 3927. + + """ + return self in self._constants._linklocal_network + + +class IPv4Interface(IPv4Address): + + def __init__(self, address): + if isinstance(address, (bytes, _compat_int_types)): + IPv4Address.__init__(self, address) + self.network = IPv4Network(self._ip) + self._prefixlen = self._max_prefixlen + return + + if isinstance(address, tuple): + IPv4Address.__init__(self, address[0]) + if len(address) > 1: + self._prefixlen = int(address[1]) + else: + self._prefixlen = self._max_prefixlen + + self.network = IPv4Network(address, strict=False) + self.netmask = self.network.netmask + self.hostmask = self.network.hostmask + return + + addr = _split_optional_netmask(address) + IPv4Address.__init__(self, addr[0]) + + self.network = IPv4Network(address, strict=False) + self._prefixlen = self.network._prefixlen + + self.netmask = self.network.netmask + self.hostmask = self.network.hostmask + + def __str__(self): + return '%s/%d' % (self._string_from_ip_int(self._ip), + self.network.prefixlen) + + def __eq__(self, other): + address_equal = IPv4Address.__eq__(self, other) + if not address_equal or address_equal is NotImplemented: + return address_equal + try: + return self.network == other.network + except AttributeError: + # An interface with an associated network is NOT the + # same as an unassociated address. That's why the hash + # takes the extra info into account. + return False + + def __lt__(self, other): + address_less = IPv4Address.__lt__(self, other) + if address_less is NotImplemented: + return NotImplemented + try: + return (self.network < other.network or + self.network == other.network and address_less) + except AttributeError: + # We *do* allow addresses and interfaces to be sorted. The + # unassociated address is considered less than all interfaces. + return False + + def __hash__(self): + return self._ip ^ self._prefixlen ^ int(self.network.network_address) + + __reduce__ = _IPAddressBase.__reduce__ + + @property + def ip(self): + return IPv4Address(self._ip) + + @property + def with_prefixlen(self): + return '%s/%s' % (self._string_from_ip_int(self._ip), + self._prefixlen) + + @property + def with_netmask(self): + return '%s/%s' % (self._string_from_ip_int(self._ip), + self.netmask) + + @property + def with_hostmask(self): + return '%s/%s' % (self._string_from_ip_int(self._ip), + self.hostmask) + + +class IPv4Network(_BaseV4, _BaseNetwork): + + """This class represents and manipulates 32-bit IPv4 network + addresses.. + + Attributes: [examples for IPv4Network('192.0.2.0/27')] + .network_address: IPv4Address('192.0.2.0') + .hostmask: IPv4Address('0.0.0.31') + .broadcast_address: IPv4Address('192.0.2.32') + .netmask: IPv4Address('255.255.255.224') + .prefixlen: 27 + + """ + # Class to use when creating address objects + _address_class = IPv4Address + + def __init__(self, address, strict=True): + + """Instantiate a new IPv4 network object. + + Args: + address: A string or integer representing the IP [& network]. + '192.0.2.0/24' + '192.0.2.0/255.255.255.0' + '192.0.0.2/0.0.0.255' + are all functionally the same in IPv4. 
Similarly, + '192.0.2.1' + '192.0.2.1/255.255.255.255' + '192.0.2.1/32' + are also functionally equivalent. That is to say, failing to + provide a subnetmask will create an object with a mask of /32. + + If the mask (portion after the / in the argument) is given in + dotted quad form, it is treated as a netmask if it starts with a + non-zero field (e.g. /255.0.0.0 == /8) and as a hostmask if it + starts with a zero field (e.g. 0.255.255.255 == /8), with the + single exception of an all-zero mask which is treated as a + netmask == /0. If no mask is given, a default of /32 is used. + + Additionally, an integer can be passed, so + IPv4Network('192.0.2.1') == IPv4Network(3221225985) + or, more generally + IPv4Interface(int(IPv4Interface('192.0.2.1'))) == + IPv4Interface('192.0.2.1') + + Raises: + AddressValueError: If ipaddress isn't a valid IPv4 address. + NetmaskValueError: If the netmask isn't valid for + an IPv4 address. + ValueError: If strict is True and a network address is not + supplied. + + """ + _BaseNetwork.__init__(self, address) + + # Constructing from a packed address or integer + if isinstance(address, (_compat_int_types, bytes)): + self.network_address = IPv4Address(address) + self.netmask, self._prefixlen = self._make_netmask( + self._max_prefixlen) + # fixme: address/network test here. + return + + if isinstance(address, tuple): + if len(address) > 1: + arg = address[1] + else: + # We weren't given an address[1] + arg = self._max_prefixlen + self.network_address = IPv4Address(address[0]) + self.netmask, self._prefixlen = self._make_netmask(arg) + packed = int(self.network_address) + if packed & int(self.netmask) != packed: + if strict: + raise ValueError('%s has host bits set' % self) + else: + self.network_address = IPv4Address(packed & + int(self.netmask)) + return + + # Assume input argument to be string or any object representation + # which converts into a formatted IP prefix string. + addr = _split_optional_netmask(address) + self.network_address = IPv4Address(self._ip_int_from_string(addr[0])) + + if len(addr) == 2: + arg = addr[1] + else: + arg = self._max_prefixlen + self.netmask, self._prefixlen = self._make_netmask(arg) + + if strict: + if (IPv4Address(int(self.network_address) & int(self.netmask)) != + self.network_address): + raise ValueError('%s has host bits set' % self) + self.network_address = IPv4Address(int(self.network_address) & + int(self.netmask)) + + if self._prefixlen == (self._max_prefixlen - 1): + self.hosts = self.__iter__ + + @property + def is_global(self): + """Test if this address is allocated for public networks. + + Returns: + A boolean, True if the address is not reserved per + iana-ipv4-special-registry. 
+ + """ + return (not (self.network_address in IPv4Network('100.64.0.0/10') and + self.broadcast_address in IPv4Network('100.64.0.0/10')) and + not self.is_private) + + +class _IPv4Constants(object): + + _linklocal_network = IPv4Network('169.254.0.0/16') + + _loopback_network = IPv4Network('127.0.0.0/8') + + _multicast_network = IPv4Network('224.0.0.0/4') + + _public_network = IPv4Network('100.64.0.0/10') + + _private_networks = [ + IPv4Network('0.0.0.0/8'), + IPv4Network('10.0.0.0/8'), + IPv4Network('127.0.0.0/8'), + IPv4Network('169.254.0.0/16'), + IPv4Network('172.16.0.0/12'), + IPv4Network('192.0.0.0/29'), + IPv4Network('192.0.0.170/31'), + IPv4Network('192.0.2.0/24'), + IPv4Network('192.168.0.0/16'), + IPv4Network('198.18.0.0/15'), + IPv4Network('198.51.100.0/24'), + IPv4Network('203.0.113.0/24'), + IPv4Network('240.0.0.0/4'), + IPv4Network('255.255.255.255/32'), + ] + + _reserved_network = IPv4Network('240.0.0.0/4') + + _unspecified_address = IPv4Address('0.0.0.0') + + +IPv4Address._constants = _IPv4Constants + + +class _BaseV6(object): + + """Base IPv6 object. + + The following methods are used by IPv6 objects in both single IP + addresses and networks. + + """ + + __slots__ = () + _version = 6 + _ALL_ONES = (2 ** IPV6LENGTH) - 1 + _HEXTET_COUNT = 8 + _HEX_DIGITS = frozenset('0123456789ABCDEFabcdef') + _max_prefixlen = IPV6LENGTH + + # There are only a bunch of valid v6 netmasks, so we cache them all + # when constructed (see _make_netmask()). + _netmask_cache = {} + + @classmethod + def _make_netmask(cls, arg): + """Make a (netmask, prefix_len) tuple from the given argument. + + Argument can be: + - an integer (the prefix length) + - a string representing the prefix length (e.g. "24") + - a string representing the prefix netmask (e.g. "255.255.255.0") + """ + if arg not in cls._netmask_cache: + if isinstance(arg, _compat_int_types): + prefixlen = arg + else: + prefixlen = cls._prefix_from_prefix_string(arg) + netmask = IPv6Address(cls._ip_int_from_prefix(prefixlen)) + cls._netmask_cache[arg] = netmask, prefixlen + return cls._netmask_cache[arg] + + @classmethod + def _ip_int_from_string(cls, ip_str): + """Turn an IPv6 ip_str into an integer. + + Args: + ip_str: A string, the IPv6 ip_str. + + Returns: + An int, the IPv6 address + + Raises: + AddressValueError: if ip_str isn't a valid IPv6 Address. + + """ + if not ip_str: + raise AddressValueError('Address cannot be empty') + + parts = ip_str.split(':') + + # An IPv6 address needs at least 2 colons (3 parts). + _min_parts = 3 + if len(parts) < _min_parts: + msg = "At least %d parts expected in %r" % (_min_parts, ip_str) + raise AddressValueError(msg) + + # If the address has an IPv4-style suffix, convert it to hexadecimal. + if '.' in parts[-1]: + try: + ipv4_int = IPv4Address(parts.pop())._ip + except AddressValueError as exc: + raise AddressValueError("%s in %r" % (exc, ip_str)) + parts.append('%x' % ((ipv4_int >> 16) & 0xFFFF)) + parts.append('%x' % (ipv4_int & 0xFFFF)) + + # An IPv6 address can't have more than 8 colons (9 parts). + # The extra colon comes from using the "::" notation for a single + # leading or trailing zero part. + _max_parts = cls._HEXTET_COUNT + 1 + if len(parts) > _max_parts: + msg = "At most %d colons permitted in %r" % ( + _max_parts - 1, ip_str) + raise AddressValueError(msg) + + # Disregarding the endpoints, find '::' with nothing in between. + # This indicates that a run of zeroes has been skipped. 
+ skip_index = None + for i in _compat_range(1, len(parts) - 1): + if not parts[i]: + if skip_index is not None: + # Can't have more than one '::' + msg = "At most one '::' permitted in %r" % ip_str + raise AddressValueError(msg) + skip_index = i + + # parts_hi is the number of parts to copy from above/before the '::' + # parts_lo is the number of parts to copy from below/after the '::' + if skip_index is not None: + # If we found a '::', then check if it also covers the endpoints. + parts_hi = skip_index + parts_lo = len(parts) - skip_index - 1 + if not parts[0]: + parts_hi -= 1 + if parts_hi: + msg = "Leading ':' only permitted as part of '::' in %r" + raise AddressValueError(msg % ip_str) # ^: requires ^:: + if not parts[-1]: + parts_lo -= 1 + if parts_lo: + msg = "Trailing ':' only permitted as part of '::' in %r" + raise AddressValueError(msg % ip_str) # :$ requires ::$ + parts_skipped = cls._HEXTET_COUNT - (parts_hi + parts_lo) + if parts_skipped < 1: + msg = "Expected at most %d other parts with '::' in %r" + raise AddressValueError(msg % (cls._HEXTET_COUNT - 1, ip_str)) + else: + # Otherwise, allocate the entire address to parts_hi. The + # endpoints could still be empty, but _parse_hextet() will check + # for that. + if len(parts) != cls._HEXTET_COUNT: + msg = "Exactly %d parts expected without '::' in %r" + raise AddressValueError(msg % (cls._HEXTET_COUNT, ip_str)) + if not parts[0]: + msg = "Leading ':' only permitted as part of '::' in %r" + raise AddressValueError(msg % ip_str) # ^: requires ^:: + if not parts[-1]: + msg = "Trailing ':' only permitted as part of '::' in %r" + raise AddressValueError(msg % ip_str) # :$ requires ::$ + parts_hi = len(parts) + parts_lo = 0 + parts_skipped = 0 + + try: + # Now, parse the hextets into a 128-bit integer. + ip_int = 0 + for i in range(parts_hi): + ip_int <<= 16 + ip_int |= cls._parse_hextet(parts[i]) + ip_int <<= 16 * parts_skipped + for i in range(-parts_lo, 0): + ip_int <<= 16 + ip_int |= cls._parse_hextet(parts[i]) + return ip_int + except ValueError as exc: + raise AddressValueError("%s in %r" % (exc, ip_str)) + + @classmethod + def _parse_hextet(cls, hextet_str): + """Convert an IPv6 hextet string into an integer. + + Args: + hextet_str: A string, the number to parse. + + Returns: + The hextet as an integer. + + Raises: + ValueError: if the input isn't strictly a hex number from + [0..FFFF]. + + """ + # Whitelist the characters, since int() allows a lot of bizarre stuff. + if not cls._HEX_DIGITS.issuperset(hextet_str): + raise ValueError("Only hex digits permitted in %r" % hextet_str) + # We do the length check second, since the invalid character error + # is likely to be more informative for the user + if len(hextet_str) > 4: + msg = "At most 4 characters permitted in %r" + raise ValueError(msg % hextet_str) + # Length check means we can skip checking the integer value + return int(hextet_str, 16) + + @classmethod + def _compress_hextets(cls, hextets): + """Compresses a list of hextets. + + Compresses a list of strings, replacing the longest continuous + sequence of "0" in the list with "" and adding empty strings at + the beginning or at the end of the string such that subsequently + calling ":".join(hextets) will produce the compressed version of + the IPv6 address. + + Args: + hextets: A list of strings, the hextets to compress. + + Returns: + A list of strings. 
+ + """ + best_doublecolon_start = -1 + best_doublecolon_len = 0 + doublecolon_start = -1 + doublecolon_len = 0 + for index, hextet in enumerate(hextets): + if hextet == '0': + doublecolon_len += 1 + if doublecolon_start == -1: + # Start of a sequence of zeros. + doublecolon_start = index + if doublecolon_len > best_doublecolon_len: + # This is the longest sequence of zeros so far. + best_doublecolon_len = doublecolon_len + best_doublecolon_start = doublecolon_start + else: + doublecolon_len = 0 + doublecolon_start = -1 + + if best_doublecolon_len > 1: + best_doublecolon_end = (best_doublecolon_start + + best_doublecolon_len) + # For zeros at the end of the address. + if best_doublecolon_end == len(hextets): + hextets += [''] + hextets[best_doublecolon_start:best_doublecolon_end] = [''] + # For zeros at the beginning of the address. + if best_doublecolon_start == 0: + hextets = [''] + hextets + + return hextets + + @classmethod + def _string_from_ip_int(cls, ip_int=None): + """Turns a 128-bit integer into hexadecimal notation. + + Args: + ip_int: An integer, the IP address. + + Returns: + A string, the hexadecimal representation of the address. + + Raises: + ValueError: The address is bigger than 128 bits of all ones. + + """ + if ip_int is None: + ip_int = int(cls._ip) + + if ip_int > cls._ALL_ONES: + raise ValueError('IPv6 address is too large') + + hex_str = '%032x' % ip_int + hextets = ['%x' % int(hex_str[x:x + 4], 16) for x in range(0, 32, 4)] + + hextets = cls._compress_hextets(hextets) + return ':'.join(hextets) + + def _explode_shorthand_ip_string(self): + """Expand a shortened IPv6 address. + + Args: + ip_str: A string, the IPv6 address. + + Returns: + A string, the expanded IPv6 address. + + """ + if isinstance(self, IPv6Network): + ip_str = _compat_str(self.network_address) + elif isinstance(self, IPv6Interface): + ip_str = _compat_str(self.ip) + else: + ip_str = _compat_str(self) + + ip_int = self._ip_int_from_string(ip_str) + hex_str = '%032x' % ip_int + parts = [hex_str[x:x + 4] for x in range(0, 32, 4)] + if isinstance(self, (_BaseNetwork, IPv6Interface)): + return '%s/%d' % (':'.join(parts), self._prefixlen) + return ':'.join(parts) + + def _reverse_pointer(self): + """Return the reverse DNS pointer name for the IPv6 address. + + This implements the method described in RFC3596 2.5. + + """ + reverse_chars = self.exploded[::-1].replace(':', '') + return '.'.join(reverse_chars) + '.ip6.arpa' + + @property + def max_prefixlen(self): + return self._max_prefixlen + + @property + def version(self): + return self._version + + +class IPv6Address(_BaseV6, _BaseAddress): + + """Represent and manipulate single IPv6 Addresses.""" + + __slots__ = ('_ip', '__weakref__') + + def __init__(self, address): + """Instantiate a new IPv6 address object. + + Args: + address: A string or integer representing the IP + + Additionally, an integer can be passed, so + IPv6Address('2001:db8::') == + IPv6Address(42540766411282592856903984951653826560) + or, more generally + IPv6Address(int(IPv6Address('2001:db8::'))) == + IPv6Address('2001:db8::') + + Raises: + AddressValueError: If address isn't a valid IPv6 address. + + """ + # Efficient constructor from integer. 
+ if isinstance(address, _compat_int_types): + self._check_int_address(address) + self._ip = address + return + + # Constructing from a packed address + if isinstance(address, bytes): + self._check_packed_address(address, 16) + bvs = _compat_bytes_to_byte_vals(address) + self._ip = _compat_int_from_byte_vals(bvs, 'big') + return + + # Assume input argument to be string or any object representation + # which converts into a formatted IP string. + addr_str = _compat_str(address) + if '/' in addr_str: + raise AddressValueError("Unexpected '/' in %r" % address) + self._ip = self._ip_int_from_string(addr_str) + + @property + def packed(self): + """The binary representation of this address.""" + return v6_int_to_packed(self._ip) + + @property + def is_multicast(self): + """Test if the address is reserved for multicast use. + + Returns: + A boolean, True if the address is a multicast address. + See RFC 2373 2.7 for details. + + """ + return self in self._constants._multicast_network + + @property + def is_reserved(self): + """Test if the address is otherwise IETF reserved. + + Returns: + A boolean, True if the address is within one of the + reserved IPv6 Network ranges. + + """ + return any(self in x for x in self._constants._reserved_networks) + + @property + def is_link_local(self): + """Test if the address is reserved for link-local. + + Returns: + A boolean, True if the address is reserved per RFC 4291. + + """ + return self in self._constants._linklocal_network + + @property + def is_site_local(self): + """Test if the address is reserved for site-local. + + Note that the site-local address space has been deprecated by RFC 3879. + Use is_private to test if this address is in the space of unique local + addresses as defined by RFC 4193. + + Returns: + A boolean, True if the address is reserved per RFC 3513 2.5.6. + + """ + return self in self._constants._sitelocal_network + + @property + def is_private(self): + """Test if this address is allocated for private networks. + + Returns: + A boolean, True if the address is reserved per + iana-ipv6-special-registry. + + """ + return any(self in net for net in self._constants._private_networks) + + @property + def is_global(self): + """Test if this address is allocated for public networks. + + Returns: + A boolean, true if the address is not reserved per + iana-ipv6-special-registry. + + """ + return not self.is_private + + @property + def is_unspecified(self): + """Test if the address is unspecified. + + Returns: + A boolean, True if this is the unspecified address as defined in + RFC 2373 2.5.2. + + """ + return self._ip == 0 + + @property + def is_loopback(self): + """Test if the address is a loopback address. + + Returns: + A boolean, True if the address is a loopback address as defined in + RFC 2373 2.5.3. + + """ + return self._ip == 1 + + @property + def ipv4_mapped(self): + """Return the IPv4 mapped address. + + Returns: + If the IPv6 address is a v4 mapped address, return the + IPv4 mapped address. Return None otherwise. + + """ + if (self._ip >> 32) != 0xFFFF: + return None + return IPv4Address(self._ip & 0xFFFFFFFF) + + @property + def teredo(self): + """Tuple of embedded teredo IPs. 
+ + Returns: + Tuple of the (server, client) IPs or None if the address + doesn't appear to be a teredo address (doesn't start with + 2001::/32) + + """ + if (self._ip >> 96) != 0x20010000: + return None + return (IPv4Address((self._ip >> 64) & 0xFFFFFFFF), + IPv4Address(~self._ip & 0xFFFFFFFF)) + + @property + def sixtofour(self): + """Return the IPv4 6to4 embedded address. + + Returns: + The IPv4 6to4-embedded address if present or None if the + address doesn't appear to contain a 6to4 embedded address. + + """ + if (self._ip >> 112) != 0x2002: + return None + return IPv4Address((self._ip >> 80) & 0xFFFFFFFF) + + +class IPv6Interface(IPv6Address): + + def __init__(self, address): + if isinstance(address, (bytes, _compat_int_types)): + IPv6Address.__init__(self, address) + self.network = IPv6Network(self._ip) + self._prefixlen = self._max_prefixlen + return + if isinstance(address, tuple): + IPv6Address.__init__(self, address[0]) + if len(address) > 1: + self._prefixlen = int(address[1]) + else: + self._prefixlen = self._max_prefixlen + self.network = IPv6Network(address, strict=False) + self.netmask = self.network.netmask + self.hostmask = self.network.hostmask + return + + addr = _split_optional_netmask(address) + IPv6Address.__init__(self, addr[0]) + self.network = IPv6Network(address, strict=False) + self.netmask = self.network.netmask + self._prefixlen = self.network._prefixlen + self.hostmask = self.network.hostmask + + def __str__(self): + return '%s/%d' % (self._string_from_ip_int(self._ip), + self.network.prefixlen) + + def __eq__(self, other): + address_equal = IPv6Address.__eq__(self, other) + if not address_equal or address_equal is NotImplemented: + return address_equal + try: + return self.network == other.network + except AttributeError: + # An interface with an associated network is NOT the + # same as an unassociated address. That's why the hash + # takes the extra info into account. + return False + + def __lt__(self, other): + address_less = IPv6Address.__lt__(self, other) + if address_less is NotImplemented: + return NotImplemented + try: + return (self.network < other.network or + self.network == other.network and address_less) + except AttributeError: + # We *do* allow addresses and interfaces to be sorted. The + # unassociated address is considered less than all interfaces. + return False + + def __hash__(self): + return self._ip ^ self._prefixlen ^ int(self.network.network_address) + + __reduce__ = _IPAddressBase.__reduce__ + + @property + def ip(self): + return IPv6Address(self._ip) + + @property + def with_prefixlen(self): + return '%s/%s' % (self._string_from_ip_int(self._ip), + self._prefixlen) + + @property + def with_netmask(self): + return '%s/%s' % (self._string_from_ip_int(self._ip), + self.netmask) + + @property + def with_hostmask(self): + return '%s/%s' % (self._string_from_ip_int(self._ip), + self.hostmask) + + @property + def is_unspecified(self): + return self._ip == 0 and self.network.is_unspecified + + @property + def is_loopback(self): + return self._ip == 1 and self.network.is_loopback + + +class IPv6Network(_BaseV6, _BaseNetwork): + + """This class represents and manipulates 128-bit IPv6 networks. 
+ + Attributes: [examples for IPv6('2001:db8::1000/124')] + .network_address: IPv6Address('2001:db8::1000') + .hostmask: IPv6Address('::f') + .broadcast_address: IPv6Address('2001:db8::100f') + .netmask: IPv6Address('ffff:ffff:ffff:ffff:ffff:ffff:ffff:fff0') + .prefixlen: 124 + + """ + + # Class to use when creating address objects + _address_class = IPv6Address + + def __init__(self, address, strict=True): + """Instantiate a new IPv6 Network object. + + Args: + address: A string or integer representing the IPv6 network or the + IP and prefix/netmask. + '2001:db8::/128' + '2001:db8:0000:0000:0000:0000:0000:0000/128' + '2001:db8::' + are all functionally the same in IPv6. That is to say, + failing to provide a subnetmask will create an object with + a mask of /128. + + Additionally, an integer can be passed, so + IPv6Network('2001:db8::') == + IPv6Network(42540766411282592856903984951653826560) + or, more generally + IPv6Network(int(IPv6Network('2001:db8::'))) == + IPv6Network('2001:db8::') + + strict: A boolean. If true, ensure that we have been passed + A true network address, eg, 2001:db8::1000/124 and not an + IP address on a network, eg, 2001:db8::1/124. + + Raises: + AddressValueError: If address isn't a valid IPv6 address. + NetmaskValueError: If the netmask isn't valid for + an IPv6 address. + ValueError: If strict was True and a network address was not + supplied. + + """ + _BaseNetwork.__init__(self, address) + + # Efficient constructor from integer or packed address + if isinstance(address, (bytes, _compat_int_types)): + self.network_address = IPv6Address(address) + self.netmask, self._prefixlen = self._make_netmask( + self._max_prefixlen) + return + + if isinstance(address, tuple): + if len(address) > 1: + arg = address[1] + else: + arg = self._max_prefixlen + self.netmask, self._prefixlen = self._make_netmask(arg) + self.network_address = IPv6Address(address[0]) + packed = int(self.network_address) + if packed & int(self.netmask) != packed: + if strict: + raise ValueError('%s has host bits set' % self) + else: + self.network_address = IPv6Address(packed & + int(self.netmask)) + return + + # Assume input argument to be string or any object representation + # which converts into a formatted IP prefix string. + addr = _split_optional_netmask(address) + + self.network_address = IPv6Address(self._ip_int_from_string(addr[0])) + + if len(addr) == 2: + arg = addr[1] + else: + arg = self._max_prefixlen + self.netmask, self._prefixlen = self._make_netmask(arg) + + if strict: + if (IPv6Address(int(self.network_address) & int(self.netmask)) != + self.network_address): + raise ValueError('%s has host bits set' % self) + self.network_address = IPv6Address(int(self.network_address) & + int(self.netmask)) + + if self._prefixlen == (self._max_prefixlen - 1): + self.hosts = self.__iter__ + + def hosts(self): + """Generate Iterator over usable hosts in a network. + + This is like __iter__ except it doesn't return the + Subnet-Router anycast address. + + """ + network = int(self.network_address) + broadcast = int(self.broadcast_address) + for x in _compat_range(network + 1, broadcast + 1): + yield self._address_class(x) + + @property + def is_site_local(self): + """Test if the address is reserved for site-local. + + Note that the site-local address space has been deprecated by RFC 3879. + Use is_private to test if this address is in the space of unique local + addresses as defined by RFC 4193. + + Returns: + A boolean, True if the address is reserved per RFC 3513 2.5.6. 
+ + """ + return (self.network_address.is_site_local and + self.broadcast_address.is_site_local) + + +class _IPv6Constants(object): + + _linklocal_network = IPv6Network('fe80::/10') + + _multicast_network = IPv6Network('ff00::/8') + + _private_networks = [ + IPv6Network('::1/128'), + IPv6Network('::/128'), + IPv6Network('::ffff:0:0/96'), + IPv6Network('100::/64'), + IPv6Network('2001::/23'), + IPv6Network('2001:2::/48'), + IPv6Network('2001:db8::/32'), + IPv6Network('2001:10::/28'), + IPv6Network('fc00::/7'), + IPv6Network('fe80::/10'), + ] + + _reserved_networks = [ + IPv6Network('::/8'), IPv6Network('100::/8'), + IPv6Network('200::/7'), IPv6Network('400::/6'), + IPv6Network('800::/5'), IPv6Network('1000::/4'), + IPv6Network('4000::/3'), IPv6Network('6000::/3'), + IPv6Network('8000::/3'), IPv6Network('A000::/3'), + IPv6Network('C000::/3'), IPv6Network('E000::/4'), + IPv6Network('F000::/5'), IPv6Network('F800::/6'), + IPv6Network('FE00::/9'), + ] + + _sitelocal_network = IPv6Network('fec0::/10') + + +IPv6Address._constants = _IPv6Constants diff --git a/packages/wakatime/packages/py27/cryptography/__about__.py b/packages/wakatime/packages/py27/cryptography/__about__.py new file mode 100644 index 0000000..4055e29 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/__about__.py @@ -0,0 +1,23 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +__all__ = [ + "__title__", "__summary__", "__uri__", "__version__", "__author__", + "__email__", "__license__", "__copyright__", +] + +__title__ = "cryptography" +__summary__ = ("cryptography is a package which provides cryptographic recipes" + " and primitives to Python developers.") +__uri__ = "https://github.com/pyca/cryptography" + +__version__ = "2.8" + +__author__ = "The cryptography developers" +__email__ = "cryptography-dev@python.org" + +__license__ = "BSD or Apache License, Version 2.0" +__copyright__ = "Copyright 2013-2019 {}".format(__author__) diff --git a/packages/wakatime/packages/py27/cryptography/__init__.py b/packages/wakatime/packages/py27/cryptography/__init__.py new file mode 100644 index 0000000..6da0b38 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/__init__.py @@ -0,0 +1,16 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +from cryptography.__about__ import ( + __author__, __copyright__, __email__, __license__, __summary__, __title__, + __uri__, __version__ +) + + +__all__ = [ + "__title__", "__summary__", "__uri__", "__version__", "__author__", + "__email__", "__license__", "__copyright__", +] diff --git a/packages/wakatime/packages/py27/cryptography/exceptions.py b/packages/wakatime/packages/py27/cryptography/exceptions.py new file mode 100644 index 0000000..1d52d7d --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/exceptions.py @@ -0,0 +1,58 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import absolute_import, division, print_function + +from enum import Enum + + +class _Reasons(Enum): + BACKEND_MISSING_INTERFACE = 0 + UNSUPPORTED_HASH = 1 + UNSUPPORTED_CIPHER = 2 + UNSUPPORTED_PADDING = 3 + UNSUPPORTED_MGF = 4 + UNSUPPORTED_PUBLIC_KEY_ALGORITHM = 5 + UNSUPPORTED_ELLIPTIC_CURVE = 6 + UNSUPPORTED_SERIALIZATION = 7 + UNSUPPORTED_X509 = 8 + UNSUPPORTED_EXCHANGE_ALGORITHM = 9 + UNSUPPORTED_DIFFIE_HELLMAN = 10 + UNSUPPORTED_MAC = 11 + + +class UnsupportedAlgorithm(Exception): + def __init__(self, message, reason=None): + super(UnsupportedAlgorithm, self).__init__(message) + self._reason = reason + + +class AlreadyFinalized(Exception): + pass + + +class AlreadyUpdated(Exception): + pass + + +class NotYetFinalized(Exception): + pass + + +class InvalidTag(Exception): + pass + + +class InvalidSignature(Exception): + pass + + +class InternalError(Exception): + def __init__(self, msg, err_code): + super(InternalError, self).__init__(msg) + self.err_code = err_code + + +class InvalidKey(Exception): + pass diff --git a/packages/wakatime/packages/py27/cryptography/fernet.py b/packages/wakatime/packages/py27/cryptography/fernet.py new file mode 100644 index 0000000..b990def --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/fernet.py @@ -0,0 +1,171 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import base64 +import binascii +import os +import struct +import time + +import six + +from cryptography import utils +from cryptography.exceptions import InvalidSignature +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import hashes, padding +from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes +from cryptography.hazmat.primitives.hmac import HMAC + + +class InvalidToken(Exception): + pass + + +_MAX_CLOCK_SKEW = 60 + + +class Fernet(object): + def __init__(self, key, backend=None): + if backend is None: + backend = default_backend() + + key = base64.urlsafe_b64decode(key) + if len(key) != 32: + raise ValueError( + "Fernet key must be 32 url-safe base64-encoded bytes." 
+ ) + + self._signing_key = key[:16] + self._encryption_key = key[16:] + self._backend = backend + + @classmethod + def generate_key(cls): + return base64.urlsafe_b64encode(os.urandom(32)) + + def encrypt(self, data): + current_time = int(time.time()) + iv = os.urandom(16) + return self._encrypt_from_parts(data, current_time, iv) + + def _encrypt_from_parts(self, data, current_time, iv): + utils._check_bytes("data", data) + + padder = padding.PKCS7(algorithms.AES.block_size).padder() + padded_data = padder.update(data) + padder.finalize() + encryptor = Cipher( + algorithms.AES(self._encryption_key), modes.CBC(iv), self._backend + ).encryptor() + ciphertext = encryptor.update(padded_data) + encryptor.finalize() + + basic_parts = ( + b"\x80" + struct.pack(">Q", current_time) + iv + ciphertext + ) + + h = HMAC(self._signing_key, hashes.SHA256(), backend=self._backend) + h.update(basic_parts) + hmac = h.finalize() + return base64.urlsafe_b64encode(basic_parts + hmac) + + def decrypt(self, token, ttl=None): + timestamp, data = Fernet._get_unverified_token_data(token) + return self._decrypt_data(data, timestamp, ttl) + + def extract_timestamp(self, token): + timestamp, data = Fernet._get_unverified_token_data(token) + # Verify the token was not tampered with. + self._verify_signature(data) + return timestamp + + @staticmethod + def _get_unverified_token_data(token): + utils._check_bytes("token", token) + try: + data = base64.urlsafe_b64decode(token) + except (TypeError, binascii.Error): + raise InvalidToken + + if not data or six.indexbytes(data, 0) != 0x80: + raise InvalidToken + + try: + timestamp, = struct.unpack(">Q", data[1:9]) + except struct.error: + raise InvalidToken + return timestamp, data + + def _verify_signature(self, data): + h = HMAC(self._signing_key, hashes.SHA256(), backend=self._backend) + h.update(data[:-32]) + try: + h.verify(data[-32:]) + except InvalidSignature: + raise InvalidToken + + def _decrypt_data(self, data, timestamp, ttl): + current_time = int(time.time()) + if ttl is not None: + if timestamp + ttl < current_time: + raise InvalidToken + + if current_time + _MAX_CLOCK_SKEW < timestamp: + raise InvalidToken + + self._verify_signature(data) + + iv = data[9:25] + ciphertext = data[25:-32] + decryptor = Cipher( + algorithms.AES(self._encryption_key), modes.CBC(iv), self._backend + ).decryptor() + plaintext_padded = decryptor.update(ciphertext) + try: + plaintext_padded += decryptor.finalize() + except ValueError: + raise InvalidToken + unpadder = padding.PKCS7(algorithms.AES.block_size).unpadder() + + unpadded = unpadder.update(plaintext_padded) + try: + unpadded += unpadder.finalize() + except ValueError: + raise InvalidToken + return unpadded + + +class MultiFernet(object): + def __init__(self, fernets): + fernets = list(fernets) + if not fernets: + raise ValueError( + "MultiFernet requires at least one Fernet instance" + ) + self._fernets = fernets + + def encrypt(self, msg): + return self._fernets[0].encrypt(msg) + + def rotate(self, msg): + timestamp, data = Fernet._get_unverified_token_data(msg) + for f in self._fernets: + try: + p = f._decrypt_data(data, timestamp, None) + break + except InvalidToken: + pass + else: + raise InvalidToken + + iv = os.urandom(16) + return self._fernets[0]._encrypt_from_parts(p, timestamp, iv) + + def decrypt(self, msg, ttl=None): + for f in self._fernets: + try: + return f.decrypt(msg, ttl) + except InvalidToken: + pass + raise InvalidToken diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/__init__.py 
b/packages/wakatime/packages/py27/cryptography/hazmat/__init__.py new file mode 100644 index 0000000..9f06a99 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/__init__.py @@ -0,0 +1,11 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +""" +Hazardous Materials + +This is a "Hazardous Materials" module. You should ONLY use it if you're +100% absolutely sure that you know what you're doing because this module +is full of land mines, dragons, and dinosaurs with laser guns. +""" +from __future__ import absolute_import, division, print_function diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/_der.py b/packages/wakatime/packages/py27/cryptography/hazmat/_der.py new file mode 100644 index 0000000..51518d6 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/_der.py @@ -0,0 +1,156 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import six + +from cryptography.utils import int_from_bytes, int_to_bytes + + +# This module contains a lightweight DER encoder and decoder. See X.690 for the +# specification. This module intentionally does not implement the more complex +# BER encoding, only DER. +# +# Note this implementation treats an element's constructed bit as part of the +# tag. This is fine for DER, where the bit is always computable from the type. + + +CONSTRUCTED = 0x20 +CONTEXT_SPECIFIC = 0x80 + +INTEGER = 0x02 +BIT_STRING = 0x03 +OCTET_STRING = 0x04 +NULL = 0x05 +OBJECT_IDENTIFIER = 0x06 +SEQUENCE = 0x10 | CONSTRUCTED +SET = 0x11 | CONSTRUCTED +PRINTABLE_STRING = 0x13 +UTC_TIME = 0x17 +GENERALIZED_TIME = 0x18 + + +class DERReader(object): + def __init__(self, data): + self.data = memoryview(data) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, tb): + if exc_value is None: + self.check_empty() + + def is_empty(self): + return len(self.data) == 0 + + def check_empty(self): + if not self.is_empty(): + raise ValueError("Invalid DER input: trailing data") + + def read_byte(self): + if len(self.data) < 1: + raise ValueError("Invalid DER input: insufficient data") + ret = six.indexbytes(self.data, 0) + self.data = self.data[1:] + return ret + + def read_bytes(self, n): + if len(self.data) < n: + raise ValueError("Invalid DER input: insufficient data") + ret = self.data[:n] + self.data = self.data[n:] + return ret + + def read_any_element(self): + tag = self.read_byte() + # Tag numbers 31 or higher are stored in multiple bytes. No supported + # ASN.1 types use such tags, so reject these. + if tag & 0x1f == 0x1f: + raise ValueError("Invalid DER input: unexpected high tag number") + length_byte = self.read_byte() + if length_byte & 0x80 == 0: + # If the high bit is clear, the first length byte is the length. + length = length_byte + else: + # If the high bit is set, the first length byte encodes the length + # of the length. 
+ length_byte &= 0x7f + if length_byte == 0: + raise ValueError( + "Invalid DER input: indefinite length form is not allowed " + "in DER" + ) + length = 0 + for i in range(length_byte): + length <<= 8 + length |= self.read_byte() + if length == 0: + raise ValueError( + "Invalid DER input: length was not minimally-encoded" + ) + if length < 0x80: + # If the length could have been encoded in short form, it must + # not use long form. + raise ValueError( + "Invalid DER input: length was not minimally-encoded" + ) + body = self.read_bytes(length) + return tag, DERReader(body) + + def read_element(self, expected_tag): + tag, body = self.read_any_element() + if tag != expected_tag: + raise ValueError("Invalid DER input: unexpected tag") + return body + + def read_single_element(self, expected_tag): + with self: + return self.read_element(expected_tag) + + def read_optional_element(self, expected_tag): + if len(self.data) > 0 and six.indexbytes(self.data, 0) == expected_tag: + return self.read_element(expected_tag) + return None + + def as_integer(self): + if len(self.data) == 0: + raise ValueError("Invalid DER input: empty integer contents") + first = six.indexbytes(self.data, 0) + if first & 0x80 == 0x80: + raise ValueError("Negative DER integers are not supported") + # The first 9 bits must not all be zero or all be ones. Otherwise, the + # encoding should have been one byte shorter. + if len(self.data) > 1: + second = six.indexbytes(self.data, 1) + if first == 0 and second & 0x80 == 0: + raise ValueError( + "Invalid DER input: integer not minimally-encoded" + ) + return int_from_bytes(self.data, "big") + + +def encode_der_integer(x): + if not isinstance(x, six.integer_types): + raise ValueError("Value must be an integer") + if x < 0: + raise ValueError("Negative integers are not supported") + n = x.bit_length() // 8 + 1 + return int_to_bytes(x, n) + + +def encode_der(tag, *children): + length = 0 + for child in children: + length += len(child) + chunks = [six.int2byte(tag)] + if length < 0x80: + chunks.append(six.int2byte(length)) + else: + length_bytes = int_to_bytes(length) + chunks.append(six.int2byte(0x80 | len(length_bytes))) + chunks.append(length_bytes) + chunks.extend(children) + return b"".join(chunks) diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/_oid.py b/packages/wakatime/packages/py27/cryptography/hazmat/_oid.py new file mode 100644 index 0000000..4b08722 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/_oid.py @@ -0,0 +1,67 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +from cryptography import utils + + +class ObjectIdentifier(object): + def __init__(self, dotted_string): + self._dotted_string = dotted_string + + nodes = self._dotted_string.split(".") + intnodes = [] + + # There must be at least 2 nodes, the first node must be 0..2, and + # if less than 2, the second node cannot have a value outside the + # range 0..39. All nodes must be integers. 
+        for node in nodes:
+            try:
+                intnodes.append(int(node, 0))
+            except ValueError:
+                raise ValueError(
+                    "Malformed OID: %s (non-integer nodes)" % (
+                        self._dotted_string))
+
+        if len(nodes) < 2:
+            raise ValueError(
+                "Malformed OID: %s (insufficient number of nodes)" % (
+                    self._dotted_string))
+
+        if intnodes[0] > 2:
+            raise ValueError(
+                "Malformed OID: %s (first node outside valid range)" % (
+                    self._dotted_string))
+
+        if intnodes[0] < 2 and intnodes[1] >= 40:
+            raise ValueError(
+                "Malformed OID: %s (second node outside valid range)" % (
+                    self._dotted_string))
+
+    def __eq__(self, other):
+        if not isinstance(other, ObjectIdentifier):
+            return NotImplemented
+
+        return self.dotted_string == other.dotted_string
+
+    def __ne__(self, other):
+        return not self == other
+
+    def __repr__(self):
+        return "<ObjectIdentifier(oid={}, name={})>".format(
+            self.dotted_string,
+            self._name
+        )
+
+    def __hash__(self):
+        return hash(self.dotted_string)
+
+    @property
+    def _name(self):
+        # Lazy import to avoid an import cycle
+        from cryptography.x509.oid import _OID_NAMES
+        return _OID_NAMES.get(self, "Unknown OID")
+
+    dotted_string = utils.read_only_property("_dotted_string")
diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/backends/__init__.py b/packages/wakatime/packages/py27/cryptography/hazmat/backends/__init__.py
new file mode 100644
index 0000000..565bde7
--- /dev/null
+++ b/packages/wakatime/packages/py27/cryptography/hazmat/backends/__init__.py
@@ -0,0 +1,18 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+
+_default_backend = None
+
+
+def default_backend():
+    global _default_backend
+
+    if _default_backend is None:
+        from cryptography.hazmat.backends.openssl.backend import backend
+        _default_backend = backend
+
+    return _default_backend
diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/backends/interfaces.py b/packages/wakatime/packages/py27/cryptography/hazmat/backends/interfaces.py
new file mode 100644
index 0000000..20f4164
--- /dev/null
+++ b/packages/wakatime/packages/py27/cryptography/hazmat/backends/interfaces.py
@@ -0,0 +1,395 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import abc
+
+import six
+
+
+@six.add_metaclass(abc.ABCMeta)
+class CipherBackend(object):
+    @abc.abstractmethod
+    def cipher_supported(self, cipher, mode):
+        """
+        Return True if the given cipher and mode are supported.
+        """
+
+    @abc.abstractmethod
+    def create_symmetric_encryption_ctx(self, cipher, mode):
+        """
+        Get a CipherContext that can be used for encryption.
+        """
+
+    @abc.abstractmethod
+    def create_symmetric_decryption_ctx(self, cipher, mode):
+        """
+        Get a CipherContext that can be used for decryption.
+        """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class HashBackend(object):
+    @abc.abstractmethod
+    def hash_supported(self, algorithm):
+        """
+        Return True if the hash algorithm is supported by this backend.
+        """
+
+    @abc.abstractmethod
+    def create_hash_ctx(self, algorithm):
+        """
+        Create a HashContext for calculating a message digest.
+ """ + + +@six.add_metaclass(abc.ABCMeta) +class HMACBackend(object): + @abc.abstractmethod + def hmac_supported(self, algorithm): + """ + Return True if the hash algorithm is supported for HMAC by this + backend. + """ + + @abc.abstractmethod + def create_hmac_ctx(self, key, algorithm): + """ + Create a context for calculating a message authentication code. + """ + + +@six.add_metaclass(abc.ABCMeta) +class CMACBackend(object): + @abc.abstractmethod + def cmac_algorithm_supported(self, algorithm): + """ + Returns True if the block cipher is supported for CMAC by this backend + """ + + @abc.abstractmethod + def create_cmac_ctx(self, algorithm): + """ + Create a context for calculating a message authentication code. + """ + + +@six.add_metaclass(abc.ABCMeta) +class PBKDF2HMACBackend(object): + @abc.abstractmethod + def pbkdf2_hmac_supported(self, algorithm): + """ + Return True if the hash algorithm is supported for PBKDF2 by this + backend. + """ + + @abc.abstractmethod + def derive_pbkdf2_hmac(self, algorithm, length, salt, iterations, + key_material): + """ + Return length bytes derived from provided PBKDF2 parameters. + """ + + +@six.add_metaclass(abc.ABCMeta) +class RSABackend(object): + @abc.abstractmethod + def generate_rsa_private_key(self, public_exponent, key_size): + """ + Generate an RSAPrivateKey instance with public_exponent and a modulus + of key_size bits. + """ + + @abc.abstractmethod + def rsa_padding_supported(self, padding): + """ + Returns True if the backend supports the given padding options. + """ + + @abc.abstractmethod + def generate_rsa_parameters_supported(self, public_exponent, key_size): + """ + Returns True if the backend supports the given parameters for key + generation. + """ + + @abc.abstractmethod + def load_rsa_private_numbers(self, numbers): + """ + Returns an RSAPrivateKey provider. + """ + + @abc.abstractmethod + def load_rsa_public_numbers(self, numbers): + """ + Returns an RSAPublicKey provider. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DSABackend(object): + @abc.abstractmethod + def generate_dsa_parameters(self, key_size): + """ + Generate a DSAParameters instance with a modulus of key_size bits. + """ + + @abc.abstractmethod + def generate_dsa_private_key(self, parameters): + """ + Generate a DSAPrivateKey instance with parameters as a DSAParameters + object. + """ + + @abc.abstractmethod + def generate_dsa_private_key_and_parameters(self, key_size): + """ + Generate a DSAPrivateKey instance using key size only. + """ + + @abc.abstractmethod + def dsa_hash_supported(self, algorithm): + """ + Return True if the hash algorithm is supported by the backend for DSA. + """ + + @abc.abstractmethod + def dsa_parameters_supported(self, p, q, g): + """ + Return True if the parameters are supported by the backend for DSA. + """ + + @abc.abstractmethod + def load_dsa_private_numbers(self, numbers): + """ + Returns a DSAPrivateKey provider. + """ + + @abc.abstractmethod + def load_dsa_public_numbers(self, numbers): + """ + Returns a DSAPublicKey provider. + """ + + @abc.abstractmethod + def load_dsa_parameter_numbers(self, numbers): + """ + Returns a DSAParameters provider. + """ + + +@six.add_metaclass(abc.ABCMeta) +class EllipticCurveBackend(object): + @abc.abstractmethod + def elliptic_curve_signature_algorithm_supported( + self, signature_algorithm, curve + ): + """ + Returns True if the backend supports the named elliptic curve with the + specified signature algorithm. 
+ """ + + @abc.abstractmethod + def elliptic_curve_supported(self, curve): + """ + Returns True if the backend supports the named elliptic curve. + """ + + @abc.abstractmethod + def generate_elliptic_curve_private_key(self, curve): + """ + Return an object conforming to the EllipticCurvePrivateKey interface. + """ + + @abc.abstractmethod + def load_elliptic_curve_public_numbers(self, numbers): + """ + Return an EllipticCurvePublicKey provider using the given numbers. + """ + + @abc.abstractmethod + def load_elliptic_curve_private_numbers(self, numbers): + """ + Return an EllipticCurvePrivateKey provider using the given numbers. + """ + + @abc.abstractmethod + def elliptic_curve_exchange_algorithm_supported(self, algorithm, curve): + """ + Returns whether the exchange algorithm is supported by this backend. + """ + + @abc.abstractmethod + def derive_elliptic_curve_private_key(self, private_value, curve): + """ + Compute the private key given the private value and curve. + """ + + +@six.add_metaclass(abc.ABCMeta) +class PEMSerializationBackend(object): + @abc.abstractmethod + def load_pem_private_key(self, data, password): + """ + Loads a private key from PEM encoded data, using the provided password + if the data is encrypted. + """ + + @abc.abstractmethod + def load_pem_public_key(self, data): + """ + Loads a public key from PEM encoded data. + """ + + @abc.abstractmethod + def load_pem_parameters(self, data): + """ + Load encryption parameters from PEM encoded data. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DERSerializationBackend(object): + @abc.abstractmethod + def load_der_private_key(self, data, password): + """ + Loads a private key from DER encoded data. Uses the provided password + if the data is encrypted. + """ + + @abc.abstractmethod + def load_der_public_key(self, data): + """ + Loads a public key from DER encoded data. + """ + + @abc.abstractmethod + def load_der_parameters(self, data): + """ + Load encryption parameters from DER encoded data. + """ + + +@six.add_metaclass(abc.ABCMeta) +class X509Backend(object): + @abc.abstractmethod + def load_pem_x509_certificate(self, data): + """ + Load an X.509 certificate from PEM encoded data. + """ + + @abc.abstractmethod + def load_der_x509_certificate(self, data): + """ + Load an X.509 certificate from DER encoded data. + """ + + @abc.abstractmethod + def load_der_x509_csr(self, data): + """ + Load an X.509 CSR from DER encoded data. + """ + + @abc.abstractmethod + def load_pem_x509_csr(self, data): + """ + Load an X.509 CSR from PEM encoded data. + """ + + @abc.abstractmethod + def create_x509_csr(self, builder, private_key, algorithm): + """ + Create and sign an X.509 CSR from a CSR builder object. + """ + + @abc.abstractmethod + def create_x509_certificate(self, builder, private_key, algorithm): + """ + Create and sign an X.509 certificate from a CertificateBuilder object. + """ + + @abc.abstractmethod + def create_x509_crl(self, builder, private_key, algorithm): + """ + Create and sign an X.509 CertificateRevocationList from a + CertificateRevocationListBuilder object. + """ + + @abc.abstractmethod + def create_x509_revoked_certificate(self, builder): + """ + Create a RevokedCertificate object from a RevokedCertificateBuilder + object. + """ + + @abc.abstractmethod + def x509_name_bytes(self, name): + """ + Compute the DER encoded bytes of an X509 Name object. 
+ """ + + +@six.add_metaclass(abc.ABCMeta) +class DHBackend(object): + @abc.abstractmethod + def generate_dh_parameters(self, generator, key_size): + """ + Generate a DHParameters instance with a modulus of key_size bits. + Using the given generator. Often 2 or 5. + """ + + @abc.abstractmethod + def generate_dh_private_key(self, parameters): + """ + Generate a DHPrivateKey instance with parameters as a DHParameters + object. + """ + + @abc.abstractmethod + def generate_dh_private_key_and_parameters(self, generator, key_size): + """ + Generate a DHPrivateKey instance using key size only. + Using the given generator. Often 2 or 5. + """ + + @abc.abstractmethod + def load_dh_private_numbers(self, numbers): + """ + Load a DHPrivateKey from DHPrivateNumbers + """ + + @abc.abstractmethod + def load_dh_public_numbers(self, numbers): + """ + Load a DHPublicKey from DHPublicNumbers. + """ + + @abc.abstractmethod + def load_dh_parameter_numbers(self, numbers): + """ + Load DHParameters from DHParameterNumbers. + """ + + @abc.abstractmethod + def dh_parameters_supported(self, p, g, q=None): + """ + Returns whether the backend supports DH with these parameter values. + """ + + @abc.abstractmethod + def dh_x942_serialization_supported(self): + """ + Returns True if the backend supports the serialization of DH objects + with subgroup order (q). + """ + + +@six.add_metaclass(abc.ABCMeta) +class ScryptBackend(object): + @abc.abstractmethod + def derive_scrypt(self, key_material, salt, length, n, r, p): + """ + Return bytes derived from provided Scrypt parameters. + """ diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/__init__.py b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/__init__.py new file mode 100644 index 0000000..8eadeb6 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/__init__.py @@ -0,0 +1,10 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +from cryptography.hazmat.backends.openssl.backend import backend + + +__all__ = ["backend"] diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/aead.py b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/aead.py new file mode 100644 index 0000000..0cad15c --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/aead.py @@ -0,0 +1,162 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import absolute_import, division, print_function + +from cryptography.exceptions import InvalidTag + + +_ENCRYPT = 1 +_DECRYPT = 0 + + +def _aead_cipher_name(cipher): + from cryptography.hazmat.primitives.ciphers.aead import ( + AESCCM, AESGCM, ChaCha20Poly1305 + ) + if isinstance(cipher, ChaCha20Poly1305): + return b"chacha20-poly1305" + elif isinstance(cipher, AESCCM): + return "aes-{}-ccm".format(len(cipher._key) * 8).encode("ascii") + else: + assert isinstance(cipher, AESGCM) + return "aes-{}-gcm".format(len(cipher._key) * 8).encode("ascii") + + +def _aead_setup(backend, cipher_name, key, nonce, tag, tag_len, operation): + evp_cipher = backend._lib.EVP_get_cipherbyname(cipher_name) + backend.openssl_assert(evp_cipher != backend._ffi.NULL) + ctx = backend._lib.EVP_CIPHER_CTX_new() + ctx = backend._ffi.gc(ctx, backend._lib.EVP_CIPHER_CTX_free) + res = backend._lib.EVP_CipherInit_ex( + ctx, evp_cipher, + backend._ffi.NULL, + backend._ffi.NULL, + backend._ffi.NULL, + int(operation == _ENCRYPT) + ) + backend.openssl_assert(res != 0) + res = backend._lib.EVP_CIPHER_CTX_set_key_length(ctx, len(key)) + backend.openssl_assert(res != 0) + res = backend._lib.EVP_CIPHER_CTX_ctrl( + ctx, backend._lib.EVP_CTRL_AEAD_SET_IVLEN, len(nonce), + backend._ffi.NULL + ) + backend.openssl_assert(res != 0) + if operation == _DECRYPT: + res = backend._lib.EVP_CIPHER_CTX_ctrl( + ctx, backend._lib.EVP_CTRL_AEAD_SET_TAG, len(tag), tag + ) + backend.openssl_assert(res != 0) + elif cipher_name.endswith(b"-ccm"): + res = backend._lib.EVP_CIPHER_CTX_ctrl( + ctx, backend._lib.EVP_CTRL_AEAD_SET_TAG, tag_len, backend._ffi.NULL + ) + backend.openssl_assert(res != 0) + + nonce_ptr = backend._ffi.from_buffer(nonce) + key_ptr = backend._ffi.from_buffer(key) + res = backend._lib.EVP_CipherInit_ex( + ctx, + backend._ffi.NULL, + backend._ffi.NULL, + key_ptr, + nonce_ptr, + int(operation == _ENCRYPT) + ) + backend.openssl_assert(res != 0) + return ctx + + +def _set_length(backend, ctx, data_len): + intptr = backend._ffi.new("int *") + res = backend._lib.EVP_CipherUpdate( + ctx, + backend._ffi.NULL, + intptr, + backend._ffi.NULL, + data_len + ) + backend.openssl_assert(res != 0) + + +def _process_aad(backend, ctx, associated_data): + outlen = backend._ffi.new("int *") + res = backend._lib.EVP_CipherUpdate( + ctx, backend._ffi.NULL, outlen, associated_data, len(associated_data) + ) + backend.openssl_assert(res != 0) + + +def _process_data(backend, ctx, data): + outlen = backend._ffi.new("int *") + buf = backend._ffi.new("unsigned char[]", len(data)) + res = backend._lib.EVP_CipherUpdate(ctx, buf, outlen, data, len(data)) + backend.openssl_assert(res != 0) + return backend._ffi.buffer(buf, outlen[0])[:] + + +def _encrypt(backend, cipher, nonce, data, associated_data, tag_length): + from cryptography.hazmat.primitives.ciphers.aead import AESCCM + cipher_name = _aead_cipher_name(cipher) + ctx = _aead_setup( + backend, cipher_name, cipher._key, nonce, None, tag_length, _ENCRYPT + ) + # CCM requires us to pass the length of the data before processing anything + # However calling this with any other AEAD results in an error + if isinstance(cipher, AESCCM): + _set_length(backend, ctx, len(data)) + + _process_aad(backend, ctx, associated_data) + processed_data = _process_data(backend, ctx, data) + outlen = backend._ffi.new("int *") + res = backend._lib.EVP_CipherFinal_ex(ctx, backend._ffi.NULL, outlen) + backend.openssl_assert(res != 0) + backend.openssl_assert(outlen[0] == 0) + tag_buf = backend._ffi.new("unsigned 
char[]", tag_length) + res = backend._lib.EVP_CIPHER_CTX_ctrl( + ctx, backend._lib.EVP_CTRL_AEAD_GET_TAG, tag_length, tag_buf + ) + backend.openssl_assert(res != 0) + tag = backend._ffi.buffer(tag_buf)[:] + + return processed_data + tag + + +def _decrypt(backend, cipher, nonce, data, associated_data, tag_length): + from cryptography.hazmat.primitives.ciphers.aead import AESCCM + if len(data) < tag_length: + raise InvalidTag + tag = data[-tag_length:] + data = data[:-tag_length] + cipher_name = _aead_cipher_name(cipher) + ctx = _aead_setup( + backend, cipher_name, cipher._key, nonce, tag, tag_length, _DECRYPT + ) + # CCM requires us to pass the length of the data before processing anything + # However calling this with any other AEAD results in an error + if isinstance(cipher, AESCCM): + _set_length(backend, ctx, len(data)) + + _process_aad(backend, ctx, associated_data) + # CCM has a different error path if the tag doesn't match. Errors are + # raised in Update and Final is irrelevant. + if isinstance(cipher, AESCCM): + outlen = backend._ffi.new("int *") + buf = backend._ffi.new("unsigned char[]", len(data)) + res = backend._lib.EVP_CipherUpdate(ctx, buf, outlen, data, len(data)) + if res != 1: + backend._consume_errors() + raise InvalidTag + + processed_data = backend._ffi.buffer(buf, outlen[0])[:] + else: + processed_data = _process_data(backend, ctx, data) + outlen = backend._ffi.new("int *") + res = backend._lib.EVP_CipherFinal_ex(ctx, backend._ffi.NULL, outlen) + if res == 0: + backend._consume_errors() + raise InvalidTag + + return processed_data diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/backend.py b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/backend.py new file mode 100644 index 0000000..7e9fa20 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/backend.py @@ -0,0 +1,2483 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import absolute_import, division, print_function + +import base64 +import collections +import contextlib +import itertools +from contextlib import contextmanager + +import six +from six.moves import range + +from cryptography import utils, x509 +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat._der import ( + INTEGER, NULL, SEQUENCE, encode_der, encode_der_integer +) +from cryptography.hazmat.backends.interfaces import ( + CMACBackend, CipherBackend, DERSerializationBackend, DHBackend, DSABackend, + EllipticCurveBackend, HMACBackend, HashBackend, PBKDF2HMACBackend, + PEMSerializationBackend, RSABackend, ScryptBackend, X509Backend +) +from cryptography.hazmat.backends.openssl import aead +from cryptography.hazmat.backends.openssl.ciphers import _CipherContext +from cryptography.hazmat.backends.openssl.cmac import _CMACContext +from cryptography.hazmat.backends.openssl.decode_asn1 import ( + _CRL_ENTRY_REASON_ENUM_TO_CODE +) +from cryptography.hazmat.backends.openssl.dh import ( + _DHParameters, _DHPrivateKey, _DHPublicKey, _dh_params_dup +) +from cryptography.hazmat.backends.openssl.dsa import ( + _DSAParameters, _DSAPrivateKey, _DSAPublicKey +) +from cryptography.hazmat.backends.openssl.ec import ( + _EllipticCurvePrivateKey, _EllipticCurvePublicKey +) +from cryptography.hazmat.backends.openssl.ed25519 import ( + _Ed25519PrivateKey, _Ed25519PublicKey +) +from cryptography.hazmat.backends.openssl.ed448 import ( + _ED448_KEY_SIZE, _Ed448PrivateKey, _Ed448PublicKey +) +from cryptography.hazmat.backends.openssl.encode_asn1 import ( + _CRL_ENTRY_EXTENSION_ENCODE_HANDLERS, + _CRL_EXTENSION_ENCODE_HANDLERS, _EXTENSION_ENCODE_HANDLERS, + _OCSP_BASICRESP_EXTENSION_ENCODE_HANDLERS, + _OCSP_REQUEST_EXTENSION_ENCODE_HANDLERS, + _encode_asn1_int_gc, _encode_asn1_str_gc, _encode_name_gc, _txt2obj_gc, +) +from cryptography.hazmat.backends.openssl.hashes import _HashContext +from cryptography.hazmat.backends.openssl.hmac import _HMACContext +from cryptography.hazmat.backends.openssl.ocsp import ( + _OCSPRequest, _OCSPResponse +) +from cryptography.hazmat.backends.openssl.poly1305 import ( + _POLY1305_KEY_SIZE, _Poly1305Context +) +from cryptography.hazmat.backends.openssl.rsa import ( + _RSAPrivateKey, _RSAPublicKey +) +from cryptography.hazmat.backends.openssl.x25519 import ( + _X25519PrivateKey, _X25519PublicKey +) +from cryptography.hazmat.backends.openssl.x448 import ( + _X448PrivateKey, _X448PublicKey +) +from cryptography.hazmat.backends.openssl.x509 import ( + _Certificate, _CertificateRevocationList, + _CertificateSigningRequest, _RevokedCertificate +) +from cryptography.hazmat.bindings.openssl import binding +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import ( + dsa, ec, ed25519, ed448, rsa +) +from cryptography.hazmat.primitives.asymmetric.padding import ( + MGF1, OAEP, PKCS1v15, PSS +) +from cryptography.hazmat.primitives.ciphers.algorithms import ( + AES, ARC4, Blowfish, CAST5, Camellia, ChaCha20, IDEA, SEED, TripleDES +) +from cryptography.hazmat.primitives.ciphers.modes import ( + CBC, CFB, CFB8, CTR, ECB, GCM, OFB, XTS +) +from cryptography.hazmat.primitives.kdf import scrypt +from cryptography.hazmat.primitives.serialization import ssh +from cryptography.x509 import ocsp + + +_MemoryBIO = collections.namedtuple("_MemoryBIO", ["bio", "char_ptr"]) + + +@utils.register_interface(CipherBackend) +@utils.register_interface(CMACBackend) 
+@utils.register_interface(DERSerializationBackend) +@utils.register_interface(DHBackend) +@utils.register_interface(DSABackend) +@utils.register_interface(EllipticCurveBackend) +@utils.register_interface(HashBackend) +@utils.register_interface(HMACBackend) +@utils.register_interface(PBKDF2HMACBackend) +@utils.register_interface(RSABackend) +@utils.register_interface(PEMSerializationBackend) +@utils.register_interface(X509Backend) +@utils.register_interface_if( + binding.Binding().lib.Cryptography_HAS_SCRYPT, ScryptBackend +) +class Backend(object): + """ + OpenSSL API binding interfaces. + """ + name = "openssl" + + def __init__(self): + self._binding = binding.Binding() + self._ffi = self._binding.ffi + self._lib = self._binding.lib + + self._cipher_registry = {} + self._register_default_ciphers() + self.activate_osrandom_engine() + self._dh_types = [self._lib.EVP_PKEY_DH] + if self._lib.Cryptography_HAS_EVP_PKEY_DHX: + self._dh_types.append(self._lib.EVP_PKEY_DHX) + + def openssl_assert(self, ok): + return binding._openssl_assert(self._lib, ok) + + def activate_builtin_random(self): + if self._lib.Cryptography_HAS_ENGINE: + # Obtain a new structural reference. + e = self._lib.ENGINE_get_default_RAND() + if e != self._ffi.NULL: + self._lib.ENGINE_unregister_RAND(e) + # Reset the RNG to use the built-in. + res = self._lib.RAND_set_rand_method(self._ffi.NULL) + self.openssl_assert(res == 1) + # decrement the structural reference from get_default_RAND + res = self._lib.ENGINE_finish(e) + self.openssl_assert(res == 1) + + @contextlib.contextmanager + def _get_osurandom_engine(self): + # Fetches an engine by id and returns it. This creates a structural + # reference. + e = self._lib.ENGINE_by_id(self._lib.Cryptography_osrandom_engine_id) + self.openssl_assert(e != self._ffi.NULL) + # Initialize the engine for use. This adds a functional reference. + res = self._lib.ENGINE_init(e) + self.openssl_assert(res == 1) + + try: + yield e + finally: + # Decrement the structural ref incremented by ENGINE_by_id. + res = self._lib.ENGINE_free(e) + self.openssl_assert(res == 1) + # Decrement the functional ref incremented by ENGINE_init. + res = self._lib.ENGINE_finish(e) + self.openssl_assert(res == 1) + + def activate_osrandom_engine(self): + if self._lib.Cryptography_HAS_ENGINE: + # Unregister and free the current engine. + self.activate_builtin_random() + with self._get_osurandom_engine() as e: + # Set the engine as the default RAND provider. + res = self._lib.ENGINE_set_default_RAND(e) + self.openssl_assert(res == 1) + # Reset the RNG to use the engine + res = self._lib.RAND_set_rand_method(self._ffi.NULL) + self.openssl_assert(res == 1) + + def osrandom_engine_implementation(self): + buf = self._ffi.new("char[]", 64) + with self._get_osurandom_engine() as e: + res = self._lib.ENGINE_ctrl_cmd(e, b"get_implementation", + len(buf), buf, + self._ffi.NULL, 0) + self.openssl_assert(res > 0) + return self._ffi.string(buf).decode('ascii') + + def openssl_version_text(self): + """ + Friendly string name of the loaded OpenSSL library. This is not + necessarily the same version as it was compiled against. 
+ + Example: OpenSSL 1.0.1e 11 Feb 2013 + """ + return self._ffi.string( + self._lib.OpenSSL_version(self._lib.OPENSSL_VERSION) + ).decode("ascii") + + def openssl_version_number(self): + return self._lib.OpenSSL_version_num() + + def create_hmac_ctx(self, key, algorithm): + return _HMACContext(self, key, algorithm) + + def _evp_md_from_algorithm(self, algorithm): + if algorithm.name == "blake2b" or algorithm.name == "blake2s": + alg = "{}{}".format( + algorithm.name, algorithm.digest_size * 8 + ).encode("ascii") + else: + alg = algorithm.name.encode("ascii") + + evp_md = self._lib.EVP_get_digestbyname(alg) + return evp_md + + def _evp_md_non_null_from_algorithm(self, algorithm): + evp_md = self._evp_md_from_algorithm(algorithm) + self.openssl_assert(evp_md != self._ffi.NULL) + return evp_md + + def hash_supported(self, algorithm): + evp_md = self._evp_md_from_algorithm(algorithm) + return evp_md != self._ffi.NULL + + def hmac_supported(self, algorithm): + return self.hash_supported(algorithm) + + def create_hash_ctx(self, algorithm): + return _HashContext(self, algorithm) + + def cipher_supported(self, cipher, mode): + try: + adapter = self._cipher_registry[type(cipher), type(mode)] + except KeyError: + return False + evp_cipher = adapter(self, cipher, mode) + return self._ffi.NULL != evp_cipher + + def register_cipher_adapter(self, cipher_cls, mode_cls, adapter): + if (cipher_cls, mode_cls) in self._cipher_registry: + raise ValueError("Duplicate registration for: {} {}.".format( + cipher_cls, mode_cls) + ) + self._cipher_registry[cipher_cls, mode_cls] = adapter + + def _register_default_ciphers(self): + for mode_cls in [CBC, CTR, ECB, OFB, CFB, CFB8, GCM]: + self.register_cipher_adapter( + AES, + mode_cls, + GetCipherByName("{cipher.name}-{cipher.key_size}-{mode.name}") + ) + for mode_cls in [CBC, CTR, ECB, OFB, CFB]: + self.register_cipher_adapter( + Camellia, + mode_cls, + GetCipherByName("{cipher.name}-{cipher.key_size}-{mode.name}") + ) + for mode_cls in [CBC, CFB, CFB8, OFB]: + self.register_cipher_adapter( + TripleDES, + mode_cls, + GetCipherByName("des-ede3-{mode.name}") + ) + self.register_cipher_adapter( + TripleDES, + ECB, + GetCipherByName("des-ede3") + ) + for mode_cls in [CBC, CFB, OFB, ECB]: + self.register_cipher_adapter( + Blowfish, + mode_cls, + GetCipherByName("bf-{mode.name}") + ) + for mode_cls in [CBC, CFB, OFB, ECB]: + self.register_cipher_adapter( + SEED, + mode_cls, + GetCipherByName("seed-{mode.name}") + ) + for cipher_cls, mode_cls in itertools.product( + [CAST5, IDEA], + [CBC, OFB, CFB, ECB], + ): + self.register_cipher_adapter( + cipher_cls, + mode_cls, + GetCipherByName("{cipher.name}-{mode.name}") + ) + self.register_cipher_adapter( + ARC4, + type(None), + GetCipherByName("rc4") + ) + self.register_cipher_adapter( + ChaCha20, + type(None), + GetCipherByName("chacha20") + ) + self.register_cipher_adapter(AES, XTS, _get_xts_cipher) + + def create_symmetric_encryption_ctx(self, cipher, mode): + return _CipherContext(self, cipher, mode, _CipherContext._ENCRYPT) + + def create_symmetric_decryption_ctx(self, cipher, mode): + return _CipherContext(self, cipher, mode, _CipherContext._DECRYPT) + + def pbkdf2_hmac_supported(self, algorithm): + return self.hmac_supported(algorithm) + + def derive_pbkdf2_hmac(self, algorithm, length, salt, iterations, + key_material): + buf = self._ffi.new("unsigned char[]", length) + evp_md = self._evp_md_non_null_from_algorithm(algorithm) + key_material_ptr = self._ffi.from_buffer(key_material) + res = self._lib.PKCS5_PBKDF2_HMAC( + 
key_material_ptr, + len(key_material), + salt, + len(salt), + iterations, + evp_md, + length, + buf + ) + self.openssl_assert(res == 1) + return self._ffi.buffer(buf)[:] + + def _consume_errors(self): + return binding._consume_errors(self._lib) + + def _bn_to_int(self, bn): + assert bn != self._ffi.NULL + + if not six.PY2: + # Python 3 has constant time from_bytes, so use that. + bn_num_bytes = self._lib.BN_num_bytes(bn) + bin_ptr = self._ffi.new("unsigned char[]", bn_num_bytes) + bin_len = self._lib.BN_bn2bin(bn, bin_ptr) + # A zero length means the BN has value 0 + self.openssl_assert(bin_len >= 0) + val = int.from_bytes(self._ffi.buffer(bin_ptr)[:bin_len], "big") + if self._lib.BN_is_negative(bn): + val = -val + return val + else: + # Under Python 2 the best we can do is hex() + hex_cdata = self._lib.BN_bn2hex(bn) + self.openssl_assert(hex_cdata != self._ffi.NULL) + hex_str = self._ffi.string(hex_cdata) + self._lib.OPENSSL_free(hex_cdata) + return int(hex_str, 16) + + def _int_to_bn(self, num, bn=None): + """ + Converts a python integer to a BIGNUM. The returned BIGNUM will not + be garbage collected (to support adding them to structs that take + ownership of the object). Be sure to register it for GC if it will + be discarded after use. + """ + assert bn is None or bn != self._ffi.NULL + + if bn is None: + bn = self._ffi.NULL + + if not six.PY2: + # Python 3 has constant time to_bytes, so use that. + + binary = num.to_bytes(int(num.bit_length() / 8.0 + 1), "big") + bn_ptr = self._lib.BN_bin2bn(binary, len(binary), bn) + self.openssl_assert(bn_ptr != self._ffi.NULL) + return bn_ptr + + else: + # Under Python 2 the best we can do is hex(), [2:] removes the 0x + # prefix. + hex_num = hex(num).rstrip("L")[2:].encode("ascii") + bn_ptr = self._ffi.new("BIGNUM **") + bn_ptr[0] = bn + res = self._lib.BN_hex2bn(bn_ptr, hex_num) + self.openssl_assert(res != 0) + self.openssl_assert(bn_ptr[0] != self._ffi.NULL) + return bn_ptr[0] + + def generate_rsa_private_key(self, public_exponent, key_size): + rsa._verify_rsa_parameters(public_exponent, key_size) + + rsa_cdata = self._lib.RSA_new() + self.openssl_assert(rsa_cdata != self._ffi.NULL) + rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) + + bn = self._int_to_bn(public_exponent) + bn = self._ffi.gc(bn, self._lib.BN_free) + + res = self._lib.RSA_generate_key_ex( + rsa_cdata, key_size, bn, self._ffi.NULL + ) + self.openssl_assert(res == 1) + evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata) + + return _RSAPrivateKey(self, rsa_cdata, evp_pkey) + + def generate_rsa_parameters_supported(self, public_exponent, key_size): + return (public_exponent >= 3 and public_exponent & 1 != 0 and + key_size >= 512) + + def load_rsa_private_numbers(self, numbers): + rsa._check_private_key_components( + numbers.p, + numbers.q, + numbers.d, + numbers.dmp1, + numbers.dmq1, + numbers.iqmp, + numbers.public_numbers.e, + numbers.public_numbers.n + ) + rsa_cdata = self._lib.RSA_new() + self.openssl_assert(rsa_cdata != self._ffi.NULL) + rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) + p = self._int_to_bn(numbers.p) + q = self._int_to_bn(numbers.q) + d = self._int_to_bn(numbers.d) + dmp1 = self._int_to_bn(numbers.dmp1) + dmq1 = self._int_to_bn(numbers.dmq1) + iqmp = self._int_to_bn(numbers.iqmp) + e = self._int_to_bn(numbers.public_numbers.e) + n = self._int_to_bn(numbers.public_numbers.n) + res = self._lib.RSA_set0_factors(rsa_cdata, p, q) + self.openssl_assert(res == 1) + res = self._lib.RSA_set0_key(rsa_cdata, n, e, d) + self.openssl_assert(res == 1) + res = 
self._lib.RSA_set0_crt_params(rsa_cdata, dmp1, dmq1, iqmp) + self.openssl_assert(res == 1) + res = self._lib.RSA_blinding_on(rsa_cdata, self._ffi.NULL) + self.openssl_assert(res == 1) + evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata) + + return _RSAPrivateKey(self, rsa_cdata, evp_pkey) + + def load_rsa_public_numbers(self, numbers): + rsa._check_public_key_components(numbers.e, numbers.n) + rsa_cdata = self._lib.RSA_new() + self.openssl_assert(rsa_cdata != self._ffi.NULL) + rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) + e = self._int_to_bn(numbers.e) + n = self._int_to_bn(numbers.n) + res = self._lib.RSA_set0_key(rsa_cdata, n, e, self._ffi.NULL) + self.openssl_assert(res == 1) + evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata) + + return _RSAPublicKey(self, rsa_cdata, evp_pkey) + + def _create_evp_pkey_gc(self): + evp_pkey = self._lib.EVP_PKEY_new() + self.openssl_assert(evp_pkey != self._ffi.NULL) + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + return evp_pkey + + def _rsa_cdata_to_evp_pkey(self, rsa_cdata): + evp_pkey = self._create_evp_pkey_gc() + res = self._lib.EVP_PKEY_set1_RSA(evp_pkey, rsa_cdata) + self.openssl_assert(res == 1) + return evp_pkey + + def _bytes_to_bio(self, data): + """ + Return a _MemoryBIO namedtuple of (BIO, char*). + + The char* is the storage for the BIO and it must stay alive until the + BIO is finished with. + """ + data_ptr = self._ffi.from_buffer(data) + bio = self._lib.BIO_new_mem_buf( + data_ptr, len(data) + ) + self.openssl_assert(bio != self._ffi.NULL) + + return _MemoryBIO(self._ffi.gc(bio, self._lib.BIO_free), data_ptr) + + def _create_mem_bio_gc(self): + """ + Creates an empty memory BIO. + """ + bio_method = self._lib.BIO_s_mem() + self.openssl_assert(bio_method != self._ffi.NULL) + bio = self._lib.BIO_new(bio_method) + self.openssl_assert(bio != self._ffi.NULL) + bio = self._ffi.gc(bio, self._lib.BIO_free) + return bio + + def _read_mem_bio(self, bio): + """ + Reads a memory BIO. This only works on memory BIOs. + """ + buf = self._ffi.new("char **") + buf_len = self._lib.BIO_get_mem_data(bio, buf) + self.openssl_assert(buf_len > 0) + self.openssl_assert(buf[0] != self._ffi.NULL) + bio_data = self._ffi.buffer(buf[0], buf_len)[:] + return bio_data + + def _evp_pkey_to_private_key(self, evp_pkey): + """ + Return the appropriate type of PrivateKey given an evp_pkey cdata + pointer. 
+ """ + + key_type = self._lib.EVP_PKEY_id(evp_pkey) + + if key_type == self._lib.EVP_PKEY_RSA: + rsa_cdata = self._lib.EVP_PKEY_get1_RSA(evp_pkey) + self.openssl_assert(rsa_cdata != self._ffi.NULL) + rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) + return _RSAPrivateKey(self, rsa_cdata, evp_pkey) + elif key_type == self._lib.EVP_PKEY_DSA: + dsa_cdata = self._lib.EVP_PKEY_get1_DSA(evp_pkey) + self.openssl_assert(dsa_cdata != self._ffi.NULL) + dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free) + return _DSAPrivateKey(self, dsa_cdata, evp_pkey) + elif key_type == self._lib.EVP_PKEY_EC: + ec_cdata = self._lib.EVP_PKEY_get1_EC_KEY(evp_pkey) + self.openssl_assert(ec_cdata != self._ffi.NULL) + ec_cdata = self._ffi.gc(ec_cdata, self._lib.EC_KEY_free) + return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey) + elif key_type in self._dh_types: + dh_cdata = self._lib.EVP_PKEY_get1_DH(evp_pkey) + self.openssl_assert(dh_cdata != self._ffi.NULL) + dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free) + return _DHPrivateKey(self, dh_cdata, evp_pkey) + elif key_type == getattr(self._lib, "EVP_PKEY_ED25519", None): + # EVP_PKEY_ED25519 is not present in OpenSSL < 1.1.1 + return _Ed25519PrivateKey(self, evp_pkey) + elif key_type == getattr(self._lib, "EVP_PKEY_X448", None): + # EVP_PKEY_X448 is not present in OpenSSL < 1.1.1 + return _X448PrivateKey(self, evp_pkey) + elif key_type == getattr(self._lib, "EVP_PKEY_X25519", None): + # EVP_PKEY_X25519 is not present in OpenSSL < 1.1.0 + return _X25519PrivateKey(self, evp_pkey) + elif key_type == getattr(self._lib, "EVP_PKEY_ED448", None): + # EVP_PKEY_ED448 is not present in OpenSSL < 1.1.1 + return _Ed448PrivateKey(self, evp_pkey) + else: + raise UnsupportedAlgorithm("Unsupported key type.") + + def _evp_pkey_to_public_key(self, evp_pkey): + """ + Return the appropriate type of PublicKey given an evp_pkey cdata + pointer. 
+ """ + + key_type = self._lib.EVP_PKEY_id(evp_pkey) + + if key_type == self._lib.EVP_PKEY_RSA: + rsa_cdata = self._lib.EVP_PKEY_get1_RSA(evp_pkey) + self.openssl_assert(rsa_cdata != self._ffi.NULL) + rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) + return _RSAPublicKey(self, rsa_cdata, evp_pkey) + elif key_type == self._lib.EVP_PKEY_DSA: + dsa_cdata = self._lib.EVP_PKEY_get1_DSA(evp_pkey) + self.openssl_assert(dsa_cdata != self._ffi.NULL) + dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free) + return _DSAPublicKey(self, dsa_cdata, evp_pkey) + elif key_type == self._lib.EVP_PKEY_EC: + ec_cdata = self._lib.EVP_PKEY_get1_EC_KEY(evp_pkey) + self.openssl_assert(ec_cdata != self._ffi.NULL) + ec_cdata = self._ffi.gc(ec_cdata, self._lib.EC_KEY_free) + return _EllipticCurvePublicKey(self, ec_cdata, evp_pkey) + elif key_type in self._dh_types: + dh_cdata = self._lib.EVP_PKEY_get1_DH(evp_pkey) + self.openssl_assert(dh_cdata != self._ffi.NULL) + dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free) + return _DHPublicKey(self, dh_cdata, evp_pkey) + elif key_type == getattr(self._lib, "EVP_PKEY_ED25519", None): + # EVP_PKEY_ED25519 is not present in OpenSSL < 1.1.1 + return _Ed25519PublicKey(self, evp_pkey) + elif key_type == getattr(self._lib, "EVP_PKEY_X448", None): + # EVP_PKEY_X448 is not present in OpenSSL < 1.1.1 + return _X448PublicKey(self, evp_pkey) + elif key_type == getattr(self._lib, "EVP_PKEY_X25519", None): + # EVP_PKEY_X25519 is not present in OpenSSL < 1.1.0 + return _X25519PublicKey(self, evp_pkey) + elif key_type == getattr(self._lib, "EVP_PKEY_ED448", None): + # EVP_PKEY_X25519 is not present in OpenSSL < 1.1.1 + return _Ed448PublicKey(self, evp_pkey) + else: + raise UnsupportedAlgorithm("Unsupported key type.") + + def _oaep_hash_supported(self, algorithm): + if self._lib.Cryptography_HAS_RSA_OAEP_MD: + return isinstance( + algorithm, ( + hashes.SHA1, + hashes.SHA224, + hashes.SHA256, + hashes.SHA384, + hashes.SHA512, + ) + ) + else: + return isinstance(algorithm, hashes.SHA1) + + def rsa_padding_supported(self, padding): + if isinstance(padding, PKCS1v15): + return True + elif isinstance(padding, PSS) and isinstance(padding._mgf, MGF1): + return self.hash_supported(padding._mgf._algorithm) + elif isinstance(padding, OAEP) and isinstance(padding._mgf, MGF1): + return ( + self._oaep_hash_supported(padding._mgf._algorithm) and + self._oaep_hash_supported(padding._algorithm) and + ( + (padding._label is None or len(padding._label) == 0) or + self._lib.Cryptography_HAS_RSA_OAEP_LABEL == 1 + ) + ) + else: + return False + + def generate_dsa_parameters(self, key_size): + if key_size not in (1024, 2048, 3072): + raise ValueError("Key size must be 1024 or 2048 or 3072 bits.") + + ctx = self._lib.DSA_new() + self.openssl_assert(ctx != self._ffi.NULL) + ctx = self._ffi.gc(ctx, self._lib.DSA_free) + + res = self._lib.DSA_generate_parameters_ex( + ctx, key_size, self._ffi.NULL, 0, + self._ffi.NULL, self._ffi.NULL, self._ffi.NULL + ) + + self.openssl_assert(res == 1) + + return _DSAParameters(self, ctx) + + def generate_dsa_private_key(self, parameters): + ctx = self._lib.DSAparams_dup(parameters._dsa_cdata) + self.openssl_assert(ctx != self._ffi.NULL) + ctx = self._ffi.gc(ctx, self._lib.DSA_free) + self._lib.DSA_generate_key(ctx) + evp_pkey = self._dsa_cdata_to_evp_pkey(ctx) + + return _DSAPrivateKey(self, ctx, evp_pkey) + + def generate_dsa_private_key_and_parameters(self, key_size): + parameters = self.generate_dsa_parameters(key_size) + return 
self.generate_dsa_private_key(parameters) + + def _dsa_cdata_set_values(self, dsa_cdata, p, q, g, pub_key, priv_key): + res = self._lib.DSA_set0_pqg(dsa_cdata, p, q, g) + self.openssl_assert(res == 1) + res = self._lib.DSA_set0_key(dsa_cdata, pub_key, priv_key) + self.openssl_assert(res == 1) + + def load_dsa_private_numbers(self, numbers): + dsa._check_dsa_private_numbers(numbers) + parameter_numbers = numbers.public_numbers.parameter_numbers + + dsa_cdata = self._lib.DSA_new() + self.openssl_assert(dsa_cdata != self._ffi.NULL) + dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free) + + p = self._int_to_bn(parameter_numbers.p) + q = self._int_to_bn(parameter_numbers.q) + g = self._int_to_bn(parameter_numbers.g) + pub_key = self._int_to_bn(numbers.public_numbers.y) + priv_key = self._int_to_bn(numbers.x) + self._dsa_cdata_set_values(dsa_cdata, p, q, g, pub_key, priv_key) + + evp_pkey = self._dsa_cdata_to_evp_pkey(dsa_cdata) + + return _DSAPrivateKey(self, dsa_cdata, evp_pkey) + + def load_dsa_public_numbers(self, numbers): + dsa._check_dsa_parameters(numbers.parameter_numbers) + dsa_cdata = self._lib.DSA_new() + self.openssl_assert(dsa_cdata != self._ffi.NULL) + dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free) + + p = self._int_to_bn(numbers.parameter_numbers.p) + q = self._int_to_bn(numbers.parameter_numbers.q) + g = self._int_to_bn(numbers.parameter_numbers.g) + pub_key = self._int_to_bn(numbers.y) + priv_key = self._ffi.NULL + self._dsa_cdata_set_values(dsa_cdata, p, q, g, pub_key, priv_key) + + evp_pkey = self._dsa_cdata_to_evp_pkey(dsa_cdata) + + return _DSAPublicKey(self, dsa_cdata, evp_pkey) + + def load_dsa_parameter_numbers(self, numbers): + dsa._check_dsa_parameters(numbers) + dsa_cdata = self._lib.DSA_new() + self.openssl_assert(dsa_cdata != self._ffi.NULL) + dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free) + + p = self._int_to_bn(numbers.p) + q = self._int_to_bn(numbers.q) + g = self._int_to_bn(numbers.g) + res = self._lib.DSA_set0_pqg(dsa_cdata, p, q, g) + self.openssl_assert(res == 1) + + return _DSAParameters(self, dsa_cdata) + + def _dsa_cdata_to_evp_pkey(self, dsa_cdata): + evp_pkey = self._create_evp_pkey_gc() + res = self._lib.EVP_PKEY_set1_DSA(evp_pkey, dsa_cdata) + self.openssl_assert(res == 1) + return evp_pkey + + def dsa_hash_supported(self, algorithm): + return self.hash_supported(algorithm) + + def dsa_parameters_supported(self, p, q, g): + return True + + def cmac_algorithm_supported(self, algorithm): + return self.cipher_supported( + algorithm, CBC(b"\x00" * algorithm.block_size) + ) + + def create_cmac_ctx(self, algorithm): + return _CMACContext(self, algorithm) + + def create_x509_csr(self, builder, private_key, algorithm): + if not isinstance(builder, x509.CertificateSigningRequestBuilder): + raise TypeError('Builder type mismatch.') + + if isinstance(private_key, + (ed25519.Ed25519PrivateKey, ed448.Ed448PrivateKey)): + if algorithm is not None: + raise ValueError( + "algorithm must be None when signing via ed25519 or ed448" + ) + elif not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError('Algorithm must be a registered hash algorithm.') + elif ( + isinstance(algorithm, hashes.MD5) and not + isinstance(private_key, rsa.RSAPrivateKey) + ): + raise ValueError( + "MD5 is not a supported hash algorithm for EC/DSA CSRs" + ) + + # Resolve the signature algorithm. + evp_md = self._evp_md_x509_null_if_eddsa(private_key, algorithm) + + # Create an empty request. 
+ x509_req = self._lib.X509_REQ_new() + self.openssl_assert(x509_req != self._ffi.NULL) + x509_req = self._ffi.gc(x509_req, self._lib.X509_REQ_free) + + # Set x509 version. + res = self._lib.X509_REQ_set_version(x509_req, x509.Version.v1.value) + self.openssl_assert(res == 1) + + # Set subject name. + res = self._lib.X509_REQ_set_subject_name( + x509_req, _encode_name_gc(self, builder._subject_name) + ) + self.openssl_assert(res == 1) + + # Set subject public key. + public_key = private_key.public_key() + res = self._lib.X509_REQ_set_pubkey( + x509_req, public_key._evp_pkey + ) + self.openssl_assert(res == 1) + + # Add extensions. + sk_extension = self._lib.sk_X509_EXTENSION_new_null() + self.openssl_assert(sk_extension != self._ffi.NULL) + sk_extension = self._ffi.gc( + sk_extension, + lambda x: self._lib.sk_X509_EXTENSION_pop_free( + x, self._ffi.addressof( + self._lib._original_lib, "X509_EXTENSION_free" + ) + ) + ) + # Don't GC individual extensions because the memory is owned by + # sk_extensions and will be freed along with it. + self._create_x509_extensions( + extensions=builder._extensions, + handlers=_EXTENSION_ENCODE_HANDLERS, + x509_obj=sk_extension, + add_func=self._lib.sk_X509_EXTENSION_insert, + gc=False + ) + res = self._lib.X509_REQ_add_extensions(x509_req, sk_extension) + self.openssl_assert(res == 1) + + # Sign the request using the requester's private key. + res = self._lib.X509_REQ_sign( + x509_req, private_key._evp_pkey, evp_md + ) + if res == 0: + errors = self._consume_errors() + self.openssl_assert( + errors[0]._lib_reason_match( + self._lib.ERR_LIB_RSA, + self._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY + ) + ) + + raise ValueError("Digest too big for RSA key") + + return _CertificateSigningRequest(self, x509_req) + + def create_x509_certificate(self, builder, private_key, algorithm): + if not isinstance(builder, x509.CertificateBuilder): + raise TypeError('Builder type mismatch.') + if isinstance(private_key, + (ed25519.Ed25519PrivateKey, ed448.Ed448PrivateKey)): + if algorithm is not None: + raise ValueError( + "algorithm must be None when signing via ed25519 or ed448" + ) + elif not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError('Algorithm must be a registered hash algorithm.') + + if ( + isinstance(algorithm, hashes.MD5) and not + isinstance(private_key, rsa.RSAPrivateKey) + ): + raise ValueError( + "MD5 is only (reluctantly) supported for RSA certificates" + ) + + # Resolve the signature algorithm. + evp_md = self._evp_md_x509_null_if_eddsa(private_key, algorithm) + + # Create an empty certificate. + x509_cert = self._lib.X509_new() + x509_cert = self._ffi.gc(x509_cert, backend._lib.X509_free) + + # Set the x509 version. + res = self._lib.X509_set_version(x509_cert, builder._version.value) + self.openssl_assert(res == 1) + + # Set the subject's name. + res = self._lib.X509_set_subject_name( + x509_cert, _encode_name_gc(self, builder._subject_name) + ) + self.openssl_assert(res == 1) + + # Set the subject's public key. + res = self._lib.X509_set_pubkey( + x509_cert, builder._public_key._evp_pkey + ) + self.openssl_assert(res == 1) + + # Set the certificate serial number. + serial_number = _encode_asn1_int_gc(self, builder._serial_number) + res = self._lib.X509_set_serialNumber(x509_cert, serial_number) + self.openssl_assert(res == 1) + + # Set the "not before" time. + self._set_asn1_time( + self._lib.X509_getm_notBefore(x509_cert), builder._not_valid_before + ) + + # Set the "not after" time. 
+ self._set_asn1_time( + self._lib.X509_getm_notAfter(x509_cert), builder._not_valid_after + ) + + # Add extensions. + self._create_x509_extensions( + extensions=builder._extensions, + handlers=_EXTENSION_ENCODE_HANDLERS, + x509_obj=x509_cert, + add_func=self._lib.X509_add_ext, + gc=True + ) + + # Set the issuer name. + res = self._lib.X509_set_issuer_name( + x509_cert, _encode_name_gc(self, builder._issuer_name) + ) + self.openssl_assert(res == 1) + + # Sign the certificate with the issuer's private key. + res = self._lib.X509_sign( + x509_cert, private_key._evp_pkey, evp_md + ) + if res == 0: + errors = self._consume_errors() + self.openssl_assert( + errors[0]._lib_reason_match( + self._lib.ERR_LIB_RSA, + self._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY + ) + ) + raise ValueError("Digest too big for RSA key") + + return _Certificate(self, x509_cert) + + def _evp_md_x509_null_if_eddsa(self, private_key, algorithm): + if isinstance(private_key, + (ed25519.Ed25519PrivateKey, ed448.Ed448PrivateKey)): + # OpenSSL requires us to pass NULL for EVP_MD for ed25519/ed448 + return self._ffi.NULL + else: + return self._evp_md_non_null_from_algorithm(algorithm) + + def _set_asn1_time(self, asn1_time, time): + if time.year >= 2050: + asn1_str = time.strftime('%Y%m%d%H%M%SZ').encode('ascii') + else: + asn1_str = time.strftime('%y%m%d%H%M%SZ').encode('ascii') + res = self._lib.ASN1_TIME_set_string(asn1_time, asn1_str) + self.openssl_assert(res == 1) + + def _create_asn1_time(self, time): + asn1_time = self._lib.ASN1_TIME_new() + self.openssl_assert(asn1_time != self._ffi.NULL) + asn1_time = self._ffi.gc(asn1_time, self._lib.ASN1_TIME_free) + self._set_asn1_time(asn1_time, time) + return asn1_time + + def create_x509_crl(self, builder, private_key, algorithm): + if not isinstance(builder, x509.CertificateRevocationListBuilder): + raise TypeError('Builder type mismatch.') + if isinstance(private_key, + (ed25519.Ed25519PrivateKey, ed448.Ed448PrivateKey)): + if algorithm is not None: + raise ValueError( + "algorithm must be None when signing via ed25519 or ed448" + ) + elif not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError('Algorithm must be a registered hash algorithm.') + + if ( + isinstance(algorithm, hashes.MD5) and not + isinstance(private_key, rsa.RSAPrivateKey) + ): + raise ValueError( + "MD5 is not a supported hash algorithm for EC/DSA CRLs" + ) + + evp_md = self._evp_md_x509_null_if_eddsa(private_key, algorithm) + + # Create an empty CRL. + x509_crl = self._lib.X509_CRL_new() + x509_crl = self._ffi.gc(x509_crl, backend._lib.X509_CRL_free) + + # Set the x509 CRL version. We only support v2 (integer value 1). + res = self._lib.X509_CRL_set_version(x509_crl, 1) + self.openssl_assert(res == 1) + + # Set the issuer name. + res = self._lib.X509_CRL_set_issuer_name( + x509_crl, _encode_name_gc(self, builder._issuer_name) + ) + self.openssl_assert(res == 1) + + # Set the last update time. + last_update = self._create_asn1_time(builder._last_update) + res = self._lib.X509_CRL_set_lastUpdate(x509_crl, last_update) + self.openssl_assert(res == 1) + + # Set the next update time. + next_update = self._create_asn1_time(builder._next_update) + res = self._lib.X509_CRL_set_nextUpdate(x509_crl, next_update) + self.openssl_assert(res == 1) + + # Add extensions. 
+ self._create_x509_extensions( + extensions=builder._extensions, + handlers=_CRL_EXTENSION_ENCODE_HANDLERS, + x509_obj=x509_crl, + add_func=self._lib.X509_CRL_add_ext, + gc=True + ) + + # add revoked certificates + for revoked_cert in builder._revoked_certificates: + # Duplicating because the X509_CRL takes ownership and will free + # this memory when X509_CRL_free is called. + revoked = self._lib.Cryptography_X509_REVOKED_dup( + revoked_cert._x509_revoked + ) + self.openssl_assert(revoked != self._ffi.NULL) + res = self._lib.X509_CRL_add0_revoked(x509_crl, revoked) + self.openssl_assert(res == 1) + + res = self._lib.X509_CRL_sign( + x509_crl, private_key._evp_pkey, evp_md + ) + if res == 0: + errors = self._consume_errors() + self.openssl_assert( + errors[0]._lib_reason_match( + self._lib.ERR_LIB_RSA, + self._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY + ) + ) + raise ValueError("Digest too big for RSA key") + + return _CertificateRevocationList(self, x509_crl) + + def _create_x509_extensions(self, extensions, handlers, x509_obj, + add_func, gc): + for i, extension in enumerate(extensions): + x509_extension = self._create_x509_extension( + handlers, extension + ) + self.openssl_assert(x509_extension != self._ffi.NULL) + + if gc: + x509_extension = self._ffi.gc( + x509_extension, self._lib.X509_EXTENSION_free + ) + res = add_func(x509_obj, x509_extension, i) + self.openssl_assert(res >= 1) + + def _create_raw_x509_extension(self, extension, value): + obj = _txt2obj_gc(self, extension.oid.dotted_string) + return self._lib.X509_EXTENSION_create_by_OBJ( + self._ffi.NULL, obj, 1 if extension.critical else 0, value + ) + + def _create_x509_extension(self, handlers, extension): + if isinstance(extension.value, x509.UnrecognizedExtension): + value = _encode_asn1_str_gc(self, extension.value.value) + return self._create_raw_x509_extension(extension, value) + elif isinstance(extension.value, x509.TLSFeature): + asn1 = encode_der( + SEQUENCE, + *[ + encode_der(INTEGER, encode_der_integer(x.value)) + for x in extension.value + ] + ) + value = _encode_asn1_str_gc(self, asn1) + return self._create_raw_x509_extension(extension, value) + elif isinstance(extension.value, x509.PrecertPoison): + value = _encode_asn1_str_gc(self, encode_der(NULL)) + return self._create_raw_x509_extension(extension, value) + else: + try: + encode = handlers[extension.oid] + except KeyError: + raise NotImplementedError( + 'Extension not supported: {}'.format(extension.oid) + ) + + ext_struct = encode(self, extension.value) + nid = self._lib.OBJ_txt2nid( + extension.oid.dotted_string.encode("ascii") + ) + backend.openssl_assert(nid != self._lib.NID_undef) + return self._lib.X509V3_EXT_i2d( + nid, 1 if extension.critical else 0, ext_struct + ) + + def create_x509_revoked_certificate(self, builder): + if not isinstance(builder, x509.RevokedCertificateBuilder): + raise TypeError('Builder type mismatch.') + + x509_revoked = self._lib.X509_REVOKED_new() + self.openssl_assert(x509_revoked != self._ffi.NULL) + x509_revoked = self._ffi.gc(x509_revoked, self._lib.X509_REVOKED_free) + serial_number = _encode_asn1_int_gc(self, builder._serial_number) + res = self._lib.X509_REVOKED_set_serialNumber( + x509_revoked, serial_number + ) + self.openssl_assert(res == 1) + rev_date = self._create_asn1_time(builder._revocation_date) + res = self._lib.X509_REVOKED_set_revocationDate(x509_revoked, rev_date) + self.openssl_assert(res == 1) + # add CRL entry extensions + self._create_x509_extensions( + extensions=builder._extensions, + 
handlers=_CRL_ENTRY_EXTENSION_ENCODE_HANDLERS, + x509_obj=x509_revoked, + add_func=self._lib.X509_REVOKED_add_ext, + gc=True + ) + return _RevokedCertificate(self, None, x509_revoked) + + def load_pem_private_key(self, data, password): + return self._load_key( + self._lib.PEM_read_bio_PrivateKey, + self._evp_pkey_to_private_key, + data, + password, + ) + + def load_pem_public_key(self, data): + mem_bio = self._bytes_to_bio(data) + evp_pkey = self._lib.PEM_read_bio_PUBKEY( + mem_bio.bio, self._ffi.NULL, self._ffi.NULL, self._ffi.NULL + ) + if evp_pkey != self._ffi.NULL: + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + return self._evp_pkey_to_public_key(evp_pkey) + else: + # It's not a (RSA/DSA/ECDSA) subjectPublicKeyInfo, but we still + # need to check to see if it is a pure PKCS1 RSA public key (not + # embedded in a subjectPublicKeyInfo) + self._consume_errors() + res = self._lib.BIO_reset(mem_bio.bio) + self.openssl_assert(res == 1) + rsa_cdata = self._lib.PEM_read_bio_RSAPublicKey( + mem_bio.bio, self._ffi.NULL, self._ffi.NULL, self._ffi.NULL + ) + if rsa_cdata != self._ffi.NULL: + rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) + evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata) + return _RSAPublicKey(self, rsa_cdata, evp_pkey) + else: + self._handle_key_loading_error() + + def load_pem_parameters(self, data): + mem_bio = self._bytes_to_bio(data) + # only DH is supported currently + dh_cdata = self._lib.PEM_read_bio_DHparams( + mem_bio.bio, self._ffi.NULL, self._ffi.NULL, self._ffi.NULL) + if dh_cdata != self._ffi.NULL: + dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free) + return _DHParameters(self, dh_cdata) + else: + self._handle_key_loading_error() + + def load_der_private_key(self, data, password): + # OpenSSL has a function called d2i_AutoPrivateKey that in theory + # handles this automatically, however it doesn't handle encrypted + # private keys. Instead we try to load the key two different ways. + # First we'll try to load it as a traditional key. + bio_data = self._bytes_to_bio(data) + key = self._evp_pkey_from_der_traditional_key(bio_data, password) + if key: + return self._evp_pkey_to_private_key(key) + else: + # Finally we try to load it with the method that handles encrypted + # PKCS8 properly. + return self._load_key( + self._lib.d2i_PKCS8PrivateKey_bio, + self._evp_pkey_to_private_key, + data, + password, + ) + + def _evp_pkey_from_der_traditional_key(self, bio_data, password): + key = self._lib.d2i_PrivateKey_bio(bio_data.bio, self._ffi.NULL) + if key != self._ffi.NULL: + key = self._ffi.gc(key, self._lib.EVP_PKEY_free) + if password is not None: + raise TypeError( + "Password was given but private key is not encrypted." 
+ ) + + return key + else: + self._consume_errors() + return None + + def load_der_public_key(self, data): + mem_bio = self._bytes_to_bio(data) + evp_pkey = self._lib.d2i_PUBKEY_bio(mem_bio.bio, self._ffi.NULL) + if evp_pkey != self._ffi.NULL: + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + return self._evp_pkey_to_public_key(evp_pkey) + else: + # It's not a (RSA/DSA/ECDSA) subjectPublicKeyInfo, but we still + # need to check to see if it is a pure PKCS1 RSA public key (not + # embedded in a subjectPublicKeyInfo) + self._consume_errors() + res = self._lib.BIO_reset(mem_bio.bio) + self.openssl_assert(res == 1) + rsa_cdata = self._lib.d2i_RSAPublicKey_bio( + mem_bio.bio, self._ffi.NULL + ) + if rsa_cdata != self._ffi.NULL: + rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) + evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata) + return _RSAPublicKey(self, rsa_cdata, evp_pkey) + else: + self._handle_key_loading_error() + + def load_der_parameters(self, data): + mem_bio = self._bytes_to_bio(data) + dh_cdata = self._lib.d2i_DHparams_bio( + mem_bio.bio, self._ffi.NULL + ) + if dh_cdata != self._ffi.NULL: + dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free) + return _DHParameters(self, dh_cdata) + elif self._lib.Cryptography_HAS_EVP_PKEY_DHX: + # We check to see if the data is dhx. + self._consume_errors() + res = self._lib.BIO_reset(mem_bio.bio) + self.openssl_assert(res == 1) + dh_cdata = self._lib.Cryptography_d2i_DHxparams_bio( + mem_bio.bio, self._ffi.NULL + ) + if dh_cdata != self._ffi.NULL: + dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free) + return _DHParameters(self, dh_cdata) + + self._handle_key_loading_error() + + def load_pem_x509_certificate(self, data): + mem_bio = self._bytes_to_bio(data) + x509 = self._lib.PEM_read_bio_X509( + mem_bio.bio, self._ffi.NULL, self._ffi.NULL, self._ffi.NULL + ) + if x509 == self._ffi.NULL: + self._consume_errors() + raise ValueError( + "Unable to load certificate. See https://cryptography.io/en/la" + "test/faq/#why-can-t-i-import-my-pem-file for more details." + ) + + x509 = self._ffi.gc(x509, self._lib.X509_free) + return _Certificate(self, x509) + + def load_der_x509_certificate(self, data): + mem_bio = self._bytes_to_bio(data) + x509 = self._lib.d2i_X509_bio(mem_bio.bio, self._ffi.NULL) + if x509 == self._ffi.NULL: + self._consume_errors() + raise ValueError("Unable to load certificate") + + x509 = self._ffi.gc(x509, self._lib.X509_free) + return _Certificate(self, x509) + + def load_pem_x509_crl(self, data): + mem_bio = self._bytes_to_bio(data) + x509_crl = self._lib.PEM_read_bio_X509_CRL( + mem_bio.bio, self._ffi.NULL, self._ffi.NULL, self._ffi.NULL + ) + if x509_crl == self._ffi.NULL: + self._consume_errors() + raise ValueError( + "Unable to load CRL. See https://cryptography.io/en/la" + "test/faq/#why-can-t-i-import-my-pem-file for more details."
+ ) + + x509_crl = self._ffi.gc(x509_crl, self._lib.X509_CRL_free) + return _CertificateRevocationList(self, x509_crl) + + def load_der_x509_crl(self, data): + mem_bio = self._bytes_to_bio(data) + x509_crl = self._lib.d2i_X509_CRL_bio(mem_bio.bio, self._ffi.NULL) + if x509_crl == self._ffi.NULL: + self._consume_errors() + raise ValueError("Unable to load CRL") + + x509_crl = self._ffi.gc(x509_crl, self._lib.X509_CRL_free) + return _CertificateRevocationList(self, x509_crl) + + def load_pem_x509_csr(self, data): + mem_bio = self._bytes_to_bio(data) + x509_req = self._lib.PEM_read_bio_X509_REQ( + mem_bio.bio, self._ffi.NULL, self._ffi.NULL, self._ffi.NULL + ) + if x509_req == self._ffi.NULL: + self._consume_errors() + raise ValueError( + "Unable to load request. See https://cryptography.io/en/la" + "test/faq/#why-can-t-i-import-my-pem-file for more details." + ) + + x509_req = self._ffi.gc(x509_req, self._lib.X509_REQ_free) + return _CertificateSigningRequest(self, x509_req) + + def load_der_x509_csr(self, data): + mem_bio = self._bytes_to_bio(data) + x509_req = self._lib.d2i_X509_REQ_bio(mem_bio.bio, self._ffi.NULL) + if x509_req == self._ffi.NULL: + self._consume_errors() + raise ValueError("Unable to load request") + + x509_req = self._ffi.gc(x509_req, self._lib.X509_REQ_free) + return _CertificateSigningRequest(self, x509_req) + + def _load_key(self, openssl_read_func, convert_func, data, password): + mem_bio = self._bytes_to_bio(data) + + userdata = self._ffi.new("CRYPTOGRAPHY_PASSWORD_DATA *") + if password is not None: + utils._check_byteslike("password", password) + password_ptr = self._ffi.from_buffer(password) + userdata.password = password_ptr + userdata.length = len(password) + + evp_pkey = openssl_read_func( + mem_bio.bio, + self._ffi.NULL, + self._ffi.addressof( + self._lib._original_lib, "Cryptography_pem_password_cb" + ), + userdata, + ) + + if evp_pkey == self._ffi.NULL: + if userdata.error != 0: + errors = self._consume_errors() + self.openssl_assert(errors) + if userdata.error == -1: + raise TypeError( + "Password was not given but private key is encrypted" + ) + else: + assert userdata.error == -2 + raise ValueError( + "Passwords longer than {} bytes are not supported " + "by this backend.".format(userdata.maxsize - 1) + ) + else: + self._handle_key_loading_error() + + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + + if password is not None and userdata.called == 0: + raise TypeError( + "Password was given but private key is not encrypted.") + + assert ( + (password is not None and userdata.called == 1) or + password is None + ) + + return convert_func(evp_pkey) + + def _handle_key_loading_error(self): + errors = self._consume_errors() + + if not errors: + raise ValueError("Could not deserialize key data.") + + elif ( + errors[0]._lib_reason_match( + self._lib.ERR_LIB_EVP, self._lib.EVP_R_BAD_DECRYPT + ) or errors[0]._lib_reason_match( + self._lib.ERR_LIB_PKCS12, + self._lib.PKCS12_R_PKCS12_CIPHERFINAL_ERROR + ) + ): + raise ValueError("Bad decrypt. 
Incorrect password?") + + elif ( + errors[0]._lib_reason_match( + self._lib.ERR_LIB_EVP, self._lib.EVP_R_UNKNOWN_PBE_ALGORITHM + ) or errors[0]._lib_reason_match( + self._lib.ERR_LIB_PEM, self._lib.PEM_R_UNSUPPORTED_ENCRYPTION + ) + ): + raise UnsupportedAlgorithm( + "PEM data is encrypted with an unsupported cipher", + _Reasons.UNSUPPORTED_CIPHER + ) + + elif any( + error._lib_reason_match( + self._lib.ERR_LIB_EVP, + self._lib.EVP_R_UNSUPPORTED_PRIVATE_KEY_ALGORITHM + ) + for error in errors + ): + raise ValueError("Unsupported public key algorithm.") + + else: + assert errors[0].lib in ( + self._lib.ERR_LIB_EVP, + self._lib.ERR_LIB_PEM, + self._lib.ERR_LIB_ASN1, + ) + raise ValueError("Could not deserialize key data.") + + def elliptic_curve_supported(self, curve): + try: + curve_nid = self._elliptic_curve_to_nid(curve) + except UnsupportedAlgorithm: + curve_nid = self._lib.NID_undef + + group = self._lib.EC_GROUP_new_by_curve_name(curve_nid) + + if group == self._ffi.NULL: + errors = self._consume_errors() + self.openssl_assert( + curve_nid == self._lib.NID_undef or + errors[0]._lib_reason_match( + self._lib.ERR_LIB_EC, + self._lib.EC_R_UNKNOWN_GROUP + ) + ) + return False + else: + self.openssl_assert(curve_nid != self._lib.NID_undef) + self._lib.EC_GROUP_free(group) + return True + + def elliptic_curve_signature_algorithm_supported( + self, signature_algorithm, curve + ): + # We only support ECDSA right now. + if not isinstance(signature_algorithm, ec.ECDSA): + return False + + return self.elliptic_curve_supported(curve) + + def generate_elliptic_curve_private_key(self, curve): + """ + Generate a new private key on the named curve. + """ + + if self.elliptic_curve_supported(curve): + ec_cdata = self._ec_key_new_by_curve(curve) + + res = self._lib.EC_KEY_generate_key(ec_cdata) + self.openssl_assert(res == 1) + + evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata) + + return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey) + else: + raise UnsupportedAlgorithm( + "Backend object does not support {}.".format(curve.name), + _Reasons.UNSUPPORTED_ELLIPTIC_CURVE + ) + + def load_elliptic_curve_private_numbers(self, numbers): + public = numbers.public_numbers + + ec_cdata = self._ec_key_new_by_curve(public.curve) + + private_value = self._ffi.gc( + self._int_to_bn(numbers.private_value), self._lib.BN_clear_free + ) + res = self._lib.EC_KEY_set_private_key(ec_cdata, private_value) + self.openssl_assert(res == 1) + + ec_cdata = self._ec_key_set_public_key_affine_coordinates( + ec_cdata, public.x, public.y) + + evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata) + + return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey) + + def load_elliptic_curve_public_numbers(self, numbers): + ec_cdata = self._ec_key_new_by_curve(numbers.curve) + ec_cdata = self._ec_key_set_public_key_affine_coordinates( + ec_cdata, numbers.x, numbers.y) + evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata) + + return _EllipticCurvePublicKey(self, ec_cdata, evp_pkey) + + def load_elliptic_curve_public_bytes(self, curve, point_bytes): + ec_cdata = self._ec_key_new_by_curve(curve) + group = self._lib.EC_KEY_get0_group(ec_cdata) + self.openssl_assert(group != self._ffi.NULL) + point = self._lib.EC_POINT_new(group) + self.openssl_assert(point != self._ffi.NULL) + point = self._ffi.gc(point, self._lib.EC_POINT_free) + with self._tmp_bn_ctx() as bn_ctx: + res = self._lib.EC_POINT_oct2point( + group, point, point_bytes, len(point_bytes), bn_ctx + ) + if res != 1: + self._consume_errors() + raise ValueError("Invalid public bytes for the 
given curve") + + res = self._lib.EC_KEY_set_public_key(ec_cdata, point) + self.openssl_assert(res == 1) + evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata) + return _EllipticCurvePublicKey(self, ec_cdata, evp_pkey) + + def derive_elliptic_curve_private_key(self, private_value, curve): + ec_cdata = self._ec_key_new_by_curve(curve) + + get_func, group = self._ec_key_determine_group_get_func(ec_cdata) + + point = self._lib.EC_POINT_new(group) + self.openssl_assert(point != self._ffi.NULL) + point = self._ffi.gc(point, self._lib.EC_POINT_free) + + value = self._int_to_bn(private_value) + value = self._ffi.gc(value, self._lib.BN_clear_free) + + with self._tmp_bn_ctx() as bn_ctx: + res = self._lib.EC_POINT_mul(group, point, value, self._ffi.NULL, + self._ffi.NULL, bn_ctx) + self.openssl_assert(res == 1) + + bn_x = self._lib.BN_CTX_get(bn_ctx) + bn_y = self._lib.BN_CTX_get(bn_ctx) + + res = get_func(group, point, bn_x, bn_y, bn_ctx) + self.openssl_assert(res == 1) + + res = self._lib.EC_KEY_set_public_key(ec_cdata, point) + self.openssl_assert(res == 1) + private = self._int_to_bn(private_value) + private = self._ffi.gc(private, self._lib.BN_clear_free) + res = self._lib.EC_KEY_set_private_key(ec_cdata, private) + self.openssl_assert(res == 1) + + evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata) + + return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey) + + def _ec_key_new_by_curve(self, curve): + curve_nid = self._elliptic_curve_to_nid(curve) + ec_cdata = self._lib.EC_KEY_new_by_curve_name(curve_nid) + self.openssl_assert(ec_cdata != self._ffi.NULL) + return self._ffi.gc(ec_cdata, self._lib.EC_KEY_free) + + def load_der_ocsp_request(self, data): + mem_bio = self._bytes_to_bio(data) + request = self._lib.d2i_OCSP_REQUEST_bio(mem_bio.bio, self._ffi.NULL) + if request == self._ffi.NULL: + self._consume_errors() + raise ValueError("Unable to load OCSP request") + + request = self._ffi.gc(request, self._lib.OCSP_REQUEST_free) + return _OCSPRequest(self, request) + + def load_der_ocsp_response(self, data): + mem_bio = self._bytes_to_bio(data) + response = self._lib.d2i_OCSP_RESPONSE_bio(mem_bio.bio, self._ffi.NULL) + if response == self._ffi.NULL: + self._consume_errors() + raise ValueError("Unable to load OCSP response") + + response = self._ffi.gc(response, self._lib.OCSP_RESPONSE_free) + return _OCSPResponse(self, response) + + def create_ocsp_request(self, builder): + ocsp_req = self._lib.OCSP_REQUEST_new() + self.openssl_assert(ocsp_req != self._ffi.NULL) + ocsp_req = self._ffi.gc(ocsp_req, self._lib.OCSP_REQUEST_free) + cert, issuer, algorithm = builder._request + evp_md = self._evp_md_non_null_from_algorithm(algorithm) + certid = self._lib.OCSP_cert_to_id( + evp_md, cert._x509, issuer._x509 + ) + self.openssl_assert(certid != self._ffi.NULL) + onereq = self._lib.OCSP_request_add0_id(ocsp_req, certid) + self.openssl_assert(onereq != self._ffi.NULL) + self._create_x509_extensions( + extensions=builder._extensions, + handlers=_OCSP_REQUEST_EXTENSION_ENCODE_HANDLERS, + x509_obj=ocsp_req, + add_func=self._lib.OCSP_REQUEST_add_ext, + gc=True, + ) + return _OCSPRequest(self, ocsp_req) + + def _create_ocsp_basic_response(self, builder, private_key, algorithm): + basic = self._lib.OCSP_BASICRESP_new() + self.openssl_assert(basic != self._ffi.NULL) + basic = self._ffi.gc(basic, self._lib.OCSP_BASICRESP_free) + evp_md = self._evp_md_non_null_from_algorithm( + builder._response._algorithm + ) + certid = self._lib.OCSP_cert_to_id( + evp_md, builder._response._cert._x509, + builder._response._issuer._x509 + 
) + self.openssl_assert(certid != self._ffi.NULL) + certid = self._ffi.gc(certid, self._lib.OCSP_CERTID_free) + if builder._response._revocation_reason is None: + reason = -1 + else: + reason = _CRL_ENTRY_REASON_ENUM_TO_CODE[ + builder._response._revocation_reason + ] + if builder._response._revocation_time is None: + rev_time = self._ffi.NULL + else: + rev_time = self._create_asn1_time( + builder._response._revocation_time + ) + + next_update = self._ffi.NULL + if builder._response._next_update is not None: + next_update = self._create_asn1_time( + builder._response._next_update + ) + + this_update = self._create_asn1_time(builder._response._this_update) + + res = self._lib.OCSP_basic_add1_status( + basic, + certid, + builder._response._cert_status.value, + reason, + rev_time, + this_update, + next_update + ) + self.openssl_assert(res != self._ffi.NULL) + # okay, now sign the basic structure + evp_md = self._evp_md_x509_null_if_eddsa(private_key, algorithm) + responder_cert, responder_encoding = builder._responder_id + flags = self._lib.OCSP_NOCERTS + if responder_encoding is ocsp.OCSPResponderEncoding.HASH: + flags |= self._lib.OCSP_RESPID_KEY + + if builder._certs is not None: + for cert in builder._certs: + res = self._lib.OCSP_basic_add1_cert(basic, cert._x509) + self.openssl_assert(res == 1) + + self._create_x509_extensions( + extensions=builder._extensions, + handlers=_OCSP_BASICRESP_EXTENSION_ENCODE_HANDLERS, + x509_obj=basic, + add_func=self._lib.OCSP_BASICRESP_add_ext, + gc=True, + ) + + res = self._lib.OCSP_basic_sign( + basic, responder_cert._x509, private_key._evp_pkey, + evp_md, self._ffi.NULL, flags + ) + if res != 1: + errors = self._consume_errors() + self.openssl_assert( + errors[0]._lib_reason_match( + self._lib.ERR_LIB_X509, + self._lib.X509_R_KEY_VALUES_MISMATCH + ) + ) + raise ValueError("responder_cert must be signed by private_key") + + return basic + + def create_ocsp_response(self, response_status, builder, private_key, + algorithm): + if response_status is ocsp.OCSPResponseStatus.SUCCESSFUL: + basic = self._create_ocsp_basic_response( + builder, private_key, algorithm + ) + else: + basic = self._ffi.NULL + + ocsp_resp = self._lib.OCSP_response_create( + response_status.value, basic + ) + self.openssl_assert(ocsp_resp != self._ffi.NULL) + ocsp_resp = self._ffi.gc(ocsp_resp, self._lib.OCSP_RESPONSE_free) + return _OCSPResponse(self, ocsp_resp) + + def elliptic_curve_exchange_algorithm_supported(self, algorithm, curve): + return ( + self.elliptic_curve_supported(curve) and + isinstance(algorithm, ec.ECDH) + ) + + def _ec_cdata_to_evp_pkey(self, ec_cdata): + evp_pkey = self._create_evp_pkey_gc() + res = self._lib.EVP_PKEY_set1_EC_KEY(evp_pkey, ec_cdata) + self.openssl_assert(res == 1) + return evp_pkey + + def _elliptic_curve_to_nid(self, curve): + """ + Get the NID for a curve name. 
+ """ + + curve_aliases = { + "secp192r1": "prime192v1", + "secp256r1": "prime256v1" + } + + curve_name = curve_aliases.get(curve.name, curve.name) + + curve_nid = self._lib.OBJ_sn2nid(curve_name.encode()) + if curve_nid == self._lib.NID_undef: + raise UnsupportedAlgorithm( + "{} is not a supported elliptic curve".format(curve.name), + _Reasons.UNSUPPORTED_ELLIPTIC_CURVE + ) + return curve_nid + + @contextmanager + def _tmp_bn_ctx(self): + bn_ctx = self._lib.BN_CTX_new() + self.openssl_assert(bn_ctx != self._ffi.NULL) + bn_ctx = self._ffi.gc(bn_ctx, self._lib.BN_CTX_free) + self._lib.BN_CTX_start(bn_ctx) + try: + yield bn_ctx + finally: + self._lib.BN_CTX_end(bn_ctx) + + def _ec_key_determine_group_get_func(self, ctx): + """ + Given an EC_KEY determine the group and what function is required to + get point coordinates. + """ + self.openssl_assert(ctx != self._ffi.NULL) + + nid_two_field = self._lib.OBJ_sn2nid(b"characteristic-two-field") + self.openssl_assert(nid_two_field != self._lib.NID_undef) + + group = self._lib.EC_KEY_get0_group(ctx) + self.openssl_assert(group != self._ffi.NULL) + + method = self._lib.EC_GROUP_method_of(group) + self.openssl_assert(method != self._ffi.NULL) + + nid = self._lib.EC_METHOD_get_field_type(method) + self.openssl_assert(nid != self._lib.NID_undef) + + if nid == nid_two_field and self._lib.Cryptography_HAS_EC2M: + get_func = self._lib.EC_POINT_get_affine_coordinates_GF2m + else: + get_func = self._lib.EC_POINT_get_affine_coordinates_GFp + + assert get_func + + return get_func, group + + def _ec_key_set_public_key_affine_coordinates(self, ctx, x, y): + """ + Sets the public key point in the EC_KEY context to the affine x and y + values. + """ + + if x < 0 or y < 0: + raise ValueError( + "Invalid EC key. Both x and y must be non-negative." + ) + + x = self._ffi.gc(self._int_to_bn(x), self._lib.BN_free) + y = self._ffi.gc(self._int_to_bn(y), self._lib.BN_free) + res = self._lib.EC_KEY_set_public_key_affine_coordinates(ctx, x, y) + if res != 1: + self._consume_errors() + raise ValueError("Invalid EC key.") + + return ctx + + def _private_key_bytes(self, encoding, format, encryption_algorithm, + evp_pkey, cdata): + if not isinstance(format, serialization.PrivateFormat): + raise TypeError( + "format must be an item from the PrivateFormat enum" + ) + + # X9.62 encoding is only valid for EC public keys + if encoding is serialization.Encoding.X962: + raise ValueError("X9.62 format is only valid for EC public keys") + + # Raw format and encoding are only valid for X25519, Ed25519, X448, and + # Ed448 keys. We capture those cases before this method is called so if + # we see those enum values here it means the caller has passed them to + # a key that doesn't support raw type + if format is serialization.PrivateFormat.Raw: + raise ValueError("raw format is invalid with this key or encoding") + + if encoding is serialization.Encoding.Raw: + raise ValueError("raw encoding is invalid with this key or format") + + if not isinstance(encryption_algorithm, + serialization.KeySerializationEncryption): + raise TypeError( + "Encryption algorithm must be a KeySerializationEncryption " + "instance" + ) + + if isinstance(encryption_algorithm, serialization.NoEncryption): + password = b"" + passlen = 0 + evp_cipher = self._ffi.NULL + elif isinstance(encryption_algorithm, + serialization.BestAvailableEncryption): + # This is a curated value that we will update over time. 
+ evp_cipher = self._lib.EVP_get_cipherbyname( + b"aes-256-cbc" + ) + password = encryption_algorithm.password + passlen = len(password) + if passlen > 1023: + raise ValueError( + "Passwords longer than 1023 bytes are not supported by " + "this backend" + ) + else: + raise ValueError("Unsupported encryption type") + + key_type = self._lib.EVP_PKEY_id(evp_pkey) + if encoding is serialization.Encoding.PEM: + if format is serialization.PrivateFormat.PKCS8: + write_bio = self._lib.PEM_write_bio_PKCS8PrivateKey + key = evp_pkey + else: + assert format is serialization.PrivateFormat.TraditionalOpenSSL + if key_type == self._lib.EVP_PKEY_RSA: + write_bio = self._lib.PEM_write_bio_RSAPrivateKey + elif key_type == self._lib.EVP_PKEY_DSA: + write_bio = self._lib.PEM_write_bio_DSAPrivateKey + else: + assert key_type == self._lib.EVP_PKEY_EC + write_bio = self._lib.PEM_write_bio_ECPrivateKey + + key = cdata + elif encoding is serialization.Encoding.DER: + if format is serialization.PrivateFormat.TraditionalOpenSSL: + if not isinstance( + encryption_algorithm, serialization.NoEncryption + ): + raise ValueError( + "Encryption is not supported for DER encoded " + "traditional OpenSSL keys" + ) + + return self._private_key_bytes_traditional_der(key_type, cdata) + else: + assert format is serialization.PrivateFormat.PKCS8 + write_bio = self._lib.i2d_PKCS8PrivateKey_bio + key = evp_pkey + else: + raise TypeError("encoding must be Encoding.PEM or Encoding.DER") + + bio = self._create_mem_bio_gc() + res = write_bio( + bio, + key, + evp_cipher, + password, + passlen, + self._ffi.NULL, + self._ffi.NULL + ) + self.openssl_assert(res == 1) + return self._read_mem_bio(bio) + + def _private_key_bytes_traditional_der(self, key_type, cdata): + if key_type == self._lib.EVP_PKEY_RSA: + write_bio = self._lib.i2d_RSAPrivateKey_bio + elif key_type == self._lib.EVP_PKEY_EC: + write_bio = self._lib.i2d_ECPrivateKey_bio + else: + self.openssl_assert(key_type == self._lib.EVP_PKEY_DSA) + write_bio = self._lib.i2d_DSAPrivateKey_bio + + bio = self._create_mem_bio_gc() + res = write_bio(bio, cdata) + self.openssl_assert(res == 1) + return self._read_mem_bio(bio) + + def _public_key_bytes(self, encoding, format, key, evp_pkey, cdata): + if not isinstance(encoding, serialization.Encoding): + raise TypeError("encoding must be an item from the Encoding enum") + + # Compressed/UncompressedPoint are only valid for EC keys and those + # cases are handled by the ECPublicKey public_bytes method before this + # method is called + if format in (serialization.PublicFormat.UncompressedPoint, + serialization.PublicFormat.CompressedPoint): + raise ValueError("Point formats are not valid for this key type") + + # Raw format and encoding are only valid for X25519, Ed25519, X448, and + # Ed448 keys. 
We capture those cases before this method is called so if + # we see those enum values here it means the caller has passed them to + # a key that doesn't support raw type + if format is serialization.PublicFormat.Raw: + raise ValueError("raw format is invalid with this key or encoding") + + if encoding is serialization.Encoding.Raw: + raise ValueError("raw encoding is invalid with this key or format") + + if ( + format is serialization.PublicFormat.OpenSSH or + encoding is serialization.Encoding.OpenSSH + ): + if ( + format is not serialization.PublicFormat.OpenSSH or + encoding is not serialization.Encoding.OpenSSH + ): + raise ValueError( + "OpenSSH format must be used with OpenSSH encoding" + ) + return self._openssh_public_key_bytes(key) + elif format is serialization.PublicFormat.SubjectPublicKeyInfo: + if encoding is serialization.Encoding.PEM: + write_bio = self._lib.PEM_write_bio_PUBKEY + else: + assert encoding is serialization.Encoding.DER + write_bio = self._lib.i2d_PUBKEY_bio + + key = evp_pkey + elif format is serialization.PublicFormat.PKCS1: + # Only RSA is supported here. + assert self._lib.EVP_PKEY_id(evp_pkey) == self._lib.EVP_PKEY_RSA + if encoding is serialization.Encoding.PEM: + write_bio = self._lib.PEM_write_bio_RSAPublicKey + else: + assert encoding is serialization.Encoding.DER + write_bio = self._lib.i2d_RSAPublicKey_bio + + key = cdata + else: + raise TypeError( + "format must be an item from the PublicFormat enum" + ) + + bio = self._create_mem_bio_gc() + res = write_bio(bio, key) + self.openssl_assert(res == 1) + return self._read_mem_bio(bio) + + def _openssh_public_key_bytes(self, key): + if isinstance(key, rsa.RSAPublicKey): + public_numbers = key.public_numbers() + return b"ssh-rsa " + base64.b64encode( + ssh._ssh_write_string(b"ssh-rsa") + + ssh._ssh_write_mpint(public_numbers.e) + + ssh._ssh_write_mpint(public_numbers.n) + ) + elif isinstance(key, dsa.DSAPublicKey): + public_numbers = key.public_numbers() + parameter_numbers = public_numbers.parameter_numbers + return b"ssh-dss " + base64.b64encode( + ssh._ssh_write_string(b"ssh-dss") + + ssh._ssh_write_mpint(parameter_numbers.p) + + ssh._ssh_write_mpint(parameter_numbers.q) + + ssh._ssh_write_mpint(parameter_numbers.g) + + ssh._ssh_write_mpint(public_numbers.y) + ) + elif isinstance(key, ed25519.Ed25519PublicKey): + raw_bytes = key.public_bytes(serialization.Encoding.Raw, + serialization.PublicFormat.Raw) + return b"ssh-ed25519 " + base64.b64encode( + ssh._ssh_write_string(b"ssh-ed25519") + + ssh._ssh_write_string(raw_bytes) + ) + elif isinstance(key, ec.EllipticCurvePublicKey): + public_numbers = key.public_numbers() + try: + curve_name = { + ec.SECP256R1: b"nistp256", + ec.SECP384R1: b"nistp384", + ec.SECP521R1: b"nistp521", + }[type(public_numbers.curve)] + except KeyError: + raise ValueError( + "Only SECP256R1, SECP384R1, and SECP521R1 curves are " + "supported by the SSH public key format" + ) + + point = key.public_bytes( + serialization.Encoding.X962, + serialization.PublicFormat.UncompressedPoint + ) + return b"ecdsa-sha2-" + curve_name + b" " + base64.b64encode( + ssh._ssh_write_string(b"ecdsa-sha2-" + curve_name) + + ssh._ssh_write_string(curve_name) + + ssh._ssh_write_string(point) + ) + else: + raise ValueError( + "OpenSSH encoding is not supported for this key type" + ) + + def _parameter_bytes(self, encoding, format, cdata): + if encoding is serialization.Encoding.OpenSSH: + raise TypeError( + "OpenSSH encoding is not supported" + ) + + # Only DH is supported here currently. 
+ q = self._ffi.new("BIGNUM **") + self._lib.DH_get0_pqg(cdata, + self._ffi.NULL, + q, + self._ffi.NULL) + if encoding is serialization.Encoding.PEM: + if q[0] != self._ffi.NULL: + write_bio = self._lib.PEM_write_bio_DHxparams + else: + write_bio = self._lib.PEM_write_bio_DHparams + elif encoding is serialization.Encoding.DER: + if q[0] != self._ffi.NULL: + write_bio = self._lib.Cryptography_i2d_DHxparams_bio + else: + write_bio = self._lib.i2d_DHparams_bio + else: + raise TypeError("encoding must be an item from the Encoding enum") + + bio = self._create_mem_bio_gc() + res = write_bio(bio, cdata) + self.openssl_assert(res == 1) + return self._read_mem_bio(bio) + + def generate_dh_parameters(self, generator, key_size): + if key_size < 512: + raise ValueError("DH key_size must be at least 512 bits") + + if generator not in (2, 5): + raise ValueError("DH generator must be 2 or 5") + + dh_param_cdata = self._lib.DH_new() + self.openssl_assert(dh_param_cdata != self._ffi.NULL) + dh_param_cdata = self._ffi.gc(dh_param_cdata, self._lib.DH_free) + + res = self._lib.DH_generate_parameters_ex( + dh_param_cdata, + key_size, + generator, + self._ffi.NULL + ) + self.openssl_assert(res == 1) + + return _DHParameters(self, dh_param_cdata) + + def _dh_cdata_to_evp_pkey(self, dh_cdata): + evp_pkey = self._create_evp_pkey_gc() + res = self._lib.EVP_PKEY_set1_DH(evp_pkey, dh_cdata) + self.openssl_assert(res == 1) + return evp_pkey + + def generate_dh_private_key(self, parameters): + dh_key_cdata = _dh_params_dup(parameters._dh_cdata, self) + + res = self._lib.DH_generate_key(dh_key_cdata) + self.openssl_assert(res == 1) + + evp_pkey = self._dh_cdata_to_evp_pkey(dh_key_cdata) + + return _DHPrivateKey(self, dh_key_cdata, evp_pkey) + + def generate_dh_private_key_and_parameters(self, generator, key_size): + return self.generate_dh_private_key( + self.generate_dh_parameters(generator, key_size)) + + def load_dh_private_numbers(self, numbers): + parameter_numbers = numbers.public_numbers.parameter_numbers + + dh_cdata = self._lib.DH_new() + self.openssl_assert(dh_cdata != self._ffi.NULL) + dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free) + + p = self._int_to_bn(parameter_numbers.p) + g = self._int_to_bn(parameter_numbers.g) + + if parameter_numbers.q is not None: + q = self._int_to_bn(parameter_numbers.q) + else: + q = self._ffi.NULL + + pub_key = self._int_to_bn(numbers.public_numbers.y) + priv_key = self._int_to_bn(numbers.x) + + res = self._lib.DH_set0_pqg(dh_cdata, p, q, g) + self.openssl_assert(res == 1) + + res = self._lib.DH_set0_key(dh_cdata, pub_key, priv_key) + self.openssl_assert(res == 1) + + codes = self._ffi.new("int[]", 1) + res = self._lib.Cryptography_DH_check(dh_cdata, codes) + self.openssl_assert(res == 1) + + # DH_check will return DH_NOT_SUITABLE_GENERATOR if p % 24 does not + # equal 11 when the generator is 2 (a quadratic nonresidue). + # We want to ignore that error because p % 24 == 23 is also fine. + # Specifically, g is then a quadratic residue. Within the context of + # Diffie-Hellman this means it can only generate half the possible + # values. That sounds bad, but quadratic nonresidues leak a bit of + # the key to the attacker in exchange for having the full key space + # available. See: https://crypto.stackexchange.com/questions/12961 + if codes[0] != 0 and not ( + parameter_numbers.g == 2 and + codes[0] ^ self._lib.DH_NOT_SUITABLE_GENERATOR == 0 + ): + raise ValueError( + "DH private numbers did not pass safety checks." 
+ ) + + evp_pkey = self._dh_cdata_to_evp_pkey(dh_cdata) + + return _DHPrivateKey(self, dh_cdata, evp_pkey) + + def load_dh_public_numbers(self, numbers): + dh_cdata = self._lib.DH_new() + self.openssl_assert(dh_cdata != self._ffi.NULL) + dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free) + + parameter_numbers = numbers.parameter_numbers + + p = self._int_to_bn(parameter_numbers.p) + g = self._int_to_bn(parameter_numbers.g) + + if parameter_numbers.q is not None: + q = self._int_to_bn(parameter_numbers.q) + else: + q = self._ffi.NULL + + pub_key = self._int_to_bn(numbers.y) + + res = self._lib.DH_set0_pqg(dh_cdata, p, q, g) + self.openssl_assert(res == 1) + + res = self._lib.DH_set0_key(dh_cdata, pub_key, self._ffi.NULL) + self.openssl_assert(res == 1) + + evp_pkey = self._dh_cdata_to_evp_pkey(dh_cdata) + + return _DHPublicKey(self, dh_cdata, evp_pkey) + + def load_dh_parameter_numbers(self, numbers): + dh_cdata = self._lib.DH_new() + self.openssl_assert(dh_cdata != self._ffi.NULL) + dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free) + + p = self._int_to_bn(numbers.p) + g = self._int_to_bn(numbers.g) + + if numbers.q is not None: + q = self._int_to_bn(numbers.q) + else: + q = self._ffi.NULL + + res = self._lib.DH_set0_pqg(dh_cdata, p, q, g) + self.openssl_assert(res == 1) + + return _DHParameters(self, dh_cdata) + + def dh_parameters_supported(self, p, g, q=None): + dh_cdata = self._lib.DH_new() + self.openssl_assert(dh_cdata != self._ffi.NULL) + dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free) + + p = self._int_to_bn(p) + g = self._int_to_bn(g) + + if q is not None: + q = self._int_to_bn(q) + else: + q = self._ffi.NULL + + res = self._lib.DH_set0_pqg(dh_cdata, p, q, g) + self.openssl_assert(res == 1) + + codes = self._ffi.new("int[]", 1) + res = self._lib.Cryptography_DH_check(dh_cdata, codes) + self.openssl_assert(res == 1) + + return codes[0] == 0 + + def dh_x942_serialization_supported(self): + return self._lib.Cryptography_HAS_EVP_PKEY_DHX == 1 + + def x509_name_bytes(self, name): + x509_name = _encode_name_gc(self, name) + pp = self._ffi.new("unsigned char **") + res = self._lib.i2d_X509_NAME(x509_name, pp) + self.openssl_assert(pp[0] != self._ffi.NULL) + pp = self._ffi.gc( + pp, lambda pointer: self._lib.OPENSSL_free(pointer[0]) + ) + self.openssl_assert(res > 0) + return self._ffi.buffer(pp[0], res)[:] + + def x25519_load_public_bytes(self, data): + # When we drop support for CRYPTOGRAPHY_OPENSSL_LESS_THAN_111 we can + # switch this to EVP_PKEY_new_raw_public_key + if len(data) != 32: + raise ValueError("An X25519 public key is 32 bytes long") + + evp_pkey = self._create_evp_pkey_gc() + res = self._lib.EVP_PKEY_set_type(evp_pkey, self._lib.NID_X25519) + backend.openssl_assert(res == 1) + res = self._lib.EVP_PKEY_set1_tls_encodedpoint( + evp_pkey, data, len(data) + ) + backend.openssl_assert(res == 1) + return _X25519PublicKey(self, evp_pkey) + + def x25519_load_private_bytes(self, data): + # When we drop support for CRYPTOGRAPHY_OPENSSL_LESS_THAN_111 we can + # switch this to EVP_PKEY_new_raw_private_key and drop the + # zeroed_bytearray garbage. + # OpenSSL only has facilities for loading PKCS8 formatted private + # keys using the algorithm identifiers specified in + # https://tools.ietf.org/html/draft-ietf-curdle-pkix-09. + # This is the standard PKCS8 prefix for a 32 byte X25519 key. 
+ # The form is: + # 0:d=0 hl=2 l= 46 cons: SEQUENCE + # 2:d=1 hl=2 l= 1 prim: INTEGER :00 + # 5:d=1 hl=2 l= 5 cons: SEQUENCE + # 7:d=2 hl=2 l= 3 prim: OBJECT :1.3.101.110 + # 12:d=1 hl=2 l= 34 prim: OCTET STRING (the key) + # Of course there's a bit more complexity. In reality OCTET STRING + # contains an OCTET STRING of length 32! So the last two bytes here + # are \x04\x20, which is an OCTET STRING of length 32. + if len(data) != 32: + raise ValueError("An X25519 private key is 32 bytes long") + + pkcs8_prefix = b'0.\x02\x01\x000\x05\x06\x03+en\x04"\x04 ' + with self._zeroed_bytearray(48) as ba: + ba[0:16] = pkcs8_prefix + ba[16:] = data + bio = self._bytes_to_bio(ba) + evp_pkey = backend._lib.d2i_PrivateKey_bio(bio.bio, self._ffi.NULL) + + self.openssl_assert(evp_pkey != self._ffi.NULL) + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + self.openssl_assert( + self._lib.EVP_PKEY_id(evp_pkey) == self._lib.EVP_PKEY_X25519 + ) + return _X25519PrivateKey(self, evp_pkey) + + def _evp_pkey_keygen_gc(self, nid): + evp_pkey_ctx = self._lib.EVP_PKEY_CTX_new_id(nid, self._ffi.NULL) + self.openssl_assert(evp_pkey_ctx != self._ffi.NULL) + evp_pkey_ctx = self._ffi.gc(evp_pkey_ctx, self._lib.EVP_PKEY_CTX_free) + res = self._lib.EVP_PKEY_keygen_init(evp_pkey_ctx) + self.openssl_assert(res == 1) + evp_ppkey = self._ffi.new("EVP_PKEY **") + res = self._lib.EVP_PKEY_keygen(evp_pkey_ctx, evp_ppkey) + self.openssl_assert(res == 1) + self.openssl_assert(evp_ppkey[0] != self._ffi.NULL) + evp_pkey = self._ffi.gc(evp_ppkey[0], self._lib.EVP_PKEY_free) + return evp_pkey + + def x25519_generate_key(self): + evp_pkey = self._evp_pkey_keygen_gc(self._lib.NID_X25519) + return _X25519PrivateKey(self, evp_pkey) + + def x25519_supported(self): + return self._lib.CRYPTOGRAPHY_OPENSSL_110_OR_GREATER + + def x448_load_public_bytes(self, data): + if len(data) != 56: + raise ValueError("An X448 public key is 56 bytes long") + + evp_pkey = self._lib.EVP_PKEY_new_raw_public_key( + self._lib.NID_X448, self._ffi.NULL, data, len(data) + ) + self.openssl_assert(evp_pkey != self._ffi.NULL) + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + return _X448PublicKey(self, evp_pkey) + + def x448_load_private_bytes(self, data): + if len(data) != 56: + raise ValueError("An X448 private key is 56 bytes long") + + data_ptr = self._ffi.from_buffer(data) + evp_pkey = self._lib.EVP_PKEY_new_raw_private_key( + self._lib.NID_X448, self._ffi.NULL, data_ptr, len(data) + ) + self.openssl_assert(evp_pkey != self._ffi.NULL) + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + return _X448PrivateKey(self, evp_pkey) + + def x448_generate_key(self): + evp_pkey = self._evp_pkey_keygen_gc(self._lib.NID_X448) + return _X448PrivateKey(self, evp_pkey) + + def x448_supported(self): + return not self._lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_111 + + def ed25519_supported(self): + return not self._lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_111B + + def ed25519_load_public_bytes(self, data): + utils._check_bytes("data", data) + + if len(data) != ed25519._ED25519_KEY_SIZE: + raise ValueError("An Ed25519 public key is 32 bytes long") + + evp_pkey = self._lib.EVP_PKEY_new_raw_public_key( + self._lib.NID_ED25519, self._ffi.NULL, data, len(data) + ) + self.openssl_assert(evp_pkey != self._ffi.NULL) + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + + return _Ed25519PublicKey(self, evp_pkey) + + def ed25519_load_private_bytes(self, data): + if len(data) != ed25519._ED25519_KEY_SIZE: + raise ValueError("An Ed25519 private key is 32 
bytes long") + + utils._check_byteslike("data", data) + data_ptr = self._ffi.from_buffer(data) + evp_pkey = self._lib.EVP_PKEY_new_raw_private_key( + self._lib.NID_ED25519, self._ffi.NULL, data_ptr, len(data) + ) + self.openssl_assert(evp_pkey != self._ffi.NULL) + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + + return _Ed25519PrivateKey(self, evp_pkey) + + def ed25519_generate_key(self): + evp_pkey = self._evp_pkey_keygen_gc(self._lib.NID_ED25519) + return _Ed25519PrivateKey(self, evp_pkey) + + def ed448_supported(self): + return not self._lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_111B + + def ed448_load_public_bytes(self, data): + utils._check_bytes("data", data) + if len(data) != _ED448_KEY_SIZE: + raise ValueError("An Ed448 public key is 57 bytes long") + + evp_pkey = self._lib.EVP_PKEY_new_raw_public_key( + self._lib.NID_ED448, self._ffi.NULL, data, len(data) + ) + self.openssl_assert(evp_pkey != self._ffi.NULL) + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + + return _Ed448PublicKey(self, evp_pkey) + + def ed448_load_private_bytes(self, data): + utils._check_byteslike("data", data) + if len(data) != _ED448_KEY_SIZE: + raise ValueError("An Ed448 private key is 57 bytes long") + + data_ptr = self._ffi.from_buffer(data) + evp_pkey = self._lib.EVP_PKEY_new_raw_private_key( + self._lib.NID_ED448, self._ffi.NULL, data_ptr, len(data) + ) + self.openssl_assert(evp_pkey != self._ffi.NULL) + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + + return _Ed448PrivateKey(self, evp_pkey) + + def ed448_generate_key(self): + evp_pkey = self._evp_pkey_keygen_gc(self._lib.NID_ED448) + return _Ed448PrivateKey(self, evp_pkey) + + def derive_scrypt(self, key_material, salt, length, n, r, p): + buf = self._ffi.new("unsigned char[]", length) + key_material_ptr = self._ffi.from_buffer(key_material) + res = self._lib.EVP_PBE_scrypt( + key_material_ptr, len(key_material), salt, len(salt), n, r, p, + scrypt._MEM_LIMIT, buf, length + ) + if res != 1: + errors = self._consume_errors() + if not self._lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_111: + # This error is only added to the stack in 1.1.1+ + self.openssl_assert( + errors[0]._lib_reason_match( + self._lib.ERR_LIB_EVP, + self._lib.ERR_R_MALLOC_FAILURE + ) or + errors[0]._lib_reason_match( + self._lib.ERR_LIB_EVP, + self._lib.EVP_R_MEMORY_LIMIT_EXCEEDED + ) + ) + + # memory required formula explained here: + # https://blog.filippo.io/the-scrypt-parameters/ + min_memory = 128 * n * r // (1024**2) + raise MemoryError( + "Not enough memory to derive key. These parameters require" + " {} MB of memory.".format(min_memory) + ) + return self._ffi.buffer(buf)[:] + + def aead_cipher_supported(self, cipher): + cipher_name = aead._aead_cipher_name(cipher) + return ( + self._lib.EVP_get_cipherbyname(cipher_name) != self._ffi.NULL + ) + + @contextlib.contextmanager + def _zeroed_bytearray(self, length): + """ + This method creates a bytearray, which we copy data into (hopefully + also from a mutable buffer that can be dynamically erased!), and then + zero when we're done. + """ + ba = bytearray(length) + try: + yield ba + finally: + self._zero_data(ba, length) + + def _zero_data(self, data, length): + # We clear things this way because at the moment we're not + # sure of a better way that can guarantee it overwrites the + # memory of a bytearray and doesn't just replace the underlying char *. 
+ for i in range(length): + data[i] = 0 + + @contextlib.contextmanager + def _zeroed_null_terminated_buf(self, data): + """ + This method takes bytes, which can be a bytestring or a mutable + buffer like a bytearray, and yields a null-terminated version of that + data. This is required because PKCS12_parse doesn't take a length with + its password char * and ffi.from_buffer doesn't provide null + termination. So, to support zeroing the data via bytearray we + need to build this ridiculous construct that copies the memory, but + zeroes it after use. + """ + if data is None: + yield self._ffi.NULL + else: + data_len = len(data) + buf = self._ffi.new("char[]", data_len + 1) + self._ffi.memmove(buf, data, data_len) + try: + yield buf + finally: + # Cast to a uint8_t * so we can assign by integer + self._zero_data(self._ffi.cast("uint8_t *", buf), data_len) + + def load_key_and_certificates_from_pkcs12(self, data, password): + if password is not None: + utils._check_byteslike("password", password) + + bio = self._bytes_to_bio(data) + p12 = self._lib.d2i_PKCS12_bio(bio.bio, self._ffi.NULL) + if p12 == self._ffi.NULL: + self._consume_errors() + raise ValueError("Could not deserialize PKCS12 data") + + p12 = self._ffi.gc(p12, self._lib.PKCS12_free) + evp_pkey_ptr = self._ffi.new("EVP_PKEY **") + x509_ptr = self._ffi.new("X509 **") + sk_x509_ptr = self._ffi.new("Cryptography_STACK_OF_X509 **") + with self._zeroed_null_terminated_buf(password) as password_buf: + res = self._lib.PKCS12_parse( + p12, password_buf, evp_pkey_ptr, x509_ptr, sk_x509_ptr + ) + + if res == 0: + self._consume_errors() + raise ValueError("Invalid password or PKCS12 data") + + cert = None + key = None + additional_certificates = [] + + if evp_pkey_ptr[0] != self._ffi.NULL: + evp_pkey = self._ffi.gc(evp_pkey_ptr[0], self._lib.EVP_PKEY_free) + key = self._evp_pkey_to_private_key(evp_pkey) + + if x509_ptr[0] != self._ffi.NULL: + x509 = self._ffi.gc(x509_ptr[0], self._lib.X509_free) + cert = _Certificate(self, x509) + + if sk_x509_ptr[0] != self._ffi.NULL: + sk_x509 = self._ffi.gc(sk_x509_ptr[0], self._lib.sk_X509_free) + num = self._lib.sk_X509_num(sk_x509_ptr[0]) + for i in range(num): + x509 = self._lib.sk_X509_value(sk_x509, i) + x509 = self._ffi.gc(x509, self._lib.X509_free) + self.openssl_assert(x509 != self._ffi.NULL) + additional_certificates.append(_Certificate(self, x509)) + + return (key, cert, additional_certificates) + + def poly1305_supported(self): + return self._lib.Cryptography_HAS_POLY1305 == 1 + + def create_poly1305_ctx(self, key): + utils._check_byteslike("key", key) + if len(key) != _POLY1305_KEY_SIZE: + raise ValueError("A poly1305 key is 32 bytes long") + + return _Poly1305Context(self, key) + + +class GetCipherByName(object): + def __init__(self, fmt): + self._fmt = fmt + + def __call__(self, backend, cipher, mode): + cipher_name = self._fmt.format(cipher=cipher, mode=mode).lower() + return backend._lib.EVP_get_cipherbyname(cipher_name.encode("ascii")) + + +def _get_xts_cipher(backend, cipher, mode): + cipher_name = "aes-{}-xts".format(cipher.key_size // 2) + return backend._lib.EVP_get_cipherbyname(cipher_name.encode("ascii")) + + +backend = Backend() diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/ciphers.py b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/ciphers.py new file mode 100644 index 0000000..94b48f5 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/ciphers.py @@ -0,0 +1,229 @@ +# This file is dual 
licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +from cryptography import utils +from cryptography.exceptions import InvalidTag, UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.primitives import ciphers +from cryptography.hazmat.primitives.ciphers import modes + + +@utils.register_interface(ciphers.CipherContext) +@utils.register_interface(ciphers.AEADCipherContext) +@utils.register_interface(ciphers.AEADEncryptionContext) +@utils.register_interface(ciphers.AEADDecryptionContext) +class _CipherContext(object): + _ENCRYPT = 1 + _DECRYPT = 0 + + def __init__(self, backend, cipher, mode, operation): + self._backend = backend + self._cipher = cipher + self._mode = mode + self._operation = operation + self._tag = None + + if isinstance(self._cipher, ciphers.BlockCipherAlgorithm): + self._block_size_bytes = self._cipher.block_size // 8 + else: + self._block_size_bytes = 1 + + ctx = self._backend._lib.EVP_CIPHER_CTX_new() + ctx = self._backend._ffi.gc( + ctx, self._backend._lib.EVP_CIPHER_CTX_free + ) + + registry = self._backend._cipher_registry + try: + adapter = registry[type(cipher), type(mode)] + except KeyError: + raise UnsupportedAlgorithm( + "cipher {} in {} mode is not supported " + "by this backend.".format( + cipher.name, mode.name if mode else mode), + _Reasons.UNSUPPORTED_CIPHER + ) + + evp_cipher = adapter(self._backend, cipher, mode) + if evp_cipher == self._backend._ffi.NULL: + msg = "cipher {0.name} ".format(cipher) + if mode is not None: + msg += "in {0.name} mode ".format(mode) + msg += ( + "is not supported by this backend (Your version of OpenSSL " + "may be too old. Current version: {}.)" + ).format(self._backend.openssl_version_text()) + raise UnsupportedAlgorithm(msg, _Reasons.UNSUPPORTED_CIPHER) + + if isinstance(mode, modes.ModeWithInitializationVector): + iv_nonce = self._backend._ffi.from_buffer( + mode.initialization_vector + ) + elif isinstance(mode, modes.ModeWithTweak): + iv_nonce = self._backend._ffi.from_buffer(mode.tweak) + elif isinstance(mode, modes.ModeWithNonce): + iv_nonce = self._backend._ffi.from_buffer(mode.nonce) + elif isinstance(cipher, modes.ModeWithNonce): + iv_nonce = self._backend._ffi.from_buffer(cipher.nonce) + else: + iv_nonce = self._backend._ffi.NULL + # begin init with cipher and operation type + res = self._backend._lib.EVP_CipherInit_ex(ctx, evp_cipher, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + operation) + self._backend.openssl_assert(res != 0) + # set the key length to handle variable key ciphers + res = self._backend._lib.EVP_CIPHER_CTX_set_key_length( + ctx, len(cipher.key) + ) + self._backend.openssl_assert(res != 0) + if isinstance(mode, modes.GCM): + res = self._backend._lib.EVP_CIPHER_CTX_ctrl( + ctx, self._backend._lib.EVP_CTRL_AEAD_SET_IVLEN, + len(iv_nonce), self._backend._ffi.NULL + ) + self._backend.openssl_assert(res != 0) + if mode.tag is not None: + res = self._backend._lib.EVP_CIPHER_CTX_ctrl( + ctx, self._backend._lib.EVP_CTRL_AEAD_SET_TAG, + len(mode.tag), mode.tag + ) + self._backend.openssl_assert(res != 0) + self._tag = mode.tag + elif ( + self._operation == self._DECRYPT and + self._backend._lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_102 and + not self._backend._lib.CRYPTOGRAPHY_IS_LIBRESSL + ): + raise NotImplementedError( + "delayed passing of GCM tag requires OpenSSL >= 1.0.2." 
+ " To use this feature please update OpenSSL" + ) + + # pass key/iv + res = self._backend._lib.EVP_CipherInit_ex( + ctx, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + self._backend._ffi.from_buffer(cipher.key), + iv_nonce, + operation + ) + self._backend.openssl_assert(res != 0) + # We purposely disable padding here as it's handled higher up in the + # API. + self._backend._lib.EVP_CIPHER_CTX_set_padding(ctx, 0) + self._ctx = ctx + + def update(self, data): + buf = bytearray(len(data) + self._block_size_bytes - 1) + n = self.update_into(data, buf) + return bytes(buf[:n]) + + def update_into(self, data, buf): + if len(buf) < (len(data) + self._block_size_bytes - 1): + raise ValueError( + "buffer must be at least {} bytes for this " + "payload".format(len(data) + self._block_size_bytes - 1) + ) + + buf = self._backend._ffi.cast( + "unsigned char *", self._backend._ffi.from_buffer(buf) + ) + outlen = self._backend._ffi.new("int *") + res = self._backend._lib.EVP_CipherUpdate( + self._ctx, buf, outlen, + self._backend._ffi.from_buffer(data), len(data) + ) + self._backend.openssl_assert(res != 0) + return outlen[0] + + def finalize(self): + # OpenSSL 1.0.1 on Ubuntu 12.04 (and possibly other distributions) + # appears to have a bug where you must make at least one call to update + # even if you are only using authenticate_additional_data or the + # GCM tag will be wrong. An (empty) call to update resolves this + # and is harmless for all other versions of OpenSSL. + if isinstance(self._mode, modes.GCM): + self.update(b"") + + if ( + self._operation == self._DECRYPT and + isinstance(self._mode, modes.ModeWithAuthenticationTag) and + self.tag is None + ): + raise ValueError( + "Authentication tag must be provided when decrypting." + ) + + buf = self._backend._ffi.new("unsigned char[]", self._block_size_bytes) + outlen = self._backend._ffi.new("int *") + res = self._backend._lib.EVP_CipherFinal_ex(self._ctx, buf, outlen) + if res == 0: + errors = self._backend._consume_errors() + + if not errors and isinstance(self._mode, modes.GCM): + raise InvalidTag + + self._backend.openssl_assert( + errors[0]._lib_reason_match( + self._backend._lib.ERR_LIB_EVP, + self._backend._lib.EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH + ) + ) + raise ValueError( + "The length of the provided data is not a multiple of " + "the block length." + ) + + if (isinstance(self._mode, modes.GCM) and + self._operation == self._ENCRYPT): + tag_buf = self._backend._ffi.new( + "unsigned char[]", self._block_size_bytes + ) + res = self._backend._lib.EVP_CIPHER_CTX_ctrl( + self._ctx, self._backend._lib.EVP_CTRL_AEAD_GET_TAG, + self._block_size_bytes, tag_buf + ) + self._backend.openssl_assert(res != 0) + self._tag = self._backend._ffi.buffer(tag_buf)[:] + + res = self._backend._lib.EVP_CIPHER_CTX_cleanup(self._ctx) + self._backend.openssl_assert(res == 1) + return self._backend._ffi.buffer(buf)[:outlen[0]] + + def finalize_with_tag(self, tag): + if ( + self._backend._lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_102 and + not self._backend._lib.CRYPTOGRAPHY_IS_LIBRESSL + ): + raise NotImplementedError( + "finalize_with_tag requires OpenSSL >= 1.0.2. 
To use this " + "method please update OpenSSL" + ) + if len(tag) < self._mode._min_tag_length: + raise ValueError( + "Authentication tag must be {} bytes or longer.".format( + self._mode._min_tag_length) + ) + res = self._backend._lib.EVP_CIPHER_CTX_ctrl( + self._ctx, self._backend._lib.EVP_CTRL_AEAD_SET_TAG, + len(tag), tag + ) + self._backend.openssl_assert(res != 0) + self._tag = tag + return self.finalize() + + def authenticate_additional_data(self, data): + outlen = self._backend._ffi.new("int *") + res = self._backend._lib.EVP_CipherUpdate( + self._ctx, self._backend._ffi.NULL, outlen, + self._backend._ffi.from_buffer(data), len(data) + ) + self._backend.openssl_assert(res != 0) + + tag = utils.read_only_property("_tag") diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/cmac.py b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/cmac.py new file mode 100644 index 0000000..d4d46f5 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/cmac.py @@ -0,0 +1,81 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + + +from cryptography import utils +from cryptography.exceptions import ( + InvalidSignature, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.primitives import constant_time +from cryptography.hazmat.primitives.ciphers.modes import CBC + + +class _CMACContext(object): + def __init__(self, backend, algorithm, ctx=None): + if not backend.cmac_algorithm_supported(algorithm): + raise UnsupportedAlgorithm("This backend does not support CMAC.", + _Reasons.UNSUPPORTED_CIPHER) + + self._backend = backend + self._key = algorithm.key + self._algorithm = algorithm + self._output_length = algorithm.block_size // 8 + + if ctx is None: + registry = self._backend._cipher_registry + adapter = registry[type(algorithm), CBC] + + evp_cipher = adapter(self._backend, algorithm, CBC) + + ctx = self._backend._lib.CMAC_CTX_new() + + self._backend.openssl_assert(ctx != self._backend._ffi.NULL) + ctx = self._backend._ffi.gc(ctx, self._backend._lib.CMAC_CTX_free) + + key_ptr = self._backend._ffi.from_buffer(self._key) + res = self._backend._lib.CMAC_Init( + ctx, key_ptr, len(self._key), + evp_cipher, self._backend._ffi.NULL + ) + self._backend.openssl_assert(res == 1) + + self._ctx = ctx + + algorithm = utils.read_only_property("_algorithm") + + def update(self, data): + res = self._backend._lib.CMAC_Update(self._ctx, data, len(data)) + self._backend.openssl_assert(res == 1) + + def finalize(self): + buf = self._backend._ffi.new("unsigned char[]", self._output_length) + length = self._backend._ffi.new("size_t *", self._output_length) + res = self._backend._lib.CMAC_Final( + self._ctx, buf, length + ) + self._backend.openssl_assert(res == 1) + + self._ctx = None + + return self._backend._ffi.buffer(buf)[:] + + def copy(self): + copied_ctx = self._backend._lib.CMAC_CTX_new() + copied_ctx = self._backend._ffi.gc( + copied_ctx, self._backend._lib.CMAC_CTX_free + ) + res = self._backend._lib.CMAC_CTX_copy( + copied_ctx, self._ctx + ) + self._backend.openssl_assert(res == 1) + return _CMACContext( + self._backend, self._algorithm, ctx=copied_ctx + ) + + def verify(self, signature): + digest = self.finalize() + if not constant_time.bytes_eq(digest, signature): + raise InvalidSignature("Signature did not match digest.") 
diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/decode_asn1.py b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/decode_asn1.py new file mode 100644 index 0000000..47c6c65 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/decode_asn1.py @@ -0,0 +1,900 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import datetime +import ipaddress + +import six + +from cryptography import x509 +from cryptography.hazmat._der import DERReader, INTEGER, NULL, SEQUENCE +from cryptography.x509.extensions import _TLS_FEATURE_TYPE_TO_ENUM +from cryptography.x509.name import _ASN1_TYPE_TO_ENUM +from cryptography.x509.oid import ( + CRLEntryExtensionOID, CertificatePoliciesOID, ExtensionOID, + OCSPExtensionOID, +) + + +def _obj2txt(backend, obj): + # Set to 80 on the recommendation of + # https://www.openssl.org/docs/crypto/OBJ_nid2ln.html#return_values + # + # But OIDs longer than this occur in real life (e.g. Active + # Directory makes some very long OIDs). So we need to detect + # and properly handle the case where the default buffer is not + # big enough. + # + buf_len = 80 + buf = backend._ffi.new("char[]", buf_len) + + # 'res' is the number of bytes that *would* be written if the + # buffer is large enough. If 'res' > buf_len - 1, we need to + # alloc a big-enough buffer and go again. + res = backend._lib.OBJ_obj2txt(buf, buf_len, obj, 1) + if res > buf_len - 1: # account for terminating null byte + buf_len = res + 1 + buf = backend._ffi.new("char[]", buf_len) + res = backend._lib.OBJ_obj2txt(buf, buf_len, obj, 1) + backend.openssl_assert(res > 0) + return backend._ffi.buffer(buf, res)[:].decode() + + +def _decode_x509_name_entry(backend, x509_name_entry): + obj = backend._lib.X509_NAME_ENTRY_get_object(x509_name_entry) + backend.openssl_assert(obj != backend._ffi.NULL) + data = backend._lib.X509_NAME_ENTRY_get_data(x509_name_entry) + backend.openssl_assert(data != backend._ffi.NULL) + value = _asn1_string_to_utf8(backend, data) + oid = _obj2txt(backend, obj) + type = _ASN1_TYPE_TO_ENUM[data.type] + + return x509.NameAttribute(x509.ObjectIdentifier(oid), value, type) + + +def _decode_x509_name(backend, x509_name): + count = backend._lib.X509_NAME_entry_count(x509_name) + attributes = [] + prev_set_id = -1 + for x in range(count): + entry = backend._lib.X509_NAME_get_entry(x509_name, x) + attribute = _decode_x509_name_entry(backend, entry) + set_id = backend._lib.Cryptography_X509_NAME_ENTRY_set(entry) + if set_id != prev_set_id: + attributes.append(set([attribute])) + else: + # is in the same RDN a previous entry + attributes[-1].add(attribute) + prev_set_id = set_id + + return x509.Name(x509.RelativeDistinguishedName(rdn) for rdn in attributes) + + +def _decode_general_names(backend, gns): + num = backend._lib.sk_GENERAL_NAME_num(gns) + names = [] + for i in range(num): + gn = backend._lib.sk_GENERAL_NAME_value(gns, i) + backend.openssl_assert(gn != backend._ffi.NULL) + names.append(_decode_general_name(backend, gn)) + + return names + + +def _decode_general_name(backend, gn): + if gn.type == backend._lib.GEN_DNS: + # Convert to bytes and then decode to utf8. We don't use + # asn1_string_to_utf8 here because it doesn't properly convert + # utf8 from ia5strings. 
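# (Illustrative aside, not part of the upstream code: the general names
#  decoded here surface through the public API. Assuming a certificate
#  object ``cert`` loaded elsewhere, e.g. via x509.load_pem_x509_certificate:
#    >>> san = cert.extensions.get_extension_for_class(
#    ...     x509.SubjectAlternativeName).value
#    >>> san.get_values_for_type(x509.DNSName)
#    ['example.com', 'www.example.com']
#  where the listed hostnames are placeholder values.)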
+ data = _asn1_string_to_bytes(backend, gn.d.dNSName).decode("utf8") + # We don't use the constructor for DNSName so we can bypass validation + # This allows us to create DNSName objects that have unicode chars + # when a certificate (against the RFC) contains them. + return x509.DNSName._init_without_validation(data) + elif gn.type == backend._lib.GEN_URI: + # Convert to bytes and then decode to utf8. We don't use + # asn1_string_to_utf8 here because it doesn't properly convert + # utf8 from ia5strings. + data = _asn1_string_to_bytes( + backend, gn.d.uniformResourceIdentifier + ).decode("utf8") + # We don't use the constructor for URI so we can bypass validation + # This allows us to create URI objects that have unicode chars + # when a certificate (against the RFC) contains them. + return x509.UniformResourceIdentifier._init_without_validation(data) + elif gn.type == backend._lib.GEN_RID: + oid = _obj2txt(backend, gn.d.registeredID) + return x509.RegisteredID(x509.ObjectIdentifier(oid)) + elif gn.type == backend._lib.GEN_IPADD: + data = _asn1_string_to_bytes(backend, gn.d.iPAddress) + data_len = len(data) + if data_len == 8 or data_len == 32: + # This is an IPv4 or IPv6 Network and not a single IP. This + # type of data appears in Name Constraints. Unfortunately, + # ipaddress doesn't support packed bytes + netmask. Additionally, + # IPv6Network can only handle CIDR rather than the full 16 byte + # netmask. To handle this we convert the netmask to integer, then + # find the first 0 bit, which will be the prefix. If another 1 + # bit is present after that the netmask is invalid. + base = ipaddress.ip_address(data[:data_len // 2]) + netmask = ipaddress.ip_address(data[data_len // 2:]) + bits = bin(int(netmask))[2:] + prefix = bits.find('0') + # If no 0 bits are found it is a /32 or /128 + if prefix == -1: + prefix = len(bits) + + if "1" in bits[prefix:]: + raise ValueError("Invalid netmask") + + ip = ipaddress.ip_network(base.exploded + u"/{}".format(prefix)) + else: + ip = ipaddress.ip_address(data) + + return x509.IPAddress(ip) + elif gn.type == backend._lib.GEN_DIRNAME: + return x509.DirectoryName( + _decode_x509_name(backend, gn.d.directoryName) + ) + elif gn.type == backend._lib.GEN_EMAIL: + # Convert to bytes and then decode to utf8. We don't use + # asn1_string_to_utf8 here because it doesn't properly convert + # utf8 from ia5strings. + data = _asn1_string_to_bytes(backend, gn.d.rfc822Name).decode("utf8") + # We don't use the constructor for RFC822Name so we can bypass + # validation. This allows us to create RFC822Name objects that have + # unicode chars when a certificate (against the RFC) contains them. 
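# (Illustrative aside, not part of the upstream code: the netmask-to-prefix
#  conversion in the GEN_IPADD branch above can be reproduced with the
#  standard library alone, here for an assumed 255.255.255.0 mask:
#    >>> import ipaddress
#    >>> bits = bin(int(ipaddress.ip_address(u"255.255.255.0")))[2:]
#    >>> bits.find("0")
#    24
#  find() returning -1 means there is no zero bit, i.e. a /32 or /128, and
#  any "1" bit after the first "0" marks the mask as invalid.)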
+ return x509.RFC822Name._init_without_validation(data) + elif gn.type == backend._lib.GEN_OTHERNAME: + type_id = _obj2txt(backend, gn.d.otherName.type_id) + value = _asn1_to_der(backend, gn.d.otherName.value) + return x509.OtherName(x509.ObjectIdentifier(type_id), value) + else: + # x400Address or ediPartyName + raise x509.UnsupportedGeneralNameType( + "{} is not a supported type".format( + x509._GENERAL_NAMES.get(gn.type, gn.type) + ), + gn.type + ) + + +def _decode_ocsp_no_check(backend, ext): + return x509.OCSPNoCheck() + + +def _decode_crl_number(backend, ext): + asn1_int = backend._ffi.cast("ASN1_INTEGER *", ext) + asn1_int = backend._ffi.gc(asn1_int, backend._lib.ASN1_INTEGER_free) + return x509.CRLNumber(_asn1_integer_to_int(backend, asn1_int)) + + +def _decode_delta_crl_indicator(backend, ext): + asn1_int = backend._ffi.cast("ASN1_INTEGER *", ext) + asn1_int = backend._ffi.gc(asn1_int, backend._lib.ASN1_INTEGER_free) + return x509.DeltaCRLIndicator(_asn1_integer_to_int(backend, asn1_int)) + + +class _X509ExtensionParser(object): + def __init__(self, ext_count, get_ext, handlers): + self.ext_count = ext_count + self.get_ext = get_ext + self.handlers = handlers + + def parse(self, backend, x509_obj): + extensions = [] + seen_oids = set() + for i in range(self.ext_count(backend, x509_obj)): + ext = self.get_ext(backend, x509_obj, i) + backend.openssl_assert(ext != backend._ffi.NULL) + crit = backend._lib.X509_EXTENSION_get_critical(ext) + critical = crit == 1 + oid = x509.ObjectIdentifier( + _obj2txt(backend, backend._lib.X509_EXTENSION_get_object(ext)) + ) + if oid in seen_oids: + raise x509.DuplicateExtension( + "Duplicate {} extension found".format(oid), oid + ) + + # These OIDs are only supported in OpenSSL 1.1.0+ but we want + # to support them in all versions of OpenSSL so we decode them + # ourselves. + if oid == ExtensionOID.TLS_FEATURE: + # The extension contents are a SEQUENCE OF INTEGERs. + data = backend._lib.X509_EXTENSION_get_data(ext) + data_bytes = _asn1_string_to_bytes(backend, data) + features = DERReader(data_bytes).read_single_element(SEQUENCE) + parsed = [] + while not features.is_empty(): + parsed.append(features.read_element(INTEGER).as_integer()) + # Map the features to their enum value. + value = x509.TLSFeature( + [_TLS_FEATURE_TYPE_TO_ENUM[x] for x in parsed] + ) + extensions.append(x509.Extension(oid, critical, value)) + seen_oids.add(oid) + continue + elif oid == ExtensionOID.PRECERT_POISON: + data = backend._lib.X509_EXTENSION_get_data(ext) + # The contents of the extension must be an ASN.1 NULL. 
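# (Illustrative aside, not part of the upstream code: DER encodes NULL as
#  the two bytes 05 00, so the check below amounts to
#    >>> DERReader(b"\x05\x00").read_single_element(NULL).check_empty()
#  which passes silently, while any other payload raises an error.)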
+ reader = DERReader(_asn1_string_to_bytes(backend, data)) + reader.read_single_element(NULL).check_empty() + extensions.append(x509.Extension( + oid, critical, x509.PrecertPoison() + )) + seen_oids.add(oid) + continue + + try: + handler = self.handlers[oid] + except KeyError: + # Dump the DER payload into an UnrecognizedExtension object + data = backend._lib.X509_EXTENSION_get_data(ext) + backend.openssl_assert(data != backend._ffi.NULL) + der = backend._ffi.buffer(data.data, data.length)[:] + unrecognized = x509.UnrecognizedExtension(oid, der) + extensions.append( + x509.Extension(oid, critical, unrecognized) + ) + else: + ext_data = backend._lib.X509V3_EXT_d2i(ext) + if ext_data == backend._ffi.NULL: + backend._consume_errors() + raise ValueError( + "The {} extension is invalid and can't be " + "parsed".format(oid) + ) + + value = handler(backend, ext_data) + extensions.append(x509.Extension(oid, critical, value)) + + seen_oids.add(oid) + + return x509.Extensions(extensions) + + +def _decode_certificate_policies(backend, cp): + cp = backend._ffi.cast("Cryptography_STACK_OF_POLICYINFO *", cp) + cp = backend._ffi.gc(cp, backend._lib.CERTIFICATEPOLICIES_free) + + num = backend._lib.sk_POLICYINFO_num(cp) + certificate_policies = [] + for i in range(num): + qualifiers = None + pi = backend._lib.sk_POLICYINFO_value(cp, i) + oid = x509.ObjectIdentifier(_obj2txt(backend, pi.policyid)) + if pi.qualifiers != backend._ffi.NULL: + qnum = backend._lib.sk_POLICYQUALINFO_num(pi.qualifiers) + qualifiers = [] + for j in range(qnum): + pqi = backend._lib.sk_POLICYQUALINFO_value( + pi.qualifiers, j + ) + pqualid = x509.ObjectIdentifier( + _obj2txt(backend, pqi.pqualid) + ) + if pqualid == CertificatePoliciesOID.CPS_QUALIFIER: + cpsuri = backend._ffi.buffer( + pqi.d.cpsuri.data, pqi.d.cpsuri.length + )[:].decode('ascii') + qualifiers.append(cpsuri) + else: + assert pqualid == CertificatePoliciesOID.CPS_USER_NOTICE + user_notice = _decode_user_notice( + backend, pqi.d.usernotice + ) + qualifiers.append(user_notice) + + certificate_policies.append( + x509.PolicyInformation(oid, qualifiers) + ) + + return x509.CertificatePolicies(certificate_policies) + + +def _decode_user_notice(backend, un): + explicit_text = None + notice_reference = None + + if un.exptext != backend._ffi.NULL: + explicit_text = _asn1_string_to_utf8(backend, un.exptext) + + if un.noticeref != backend._ffi.NULL: + organization = _asn1_string_to_utf8( + backend, un.noticeref.organization + ) + + num = backend._lib.sk_ASN1_INTEGER_num( + un.noticeref.noticenos + ) + notice_numbers = [] + for i in range(num): + asn1_int = backend._lib.sk_ASN1_INTEGER_value( + un.noticeref.noticenos, i + ) + notice_num = _asn1_integer_to_int(backend, asn1_int) + notice_numbers.append(notice_num) + + notice_reference = x509.NoticeReference( + organization, notice_numbers + ) + + return x509.UserNotice(notice_reference, explicit_text) + + +def _decode_basic_constraints(backend, bc_st): + basic_constraints = backend._ffi.cast("BASIC_CONSTRAINTS *", bc_st) + basic_constraints = backend._ffi.gc( + basic_constraints, backend._lib.BASIC_CONSTRAINTS_free + ) + # The byte representation of an ASN.1 boolean true is \xff. OpenSSL + # chooses to just map this to its ordinal value, so true is 255 and + # false is 0. 
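# (Illustrative aside, not part of the upstream code: the decoded extension
#  is what the public API exposes. For an assumed CA certificate ``cert``:
#    >>> bc = cert.extensions.get_extension_for_class(
#    ...     x509.BasicConstraints).value
#    >>> bc.ca, bc.path_length
#    (True, None)
#  meaning a CA certificate with no path length limit.)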
+ ca = basic_constraints.ca == 255 + path_length = _asn1_integer_to_int_or_none( + backend, basic_constraints.pathlen + ) + + return x509.BasicConstraints(ca, path_length) + + +def _decode_subject_key_identifier(backend, asn1_string): + asn1_string = backend._ffi.cast("ASN1_OCTET_STRING *", asn1_string) + asn1_string = backend._ffi.gc( + asn1_string, backend._lib.ASN1_OCTET_STRING_free + ) + return x509.SubjectKeyIdentifier( + backend._ffi.buffer(asn1_string.data, asn1_string.length)[:] + ) + + +def _decode_authority_key_identifier(backend, akid): + akid = backend._ffi.cast("AUTHORITY_KEYID *", akid) + akid = backend._ffi.gc(akid, backend._lib.AUTHORITY_KEYID_free) + key_identifier = None + authority_cert_issuer = None + + if akid.keyid != backend._ffi.NULL: + key_identifier = backend._ffi.buffer( + akid.keyid.data, akid.keyid.length + )[:] + + if akid.issuer != backend._ffi.NULL: + authority_cert_issuer = _decode_general_names( + backend, akid.issuer + ) + + authority_cert_serial_number = _asn1_integer_to_int_or_none( + backend, akid.serial + ) + + return x509.AuthorityKeyIdentifier( + key_identifier, authority_cert_issuer, authority_cert_serial_number + ) + + +def _decode_authority_information_access(backend, aia): + aia = backend._ffi.cast("Cryptography_STACK_OF_ACCESS_DESCRIPTION *", aia) + aia = backend._ffi.gc( + aia, + lambda x: backend._lib.sk_ACCESS_DESCRIPTION_pop_free( + x, backend._ffi.addressof( + backend._lib._original_lib, "ACCESS_DESCRIPTION_free" + ) + ) + ) + num = backend._lib.sk_ACCESS_DESCRIPTION_num(aia) + access_descriptions = [] + for i in range(num): + ad = backend._lib.sk_ACCESS_DESCRIPTION_value(aia, i) + backend.openssl_assert(ad.method != backend._ffi.NULL) + oid = x509.ObjectIdentifier(_obj2txt(backend, ad.method)) + backend.openssl_assert(ad.location != backend._ffi.NULL) + gn = _decode_general_name(backend, ad.location) + access_descriptions.append(x509.AccessDescription(oid, gn)) + + return x509.AuthorityInformationAccess(access_descriptions) + + +def _decode_key_usage(backend, bit_string): + bit_string = backend._ffi.cast("ASN1_BIT_STRING *", bit_string) + bit_string = backend._ffi.gc(bit_string, backend._lib.ASN1_BIT_STRING_free) + get_bit = backend._lib.ASN1_BIT_STRING_get_bit + digital_signature = get_bit(bit_string, 0) == 1 + content_commitment = get_bit(bit_string, 1) == 1 + key_encipherment = get_bit(bit_string, 2) == 1 + data_encipherment = get_bit(bit_string, 3) == 1 + key_agreement = get_bit(bit_string, 4) == 1 + key_cert_sign = get_bit(bit_string, 5) == 1 + crl_sign = get_bit(bit_string, 6) == 1 + encipher_only = get_bit(bit_string, 7) == 1 + decipher_only = get_bit(bit_string, 8) == 1 + return x509.KeyUsage( + digital_signature, + content_commitment, + key_encipherment, + data_encipherment, + key_agreement, + key_cert_sign, + crl_sign, + encipher_only, + decipher_only + ) + + +def _decode_general_names_extension(backend, gns): + gns = backend._ffi.cast("GENERAL_NAMES *", gns) + gns = backend._ffi.gc(gns, backend._lib.GENERAL_NAMES_free) + general_names = _decode_general_names(backend, gns) + return general_names + + +def _decode_subject_alt_name(backend, ext): + return x509.SubjectAlternativeName( + _decode_general_names_extension(backend, ext) + ) + + +def _decode_issuer_alt_name(backend, ext): + return x509.IssuerAlternativeName( + _decode_general_names_extension(backend, ext) + ) + + +def _decode_name_constraints(backend, nc): + nc = backend._ffi.cast("NAME_CONSTRAINTS *", nc) + nc = backend._ffi.gc(nc, backend._lib.NAME_CONSTRAINTS_free) + 
permitted = _decode_general_subtrees(backend, nc.permittedSubtrees) + excluded = _decode_general_subtrees(backend, nc.excludedSubtrees) + return x509.NameConstraints( + permitted_subtrees=permitted, excluded_subtrees=excluded + ) + + +def _decode_general_subtrees(backend, stack_subtrees): + if stack_subtrees == backend._ffi.NULL: + return None + + num = backend._lib.sk_GENERAL_SUBTREE_num(stack_subtrees) + subtrees = [] + + for i in range(num): + obj = backend._lib.sk_GENERAL_SUBTREE_value(stack_subtrees, i) + backend.openssl_assert(obj != backend._ffi.NULL) + name = _decode_general_name(backend, obj.base) + subtrees.append(name) + + return subtrees + + +def _decode_issuing_dist_point(backend, idp): + idp = backend._ffi.cast("ISSUING_DIST_POINT *", idp) + idp = backend._ffi.gc(idp, backend._lib.ISSUING_DIST_POINT_free) + if idp.distpoint != backend._ffi.NULL: + full_name, relative_name = _decode_distpoint(backend, idp.distpoint) + else: + full_name = None + relative_name = None + + only_user = idp.onlyuser == 255 + only_ca = idp.onlyCA == 255 + indirect_crl = idp.indirectCRL == 255 + only_attr = idp.onlyattr == 255 + if idp.onlysomereasons != backend._ffi.NULL: + only_some_reasons = _decode_reasons(backend, idp.onlysomereasons) + else: + only_some_reasons = None + + return x509.IssuingDistributionPoint( + full_name, relative_name, only_user, only_ca, only_some_reasons, + indirect_crl, only_attr + ) + + +def _decode_policy_constraints(backend, pc): + pc = backend._ffi.cast("POLICY_CONSTRAINTS *", pc) + pc = backend._ffi.gc(pc, backend._lib.POLICY_CONSTRAINTS_free) + + require_explicit_policy = _asn1_integer_to_int_or_none( + backend, pc.requireExplicitPolicy + ) + inhibit_policy_mapping = _asn1_integer_to_int_or_none( + backend, pc.inhibitPolicyMapping + ) + + return x509.PolicyConstraints( + require_explicit_policy, inhibit_policy_mapping + ) + + +def _decode_extended_key_usage(backend, sk): + sk = backend._ffi.cast("Cryptography_STACK_OF_ASN1_OBJECT *", sk) + sk = backend._ffi.gc(sk, backend._lib.sk_ASN1_OBJECT_free) + num = backend._lib.sk_ASN1_OBJECT_num(sk) + ekus = [] + + for i in range(num): + obj = backend._lib.sk_ASN1_OBJECT_value(sk, i) + backend.openssl_assert(obj != backend._ffi.NULL) + oid = x509.ObjectIdentifier(_obj2txt(backend, obj)) + ekus.append(oid) + + return x509.ExtendedKeyUsage(ekus) + + +_DISTPOINT_TYPE_FULLNAME = 0 +_DISTPOINT_TYPE_RELATIVENAME = 1 + + +def _decode_dist_points(backend, cdps): + cdps = backend._ffi.cast("Cryptography_STACK_OF_DIST_POINT *", cdps) + cdps = backend._ffi.gc(cdps, backend._lib.CRL_DIST_POINTS_free) + + num = backend._lib.sk_DIST_POINT_num(cdps) + dist_points = [] + for i in range(num): + full_name = None + relative_name = None + crl_issuer = None + reasons = None + cdp = backend._lib.sk_DIST_POINT_value(cdps, i) + if cdp.reasons != backend._ffi.NULL: + reasons = _decode_reasons(backend, cdp.reasons) + + if cdp.CRLissuer != backend._ffi.NULL: + crl_issuer = _decode_general_names(backend, cdp.CRLissuer) + + # Certificates may have a crl_issuer/reasons and no distribution + # point so make sure it's not null. 
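# (Illustrative aside, not part of the upstream code: in RFC 5280 all three
#  DistributionPoint fields -- distributionPoint, reasons and cRLIssuer --
#  are OPTIONAL, which is why each is NULL-checked independently. For an
#  assumed certificate ``cert`` the decoded points are reachable as:
#    >>> cdps = cert.extensions.get_extension_for_class(
#    ...     x509.CRLDistributionPoints).value
#    >>> [dp.full_name for dp in cdps]
#  )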
+ if cdp.distpoint != backend._ffi.NULL: + full_name, relative_name = _decode_distpoint( + backend, cdp.distpoint + ) + + dist_points.append( + x509.DistributionPoint( + full_name, relative_name, reasons, crl_issuer + ) + ) + + return dist_points + + +# ReasonFlags ::= BIT STRING { +# unused (0), +# keyCompromise (1), +# cACompromise (2), +# affiliationChanged (3), +# superseded (4), +# cessationOfOperation (5), +# certificateHold (6), +# privilegeWithdrawn (7), +# aACompromise (8) } +_REASON_BIT_MAPPING = { + 1: x509.ReasonFlags.key_compromise, + 2: x509.ReasonFlags.ca_compromise, + 3: x509.ReasonFlags.affiliation_changed, + 4: x509.ReasonFlags.superseded, + 5: x509.ReasonFlags.cessation_of_operation, + 6: x509.ReasonFlags.certificate_hold, + 7: x509.ReasonFlags.privilege_withdrawn, + 8: x509.ReasonFlags.aa_compromise, +} + + +def _decode_reasons(backend, reasons): + # We will check each bit from RFC 5280 + enum_reasons = [] + for bit_position, reason in six.iteritems(_REASON_BIT_MAPPING): + if backend._lib.ASN1_BIT_STRING_get_bit(reasons, bit_position): + enum_reasons.append(reason) + + return frozenset(enum_reasons) + + +def _decode_distpoint(backend, distpoint): + if distpoint.type == _DISTPOINT_TYPE_FULLNAME: + full_name = _decode_general_names(backend, distpoint.name.fullname) + return full_name, None + + # OpenSSL code doesn't test for a specific type for + # relativename, everything that isn't fullname is considered + # relativename. Per RFC 5280: + # + # DistributionPointName ::= CHOICE { + # fullName [0] GeneralNames, + # nameRelativeToCRLIssuer [1] RelativeDistinguishedName } + rns = distpoint.name.relativename + rnum = backend._lib.sk_X509_NAME_ENTRY_num(rns) + attributes = set() + for i in range(rnum): + rn = backend._lib.sk_X509_NAME_ENTRY_value( + rns, i + ) + backend.openssl_assert(rn != backend._ffi.NULL) + attributes.add( + _decode_x509_name_entry(backend, rn) + ) + + relative_name = x509.RelativeDistinguishedName(attributes) + + return None, relative_name + + +def _decode_crl_distribution_points(backend, cdps): + dist_points = _decode_dist_points(backend, cdps) + return x509.CRLDistributionPoints(dist_points) + + +def _decode_freshest_crl(backend, cdps): + dist_points = _decode_dist_points(backend, cdps) + return x509.FreshestCRL(dist_points) + + +def _decode_inhibit_any_policy(backend, asn1_int): + asn1_int = backend._ffi.cast("ASN1_INTEGER *", asn1_int) + asn1_int = backend._ffi.gc(asn1_int, backend._lib.ASN1_INTEGER_free) + skip_certs = _asn1_integer_to_int(backend, asn1_int) + return x509.InhibitAnyPolicy(skip_certs) + + +def _decode_precert_signed_certificate_timestamps(backend, asn1_scts): + from cryptography.hazmat.backends.openssl.x509 import ( + _SignedCertificateTimestamp + ) + asn1_scts = backend._ffi.cast("Cryptography_STACK_OF_SCT *", asn1_scts) + asn1_scts = backend._ffi.gc(asn1_scts, backend._lib.SCT_LIST_free) + + scts = [] + for i in range(backend._lib.sk_SCT_num(asn1_scts)): + sct = backend._lib.sk_SCT_value(asn1_scts, i) + + scts.append(_SignedCertificateTimestamp(backend, asn1_scts, sct)) + return x509.PrecertificateSignedCertificateTimestamps(scts) + + +# CRLReason ::= ENUMERATED { +# unspecified (0), +# keyCompromise (1), +# cACompromise (2), +# affiliationChanged (3), +# superseded (4), +# cessationOfOperation (5), +# certificateHold (6), +# -- value 7 is not used +# removeFromCRL (8), +# privilegeWithdrawn (9), +# aACompromise (10) } +_CRL_ENTRY_REASON_CODE_TO_ENUM = { + 0: x509.ReasonFlags.unspecified, + 1: x509.ReasonFlags.key_compromise, + 2: 
x509.ReasonFlags.ca_compromise, + 3: x509.ReasonFlags.affiliation_changed, + 4: x509.ReasonFlags.superseded, + 5: x509.ReasonFlags.cessation_of_operation, + 6: x509.ReasonFlags.certificate_hold, + 8: x509.ReasonFlags.remove_from_crl, + 9: x509.ReasonFlags.privilege_withdrawn, + 10: x509.ReasonFlags.aa_compromise, +} + + +_CRL_ENTRY_REASON_ENUM_TO_CODE = { + x509.ReasonFlags.unspecified: 0, + x509.ReasonFlags.key_compromise: 1, + x509.ReasonFlags.ca_compromise: 2, + x509.ReasonFlags.affiliation_changed: 3, + x509.ReasonFlags.superseded: 4, + x509.ReasonFlags.cessation_of_operation: 5, + x509.ReasonFlags.certificate_hold: 6, + x509.ReasonFlags.remove_from_crl: 8, + x509.ReasonFlags.privilege_withdrawn: 9, + x509.ReasonFlags.aa_compromise: 10 +} + + +def _decode_crl_reason(backend, enum): + enum = backend._ffi.cast("ASN1_ENUMERATED *", enum) + enum = backend._ffi.gc(enum, backend._lib.ASN1_ENUMERATED_free) + code = backend._lib.ASN1_ENUMERATED_get(enum) + + try: + return x509.CRLReason(_CRL_ENTRY_REASON_CODE_TO_ENUM[code]) + except KeyError: + raise ValueError("Unsupported reason code: {}".format(code)) + + +def _decode_invalidity_date(backend, inv_date): + generalized_time = backend._ffi.cast( + "ASN1_GENERALIZEDTIME *", inv_date + ) + generalized_time = backend._ffi.gc( + generalized_time, backend._lib.ASN1_GENERALIZEDTIME_free + ) + return x509.InvalidityDate( + _parse_asn1_generalized_time(backend, generalized_time) + ) + + +def _decode_cert_issuer(backend, gns): + gns = backend._ffi.cast("GENERAL_NAMES *", gns) + gns = backend._ffi.gc(gns, backend._lib.GENERAL_NAMES_free) + general_names = _decode_general_names(backend, gns) + return x509.CertificateIssuer(general_names) + + +def _asn1_to_der(backend, asn1_type): + buf = backend._ffi.new("unsigned char **") + res = backend._lib.i2d_ASN1_TYPE(asn1_type, buf) + backend.openssl_assert(res >= 0) + backend.openssl_assert(buf[0] != backend._ffi.NULL) + buf = backend._ffi.gc( + buf, lambda buffer: backend._lib.OPENSSL_free(buffer[0]) + ) + return backend._ffi.buffer(buf[0], res)[:] + + +def _asn1_integer_to_int(backend, asn1_int): + bn = backend._lib.ASN1_INTEGER_to_BN(asn1_int, backend._ffi.NULL) + backend.openssl_assert(bn != backend._ffi.NULL) + bn = backend._ffi.gc(bn, backend._lib.BN_free) + return backend._bn_to_int(bn) + + +def _asn1_integer_to_int_or_none(backend, asn1_int): + if asn1_int == backend._ffi.NULL: + return None + else: + return _asn1_integer_to_int(backend, asn1_int) + + +def _asn1_string_to_bytes(backend, asn1_string): + return backend._ffi.buffer(asn1_string.data, asn1_string.length)[:] + + +def _asn1_string_to_ascii(backend, asn1_string): + return _asn1_string_to_bytes(backend, asn1_string).decode("ascii") + + +def _asn1_string_to_utf8(backend, asn1_string): + buf = backend._ffi.new("unsigned char **") + res = backend._lib.ASN1_STRING_to_UTF8(buf, asn1_string) + if res == -1: + raise ValueError( + "Unsupported ASN1 string type. 
Type: {}".format(asn1_string.type) + ) + + backend.openssl_assert(buf[0] != backend._ffi.NULL) + buf = backend._ffi.gc( + buf, lambda buffer: backend._lib.OPENSSL_free(buffer[0]) + ) + return backend._ffi.buffer(buf[0], res)[:].decode('utf8') + + +def _parse_asn1_time(backend, asn1_time): + backend.openssl_assert(asn1_time != backend._ffi.NULL) + generalized_time = backend._lib.ASN1_TIME_to_generalizedtime( + asn1_time, backend._ffi.NULL + ) + if generalized_time == backend._ffi.NULL: + raise ValueError( + "Couldn't parse ASN.1 time as generalizedtime {!r}".format( + _asn1_string_to_bytes(backend, asn1_time) + ) + ) + + generalized_time = backend._ffi.gc( + generalized_time, backend._lib.ASN1_GENERALIZEDTIME_free + ) + return _parse_asn1_generalized_time(backend, generalized_time) + + +def _parse_asn1_generalized_time(backend, generalized_time): + time = _asn1_string_to_ascii( + backend, backend._ffi.cast("ASN1_STRING *", generalized_time) + ) + return datetime.datetime.strptime(time, "%Y%m%d%H%M%SZ") + + +def _decode_nonce(backend, nonce): + nonce = backend._ffi.cast("ASN1_OCTET_STRING *", nonce) + nonce = backend._ffi.gc(nonce, backend._lib.ASN1_OCTET_STRING_free) + return x509.OCSPNonce(_asn1_string_to_bytes(backend, nonce)) + + +_EXTENSION_HANDLERS_NO_SCT = { + ExtensionOID.BASIC_CONSTRAINTS: _decode_basic_constraints, + ExtensionOID.SUBJECT_KEY_IDENTIFIER: _decode_subject_key_identifier, + ExtensionOID.KEY_USAGE: _decode_key_usage, + ExtensionOID.SUBJECT_ALTERNATIVE_NAME: _decode_subject_alt_name, + ExtensionOID.EXTENDED_KEY_USAGE: _decode_extended_key_usage, + ExtensionOID.AUTHORITY_KEY_IDENTIFIER: _decode_authority_key_identifier, + ExtensionOID.AUTHORITY_INFORMATION_ACCESS: ( + _decode_authority_information_access + ), + ExtensionOID.CERTIFICATE_POLICIES: _decode_certificate_policies, + ExtensionOID.CRL_DISTRIBUTION_POINTS: _decode_crl_distribution_points, + ExtensionOID.FRESHEST_CRL: _decode_freshest_crl, + ExtensionOID.OCSP_NO_CHECK: _decode_ocsp_no_check, + ExtensionOID.INHIBIT_ANY_POLICY: _decode_inhibit_any_policy, + ExtensionOID.ISSUER_ALTERNATIVE_NAME: _decode_issuer_alt_name, + ExtensionOID.NAME_CONSTRAINTS: _decode_name_constraints, + ExtensionOID.POLICY_CONSTRAINTS: _decode_policy_constraints, +} +_EXTENSION_HANDLERS = _EXTENSION_HANDLERS_NO_SCT.copy() +_EXTENSION_HANDLERS[ + ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS +] = _decode_precert_signed_certificate_timestamps + + +_REVOKED_EXTENSION_HANDLERS = { + CRLEntryExtensionOID.CRL_REASON: _decode_crl_reason, + CRLEntryExtensionOID.INVALIDITY_DATE: _decode_invalidity_date, + CRLEntryExtensionOID.CERTIFICATE_ISSUER: _decode_cert_issuer, +} + +_CRL_EXTENSION_HANDLERS = { + ExtensionOID.CRL_NUMBER: _decode_crl_number, + ExtensionOID.DELTA_CRL_INDICATOR: _decode_delta_crl_indicator, + ExtensionOID.AUTHORITY_KEY_IDENTIFIER: _decode_authority_key_identifier, + ExtensionOID.ISSUER_ALTERNATIVE_NAME: _decode_issuer_alt_name, + ExtensionOID.AUTHORITY_INFORMATION_ACCESS: ( + _decode_authority_information_access + ), + ExtensionOID.ISSUING_DISTRIBUTION_POINT: _decode_issuing_dist_point, + ExtensionOID.FRESHEST_CRL: _decode_freshest_crl, +} + +_OCSP_REQ_EXTENSION_HANDLERS = { + OCSPExtensionOID.NONCE: _decode_nonce, +} + +_OCSP_BASICRESP_EXTENSION_HANDLERS = { + OCSPExtensionOID.NONCE: _decode_nonce, +} + +_CERTIFICATE_EXTENSION_PARSER_NO_SCT = _X509ExtensionParser( + ext_count=lambda backend, x: backend._lib.X509_get_ext_count(x), + get_ext=lambda backend, x, i: backend._lib.X509_get_ext(x, i), + 
handlers=_EXTENSION_HANDLERS_NO_SCT +) + +_CERTIFICATE_EXTENSION_PARSER = _X509ExtensionParser( + ext_count=lambda backend, x: backend._lib.X509_get_ext_count(x), + get_ext=lambda backend, x, i: backend._lib.X509_get_ext(x, i), + handlers=_EXTENSION_HANDLERS +) + +_CSR_EXTENSION_PARSER = _X509ExtensionParser( + ext_count=lambda backend, x: backend._lib.sk_X509_EXTENSION_num(x), + get_ext=lambda backend, x, i: backend._lib.sk_X509_EXTENSION_value(x, i), + handlers=_EXTENSION_HANDLERS +) + +_REVOKED_CERTIFICATE_EXTENSION_PARSER = _X509ExtensionParser( + ext_count=lambda backend, x: backend._lib.X509_REVOKED_get_ext_count(x), + get_ext=lambda backend, x, i: backend._lib.X509_REVOKED_get_ext(x, i), + handlers=_REVOKED_EXTENSION_HANDLERS, +) + +_CRL_EXTENSION_PARSER = _X509ExtensionParser( + ext_count=lambda backend, x: backend._lib.X509_CRL_get_ext_count(x), + get_ext=lambda backend, x, i: backend._lib.X509_CRL_get_ext(x, i), + handlers=_CRL_EXTENSION_HANDLERS, +) + +_OCSP_REQ_EXT_PARSER = _X509ExtensionParser( + ext_count=lambda backend, x: backend._lib.OCSP_REQUEST_get_ext_count(x), + get_ext=lambda backend, x, i: backend._lib.OCSP_REQUEST_get_ext(x, i), + handlers=_OCSP_REQ_EXTENSION_HANDLERS, +) + +_OCSP_BASICRESP_EXT_PARSER = _X509ExtensionParser( + ext_count=lambda backend, x: backend._lib.OCSP_BASICRESP_get_ext_count(x), + get_ext=lambda backend, x, i: backend._lib.OCSP_BASICRESP_get_ext(x, i), + handlers=_OCSP_BASICRESP_EXTENSION_HANDLERS, +) diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/dh.py b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/dh.py new file mode 100644 index 0000000..095f062 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/dh.py @@ -0,0 +1,280 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
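# (Illustrative usage sketch, not part of the upstream module: the classes
#  below back cryptography's public DH API, which a caller exercises roughly
#  as follows. The generator and key size are example choices; parameter
#  generation at 2048 bits can take several seconds.
#    >>> from cryptography.hazmat.backends import default_backend
#    >>> from cryptography.hazmat.primitives.asymmetric import dh
#    >>> parameters = dh.generate_parameters(
#    ...     generator=2, key_size=2048, backend=default_backend())
#    >>> priv_a = parameters.generate_private_key()
#    >>> priv_b = parameters.generate_private_key()
#    >>> priv_a.exchange(priv_b.public_key()) == priv_b.exchange(priv_a.public_key())
#    True
#  )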
+ +from __future__ import absolute_import, division, print_function + +from cryptography import utils +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric import dh + + +def _dh_params_dup(dh_cdata, backend): + lib = backend._lib + ffi = backend._ffi + + param_cdata = lib.DHparams_dup(dh_cdata) + backend.openssl_assert(param_cdata != ffi.NULL) + param_cdata = ffi.gc(param_cdata, lib.DH_free) + if lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_102: + # In OpenSSL versions < 1.0.2 or libressl DHparams_dup don't copy q + q = ffi.new("BIGNUM **") + lib.DH_get0_pqg(dh_cdata, ffi.NULL, q, ffi.NULL) + q_dup = lib.BN_dup(q[0]) + res = lib.DH_set0_pqg(param_cdata, ffi.NULL, q_dup, ffi.NULL) + backend.openssl_assert(res == 1) + + return param_cdata + + +def _dh_cdata_to_parameters(dh_cdata, backend): + param_cdata = _dh_params_dup(dh_cdata, backend) + return _DHParameters(backend, param_cdata) + + +@utils.register_interface(dh.DHParametersWithSerialization) +class _DHParameters(object): + def __init__(self, backend, dh_cdata): + self._backend = backend + self._dh_cdata = dh_cdata + + def parameter_numbers(self): + p = self._backend._ffi.new("BIGNUM **") + g = self._backend._ffi.new("BIGNUM **") + q = self._backend._ffi.new("BIGNUM **") + self._backend._lib.DH_get0_pqg(self._dh_cdata, p, q, g) + self._backend.openssl_assert(p[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(g[0] != self._backend._ffi.NULL) + if q[0] == self._backend._ffi.NULL: + q_val = None + else: + q_val = self._backend._bn_to_int(q[0]) + return dh.DHParameterNumbers( + p=self._backend._bn_to_int(p[0]), + g=self._backend._bn_to_int(g[0]), + q=q_val + ) + + def generate_private_key(self): + return self._backend.generate_dh_private_key(self) + + def parameter_bytes(self, encoding, format): + if format is not serialization.ParameterFormat.PKCS3: + raise ValueError( + "Only PKCS3 serialization is supported" + ) + if not self._backend._lib.Cryptography_HAS_EVP_PKEY_DHX: + q = self._backend._ffi.new("BIGNUM **") + self._backend._lib.DH_get0_pqg(self._dh_cdata, + self._backend._ffi.NULL, + q, + self._backend._ffi.NULL) + if q[0] != self._backend._ffi.NULL: + raise UnsupportedAlgorithm( + "DH X9.42 serialization is not supported", + _Reasons.UNSUPPORTED_SERIALIZATION) + + return self._backend._parameter_bytes( + encoding, + format, + self._dh_cdata + ) + + +def _handle_dh_compute_key_error(errors, backend): + lib = backend._lib + + backend.openssl_assert( + errors[0]._lib_reason_match( + lib.ERR_LIB_DH, lib.DH_R_INVALID_PUBKEY + ) + ) + + raise ValueError("Public key value is invalid for this exchange.") + + +def _get_dh_num_bits(backend, dh_cdata): + p = backend._ffi.new("BIGNUM **") + backend._lib.DH_get0_pqg(dh_cdata, p, + backend._ffi.NULL, + backend._ffi.NULL) + backend.openssl_assert(p[0] != backend._ffi.NULL) + return backend._lib.BN_num_bits(p[0]) + + +@utils.register_interface(dh.DHPrivateKeyWithSerialization) +class _DHPrivateKey(object): + def __init__(self, backend, dh_cdata, evp_pkey): + self._backend = backend + self._dh_cdata = dh_cdata + self._evp_pkey = evp_pkey + self._key_size_bytes = self._backend._lib.DH_size(dh_cdata) + + @property + def key_size(self): + return _get_dh_num_bits(self._backend, self._dh_cdata) + + def private_numbers(self): + p = self._backend._ffi.new("BIGNUM **") + g = self._backend._ffi.new("BIGNUM **") + q = self._backend._ffi.new("BIGNUM **") + 
self._backend._lib.DH_get0_pqg(self._dh_cdata, p, q, g) + self._backend.openssl_assert(p[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(g[0] != self._backend._ffi.NULL) + if q[0] == self._backend._ffi.NULL: + q_val = None + else: + q_val = self._backend._bn_to_int(q[0]) + pub_key = self._backend._ffi.new("BIGNUM **") + priv_key = self._backend._ffi.new("BIGNUM **") + self._backend._lib.DH_get0_key(self._dh_cdata, pub_key, priv_key) + self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(priv_key[0] != self._backend._ffi.NULL) + return dh.DHPrivateNumbers( + public_numbers=dh.DHPublicNumbers( + parameter_numbers=dh.DHParameterNumbers( + p=self._backend._bn_to_int(p[0]), + g=self._backend._bn_to_int(g[0]), + q=q_val + ), + y=self._backend._bn_to_int(pub_key[0]) + ), + x=self._backend._bn_to_int(priv_key[0]) + ) + + def exchange(self, peer_public_key): + + buf = self._backend._ffi.new("unsigned char[]", self._key_size_bytes) + pub_key = self._backend._ffi.new("BIGNUM **") + self._backend._lib.DH_get0_key(peer_public_key._dh_cdata, pub_key, + self._backend._ffi.NULL) + self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL) + res = self._backend._lib.DH_compute_key( + buf, + pub_key[0], + self._dh_cdata + ) + + if res == -1: + errors = self._backend._consume_errors() + return _handle_dh_compute_key_error(errors, self._backend) + else: + self._backend.openssl_assert(res >= 1) + + key = self._backend._ffi.buffer(buf)[:res] + pad = self._key_size_bytes - len(key) + + if pad > 0: + key = (b"\x00" * pad) + key + + return key + + def public_key(self): + dh_cdata = _dh_params_dup(self._dh_cdata, self._backend) + pub_key = self._backend._ffi.new("BIGNUM **") + self._backend._lib.DH_get0_key(self._dh_cdata, + pub_key, self._backend._ffi.NULL) + self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL) + pub_key_dup = self._backend._lib.BN_dup(pub_key[0]) + self._backend.openssl_assert(pub_key_dup != self._backend._ffi.NULL) + + res = self._backend._lib.DH_set0_key(dh_cdata, + pub_key_dup, + self._backend._ffi.NULL) + self._backend.openssl_assert(res == 1) + evp_pkey = self._backend._dh_cdata_to_evp_pkey(dh_cdata) + return _DHPublicKey(self._backend, dh_cdata, evp_pkey) + + def parameters(self): + return _dh_cdata_to_parameters(self._dh_cdata, self._backend) + + def private_bytes(self, encoding, format, encryption_algorithm): + if format is not serialization.PrivateFormat.PKCS8: + raise ValueError( + "DH private keys support only PKCS8 serialization" + ) + if not self._backend._lib.Cryptography_HAS_EVP_PKEY_DHX: + q = self._backend._ffi.new("BIGNUM **") + self._backend._lib.DH_get0_pqg(self._dh_cdata, + self._backend._ffi.NULL, + q, + self._backend._ffi.NULL) + if q[0] != self._backend._ffi.NULL: + raise UnsupportedAlgorithm( + "DH X9.42 serialization is not supported", + _Reasons.UNSUPPORTED_SERIALIZATION) + + return self._backend._private_key_bytes( + encoding, + format, + encryption_algorithm, + self._evp_pkey, + self._dh_cdata + ) + + +@utils.register_interface(dh.DHPublicKeyWithSerialization) +class _DHPublicKey(object): + def __init__(self, backend, dh_cdata, evp_pkey): + self._backend = backend + self._dh_cdata = dh_cdata + self._evp_pkey = evp_pkey + self._key_size_bits = _get_dh_num_bits(self._backend, self._dh_cdata) + + @property + def key_size(self): + return self._key_size_bits + + def public_numbers(self): + p = self._backend._ffi.new("BIGNUM **") + g = self._backend._ffi.new("BIGNUM **") + q = 
self._backend._ffi.new("BIGNUM **") + self._backend._lib.DH_get0_pqg(self._dh_cdata, p, q, g) + self._backend.openssl_assert(p[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(g[0] != self._backend._ffi.NULL) + if q[0] == self._backend._ffi.NULL: + q_val = None + else: + q_val = self._backend._bn_to_int(q[0]) + pub_key = self._backend._ffi.new("BIGNUM **") + self._backend._lib.DH_get0_key(self._dh_cdata, + pub_key, self._backend._ffi.NULL) + self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL) + return dh.DHPublicNumbers( + parameter_numbers=dh.DHParameterNumbers( + p=self._backend._bn_to_int(p[0]), + g=self._backend._bn_to_int(g[0]), + q=q_val + ), + y=self._backend._bn_to_int(pub_key[0]) + ) + + def parameters(self): + return _dh_cdata_to_parameters(self._dh_cdata, self._backend) + + def public_bytes(self, encoding, format): + if format is not serialization.PublicFormat.SubjectPublicKeyInfo: + raise ValueError( + "DH public keys support only " + "SubjectPublicKeyInfo serialization" + ) + + if not self._backend._lib.Cryptography_HAS_EVP_PKEY_DHX: + q = self._backend._ffi.new("BIGNUM **") + self._backend._lib.DH_get0_pqg(self._dh_cdata, + self._backend._ffi.NULL, + q, + self._backend._ffi.NULL) + if q[0] != self._backend._ffi.NULL: + raise UnsupportedAlgorithm( + "DH X9.42 serialization is not supported", + _Reasons.UNSUPPORTED_SERIALIZATION) + + return self._backend._public_key_bytes( + encoding, + format, + self, + self._evp_pkey, + None + ) diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/dsa.py b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/dsa.py new file mode 100644 index 0000000..de61f08 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/dsa.py @@ -0,0 +1,268 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +from cryptography import utils +from cryptography.exceptions import InvalidSignature +from cryptography.hazmat.backends.openssl.utils import ( + _calculate_digest_and_algorithm, _check_not_prehashed, + _warn_sign_verify_deprecated +) +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import ( + AsymmetricSignatureContext, AsymmetricVerificationContext, dsa +) + + +def _dsa_sig_sign(backend, private_key, data): + sig_buf_len = backend._lib.DSA_size(private_key._dsa_cdata) + sig_buf = backend._ffi.new("unsigned char[]", sig_buf_len) + buflen = backend._ffi.new("unsigned int *") + + # The first parameter passed to DSA_sign is unused by OpenSSL but + # must be an integer. + res = backend._lib.DSA_sign( + 0, data, len(data), sig_buf, buflen, private_key._dsa_cdata + ) + backend.openssl_assert(res == 1) + backend.openssl_assert(buflen[0]) + + return backend._ffi.buffer(sig_buf)[:buflen[0]] + + +def _dsa_sig_verify(backend, public_key, signature, data): + # The first parameter passed to DSA_verify is unused by OpenSSL but + # must be an integer. 
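# (Illustrative aside, not part of the upstream code: these helpers back the
#  public DSA API, which is typically driven as follows; the key size and
#  message are example values.
#    >>> from cryptography.hazmat.backends import default_backend
#    >>> from cryptography.hazmat.primitives import hashes
#    >>> from cryptography.hazmat.primitives.asymmetric import dsa
#    >>> private_key = dsa.generate_private_key(
#    ...     key_size=2048, backend=default_backend())
#    >>> signature = private_key.sign(b"signed data", hashes.SHA256())
#    >>> private_key.public_key().verify(
#    ...     signature, b"signed data", hashes.SHA256())
#  verify() returns None on success and raises InvalidSignature otherwise.)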
+ res = backend._lib.DSA_verify( + 0, data, len(data), signature, len(signature), public_key._dsa_cdata + ) + + if res != 1: + backend._consume_errors() + raise InvalidSignature + + +@utils.register_interface(AsymmetricVerificationContext) +class _DSAVerificationContext(object): + def __init__(self, backend, public_key, signature, algorithm): + self._backend = backend + self._public_key = public_key + self._signature = signature + self._algorithm = algorithm + + self._hash_ctx = hashes.Hash(self._algorithm, self._backend) + + def update(self, data): + self._hash_ctx.update(data) + + def verify(self): + data_to_verify = self._hash_ctx.finalize() + + _dsa_sig_verify( + self._backend, self._public_key, self._signature, data_to_verify + ) + + +@utils.register_interface(AsymmetricSignatureContext) +class _DSASignatureContext(object): + def __init__(self, backend, private_key, algorithm): + self._backend = backend + self._private_key = private_key + self._algorithm = algorithm + self._hash_ctx = hashes.Hash(self._algorithm, self._backend) + + def update(self, data): + self._hash_ctx.update(data) + + def finalize(self): + data_to_sign = self._hash_ctx.finalize() + return _dsa_sig_sign(self._backend, self._private_key, data_to_sign) + + +@utils.register_interface(dsa.DSAParametersWithNumbers) +class _DSAParameters(object): + def __init__(self, backend, dsa_cdata): + self._backend = backend + self._dsa_cdata = dsa_cdata + + def parameter_numbers(self): + p = self._backend._ffi.new("BIGNUM **") + q = self._backend._ffi.new("BIGNUM **") + g = self._backend._ffi.new("BIGNUM **") + self._backend._lib.DSA_get0_pqg(self._dsa_cdata, p, q, g) + self._backend.openssl_assert(p[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(q[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(g[0] != self._backend._ffi.NULL) + return dsa.DSAParameterNumbers( + p=self._backend._bn_to_int(p[0]), + q=self._backend._bn_to_int(q[0]), + g=self._backend._bn_to_int(g[0]) + ) + + def generate_private_key(self): + return self._backend.generate_dsa_private_key(self) + + +@utils.register_interface(dsa.DSAPrivateKeyWithSerialization) +class _DSAPrivateKey(object): + def __init__(self, backend, dsa_cdata, evp_pkey): + self._backend = backend + self._dsa_cdata = dsa_cdata + self._evp_pkey = evp_pkey + + p = self._backend._ffi.new("BIGNUM **") + self._backend._lib.DSA_get0_pqg( + dsa_cdata, p, self._backend._ffi.NULL, self._backend._ffi.NULL + ) + self._backend.openssl_assert(p[0] != backend._ffi.NULL) + self._key_size = self._backend._lib.BN_num_bits(p[0]) + + key_size = utils.read_only_property("_key_size") + + def signer(self, signature_algorithm): + _warn_sign_verify_deprecated() + _check_not_prehashed(signature_algorithm) + return _DSASignatureContext(self._backend, self, signature_algorithm) + + def private_numbers(self): + p = self._backend._ffi.new("BIGNUM **") + q = self._backend._ffi.new("BIGNUM **") + g = self._backend._ffi.new("BIGNUM **") + pub_key = self._backend._ffi.new("BIGNUM **") + priv_key = self._backend._ffi.new("BIGNUM **") + self._backend._lib.DSA_get0_pqg(self._dsa_cdata, p, q, g) + self._backend.openssl_assert(p[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(q[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(g[0] != self._backend._ffi.NULL) + self._backend._lib.DSA_get0_key(self._dsa_cdata, pub_key, priv_key) + self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(priv_key[0] != self._backend._ffi.NULL) + return 
dsa.DSAPrivateNumbers( + public_numbers=dsa.DSAPublicNumbers( + parameter_numbers=dsa.DSAParameterNumbers( + p=self._backend._bn_to_int(p[0]), + q=self._backend._bn_to_int(q[0]), + g=self._backend._bn_to_int(g[0]) + ), + y=self._backend._bn_to_int(pub_key[0]) + ), + x=self._backend._bn_to_int(priv_key[0]) + ) + + def public_key(self): + dsa_cdata = self._backend._lib.DSAparams_dup(self._dsa_cdata) + self._backend.openssl_assert(dsa_cdata != self._backend._ffi.NULL) + dsa_cdata = self._backend._ffi.gc( + dsa_cdata, self._backend._lib.DSA_free + ) + pub_key = self._backend._ffi.new("BIGNUM **") + self._backend._lib.DSA_get0_key( + self._dsa_cdata, pub_key, self._backend._ffi.NULL + ) + self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL) + pub_key_dup = self._backend._lib.BN_dup(pub_key[0]) + res = self._backend._lib.DSA_set0_key( + dsa_cdata, pub_key_dup, self._backend._ffi.NULL + ) + self._backend.openssl_assert(res == 1) + evp_pkey = self._backend._dsa_cdata_to_evp_pkey(dsa_cdata) + return _DSAPublicKey(self._backend, dsa_cdata, evp_pkey) + + def parameters(self): + dsa_cdata = self._backend._lib.DSAparams_dup(self._dsa_cdata) + self._backend.openssl_assert(dsa_cdata != self._backend._ffi.NULL) + dsa_cdata = self._backend._ffi.gc( + dsa_cdata, self._backend._lib.DSA_free + ) + return _DSAParameters(self._backend, dsa_cdata) + + def private_bytes(self, encoding, format, encryption_algorithm): + return self._backend._private_key_bytes( + encoding, + format, + encryption_algorithm, + self._evp_pkey, + self._dsa_cdata + ) + + def sign(self, data, algorithm): + data, algorithm = _calculate_digest_and_algorithm( + self._backend, data, algorithm + ) + return _dsa_sig_sign(self._backend, self, data) + + +@utils.register_interface(dsa.DSAPublicKeyWithSerialization) +class _DSAPublicKey(object): + def __init__(self, backend, dsa_cdata, evp_pkey): + self._backend = backend + self._dsa_cdata = dsa_cdata + self._evp_pkey = evp_pkey + p = self._backend._ffi.new("BIGNUM **") + self._backend._lib.DSA_get0_pqg( + dsa_cdata, p, self._backend._ffi.NULL, self._backend._ffi.NULL + ) + self._backend.openssl_assert(p[0] != backend._ffi.NULL) + self._key_size = self._backend._lib.BN_num_bits(p[0]) + + key_size = utils.read_only_property("_key_size") + + def verifier(self, signature, signature_algorithm): + _warn_sign_verify_deprecated() + utils._check_bytes("signature", signature) + + _check_not_prehashed(signature_algorithm) + return _DSAVerificationContext( + self._backend, self, signature, signature_algorithm + ) + + def public_numbers(self): + p = self._backend._ffi.new("BIGNUM **") + q = self._backend._ffi.new("BIGNUM **") + g = self._backend._ffi.new("BIGNUM **") + pub_key = self._backend._ffi.new("BIGNUM **") + self._backend._lib.DSA_get0_pqg(self._dsa_cdata, p, q, g) + self._backend.openssl_assert(p[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(q[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(g[0] != self._backend._ffi.NULL) + self._backend._lib.DSA_get0_key( + self._dsa_cdata, pub_key, self._backend._ffi.NULL + ) + self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL) + return dsa.DSAPublicNumbers( + parameter_numbers=dsa.DSAParameterNumbers( + p=self._backend._bn_to_int(p[0]), + q=self._backend._bn_to_int(q[0]), + g=self._backend._bn_to_int(g[0]) + ), + y=self._backend._bn_to_int(pub_key[0]) + ) + + def parameters(self): + dsa_cdata = self._backend._lib.DSAparams_dup(self._dsa_cdata) + dsa_cdata = self._backend._ffi.gc( + dsa_cdata, 
self._backend._lib.DSA_free + ) + return _DSAParameters(self._backend, dsa_cdata) + + def public_bytes(self, encoding, format): + if format is serialization.PublicFormat.PKCS1: + raise ValueError( + "DSA public keys do not support PKCS1 serialization" + ) + + return self._backend._public_key_bytes( + encoding, + format, + self, + self._evp_pkey, + None + ) + + def verify(self, signature, data, algorithm): + data, algorithm = _calculate_digest_and_algorithm( + self._backend, data, algorithm + ) + return _dsa_sig_verify(self._backend, self, signature, data) diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/ec.py b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/ec.py new file mode 100644 index 0000000..3d8681b --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/ec.py @@ -0,0 +1,352 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +from cryptography import utils +from cryptography.exceptions import ( + InvalidSignature, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.backends.openssl.utils import ( + _calculate_digest_and_algorithm, _check_not_prehashed, + _warn_sign_verify_deprecated +) +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import ( + AsymmetricSignatureContext, AsymmetricVerificationContext, ec +) + + +def _check_signature_algorithm(signature_algorithm): + if not isinstance(signature_algorithm, ec.ECDSA): + raise UnsupportedAlgorithm( + "Unsupported elliptic curve signature algorithm.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + +def _ec_key_curve_sn(backend, ec_key): + group = backend._lib.EC_KEY_get0_group(ec_key) + backend.openssl_assert(group != backend._ffi.NULL) + + nid = backend._lib.EC_GROUP_get_curve_name(group) + # The following check is to find EC keys with unnamed curves and raise + # an error for now. + if nid == backend._lib.NID_undef: + raise NotImplementedError( + "ECDSA keys with unnamed curves are unsupported " + "at this time" + ) + + # This is like the above check, but it also catches the case where you + # explicitly encoded a curve with the same parameters as a named curve. + # Don't do that. + if ( + backend._lib.CRYPTOGRAPHY_OPENSSL_110_OR_GREATER and + backend._lib.EC_GROUP_get_asn1_flag(group) == 0 + ): + raise NotImplementedError( + "ECDSA keys with unnamed curves are unsupported " + "at this time" + ) + + curve_name = backend._lib.OBJ_nid2sn(nid) + backend.openssl_assert(curve_name != backend._ffi.NULL) + + sn = backend._ffi.string(curve_name).decode('ascii') + return sn + + +def _mark_asn1_named_ec_curve(backend, ec_cdata): + """ + Set the named curve flag on the EC_KEY. This causes OpenSSL to + serialize EC keys along with their curve OID which makes + deserialization easier. 
+ """ + + backend._lib.EC_KEY_set_asn1_flag( + ec_cdata, backend._lib.OPENSSL_EC_NAMED_CURVE + ) + + +def _sn_to_elliptic_curve(backend, sn): + try: + return ec._CURVE_TYPES[sn]() + except KeyError: + raise UnsupportedAlgorithm( + "{} is not a supported elliptic curve".format(sn), + _Reasons.UNSUPPORTED_ELLIPTIC_CURVE + ) + + +def _ecdsa_sig_sign(backend, private_key, data): + max_size = backend._lib.ECDSA_size(private_key._ec_key) + backend.openssl_assert(max_size > 0) + + sigbuf = backend._ffi.new("unsigned char[]", max_size) + siglen_ptr = backend._ffi.new("unsigned int[]", 1) + res = backend._lib.ECDSA_sign( + 0, data, len(data), sigbuf, siglen_ptr, private_key._ec_key + ) + backend.openssl_assert(res == 1) + return backend._ffi.buffer(sigbuf)[:siglen_ptr[0]] + + +def _ecdsa_sig_verify(backend, public_key, signature, data): + res = backend._lib.ECDSA_verify( + 0, data, len(data), signature, len(signature), public_key._ec_key + ) + if res != 1: + backend._consume_errors() + raise InvalidSignature + + +@utils.register_interface(AsymmetricSignatureContext) +class _ECDSASignatureContext(object): + def __init__(self, backend, private_key, algorithm): + self._backend = backend + self._private_key = private_key + self._digest = hashes.Hash(algorithm, backend) + + def update(self, data): + self._digest.update(data) + + def finalize(self): + digest = self._digest.finalize() + + return _ecdsa_sig_sign(self._backend, self._private_key, digest) + + +@utils.register_interface(AsymmetricVerificationContext) +class _ECDSAVerificationContext(object): + def __init__(self, backend, public_key, signature, algorithm): + self._backend = backend + self._public_key = public_key + self._signature = signature + self._digest = hashes.Hash(algorithm, backend) + + def update(self, data): + self._digest.update(data) + + def verify(self): + digest = self._digest.finalize() + _ecdsa_sig_verify( + self._backend, self._public_key, self._signature, digest + ) + + +@utils.register_interface(ec.EllipticCurvePrivateKeyWithSerialization) +class _EllipticCurvePrivateKey(object): + def __init__(self, backend, ec_key_cdata, evp_pkey): + self._backend = backend + self._ec_key = ec_key_cdata + self._evp_pkey = evp_pkey + + sn = _ec_key_curve_sn(backend, ec_key_cdata) + self._curve = _sn_to_elliptic_curve(backend, sn) + _mark_asn1_named_ec_curve(backend, ec_key_cdata) + + curve = utils.read_only_property("_curve") + + @property + def key_size(self): + return self.curve.key_size + + def signer(self, signature_algorithm): + _warn_sign_verify_deprecated() + _check_signature_algorithm(signature_algorithm) + _check_not_prehashed(signature_algorithm.algorithm) + return _ECDSASignatureContext( + self._backend, self, signature_algorithm.algorithm + ) + + def exchange(self, algorithm, peer_public_key): + if not ( + self._backend.elliptic_curve_exchange_algorithm_supported( + algorithm, self.curve + ) + ): + raise UnsupportedAlgorithm( + "This backend does not support the ECDH algorithm.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM + ) + + if peer_public_key.curve.name != self.curve.name: + raise ValueError( + "peer_public_key and self are not on the same curve" + ) + + group = self._backend._lib.EC_KEY_get0_group(self._ec_key) + z_len = (self._backend._lib.EC_GROUP_get_degree(group) + 7) // 8 + self._backend.openssl_assert(z_len > 0) + z_buf = self._backend._ffi.new("uint8_t[]", z_len) + peer_key = self._backend._lib.EC_KEY_get0_public_key( + peer_public_key._ec_key + ) + + r = self._backend._lib.ECDH_compute_key( + z_buf, z_len, 
peer_key, self._ec_key, self._backend._ffi.NULL + ) + self._backend.openssl_assert(r > 0) + return self._backend._ffi.buffer(z_buf)[:z_len] + + def public_key(self): + group = self._backend._lib.EC_KEY_get0_group(self._ec_key) + self._backend.openssl_assert(group != self._backend._ffi.NULL) + + curve_nid = self._backend._lib.EC_GROUP_get_curve_name(group) + + public_ec_key = self._backend._lib.EC_KEY_new_by_curve_name(curve_nid) + self._backend.openssl_assert(public_ec_key != self._backend._ffi.NULL) + public_ec_key = self._backend._ffi.gc( + public_ec_key, self._backend._lib.EC_KEY_free + ) + + point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key) + self._backend.openssl_assert(point != self._backend._ffi.NULL) + + res = self._backend._lib.EC_KEY_set_public_key(public_ec_key, point) + self._backend.openssl_assert(res == 1) + + evp_pkey = self._backend._ec_cdata_to_evp_pkey(public_ec_key) + + return _EllipticCurvePublicKey(self._backend, public_ec_key, evp_pkey) + + def private_numbers(self): + bn = self._backend._lib.EC_KEY_get0_private_key(self._ec_key) + private_value = self._backend._bn_to_int(bn) + return ec.EllipticCurvePrivateNumbers( + private_value=private_value, + public_numbers=self.public_key().public_numbers() + ) + + def private_bytes(self, encoding, format, encryption_algorithm): + return self._backend._private_key_bytes( + encoding, + format, + encryption_algorithm, + self._evp_pkey, + self._ec_key + ) + + def sign(self, data, signature_algorithm): + _check_signature_algorithm(signature_algorithm) + data, algorithm = _calculate_digest_and_algorithm( + self._backend, data, signature_algorithm._algorithm + ) + return _ecdsa_sig_sign(self._backend, self, data) + + +@utils.register_interface(ec.EllipticCurvePublicKeyWithSerialization) +class _EllipticCurvePublicKey(object): + def __init__(self, backend, ec_key_cdata, evp_pkey): + self._backend = backend + self._ec_key = ec_key_cdata + self._evp_pkey = evp_pkey + + sn = _ec_key_curve_sn(backend, ec_key_cdata) + self._curve = _sn_to_elliptic_curve(backend, sn) + _mark_asn1_named_ec_curve(backend, ec_key_cdata) + + curve = utils.read_only_property("_curve") + + @property + def key_size(self): + return self.curve.key_size + + def verifier(self, signature, signature_algorithm): + _warn_sign_verify_deprecated() + utils._check_bytes("signature", signature) + + _check_signature_algorithm(signature_algorithm) + _check_not_prehashed(signature_algorithm.algorithm) + return _ECDSAVerificationContext( + self._backend, self, signature, signature_algorithm.algorithm + ) + + def public_numbers(self): + get_func, group = ( + self._backend._ec_key_determine_group_get_func(self._ec_key) + ) + point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key) + self._backend.openssl_assert(point != self._backend._ffi.NULL) + + with self._backend._tmp_bn_ctx() as bn_ctx: + bn_x = self._backend._lib.BN_CTX_get(bn_ctx) + bn_y = self._backend._lib.BN_CTX_get(bn_ctx) + + res = get_func(group, point, bn_x, bn_y, bn_ctx) + self._backend.openssl_assert(res == 1) + + x = self._backend._bn_to_int(bn_x) + y = self._backend._bn_to_int(bn_y) + + return ec.EllipticCurvePublicNumbers( + x=x, + y=y, + curve=self._curve + ) + + def _encode_point(self, format): + if format is serialization.PublicFormat.CompressedPoint: + conversion = self._backend._lib.POINT_CONVERSION_COMPRESSED + else: + assert format is serialization.PublicFormat.UncompressedPoint + conversion = self._backend._lib.POINT_CONVERSION_UNCOMPRESSED + + group = 
self._backend._lib.EC_KEY_get0_group(self._ec_key) + self._backend.openssl_assert(group != self._backend._ffi.NULL) + point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key) + self._backend.openssl_assert(point != self._backend._ffi.NULL) + with self._backend._tmp_bn_ctx() as bn_ctx: + buflen = self._backend._lib.EC_POINT_point2oct( + group, point, conversion, self._backend._ffi.NULL, 0, bn_ctx + ) + self._backend.openssl_assert(buflen > 0) + buf = self._backend._ffi.new("char[]", buflen) + res = self._backend._lib.EC_POINT_point2oct( + group, point, conversion, buf, buflen, bn_ctx + ) + self._backend.openssl_assert(buflen == res) + + return self._backend._ffi.buffer(buf)[:] + + def public_bytes(self, encoding, format): + if format is serialization.PublicFormat.PKCS1: + raise ValueError( + "EC public keys do not support PKCS1 serialization" + ) + + if ( + encoding is serialization.Encoding.X962 or + format is serialization.PublicFormat.CompressedPoint or + format is serialization.PublicFormat.UncompressedPoint + ): + if ( + encoding is not serialization.Encoding.X962 or + format not in ( + serialization.PublicFormat.CompressedPoint, + serialization.PublicFormat.UncompressedPoint + ) + ): + raise ValueError( + "X962 encoding must be used with CompressedPoint or " + "UncompressedPoint format" + ) + + return self._encode_point(format) + else: + return self._backend._public_key_bytes( + encoding, + format, + self, + self._evp_pkey, + None + ) + + def verify(self, signature, data, signature_algorithm): + _check_signature_algorithm(signature_algorithm) + data, algorithm = _calculate_digest_and_algorithm( + self._backend, data, signature_algorithm._algorithm + ) + _ecdsa_sig_verify(self._backend, self, signature, data) diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/ed25519.py b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/ed25519.py new file mode 100644 index 0000000..f38f11d --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/ed25519.py @@ -0,0 +1,151 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
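# --- Illustrative usage sketch (editorial note, not part of the vendored patch) ---
# The _EllipticCurvePrivateKey / _EllipticCurvePublicKey classes above implement
# ECDSA signing and ECDH key agreement. A minimal sketch, assuming the standard
# pyca/cryptography public API; variable names are illustrative.
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import ec

backend = default_backend()

# ECDSA over NIST P-256: sign() digests the data, then calls _ecdsa_sig_sign().
private_key = ec.generate_private_key(ec.SECP256R1(), backend)
signature = private_key.sign(b"payload", ec.ECDSA(hashes.SHA256()))
private_key.public_key().verify(signature, b"payload", ec.ECDSA(hashes.SHA256()))

# ECDH: exchange() requires both keys to be on the same named curve and returns
# the raw shared secret produced by ECDH_compute_key().
peer_key = ec.generate_private_key(ec.SECP256R1(), backend)
shared_secret = private_key.exchange(ec.ECDH(), peer_key.public_key())
assert shared_secret == peer_key.exchange(ec.ECDH(), private_key.public_key())
# --- end sketch ---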
+ +from __future__ import absolute_import, division, print_function + +from cryptography import exceptions, utils +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric.ed25519 import ( + Ed25519PrivateKey, Ed25519PublicKey, _ED25519_KEY_SIZE, _ED25519_SIG_SIZE +) + + +@utils.register_interface(Ed25519PublicKey) +class _Ed25519PublicKey(object): + def __init__(self, backend, evp_pkey): + self._backend = backend + self._evp_pkey = evp_pkey + + def public_bytes(self, encoding, format): + if ( + encoding is serialization.Encoding.Raw or + format is serialization.PublicFormat.Raw + ): + if ( + encoding is not serialization.Encoding.Raw or + format is not serialization.PublicFormat.Raw + ): + raise ValueError( + "When using Raw both encoding and format must be Raw" + ) + + return self._raw_public_bytes() + + if ( + encoding in serialization._PEM_DER and + format is not serialization.PublicFormat.SubjectPublicKeyInfo + ): + raise ValueError( + "format must be SubjectPublicKeyInfo when encoding is PEM or " + "DER" + ) + + return self._backend._public_key_bytes( + encoding, format, self, self._evp_pkey, None + ) + + def _raw_public_bytes(self): + buf = self._backend._ffi.new("unsigned char []", _ED25519_KEY_SIZE) + buflen = self._backend._ffi.new("size_t *", _ED25519_KEY_SIZE) + res = self._backend._lib.EVP_PKEY_get_raw_public_key( + self._evp_pkey, buf, buflen + ) + self._backend.openssl_assert(res == 1) + self._backend.openssl_assert(buflen[0] == _ED25519_KEY_SIZE) + return self._backend._ffi.buffer(buf, _ED25519_KEY_SIZE)[:] + + def verify(self, signature, data): + evp_md_ctx = self._backend._lib.Cryptography_EVP_MD_CTX_new() + self._backend.openssl_assert(evp_md_ctx != self._backend._ffi.NULL) + evp_md_ctx = self._backend._ffi.gc( + evp_md_ctx, self._backend._lib.Cryptography_EVP_MD_CTX_free + ) + res = self._backend._lib.EVP_DigestVerifyInit( + evp_md_ctx, self._backend._ffi.NULL, self._backend._ffi.NULL, + self._backend._ffi.NULL, self._evp_pkey + ) + self._backend.openssl_assert(res == 1) + res = self._backend._lib.EVP_DigestVerify( + evp_md_ctx, signature, len(signature), data, len(data) + ) + if res != 1: + self._backend._consume_errors() + raise exceptions.InvalidSignature + + +@utils.register_interface(Ed25519PrivateKey) +class _Ed25519PrivateKey(object): + def __init__(self, backend, evp_pkey): + self._backend = backend + self._evp_pkey = evp_pkey + + def public_key(self): + buf = self._backend._ffi.new("unsigned char []", _ED25519_KEY_SIZE) + buflen = self._backend._ffi.new("size_t *", _ED25519_KEY_SIZE) + res = self._backend._lib.EVP_PKEY_get_raw_public_key( + self._evp_pkey, buf, buflen + ) + self._backend.openssl_assert(res == 1) + self._backend.openssl_assert(buflen[0] == _ED25519_KEY_SIZE) + public_bytes = self._backend._ffi.buffer(buf)[:] + return self._backend.ed25519_load_public_bytes(public_bytes) + + def sign(self, data): + evp_md_ctx = self._backend._lib.Cryptography_EVP_MD_CTX_new() + self._backend.openssl_assert(evp_md_ctx != self._backend._ffi.NULL) + evp_md_ctx = self._backend._ffi.gc( + evp_md_ctx, self._backend._lib.Cryptography_EVP_MD_CTX_free + ) + res = self._backend._lib.EVP_DigestSignInit( + evp_md_ctx, self._backend._ffi.NULL, self._backend._ffi.NULL, + self._backend._ffi.NULL, self._evp_pkey + ) + self._backend.openssl_assert(res == 1) + buf = self._backend._ffi.new("unsigned char[]", _ED25519_SIG_SIZE) + buflen = self._backend._ffi.new("size_t *", len(buf)) + res = self._backend._lib.EVP_DigestSign( + 
evp_md_ctx, buf, buflen, data, len(data) + ) + self._backend.openssl_assert(res == 1) + self._backend.openssl_assert(buflen[0] == _ED25519_SIG_SIZE) + return self._backend._ffi.buffer(buf, buflen[0])[:] + + def private_bytes(self, encoding, format, encryption_algorithm): + if ( + encoding is serialization.Encoding.Raw or + format is serialization.PublicFormat.Raw + ): + if ( + format is not serialization.PrivateFormat.Raw or + encoding is not serialization.Encoding.Raw or not + isinstance(encryption_algorithm, serialization.NoEncryption) + ): + raise ValueError( + "When using Raw both encoding and format must be Raw " + "and encryption_algorithm must be NoEncryption()" + ) + + return self._raw_private_bytes() + + if ( + encoding in serialization._PEM_DER and + format is not serialization.PrivateFormat.PKCS8 + ): + raise ValueError( + "format must be PKCS8 when encoding is PEM or DER" + ) + + return self._backend._private_key_bytes( + encoding, format, encryption_algorithm, self._evp_pkey, None + ) + + def _raw_private_bytes(self): + buf = self._backend._ffi.new("unsigned char []", _ED25519_KEY_SIZE) + buflen = self._backend._ffi.new("size_t *", _ED25519_KEY_SIZE) + res = self._backend._lib.EVP_PKEY_get_raw_private_key( + self._evp_pkey, buf, buflen + ) + self._backend.openssl_assert(res == 1) + self._backend.openssl_assert(buflen[0] == _ED25519_KEY_SIZE) + return self._backend._ffi.buffer(buf, _ED25519_KEY_SIZE)[:] diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/ed448.py b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/ed448.py new file mode 100644 index 0000000..f541f05 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/ed448.py @@ -0,0 +1,154 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
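# --- Illustrative usage sketch (editorial note, not part of the vendored patch) ---
# The _Ed25519PrivateKey / _Ed25519PublicKey wrappers above drive the one-shot
# EVP_DigestSign / EVP_DigestVerify interface; the Ed448 module in the next hunk
# is analogous with 57-byte keys and 114-byte signatures. A minimal sketch,
# assuming the standard public API and an OpenSSL build with Ed25519 support.
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PrivateKey

key = Ed25519PrivateKey.generate()
signature = key.sign(b"message")  # 64-byte signature, no digest to choose
key.public_key().verify(signature, b"message")

# Raw encodings round-trip through the _raw_private_bytes helper shown above.
raw = key.private_bytes(
    serialization.Encoding.Raw,
    serialization.PrivateFormat.Raw,
    serialization.NoEncryption(),
)
assert len(raw) == 32
# --- end sketch ---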
+ +from __future__ import absolute_import, division, print_function + +from cryptography import exceptions, utils +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric.ed448 import ( + Ed448PrivateKey, Ed448PublicKey +) + +_ED448_KEY_SIZE = 57 +_ED448_SIG_SIZE = 114 + + +@utils.register_interface(Ed448PublicKey) +class _Ed448PublicKey(object): + def __init__(self, backend, evp_pkey): + self._backend = backend + self._evp_pkey = evp_pkey + + def public_bytes(self, encoding, format): + if ( + encoding is serialization.Encoding.Raw or + format is serialization.PublicFormat.Raw + ): + if ( + encoding is not serialization.Encoding.Raw or + format is not serialization.PublicFormat.Raw + ): + raise ValueError( + "When using Raw both encoding and format must be Raw" + ) + + return self._raw_public_bytes() + + if ( + encoding in serialization._PEM_DER and + format is not serialization.PublicFormat.SubjectPublicKeyInfo + ): + raise ValueError( + "format must be SubjectPublicKeyInfo when encoding is PEM or " + "DER" + ) + + return self._backend._public_key_bytes( + encoding, format, self, self._evp_pkey, None + ) + + def _raw_public_bytes(self): + buf = self._backend._ffi.new("unsigned char []", _ED448_KEY_SIZE) + buflen = self._backend._ffi.new("size_t *", _ED448_KEY_SIZE) + res = self._backend._lib.EVP_PKEY_get_raw_public_key( + self._evp_pkey, buf, buflen + ) + self._backend.openssl_assert(res == 1) + self._backend.openssl_assert(buflen[0] == _ED448_KEY_SIZE) + return self._backend._ffi.buffer(buf, _ED448_KEY_SIZE)[:] + + def verify(self, signature, data): + evp_md_ctx = self._backend._lib.Cryptography_EVP_MD_CTX_new() + self._backend.openssl_assert(evp_md_ctx != self._backend._ffi.NULL) + evp_md_ctx = self._backend._ffi.gc( + evp_md_ctx, self._backend._lib.Cryptography_EVP_MD_CTX_free + ) + res = self._backend._lib.EVP_DigestVerifyInit( + evp_md_ctx, self._backend._ffi.NULL, self._backend._ffi.NULL, + self._backend._ffi.NULL, self._evp_pkey + ) + self._backend.openssl_assert(res == 1) + res = self._backend._lib.EVP_DigestVerify( + evp_md_ctx, signature, len(signature), data, len(data) + ) + if res != 1: + self._backend._consume_errors() + raise exceptions.InvalidSignature + + +@utils.register_interface(Ed448PrivateKey) +class _Ed448PrivateKey(object): + def __init__(self, backend, evp_pkey): + self._backend = backend + self._evp_pkey = evp_pkey + + def public_key(self): + buf = self._backend._ffi.new("unsigned char []", _ED448_KEY_SIZE) + buflen = self._backend._ffi.new("size_t *", _ED448_KEY_SIZE) + res = self._backend._lib.EVP_PKEY_get_raw_public_key( + self._evp_pkey, buf, buflen + ) + self._backend.openssl_assert(res == 1) + self._backend.openssl_assert(buflen[0] == _ED448_KEY_SIZE) + public_bytes = self._backend._ffi.buffer(buf)[:] + return self._backend.ed448_load_public_bytes(public_bytes) + + def sign(self, data): + evp_md_ctx = self._backend._lib.Cryptography_EVP_MD_CTX_new() + self._backend.openssl_assert(evp_md_ctx != self._backend._ffi.NULL) + evp_md_ctx = self._backend._ffi.gc( + evp_md_ctx, self._backend._lib.Cryptography_EVP_MD_CTX_free + ) + res = self._backend._lib.EVP_DigestSignInit( + evp_md_ctx, self._backend._ffi.NULL, self._backend._ffi.NULL, + self._backend._ffi.NULL, self._evp_pkey + ) + self._backend.openssl_assert(res == 1) + buf = self._backend._ffi.new("unsigned char[]", _ED448_SIG_SIZE) + buflen = self._backend._ffi.new("size_t *", len(buf)) + res = self._backend._lib.EVP_DigestSign( + evp_md_ctx, buf, buflen, data, 
len(data) + ) + self._backend.openssl_assert(res == 1) + self._backend.openssl_assert(buflen[0] == _ED448_SIG_SIZE) + return self._backend._ffi.buffer(buf, buflen[0])[:] + + def private_bytes(self, encoding, format, encryption_algorithm): + if ( + encoding is serialization.Encoding.Raw or + format is serialization.PublicFormat.Raw + ): + if ( + format is not serialization.PrivateFormat.Raw or + encoding is not serialization.Encoding.Raw or not + isinstance(encryption_algorithm, serialization.NoEncryption) + ): + raise ValueError( + "When using Raw both encoding and format must be Raw " + "and encryption_algorithm must be NoEncryption()" + ) + + return self._raw_private_bytes() + + if ( + encoding in serialization._PEM_DER and + format is not serialization.PrivateFormat.PKCS8 + ): + raise ValueError( + "format must be PKCS8 when encoding is PEM or DER" + ) + + return self._backend._private_key_bytes( + encoding, format, encryption_algorithm, self._evp_pkey, None + ) + + def _raw_private_bytes(self): + buf = self._backend._ffi.new("unsigned char []", _ED448_KEY_SIZE) + buflen = self._backend._ffi.new("size_t *", _ED448_KEY_SIZE) + res = self._backend._lib.EVP_PKEY_get_raw_private_key( + self._evp_pkey, buf, buflen + ) + self._backend.openssl_assert(res == 1) + self._backend.openssl_assert(buflen[0] == _ED448_KEY_SIZE) + return self._backend._ffi.buffer(buf, _ED448_KEY_SIZE)[:] diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/encode_asn1.py b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/encode_asn1.py new file mode 100644 index 0000000..ca35f0e --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/encode_asn1.py @@ -0,0 +1,660 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import calendar +import ipaddress + +import six + +from cryptography import utils, x509 +from cryptography.hazmat.backends.openssl.decode_asn1 import ( + _CRL_ENTRY_REASON_ENUM_TO_CODE, _DISTPOINT_TYPE_FULLNAME, + _DISTPOINT_TYPE_RELATIVENAME +) +from cryptography.x509.name import _ASN1Type +from cryptography.x509.oid import ( + CRLEntryExtensionOID, ExtensionOID, OCSPExtensionOID, +) + + +def _encode_asn1_int(backend, x): + """ + Converts a python integer to an ASN1_INTEGER. The returned ASN1_INTEGER + will not be garbage collected (to support adding them to structs that take + ownership of the object). Be sure to register it for GC if it will be + discarded after use. + + """ + # Convert Python integer to OpenSSL "bignum" in case value exceeds + # machine's native integer limits (note: `int_to_bn` doesn't automatically + # GC). + i = backend._int_to_bn(x) + i = backend._ffi.gc(i, backend._lib.BN_free) + + # Wrap in an ASN.1 integer. Don't GC -- as documented. + i = backend._lib.BN_to_ASN1_INTEGER(i, backend._ffi.NULL) + backend.openssl_assert(i != backend._ffi.NULL) + return i + + +def _encode_asn1_int_gc(backend, x): + i = _encode_asn1_int(backend, x) + i = backend._ffi.gc(i, backend._lib.ASN1_INTEGER_free) + return i + + +def _encode_asn1_str(backend, data): + """ + Create an ASN1_OCTET_STRING from a Python byte string. 
+ """ + s = backend._lib.ASN1_OCTET_STRING_new() + res = backend._lib.ASN1_OCTET_STRING_set(s, data, len(data)) + backend.openssl_assert(res == 1) + return s + + +def _encode_asn1_utf8_str(backend, string): + """ + Create an ASN1_UTF8STRING from a Python unicode string. + This object will be an ASN1_STRING with UTF8 type in OpenSSL and + can be decoded with ASN1_STRING_to_UTF8. + """ + s = backend._lib.ASN1_UTF8STRING_new() + res = backend._lib.ASN1_STRING_set( + s, string.encode("utf8"), len(string.encode("utf8")) + ) + backend.openssl_assert(res == 1) + return s + + +def _encode_asn1_str_gc(backend, data): + s = _encode_asn1_str(backend, data) + s = backend._ffi.gc(s, backend._lib.ASN1_OCTET_STRING_free) + return s + + +def _encode_inhibit_any_policy(backend, inhibit_any_policy): + return _encode_asn1_int_gc(backend, inhibit_any_policy.skip_certs) + + +def _encode_name(backend, name): + """ + The X509_NAME created will not be gc'd. Use _encode_name_gc if needed. + """ + subject = backend._lib.X509_NAME_new() + for rdn in name.rdns: + set_flag = 0 # indicate whether to add to last RDN or create new RDN + for attribute in rdn: + name_entry = _encode_name_entry(backend, attribute) + # X509_NAME_add_entry dups the object so we need to gc this copy + name_entry = backend._ffi.gc( + name_entry, backend._lib.X509_NAME_ENTRY_free + ) + res = backend._lib.X509_NAME_add_entry( + subject, name_entry, -1, set_flag) + backend.openssl_assert(res == 1) + set_flag = -1 + return subject + + +def _encode_name_gc(backend, attributes): + subject = _encode_name(backend, attributes) + subject = backend._ffi.gc(subject, backend._lib.X509_NAME_free) + return subject + + +def _encode_sk_name_entry(backend, attributes): + """ + The sk_X509_NAME_ENTRY created will not be gc'd. 
+ """ + stack = backend._lib.sk_X509_NAME_ENTRY_new_null() + for attribute in attributes: + name_entry = _encode_name_entry(backend, attribute) + res = backend._lib.sk_X509_NAME_ENTRY_push(stack, name_entry) + backend.openssl_assert(res >= 1) + return stack + + +def _encode_name_entry(backend, attribute): + if attribute._type is _ASN1Type.BMPString: + value = attribute.value.encode('utf_16_be') + elif attribute._type is _ASN1Type.UniversalString: + value = attribute.value.encode('utf_32_be') + else: + value = attribute.value.encode('utf8') + + obj = _txt2obj_gc(backend, attribute.oid.dotted_string) + + name_entry = backend._lib.X509_NAME_ENTRY_create_by_OBJ( + backend._ffi.NULL, obj, attribute._type.value, value, len(value) + ) + return name_entry + + +def _encode_crl_number_delta_crl_indicator(backend, ext): + return _encode_asn1_int_gc(backend, ext.crl_number) + + +def _encode_issuing_dist_point(backend, ext): + idp = backend._lib.ISSUING_DIST_POINT_new() + backend.openssl_assert(idp != backend._ffi.NULL) + idp = backend._ffi.gc(idp, backend._lib.ISSUING_DIST_POINT_free) + idp.onlyuser = 255 if ext.only_contains_user_certs else 0 + idp.onlyCA = 255 if ext.only_contains_ca_certs else 0 + idp.indirectCRL = 255 if ext.indirect_crl else 0 + idp.onlyattr = 255 if ext.only_contains_attribute_certs else 0 + if ext.only_some_reasons: + idp.onlysomereasons = _encode_reasonflags( + backend, ext.only_some_reasons + ) + + if ext.full_name: + idp.distpoint = _encode_full_name(backend, ext.full_name) + + if ext.relative_name: + idp.distpoint = _encode_relative_name(backend, ext.relative_name) + + return idp + + +def _encode_crl_reason(backend, crl_reason): + asn1enum = backend._lib.ASN1_ENUMERATED_new() + backend.openssl_assert(asn1enum != backend._ffi.NULL) + asn1enum = backend._ffi.gc(asn1enum, backend._lib.ASN1_ENUMERATED_free) + res = backend._lib.ASN1_ENUMERATED_set( + asn1enum, _CRL_ENTRY_REASON_ENUM_TO_CODE[crl_reason.reason] + ) + backend.openssl_assert(res == 1) + + return asn1enum + + +def _encode_invalidity_date(backend, invalidity_date): + time = backend._lib.ASN1_GENERALIZEDTIME_set( + backend._ffi.NULL, calendar.timegm( + invalidity_date.invalidity_date.timetuple() + ) + ) + backend.openssl_assert(time != backend._ffi.NULL) + time = backend._ffi.gc(time, backend._lib.ASN1_GENERALIZEDTIME_free) + + return time + + +def _encode_certificate_policies(backend, certificate_policies): + cp = backend._lib.sk_POLICYINFO_new_null() + backend.openssl_assert(cp != backend._ffi.NULL) + cp = backend._ffi.gc(cp, backend._lib.sk_POLICYINFO_free) + for policy_info in certificate_policies: + pi = backend._lib.POLICYINFO_new() + backend.openssl_assert(pi != backend._ffi.NULL) + res = backend._lib.sk_POLICYINFO_push(cp, pi) + backend.openssl_assert(res >= 1) + oid = _txt2obj(backend, policy_info.policy_identifier.dotted_string) + pi.policyid = oid + if policy_info.policy_qualifiers: + pqis = backend._lib.sk_POLICYQUALINFO_new_null() + backend.openssl_assert(pqis != backend._ffi.NULL) + for qualifier in policy_info.policy_qualifiers: + pqi = backend._lib.POLICYQUALINFO_new() + backend.openssl_assert(pqi != backend._ffi.NULL) + res = backend._lib.sk_POLICYQUALINFO_push(pqis, pqi) + backend.openssl_assert(res >= 1) + if isinstance(qualifier, six.text_type): + pqi.pqualid = _txt2obj( + backend, x509.OID_CPS_QUALIFIER.dotted_string + ) + pqi.d.cpsuri = _encode_asn1_str( + backend, + qualifier.encode("ascii"), + ) + else: + assert isinstance(qualifier, x509.UserNotice) + pqi.pqualid = _txt2obj( + backend, 
x509.OID_CPS_USER_NOTICE.dotted_string + ) + un = backend._lib.USERNOTICE_new() + backend.openssl_assert(un != backend._ffi.NULL) + pqi.d.usernotice = un + if qualifier.explicit_text: + un.exptext = _encode_asn1_utf8_str( + backend, qualifier.explicit_text + ) + + un.noticeref = _encode_notice_reference( + backend, qualifier.notice_reference + ) + + pi.qualifiers = pqis + + return cp + + +def _encode_notice_reference(backend, notice): + if notice is None: + return backend._ffi.NULL + else: + nr = backend._lib.NOTICEREF_new() + backend.openssl_assert(nr != backend._ffi.NULL) + # organization is a required field + nr.organization = _encode_asn1_utf8_str(backend, notice.organization) + + notice_stack = backend._lib.sk_ASN1_INTEGER_new_null() + nr.noticenos = notice_stack + for number in notice.notice_numbers: + num = _encode_asn1_int(backend, number) + res = backend._lib.sk_ASN1_INTEGER_push(notice_stack, num) + backend.openssl_assert(res >= 1) + + return nr + + +def _txt2obj(backend, name): + """ + Converts a Python string with an ASN.1 object ID in dotted form to a + ASN1_OBJECT. + """ + name = name.encode('ascii') + obj = backend._lib.OBJ_txt2obj(name, 1) + backend.openssl_assert(obj != backend._ffi.NULL) + return obj + + +def _txt2obj_gc(backend, name): + obj = _txt2obj(backend, name) + obj = backend._ffi.gc(obj, backend._lib.ASN1_OBJECT_free) + return obj + + +def _encode_ocsp_nocheck(backend, ext): + # Doesn't need to be GC'd + return backend._lib.ASN1_NULL_new() + + +def _encode_key_usage(backend, key_usage): + set_bit = backend._lib.ASN1_BIT_STRING_set_bit + ku = backend._lib.ASN1_BIT_STRING_new() + ku = backend._ffi.gc(ku, backend._lib.ASN1_BIT_STRING_free) + res = set_bit(ku, 0, key_usage.digital_signature) + backend.openssl_assert(res == 1) + res = set_bit(ku, 1, key_usage.content_commitment) + backend.openssl_assert(res == 1) + res = set_bit(ku, 2, key_usage.key_encipherment) + backend.openssl_assert(res == 1) + res = set_bit(ku, 3, key_usage.data_encipherment) + backend.openssl_assert(res == 1) + res = set_bit(ku, 4, key_usage.key_agreement) + backend.openssl_assert(res == 1) + res = set_bit(ku, 5, key_usage.key_cert_sign) + backend.openssl_assert(res == 1) + res = set_bit(ku, 6, key_usage.crl_sign) + backend.openssl_assert(res == 1) + if key_usage.key_agreement: + res = set_bit(ku, 7, key_usage.encipher_only) + backend.openssl_assert(res == 1) + res = set_bit(ku, 8, key_usage.decipher_only) + backend.openssl_assert(res == 1) + else: + res = set_bit(ku, 7, 0) + backend.openssl_assert(res == 1) + res = set_bit(ku, 8, 0) + backend.openssl_assert(res == 1) + + return ku + + +def _encode_authority_key_identifier(backend, authority_keyid): + akid = backend._lib.AUTHORITY_KEYID_new() + backend.openssl_assert(akid != backend._ffi.NULL) + akid = backend._ffi.gc(akid, backend._lib.AUTHORITY_KEYID_free) + if authority_keyid.key_identifier is not None: + akid.keyid = _encode_asn1_str( + backend, + authority_keyid.key_identifier, + ) + + if authority_keyid.authority_cert_issuer is not None: + akid.issuer = _encode_general_names( + backend, authority_keyid.authority_cert_issuer + ) + + if authority_keyid.authority_cert_serial_number is not None: + akid.serial = _encode_asn1_int( + backend, authority_keyid.authority_cert_serial_number + ) + + return akid + + +def _encode_basic_constraints(backend, basic_constraints): + constraints = backend._lib.BASIC_CONSTRAINTS_new() + constraints = backend._ffi.gc( + constraints, backend._lib.BASIC_CONSTRAINTS_free + ) + constraints.ca = 255 if 
basic_constraints.ca else 0 + if basic_constraints.ca and basic_constraints.path_length is not None: + constraints.pathlen = _encode_asn1_int( + backend, basic_constraints.path_length + ) + + return constraints + + +def _encode_authority_information_access(backend, authority_info_access): + aia = backend._lib.sk_ACCESS_DESCRIPTION_new_null() + backend.openssl_assert(aia != backend._ffi.NULL) + aia = backend._ffi.gc( + aia, + lambda x: backend._lib.sk_ACCESS_DESCRIPTION_pop_free( + x, backend._ffi.addressof( + backend._lib._original_lib, "ACCESS_DESCRIPTION_free" + ) + ) + ) + for access_description in authority_info_access: + ad = backend._lib.ACCESS_DESCRIPTION_new() + method = _txt2obj( + backend, access_description.access_method.dotted_string + ) + _encode_general_name_preallocated( + backend, access_description.access_location, ad.location + ) + ad.method = method + res = backend._lib.sk_ACCESS_DESCRIPTION_push(aia, ad) + backend.openssl_assert(res >= 1) + + return aia + + +def _encode_general_names(backend, names): + general_names = backend._lib.GENERAL_NAMES_new() + backend.openssl_assert(general_names != backend._ffi.NULL) + for name in names: + gn = _encode_general_name(backend, name) + res = backend._lib.sk_GENERAL_NAME_push(general_names, gn) + backend.openssl_assert(res != 0) + + return general_names + + +def _encode_alt_name(backend, san): + general_names = _encode_general_names(backend, san) + general_names = backend._ffi.gc( + general_names, backend._lib.GENERAL_NAMES_free + ) + return general_names + + +def _encode_subject_key_identifier(backend, ski): + return _encode_asn1_str_gc(backend, ski.digest) + + +def _encode_general_name(backend, name): + gn = backend._lib.GENERAL_NAME_new() + _encode_general_name_preallocated(backend, name, gn) + return gn + + +def _encode_general_name_preallocated(backend, name, gn): + if isinstance(name, x509.DNSName): + backend.openssl_assert(gn != backend._ffi.NULL) + gn.type = backend._lib.GEN_DNS + + ia5 = backend._lib.ASN1_IA5STRING_new() + backend.openssl_assert(ia5 != backend._ffi.NULL) + # ia5strings are supposed to be ITU T.50 but to allow round-tripping + # of broken certs that encode utf8 we'll encode utf8 here too. 
+ value = name.value.encode("utf8") + + res = backend._lib.ASN1_STRING_set(ia5, value, len(value)) + backend.openssl_assert(res == 1) + gn.d.dNSName = ia5 + elif isinstance(name, x509.RegisteredID): + backend.openssl_assert(gn != backend._ffi.NULL) + gn.type = backend._lib.GEN_RID + obj = backend._lib.OBJ_txt2obj( + name.value.dotted_string.encode('ascii'), 1 + ) + backend.openssl_assert(obj != backend._ffi.NULL) + gn.d.registeredID = obj + elif isinstance(name, x509.DirectoryName): + backend.openssl_assert(gn != backend._ffi.NULL) + dir_name = _encode_name(backend, name.value) + gn.type = backend._lib.GEN_DIRNAME + gn.d.directoryName = dir_name + elif isinstance(name, x509.IPAddress): + backend.openssl_assert(gn != backend._ffi.NULL) + if isinstance(name.value, ipaddress.IPv4Network): + packed = ( + name.value.network_address.packed + + utils.int_to_bytes(((1 << 32) - name.value.num_addresses), 4) + ) + elif isinstance(name.value, ipaddress.IPv6Network): + packed = ( + name.value.network_address.packed + + utils.int_to_bytes((1 << 128) - name.value.num_addresses, 16) + ) + else: + packed = name.value.packed + ipaddr = _encode_asn1_str(backend, packed) + gn.type = backend._lib.GEN_IPADD + gn.d.iPAddress = ipaddr + elif isinstance(name, x509.OtherName): + backend.openssl_assert(gn != backend._ffi.NULL) + other_name = backend._lib.OTHERNAME_new() + backend.openssl_assert(other_name != backend._ffi.NULL) + + type_id = backend._lib.OBJ_txt2obj( + name.type_id.dotted_string.encode('ascii'), 1 + ) + backend.openssl_assert(type_id != backend._ffi.NULL) + data = backend._ffi.new("unsigned char[]", name.value) + data_ptr_ptr = backend._ffi.new("unsigned char **") + data_ptr_ptr[0] = data + value = backend._lib.d2i_ASN1_TYPE( + backend._ffi.NULL, data_ptr_ptr, len(name.value) + ) + if value == backend._ffi.NULL: + backend._consume_errors() + raise ValueError("Invalid ASN.1 data") + other_name.type_id = type_id + other_name.value = value + gn.type = backend._lib.GEN_OTHERNAME + gn.d.otherName = other_name + elif isinstance(name, x509.RFC822Name): + backend.openssl_assert(gn != backend._ffi.NULL) + # ia5strings are supposed to be ITU T.50 but to allow round-tripping + # of broken certs that encode utf8 we'll encode utf8 here too. + data = name.value.encode("utf8") + asn1_str = _encode_asn1_str(backend, data) + gn.type = backend._lib.GEN_EMAIL + gn.d.rfc822Name = asn1_str + elif isinstance(name, x509.UniformResourceIdentifier): + backend.openssl_assert(gn != backend._ffi.NULL) + # ia5strings are supposed to be ITU T.50 but to allow round-tripping + # of broken certs that encode utf8 we'll encode utf8 here too. 
+ data = name.value.encode("utf8") + asn1_str = _encode_asn1_str(backend, data) + gn.type = backend._lib.GEN_URI + gn.d.uniformResourceIdentifier = asn1_str + else: + raise ValueError( + "{} is an unknown GeneralName type".format(name) + ) + + +def _encode_extended_key_usage(backend, extended_key_usage): + eku = backend._lib.sk_ASN1_OBJECT_new_null() + eku = backend._ffi.gc(eku, backend._lib.sk_ASN1_OBJECT_free) + for oid in extended_key_usage: + obj = _txt2obj(backend, oid.dotted_string) + res = backend._lib.sk_ASN1_OBJECT_push(eku, obj) + backend.openssl_assert(res >= 1) + + return eku + + +_CRLREASONFLAGS = { + x509.ReasonFlags.key_compromise: 1, + x509.ReasonFlags.ca_compromise: 2, + x509.ReasonFlags.affiliation_changed: 3, + x509.ReasonFlags.superseded: 4, + x509.ReasonFlags.cessation_of_operation: 5, + x509.ReasonFlags.certificate_hold: 6, + x509.ReasonFlags.privilege_withdrawn: 7, + x509.ReasonFlags.aa_compromise: 8, +} + + +def _encode_reasonflags(backend, reasons): + bitmask = backend._lib.ASN1_BIT_STRING_new() + backend.openssl_assert(bitmask != backend._ffi.NULL) + for reason in reasons: + res = backend._lib.ASN1_BIT_STRING_set_bit( + bitmask, _CRLREASONFLAGS[reason], 1 + ) + backend.openssl_assert(res == 1) + + return bitmask + + +def _encode_full_name(backend, full_name): + dpn = backend._lib.DIST_POINT_NAME_new() + backend.openssl_assert(dpn != backend._ffi.NULL) + dpn.type = _DISTPOINT_TYPE_FULLNAME + dpn.name.fullname = _encode_general_names(backend, full_name) + return dpn + + +def _encode_relative_name(backend, relative_name): + dpn = backend._lib.DIST_POINT_NAME_new() + backend.openssl_assert(dpn != backend._ffi.NULL) + dpn.type = _DISTPOINT_TYPE_RELATIVENAME + dpn.name.relativename = _encode_sk_name_entry(backend, relative_name) + return dpn + + +def _encode_cdps_freshest_crl(backend, cdps): + cdp = backend._lib.sk_DIST_POINT_new_null() + cdp = backend._ffi.gc(cdp, backend._lib.sk_DIST_POINT_free) + for point in cdps: + dp = backend._lib.DIST_POINT_new() + backend.openssl_assert(dp != backend._ffi.NULL) + + if point.reasons: + dp.reasons = _encode_reasonflags(backend, point.reasons) + + if point.full_name: + dp.distpoint = _encode_full_name(backend, point.full_name) + + if point.relative_name: + dp.distpoint = _encode_relative_name(backend, point.relative_name) + + if point.crl_issuer: + dp.CRLissuer = _encode_general_names(backend, point.crl_issuer) + + res = backend._lib.sk_DIST_POINT_push(cdp, dp) + backend.openssl_assert(res >= 1) + + return cdp + + +def _encode_name_constraints(backend, name_constraints): + nc = backend._lib.NAME_CONSTRAINTS_new() + backend.openssl_assert(nc != backend._ffi.NULL) + nc = backend._ffi.gc(nc, backend._lib.NAME_CONSTRAINTS_free) + permitted = _encode_general_subtree( + backend, name_constraints.permitted_subtrees + ) + nc.permittedSubtrees = permitted + excluded = _encode_general_subtree( + backend, name_constraints.excluded_subtrees + ) + nc.excludedSubtrees = excluded + + return nc + + +def _encode_policy_constraints(backend, policy_constraints): + pc = backend._lib.POLICY_CONSTRAINTS_new() + backend.openssl_assert(pc != backend._ffi.NULL) + pc = backend._ffi.gc(pc, backend._lib.POLICY_CONSTRAINTS_free) + if policy_constraints.require_explicit_policy is not None: + pc.requireExplicitPolicy = _encode_asn1_int( + backend, policy_constraints.require_explicit_policy + ) + + if policy_constraints.inhibit_policy_mapping is not None: + pc.inhibitPolicyMapping = _encode_asn1_int( + backend, policy_constraints.inhibit_policy_mapping + ) + + 
return pc + + +def _encode_general_subtree(backend, subtrees): + if subtrees is None: + return backend._ffi.NULL + else: + general_subtrees = backend._lib.sk_GENERAL_SUBTREE_new_null() + for name in subtrees: + gs = backend._lib.GENERAL_SUBTREE_new() + gs.base = _encode_general_name(backend, name) + res = backend._lib.sk_GENERAL_SUBTREE_push(general_subtrees, gs) + assert res >= 1 + + return general_subtrees + + +def _encode_nonce(backend, nonce): + return _encode_asn1_str_gc(backend, nonce.nonce) + + +_EXTENSION_ENCODE_HANDLERS = { + ExtensionOID.BASIC_CONSTRAINTS: _encode_basic_constraints, + ExtensionOID.SUBJECT_KEY_IDENTIFIER: _encode_subject_key_identifier, + ExtensionOID.KEY_USAGE: _encode_key_usage, + ExtensionOID.SUBJECT_ALTERNATIVE_NAME: _encode_alt_name, + ExtensionOID.ISSUER_ALTERNATIVE_NAME: _encode_alt_name, + ExtensionOID.EXTENDED_KEY_USAGE: _encode_extended_key_usage, + ExtensionOID.AUTHORITY_KEY_IDENTIFIER: _encode_authority_key_identifier, + ExtensionOID.CERTIFICATE_POLICIES: _encode_certificate_policies, + ExtensionOID.AUTHORITY_INFORMATION_ACCESS: ( + _encode_authority_information_access + ), + ExtensionOID.CRL_DISTRIBUTION_POINTS: _encode_cdps_freshest_crl, + ExtensionOID.FRESHEST_CRL: _encode_cdps_freshest_crl, + ExtensionOID.INHIBIT_ANY_POLICY: _encode_inhibit_any_policy, + ExtensionOID.OCSP_NO_CHECK: _encode_ocsp_nocheck, + ExtensionOID.NAME_CONSTRAINTS: _encode_name_constraints, + ExtensionOID.POLICY_CONSTRAINTS: _encode_policy_constraints, +} + +_CRL_EXTENSION_ENCODE_HANDLERS = { + ExtensionOID.ISSUER_ALTERNATIVE_NAME: _encode_alt_name, + ExtensionOID.AUTHORITY_KEY_IDENTIFIER: _encode_authority_key_identifier, + ExtensionOID.AUTHORITY_INFORMATION_ACCESS: ( + _encode_authority_information_access + ), + ExtensionOID.CRL_NUMBER: _encode_crl_number_delta_crl_indicator, + ExtensionOID.DELTA_CRL_INDICATOR: _encode_crl_number_delta_crl_indicator, + ExtensionOID.ISSUING_DISTRIBUTION_POINT: _encode_issuing_dist_point, + ExtensionOID.FRESHEST_CRL: _encode_cdps_freshest_crl, +} + +_CRL_ENTRY_EXTENSION_ENCODE_HANDLERS = { + CRLEntryExtensionOID.CERTIFICATE_ISSUER: _encode_alt_name, + CRLEntryExtensionOID.CRL_REASON: _encode_crl_reason, + CRLEntryExtensionOID.INVALIDITY_DATE: _encode_invalidity_date, +} + +_OCSP_REQUEST_EXTENSION_ENCODE_HANDLERS = { + OCSPExtensionOID.NONCE: _encode_nonce, +} + +_OCSP_BASICRESP_EXTENSION_ENCODE_HANDLERS = { + OCSPExtensionOID.NONCE: _encode_nonce, +} diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/hashes.py b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/hashes.py new file mode 100644 index 0000000..7f9d840 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/hashes.py @@ -0,0 +1,78 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
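# --- Illustrative usage sketch (editorial note, not part of the vendored patch) ---
# The _EXTENSION_ENCODE_HANDLERS tables above are consulted when the backend
# serializes extensions added through the x509 builders. A minimal sketch of that
# path, assuming the standard public API; the key and subject values are made up.
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.x509.oid import NameOID

backend = default_backend()
key = ec.generate_private_key(ec.SECP256R1(), backend)

csr = (
    x509.CertificateSigningRequestBuilder()
    .subject_name(x509.Name([
        x509.NameAttribute(NameOID.COMMON_NAME, u"example.invalid"),
    ]))
    # Encoded by _encode_alt_name() / _encode_basic_constraints() above.
    .add_extension(
        x509.SubjectAlternativeName([x509.DNSName(u"example.invalid")]),
        critical=False,
    )
    .add_extension(
        x509.BasicConstraints(ca=False, path_length=None),
        critical=True,
    )
    .sign(key, hashes.SHA256(), backend)
)
assert csr.is_signature_valid
# --- end sketch ---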
+ +from __future__ import absolute_import, division, print_function + + +from cryptography import utils +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.primitives import hashes + + +@utils.register_interface(hashes.HashContext) +class _HashContext(object): + def __init__(self, backend, algorithm, ctx=None): + self._algorithm = algorithm + + self._backend = backend + + if ctx is None: + ctx = self._backend._lib.Cryptography_EVP_MD_CTX_new() + ctx = self._backend._ffi.gc( + ctx, self._backend._lib.Cryptography_EVP_MD_CTX_free + ) + evp_md = self._backend._evp_md_from_algorithm(algorithm) + if evp_md == self._backend._ffi.NULL: + raise UnsupportedAlgorithm( + "{} is not a supported hash on this backend.".format( + algorithm.name), + _Reasons.UNSUPPORTED_HASH + ) + res = self._backend._lib.EVP_DigestInit_ex(ctx, evp_md, + self._backend._ffi.NULL) + self._backend.openssl_assert(res != 0) + + self._ctx = ctx + + algorithm = utils.read_only_property("_algorithm") + + def copy(self): + copied_ctx = self._backend._lib.Cryptography_EVP_MD_CTX_new() + copied_ctx = self._backend._ffi.gc( + copied_ctx, self._backend._lib.Cryptography_EVP_MD_CTX_free + ) + res = self._backend._lib.EVP_MD_CTX_copy_ex(copied_ctx, self._ctx) + self._backend.openssl_assert(res != 0) + return _HashContext(self._backend, self.algorithm, ctx=copied_ctx) + + def update(self, data): + data_ptr = self._backend._ffi.from_buffer(data) + res = self._backend._lib.EVP_DigestUpdate( + self._ctx, data_ptr, len(data) + ) + self._backend.openssl_assert(res != 0) + + def finalize(self): + if isinstance(self.algorithm, hashes.ExtendableOutputFunction): + # extendable output functions use a different finalize + return self._finalize_xof() + else: + buf = self._backend._ffi.new("unsigned char[]", + self._backend._lib.EVP_MAX_MD_SIZE) + outlen = self._backend._ffi.new("unsigned int *") + res = self._backend._lib.EVP_DigestFinal_ex(self._ctx, buf, outlen) + self._backend.openssl_assert(res != 0) + self._backend.openssl_assert( + outlen[0] == self.algorithm.digest_size + ) + return self._backend._ffi.buffer(buf)[:outlen[0]] + + def _finalize_xof(self): + buf = self._backend._ffi.new("unsigned char[]", + self.algorithm.digest_size) + res = self._backend._lib.EVP_DigestFinalXOF( + self._ctx, buf, self.algorithm.digest_size + ) + self._backend.openssl_assert(res != 0) + return self._backend._ffi.buffer(buf)[:self.algorithm.digest_size] diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/hmac.py b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/hmac.py new file mode 100644 index 0000000..2e09cbc --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/hmac.py @@ -0,0 +1,74 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
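# --- Illustrative usage sketch (editorial note, not part of the vendored patch) ---
# _HashContext above wraps an EVP_MD_CTX; copy() clones the context so an
# incremental hash can branch. A minimal sketch, assuming the standard public API.
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes

digest = hashes.Hash(hashes.SHA256(), backend=default_backend())
digest.update(b"hello ")
fork = digest.copy()        # EVP_MD_CTX_copy_ex under the hood
digest.update(b"world")

full = digest.finalize()    # SHA-256 of b"hello world"
partial = fork.finalize()   # SHA-256 of just b"hello "
assert full != partial
# --- end sketch ---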
+ +from __future__ import absolute_import, division, print_function + + +from cryptography import utils +from cryptography.exceptions import ( + InvalidSignature, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.primitives import constant_time, hashes + + +@utils.register_interface(hashes.HashContext) +class _HMACContext(object): + def __init__(self, backend, key, algorithm, ctx=None): + self._algorithm = algorithm + self._backend = backend + + if ctx is None: + ctx = self._backend._lib.Cryptography_HMAC_CTX_new() + self._backend.openssl_assert(ctx != self._backend._ffi.NULL) + ctx = self._backend._ffi.gc( + ctx, self._backend._lib.Cryptography_HMAC_CTX_free + ) + evp_md = self._backend._evp_md_from_algorithm(algorithm) + if evp_md == self._backend._ffi.NULL: + raise UnsupportedAlgorithm( + "{} is not a supported hash on this backend".format( + algorithm.name), + _Reasons.UNSUPPORTED_HASH + ) + key_ptr = self._backend._ffi.from_buffer(key) + res = self._backend._lib.HMAC_Init_ex( + ctx, key_ptr, len(key), evp_md, self._backend._ffi.NULL + ) + self._backend.openssl_assert(res != 0) + + self._ctx = ctx + self._key = key + + algorithm = utils.read_only_property("_algorithm") + + def copy(self): + copied_ctx = self._backend._lib.Cryptography_HMAC_CTX_new() + self._backend.openssl_assert(copied_ctx != self._backend._ffi.NULL) + copied_ctx = self._backend._ffi.gc( + copied_ctx, self._backend._lib.Cryptography_HMAC_CTX_free + ) + res = self._backend._lib.HMAC_CTX_copy(copied_ctx, self._ctx) + self._backend.openssl_assert(res != 0) + return _HMACContext( + self._backend, self._key, self.algorithm, ctx=copied_ctx + ) + + def update(self, data): + data_ptr = self._backend._ffi.from_buffer(data) + res = self._backend._lib.HMAC_Update(self._ctx, data_ptr, len(data)) + self._backend.openssl_assert(res != 0) + + def finalize(self): + buf = self._backend._ffi.new("unsigned char[]", + self._backend._lib.EVP_MAX_MD_SIZE) + outlen = self._backend._ffi.new("unsigned int *") + res = self._backend._lib.HMAC_Final(self._ctx, buf, outlen) + self._backend.openssl_assert(res != 0) + self._backend.openssl_assert(outlen[0] == self.algorithm.digest_size) + return self._backend._ffi.buffer(buf)[:outlen[0]] + + def verify(self, signature): + digest = self.finalize() + if not constant_time.bytes_eq(digest, signature): + raise InvalidSignature("Signature did not match digest.") diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/ocsp.py b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/ocsp.py new file mode 100644 index 0000000..7420f65 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/ocsp.py @@ -0,0 +1,381 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
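# --- Illustrative usage sketch (editorial note, not part of the vendored patch) ---
# _HMACContext above backs the keyed-hash API; verify() performs a constant-time
# comparison and raises InvalidSignature on mismatch. A minimal sketch, assuming
# the standard public API; the all-zero key is obviously illustrative.
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, hmac

key = b"\x00" * 32  # use a real random key in practice
h = hmac.HMAC(key, hashes.SHA256(), backend=default_backend())
h.update(b"message to authenticate")
tag = h.finalize()

verifier = hmac.HMAC(key, hashes.SHA256(), backend=default_backend())
verifier.update(b"message to authenticate")
verifier.verify(tag)  # raises InvalidSignature if the tag does not match
# --- end sketch ---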
+ +from __future__ import absolute_import, division, print_function + +import functools + +from cryptography import utils, x509 +from cryptography.exceptions import UnsupportedAlgorithm +from cryptography.hazmat.backends.openssl.decode_asn1 import ( + _CRL_ENTRY_REASON_CODE_TO_ENUM, _OCSP_BASICRESP_EXT_PARSER, + _OCSP_REQ_EXT_PARSER, _asn1_integer_to_int, + _asn1_string_to_bytes, _decode_x509_name, _obj2txt, + _parse_asn1_generalized_time, +) +from cryptography.hazmat.backends.openssl.x509 import _Certificate +from cryptography.hazmat.primitives import serialization +from cryptography.x509.ocsp import ( + OCSPCertStatus, OCSPRequest, OCSPResponse, OCSPResponseStatus, + _CERT_STATUS_TO_ENUM, _OIDS_TO_HASH, _RESPONSE_STATUS_TO_ENUM, +) + + +def _requires_successful_response(func): + @functools.wraps(func) + def wrapper(self, *args): + if self.response_status != OCSPResponseStatus.SUCCESSFUL: + raise ValueError( + "OCSP response status is not successful so the property " + "has no value" + ) + else: + return func(self, *args) + + return wrapper + + +def _issuer_key_hash(backend, cert_id): + key_hash = backend._ffi.new("ASN1_OCTET_STRING **") + res = backend._lib.OCSP_id_get0_info( + backend._ffi.NULL, backend._ffi.NULL, + key_hash, backend._ffi.NULL, cert_id + ) + backend.openssl_assert(res == 1) + backend.openssl_assert(key_hash[0] != backend._ffi.NULL) + return _asn1_string_to_bytes(backend, key_hash[0]) + + +def _issuer_name_hash(backend, cert_id): + name_hash = backend._ffi.new("ASN1_OCTET_STRING **") + res = backend._lib.OCSP_id_get0_info( + name_hash, backend._ffi.NULL, + backend._ffi.NULL, backend._ffi.NULL, cert_id + ) + backend.openssl_assert(res == 1) + backend.openssl_assert(name_hash[0] != backend._ffi.NULL) + return _asn1_string_to_bytes(backend, name_hash[0]) + + +def _serial_number(backend, cert_id): + num = backend._ffi.new("ASN1_INTEGER **") + res = backend._lib.OCSP_id_get0_info( + backend._ffi.NULL, backend._ffi.NULL, + backend._ffi.NULL, num, cert_id + ) + backend.openssl_assert(res == 1) + backend.openssl_assert(num[0] != backend._ffi.NULL) + return _asn1_integer_to_int(backend, num[0]) + + +def _hash_algorithm(backend, cert_id): + asn1obj = backend._ffi.new("ASN1_OBJECT **") + res = backend._lib.OCSP_id_get0_info( + backend._ffi.NULL, asn1obj, + backend._ffi.NULL, backend._ffi.NULL, cert_id + ) + backend.openssl_assert(res == 1) + backend.openssl_assert(asn1obj[0] != backend._ffi.NULL) + oid = _obj2txt(backend, asn1obj[0]) + try: + return _OIDS_TO_HASH[oid] + except KeyError: + raise UnsupportedAlgorithm( + "Signature algorithm OID: {} not recognized".format(oid) + ) + + +@utils.register_interface(OCSPResponse) +class _OCSPResponse(object): + def __init__(self, backend, ocsp_response): + self._backend = backend + self._ocsp_response = ocsp_response + status = self._backend._lib.OCSP_response_status(self._ocsp_response) + self._backend.openssl_assert(status in _RESPONSE_STATUS_TO_ENUM) + self._status = _RESPONSE_STATUS_TO_ENUM[status] + if self._status is OCSPResponseStatus.SUCCESSFUL: + basic = self._backend._lib.OCSP_response_get1_basic( + self._ocsp_response + ) + self._backend.openssl_assert(basic != self._backend._ffi.NULL) + self._basic = self._backend._ffi.gc( + basic, self._backend._lib.OCSP_BASICRESP_free + ) + self._backend.openssl_assert( + self._backend._lib.OCSP_resp_count(self._basic) == 1 + ) + self._single = self._backend._lib.OCSP_resp_get0(self._basic, 0) + self._backend.openssl_assert( + self._single != self._backend._ffi.NULL + ) + self._cert_id = 
self._backend._lib.OCSP_SINGLERESP_get0_id( + self._single + ) + self._backend.openssl_assert( + self._cert_id != self._backend._ffi.NULL + ) + + response_status = utils.read_only_property("_status") + + @property + @_requires_successful_response + def signature_algorithm_oid(self): + alg = self._backend._lib.OCSP_resp_get0_tbs_sigalg(self._basic) + self._backend.openssl_assert(alg != self._backend._ffi.NULL) + oid = _obj2txt(self._backend, alg.algorithm) + return x509.ObjectIdentifier(oid) + + @property + @_requires_successful_response + def signature_hash_algorithm(self): + oid = self.signature_algorithm_oid + try: + return x509._SIG_OIDS_TO_HASH[oid] + except KeyError: + raise UnsupportedAlgorithm( + "Signature algorithm OID:{} not recognized".format(oid) + ) + + @property + @_requires_successful_response + def signature(self): + sig = self._backend._lib.OCSP_resp_get0_signature(self._basic) + self._backend.openssl_assert(sig != self._backend._ffi.NULL) + return _asn1_string_to_bytes(self._backend, sig) + + @property + @_requires_successful_response + def tbs_response_bytes(self): + respdata = self._backend._lib.OCSP_resp_get0_respdata(self._basic) + self._backend.openssl_assert(respdata != self._backend._ffi.NULL) + pp = self._backend._ffi.new("unsigned char **") + res = self._backend._lib.i2d_OCSP_RESPDATA(respdata, pp) + self._backend.openssl_assert(pp[0] != self._backend._ffi.NULL) + pp = self._backend._ffi.gc( + pp, lambda pointer: self._backend._lib.OPENSSL_free(pointer[0]) + ) + self._backend.openssl_assert(res > 0) + return self._backend._ffi.buffer(pp[0], res)[:] + + @property + @_requires_successful_response + def certificates(self): + sk_x509 = self._backend._lib.OCSP_resp_get0_certs(self._basic) + num = self._backend._lib.sk_X509_num(sk_x509) + certs = [] + for i in range(num): + x509 = self._backend._lib.sk_X509_value(sk_x509, i) + self._backend.openssl_assert(x509 != self._backend._ffi.NULL) + cert = _Certificate(self._backend, x509) + # We need to keep the OCSP response that the certificate came from + # alive until the Certificate object itself goes out of scope, so + # we give it a private reference. 
+ cert._ocsp_resp = self + certs.append(cert) + + return certs + + @property + @_requires_successful_response + def responder_key_hash(self): + _, asn1_string = self._responder_key_name() + if asn1_string == self._backend._ffi.NULL: + return None + else: + return _asn1_string_to_bytes(self._backend, asn1_string) + + @property + @_requires_successful_response + def responder_name(self): + x509_name, _ = self._responder_key_name() + if x509_name == self._backend._ffi.NULL: + return None + else: + return _decode_x509_name(self._backend, x509_name) + + def _responder_key_name(self): + asn1_string = self._backend._ffi.new("ASN1_OCTET_STRING **") + x509_name = self._backend._ffi.new("X509_NAME **") + res = self._backend._lib.OCSP_resp_get0_id( + self._basic, asn1_string, x509_name + ) + self._backend.openssl_assert(res == 1) + return x509_name[0], asn1_string[0] + + @property + @_requires_successful_response + def produced_at(self): + produced_at = self._backend._lib.OCSP_resp_get0_produced_at( + self._basic + ) + return _parse_asn1_generalized_time(self._backend, produced_at) + + @property + @_requires_successful_response + def certificate_status(self): + status = self._backend._lib.OCSP_single_get0_status( + self._single, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + ) + self._backend.openssl_assert(status in _CERT_STATUS_TO_ENUM) + return _CERT_STATUS_TO_ENUM[status] + + @property + @_requires_successful_response + def revocation_time(self): + if self.certificate_status is not OCSPCertStatus.REVOKED: + return None + + asn1_time = self._backend._ffi.new("ASN1_GENERALIZEDTIME **") + self._backend._lib.OCSP_single_get0_status( + self._single, + self._backend._ffi.NULL, + asn1_time, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + ) + self._backend.openssl_assert(asn1_time[0] != self._backend._ffi.NULL) + return _parse_asn1_generalized_time(self._backend, asn1_time[0]) + + @property + @_requires_successful_response + def revocation_reason(self): + if self.certificate_status is not OCSPCertStatus.REVOKED: + return None + + reason_ptr = self._backend._ffi.new("int *") + self._backend._lib.OCSP_single_get0_status( + self._single, + reason_ptr, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + ) + # If no reason is encoded OpenSSL returns -1 + if reason_ptr[0] == -1: + return None + else: + self._backend.openssl_assert( + reason_ptr[0] in _CRL_ENTRY_REASON_CODE_TO_ENUM + ) + return _CRL_ENTRY_REASON_CODE_TO_ENUM[reason_ptr[0]] + + @property + @_requires_successful_response + def this_update(self): + asn1_time = self._backend._ffi.new("ASN1_GENERALIZEDTIME **") + self._backend._lib.OCSP_single_get0_status( + self._single, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + asn1_time, + self._backend._ffi.NULL, + ) + self._backend.openssl_assert(asn1_time[0] != self._backend._ffi.NULL) + return _parse_asn1_generalized_time(self._backend, asn1_time[0]) + + @property + @_requires_successful_response + def next_update(self): + asn1_time = self._backend._ffi.new("ASN1_GENERALIZEDTIME **") + self._backend._lib.OCSP_single_get0_status( + self._single, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + asn1_time, + ) + if asn1_time[0] != self._backend._ffi.NULL: + return _parse_asn1_generalized_time(self._backend, asn1_time[0]) + else: + return None + + @property + @_requires_successful_response + def issuer_key_hash(self): + return _issuer_key_hash(self._backend, 
self._cert_id) + + @property + @_requires_successful_response + def issuer_name_hash(self): + return _issuer_name_hash(self._backend, self._cert_id) + + @property + @_requires_successful_response + def hash_algorithm(self): + return _hash_algorithm(self._backend, self._cert_id) + + @property + @_requires_successful_response + def serial_number(self): + return _serial_number(self._backend, self._cert_id) + + @utils.cached_property + @_requires_successful_response + def extensions(self): + return _OCSP_BASICRESP_EXT_PARSER.parse(self._backend, self._basic) + + def public_bytes(self, encoding): + if encoding is not serialization.Encoding.DER: + raise ValueError( + "The only allowed encoding value is Encoding.DER" + ) + + bio = self._backend._create_mem_bio_gc() + res = self._backend._lib.i2d_OCSP_RESPONSE_bio( + bio, self._ocsp_response + ) + self._backend.openssl_assert(res > 0) + return self._backend._read_mem_bio(bio) + + +@utils.register_interface(OCSPRequest) +class _OCSPRequest(object): + def __init__(self, backend, ocsp_request): + if backend._lib.OCSP_request_onereq_count(ocsp_request) > 1: + raise NotImplementedError( + 'OCSP request contains more than one request' + ) + self._backend = backend + self._ocsp_request = ocsp_request + self._request = self._backend._lib.OCSP_request_onereq_get0( + self._ocsp_request, 0 + ) + self._backend.openssl_assert(self._request != self._backend._ffi.NULL) + self._cert_id = self._backend._lib.OCSP_onereq_get0_id(self._request) + self._backend.openssl_assert(self._cert_id != self._backend._ffi.NULL) + + @property + def issuer_key_hash(self): + return _issuer_key_hash(self._backend, self._cert_id) + + @property + def issuer_name_hash(self): + return _issuer_name_hash(self._backend, self._cert_id) + + @property + def serial_number(self): + return _serial_number(self._backend, self._cert_id) + + @property + def hash_algorithm(self): + return _hash_algorithm(self._backend, self._cert_id) + + @utils.cached_property + def extensions(self): + return _OCSP_REQ_EXT_PARSER.parse(self._backend, self._ocsp_request) + + def public_bytes(self, encoding): + if encoding is not serialization.Encoding.DER: + raise ValueError( + "The only allowed encoding value is Encoding.DER" + ) + + bio = self._backend._create_mem_bio_gc() + res = self._backend._lib.i2d_OCSP_REQUEST_bio(bio, self._ocsp_request) + self._backend.openssl_assert(res > 0) + return self._backend._read_mem_bio(bio) diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/poly1305.py b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/poly1305.py new file mode 100644 index 0000000..25448dd --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/poly1305.py @@ -0,0 +1,60 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
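The `_OCSPResponse` and `_OCSPRequest` wrappers added above are what `cryptography.x509.ocsp` returns when parsing DER; every single-response accessor is guarded by `_requires_successful_response`, so the response status must be checked first. A hedged sketch of typical consumer code, where `der_bytes` is assumed to hold a DER-encoded response fetched from a responder elsewhere:

from cryptography.x509 import ocsp

resp = ocsp.load_der_ocsp_response(der_bytes)   # der_bytes: assumed input

if resp.response_status == ocsp.OCSPResponseStatus.SUCCESSFUL:
    # These properties map onto OCSP_single_get0_status() in the backend above.
    print(resp.certificate_status)              # GOOD / REVOKED / UNKNOWN
    print(resp.this_update, resp.next_update)
    if resp.certificate_status == ocsp.OCSPCertStatus.REVOKED:
        print(resp.revocation_time, resp.revocation_reason)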
+ +from __future__ import absolute_import, division, print_function + + +from cryptography.exceptions import InvalidSignature +from cryptography.hazmat.primitives import constant_time + + +_POLY1305_TAG_SIZE = 16 +_POLY1305_KEY_SIZE = 32 + + +class _Poly1305Context(object): + def __init__(self, backend, key): + self._backend = backend + + key_ptr = self._backend._ffi.from_buffer(key) + # This function copies the key into OpenSSL-owned memory so we don't + # need to retain it ourselves + evp_pkey = self._backend._lib.EVP_PKEY_new_raw_private_key( + self._backend._lib.NID_poly1305, + self._backend._ffi.NULL, key_ptr, len(key) + ) + self._backend.openssl_assert(evp_pkey != self._backend._ffi.NULL) + self._evp_pkey = self._backend._ffi.gc( + evp_pkey, self._backend._lib.EVP_PKEY_free + ) + ctx = self._backend._lib.Cryptography_EVP_MD_CTX_new() + self._backend.openssl_assert(ctx != self._backend._ffi.NULL) + self._ctx = self._backend._ffi.gc( + ctx, self._backend._lib.Cryptography_EVP_MD_CTX_free + ) + res = self._backend._lib.EVP_DigestSignInit( + self._ctx, self._backend._ffi.NULL, self._backend._ffi.NULL, + self._backend._ffi.NULL, self._evp_pkey + ) + self._backend.openssl_assert(res == 1) + + def update(self, data): + data_ptr = self._backend._ffi.from_buffer(data) + res = self._backend._lib.EVP_DigestSignUpdate( + self._ctx, data_ptr, len(data) + ) + self._backend.openssl_assert(res != 0) + + def finalize(self): + buf = self._backend._ffi.new("unsigned char[]", _POLY1305_TAG_SIZE) + outlen = self._backend._ffi.new("size_t *") + res = self._backend._lib.EVP_DigestSignFinal(self._ctx, buf, outlen) + self._backend.openssl_assert(res != 0) + self._backend.openssl_assert(outlen[0] == _POLY1305_TAG_SIZE) + return self._backend._ffi.buffer(buf)[:outlen[0]] + + def verify(self, tag): + mac = self.finalize() + if not constant_time.bytes_eq(mac, tag): + raise InvalidSignature("Value did not match computed tag.") diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/rsa.py b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/rsa.py new file mode 100644 index 0000000..3e4c2fd --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/rsa.py @@ -0,0 +1,478 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
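`_Poly1305Context` above implements the one-time authenticator exposed as `cryptography.hazmat.primitives.poly1305.Poly1305`; it needs an OpenSSL that provides `EVP_PKEY_new_raw_private_key` and `NID_poly1305` (1.1.1+), and a Poly1305 key is 32 bytes and must never be reused across messages. A short sketch under those assumptions:

import os

from cryptography.hazmat.primitives import poly1305

key = os.urandom(32)                     # one-time 32-byte key, never reused
p = poly1305.Poly1305(key)
p.update(b"message to authenticate")
tag = p.finalize()                       # 16-byte tag (EVP_DigestSignFinal)

check = poly1305.Poly1305(key)
check.update(b"message to authenticate")
check.verify(tag)                        # constant-time; raises InvalidSignature on mismatch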
+ +from __future__ import absolute_import, division, print_function + +import math + +from cryptography import utils +from cryptography.exceptions import ( + InvalidSignature, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.backends.openssl.utils import ( + _calculate_digest_and_algorithm, _check_not_prehashed, + _warn_sign_verify_deprecated +) +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric import ( + AsymmetricSignatureContext, AsymmetricVerificationContext, rsa +) +from cryptography.hazmat.primitives.asymmetric.padding import ( + AsymmetricPadding, MGF1, OAEP, PKCS1v15, PSS, calculate_max_pss_salt_length +) +from cryptography.hazmat.primitives.asymmetric.rsa import ( + RSAPrivateKeyWithSerialization, RSAPublicKeyWithSerialization +) + + +def _get_rsa_pss_salt_length(pss, key, hash_algorithm): + salt = pss._salt_length + + if salt is MGF1.MAX_LENGTH or salt is PSS.MAX_LENGTH: + return calculate_max_pss_salt_length(key, hash_algorithm) + else: + return salt + + +def _enc_dec_rsa(backend, key, data, padding): + if not isinstance(padding, AsymmetricPadding): + raise TypeError("Padding must be an instance of AsymmetricPadding.") + + if isinstance(padding, PKCS1v15): + padding_enum = backend._lib.RSA_PKCS1_PADDING + elif isinstance(padding, OAEP): + padding_enum = backend._lib.RSA_PKCS1_OAEP_PADDING + + if not isinstance(padding._mgf, MGF1): + raise UnsupportedAlgorithm( + "Only MGF1 is supported by this backend.", + _Reasons.UNSUPPORTED_MGF + ) + + if not backend.rsa_padding_supported(padding): + raise UnsupportedAlgorithm( + "This combination of padding and hash algorithm is not " + "supported by this backend.", + _Reasons.UNSUPPORTED_PADDING + ) + + else: + raise UnsupportedAlgorithm( + "{} is not supported by this backend.".format( + padding.name + ), + _Reasons.UNSUPPORTED_PADDING + ) + + return _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum, padding) + + +def _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum, padding): + if isinstance(key, _RSAPublicKey): + init = backend._lib.EVP_PKEY_encrypt_init + crypt = backend._lib.EVP_PKEY_encrypt + else: + init = backend._lib.EVP_PKEY_decrypt_init + crypt = backend._lib.EVP_PKEY_decrypt + + pkey_ctx = backend._lib.EVP_PKEY_CTX_new( + key._evp_pkey, backend._ffi.NULL + ) + backend.openssl_assert(pkey_ctx != backend._ffi.NULL) + pkey_ctx = backend._ffi.gc(pkey_ctx, backend._lib.EVP_PKEY_CTX_free) + res = init(pkey_ctx) + backend.openssl_assert(res == 1) + res = backend._lib.EVP_PKEY_CTX_set_rsa_padding( + pkey_ctx, padding_enum) + backend.openssl_assert(res > 0) + buf_size = backend._lib.EVP_PKEY_size(key._evp_pkey) + backend.openssl_assert(buf_size > 0) + if ( + isinstance(padding, OAEP) and + backend._lib.Cryptography_HAS_RSA_OAEP_MD + ): + mgf1_md = backend._evp_md_non_null_from_algorithm( + padding._mgf._algorithm) + res = backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md(pkey_ctx, mgf1_md) + backend.openssl_assert(res > 0) + oaep_md = backend._evp_md_non_null_from_algorithm(padding._algorithm) + res = backend._lib.EVP_PKEY_CTX_set_rsa_oaep_md(pkey_ctx, oaep_md) + backend.openssl_assert(res > 0) + + if ( + isinstance(padding, OAEP) and + padding._label is not None and + len(padding._label) > 0 + ): + # set0_rsa_oaep_label takes ownership of the char * so we need to + # copy it into some new memory + labelptr = backend._lib.OPENSSL_malloc(len(padding._label)) + backend.openssl_assert(labelptr != backend._ffi.NULL) + backend._ffi.memmove(labelptr, padding._label, 
len(padding._label)) + res = backend._lib.EVP_PKEY_CTX_set0_rsa_oaep_label( + pkey_ctx, labelptr, len(padding._label) + ) + backend.openssl_assert(res == 1) + + outlen = backend._ffi.new("size_t *", buf_size) + buf = backend._ffi.new("unsigned char[]", buf_size) + res = crypt(pkey_ctx, buf, outlen, data, len(data)) + if res <= 0: + _handle_rsa_enc_dec_error(backend, key) + + return backend._ffi.buffer(buf)[:outlen[0]] + + +def _handle_rsa_enc_dec_error(backend, key): + errors = backend._consume_errors() + backend.openssl_assert(errors) + backend.openssl_assert(errors[0].lib == backend._lib.ERR_LIB_RSA) + if isinstance(key, _RSAPublicKey): + backend.openssl_assert( + errors[0].reason == backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE + ) + raise ValueError( + "Data too long for key size. Encrypt less data or use a " + "larger key size." + ) + else: + decoding_errors = [ + backend._lib.RSA_R_BLOCK_TYPE_IS_NOT_01, + backend._lib.RSA_R_BLOCK_TYPE_IS_NOT_02, + backend._lib.RSA_R_OAEP_DECODING_ERROR, + # Though this error looks similar to the + # RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE, this occurs on decrypts, + # rather than on encrypts + backend._lib.RSA_R_DATA_TOO_LARGE_FOR_MODULUS, + ] + if backend._lib.Cryptography_HAS_RSA_R_PKCS_DECODING_ERROR: + decoding_errors.append(backend._lib.RSA_R_PKCS_DECODING_ERROR) + + backend.openssl_assert(errors[0].reason in decoding_errors) + raise ValueError("Decryption failed.") + + +def _rsa_sig_determine_padding(backend, key, padding, algorithm): + if not isinstance(padding, AsymmetricPadding): + raise TypeError("Expected provider of AsymmetricPadding.") + + pkey_size = backend._lib.EVP_PKEY_size(key._evp_pkey) + backend.openssl_assert(pkey_size > 0) + + if isinstance(padding, PKCS1v15): + padding_enum = backend._lib.RSA_PKCS1_PADDING + elif isinstance(padding, PSS): + if not isinstance(padding._mgf, MGF1): + raise UnsupportedAlgorithm( + "Only MGF1 is supported by this backend.", + _Reasons.UNSUPPORTED_MGF + ) + + # Size of key in bytes - 2 is the maximum + # PSS signature length (salt length is checked later) + if pkey_size - algorithm.digest_size - 2 < 0: + raise ValueError("Digest too large for key size. 
Use a larger " + "key or different digest.") + + padding_enum = backend._lib.RSA_PKCS1_PSS_PADDING + else: + raise UnsupportedAlgorithm( + "{} is not supported by this backend.".format(padding.name), + _Reasons.UNSUPPORTED_PADDING + ) + + return padding_enum + + +def _rsa_sig_setup(backend, padding, algorithm, key, data, init_func): + padding_enum = _rsa_sig_determine_padding(backend, key, padding, algorithm) + evp_md = backend._evp_md_non_null_from_algorithm(algorithm) + pkey_ctx = backend._lib.EVP_PKEY_CTX_new(key._evp_pkey, backend._ffi.NULL) + backend.openssl_assert(pkey_ctx != backend._ffi.NULL) + pkey_ctx = backend._ffi.gc(pkey_ctx, backend._lib.EVP_PKEY_CTX_free) + res = init_func(pkey_ctx) + backend.openssl_assert(res == 1) + res = backend._lib.EVP_PKEY_CTX_set_signature_md(pkey_ctx, evp_md) + if res == 0: + backend._consume_errors() + raise UnsupportedAlgorithm( + "{} is not supported by this backend for RSA signing.".format( + algorithm.name + ), + _Reasons.UNSUPPORTED_HASH + ) + res = backend._lib.EVP_PKEY_CTX_set_rsa_padding(pkey_ctx, padding_enum) + backend.openssl_assert(res > 0) + if isinstance(padding, PSS): + res = backend._lib.EVP_PKEY_CTX_set_rsa_pss_saltlen( + pkey_ctx, _get_rsa_pss_salt_length(padding, key, algorithm) + ) + backend.openssl_assert(res > 0) + + mgf1_md = backend._evp_md_non_null_from_algorithm( + padding._mgf._algorithm) + res = backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md(pkey_ctx, mgf1_md) + backend.openssl_assert(res > 0) + + return pkey_ctx + + +def _rsa_sig_sign(backend, padding, algorithm, private_key, data): + pkey_ctx = _rsa_sig_setup( + backend, padding, algorithm, private_key, data, + backend._lib.EVP_PKEY_sign_init + ) + buflen = backend._ffi.new("size_t *") + res = backend._lib.EVP_PKEY_sign( + pkey_ctx, + backend._ffi.NULL, + buflen, + data, + len(data) + ) + backend.openssl_assert(res == 1) + buf = backend._ffi.new("unsigned char[]", buflen[0]) + res = backend._lib.EVP_PKEY_sign( + pkey_ctx, buf, buflen, data, len(data)) + if res != 1: + errors = backend._consume_errors() + backend.openssl_assert(errors[0].lib == backend._lib.ERR_LIB_RSA) + if ( + errors[0].reason == + backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE + ): + reason = ("Salt length too long for key size. Try using " + "MAX_LENGTH instead.") + else: + backend.openssl_assert( + errors[0].reason == + backend._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY + ) + reason = "Digest too large for key size. Use a larger key." + raise ValueError(reason) + + return backend._ffi.buffer(buf)[:] + + +def _rsa_sig_verify(backend, padding, algorithm, public_key, signature, data): + pkey_ctx = _rsa_sig_setup( + backend, padding, algorithm, public_key, data, + backend._lib.EVP_PKEY_verify_init + ) + res = backend._lib.EVP_PKEY_verify( + pkey_ctx, signature, len(signature), data, len(data) + ) + # The previous call can return negative numbers in the event of an + # error. This is not a signature failure but we need to fail if it + # occurs. + backend.openssl_assert(res >= 0) + if res == 0: + backend._consume_errors() + raise InvalidSignature + + +@utils.register_interface(AsymmetricSignatureContext) +class _RSASignatureContext(object): + def __init__(self, backend, private_key, padding, algorithm): + self._backend = backend + self._private_key = private_key + + # We now call _rsa_sig_determine_padding in _rsa_sig_setup. However + # we need to make a pointless call to it here so we maintain the + # API of erroring on init with this context if the values are invalid. 
+ _rsa_sig_determine_padding(backend, private_key, padding, algorithm) + self._padding = padding + self._algorithm = algorithm + self._hash_ctx = hashes.Hash(self._algorithm, self._backend) + + def update(self, data): + self._hash_ctx.update(data) + + def finalize(self): + return _rsa_sig_sign( + self._backend, + self._padding, + self._algorithm, + self._private_key, + self._hash_ctx.finalize() + ) + + +@utils.register_interface(AsymmetricVerificationContext) +class _RSAVerificationContext(object): + def __init__(self, backend, public_key, signature, padding, algorithm): + self._backend = backend + self._public_key = public_key + self._signature = signature + self._padding = padding + # We now call _rsa_sig_determine_padding in _rsa_sig_setup. However + # we need to make a pointless call to it here so we maintain the + # API of erroring on init with this context if the values are invalid. + _rsa_sig_determine_padding(backend, public_key, padding, algorithm) + + padding = padding + self._algorithm = algorithm + self._hash_ctx = hashes.Hash(self._algorithm, self._backend) + + def update(self, data): + self._hash_ctx.update(data) + + def verify(self): + return _rsa_sig_verify( + self._backend, + self._padding, + self._algorithm, + self._public_key, + self._signature, + self._hash_ctx.finalize() + ) + + +@utils.register_interface(RSAPrivateKeyWithSerialization) +class _RSAPrivateKey(object): + def __init__(self, backend, rsa_cdata, evp_pkey): + self._backend = backend + self._rsa_cdata = rsa_cdata + self._evp_pkey = evp_pkey + + n = self._backend._ffi.new("BIGNUM **") + self._backend._lib.RSA_get0_key( + self._rsa_cdata, n, self._backend._ffi.NULL, + self._backend._ffi.NULL + ) + self._backend.openssl_assert(n[0] != self._backend._ffi.NULL) + self._key_size = self._backend._lib.BN_num_bits(n[0]) + + key_size = utils.read_only_property("_key_size") + + def signer(self, padding, algorithm): + _warn_sign_verify_deprecated() + _check_not_prehashed(algorithm) + return _RSASignatureContext(self._backend, self, padding, algorithm) + + def decrypt(self, ciphertext, padding): + key_size_bytes = int(math.ceil(self.key_size / 8.0)) + if key_size_bytes != len(ciphertext): + raise ValueError("Ciphertext length must be equal to key size.") + + return _enc_dec_rsa(self._backend, self, ciphertext, padding) + + def public_key(self): + ctx = self._backend._lib.RSAPublicKey_dup(self._rsa_cdata) + self._backend.openssl_assert(ctx != self._backend._ffi.NULL) + ctx = self._backend._ffi.gc(ctx, self._backend._lib.RSA_free) + res = self._backend._lib.RSA_blinding_on(ctx, self._backend._ffi.NULL) + self._backend.openssl_assert(res == 1) + evp_pkey = self._backend._rsa_cdata_to_evp_pkey(ctx) + return _RSAPublicKey(self._backend, ctx, evp_pkey) + + def private_numbers(self): + n = self._backend._ffi.new("BIGNUM **") + e = self._backend._ffi.new("BIGNUM **") + d = self._backend._ffi.new("BIGNUM **") + p = self._backend._ffi.new("BIGNUM **") + q = self._backend._ffi.new("BIGNUM **") + dmp1 = self._backend._ffi.new("BIGNUM **") + dmq1 = self._backend._ffi.new("BIGNUM **") + iqmp = self._backend._ffi.new("BIGNUM **") + self._backend._lib.RSA_get0_key(self._rsa_cdata, n, e, d) + self._backend.openssl_assert(n[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(e[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(d[0] != self._backend._ffi.NULL) + self._backend._lib.RSA_get0_factors(self._rsa_cdata, p, q) + self._backend.openssl_assert(p[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(q[0] 
!= self._backend._ffi.NULL) + self._backend._lib.RSA_get0_crt_params( + self._rsa_cdata, dmp1, dmq1, iqmp + ) + self._backend.openssl_assert(dmp1[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(dmq1[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(iqmp[0] != self._backend._ffi.NULL) + return rsa.RSAPrivateNumbers( + p=self._backend._bn_to_int(p[0]), + q=self._backend._bn_to_int(q[0]), + d=self._backend._bn_to_int(d[0]), + dmp1=self._backend._bn_to_int(dmp1[0]), + dmq1=self._backend._bn_to_int(dmq1[0]), + iqmp=self._backend._bn_to_int(iqmp[0]), + public_numbers=rsa.RSAPublicNumbers( + e=self._backend._bn_to_int(e[0]), + n=self._backend._bn_to_int(n[0]), + ) + ) + + def private_bytes(self, encoding, format, encryption_algorithm): + return self._backend._private_key_bytes( + encoding, + format, + encryption_algorithm, + self._evp_pkey, + self._rsa_cdata + ) + + def sign(self, data, padding, algorithm): + data, algorithm = _calculate_digest_and_algorithm( + self._backend, data, algorithm + ) + return _rsa_sig_sign(self._backend, padding, algorithm, self, data) + + +@utils.register_interface(RSAPublicKeyWithSerialization) +class _RSAPublicKey(object): + def __init__(self, backend, rsa_cdata, evp_pkey): + self._backend = backend + self._rsa_cdata = rsa_cdata + self._evp_pkey = evp_pkey + + n = self._backend._ffi.new("BIGNUM **") + self._backend._lib.RSA_get0_key( + self._rsa_cdata, n, self._backend._ffi.NULL, + self._backend._ffi.NULL + ) + self._backend.openssl_assert(n[0] != self._backend._ffi.NULL) + self._key_size = self._backend._lib.BN_num_bits(n[0]) + + key_size = utils.read_only_property("_key_size") + + def verifier(self, signature, padding, algorithm): + _warn_sign_verify_deprecated() + utils._check_bytes("signature", signature) + + _check_not_prehashed(algorithm) + return _RSAVerificationContext( + self._backend, self, signature, padding, algorithm + ) + + def encrypt(self, plaintext, padding): + return _enc_dec_rsa(self._backend, self, plaintext, padding) + + def public_numbers(self): + n = self._backend._ffi.new("BIGNUM **") + e = self._backend._ffi.new("BIGNUM **") + self._backend._lib.RSA_get0_key( + self._rsa_cdata, n, e, self._backend._ffi.NULL + ) + self._backend.openssl_assert(n[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(e[0] != self._backend._ffi.NULL) + return rsa.RSAPublicNumbers( + e=self._backend._bn_to_int(e[0]), + n=self._backend._bn_to_int(n[0]), + ) + + def public_bytes(self, encoding, format): + return self._backend._public_key_bytes( + encoding, + format, + self, + self._evp_pkey, + self._rsa_cdata + ) + + def verify(self, signature, data, padding, algorithm): + data, algorithm = _calculate_digest_and_algorithm( + self._backend, data, algorithm + ) + return _rsa_sig_verify( + self._backend, padding, algorithm, self, signature, data + ) diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/utils.py b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/utils.py new file mode 100644 index 0000000..ee472c0 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/utils.py @@ -0,0 +1,69 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
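The RSA backend above routes both signing and encryption through `EVP_PKEY_CTX`: `_rsa_sig_sign`/`_rsa_sig_verify` handle PKCS1v15 and PSS (setting the salt length and MGF1 digest), while `_enc_dec_rsa_pkey_ctx` handles PKCS1v15 and OAEP. A compact sketch of the public API these helpers serve; the 2048-bit key size is illustrative only.

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding, rsa

private_key = rsa.generate_private_key(
    public_exponent=65537, key_size=2048, backend=default_backend()
)
public_key = private_key.public_key()

message = b"signed via the EVP_PKEY_sign path above"
pss = padding.PSS(mgf=padding.MGF1(hashes.SHA256()),
                  salt_length=padding.PSS.MAX_LENGTH)
signature = private_key.sign(message, pss, hashes.SHA256())
public_key.verify(signature, message, pss, hashes.SHA256())  # raises InvalidSignature on failure

oaep = padding.OAEP(mgf=padding.MGF1(hashes.SHA256()),
                    algorithm=hashes.SHA256(), label=None)
ciphertext = public_key.encrypt(b"a short secret", oaep)
assert private_key.decrypt(ciphertext, oaep) == b"a short secret"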
+ +from __future__ import absolute_import, division, print_function + +import warnings + +from cryptography import utils +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric.utils import Prehashed + + +def _evp_pkey_derive(backend, evp_pkey, peer_public_key): + ctx = backend._lib.EVP_PKEY_CTX_new(evp_pkey, backend._ffi.NULL) + backend.openssl_assert(ctx != backend._ffi.NULL) + ctx = backend._ffi.gc(ctx, backend._lib.EVP_PKEY_CTX_free) + res = backend._lib.EVP_PKEY_derive_init(ctx) + backend.openssl_assert(res == 1) + res = backend._lib.EVP_PKEY_derive_set_peer( + ctx, peer_public_key._evp_pkey + ) + backend.openssl_assert(res == 1) + keylen = backend._ffi.new("size_t *") + res = backend._lib.EVP_PKEY_derive(ctx, backend._ffi.NULL, keylen) + backend.openssl_assert(res == 1) + backend.openssl_assert(keylen[0] > 0) + buf = backend._ffi.new("unsigned char[]", keylen[0]) + res = backend._lib.EVP_PKEY_derive(ctx, buf, keylen) + if res != 1: + raise ValueError( + "Null shared key derived from public/private pair." + ) + + return backend._ffi.buffer(buf, keylen[0])[:] + + +def _calculate_digest_and_algorithm(backend, data, algorithm): + if not isinstance(algorithm, Prehashed): + hash_ctx = hashes.Hash(algorithm, backend) + hash_ctx.update(data) + data = hash_ctx.finalize() + else: + algorithm = algorithm._algorithm + + if len(data) != algorithm.digest_size: + raise ValueError( + "The provided data must be the same length as the hash " + "algorithm's digest size." + ) + + return (data, algorithm) + + +def _check_not_prehashed(signature_algorithm): + if isinstance(signature_algorithm, Prehashed): + raise TypeError( + "Prehashed is only supported in the sign and verify methods. " + "It cannot be used with signer or verifier." + ) + + +def _warn_sign_verify_deprecated(): + warnings.warn( + "signer and verifier have been deprecated. Please use sign " + "and verify instead.", + utils.PersistentlyDeprecated2017, + stacklevel=3 + ) diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/x25519.py b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/x25519.py new file mode 100644 index 0000000..9aab25b --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/x25519.py @@ -0,0 +1,149 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import warnings + +from cryptography import utils +from cryptography.hazmat.backends.openssl.utils import _evp_pkey_derive +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric.x25519 import ( + X25519PrivateKey, X25519PublicKey +) + + +_X25519_KEY_SIZE = 32 + + +@utils.register_interface(X25519PublicKey) +class _X25519PublicKey(object): + def __init__(self, backend, evp_pkey): + self._backend = backend + self._evp_pkey = evp_pkey + + def public_bytes(self, encoding=None, format=None): + if encoding is None or format is None: + if encoding is not None or format is not None: + raise ValueError("Both encoding and format are required") + else: + warnings.warn( + "public_bytes now requires encoding and format arguments. 
" + "Support for calling without arguments will be removed in " + "cryptography 2.7", + utils.DeprecatedIn25, + ) + encoding = serialization.Encoding.Raw + format = serialization.PublicFormat.Raw + if ( + encoding is serialization.Encoding.Raw or + format is serialization.PublicFormat.Raw + ): + if ( + encoding is not serialization.Encoding.Raw or + format is not serialization.PublicFormat.Raw + ): + raise ValueError( + "When using Raw both encoding and format must be Raw" + ) + + return self._raw_public_bytes() + + if ( + encoding in serialization._PEM_DER and + format is not serialization.PublicFormat.SubjectPublicKeyInfo + ): + raise ValueError( + "format must be SubjectPublicKeyInfo when encoding is PEM or " + "DER" + ) + + return self._backend._public_key_bytes( + encoding, format, self, self._evp_pkey, None + ) + + def _raw_public_bytes(self): + ucharpp = self._backend._ffi.new("unsigned char **") + res = self._backend._lib.EVP_PKEY_get1_tls_encodedpoint( + self._evp_pkey, ucharpp + ) + self._backend.openssl_assert(res == 32) + self._backend.openssl_assert(ucharpp[0] != self._backend._ffi.NULL) + data = self._backend._ffi.gc( + ucharpp[0], self._backend._lib.OPENSSL_free + ) + return self._backend._ffi.buffer(data, res)[:] + + +@utils.register_interface(X25519PrivateKey) +class _X25519PrivateKey(object): + def __init__(self, backend, evp_pkey): + self._backend = backend + self._evp_pkey = evp_pkey + + def public_key(self): + bio = self._backend._create_mem_bio_gc() + res = self._backend._lib.i2d_PUBKEY_bio(bio, self._evp_pkey) + self._backend.openssl_assert(res == 1) + evp_pkey = self._backend._lib.d2i_PUBKEY_bio( + bio, self._backend._ffi.NULL + ) + self._backend.openssl_assert(evp_pkey != self._backend._ffi.NULL) + evp_pkey = self._backend._ffi.gc( + evp_pkey, self._backend._lib.EVP_PKEY_free + ) + return _X25519PublicKey(self._backend, evp_pkey) + + def exchange(self, peer_public_key): + if not isinstance(peer_public_key, X25519PublicKey): + raise TypeError("peer_public_key must be X25519PublicKey.") + + return _evp_pkey_derive( + self._backend, self._evp_pkey, peer_public_key + ) + + def private_bytes(self, encoding, format, encryption_algorithm): + if ( + encoding is serialization.Encoding.Raw or + format is serialization.PublicFormat.Raw + ): + if ( + format is not serialization.PrivateFormat.Raw or + encoding is not serialization.Encoding.Raw or not + isinstance(encryption_algorithm, serialization.NoEncryption) + ): + raise ValueError( + "When using Raw both encoding and format must be Raw " + "and encryption_algorithm must be NoEncryption()" + ) + + return self._raw_private_bytes() + + if ( + encoding in serialization._PEM_DER and + format is not serialization.PrivateFormat.PKCS8 + ): + raise ValueError( + "format must be PKCS8 when encoding is PEM or DER" + ) + + return self._backend._private_key_bytes( + encoding, format, encryption_algorithm, self._evp_pkey, None + ) + + def _raw_private_bytes(self): + # When we drop support for CRYPTOGRAPHY_OPENSSL_LESS_THAN_111 we can + # switch this to EVP_PKEY_new_raw_private_key + # The trick we use here is serializing to a PKCS8 key and just + # using the last 32 bytes, which is the key itself. 
+ bio = self._backend._create_mem_bio_gc() + res = self._backend._lib.i2d_PKCS8PrivateKey_bio( + bio, self._evp_pkey, + self._backend._ffi.NULL, self._backend._ffi.NULL, + 0, self._backend._ffi.NULL, self._backend._ffi.NULL + ) + self._backend.openssl_assert(res == 1) + pkcs8 = self._backend._read_mem_bio(bio) + self._backend.openssl_assert(len(pkcs8) == 48) + return pkcs8[-_X25519_KEY_SIZE:] diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/x448.py b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/x448.py new file mode 100644 index 0000000..fe0dcd9 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/x448.py @@ -0,0 +1,123 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +from cryptography import utils +from cryptography.hazmat.backends.openssl.utils import _evp_pkey_derive +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric.x448 import ( + X448PrivateKey, X448PublicKey +) + +_X448_KEY_SIZE = 56 + + +@utils.register_interface(X448PublicKey) +class _X448PublicKey(object): + def __init__(self, backend, evp_pkey): + self._backend = backend + self._evp_pkey = evp_pkey + + def public_bytes(self, encoding, format): + if ( + encoding is serialization.Encoding.Raw or + format is serialization.PublicFormat.Raw + ): + if ( + encoding is not serialization.Encoding.Raw or + format is not serialization.PublicFormat.Raw + ): + raise ValueError( + "When using Raw both encoding and format must be Raw" + ) + + return self._raw_public_bytes() + + if ( + encoding in serialization._PEM_DER and + format is not serialization.PublicFormat.SubjectPublicKeyInfo + ): + raise ValueError( + "format must be SubjectPublicKeyInfo when encoding is PEM or " + "DER" + ) + + return self._backend._public_key_bytes( + encoding, format, self, self._evp_pkey, None + ) + + def _raw_public_bytes(self): + buf = self._backend._ffi.new("unsigned char []", _X448_KEY_SIZE) + buflen = self._backend._ffi.new("size_t *", _X448_KEY_SIZE) + res = self._backend._lib.EVP_PKEY_get_raw_public_key( + self._evp_pkey, buf, buflen + ) + self._backend.openssl_assert(res == 1) + self._backend.openssl_assert(buflen[0] == _X448_KEY_SIZE) + return self._backend._ffi.buffer(buf, _X448_KEY_SIZE)[:] + + +@utils.register_interface(X448PrivateKey) +class _X448PrivateKey(object): + def __init__(self, backend, evp_pkey): + self._backend = backend + self._evp_pkey = evp_pkey + + def public_key(self): + buf = self._backend._ffi.new("unsigned char []", _X448_KEY_SIZE) + buflen = self._backend._ffi.new("size_t *", _X448_KEY_SIZE) + res = self._backend._lib.EVP_PKEY_get_raw_public_key( + self._evp_pkey, buf, buflen + ) + self._backend.openssl_assert(res == 1) + self._backend.openssl_assert(buflen[0] == _X448_KEY_SIZE) + return self._backend.x448_load_public_bytes(buf) + + def exchange(self, peer_public_key): + if not isinstance(peer_public_key, X448PublicKey): + raise TypeError("peer_public_key must be X448PublicKey.") + + return _evp_pkey_derive( + self._backend, self._evp_pkey, peer_public_key + ) + + def private_bytes(self, encoding, format, encryption_algorithm): + if ( + encoding is serialization.Encoding.Raw or + format is serialization.PublicFormat.Raw + ): + if ( + format is not serialization.PrivateFormat.Raw 
or + encoding is not serialization.Encoding.Raw or not + isinstance(encryption_algorithm, serialization.NoEncryption) + ): + raise ValueError( + "When using Raw both encoding and format must be Raw " + "and encryption_algorithm must be NoEncryption()" + ) + + return self._raw_private_bytes() + + if ( + encoding in serialization._PEM_DER and + format is not serialization.PrivateFormat.PKCS8 + ): + raise ValueError( + "format must be PKCS8 when encoding is PEM or DER" + ) + + return self._backend._private_key_bytes( + encoding, format, encryption_algorithm, self._evp_pkey, None + ) + + def _raw_private_bytes(self): + buf = self._backend._ffi.new("unsigned char []", _X448_KEY_SIZE) + buflen = self._backend._ffi.new("size_t *", _X448_KEY_SIZE) + res = self._backend._lib.EVP_PKEY_get_raw_private_key( + self._evp_pkey, buf, buflen + ) + self._backend.openssl_assert(res == 1) + self._backend.openssl_assert(buflen[0] == _X448_KEY_SIZE) + return self._backend._ffi.buffer(buf, _X448_KEY_SIZE)[:] diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/x509.py b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/x509.py new file mode 100644 index 0000000..efbb179 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/backends/openssl/x509.py @@ -0,0 +1,546 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import datetime +import operator + +from cryptography import utils, x509 +from cryptography.exceptions import UnsupportedAlgorithm +from cryptography.hazmat.backends.openssl.decode_asn1 import ( + _CERTIFICATE_EXTENSION_PARSER, _CERTIFICATE_EXTENSION_PARSER_NO_SCT, + _CRL_EXTENSION_PARSER, _CSR_EXTENSION_PARSER, + _REVOKED_CERTIFICATE_EXTENSION_PARSER, _asn1_integer_to_int, + _asn1_string_to_bytes, _decode_x509_name, _obj2txt, _parse_asn1_time +) +from cryptography.hazmat.backends.openssl.encode_asn1 import ( + _encode_asn1_int_gc +) +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import dsa, ec, rsa + + +@utils.register_interface(x509.Certificate) +class _Certificate(object): + def __init__(self, backend, x509): + self._backend = backend + self._x509 = x509 + + def __repr__(self): + return "".format(self.subject) + + def __eq__(self, other): + if not isinstance(other, x509.Certificate): + return NotImplemented + + res = self._backend._lib.X509_cmp(self._x509, other._x509) + return res == 0 + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(self.public_bytes(serialization.Encoding.DER)) + + def fingerprint(self, algorithm): + h = hashes.Hash(algorithm, self._backend) + h.update(self.public_bytes(serialization.Encoding.DER)) + return h.finalize() + + @property + def version(self): + version = self._backend._lib.X509_get_version(self._x509) + if version == 0: + return x509.Version.v1 + elif version == 2: + return x509.Version.v3 + else: + raise x509.InvalidVersion( + "{} is not a valid X509 version".format(version), version + ) + + @property + def serial_number(self): + asn1_int = self._backend._lib.X509_get_serialNumber(self._x509) + self._backend.openssl_assert(asn1_int != self._backend._ffi.NULL) + return _asn1_integer_to_int(self._backend, asn1_int) + + def public_key(self): + pkey = 
self._backend._lib.X509_get_pubkey(self._x509) + if pkey == self._backend._ffi.NULL: + # Remove errors from the stack. + self._backend._consume_errors() + raise ValueError("Certificate public key is of an unknown type") + + pkey = self._backend._ffi.gc(pkey, self._backend._lib.EVP_PKEY_free) + + return self._backend._evp_pkey_to_public_key(pkey) + + @property + def not_valid_before(self): + asn1_time = self._backend._lib.X509_getm_notBefore(self._x509) + return _parse_asn1_time(self._backend, asn1_time) + + @property + def not_valid_after(self): + asn1_time = self._backend._lib.X509_getm_notAfter(self._x509) + return _parse_asn1_time(self._backend, asn1_time) + + @property + def issuer(self): + issuer = self._backend._lib.X509_get_issuer_name(self._x509) + self._backend.openssl_assert(issuer != self._backend._ffi.NULL) + return _decode_x509_name(self._backend, issuer) + + @property + def subject(self): + subject = self._backend._lib.X509_get_subject_name(self._x509) + self._backend.openssl_assert(subject != self._backend._ffi.NULL) + return _decode_x509_name(self._backend, subject) + + @property + def signature_hash_algorithm(self): + oid = self.signature_algorithm_oid + try: + return x509._SIG_OIDS_TO_HASH[oid] + except KeyError: + raise UnsupportedAlgorithm( + "Signature algorithm OID:{} not recognized".format(oid) + ) + + @property + def signature_algorithm_oid(self): + alg = self._backend._ffi.new("X509_ALGOR **") + self._backend._lib.X509_get0_signature( + self._backend._ffi.NULL, alg, self._x509 + ) + self._backend.openssl_assert(alg[0] != self._backend._ffi.NULL) + oid = _obj2txt(self._backend, alg[0].algorithm) + return x509.ObjectIdentifier(oid) + + @utils.cached_property + def extensions(self): + if self._backend._lib.CRYPTOGRAPHY_OPENSSL_110_OR_GREATER: + return _CERTIFICATE_EXTENSION_PARSER.parse( + self._backend, self._x509 + ) + else: + return _CERTIFICATE_EXTENSION_PARSER_NO_SCT.parse( + self._backend, self._x509 + ) + + @property + def signature(self): + sig = self._backend._ffi.new("ASN1_BIT_STRING **") + self._backend._lib.X509_get0_signature( + sig, self._backend._ffi.NULL, self._x509 + ) + self._backend.openssl_assert(sig[0] != self._backend._ffi.NULL) + return _asn1_string_to_bytes(self._backend, sig[0]) + + @property + def tbs_certificate_bytes(self): + pp = self._backend._ffi.new("unsigned char **") + res = self._backend._lib.i2d_re_X509_tbs(self._x509, pp) + self._backend.openssl_assert(res > 0) + pp = self._backend._ffi.gc( + pp, lambda pointer: self._backend._lib.OPENSSL_free(pointer[0]) + ) + return self._backend._ffi.buffer(pp[0], res)[:] + + def public_bytes(self, encoding): + bio = self._backend._create_mem_bio_gc() + if encoding is serialization.Encoding.PEM: + res = self._backend._lib.PEM_write_bio_X509(bio, self._x509) + elif encoding is serialization.Encoding.DER: + res = self._backend._lib.i2d_X509_bio(bio, self._x509) + else: + raise TypeError("encoding must be an item from the Encoding enum") + + self._backend.openssl_assert(res == 1) + return self._backend._read_mem_bio(bio) + + +@utils.register_interface(x509.RevokedCertificate) +class _RevokedCertificate(object): + def __init__(self, backend, crl, x509_revoked): + self._backend = backend + # The X509_REVOKED_value is a X509_REVOKED * that has + # no reference counting. This means when X509_CRL_free is + # called then the CRL and all X509_REVOKED * are freed. 
Since + # you can retain a reference to a single revoked certificate + # and let the CRL fall out of scope we need to retain a + # private reference to the CRL inside the RevokedCertificate + # object to prevent the gc from being called inappropriately. + self._crl = crl + self._x509_revoked = x509_revoked + + @property + def serial_number(self): + asn1_int = self._backend._lib.X509_REVOKED_get0_serialNumber( + self._x509_revoked + ) + self._backend.openssl_assert(asn1_int != self._backend._ffi.NULL) + return _asn1_integer_to_int(self._backend, asn1_int) + + @property + def revocation_date(self): + return _parse_asn1_time( + self._backend, + self._backend._lib.X509_REVOKED_get0_revocationDate( + self._x509_revoked + ) + ) + + @utils.cached_property + def extensions(self): + return _REVOKED_CERTIFICATE_EXTENSION_PARSER.parse( + self._backend, self._x509_revoked + ) + + +@utils.register_interface(x509.CertificateRevocationList) +class _CertificateRevocationList(object): + def __init__(self, backend, x509_crl): + self._backend = backend + self._x509_crl = x509_crl + + def __eq__(self, other): + if not isinstance(other, x509.CertificateRevocationList): + return NotImplemented + + res = self._backend._lib.X509_CRL_cmp(self._x509_crl, other._x509_crl) + return res == 0 + + def __ne__(self, other): + return not self == other + + def fingerprint(self, algorithm): + h = hashes.Hash(algorithm, self._backend) + bio = self._backend._create_mem_bio_gc() + res = self._backend._lib.i2d_X509_CRL_bio( + bio, self._x509_crl + ) + self._backend.openssl_assert(res == 1) + der = self._backend._read_mem_bio(bio) + h.update(der) + return h.finalize() + + @utils.cached_property + def _sorted_crl(self): + # X509_CRL_get0_by_serial sorts in place, which breaks a variety of + # things we don't want to break (like iteration and the signature). + # Let's dupe it and sort that instead. 
+ dup = self._backend._lib.X509_CRL_dup(self._x509_crl) + self._backend.openssl_assert(dup != self._backend._ffi.NULL) + dup = self._backend._ffi.gc(dup, self._backend._lib.X509_CRL_free) + return dup + + def get_revoked_certificate_by_serial_number(self, serial_number): + revoked = self._backend._ffi.new("X509_REVOKED **") + asn1_int = _encode_asn1_int_gc(self._backend, serial_number) + res = self._backend._lib.X509_CRL_get0_by_serial( + self._sorted_crl, revoked, asn1_int + ) + if res == 0: + return None + else: + self._backend.openssl_assert( + revoked[0] != self._backend._ffi.NULL + ) + return _RevokedCertificate( + self._backend, self._sorted_crl, revoked[0] + ) + + @property + def signature_hash_algorithm(self): + oid = self.signature_algorithm_oid + try: + return x509._SIG_OIDS_TO_HASH[oid] + except KeyError: + raise UnsupportedAlgorithm( + "Signature algorithm OID:{} not recognized".format(oid) + ) + + @property + def signature_algorithm_oid(self): + alg = self._backend._ffi.new("X509_ALGOR **") + self._backend._lib.X509_CRL_get0_signature( + self._x509_crl, self._backend._ffi.NULL, alg + ) + self._backend.openssl_assert(alg[0] != self._backend._ffi.NULL) + oid = _obj2txt(self._backend, alg[0].algorithm) + return x509.ObjectIdentifier(oid) + + @property + def issuer(self): + issuer = self._backend._lib.X509_CRL_get_issuer(self._x509_crl) + self._backend.openssl_assert(issuer != self._backend._ffi.NULL) + return _decode_x509_name(self._backend, issuer) + + @property + def next_update(self): + nu = self._backend._lib.X509_CRL_get_nextUpdate(self._x509_crl) + self._backend.openssl_assert(nu != self._backend._ffi.NULL) + return _parse_asn1_time(self._backend, nu) + + @property + def last_update(self): + lu = self._backend._lib.X509_CRL_get_lastUpdate(self._x509_crl) + self._backend.openssl_assert(lu != self._backend._ffi.NULL) + return _parse_asn1_time(self._backend, lu) + + @property + def signature(self): + sig = self._backend._ffi.new("ASN1_BIT_STRING **") + self._backend._lib.X509_CRL_get0_signature( + self._x509_crl, sig, self._backend._ffi.NULL + ) + self._backend.openssl_assert(sig[0] != self._backend._ffi.NULL) + return _asn1_string_to_bytes(self._backend, sig[0]) + + @property + def tbs_certlist_bytes(self): + pp = self._backend._ffi.new("unsigned char **") + res = self._backend._lib.i2d_re_X509_CRL_tbs(self._x509_crl, pp) + self._backend.openssl_assert(res > 0) + pp = self._backend._ffi.gc( + pp, lambda pointer: self._backend._lib.OPENSSL_free(pointer[0]) + ) + return self._backend._ffi.buffer(pp[0], res)[:] + + def public_bytes(self, encoding): + bio = self._backend._create_mem_bio_gc() + if encoding is serialization.Encoding.PEM: + res = self._backend._lib.PEM_write_bio_X509_CRL( + bio, self._x509_crl + ) + elif encoding is serialization.Encoding.DER: + res = self._backend._lib.i2d_X509_CRL_bio(bio, self._x509_crl) + else: + raise TypeError("encoding must be an item from the Encoding enum") + + self._backend.openssl_assert(res == 1) + return self._backend._read_mem_bio(bio) + + def _revoked_cert(self, idx): + revoked = self._backend._lib.X509_CRL_get_REVOKED(self._x509_crl) + r = self._backend._lib.sk_X509_REVOKED_value(revoked, idx) + self._backend.openssl_assert(r != self._backend._ffi.NULL) + return _RevokedCertificate(self._backend, self, r) + + def __iter__(self): + for i in range(len(self)): + yield self._revoked_cert(i) + + def __getitem__(self, idx): + if isinstance(idx, slice): + start, stop, step = idx.indices(len(self)) + return [self._revoked_cert(i) for i in 
range(start, stop, step)] + else: + idx = operator.index(idx) + if idx < 0: + idx += len(self) + if not 0 <= idx < len(self): + raise IndexError + return self._revoked_cert(idx) + + def __len__(self): + revoked = self._backend._lib.X509_CRL_get_REVOKED(self._x509_crl) + if revoked == self._backend._ffi.NULL: + return 0 + else: + return self._backend._lib.sk_X509_REVOKED_num(revoked) + + @utils.cached_property + def extensions(self): + return _CRL_EXTENSION_PARSER.parse(self._backend, self._x509_crl) + + def is_signature_valid(self, public_key): + if not isinstance(public_key, (dsa.DSAPublicKey, rsa.RSAPublicKey, + ec.EllipticCurvePublicKey)): + raise TypeError('Expecting one of DSAPublicKey, RSAPublicKey,' + ' or EllipticCurvePublicKey.') + res = self._backend._lib.X509_CRL_verify( + self._x509_crl, public_key._evp_pkey + ) + + if res != 1: + self._backend._consume_errors() + return False + + return True + + +@utils.register_interface(x509.CertificateSigningRequest) +class _CertificateSigningRequest(object): + def __init__(self, backend, x509_req): + self._backend = backend + self._x509_req = x509_req + + def __eq__(self, other): + if not isinstance(other, _CertificateSigningRequest): + return NotImplemented + + self_bytes = self.public_bytes(serialization.Encoding.DER) + other_bytes = other.public_bytes(serialization.Encoding.DER) + return self_bytes == other_bytes + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(self.public_bytes(serialization.Encoding.DER)) + + def public_key(self): + pkey = self._backend._lib.X509_REQ_get_pubkey(self._x509_req) + self._backend.openssl_assert(pkey != self._backend._ffi.NULL) + pkey = self._backend._ffi.gc(pkey, self._backend._lib.EVP_PKEY_free) + return self._backend._evp_pkey_to_public_key(pkey) + + @property + def subject(self): + subject = self._backend._lib.X509_REQ_get_subject_name(self._x509_req) + self._backend.openssl_assert(subject != self._backend._ffi.NULL) + return _decode_x509_name(self._backend, subject) + + @property + def signature_hash_algorithm(self): + oid = self.signature_algorithm_oid + try: + return x509._SIG_OIDS_TO_HASH[oid] + except KeyError: + raise UnsupportedAlgorithm( + "Signature algorithm OID:{} not recognized".format(oid) + ) + + @property + def signature_algorithm_oid(self): + alg = self._backend._ffi.new("X509_ALGOR **") + self._backend._lib.X509_REQ_get0_signature( + self._x509_req, self._backend._ffi.NULL, alg + ) + self._backend.openssl_assert(alg[0] != self._backend._ffi.NULL) + oid = _obj2txt(self._backend, alg[0].algorithm) + return x509.ObjectIdentifier(oid) + + @utils.cached_property + def extensions(self): + x509_exts = self._backend._lib.X509_REQ_get_extensions(self._x509_req) + x509_exts = self._backend._ffi.gc( + x509_exts, + lambda x: self._backend._lib.sk_X509_EXTENSION_pop_free( + x, self._backend._ffi.addressof( + self._backend._lib._original_lib, "X509_EXTENSION_free" + ) + ) + ) + return _CSR_EXTENSION_PARSER.parse(self._backend, x509_exts) + + def public_bytes(self, encoding): + bio = self._backend._create_mem_bio_gc() + if encoding is serialization.Encoding.PEM: + res = self._backend._lib.PEM_write_bio_X509_REQ( + bio, self._x509_req + ) + elif encoding is serialization.Encoding.DER: + res = self._backend._lib.i2d_X509_REQ_bio(bio, self._x509_req) + else: + raise TypeError("encoding must be an item from the Encoding enum") + + self._backend.openssl_assert(res == 1) + return self._backend._read_mem_bio(bio) + + @property + def tbs_certrequest_bytes(self): + 
pp = self._backend._ffi.new("unsigned char **") + res = self._backend._lib.i2d_re_X509_REQ_tbs(self._x509_req, pp) + self._backend.openssl_assert(res > 0) + pp = self._backend._ffi.gc( + pp, lambda pointer: self._backend._lib.OPENSSL_free(pointer[0]) + ) + return self._backend._ffi.buffer(pp[0], res)[:] + + @property + def signature(self): + sig = self._backend._ffi.new("ASN1_BIT_STRING **") + self._backend._lib.X509_REQ_get0_signature( + self._x509_req, sig, self._backend._ffi.NULL + ) + self._backend.openssl_assert(sig[0] != self._backend._ffi.NULL) + return _asn1_string_to_bytes(self._backend, sig[0]) + + @property + def is_signature_valid(self): + pkey = self._backend._lib.X509_REQ_get_pubkey(self._x509_req) + self._backend.openssl_assert(pkey != self._backend._ffi.NULL) + pkey = self._backend._ffi.gc(pkey, self._backend._lib.EVP_PKEY_free) + res = self._backend._lib.X509_REQ_verify(self._x509_req, pkey) + + if res != 1: + self._backend._consume_errors() + return False + + return True + + +@utils.register_interface( + x509.certificate_transparency.SignedCertificateTimestamp +) +class _SignedCertificateTimestamp(object): + def __init__(self, backend, sct_list, sct): + self._backend = backend + # Keep the SCT_LIST that this SCT came from alive. + self._sct_list = sct_list + self._sct = sct + + @property + def version(self): + version = self._backend._lib.SCT_get_version(self._sct) + assert version == self._backend._lib.SCT_VERSION_V1 + return x509.certificate_transparency.Version.v1 + + @property + def log_id(self): + out = self._backend._ffi.new("unsigned char **") + log_id_length = self._backend._lib.SCT_get0_log_id(self._sct, out) + assert log_id_length >= 0 + return self._backend._ffi.buffer(out[0], log_id_length)[:] + + @property + def timestamp(self): + timestamp = self._backend._lib.SCT_get_timestamp(self._sct) + milliseconds = timestamp % 1000 + return datetime.datetime.utcfromtimestamp( + timestamp // 1000 + ).replace(microsecond=milliseconds * 1000) + + @property + def entry_type(self): + entry_type = self._backend._lib.SCT_get_log_entry_type(self._sct) + # We currently only support loading SCTs from the X.509 extension, so + # we only have precerts. + assert entry_type == self._backend._lib.CT_LOG_ENTRY_TYPE_PRECERT + return x509.certificate_transparency.LogEntryType.PRE_CERTIFICATE + + @property + def _signature(self): + ptrptr = self._backend._ffi.new("unsigned char **") + res = self._backend._lib.SCT_get0_signature(self._sct, ptrptr) + self._backend.openssl_assert(res > 0) + self._backend.openssl_assert(ptrptr[0] != self._backend._ffi.NULL) + return self._backend._ffi.buffer(ptrptr[0], res)[:] + + def __hash__(self): + return hash(self._signature) + + def __eq__(self, other): + if not isinstance(other, _SignedCertificateTimestamp): + return NotImplemented + + return self._signature == other._signature + + def __ne__(self, other): + return not self == other diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/bindings/__init__.py b/packages/wakatime/packages/py27/cryptography/hazmat/bindings/__init__.py new file mode 100644 index 0000000..4b54088 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/bindings/__init__.py @@ -0,0 +1,5 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
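`_Certificate`, `_CertificateRevocationList`, and `_CertificateSigningRequest` above back the top-level `cryptography.x509` loaders; note the deliberate private back-references (`cert._ocsp_resp`, `self._crl`, `self._sct_list`) that keep the owning OpenSSL structure alive while a child object is still reachable. A brief sketch of the consumer-facing side, where `pem_data` is assumed to hold a PEM-encoded certificate:

from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, serialization

cert = x509.load_pem_x509_certificate(pem_data, default_backend())  # pem_data: assumed input

print(cert.subject)                                  # X509_get_subject_name -> Name
print(cert.not_valid_before, cert.not_valid_after)   # X509_getm_notBefore / notAfter
print(cert.signature_hash_algorithm.name)
print(cert.fingerprint(hashes.SHA256()))             # hash of the DER encoding

# Round-tripping through DER uses the same i2d_X509_bio path as public_bytes() above.
der = cert.public_bytes(serialization.Encoding.DER)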
+ +from __future__ import absolute_import, division, print_function diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/bindings/openssl/__init__.py b/packages/wakatime/packages/py27/cryptography/hazmat/bindings/openssl/__init__.py new file mode 100644 index 0000000..4b54088 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/bindings/openssl/__init__.py @@ -0,0 +1,5 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/bindings/openssl/_conditional.py b/packages/wakatime/packages/py27/cryptography/hazmat/bindings/openssl/_conditional.py new file mode 100644 index 0000000..a39bb66 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/bindings/openssl/_conditional.py @@ -0,0 +1,441 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + + +def cryptography_has_ec2m(): + return [ + "EC_POINT_set_affine_coordinates_GF2m", + "EC_POINT_get_affine_coordinates_GF2m", + "EC_POINT_set_compressed_coordinates_GF2m", + ] + + +def cryptography_has_ec_1_0_2(): + return [ + "EC_curve_nid2nist", + ] + + +def cryptography_has_set_ecdh_auto(): + return [ + "SSL_CTX_set_ecdh_auto", + ] + + +def cryptography_has_rsa_r_pkcs_decoding_error(): + return [ + "RSA_R_PKCS_DECODING_ERROR" + ] + + +def cryptography_has_rsa_oaep_md(): + return [ + "EVP_PKEY_CTX_set_rsa_oaep_md", + ] + + +def cryptography_has_rsa_oaep_label(): + return [ + "EVP_PKEY_CTX_set0_rsa_oaep_label", + ] + + +def cryptography_has_ssl3_method(): + return [ + "SSLv3_method", + "SSLv3_client_method", + "SSLv3_server_method", + ] + + +def cryptography_has_alpn(): + return [ + "SSL_CTX_set_alpn_protos", + "SSL_set_alpn_protos", + "SSL_CTX_set_alpn_select_cb", + "SSL_get0_alpn_selected", + ] + + +def cryptography_has_compression(): + return [ + "SSL_get_current_compression", + "SSL_get_current_expansion", + "SSL_COMP_get_name", + ] + + +def cryptography_has_get_server_tmp_key(): + return [ + "SSL_get_server_tmp_key", + ] + + +def cryptography_has_102_verification_error_codes(): + return [ + 'X509_V_ERR_SUITE_B_INVALID_VERSION', + 'X509_V_ERR_SUITE_B_INVALID_ALGORITHM', + 'X509_V_ERR_SUITE_B_INVALID_CURVE', + 'X509_V_ERR_SUITE_B_INVALID_SIGNATURE_ALGORITHM', + 'X509_V_ERR_SUITE_B_LOS_NOT_ALLOWED', + 'X509_V_ERR_SUITE_B_CANNOT_SIGN_P_384_WITH_P_256', + 'X509_V_ERR_HOSTNAME_MISMATCH', + 'X509_V_ERR_EMAIL_MISMATCH', + 'X509_V_ERR_IP_ADDRESS_MISMATCH' + ] + + +def cryptography_has_102_verification_params(): + return [ + "X509_V_FLAG_SUITEB_128_LOS_ONLY", + "X509_V_FLAG_SUITEB_192_LOS", + "X509_V_FLAG_SUITEB_128_LOS", + "X509_VERIFY_PARAM_set1_host", + "X509_VERIFY_PARAM_set1_email", + "X509_VERIFY_PARAM_set1_ip", + "X509_VERIFY_PARAM_set1_ip_asc", + "X509_VERIFY_PARAM_set_hostflags", + "SSL_get0_param", + "X509_CHECK_FLAG_ALWAYS_CHECK_SUBJECT", + "X509_CHECK_FLAG_NO_WILDCARDS", + "X509_CHECK_FLAG_NO_PARTIAL_WILDCARDS", + "X509_CHECK_FLAG_MULTI_LABEL_WILDCARDS", + "X509_CHECK_FLAG_SINGLE_LABEL_SUBDOMAINS" + ] + + +def cryptography_has_110_verification_params(): + return [ + "X509_CHECK_FLAG_NEVER_CHECK_SUBJECT" + ] + + +def 
cryptography_has_x509_v_flag_trusted_first(): + return [ + "X509_V_FLAG_TRUSTED_FIRST", + ] + + +def cryptography_has_x509_v_flag_partial_chain(): + return [ + "X509_V_FLAG_PARTIAL_CHAIN", + ] + + +def cryptography_has_set_cert_cb(): + return [ + "SSL_CTX_set_cert_cb", + "SSL_set_cert_cb", + ] + + +def cryptography_has_ssl_st(): + return [ + "SSL_ST_BEFORE", + "SSL_ST_OK", + "SSL_ST_INIT", + "SSL_ST_RENEGOTIATE", + ] + + +def cryptography_has_tls_st(): + return [ + "TLS_ST_BEFORE", + "TLS_ST_OK", + ] + + +def cryptography_has_locking_callbacks(): + return [ + "Cryptography_setup_ssl_threads", + ] + + +def cryptography_has_scrypt(): + return [ + "EVP_PBE_scrypt", + ] + + +def cryptography_has_generic_dtls_method(): + return [ + "DTLS_method", + "DTLS_server_method", + "DTLS_client_method", + "SSL_OP_NO_DTLSv1", + "SSL_OP_NO_DTLSv1_2", + "DTLS_set_link_mtu", + "DTLS_get_link_min_mtu", + ] + + +def cryptography_has_evp_pkey_dhx(): + return [ + "EVP_PKEY_DHX", + ] + + +def cryptography_has_mem_functions(): + return [ + "Cryptography_CRYPTO_set_mem_functions", + ] + + +def cryptography_has_sct(): + return [ + "SCT_get_version", + "SCT_get_log_entry_type", + "SCT_get0_log_id", + "SCT_get0_signature", + "SCT_get_timestamp", + "SCT_set_source", + "sk_SCT_num", + "sk_SCT_value", + "SCT_LIST_free", + "sk_SCT_push", + "sk_SCT_new_null", + "SCT_new", + "SCT_set1_log_id", + "SCT_set_timestamp", + "SCT_set_version", + "SCT_set_log_entry_type", + ] + + +def cryptography_has_x509_store_ctx_get_issuer(): + return [ + "X509_STORE_get_get_issuer", + "X509_STORE_set_get_issuer", + ] + + +def cryptography_has_x25519(): + return [ + "EVP_PKEY_X25519", + "NID_X25519", + ] + + +def cryptography_has_x448(): + return [ + "EVP_PKEY_X448", + "NID_X448", + ] + + +def cryptography_has_ed448(): + return [ + "EVP_PKEY_ED448", + "NID_ED448", + ] + + +def cryptography_has_ed25519(): + return [ + "NID_ED25519", + "EVP_PKEY_ED25519", + ] + + +def cryptography_has_poly1305(): + return [ + "NID_poly1305", + "EVP_PKEY_POLY1305", + ] + + +def cryptography_has_oneshot_evp_digest_sign_verify(): + return [ + "EVP_DigestSign", + "EVP_DigestVerify", + ] + + +def cryptography_has_evp_digestfinal_xof(): + return [ + "EVP_DigestFinalXOF", + ] + + +def cryptography_has_evp_pkey_get_set_tls_encodedpoint(): + return [ + "EVP_PKEY_get1_tls_encodedpoint", + "EVP_PKEY_set1_tls_encodedpoint", + ] + + +def cryptography_has_fips(): + return [ + "FIPS_mode_set", + "FIPS_mode", + ] + + +def cryptography_has_ssl_sigalgs(): + return [ + "SSL_CTX_set1_sigalgs_list", + "SSL_get_sigalgs", + ] + + +def cryptography_has_psk(): + return [ + "SSL_CTX_use_psk_identity_hint", + "SSL_CTX_set_psk_server_callback", + "SSL_CTX_set_psk_client_callback", + ] + + +def cryptography_has_custom_ext(): + return [ + "SSL_CTX_add_client_custom_ext", + "SSL_CTX_add_server_custom_ext", + "SSL_extension_supported", + ] + + +def cryptography_has_openssl_cleanup(): + return [ + "OPENSSL_cleanup", + ] + + +def cryptography_has_cipher_details(): + return [ + "SSL_CIPHER_is_aead", + "SSL_CIPHER_get_cipher_nid", + "SSL_CIPHER_get_digest_nid", + "SSL_CIPHER_get_kx_nid", + "SSL_CIPHER_get_auth_nid", + ] + + +def cryptography_has_tlsv13(): + return [ + "SSL_OP_NO_TLSv1_3", + "SSL_VERIFY_POST_HANDSHAKE", + "SSL_CTX_set_ciphersuites", + "SSL_verify_client_post_handshake", + "SSL_CTX_set_post_handshake_auth", + "SSL_set_post_handshake_auth", + "SSL_SESSION_get_max_early_data", + "SSL_write_early_data", + "SSL_read_early_data", + "SSL_CTX_set_max_early_data", + ] + + +def 
cryptography_has_raw_key(): + return [ + "EVP_PKEY_new_raw_private_key", + "EVP_PKEY_new_raw_public_key", + "EVP_PKEY_get_raw_private_key", + "EVP_PKEY_get_raw_public_key", + ] + + +def cryptography_has_evp_r_memory_limit_exceeded(): + return [ + "EVP_R_MEMORY_LIMIT_EXCEEDED", + ] + + +def cryptography_has_engine(): + return [ + "ENGINE_by_id", + "ENGINE_init", + "ENGINE_finish", + "ENGINE_get_default_RAND", + "ENGINE_set_default_RAND", + "ENGINE_unregister_RAND", + "ENGINE_ctrl_cmd", + "ENGINE_free", + "ENGINE_get_name", + "Cryptography_add_osrandom_engine", + ] + + +def cryptography_has_verified_chain(): + return [ + "SSL_get0_verified_chain", + ] + + +# This is a mapping of +# {condition: function-returning-names-dependent-on-that-condition} so we can +# loop over them and delete unsupported names at runtime. It will be removed +# when cffi supports #if in cdef. We use functions instead of just a dict of +# lists so we can use coverage to measure which are used. +CONDITIONAL_NAMES = { + "Cryptography_HAS_EC2M": cryptography_has_ec2m, + "Cryptography_HAS_EC_1_0_2": cryptography_has_ec_1_0_2, + "Cryptography_HAS_SET_ECDH_AUTO": cryptography_has_set_ecdh_auto, + "Cryptography_HAS_RSA_R_PKCS_DECODING_ERROR": ( + cryptography_has_rsa_r_pkcs_decoding_error + ), + "Cryptography_HAS_RSA_OAEP_MD": cryptography_has_rsa_oaep_md, + "Cryptography_HAS_RSA_OAEP_LABEL": cryptography_has_rsa_oaep_label, + "Cryptography_HAS_SSL3_METHOD": cryptography_has_ssl3_method, + "Cryptography_HAS_ALPN": cryptography_has_alpn, + "Cryptography_HAS_COMPRESSION": cryptography_has_compression, + "Cryptography_HAS_GET_SERVER_TMP_KEY": cryptography_has_get_server_tmp_key, + "Cryptography_HAS_102_VERIFICATION_ERROR_CODES": ( + cryptography_has_102_verification_error_codes + ), + "Cryptography_HAS_102_VERIFICATION_PARAMS": ( + cryptography_has_102_verification_params + ), + "Cryptography_HAS_110_VERIFICATION_PARAMS": ( + cryptography_has_110_verification_params + ), + "Cryptography_HAS_X509_V_FLAG_TRUSTED_FIRST": ( + cryptography_has_x509_v_flag_trusted_first + ), + "Cryptography_HAS_X509_V_FLAG_PARTIAL_CHAIN": ( + cryptography_has_x509_v_flag_partial_chain + ), + "Cryptography_HAS_SET_CERT_CB": cryptography_has_set_cert_cb, + "Cryptography_HAS_SSL_ST": cryptography_has_ssl_st, + "Cryptography_HAS_TLS_ST": cryptography_has_tls_st, + "Cryptography_HAS_LOCKING_CALLBACKS": cryptography_has_locking_callbacks, + "Cryptography_HAS_SCRYPT": cryptography_has_scrypt, + "Cryptography_HAS_GENERIC_DTLS_METHOD": ( + cryptography_has_generic_dtls_method + ), + "Cryptography_HAS_EVP_PKEY_DHX": cryptography_has_evp_pkey_dhx, + "Cryptography_HAS_MEM_FUNCTIONS": cryptography_has_mem_functions, + "Cryptography_HAS_SCT": cryptography_has_sct, + "Cryptography_HAS_X509_STORE_CTX_GET_ISSUER": ( + cryptography_has_x509_store_ctx_get_issuer + ), + "Cryptography_HAS_X25519": cryptography_has_x25519, + "Cryptography_HAS_X448": cryptography_has_x448, + "Cryptography_HAS_ED448": cryptography_has_ed448, + "Cryptography_HAS_ED25519": cryptography_has_ed25519, + "Cryptography_HAS_POLY1305": cryptography_has_poly1305, + "Cryptography_HAS_ONESHOT_EVP_DIGEST_SIGN_VERIFY": ( + cryptography_has_oneshot_evp_digest_sign_verify + ), + "Cryptography_HAS_EVP_PKEY_get_set_tls_encodedpoint": ( + cryptography_has_evp_pkey_get_set_tls_encodedpoint + ), + "Cryptography_HAS_FIPS": cryptography_has_fips, + "Cryptography_HAS_SIGALGS": cryptography_has_ssl_sigalgs, + "Cryptography_HAS_PSK": cryptography_has_psk, + "Cryptography_HAS_CUSTOM_EXT": cryptography_has_custom_ext, 
+ "Cryptography_HAS_OPENSSL_CLEANUP": cryptography_has_openssl_cleanup, + "Cryptography_HAS_CIPHER_DETAILS": cryptography_has_cipher_details, + "Cryptography_HAS_TLSv1_3": cryptography_has_tlsv13, + "Cryptography_HAS_RAW_KEY": cryptography_has_raw_key, + "Cryptography_HAS_EVP_DIGESTFINAL_XOF": ( + cryptography_has_evp_digestfinal_xof + ), + "Cryptography_HAS_EVP_R_MEMORY_LIMIT_EXCEEDED": ( + cryptography_has_evp_r_memory_limit_exceeded + ), + "Cryptography_HAS_ENGINE": cryptography_has_engine, + "Cryptography_HAS_VERIFIED_CHAIN": cryptography_has_verified_chain, +} diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/bindings/openssl/binding.py b/packages/wakatime/packages/py27/cryptography/hazmat/bindings/openssl/binding.py new file mode 100644 index 0000000..9740516 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/bindings/openssl/binding.py @@ -0,0 +1,197 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import collections +import threading +import types +import warnings + +import cryptography +from cryptography import utils +from cryptography.exceptions import InternalError +from cryptography.hazmat.bindings._openssl import ffi, lib +from cryptography.hazmat.bindings.openssl._conditional import CONDITIONAL_NAMES + +_OpenSSLErrorWithText = collections.namedtuple( + "_OpenSSLErrorWithText", ["code", "lib", "func", "reason", "reason_text"] +) + + +class _OpenSSLError(object): + def __init__(self, code, lib, func, reason): + self._code = code + self._lib = lib + self._func = func + self._reason = reason + + def _lib_reason_match(self, lib, reason): + return lib == self.lib and reason == self.reason + + code = utils.read_only_property("_code") + lib = utils.read_only_property("_lib") + func = utils.read_only_property("_func") + reason = utils.read_only_property("_reason") + + +def _consume_errors(lib): + errors = [] + while True: + code = lib.ERR_get_error() + if code == 0: + break + + err_lib = lib.ERR_GET_LIB(code) + err_func = lib.ERR_GET_FUNC(code) + err_reason = lib.ERR_GET_REASON(code) + + errors.append(_OpenSSLError(code, err_lib, err_func, err_reason)) + + return errors + + +def _openssl_assert(lib, ok): + if not ok: + errors = _consume_errors(lib) + errors_with_text = [] + for err in errors: + buf = ffi.new("char[]", 256) + lib.ERR_error_string_n(err.code, buf, len(buf)) + err_text_reason = ffi.string(buf) + + errors_with_text.append( + _OpenSSLErrorWithText( + err.code, err.lib, err.func, err.reason, err_text_reason + ) + ) + + raise InternalError( + "Unknown OpenSSL error. This error is commonly encountered when " + "another library is not cleaning up the OpenSSL error stack. If " + "you are using cryptography with another library that uses " + "OpenSSL try disabling it before reporting a bug. Otherwise " + "please file an issue at https://github.com/pyca/cryptography/" + "issues with information on how to reproduce " + "this. 
({0!r})".format(errors_with_text), + errors_with_text + ) + + +def build_conditional_library(lib, conditional_names): + conditional_lib = types.ModuleType("lib") + conditional_lib._original_lib = lib + excluded_names = set() + for condition, names_cb in conditional_names.items(): + if not getattr(lib, condition): + excluded_names.update(names_cb()) + + for attr in dir(lib): + if attr not in excluded_names: + setattr(conditional_lib, attr, getattr(lib, attr)) + + return conditional_lib + + +class Binding(object): + """ + OpenSSL API wrapper. + """ + lib = None + ffi = ffi + _lib_loaded = False + _init_lock = threading.Lock() + _lock_init_lock = threading.Lock() + + def __init__(self): + self._ensure_ffi_initialized() + + @classmethod + def _register_osrandom_engine(cls): + # Clear any errors extant in the queue before we start. In many + # scenarios other things may be interacting with OpenSSL in the same + # process space and it has proven untenable to assume that they will + # reliably clear the error queue. Once we clear it here we will + # error on any subsequent unexpected item in the stack. + cls.lib.ERR_clear_error() + if cls.lib.Cryptography_HAS_ENGINE: + result = cls.lib.Cryptography_add_osrandom_engine() + _openssl_assert(cls.lib, result in (1, 2)) + + @classmethod + def _ensure_ffi_initialized(cls): + with cls._init_lock: + if not cls._lib_loaded: + cls.lib = build_conditional_library(lib, CONDITIONAL_NAMES) + cls._lib_loaded = True + # initialize the SSL library + cls.lib.SSL_library_init() + # adds all ciphers/digests for EVP + cls.lib.OpenSSL_add_all_algorithms() + # loads error strings for libcrypto and libssl functions + cls.lib.SSL_load_error_strings() + cls._register_osrandom_engine() + + @classmethod + def init_static_locks(cls): + with cls._lock_init_lock: + cls._ensure_ffi_initialized() + # Use Python's implementation if available, importing _ssl triggers + # the setup for this. + __import__("_ssl") + + if (not cls.lib.Cryptography_HAS_LOCKING_CALLBACKS or + cls.lib.CRYPTO_get_locking_callback() != cls.ffi.NULL): + return + + # If nothing else has setup a locking callback already, we set up + # our own + res = lib.Cryptography_setup_ssl_threads() + _openssl_assert(cls.lib, res == 1) + + +def _verify_openssl_version(lib): + if ( + lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_102 and + not lib.CRYPTOGRAPHY_IS_LIBRESSL + ): + warnings.warn( + "OpenSSL version 1.0.1 is no longer supported by the OpenSSL " + "project, please upgrade. The next version of cryptography will " + "drop support for it.", + utils.CryptographyDeprecationWarning + ) + + +def _verify_package_version(version): + # Occasionally we run into situations where the version of the Python + # package does not match the version of the shared object that is loaded. + # This may occur in environments where multiple versions of cryptography + # are installed and available in the python path. To avoid errors cropping + # up later this code checks that the currently imported package and the + # shared object that were loaded have the same version and raise an + # ImportError if they do not + so_package_version = ffi.string(lib.CRYPTOGRAPHY_PACKAGE_VERSION) + if version.encode("ascii") != so_package_version: + raise ImportError( + "The version of cryptography does not match the loaded " + "shared object. This can happen if you have multiple copies of " + "cryptography installed in your Python path. Please try creating " + "a new virtual environment to resolve this issue. 
" + "Loaded python version: {}, shared object version: {}".format( + version, so_package_version + ) + ) + + +_verify_package_version(cryptography.__version__) + +# OpenSSL is not thread safe until the locks are initialized. We call this +# method in module scope so that it executes with the import lock. On +# Pythons < 3.4 this import lock is a global lock, which can prevent a race +# condition registering the OpenSSL locks. On Python 3.4+ the import lock +# is per module so this approach will not work. +Binding.init_static_locks() + +_verify_openssl_version(Binding.lib) diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/primitives/__init__.py b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/__init__.py new file mode 100644 index 0000000..4b54088 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/__init__.py @@ -0,0 +1,5 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/primitives/asymmetric/__init__.py b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/asymmetric/__init__.py new file mode 100644 index 0000000..494a7a1 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/asymmetric/__init__.py @@ -0,0 +1,40 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc + +import six + + +@six.add_metaclass(abc.ABCMeta) +class AsymmetricSignatureContext(object): + @abc.abstractmethod + def update(self, data): + """ + Processes the provided bytes and returns nothing. + """ + + @abc.abstractmethod + def finalize(self): + """ + Returns the signature as bytes. + """ + + +@six.add_metaclass(abc.ABCMeta) +class AsymmetricVerificationContext(object): + @abc.abstractmethod + def update(self, data): + """ + Processes the provided bytes and returns nothing. + """ + + @abc.abstractmethod + def verify(self): + """ + Raises an exception if the bytes provided to update do not match the + signature or the signature does not match the public key. + """ diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/primitives/asymmetric/dh.py b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/asymmetric/dh.py new file mode 100644 index 0000000..4fc9952 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/asymmetric/dh.py @@ -0,0 +1,212 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import absolute_import, division, print_function + +import abc + +import six + +from cryptography import utils + + +def generate_parameters(generator, key_size, backend): + return backend.generate_dh_parameters(generator, key_size) + + +class DHPrivateNumbers(object): + def __init__(self, x, public_numbers): + if not isinstance(x, six.integer_types): + raise TypeError("x must be an integer.") + + if not isinstance(public_numbers, DHPublicNumbers): + raise TypeError("public_numbers must be an instance of " + "DHPublicNumbers.") + + self._x = x + self._public_numbers = public_numbers + + def __eq__(self, other): + if not isinstance(other, DHPrivateNumbers): + return NotImplemented + + return ( + self._x == other._x and + self._public_numbers == other._public_numbers + ) + + def __ne__(self, other): + return not self == other + + def private_key(self, backend): + return backend.load_dh_private_numbers(self) + + public_numbers = utils.read_only_property("_public_numbers") + x = utils.read_only_property("_x") + + +class DHPublicNumbers(object): + def __init__(self, y, parameter_numbers): + if not isinstance(y, six.integer_types): + raise TypeError("y must be an integer.") + + if not isinstance(parameter_numbers, DHParameterNumbers): + raise TypeError( + "parameters must be an instance of DHParameterNumbers.") + + self._y = y + self._parameter_numbers = parameter_numbers + + def __eq__(self, other): + if not isinstance(other, DHPublicNumbers): + return NotImplemented + + return ( + self._y == other._y and + self._parameter_numbers == other._parameter_numbers + ) + + def __ne__(self, other): + return not self == other + + def public_key(self, backend): + return backend.load_dh_public_numbers(self) + + y = utils.read_only_property("_y") + parameter_numbers = utils.read_only_property("_parameter_numbers") + + +class DHParameterNumbers(object): + def __init__(self, p, g, q=None): + if ( + not isinstance(p, six.integer_types) or + not isinstance(g, six.integer_types) + ): + raise TypeError("p and g must be integers") + if q is not None and not isinstance(q, six.integer_types): + raise TypeError("q must be integer or None") + + if g < 2: + raise ValueError("DH generator must be 2 or greater") + + self._p = p + self._g = g + self._q = q + + def __eq__(self, other): + if not isinstance(other, DHParameterNumbers): + return NotImplemented + + return ( + self._p == other._p and + self._g == other._g and + self._q == other._q + ) + + def __ne__(self, other): + return not self == other + + def parameters(self, backend): + return backend.load_dh_parameter_numbers(self) + + p = utils.read_only_property("_p") + g = utils.read_only_property("_g") + q = utils.read_only_property("_q") + + +@six.add_metaclass(abc.ABCMeta) +class DHParameters(object): + @abc.abstractmethod + def generate_private_key(self): + """ + Generates and returns a DHPrivateKey. + """ + + @abc.abstractmethod + def parameter_bytes(self, encoding, format): + """ + Returns the parameters serialized as bytes. + """ + + @abc.abstractmethod + def parameter_numbers(self): + """ + Returns a DHParameterNumbers. + """ + + +DHParametersWithSerialization = DHParameters + + +@six.add_metaclass(abc.ABCMeta) +class DHPrivateKey(object): + @abc.abstractproperty + def key_size(self): + """ + The bit length of the prime modulus. + """ + + @abc.abstractmethod + def public_key(self): + """ + The DHPublicKey associated with this private key. 
+ """ + + @abc.abstractmethod + def parameters(self): + """ + The DHParameters object associated with this private key. + """ + + @abc.abstractmethod + def exchange(self, peer_public_key): + """ + Given peer's DHPublicKey, carry out the key exchange and + return shared key as bytes. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DHPrivateKeyWithSerialization(DHPrivateKey): + @abc.abstractmethod + def private_numbers(self): + """ + Returns a DHPrivateNumbers. + """ + + @abc.abstractmethod + def private_bytes(self, encoding, format, encryption_algorithm): + """ + Returns the key serialized as bytes. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DHPublicKey(object): + @abc.abstractproperty + def key_size(self): + """ + The bit length of the prime modulus. + """ + + @abc.abstractmethod + def parameters(self): + """ + The DHParameters object associated with this public key. + """ + + @abc.abstractmethod + def public_numbers(self): + """ + Returns a DHPublicNumbers. + """ + + @abc.abstractmethod + def public_bytes(self, encoding, format): + """ + Returns the key serialized as bytes. + """ + + +DHPublicKeyWithSerialization = DHPublicKey diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/primitives/asymmetric/dsa.py b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/asymmetric/dsa.py new file mode 100644 index 0000000..e380a44 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/asymmetric/dsa.py @@ -0,0 +1,254 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc + +import six + +from cryptography import utils + + +@six.add_metaclass(abc.ABCMeta) +class DSAParameters(object): + @abc.abstractmethod + def generate_private_key(self): + """ + Generates and returns a DSAPrivateKey. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DSAParametersWithNumbers(DSAParameters): + @abc.abstractmethod + def parameter_numbers(self): + """ + Returns a DSAParameterNumbers. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DSAPrivateKey(object): + @abc.abstractproperty + def key_size(self): + """ + The bit length of the prime modulus. + """ + + @abc.abstractmethod + def public_key(self): + """ + The DSAPublicKey associated with this private key. + """ + + @abc.abstractmethod + def parameters(self): + """ + The DSAParameters object associated with this private key. + """ + + @abc.abstractmethod + def signer(self, signature_algorithm): + """ + Returns an AsymmetricSignatureContext used for signing data. + """ + + @abc.abstractmethod + def sign(self, data, algorithm): + """ + Signs the data + """ + + +@six.add_metaclass(abc.ABCMeta) +class DSAPrivateKeyWithSerialization(DSAPrivateKey): + @abc.abstractmethod + def private_numbers(self): + """ + Returns a DSAPrivateNumbers. + """ + + @abc.abstractmethod + def private_bytes(self, encoding, format, encryption_algorithm): + """ + Returns the key serialized as bytes. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DSAPublicKey(object): + @abc.abstractproperty + def key_size(self): + """ + The bit length of the prime modulus. + """ + + @abc.abstractmethod + def parameters(self): + """ + The DSAParameters object associated with this public key. + """ + + @abc.abstractmethod + def verifier(self, signature, signature_algorithm): + """ + Returns an AsymmetricVerificationContext used for signing data. 
+ """ + + @abc.abstractmethod + def public_numbers(self): + """ + Returns a DSAPublicNumbers. + """ + + @abc.abstractmethod + def public_bytes(self, encoding, format): + """ + Returns the key serialized as bytes. + """ + + @abc.abstractmethod + def verify(self, signature, data, algorithm): + """ + Verifies the signature of the data. + """ + + +DSAPublicKeyWithSerialization = DSAPublicKey + + +def generate_parameters(key_size, backend): + return backend.generate_dsa_parameters(key_size) + + +def generate_private_key(key_size, backend): + return backend.generate_dsa_private_key_and_parameters(key_size) + + +def _check_dsa_parameters(parameters): + if parameters.p.bit_length() not in [1024, 2048, 3072]: + raise ValueError("p must be exactly 1024, 2048, or 3072 bits long") + if parameters.q.bit_length() not in [160, 224, 256]: + raise ValueError("q must be exactly 160, 224, or 256 bits long") + + if not (1 < parameters.g < parameters.p): + raise ValueError("g, p don't satisfy 1 < g < p.") + + +def _check_dsa_private_numbers(numbers): + parameters = numbers.public_numbers.parameter_numbers + _check_dsa_parameters(parameters) + if numbers.x <= 0 or numbers.x >= parameters.q: + raise ValueError("x must be > 0 and < q.") + + if numbers.public_numbers.y != pow(parameters.g, numbers.x, parameters.p): + raise ValueError("y must be equal to (g ** x % p).") + + +class DSAParameterNumbers(object): + def __init__(self, p, q, g): + if ( + not isinstance(p, six.integer_types) or + not isinstance(q, six.integer_types) or + not isinstance(g, six.integer_types) + ): + raise TypeError( + "DSAParameterNumbers p, q, and g arguments must be integers." + ) + + self._p = p + self._q = q + self._g = g + + p = utils.read_only_property("_p") + q = utils.read_only_property("_q") + g = utils.read_only_property("_g") + + def parameters(self, backend): + return backend.load_dsa_parameter_numbers(self) + + def __eq__(self, other): + if not isinstance(other, DSAParameterNumbers): + return NotImplemented + + return self.p == other.p and self.q == other.q and self.g == other.g + + def __ne__(self, other): + return not self == other + + def __repr__(self): + return ( + "".format( + self=self + ) + ) + + +class DSAPublicNumbers(object): + def __init__(self, y, parameter_numbers): + if not isinstance(y, six.integer_types): + raise TypeError("DSAPublicNumbers y argument must be an integer.") + + if not isinstance(parameter_numbers, DSAParameterNumbers): + raise TypeError( + "parameter_numbers must be a DSAParameterNumbers instance." + ) + + self._y = y + self._parameter_numbers = parameter_numbers + + y = utils.read_only_property("_y") + parameter_numbers = utils.read_only_property("_parameter_numbers") + + def public_key(self, backend): + return backend.load_dsa_public_numbers(self) + + def __eq__(self, other): + if not isinstance(other, DSAPublicNumbers): + return NotImplemented + + return ( + self.y == other.y and + self.parameter_numbers == other.parameter_numbers + ) + + def __ne__(self, other): + return not self == other + + def __repr__(self): + return ( + "".format(self=self) + ) + + +class DSAPrivateNumbers(object): + def __init__(self, x, public_numbers): + if not isinstance(x, six.integer_types): + raise TypeError("DSAPrivateNumbers x argument must be an integer.") + + if not isinstance(public_numbers, DSAPublicNumbers): + raise TypeError( + "public_numbers must be a DSAPublicNumbers instance." 
+ ) + self._public_numbers = public_numbers + self._x = x + + x = utils.read_only_property("_x") + public_numbers = utils.read_only_property("_public_numbers") + + def private_key(self, backend): + return backend.load_dsa_private_numbers(self) + + def __eq__(self, other): + if not isinstance(other, DSAPrivateNumbers): + return NotImplemented + + return ( + self.x == other.x and self.public_numbers == other.public_numbers + ) + + def __ne__(self, other): + return not self == other diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/primitives/asymmetric/ec.py b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/asymmetric/ec.py new file mode 100644 index 0000000..529391f --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/asymmetric/ec.py @@ -0,0 +1,500 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc +import warnings + +import six + +from cryptography import utils +from cryptography.hazmat._oid import ObjectIdentifier + + +class EllipticCurveOID(object): + SECP192R1 = ObjectIdentifier("1.2.840.10045.3.1.1") + SECP224R1 = ObjectIdentifier("1.3.132.0.33") + SECP256K1 = ObjectIdentifier("1.3.132.0.10") + SECP256R1 = ObjectIdentifier("1.2.840.10045.3.1.7") + SECP384R1 = ObjectIdentifier("1.3.132.0.34") + SECP521R1 = ObjectIdentifier("1.3.132.0.35") + BRAINPOOLP256R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.7") + BRAINPOOLP384R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.11") + BRAINPOOLP512R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.13") + SECT163K1 = ObjectIdentifier("1.3.132.0.1") + SECT163R2 = ObjectIdentifier("1.3.132.0.15") + SECT233K1 = ObjectIdentifier("1.3.132.0.26") + SECT233R1 = ObjectIdentifier("1.3.132.0.27") + SECT283K1 = ObjectIdentifier("1.3.132.0.16") + SECT283R1 = ObjectIdentifier("1.3.132.0.17") + SECT409K1 = ObjectIdentifier("1.3.132.0.36") + SECT409R1 = ObjectIdentifier("1.3.132.0.37") + SECT571K1 = ObjectIdentifier("1.3.132.0.38") + SECT571R1 = ObjectIdentifier("1.3.132.0.39") + + +@six.add_metaclass(abc.ABCMeta) +class EllipticCurve(object): + @abc.abstractproperty + def name(self): + """ + The name of the curve. e.g. secp256r1. + """ + + @abc.abstractproperty + def key_size(self): + """ + Bit size of a secret scalar for the curve. + """ + + +@six.add_metaclass(abc.ABCMeta) +class EllipticCurveSignatureAlgorithm(object): + @abc.abstractproperty + def algorithm(self): + """ + The digest algorithm used with this signature. + """ + + +@six.add_metaclass(abc.ABCMeta) +class EllipticCurvePrivateKey(object): + @abc.abstractmethod + def signer(self, signature_algorithm): + """ + Returns an AsymmetricSignatureContext used for signing data. + """ + + @abc.abstractmethod + def exchange(self, algorithm, peer_public_key): + """ + Performs a key exchange operation using the provided algorithm with the + provided peer's public key. + """ + + @abc.abstractmethod + def public_key(self): + """ + The EllipticCurvePublicKey for this private key. + """ + + @abc.abstractproperty + def curve(self): + """ + The EllipticCurve that this key is on. + """ + + @abc.abstractproperty + def key_size(self): + """ + Bit size of a secret scalar for the curve. 
+ """ + + @abc.abstractmethod + def sign(self, data, signature_algorithm): + """ + Signs the data + """ + + +@six.add_metaclass(abc.ABCMeta) +class EllipticCurvePrivateKeyWithSerialization(EllipticCurvePrivateKey): + @abc.abstractmethod + def private_numbers(self): + """ + Returns an EllipticCurvePrivateNumbers. + """ + + @abc.abstractmethod + def private_bytes(self, encoding, format, encryption_algorithm): + """ + Returns the key serialized as bytes. + """ + + +@six.add_metaclass(abc.ABCMeta) +class EllipticCurvePublicKey(object): + @abc.abstractmethod + def verifier(self, signature, signature_algorithm): + """ + Returns an AsymmetricVerificationContext used for signing data. + """ + + @abc.abstractproperty + def curve(self): + """ + The EllipticCurve that this key is on. + """ + + @abc.abstractproperty + def key_size(self): + """ + Bit size of a secret scalar for the curve. + """ + + @abc.abstractmethod + def public_numbers(self): + """ + Returns an EllipticCurvePublicNumbers. + """ + + @abc.abstractmethod + def public_bytes(self, encoding, format): + """ + Returns the key serialized as bytes. + """ + + @abc.abstractmethod + def verify(self, signature, data, signature_algorithm): + """ + Verifies the signature of the data. + """ + + @classmethod + def from_encoded_point(cls, curve, data): + utils._check_bytes("data", data) + + if not isinstance(curve, EllipticCurve): + raise TypeError("curve must be an EllipticCurve instance") + + if len(data) == 0: + raise ValueError("data must not be an empty byte string") + + if six.indexbytes(data, 0) not in [0x02, 0x03, 0x04]: + raise ValueError("Unsupported elliptic curve point type") + + from cryptography.hazmat.backends.openssl.backend import backend + return backend.load_elliptic_curve_public_bytes(curve, data) + + +EllipticCurvePublicKeyWithSerialization = EllipticCurvePublicKey + + +@utils.register_interface(EllipticCurve) +class SECT571R1(object): + name = "sect571r1" + key_size = 570 + + +@utils.register_interface(EllipticCurve) +class SECT409R1(object): + name = "sect409r1" + key_size = 409 + + +@utils.register_interface(EllipticCurve) +class SECT283R1(object): + name = "sect283r1" + key_size = 283 + + +@utils.register_interface(EllipticCurve) +class SECT233R1(object): + name = "sect233r1" + key_size = 233 + + +@utils.register_interface(EllipticCurve) +class SECT163R2(object): + name = "sect163r2" + key_size = 163 + + +@utils.register_interface(EllipticCurve) +class SECT571K1(object): + name = "sect571k1" + key_size = 571 + + +@utils.register_interface(EllipticCurve) +class SECT409K1(object): + name = "sect409k1" + key_size = 409 + + +@utils.register_interface(EllipticCurve) +class SECT283K1(object): + name = "sect283k1" + key_size = 283 + + +@utils.register_interface(EllipticCurve) +class SECT233K1(object): + name = "sect233k1" + key_size = 233 + + +@utils.register_interface(EllipticCurve) +class SECT163K1(object): + name = "sect163k1" + key_size = 163 + + +@utils.register_interface(EllipticCurve) +class SECP521R1(object): + name = "secp521r1" + key_size = 521 + + +@utils.register_interface(EllipticCurve) +class SECP384R1(object): + name = "secp384r1" + key_size = 384 + + +@utils.register_interface(EllipticCurve) +class SECP256R1(object): + name = "secp256r1" + key_size = 256 + + +@utils.register_interface(EllipticCurve) +class SECP256K1(object): + name = "secp256k1" + key_size = 256 + + +@utils.register_interface(EllipticCurve) +class SECP224R1(object): + name = "secp224r1" + key_size = 224 + + +@utils.register_interface(EllipticCurve) 
+class SECP192R1(object): + name = "secp192r1" + key_size = 192 + + +@utils.register_interface(EllipticCurve) +class BrainpoolP256R1(object): + name = "brainpoolP256r1" + key_size = 256 + + +@utils.register_interface(EllipticCurve) +class BrainpoolP384R1(object): + name = "brainpoolP384r1" + key_size = 384 + + +@utils.register_interface(EllipticCurve) +class BrainpoolP512R1(object): + name = "brainpoolP512r1" + key_size = 512 + + +_CURVE_TYPES = { + "prime192v1": SECP192R1, + "prime256v1": SECP256R1, + + "secp192r1": SECP192R1, + "secp224r1": SECP224R1, + "secp256r1": SECP256R1, + "secp384r1": SECP384R1, + "secp521r1": SECP521R1, + "secp256k1": SECP256K1, + + "sect163k1": SECT163K1, + "sect233k1": SECT233K1, + "sect283k1": SECT283K1, + "sect409k1": SECT409K1, + "sect571k1": SECT571K1, + + "sect163r2": SECT163R2, + "sect233r1": SECT233R1, + "sect283r1": SECT283R1, + "sect409r1": SECT409R1, + "sect571r1": SECT571R1, + + "brainpoolP256r1": BrainpoolP256R1, + "brainpoolP384r1": BrainpoolP384R1, + "brainpoolP512r1": BrainpoolP512R1, +} + + +@utils.register_interface(EllipticCurveSignatureAlgorithm) +class ECDSA(object): + def __init__(self, algorithm): + self._algorithm = algorithm + + algorithm = utils.read_only_property("_algorithm") + + +def generate_private_key(curve, backend): + return backend.generate_elliptic_curve_private_key(curve) + + +def derive_private_key(private_value, curve, backend): + if not isinstance(private_value, six.integer_types): + raise TypeError("private_value must be an integer type.") + + if private_value <= 0: + raise ValueError("private_value must be a positive integer.") + + if not isinstance(curve, EllipticCurve): + raise TypeError("curve must provide the EllipticCurve interface.") + + return backend.derive_elliptic_curve_private_key(private_value, curve) + + +class EllipticCurvePublicNumbers(object): + def __init__(self, x, y, curve): + if ( + not isinstance(x, six.integer_types) or + not isinstance(y, six.integer_types) + ): + raise TypeError("x and y must be integers.") + + if not isinstance(curve, EllipticCurve): + raise TypeError("curve must provide the EllipticCurve interface.") + + self._y = y + self._x = x + self._curve = curve + + def public_key(self, backend): + return backend.load_elliptic_curve_public_numbers(self) + + def encode_point(self): + warnings.warn( + "encode_point has been deprecated on EllipticCurvePublicNumbers" + " and will be removed in a future version. Please use " + "EllipticCurvePublicKey.public_bytes to obtain both " + "compressed and uncompressed point encoding.", + utils.DeprecatedIn25, + stacklevel=2, + ) + # key_size is in bits. Convert to bytes and round up + byte_length = (self.curve.key_size + 7) // 8 + return ( + b'\x04' + utils.int_to_bytes(self.x, byte_length) + + utils.int_to_bytes(self.y, byte_length) + ) + + @classmethod + def from_encoded_point(cls, curve, data): + if not isinstance(curve, EllipticCurve): + raise TypeError("curve must be an EllipticCurve instance") + + warnings.warn( + "Support for unsafe construction of public numbers from " + "encoded data will be removed in a future version. " + "Please use EllipticCurvePublicKey.from_encoded_point", + utils.DeprecatedIn25, + stacklevel=2, + ) + + if data.startswith(b'\x04'): + # key_size is in bits. 
Convert to bytes and round up + byte_length = (curve.key_size + 7) // 8 + if len(data) == 2 * byte_length + 1: + x = utils.int_from_bytes(data[1:byte_length + 1], 'big') + y = utils.int_from_bytes(data[byte_length + 1:], 'big') + return cls(x, y, curve) + else: + raise ValueError('Invalid elliptic curve point data length') + else: + raise ValueError('Unsupported elliptic curve point type') + + curve = utils.read_only_property("_curve") + x = utils.read_only_property("_x") + y = utils.read_only_property("_y") + + def __eq__(self, other): + if not isinstance(other, EllipticCurvePublicNumbers): + return NotImplemented + + return ( + self.x == other.x and + self.y == other.y and + self.curve.name == other.curve.name and + self.curve.key_size == other.curve.key_size + ) + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash((self.x, self.y, self.curve.name, self.curve.key_size)) + + def __repr__(self): + return ( + "<EllipticCurvePublicNumbers(curve={0.curve.name}, x={0.x}, y={0.y})>".format(self) + ) + + +class EllipticCurvePrivateNumbers(object): + def __init__(self, private_value, public_numbers): + if not isinstance(private_value, six.integer_types): + raise TypeError("private_value must be an integer.") + + if not isinstance(public_numbers, EllipticCurvePublicNumbers): + raise TypeError( + "public_numbers must be an EllipticCurvePublicNumbers " + "instance." + ) + + self._private_value = private_value + self._public_numbers = public_numbers + + def private_key(self, backend): + return backend.load_elliptic_curve_private_numbers(self) + + private_value = utils.read_only_property("_private_value") + public_numbers = utils.read_only_property("_public_numbers") + + def __eq__(self, other): + if not isinstance(other, EllipticCurvePrivateNumbers): + return NotImplemented + + return ( + self.private_value == other.private_value and + self.public_numbers == other.public_numbers + ) + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash((self.private_value, self.public_numbers)) + + +class ECDH(object): + pass + + +_OID_TO_CURVE = { + EllipticCurveOID.SECP192R1: SECP192R1, + EllipticCurveOID.SECP224R1: SECP224R1, + EllipticCurveOID.SECP256K1: SECP256K1, + EllipticCurveOID.SECP256R1: SECP256R1, + EllipticCurveOID.SECP384R1: SECP384R1, + EllipticCurveOID.SECP521R1: SECP521R1, + EllipticCurveOID.BRAINPOOLP256R1: BrainpoolP256R1, + EllipticCurveOID.BRAINPOOLP384R1: BrainpoolP384R1, + EllipticCurveOID.BRAINPOOLP512R1: BrainpoolP512R1, + EllipticCurveOID.SECT163K1: SECT163K1, + EllipticCurveOID.SECT163R2: SECT163R2, + EllipticCurveOID.SECT233K1: SECT233K1, + EllipticCurveOID.SECT233R1: SECT233R1, + EllipticCurveOID.SECT283K1: SECT283K1, + EllipticCurveOID.SECT283R1: SECT283R1, + EllipticCurveOID.SECT409K1: SECT409K1, + EllipticCurveOID.SECT409R1: SECT409R1, + EllipticCurveOID.SECT571K1: SECT571K1, + EllipticCurveOID.SECT571R1: SECT571R1, +} + + +def get_curve_for_oid(oid): + try: + return _OID_TO_CURVE[oid] + except KeyError: + raise LookupError( + "The provided object identifier has no matching elliptic " + "curve class" + ) diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/primitives/asymmetric/ed25519.py b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/asymmetric/ed25519.py new file mode 100644 index 0000000..d89445f --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/asymmetric/ed25519.py @@ -0,0 +1,84 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License.
See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc + +import six + +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons + + +_ED25519_KEY_SIZE = 32 +_ED25519_SIG_SIZE = 64 + + +@six.add_metaclass(abc.ABCMeta) +class Ed25519PublicKey(object): + @classmethod + def from_public_bytes(cls, data): + from cryptography.hazmat.backends.openssl.backend import backend + if not backend.ed25519_supported(): + raise UnsupportedAlgorithm( + "ed25519 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM + ) + + return backend.ed25519_load_public_bytes(data) + + @abc.abstractmethod + def public_bytes(self, encoding, format): + """ + The serialized bytes of the public key. + """ + + @abc.abstractmethod + def verify(self, signature, data): + """ + Verify the signature. + """ + + +@six.add_metaclass(abc.ABCMeta) +class Ed25519PrivateKey(object): + @classmethod + def generate(cls): + from cryptography.hazmat.backends.openssl.backend import backend + if not backend.ed25519_supported(): + raise UnsupportedAlgorithm( + "ed25519 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM + ) + + return backend.ed25519_generate_key() + + @classmethod + def from_private_bytes(cls, data): + from cryptography.hazmat.backends.openssl.backend import backend + if not backend.ed25519_supported(): + raise UnsupportedAlgorithm( + "ed25519 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM + ) + + return backend.ed25519_load_private_bytes(data) + + @abc.abstractmethod + def public_key(self): + """ + The Ed25519PublicKey derived from the private key. + """ + + @abc.abstractmethod + def private_bytes(self, encoding, format, encryption_algorithm): + """ + The serialized bytes of the private key. + """ + + @abc.abstractmethod + def sign(self, data): + """ + Signs the data. + """ diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/primitives/asymmetric/ed448.py b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/asymmetric/ed448.py new file mode 100644 index 0000000..939157a --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/asymmetric/ed448.py @@ -0,0 +1,79 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc + +import six + +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons + + +@six.add_metaclass(abc.ABCMeta) +class Ed448PublicKey(object): + @classmethod + def from_public_bytes(cls, data): + from cryptography.hazmat.backends.openssl.backend import backend + if not backend.ed448_supported(): + raise UnsupportedAlgorithm( + "ed448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM + ) + + return backend.ed448_load_public_bytes(data) + + @abc.abstractmethod + def public_bytes(self, encoding, format): + """ + The serialized bytes of the public key. + """ + + @abc.abstractmethod + def verify(self, signature, data): + """ + Verify the signature. 
+ """ + + +@six.add_metaclass(abc.ABCMeta) +class Ed448PrivateKey(object): + @classmethod + def generate(cls): + from cryptography.hazmat.backends.openssl.backend import backend + if not backend.ed448_supported(): + raise UnsupportedAlgorithm( + "ed448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM + ) + return backend.ed448_generate_key() + + @classmethod + def from_private_bytes(cls, data): + from cryptography.hazmat.backends.openssl.backend import backend + if not backend.ed448_supported(): + raise UnsupportedAlgorithm( + "ed448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM + ) + + return backend.ed448_load_private_bytes(data) + + @abc.abstractmethod + def public_key(self): + """ + The Ed448PublicKey derived from the private key. + """ + + @abc.abstractmethod + def sign(self, data): + """ + Signs the data. + """ + + @abc.abstractmethod + def private_bytes(self, encoding, format, encryption_algorithm): + """ + The serialized bytes of the private key. + """ diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/primitives/asymmetric/padding.py b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/asymmetric/padding.py new file mode 100644 index 0000000..a37c3f9 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/asymmetric/padding.py @@ -0,0 +1,79 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc +import math + +import six + +from cryptography import utils +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric import rsa + + +@six.add_metaclass(abc.ABCMeta) +class AsymmetricPadding(object): + @abc.abstractproperty + def name(self): + """ + A string naming this padding (e.g. "PSS", "PKCS1"). 
+ """ + + +@utils.register_interface(AsymmetricPadding) +class PKCS1v15(object): + name = "EMSA-PKCS1-v1_5" + + +@utils.register_interface(AsymmetricPadding) +class PSS(object): + MAX_LENGTH = object() + name = "EMSA-PSS" + + def __init__(self, mgf, salt_length): + self._mgf = mgf + + if (not isinstance(salt_length, six.integer_types) and + salt_length is not self.MAX_LENGTH): + raise TypeError("salt_length must be an integer.") + + if salt_length is not self.MAX_LENGTH and salt_length < 0: + raise ValueError("salt_length must be zero or greater.") + + self._salt_length = salt_length + + +@utils.register_interface(AsymmetricPadding) +class OAEP(object): + name = "EME-OAEP" + + def __init__(self, mgf, algorithm, label): + if not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError("Expected instance of hashes.HashAlgorithm.") + + self._mgf = mgf + self._algorithm = algorithm + self._label = label + + +class MGF1(object): + MAX_LENGTH = object() + + def __init__(self, algorithm): + if not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError("Expected instance of hashes.HashAlgorithm.") + + self._algorithm = algorithm + + +def calculate_max_pss_salt_length(key, hash_algorithm): + if not isinstance(key, (rsa.RSAPrivateKey, rsa.RSAPublicKey)): + raise TypeError("key must be an RSA public or private key") + # bit length - 1 per RFC 3447 + emlen = int(math.ceil((key.key_size - 1) / 8.0)) + salt_length = emlen - hash_algorithm.digest_size - 2 + assert salt_length >= 0 + return salt_length diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/primitives/asymmetric/rsa.py b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/asymmetric/rsa.py new file mode 100644 index 0000000..27db671 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/asymmetric/rsa.py @@ -0,0 +1,368 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc +try: + # Only available in math in 3.5+ + from math import gcd +except ImportError: + from fractions import gcd + +import six + +from cryptography import utils +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.backends.interfaces import RSABackend + + +@six.add_metaclass(abc.ABCMeta) +class RSAPrivateKey(object): + @abc.abstractmethod + def signer(self, padding, algorithm): + """ + Returns an AsymmetricSignatureContext used for signing data. + """ + + @abc.abstractmethod + def decrypt(self, ciphertext, padding): + """ + Decrypts the provided ciphertext. + """ + + @abc.abstractproperty + def key_size(self): + """ + The bit length of the public modulus. + """ + + @abc.abstractmethod + def public_key(self): + """ + The RSAPublicKey associated with this private key. + """ + + @abc.abstractmethod + def sign(self, data, padding, algorithm): + """ + Signs the data. + """ + + +@six.add_metaclass(abc.ABCMeta) +class RSAPrivateKeyWithSerialization(RSAPrivateKey): + @abc.abstractmethod + def private_numbers(self): + """ + Returns an RSAPrivateNumbers. + """ + + @abc.abstractmethod + def private_bytes(self, encoding, format, encryption_algorithm): + """ + Returns the key serialized as bytes. 
+ """ + + +@six.add_metaclass(abc.ABCMeta) +class RSAPublicKey(object): + @abc.abstractmethod + def verifier(self, signature, padding, algorithm): + """ + Returns an AsymmetricVerificationContext used for verifying signatures. + """ + + @abc.abstractmethod + def encrypt(self, plaintext, padding): + """ + Encrypts the given plaintext. + """ + + @abc.abstractproperty + def key_size(self): + """ + The bit length of the public modulus. + """ + + @abc.abstractmethod + def public_numbers(self): + """ + Returns an RSAPublicNumbers + """ + + @abc.abstractmethod + def public_bytes(self, encoding, format): + """ + Returns the key serialized as bytes. + """ + + @abc.abstractmethod + def verify(self, signature, data, padding, algorithm): + """ + Verifies the signature of the data. + """ + + +RSAPublicKeyWithSerialization = RSAPublicKey + + +def generate_private_key(public_exponent, key_size, backend): + if not isinstance(backend, RSABackend): + raise UnsupportedAlgorithm( + "Backend object does not implement RSABackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + _verify_rsa_parameters(public_exponent, key_size) + return backend.generate_rsa_private_key(public_exponent, key_size) + + +def _verify_rsa_parameters(public_exponent, key_size): + if public_exponent < 3: + raise ValueError("public_exponent must be >= 3.") + + if public_exponent & 1 == 0: + raise ValueError("public_exponent must be odd.") + + if key_size < 512: + raise ValueError("key_size must be at least 512-bits.") + + +def _check_private_key_components(p, q, private_exponent, dmp1, dmq1, iqmp, + public_exponent, modulus): + if modulus < 3: + raise ValueError("modulus must be >= 3.") + + if p >= modulus: + raise ValueError("p must be < modulus.") + + if q >= modulus: + raise ValueError("q must be < modulus.") + + if dmp1 >= modulus: + raise ValueError("dmp1 must be < modulus.") + + if dmq1 >= modulus: + raise ValueError("dmq1 must be < modulus.") + + if iqmp >= modulus: + raise ValueError("iqmp must be < modulus.") + + if private_exponent >= modulus: + raise ValueError("private_exponent must be < modulus.") + + if public_exponent < 3 or public_exponent >= modulus: + raise ValueError("public_exponent must be >= 3 and < modulus.") + + if public_exponent & 1 == 0: + raise ValueError("public_exponent must be odd.") + + if dmp1 & 1 == 0: + raise ValueError("dmp1 must be odd.") + + if dmq1 & 1 == 0: + raise ValueError("dmq1 must be odd.") + + if p * q != modulus: + raise ValueError("p*q must equal modulus.") + + +def _check_public_key_components(e, n): + if n < 3: + raise ValueError("n must be >= 3.") + + if e < 3 or e >= n: + raise ValueError("e must be >= 3 and < n.") + + if e & 1 == 0: + raise ValueError("e must be odd.") + + +def _modinv(e, m): + """ + Modular Multiplicative Inverse. Returns x such that: (x*e) mod m == 1 + """ + x1, y1, x2, y2 = 1, 0, 0, 1 + a, b = e, m + while b > 0: + q, r = divmod(a, b) + xn, yn = x1 - q * x2, y1 - q * y2 + a, b, x1, y1, x2, y2 = b, r, x2, y2, xn, yn + return x1 % m + + +def rsa_crt_iqmp(p, q): + """ + Compute the CRT (q ** -1) % p value from RSA primes p and q. + """ + return _modinv(q, p) + + +def rsa_crt_dmp1(private_exponent, p): + """ + Compute the CRT private_exponent % (p - 1) value from the RSA + private_exponent (d) and p. + """ + return private_exponent % (p - 1) + + +def rsa_crt_dmq1(private_exponent, q): + """ + Compute the CRT private_exponent % (q - 1) value from the RSA + private_exponent (d) and q. 
+ """ + return private_exponent % (q - 1) + + +# Controls the number of iterations rsa_recover_prime_factors will perform +# to obtain the prime factors. Each iteration increments by 2 so the actual +# maximum attempts is half this number. +_MAX_RECOVERY_ATTEMPTS = 1000 + + +def rsa_recover_prime_factors(n, e, d): + """ + Compute factors p and q from the private exponent d. We assume that n has + no more than two factors. This function is adapted from code in PyCrypto. + """ + # See 8.2.2(i) in Handbook of Applied Cryptography. + ktot = d * e - 1 + # The quantity d*e-1 is a multiple of phi(n), even, + # and can be represented as t*2^s. + t = ktot + while t % 2 == 0: + t = t // 2 + # Cycle through all multiplicative inverses in Zn. + # The algorithm is non-deterministic, but there is a 50% chance + # any candidate a leads to successful factoring. + # See "Digitalized Signatures and Public Key Functions as Intractable + # as Factorization", M. Rabin, 1979 + spotted = False + a = 2 + while not spotted and a < _MAX_RECOVERY_ATTEMPTS: + k = t + # Cycle through all values a^{t*2^i}=a^k + while k < ktot: + cand = pow(a, k, n) + # Check if a^k is a non-trivial root of unity (mod n) + if cand != 1 and cand != (n - 1) and pow(cand, 2, n) == 1: + # We have found a number such that (cand-1)(cand+1)=0 (mod n). + # Either of the terms divides n. + p = gcd(cand + 1, n) + spotted = True + break + k *= 2 + # This value was not any good... let's try another! + a += 2 + if not spotted: + raise ValueError("Unable to compute factors p and q from exponent d.") + # Found ! + q, r = divmod(n, p) + assert r == 0 + p, q = sorted((p, q), reverse=True) + return (p, q) + + +class RSAPrivateNumbers(object): + def __init__(self, p, q, d, dmp1, dmq1, iqmp, + public_numbers): + if ( + not isinstance(p, six.integer_types) or + not isinstance(q, six.integer_types) or + not isinstance(d, six.integer_types) or + not isinstance(dmp1, six.integer_types) or + not isinstance(dmq1, six.integer_types) or + not isinstance(iqmp, six.integer_types) + ): + raise TypeError( + "RSAPrivateNumbers p, q, d, dmp1, dmq1, iqmp arguments must" + " all be an integers." + ) + + if not isinstance(public_numbers, RSAPublicNumbers): + raise TypeError( + "RSAPrivateNumbers public_numbers must be an RSAPublicNumbers" + " instance." 
+ ) + + self._p = p + self._q = q + self._d = d + self._dmp1 = dmp1 + self._dmq1 = dmq1 + self._iqmp = iqmp + self._public_numbers = public_numbers + + p = utils.read_only_property("_p") + q = utils.read_only_property("_q") + d = utils.read_only_property("_d") + dmp1 = utils.read_only_property("_dmp1") + dmq1 = utils.read_only_property("_dmq1") + iqmp = utils.read_only_property("_iqmp") + public_numbers = utils.read_only_property("_public_numbers") + + def private_key(self, backend): + return backend.load_rsa_private_numbers(self) + + def __eq__(self, other): + if not isinstance(other, RSAPrivateNumbers): + return NotImplemented + + return ( + self.p == other.p and + self.q == other.q and + self.d == other.d and + self.dmp1 == other.dmp1 and + self.dmq1 == other.dmq1 and + self.iqmp == other.iqmp and + self.public_numbers == other.public_numbers + ) + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(( + self.p, + self.q, + self.d, + self.dmp1, + self.dmq1, + self.iqmp, + self.public_numbers, + )) + + +class RSAPublicNumbers(object): + def __init__(self, e, n): + if ( + not isinstance(e, six.integer_types) or + not isinstance(n, six.integer_types) + ): + raise TypeError("RSAPublicNumbers arguments must be integers.") + + self._e = e + self._n = n + + e = utils.read_only_property("_e") + n = utils.read_only_property("_n") + + def public_key(self, backend): + return backend.load_rsa_public_numbers(self) + + def __repr__(self): + return "<RSAPublicNumbers(e={0.e}, n={0.n})>".format(self) + + def __eq__(self, other): + if not isinstance(other, RSAPublicNumbers): + return NotImplemented + + return self.e == other.e and self.n == other.n + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash((self.e, self.n)) diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/primitives/asymmetric/utils.py b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/asymmetric/utils.py new file mode 100644 index 0000000..14d2abe --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/asymmetric/utils.py @@ -0,0 +1,37 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details.
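Usage sketch for the RSA helpers vendored above (illustrative only, not part of the patch). The textbook-sized values are chosen so the arithmetic is visible; real keys come from rsa.generate_private_key, and a backend would still be needed to turn the numbers into a key object.

from cryptography.hazmat.primitives.asymmetric import rsa

# Toy parameters: p=61, q=53, n=p*q=3233, e=17, d=e^-1 mod (p-1)(q-1)=2753.
n, e, d = 3233, 17, 2753
p, q = rsa.rsa_recover_prime_factors(n, e, d)   # yields p=61, q=53 (sorted descending)
iqmp = rsa.rsa_crt_iqmp(p, q)
dmp1 = rsa.rsa_crt_dmp1(d, p)
dmq1 = rsa.rsa_crt_dmq1(d, q)
numbers = rsa.RSAPrivateNumbers(p, q, d, dmp1, dmq1, iqmp,
                                rsa.RSAPublicNumbers(e, n))
# numbers.private_key(backend) would then ask an RSABackend to build the key;
# real keys should of course come from rsa.generate_private_key(65537, 2048, backend).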
+ +from __future__ import absolute_import, division, print_function + +from cryptography import utils +from cryptography.hazmat._der import ( + DERReader, INTEGER, SEQUENCE, encode_der, encode_der_integer +) +from cryptography.hazmat.primitives import hashes + + +def decode_dss_signature(signature): + with DERReader(signature).read_single_element(SEQUENCE) as seq: + r = seq.read_element(INTEGER).as_integer() + s = seq.read_element(INTEGER).as_integer() + return r, s + + +def encode_dss_signature(r, s): + return encode_der( + SEQUENCE, + encode_der(INTEGER, encode_der_integer(r)), + encode_der(INTEGER, encode_der_integer(s)), + ) + + +class Prehashed(object): + def __init__(self, algorithm): + if not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError("Expected instance of HashAlgorithm.") + + self._algorithm = algorithm + self._digest_size = algorithm.digest_size + + digest_size = utils.read_only_property("_digest_size") diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/primitives/asymmetric/x25519.py b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/asymmetric/x25519.py new file mode 100644 index 0000000..4e8badf --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/asymmetric/x25519.py @@ -0,0 +1,73 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc + +import six + +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons + + +@six.add_metaclass(abc.ABCMeta) +class X25519PublicKey(object): + @classmethod + def from_public_bytes(cls, data): + from cryptography.hazmat.backends.openssl.backend import backend + if not backend.x25519_supported(): + raise UnsupportedAlgorithm( + "X25519 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM + ) + + return backend.x25519_load_public_bytes(data) + + @abc.abstractmethod + def public_bytes(self, encoding=None, format=None): + """ + The serialized bytes of the public key. + """ + + +@six.add_metaclass(abc.ABCMeta) +class X25519PrivateKey(object): + @classmethod + def generate(cls): + from cryptography.hazmat.backends.openssl.backend import backend + if not backend.x25519_supported(): + raise UnsupportedAlgorithm( + "X25519 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM + ) + return backend.x25519_generate_key() + + @classmethod + def from_private_bytes(cls, data): + from cryptography.hazmat.backends.openssl.backend import backend + if not backend.x25519_supported(): + raise UnsupportedAlgorithm( + "X25519 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM + ) + + return backend.x25519_load_private_bytes(data) + + @abc.abstractmethod + def public_key(self): + """ + The serialized bytes of the public key. + """ + + @abc.abstractmethod + def private_bytes(self, encoding, format, encryption_algorithm): + """ + The serialized bytes of the private key. + """ + + @abc.abstractmethod + def exchange(self, peer_public_key): + """ + Performs a key exchange operation using the provided peer's public key. 
+ """ diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/primitives/asymmetric/x448.py b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/asymmetric/x448.py new file mode 100644 index 0000000..475e678 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/asymmetric/x448.py @@ -0,0 +1,73 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc + +import six + +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons + + +@six.add_metaclass(abc.ABCMeta) +class X448PublicKey(object): + @classmethod + def from_public_bytes(cls, data): + from cryptography.hazmat.backends.openssl.backend import backend + if not backend.x448_supported(): + raise UnsupportedAlgorithm( + "X448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM + ) + + return backend.x448_load_public_bytes(data) + + @abc.abstractmethod + def public_bytes(self, encoding, format): + """ + The serialized bytes of the public key. + """ + + +@six.add_metaclass(abc.ABCMeta) +class X448PrivateKey(object): + @classmethod + def generate(cls): + from cryptography.hazmat.backends.openssl.backend import backend + if not backend.x448_supported(): + raise UnsupportedAlgorithm( + "X448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM + ) + return backend.x448_generate_key() + + @classmethod + def from_private_bytes(cls, data): + from cryptography.hazmat.backends.openssl.backend import backend + if not backend.x448_supported(): + raise UnsupportedAlgorithm( + "X448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM + ) + + return backend.x448_load_private_bytes(data) + + @abc.abstractmethod + def public_key(self): + """ + The serialized bytes of the public key. + """ + + @abc.abstractmethod + def private_bytes(self, encoding, format, encryption_algorithm): + """ + The serialized bytes of the private key. + """ + + @abc.abstractmethod + def exchange(self, peer_public_key): + """ + Performs a key exchange operation using the provided peer's public key. + """ diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/primitives/ciphers/__init__.py b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/ciphers/__init__.py new file mode 100644 index 0000000..171b1c6 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/ciphers/__init__.py @@ -0,0 +1,21 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import absolute_import, division, print_function + +from cryptography.hazmat.primitives.ciphers.base import ( + AEADCipherContext, AEADDecryptionContext, AEADEncryptionContext, + BlockCipherAlgorithm, Cipher, CipherAlgorithm, CipherContext +) + + +__all__ = [ + "Cipher", + "CipherAlgorithm", + "BlockCipherAlgorithm", + "CipherContext", + "AEADCipherContext", + "AEADDecryptionContext", + "AEADEncryptionContext", +] diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/primitives/ciphers/aead.py b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/ciphers/aead.py new file mode 100644 index 0000000..42e19ad --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/ciphers/aead.py @@ -0,0 +1,188 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import os + +from cryptography import exceptions, utils +from cryptography.hazmat.backends.openssl import aead +from cryptography.hazmat.backends.openssl.backend import backend + + +class ChaCha20Poly1305(object): + _MAX_SIZE = 2 ** 32 + + def __init__(self, key): + if not backend.aead_cipher_supported(self): + raise exceptions.UnsupportedAlgorithm( + "ChaCha20Poly1305 is not supported by this version of OpenSSL", + exceptions._Reasons.UNSUPPORTED_CIPHER + ) + utils._check_byteslike("key", key) + + if len(key) != 32: + raise ValueError("ChaCha20Poly1305 key must be 32 bytes.") + + self._key = key + + @classmethod + def generate_key(cls): + return os.urandom(32) + + def encrypt(self, nonce, data, associated_data): + if associated_data is None: + associated_data = b"" + + if len(data) > self._MAX_SIZE or len(associated_data) > self._MAX_SIZE: + # This is OverflowError to match what cffi would raise + raise OverflowError( + "Data or associated data too long. 
Max 2**32 bytes" + ) + + self._check_params(nonce, data, associated_data) + return aead._encrypt( + backend, self, nonce, data, associated_data, 16 + ) + + def decrypt(self, nonce, data, associated_data): + if associated_data is None: + associated_data = b"" + + self._check_params(nonce, data, associated_data) + return aead._decrypt( + backend, self, nonce, data, associated_data, 16 + ) + + def _check_params(self, nonce, data, associated_data): + utils._check_byteslike("nonce", nonce) + utils._check_bytes("data", data) + utils._check_bytes("associated_data", associated_data) + if len(nonce) != 12: + raise ValueError("Nonce must be 12 bytes") + + +class AESCCM(object): + _MAX_SIZE = 2 ** 32 + + def __init__(self, key, tag_length=16): + utils._check_byteslike("key", key) + if len(key) not in (16, 24, 32): + raise ValueError("AESCCM key must be 128, 192, or 256 bits.") + + self._key = key + if not isinstance(tag_length, int): + raise TypeError("tag_length must be an integer") + + if tag_length not in (4, 6, 8, 10, 12, 14, 16): + raise ValueError("Invalid tag_length") + + self._tag_length = tag_length + + if not backend.aead_cipher_supported(self): + raise exceptions.UnsupportedAlgorithm( + "AESCCM is not supported by this version of OpenSSL", + exceptions._Reasons.UNSUPPORTED_CIPHER + ) + + @classmethod + def generate_key(cls, bit_length): + if not isinstance(bit_length, int): + raise TypeError("bit_length must be an integer") + + if bit_length not in (128, 192, 256): + raise ValueError("bit_length must be 128, 192, or 256") + + return os.urandom(bit_length // 8) + + def encrypt(self, nonce, data, associated_data): + if associated_data is None: + associated_data = b"" + + if len(data) > self._MAX_SIZE or len(associated_data) > self._MAX_SIZE: + # This is OverflowError to match what cffi would raise + raise OverflowError( + "Data or associated data too long. 
Max 2**32 bytes" + ) + + self._check_params(nonce, data, associated_data) + self._validate_lengths(nonce, len(data)) + return aead._encrypt( + backend, self, nonce, data, associated_data, self._tag_length + ) + + def decrypt(self, nonce, data, associated_data): + if associated_data is None: + associated_data = b"" + + self._check_params(nonce, data, associated_data) + return aead._decrypt( + backend, self, nonce, data, associated_data, self._tag_length + ) + + def _validate_lengths(self, nonce, data_len): + # For information about computing this, see + # https://tools.ietf.org/html/rfc3610#section-2.1 + l_val = 15 - len(nonce) + if 2 ** (8 * l_val) < data_len: + raise ValueError("Nonce too long for data") + + def _check_params(self, nonce, data, associated_data): + utils._check_byteslike("nonce", nonce) + utils._check_bytes("data", data) + utils._check_bytes("associated_data", associated_data) + if not 7 <= len(nonce) <= 13: + raise ValueError("Nonce must be between 7 and 13 bytes") + + +class AESGCM(object): + _MAX_SIZE = 2 ** 32 + + def __init__(self, key): + utils._check_byteslike("key", key) + if len(key) not in (16, 24, 32): + raise ValueError("AESGCM key must be 128, 192, or 256 bits.") + + self._key = key + + @classmethod + def generate_key(cls, bit_length): + if not isinstance(bit_length, int): + raise TypeError("bit_length must be an integer") + + if bit_length not in (128, 192, 256): + raise ValueError("bit_length must be 128, 192, or 256") + + return os.urandom(bit_length // 8) + + def encrypt(self, nonce, data, associated_data): + if associated_data is None: + associated_data = b"" + + if len(data) > self._MAX_SIZE or len(associated_data) > self._MAX_SIZE: + # This is OverflowError to match what cffi would raise + raise OverflowError( + "Data or associated data too long. Max 2**32 bytes" + ) + + self._check_params(nonce, data, associated_data) + return aead._encrypt( + backend, self, nonce, data, associated_data, 16 + ) + + def decrypt(self, nonce, data, associated_data): + if associated_data is None: + associated_data = b"" + + self._check_params(nonce, data, associated_data) + return aead._decrypt( + backend, self, nonce, data, associated_data, 16 + ) + + def _check_params(self, nonce, data, associated_data): + utils._check_byteslike("nonce", nonce) + utils._check_bytes("data", data) + utils._check_bytes("associated_data", associated_data) + if len(nonce) == 0: + raise ValueError("Nonce must be at least 1 byte") diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/primitives/ciphers/algorithms.py b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/ciphers/algorithms.py new file mode 100644 index 0000000..f4d5160 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/ciphers/algorithms.py @@ -0,0 +1,167 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import absolute_import, division, print_function + +from cryptography import utils +from cryptography.hazmat.primitives.ciphers import ( + BlockCipherAlgorithm, CipherAlgorithm +) +from cryptography.hazmat.primitives.ciphers.modes import ModeWithNonce + + +def _verify_key_size(algorithm, key): + # Verify that the key is instance of bytes + utils._check_byteslike("key", key) + + # Verify that the key size matches the expected key size + if len(key) * 8 not in algorithm.key_sizes: + raise ValueError("Invalid key size ({}) for {}.".format( + len(key) * 8, algorithm.name + )) + return key + + +@utils.register_interface(BlockCipherAlgorithm) +@utils.register_interface(CipherAlgorithm) +class AES(object): + name = "AES" + block_size = 128 + # 512 added to support AES-256-XTS, which uses 512-bit keys + key_sizes = frozenset([128, 192, 256, 512]) + + def __init__(self, key): + self.key = _verify_key_size(self, key) + + @property + def key_size(self): + return len(self.key) * 8 + + +@utils.register_interface(BlockCipherAlgorithm) +@utils.register_interface(CipherAlgorithm) +class Camellia(object): + name = "camellia" + block_size = 128 + key_sizes = frozenset([128, 192, 256]) + + def __init__(self, key): + self.key = _verify_key_size(self, key) + + @property + def key_size(self): + return len(self.key) * 8 + + +@utils.register_interface(BlockCipherAlgorithm) +@utils.register_interface(CipherAlgorithm) +class TripleDES(object): + name = "3DES" + block_size = 64 + key_sizes = frozenset([64, 128, 192]) + + def __init__(self, key): + if len(key) == 8: + key += key + key + elif len(key) == 16: + key += key[:8] + self.key = _verify_key_size(self, key) + + @property + def key_size(self): + return len(self.key) * 8 + + +@utils.register_interface(BlockCipherAlgorithm) +@utils.register_interface(CipherAlgorithm) +class Blowfish(object): + name = "Blowfish" + block_size = 64 + key_sizes = frozenset(range(32, 449, 8)) + + def __init__(self, key): + self.key = _verify_key_size(self, key) + + @property + def key_size(self): + return len(self.key) * 8 + + +@utils.register_interface(BlockCipherAlgorithm) +@utils.register_interface(CipherAlgorithm) +class CAST5(object): + name = "CAST5" + block_size = 64 + key_sizes = frozenset(range(40, 129, 8)) + + def __init__(self, key): + self.key = _verify_key_size(self, key) + + @property + def key_size(self): + return len(self.key) * 8 + + +@utils.register_interface(CipherAlgorithm) +class ARC4(object): + name = "RC4" + key_sizes = frozenset([40, 56, 64, 80, 128, 160, 192, 256]) + + def __init__(self, key): + self.key = _verify_key_size(self, key) + + @property + def key_size(self): + return len(self.key) * 8 + + +@utils.register_interface(CipherAlgorithm) +class IDEA(object): + name = "IDEA" + block_size = 64 + key_sizes = frozenset([128]) + + def __init__(self, key): + self.key = _verify_key_size(self, key) + + @property + def key_size(self): + return len(self.key) * 8 + + +@utils.register_interface(BlockCipherAlgorithm) +@utils.register_interface(CipherAlgorithm) +class SEED(object): + name = "SEED" + block_size = 128 + key_sizes = frozenset([128]) + + def __init__(self, key): + self.key = _verify_key_size(self, key) + + @property + def key_size(self): + return len(self.key) * 8 + + +@utils.register_interface(CipherAlgorithm) +@utils.register_interface(ModeWithNonce) +class ChaCha20(object): + name = "ChaCha20" + key_sizes = frozenset([256]) + + def __init__(self, key, nonce): + self.key = _verify_key_size(self, key) + utils._check_byteslike("nonce", 
nonce) + + if len(nonce) != 16: + raise ValueError("nonce must be 128-bits (16 bytes)") + + self._nonce = nonce + + nonce = utils.read_only_property("_nonce") + + @property + def key_size(self): + return len(self.key) * 8 diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/primitives/ciphers/base.py b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/ciphers/base.py new file mode 100644 index 0000000..4d5f8d6 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/ciphers/base.py @@ -0,0 +1,235 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc + +import six + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, AlreadyUpdated, NotYetFinalized, UnsupportedAlgorithm, + _Reasons +) +from cryptography.hazmat.backends.interfaces import CipherBackend +from cryptography.hazmat.primitives.ciphers import modes + + +@six.add_metaclass(abc.ABCMeta) +class CipherAlgorithm(object): + @abc.abstractproperty + def name(self): + """ + A string naming this mode (e.g. "AES", "Camellia"). + """ + + @abc.abstractproperty + def key_size(self): + """ + The size of the key being used as an integer in bits (e.g. 128, 256). + """ + + +@six.add_metaclass(abc.ABCMeta) +class BlockCipherAlgorithm(object): + @abc.abstractproperty + def block_size(self): + """ + The size of a block as an integer in bits (e.g. 64, 128). + """ + + +@six.add_metaclass(abc.ABCMeta) +class CipherContext(object): + @abc.abstractmethod + def update(self, data): + """ + Processes the provided bytes through the cipher and returns the results + as bytes. + """ + + @abc.abstractmethod + def update_into(self, data, buf): + """ + Processes the provided bytes and writes the resulting data into the + provided buffer. Returns the number of bytes written. + """ + + @abc.abstractmethod + def finalize(self): + """ + Returns the results of processing the final block as bytes. + """ + + +@six.add_metaclass(abc.ABCMeta) +class AEADCipherContext(object): + @abc.abstractmethod + def authenticate_additional_data(self, data): + """ + Authenticates the provided bytes. + """ + + +@six.add_metaclass(abc.ABCMeta) +class AEADDecryptionContext(object): + @abc.abstractmethod + def finalize_with_tag(self, tag): + """ + Returns the results of processing the final block as bytes and allows + delayed passing of the authentication tag. + """ + + +@six.add_metaclass(abc.ABCMeta) +class AEADEncryptionContext(object): + @abc.abstractproperty + def tag(self): + """ + Returns tag bytes. This is only available after encryption is + finalized. + """ + + +class Cipher(object): + def __init__(self, algorithm, mode, backend): + if not isinstance(backend, CipherBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement CipherBackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + if not isinstance(algorithm, CipherAlgorithm): + raise TypeError("Expected interface of CipherAlgorithm.") + + if mode is not None: + mode.validate_for_algorithm(algorithm) + + self.algorithm = algorithm + self.mode = mode + self._backend = backend + + def encryptor(self): + if isinstance(self.mode, modes.ModeWithAuthenticationTag): + if self.mode.tag is not None: + raise ValueError( + "Authentication tag must be None when encrypting." 
+ ) + ctx = self._backend.create_symmetric_encryption_ctx( + self.algorithm, self.mode + ) + return self._wrap_ctx(ctx, encrypt=True) + + def decryptor(self): + ctx = self._backend.create_symmetric_decryption_ctx( + self.algorithm, self.mode + ) + return self._wrap_ctx(ctx, encrypt=False) + + def _wrap_ctx(self, ctx, encrypt): + if isinstance(self.mode, modes.ModeWithAuthenticationTag): + if encrypt: + return _AEADEncryptionContext(ctx) + else: + return _AEADCipherContext(ctx) + else: + return _CipherContext(ctx) + + +@utils.register_interface(CipherContext) +class _CipherContext(object): + def __init__(self, ctx): + self._ctx = ctx + + def update(self, data): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + return self._ctx.update(data) + + def update_into(self, data, buf): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + return self._ctx.update_into(data, buf) + + def finalize(self): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + data = self._ctx.finalize() + self._ctx = None + return data + + +@utils.register_interface(AEADCipherContext) +@utils.register_interface(CipherContext) +@utils.register_interface(AEADDecryptionContext) +class _AEADCipherContext(object): + def __init__(self, ctx): + self._ctx = ctx + self._bytes_processed = 0 + self._aad_bytes_processed = 0 + self._tag = None + self._updated = False + + def _check_limit(self, data_size): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + self._updated = True + self._bytes_processed += data_size + if self._bytes_processed > self._ctx._mode._MAX_ENCRYPTED_BYTES: + raise ValueError( + "{} has a maximum encrypted byte limit of {}".format( + self._ctx._mode.name, self._ctx._mode._MAX_ENCRYPTED_BYTES + ) + ) + + def update(self, data): + self._check_limit(len(data)) + return self._ctx.update(data) + + def update_into(self, data, buf): + self._check_limit(len(data)) + return self._ctx.update_into(data, buf) + + def finalize(self): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + data = self._ctx.finalize() + self._tag = self._ctx.tag + self._ctx = None + return data + + def finalize_with_tag(self, tag): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + data = self._ctx.finalize_with_tag(tag) + self._tag = self._ctx.tag + self._ctx = None + return data + + def authenticate_additional_data(self, data): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + if self._updated: + raise AlreadyUpdated("Update has been called on this context.") + + self._aad_bytes_processed += len(data) + if self._aad_bytes_processed > self._ctx._mode._MAX_AAD_BYTES: + raise ValueError( + "{} has a maximum AAD byte limit of {}".format( + self._ctx._mode.name, self._ctx._mode._MAX_AAD_BYTES + ) + ) + + self._ctx.authenticate_additional_data(data) + + +@utils.register_interface(AEADEncryptionContext) +class _AEADEncryptionContext(_AEADCipherContext): + @property + def tag(self): + if self._ctx is not None: + raise NotYetFinalized("You must finalize encryption before " + "getting the tag.") + return self._tag diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/primitives/ciphers/modes.py b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/ciphers/modes.py new file mode 100644 index 0000000..78fa1c4 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/ciphers/modes.py 
@@ -0,0 +1,218 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc + +import six + +from cryptography import utils + + +@six.add_metaclass(abc.ABCMeta) +class Mode(object): + @abc.abstractproperty + def name(self): + """ + A string naming this mode (e.g. "ECB", "CBC"). + """ + + @abc.abstractmethod + def validate_for_algorithm(self, algorithm): + """ + Checks that all the necessary invariants of this (mode, algorithm) + combination are met. + """ + + +@six.add_metaclass(abc.ABCMeta) +class ModeWithInitializationVector(object): + @abc.abstractproperty + def initialization_vector(self): + """ + The value of the initialization vector for this mode as bytes. + """ + + +@six.add_metaclass(abc.ABCMeta) +class ModeWithTweak(object): + @abc.abstractproperty + def tweak(self): + """ + The value of the tweak for this mode as bytes. + """ + + +@six.add_metaclass(abc.ABCMeta) +class ModeWithNonce(object): + @abc.abstractproperty + def nonce(self): + """ + The value of the nonce for this mode as bytes. + """ + + +@six.add_metaclass(abc.ABCMeta) +class ModeWithAuthenticationTag(object): + @abc.abstractproperty + def tag(self): + """ + The value of the tag supplied to the constructor of this mode. + """ + + +def _check_aes_key_length(self, algorithm): + if algorithm.key_size > 256 and algorithm.name == "AES": + raise ValueError( + "Only 128, 192, and 256 bit keys are allowed for this AES mode" + ) + + +def _check_iv_length(self, algorithm): + if len(self.initialization_vector) * 8 != algorithm.block_size: + raise ValueError("Invalid IV size ({}) for {}.".format( + len(self.initialization_vector), self.name + )) + + +def _check_iv_and_key_length(self, algorithm): + _check_aes_key_length(self, algorithm) + _check_iv_length(self, algorithm) + + +@utils.register_interface(Mode) +@utils.register_interface(ModeWithInitializationVector) +class CBC(object): + name = "CBC" + + def __init__(self, initialization_vector): + utils._check_byteslike("initialization_vector", initialization_vector) + self._initialization_vector = initialization_vector + + initialization_vector = utils.read_only_property("_initialization_vector") + validate_for_algorithm = _check_iv_and_key_length + + +@utils.register_interface(Mode) +@utils.register_interface(ModeWithTweak) +class XTS(object): + name = "XTS" + + def __init__(self, tweak): + utils._check_byteslike("tweak", tweak) + + if len(tweak) != 16: + raise ValueError("tweak must be 128-bits (16 bytes)") + + self._tweak = tweak + + tweak = utils.read_only_property("_tweak") + + def validate_for_algorithm(self, algorithm): + if algorithm.key_size not in (256, 512): + raise ValueError( + "The XTS specification requires a 256-bit key for AES-128-XTS" + " and 512-bit key for AES-256-XTS" + ) + + +@utils.register_interface(Mode) +class ECB(object): + name = "ECB" + + validate_for_algorithm = _check_aes_key_length + + +@utils.register_interface(Mode) +@utils.register_interface(ModeWithInitializationVector) +class OFB(object): + name = "OFB" + + def __init__(self, initialization_vector): + utils._check_byteslike("initialization_vector", initialization_vector) + self._initialization_vector = initialization_vector + + initialization_vector = utils.read_only_property("_initialization_vector") + validate_for_algorithm = _check_iv_and_key_length + + +@utils.register_interface(Mode) 
+@utils.register_interface(ModeWithInitializationVector) +class CFB(object): + name = "CFB" + + def __init__(self, initialization_vector): + utils._check_byteslike("initialization_vector", initialization_vector) + self._initialization_vector = initialization_vector + + initialization_vector = utils.read_only_property("_initialization_vector") + validate_for_algorithm = _check_iv_and_key_length + + +@utils.register_interface(Mode) +@utils.register_interface(ModeWithInitializationVector) +class CFB8(object): + name = "CFB8" + + def __init__(self, initialization_vector): + utils._check_byteslike("initialization_vector", initialization_vector) + self._initialization_vector = initialization_vector + + initialization_vector = utils.read_only_property("_initialization_vector") + validate_for_algorithm = _check_iv_and_key_length + + +@utils.register_interface(Mode) +@utils.register_interface(ModeWithNonce) +class CTR(object): + name = "CTR" + + def __init__(self, nonce): + utils._check_byteslike("nonce", nonce) + self._nonce = nonce + + nonce = utils.read_only_property("_nonce") + + def validate_for_algorithm(self, algorithm): + _check_aes_key_length(self, algorithm) + if len(self.nonce) * 8 != algorithm.block_size: + raise ValueError("Invalid nonce size ({}) for {}.".format( + len(self.nonce), self.name + )) + + +@utils.register_interface(Mode) +@utils.register_interface(ModeWithInitializationVector) +@utils.register_interface(ModeWithAuthenticationTag) +class GCM(object): + name = "GCM" + _MAX_ENCRYPTED_BYTES = (2 ** 39 - 256) // 8 + _MAX_AAD_BYTES = (2 ** 64) // 8 + + def __init__(self, initialization_vector, tag=None, min_tag_length=16): + # len(initialization_vector) must in [1, 2 ** 64), but it's impossible + # to actually construct a bytes object that large, so we don't check + # for it + utils._check_byteslike("initialization_vector", initialization_vector) + if len(initialization_vector) == 0: + raise ValueError("initialization_vector must be at least 1 byte") + self._initialization_vector = initialization_vector + if tag is not None: + utils._check_bytes("tag", tag) + if min_tag_length < 4: + raise ValueError("min_tag_length must be >= 4") + if len(tag) < min_tag_length: + raise ValueError( + "Authentication tag must be {} bytes or longer.".format( + min_tag_length) + ) + self._tag = tag + self._min_tag_length = min_tag_length + + tag = utils.read_only_property("_tag") + initialization_vector = utils.read_only_property("_initialization_vector") + + def validate_for_algorithm(self, algorithm): + _check_aes_key_length(self, algorithm) diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/primitives/cmac.py b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/cmac.py new file mode 100644 index 0000000..95a8d97 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/cmac.py @@ -0,0 +1,64 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
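For the Cipher/modes layer above, a minimal CBC round trip (illustrative, not part of the patch). It assumes the library's usual default_backend() accessor, which is not shown in this hunk, and a plaintext that is an exact multiple of the 16-byte AES block, since no padding is applied here.

import os
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

key, iv = os.urandom(32), os.urandom(16)
cipher = Cipher(algorithms.AES(key), modes.CBC(iv), default_backend())
encryptor = cipher.encryptor()
ct = encryptor.update(b"sixteen byte msg") + encryptor.finalize()
decryptor = cipher.decryptor()
assert decryptor.update(ct) + decryptor.finalize() == b"sixteen byte msg"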
+ +from __future__ import absolute_import, division, print_function + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.backends.interfaces import CMACBackend +from cryptography.hazmat.primitives import ciphers + + +class CMAC(object): + def __init__(self, algorithm, backend, ctx=None): + if not isinstance(backend, CMACBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement CMACBackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + if not isinstance(algorithm, ciphers.BlockCipherAlgorithm): + raise TypeError( + "Expected instance of BlockCipherAlgorithm." + ) + self._algorithm = algorithm + + self._backend = backend + if ctx is None: + self._ctx = self._backend.create_cmac_ctx(self._algorithm) + else: + self._ctx = ctx + + def update(self, data): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + + utils._check_bytes("data", data) + self._ctx.update(data) + + def finalize(self): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + digest = self._ctx.finalize() + self._ctx = None + return digest + + def verify(self, signature): + utils._check_bytes("signature", signature) + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + + ctx, self._ctx = self._ctx, None + ctx.verify(signature) + + def copy(self): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + return CMAC( + self._algorithm, + backend=self._backend, + ctx=self._ctx.copy() + ) diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/primitives/constant_time.py b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/constant_time.py new file mode 100644 index 0000000..35ceafe --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/constant_time.py @@ -0,0 +1,35 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import hmac +import warnings + +from cryptography import utils +from cryptography.hazmat.bindings._constant_time import lib + + +if hasattr(hmac, "compare_digest"): + def bytes_eq(a, b): + if not isinstance(a, bytes) or not isinstance(b, bytes): + raise TypeError("a and b must be bytes.") + + return hmac.compare_digest(a, b) + +else: + warnings.warn( + "Support for your Python version is deprecated. The next version of " + "cryptography will remove support. Please upgrade to a release " + "(2.7.7+) that supports hmac.compare_digest as soon as possible.", + utils.PersistentlyDeprecated2018, + ) + + def bytes_eq(a, b): + if not isinstance(a, bytes) or not isinstance(b, bytes): + raise TypeError("a and b must be bytes.") + + return lib.Cryptography_constant_time_bytes_eq( + a, len(a), b, len(b) + ) == 1 diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/primitives/hashes.py b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/hashes.py new file mode 100644 index 0000000..9be2b60 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/hashes.py @@ -0,0 +1,255 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
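The CMAC primitive added above pairs a block cipher with a backend; a minimal sketch follows (illustrative, default_backend() assumed).

import os
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import cmac
from cryptography.hazmat.primitives.ciphers import algorithms

mac = cmac.CMAC(algorithms.AES(os.urandom(16)), default_backend())
mac.update(b"message to authenticate")
tag = mac.finalize()   # 16-byte AES-CMAC tag; the receiving side calls verify(tag) instead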
+ +from __future__ import absolute_import, division, print_function + +import abc + +import six + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.backends.interfaces import HashBackend + + +@six.add_metaclass(abc.ABCMeta) +class HashAlgorithm(object): + @abc.abstractproperty + def name(self): + """ + A string naming this algorithm (e.g. "sha256", "md5"). + """ + + @abc.abstractproperty + def digest_size(self): + """ + The size of the resulting digest in bytes. + """ + + +@six.add_metaclass(abc.ABCMeta) +class HashContext(object): + @abc.abstractproperty + def algorithm(self): + """ + A HashAlgorithm that will be used by this context. + """ + + @abc.abstractmethod + def update(self, data): + """ + Processes the provided bytes through the hash. + """ + + @abc.abstractmethod + def finalize(self): + """ + Finalizes the hash context and returns the hash digest as bytes. + """ + + @abc.abstractmethod + def copy(self): + """ + Return a HashContext that is a copy of the current context. + """ + + +@six.add_metaclass(abc.ABCMeta) +class ExtendableOutputFunction(object): + """ + An interface for extendable output functions. + """ + + +@utils.register_interface(HashContext) +class Hash(object): + def __init__(self, algorithm, backend, ctx=None): + if not isinstance(backend, HashBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement HashBackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + if not isinstance(algorithm, HashAlgorithm): + raise TypeError("Expected instance of hashes.HashAlgorithm.") + self._algorithm = algorithm + + self._backend = backend + + if ctx is None: + self._ctx = self._backend.create_hash_ctx(self.algorithm) + else: + self._ctx = ctx + + algorithm = utils.read_only_property("_algorithm") + + def update(self, data): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + utils._check_byteslike("data", data) + self._ctx.update(data) + + def copy(self): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + return Hash( + self.algorithm, backend=self._backend, ctx=self._ctx.copy() + ) + + def finalize(self): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + digest = self._ctx.finalize() + self._ctx = None + return digest + + +@utils.register_interface(HashAlgorithm) +class SHA1(object): + name = "sha1" + digest_size = 20 + block_size = 64 + + +@utils.register_interface(HashAlgorithm) +class SHA512_224(object): # noqa: N801 + name = "sha512-224" + digest_size = 28 + block_size = 128 + + +@utils.register_interface(HashAlgorithm) +class SHA512_256(object): # noqa: N801 + name = "sha512-256" + digest_size = 32 + block_size = 128 + + +@utils.register_interface(HashAlgorithm) +class SHA224(object): + name = "sha224" + digest_size = 28 + block_size = 64 + + +@utils.register_interface(HashAlgorithm) +class SHA256(object): + name = "sha256" + digest_size = 32 + block_size = 64 + + +@utils.register_interface(HashAlgorithm) +class SHA384(object): + name = "sha384" + digest_size = 48 + block_size = 128 + + +@utils.register_interface(HashAlgorithm) +class SHA512(object): + name = "sha512" + digest_size = 64 + block_size = 128 + + +@utils.register_interface(HashAlgorithm) +class SHA3_224(object): # noqa: N801 + name = "sha3-224" + digest_size = 28 + + +@utils.register_interface(HashAlgorithm) +class SHA3_256(object): # noqa: N801 + name = "sha3-256" + digest_size = 32 + + 
+@utils.register_interface(HashAlgorithm) +class SHA3_384(object): # noqa: N801 + name = "sha3-384" + digest_size = 48 + + +@utils.register_interface(HashAlgorithm) +class SHA3_512(object): # noqa: N801 + name = "sha3-512" + digest_size = 64 + + +@utils.register_interface(HashAlgorithm) +@utils.register_interface(ExtendableOutputFunction) +class SHAKE128(object): + name = "shake128" + + def __init__(self, digest_size): + if not isinstance(digest_size, six.integer_types): + raise TypeError("digest_size must be an integer") + + if digest_size < 1: + raise ValueError("digest_size must be a positive integer") + + self._digest_size = digest_size + + digest_size = utils.read_only_property("_digest_size") + + +@utils.register_interface(HashAlgorithm) +@utils.register_interface(ExtendableOutputFunction) +class SHAKE256(object): + name = "shake256" + + def __init__(self, digest_size): + if not isinstance(digest_size, six.integer_types): + raise TypeError("digest_size must be an integer") + + if digest_size < 1: + raise ValueError("digest_size must be a positive integer") + + self._digest_size = digest_size + + digest_size = utils.read_only_property("_digest_size") + + +@utils.register_interface(HashAlgorithm) +class MD5(object): + name = "md5" + digest_size = 16 + block_size = 64 + + +@utils.register_interface(HashAlgorithm) +class BLAKE2b(object): + name = "blake2b" + _max_digest_size = 64 + _min_digest_size = 1 + block_size = 128 + + def __init__(self, digest_size): + + if digest_size != 64: + raise ValueError("Digest size must be 64") + + self._digest_size = digest_size + + digest_size = utils.read_only_property("_digest_size") + + +@utils.register_interface(HashAlgorithm) +class BLAKE2s(object): + name = "blake2s" + block_size = 64 + _max_digest_size = 32 + _min_digest_size = 1 + + def __init__(self, digest_size): + + if digest_size != 32: + raise ValueError("Digest size must be 32") + + self._digest_size = digest_size + + digest_size = utils.read_only_property("_digest_size") diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/primitives/hmac.py b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/hmac.py new file mode 100644 index 0000000..9eceeac --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/hmac.py @@ -0,0 +1,66 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
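A usage sketch for the Hash context above, shown with SHA256 (illustrative, default_backend() assumed).

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes

h = hashes.Hash(hashes.SHA256(), default_backend())
h.update(b"wakatime")
digest = h.finalize()   # 32 bytes; further update() calls now raise AlreadyFinalized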
+ +from __future__ import absolute_import, division, print_function + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.backends.interfaces import HMACBackend +from cryptography.hazmat.primitives import hashes + + +@utils.register_interface(hashes.HashContext) +class HMAC(object): + def __init__(self, key, algorithm, backend, ctx=None): + if not isinstance(backend, HMACBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement HMACBackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + if not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError("Expected instance of hashes.HashAlgorithm.") + self._algorithm = algorithm + + self._backend = backend + self._key = key + if ctx is None: + self._ctx = self._backend.create_hmac_ctx(key, self.algorithm) + else: + self._ctx = ctx + + algorithm = utils.read_only_property("_algorithm") + + def update(self, data): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + utils._check_byteslike("data", data) + self._ctx.update(data) + + def copy(self): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + return HMAC( + self._key, + self.algorithm, + backend=self._backend, + ctx=self._ctx.copy() + ) + + def finalize(self): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + digest = self._ctx.finalize() + self._ctx = None + return digest + + def verify(self, signature): + utils._check_bytes("signature", signature) + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + + ctx, self._ctx = self._ctx, None + ctx.verify(signature) diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/primitives/kdf/__init__.py b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/kdf/__init__.py new file mode 100644 index 0000000..2d0724e --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/kdf/__init__.py @@ -0,0 +1,26 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc + +import six + + +@six.add_metaclass(abc.ABCMeta) +class KeyDerivationFunction(object): + @abc.abstractmethod + def derive(self, key_material): + """ + Deterministically generates and returns a new key based on the existing + key material. + """ + + @abc.abstractmethod + def verify(self, key_material, expected_key): + """ + Checks whether the key generated by the key material matches the + expected derived key. Raises an exception if they do not match. + """ diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/primitives/kdf/concatkdf.py b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/kdf/concatkdf.py new file mode 100644 index 0000000..7cb6385 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/kdf/concatkdf.py @@ -0,0 +1,124 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
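The corresponding HMAC sketch (illustrative, default_backend() assumed); verify() consumes the context and raises if the tag does not match.

import os
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, hmac

key = os.urandom(32)
signer = hmac.HMAC(key, hashes.SHA256(), default_backend())
signer.update(b"heartbeat payload")
tag = signer.finalize()

verifier = hmac.HMAC(key, hashes.SHA256(), default_backend())
verifier.update(b"heartbeat payload")
verifier.verify(tag)   # constant-time comparison on the backend context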
+ +from __future__ import absolute_import, division, print_function + +import struct + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.backends.interfaces import HMACBackend +from cryptography.hazmat.backends.interfaces import HashBackend +from cryptography.hazmat.primitives import constant_time, hashes, hmac +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + + +def _int_to_u32be(n): + return struct.pack('>I', n) + + +def _common_args_checks(algorithm, length, otherinfo): + max_length = algorithm.digest_size * (2 ** 32 - 1) + if length > max_length: + raise ValueError( + "Can not derive keys larger than {} bits.".format( + max_length + )) + if otherinfo is not None: + utils._check_bytes("otherinfo", otherinfo) + + +def _concatkdf_derive(key_material, length, auxfn, otherinfo): + utils._check_byteslike("key_material", key_material) + output = [b""] + outlen = 0 + counter = 1 + + while (length > outlen): + h = auxfn() + h.update(_int_to_u32be(counter)) + h.update(key_material) + h.update(otherinfo) + output.append(h.finalize()) + outlen += len(output[-1]) + counter += 1 + + return b"".join(output)[:length] + + +@utils.register_interface(KeyDerivationFunction) +class ConcatKDFHash(object): + def __init__(self, algorithm, length, otherinfo, backend): + + _common_args_checks(algorithm, length, otherinfo) + self._algorithm = algorithm + self._length = length + self._otherinfo = otherinfo + if self._otherinfo is None: + self._otherinfo = b"" + + if not isinstance(backend, HashBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement HashBackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + self._backend = backend + self._used = False + + def _hash(self): + return hashes.Hash(self._algorithm, self._backend) + + def derive(self, key_material): + if self._used: + raise AlreadyFinalized + self._used = True + return _concatkdf_derive(key_material, self._length, + self._hash, self._otherinfo) + + def verify(self, key_material, expected_key): + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey + + +@utils.register_interface(KeyDerivationFunction) +class ConcatKDFHMAC(object): + def __init__(self, algorithm, length, salt, otherinfo, backend): + + _common_args_checks(algorithm, length, otherinfo) + self._algorithm = algorithm + self._length = length + self._otherinfo = otherinfo + if self._otherinfo is None: + self._otherinfo = b"" + + if salt is None: + salt = b"\x00" * algorithm.block_size + else: + utils._check_bytes("salt", salt) + + self._salt = salt + + if not isinstance(backend, HMACBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement HMACBackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + self._backend = backend + self._used = False + + def _hmac(self): + return hmac.HMAC(self._salt, self._algorithm, self._backend) + + def derive(self, key_material): + if self._used: + raise AlreadyFinalized + self._used = True + return _concatkdf_derive(key_material, self._length, + self._hmac, self._otherinfo) + + def verify(self, key_material, expected_key): + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/primitives/kdf/hkdf.py b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/kdf/hkdf.py new file mode 100644 index 0000000..01f0f28 --- /dev/null +++ 
b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/kdf/hkdf.py @@ -0,0 +1,110 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import six + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.backends.interfaces import HMACBackend +from cryptography.hazmat.primitives import constant_time, hmac +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + + +@utils.register_interface(KeyDerivationFunction) +class HKDF(object): + def __init__(self, algorithm, length, salt, info, backend): + if not isinstance(backend, HMACBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement HMACBackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + self._algorithm = algorithm + + if salt is None: + salt = b"\x00" * self._algorithm.digest_size + else: + utils._check_bytes("salt", salt) + + self._salt = salt + + self._backend = backend + + self._hkdf_expand = HKDFExpand(self._algorithm, length, info, backend) + + def _extract(self, key_material): + h = hmac.HMAC(self._salt, self._algorithm, backend=self._backend) + h.update(key_material) + return h.finalize() + + def derive(self, key_material): + utils._check_byteslike("key_material", key_material) + return self._hkdf_expand.derive(self._extract(key_material)) + + def verify(self, key_material, expected_key): + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey + + +@utils.register_interface(KeyDerivationFunction) +class HKDFExpand(object): + def __init__(self, algorithm, length, info, backend): + if not isinstance(backend, HMACBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement HMACBackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + self._algorithm = algorithm + + self._backend = backend + + max_length = 255 * algorithm.digest_size + + if length > max_length: + raise ValueError( + "Can not derive keys larger than {} octets.".format( + max_length + )) + + self._length = length + + if info is None: + info = b"" + else: + utils._check_bytes("info", info) + + self._info = info + + self._used = False + + def _expand(self, key_material): + output = [b""] + counter = 1 + + while self._algorithm.digest_size * (len(output) - 1) < self._length: + h = hmac.HMAC(key_material, self._algorithm, backend=self._backend) + h.update(output[-1]) + h.update(self._info) + h.update(six.int2byte(counter)) + output.append(h.finalize()) + counter += 1 + + return b"".join(output)[:self._length] + + def derive(self, key_material): + utils._check_byteslike("key_material", key_material) + if self._used: + raise AlreadyFinalized + + self._used = True + return self._expand(key_material) + + def verify(self, key_material, expected_key): + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/primitives/kdf/kbkdf.py b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/kdf/kbkdf.py new file mode 100644 index 0000000..56783a8 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/kdf/kbkdf.py @@ -0,0 +1,145 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. 
See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +from enum import Enum + +from six.moves import range + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.backends.interfaces import HMACBackend +from cryptography.hazmat.primitives import constant_time, hashes, hmac +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + + +class Mode(Enum): + CounterMode = "ctr" + + +class CounterLocation(Enum): + BeforeFixed = "before_fixed" + AfterFixed = "after_fixed" + + +@utils.register_interface(KeyDerivationFunction) +class KBKDFHMAC(object): + def __init__(self, algorithm, mode, length, rlen, llen, + location, label, context, fixed, backend): + if not isinstance(backend, HMACBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement HMACBackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + if not isinstance(algorithm, hashes.HashAlgorithm): + raise UnsupportedAlgorithm( + "Algorithm supplied is not a supported hash algorithm.", + _Reasons.UNSUPPORTED_HASH + ) + + if not backend.hmac_supported(algorithm): + raise UnsupportedAlgorithm( + "Algorithm supplied is not a supported hmac algorithm.", + _Reasons.UNSUPPORTED_HASH + ) + + if not isinstance(mode, Mode): + raise TypeError("mode must be of type Mode") + + if not isinstance(location, CounterLocation): + raise TypeError("location must be of type CounterLocation") + + if (label or context) and fixed: + raise ValueError("When supplying fixed data, " + "label and context are ignored.") + + if rlen is None or not self._valid_byte_length(rlen): + raise ValueError("rlen must be between 1 and 4") + + if llen is None and fixed is None: + raise ValueError("Please specify an llen") + + if llen is not None and not isinstance(llen, int): + raise TypeError("llen must be an integer") + + if label is None: + label = b'' + + if context is None: + context = b'' + + utils._check_bytes("label", label) + utils._check_bytes("context", context) + self._algorithm = algorithm + self._mode = mode + self._length = length + self._rlen = rlen + self._llen = llen + self._location = location + self._label = label + self._context = context + self._backend = backend + self._used = False + self._fixed_data = fixed + + def _valid_byte_length(self, value): + if not isinstance(value, int): + raise TypeError('value must be of type int') + + value_bin = utils.int_to_bytes(1, value) + if not 1 <= len(value_bin) <= 4: + return False + return True + + def derive(self, key_material): + if self._used: + raise AlreadyFinalized + + utils._check_byteslike("key_material", key_material) + self._used = True + + # inverse floor division (equivalent to ceiling) + rounds = -(-self._length // self._algorithm.digest_size) + + output = [b''] + + # For counter mode, the number of iterations shall not be + # larger than 2^r-1, where r <= 32 is the binary length of the counter + # This ensures that the counter values used as an input to the + # PRF will not repeat during a particular call to the KDF function. 
+ r_bin = utils.int_to_bytes(1, self._rlen) + if rounds > pow(2, len(r_bin) * 8) - 1: + raise ValueError('There are too many iterations.') + + for i in range(1, rounds + 1): + h = hmac.HMAC(key_material, self._algorithm, backend=self._backend) + + counter = utils.int_to_bytes(i, self._rlen) + if self._location == CounterLocation.BeforeFixed: + h.update(counter) + + h.update(self._generate_fixed_input()) + + if self._location == CounterLocation.AfterFixed: + h.update(counter) + + output.append(h.finalize()) + + return b''.join(output)[:self._length] + + def _generate_fixed_input(self): + if self._fixed_data and isinstance(self._fixed_data, bytes): + return self._fixed_data + + l_val = utils.int_to_bytes(self._length * 8, self._llen) + + return b"".join([self._label, b"\x00", self._context, l_val]) + + def verify(self, key_material, expected_key): + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/primitives/kdf/pbkdf2.py b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/kdf/pbkdf2.py new file mode 100644 index 0000000..07d8ac6 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/kdf/pbkdf2.py @@ -0,0 +1,56 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.backends.interfaces import PBKDF2HMACBackend +from cryptography.hazmat.primitives import constant_time +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + + +@utils.register_interface(KeyDerivationFunction) +class PBKDF2HMAC(object): + def __init__(self, algorithm, length, salt, iterations, backend): + if not isinstance(backend, PBKDF2HMACBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement PBKDF2HMACBackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + if not backend.pbkdf2_hmac_supported(algorithm): + raise UnsupportedAlgorithm( + "{} is not supported for PBKDF2 by this backend.".format( + algorithm.name), + _Reasons.UNSUPPORTED_HASH + ) + self._used = False + self._algorithm = algorithm + self._length = length + utils._check_bytes("salt", salt) + self._salt = salt + self._iterations = iterations + self._backend = backend + + def derive(self, key_material): + if self._used: + raise AlreadyFinalized("PBKDF2 instances can only be used once.") + self._used = True + + utils._check_byteslike("key_material", key_material) + return self._backend.derive_pbkdf2_hmac( + self._algorithm, + self._length, + self._salt, + self._iterations, + key_material + ) + + def verify(self, key_material, expected_key): + derived_key = self.derive(key_material) + if not constant_time.bytes_eq(derived_key, expected_key): + raise InvalidKey("Keys do not match.") diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/primitives/kdf/scrypt.py b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/kdf/scrypt.py new file mode 100644 index 0000000..df9745e --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/kdf/scrypt.py @@ -0,0 +1,63 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. 
See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import sys + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.backends.interfaces import ScryptBackend +from cryptography.hazmat.primitives import constant_time +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + + +# This is used by the scrypt tests to skip tests that require more memory +# than the MEM_LIMIT +_MEM_LIMIT = sys.maxsize // 2 + + +@utils.register_interface(KeyDerivationFunction) +class Scrypt(object): + def __init__(self, salt, length, n, r, p, backend): + if not isinstance(backend, ScryptBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement ScryptBackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + self._length = length + utils._check_bytes("salt", salt) + if n < 2 or (n & (n - 1)) != 0: + raise ValueError("n must be greater than 1 and be a power of 2.") + + if r < 1: + raise ValueError("r must be greater than or equal to 1.") + + if p < 1: + raise ValueError("p must be greater than or equal to 1.") + + self._used = False + self._salt = salt + self._n = n + self._r = r + self._p = p + self._backend = backend + + def derive(self, key_material): + if self._used: + raise AlreadyFinalized("Scrypt instances can only be used once.") + self._used = True + + utils._check_byteslike("key_material", key_material) + return self._backend.derive_scrypt( + key_material, self._salt, self._length, self._n, self._r, self._p + ) + + def verify(self, key_material, expected_key): + derived_key = self.derive(key_material) + if not constant_time.bytes_eq(derived_key, expected_key): + raise InvalidKey("Keys do not match.") diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/primitives/kdf/x963kdf.py b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/kdf/x963kdf.py new file mode 100644 index 0000000..9eb50b0 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/kdf/x963kdf.py @@ -0,0 +1,68 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
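Editorial note, not part of the wakatime patch: a minimal sketch of how the Scrypt class vendored just above is typically driven. The parameter values are illustrative assumptions, and default_backend() (from cryptography.hazmat.backends, not shown in this diff) is assumed to be importable and built against an OpenSSL with scrypt support.

    import os
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives.kdf.scrypt import Scrypt

    salt = os.urandom(16)
    kdf = Scrypt(salt=salt, length=32, n=2**14, r=8, p=1,
                 backend=default_backend())
    key = kdf.derive(b"my great password")   # 32-byte derived key
    # verify() re-derives and compares in constant time, raising InvalidKey
    # on mismatch.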
+ +from __future__ import absolute_import, division, print_function + +import struct + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.backends.interfaces import HashBackend +from cryptography.hazmat.primitives import constant_time, hashes +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + + +def _int_to_u32be(n): + return struct.pack('>I', n) + + +@utils.register_interface(KeyDerivationFunction) +class X963KDF(object): + def __init__(self, algorithm, length, sharedinfo, backend): + + max_len = algorithm.digest_size * (2 ** 32 - 1) + if length > max_len: + raise ValueError( + "Can not derive keys larger than {} bits.".format(max_len)) + if sharedinfo is not None: + utils._check_bytes("sharedinfo", sharedinfo) + + self._algorithm = algorithm + self._length = length + self._sharedinfo = sharedinfo + + if not isinstance(backend, HashBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement HashBackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + self._backend = backend + self._used = False + + def derive(self, key_material): + if self._used: + raise AlreadyFinalized + self._used = True + utils._check_byteslike("key_material", key_material) + output = [b""] + outlen = 0 + counter = 1 + + while self._length > outlen: + h = hashes.Hash(self._algorithm, self._backend) + h.update(key_material) + h.update(_int_to_u32be(counter)) + if self._sharedinfo is not None: + h.update(self._sharedinfo) + output.append(h.finalize()) + outlen += len(output[-1]) + counter += 1 + + return b"".join(output)[:self._length] + + def verify(self, key_material, expected_key): + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/primitives/keywrap.py b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/keywrap.py new file mode 100644 index 0000000..f55c519 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/keywrap.py @@ -0,0 +1,154 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
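Editorial note, not part of the patch: the X963KDF class added above implements the ANSI X9.63 KDF, which is normally fed the shared secret from an (EC)DH exchange. A minimal sketch, using an os.urandom() stand-in for that secret and assuming default_backend() is available:

    import os
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.kdf.x963kdf import X963KDF

    shared_secret = os.urandom(32)          # stand-in for an ECDH shared key
    xkdf = X963KDF(algorithm=hashes.SHA256(), length=32,
                   sharedinfo=b"example shared info",
                   backend=default_backend())
    derived = xkdf.derive(shared_secret)    # 32 bytes of key material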
+ +from __future__ import absolute_import, division, print_function + +import struct + +from cryptography.hazmat.primitives.ciphers import Cipher +from cryptography.hazmat.primitives.ciphers.algorithms import AES +from cryptography.hazmat.primitives.ciphers.modes import ECB +from cryptography.hazmat.primitives.constant_time import bytes_eq + + +def _wrap_core(wrapping_key, a, r, backend): + # RFC 3394 Key Wrap - 2.2.1 (index method) + encryptor = Cipher(AES(wrapping_key), ECB(), backend).encryptor() + n = len(r) + for j in range(6): + for i in range(n): + # every encryption operation is a discrete 16 byte chunk (because + # AES has a 128-bit block size) and since we're using ECB it is + # safe to reuse the encryptor for the entire operation + b = encryptor.update(a + r[i]) + # pack/unpack are safe as these are always 64-bit chunks + a = struct.pack( + ">Q", struct.unpack(">Q", b[:8])[0] ^ ((n * j) + i + 1) + ) + r[i] = b[-8:] + + assert encryptor.finalize() == b"" + + return a + b"".join(r) + + +def aes_key_wrap(wrapping_key, key_to_wrap, backend): + if len(wrapping_key) not in [16, 24, 32]: + raise ValueError("The wrapping key must be a valid AES key length") + + if len(key_to_wrap) < 16: + raise ValueError("The key to wrap must be at least 16 bytes") + + if len(key_to_wrap) % 8 != 0: + raise ValueError("The key to wrap must be a multiple of 8 bytes") + + a = b"\xa6\xa6\xa6\xa6\xa6\xa6\xa6\xa6" + r = [key_to_wrap[i:i + 8] for i in range(0, len(key_to_wrap), 8)] + return _wrap_core(wrapping_key, a, r, backend) + + +def _unwrap_core(wrapping_key, a, r, backend): + # Implement RFC 3394 Key Unwrap - 2.2.2 (index method) + decryptor = Cipher(AES(wrapping_key), ECB(), backend).decryptor() + n = len(r) + for j in reversed(range(6)): + for i in reversed(range(n)): + # pack/unpack are safe as these are always 64-bit chunks + atr = struct.pack( + ">Q", struct.unpack(">Q", a)[0] ^ ((n * j) + i + 1) + ) + r[i] + # every decryption operation is a discrete 16 byte chunk so + # it is safe to reuse the decryptor for the entire operation + b = decryptor.update(atr) + a = b[:8] + r[i] = b[-8:] + + assert decryptor.finalize() == b"" + return a, r + + +def aes_key_wrap_with_padding(wrapping_key, key_to_wrap, backend): + if len(wrapping_key) not in [16, 24, 32]: + raise ValueError("The wrapping key must be a valid AES key length") + + aiv = b"\xA6\x59\x59\xA6" + struct.pack(">i", len(key_to_wrap)) + # pad the key to wrap if necessary + pad = (8 - (len(key_to_wrap) % 8)) % 8 + key_to_wrap = key_to_wrap + b"\x00" * pad + if len(key_to_wrap) == 8: + # RFC 5649 - 4.1 - exactly 8 octets after padding + encryptor = Cipher(AES(wrapping_key), ECB(), backend).encryptor() + b = encryptor.update(aiv + key_to_wrap) + assert encryptor.finalize() == b"" + return b + else: + r = [key_to_wrap[i:i + 8] for i in range(0, len(key_to_wrap), 8)] + return _wrap_core(wrapping_key, aiv, r, backend) + + +def aes_key_unwrap_with_padding(wrapping_key, wrapped_key, backend): + if len(wrapped_key) < 16: + raise InvalidUnwrap("Must be at least 16 bytes") + + if len(wrapping_key) not in [16, 24, 32]: + raise ValueError("The wrapping key must be a valid AES key length") + + if len(wrapped_key) == 16: + # RFC 5649 - 4.2 - exactly two 64-bit blocks + decryptor = Cipher(AES(wrapping_key), ECB(), backend).decryptor() + b = decryptor.update(wrapped_key) + assert decryptor.finalize() == b"" + a = b[:8] + data = b[8:] + n = 1 + else: + r = [wrapped_key[i:i + 8] for i in range(0, len(wrapped_key), 8)] + encrypted_aiv = r.pop(0) + n = len(r) + a, r 
= _unwrap_core(wrapping_key, encrypted_aiv, r, backend) + data = b"".join(r) + + # 1) Check that MSB(32,A) = A65959A6. + # 2) Check that 8*(n-1) < LSB(32,A) <= 8*n. If so, let + # MLI = LSB(32,A). + # 3) Let b = (8*n)-MLI, and then check that the rightmost b octets of + # the output data are zero. + (mli,) = struct.unpack(">I", a[4:]) + b = (8 * n) - mli + if ( + not bytes_eq(a[:4], b"\xa6\x59\x59\xa6") or not + 8 * (n - 1) < mli <= 8 * n or ( + b != 0 and not bytes_eq(data[-b:], b"\x00" * b) + ) + ): + raise InvalidUnwrap() + + if b == 0: + return data + else: + return data[:-b] + + +def aes_key_unwrap(wrapping_key, wrapped_key, backend): + if len(wrapped_key) < 24: + raise InvalidUnwrap("Must be at least 24 bytes") + + if len(wrapped_key) % 8 != 0: + raise InvalidUnwrap("The wrapped key must be a multiple of 8 bytes") + + if len(wrapping_key) not in [16, 24, 32]: + raise ValueError("The wrapping key must be a valid AES key length") + + aiv = b"\xa6\xa6\xa6\xa6\xa6\xa6\xa6\xa6" + r = [wrapped_key[i:i + 8] for i in range(0, len(wrapped_key), 8)] + a = r.pop(0) + a, r = _unwrap_core(wrapping_key, a, r, backend) + if not bytes_eq(a, aiv): + raise InvalidUnwrap() + + return b"".join(r) + + +class InvalidUnwrap(Exception): + pass diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/primitives/padding.py b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/padding.py new file mode 100644 index 0000000..170c802 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/padding.py @@ -0,0 +1,200 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc + +import six + +from cryptography import utils +from cryptography.exceptions import AlreadyFinalized +from cryptography.hazmat.bindings._padding import lib + + +@six.add_metaclass(abc.ABCMeta) +class PaddingContext(object): + @abc.abstractmethod + def update(self, data): + """ + Pads the provided bytes and returns any available data as bytes. + """ + + @abc.abstractmethod + def finalize(self): + """ + Finalize the padding, returns bytes. 
+ """ + + +def _byte_padding_check(block_size): + if not (0 <= block_size <= 2040): + raise ValueError("block_size must be in range(0, 2041).") + + if block_size % 8 != 0: + raise ValueError("block_size must be a multiple of 8.") + + +def _byte_padding_update(buffer_, data, block_size): + if buffer_ is None: + raise AlreadyFinalized("Context was already finalized.") + + utils._check_bytes("data", data) + + buffer_ += data + + finished_blocks = len(buffer_) // (block_size // 8) + + result = buffer_[:finished_blocks * (block_size // 8)] + buffer_ = buffer_[finished_blocks * (block_size // 8):] + + return buffer_, result + + +def _byte_padding_pad(buffer_, block_size, paddingfn): + if buffer_ is None: + raise AlreadyFinalized("Context was already finalized.") + + pad_size = block_size // 8 - len(buffer_) + return buffer_ + paddingfn(pad_size) + + +def _byte_unpadding_update(buffer_, data, block_size): + if buffer_ is None: + raise AlreadyFinalized("Context was already finalized.") + + utils._check_bytes("data", data) + + buffer_ += data + + finished_blocks = max(len(buffer_) // (block_size // 8) - 1, 0) + + result = buffer_[:finished_blocks * (block_size // 8)] + buffer_ = buffer_[finished_blocks * (block_size // 8):] + + return buffer_, result + + +def _byte_unpadding_check(buffer_, block_size, checkfn): + if buffer_ is None: + raise AlreadyFinalized("Context was already finalized.") + + if len(buffer_) != block_size // 8: + raise ValueError("Invalid padding bytes.") + + valid = checkfn(buffer_, block_size // 8) + + if not valid: + raise ValueError("Invalid padding bytes.") + + pad_size = six.indexbytes(buffer_, -1) + return buffer_[:-pad_size] + + +class PKCS7(object): + def __init__(self, block_size): + _byte_padding_check(block_size) + self.block_size = block_size + + def padder(self): + return _PKCS7PaddingContext(self.block_size) + + def unpadder(self): + return _PKCS7UnpaddingContext(self.block_size) + + +@utils.register_interface(PaddingContext) +class _PKCS7PaddingContext(object): + def __init__(self, block_size): + self.block_size = block_size + # TODO: more copies than necessary, we should use zero-buffer (#193) + self._buffer = b"" + + def update(self, data): + self._buffer, result = _byte_padding_update( + self._buffer, data, self.block_size) + return result + + def _padding(self, size): + return six.int2byte(size) * size + + def finalize(self): + result = _byte_padding_pad( + self._buffer, self.block_size, self._padding) + self._buffer = None + return result + + +@utils.register_interface(PaddingContext) +class _PKCS7UnpaddingContext(object): + def __init__(self, block_size): + self.block_size = block_size + # TODO: more copies than necessary, we should use zero-buffer (#193) + self._buffer = b"" + + def update(self, data): + self._buffer, result = _byte_unpadding_update( + self._buffer, data, self.block_size) + return result + + def finalize(self): + result = _byte_unpadding_check( + self._buffer, self.block_size, + lib.Cryptography_check_pkcs7_padding) + self._buffer = None + return result + + +class ANSIX923(object): + def __init__(self, block_size): + _byte_padding_check(block_size) + self.block_size = block_size + + def padder(self): + return _ANSIX923PaddingContext(self.block_size) + + def unpadder(self): + return _ANSIX923UnpaddingContext(self.block_size) + + +@utils.register_interface(PaddingContext) +class _ANSIX923PaddingContext(object): + def __init__(self, block_size): + self.block_size = block_size + # TODO: more copies than necessary, we should use zero-buffer 
(#193) + self._buffer = b"" + + def update(self, data): + self._buffer, result = _byte_padding_update( + self._buffer, data, self.block_size) + return result + + def _padding(self, size): + return six.int2byte(0) * (size - 1) + six.int2byte(size) + + def finalize(self): + result = _byte_padding_pad( + self._buffer, self.block_size, self._padding) + self._buffer = None + return result + + +@utils.register_interface(PaddingContext) +class _ANSIX923UnpaddingContext(object): + def __init__(self, block_size): + self.block_size = block_size + # TODO: more copies than necessary, we should use zero-buffer (#193) + self._buffer = b"" + + def update(self, data): + self._buffer, result = _byte_unpadding_update( + self._buffer, data, self.block_size) + return result + + def finalize(self): + result = _byte_unpadding_check( + self._buffer, self.block_size, + lib.Cryptography_check_ansix923_padding) + self._buffer = None + return result diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/primitives/poly1305.py b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/poly1305.py new file mode 100644 index 0000000..d92f62a --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/poly1305.py @@ -0,0 +1,55 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, UnsupportedAlgorithm, _Reasons +) + + +class Poly1305(object): + def __init__(self, key): + from cryptography.hazmat.backends.openssl.backend import backend + if not backend.poly1305_supported(): + raise UnsupportedAlgorithm( + "poly1305 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_MAC + ) + self._ctx = backend.create_poly1305_ctx(key) + + def update(self, data): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + utils._check_byteslike("data", data) + self._ctx.update(data) + + def finalize(self): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + mac = self._ctx.finalize() + self._ctx = None + return mac + + def verify(self, tag): + utils._check_bytes("tag", tag) + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + + ctx, self._ctx = self._ctx, None + ctx.verify(tag) + + @classmethod + def generate_tag(cls, key, data): + p = Poly1305(key) + p.update(data) + return p.finalize() + + @classmethod + def verify_tag(cls, key, data, tag): + p = Poly1305(key) + p.update(data) + p.verify(tag) diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/primitives/serialization/__init__.py b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/serialization/__init__.py new file mode 100644 index 0000000..f6d4ce9 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/serialization/__init__.py @@ -0,0 +1,26 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
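Editorial note, not part of the patch: a short sketch of the PKCS7 padder/unpadder pair defined above, padding a message out to the 128-bit AES block size and recovering it again. It assumes the compiled _padding bindings that ship with the cryptography package are present.

    from cryptography.hazmat.primitives import padding

    padder = padding.PKCS7(128).padder()            # block size given in bits
    padded = padder.update(b"secret message") + padder.finalize()
    assert len(padded) % 16 == 0

    unpadder = padding.PKCS7(128).unpadder()
    plaintext = unpadder.update(padded) + unpadder.finalize()
    assert plaintext == b"secret message"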
+ +from __future__ import absolute_import, division, print_function + +from cryptography.hazmat.primitives.serialization.base import ( + BestAvailableEncryption, Encoding, KeySerializationEncryption, + NoEncryption, ParameterFormat, PrivateFormat, PublicFormat, + load_der_parameters, load_der_private_key, load_der_public_key, + load_pem_parameters, load_pem_private_key, load_pem_public_key, +) +from cryptography.hazmat.primitives.serialization.ssh import ( + load_ssh_public_key +) + + +_PEM_DER = (Encoding.PEM, Encoding.DER) + +__all__ = [ + "load_der_parameters", "load_der_private_key", "load_der_public_key", + "load_pem_parameters", "load_pem_private_key", "load_pem_public_key", + "load_ssh_public_key", "Encoding", "PrivateFormat", "PublicFormat", + "ParameterFormat", "KeySerializationEncryption", "BestAvailableEncryption", + "NoEncryption", +] diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/primitives/serialization/base.py b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/serialization/base.py new file mode 100644 index 0000000..4218ea8 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/serialization/base.py @@ -0,0 +1,82 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc +from enum import Enum + +import six + +from cryptography import utils + + +def load_pem_private_key(data, password, backend): + return backend.load_pem_private_key(data, password) + + +def load_pem_public_key(data, backend): + return backend.load_pem_public_key(data) + + +def load_pem_parameters(data, backend): + return backend.load_pem_parameters(data) + + +def load_der_private_key(data, password, backend): + return backend.load_der_private_key(data, password) + + +def load_der_public_key(data, backend): + return backend.load_der_public_key(data) + + +def load_der_parameters(data, backend): + return backend.load_der_parameters(data) + + +class Encoding(Enum): + PEM = "PEM" + DER = "DER" + OpenSSH = "OpenSSH" + Raw = "Raw" + X962 = "ANSI X9.62" + + +class PrivateFormat(Enum): + PKCS8 = "PKCS8" + TraditionalOpenSSL = "TraditionalOpenSSL" + Raw = "Raw" + + +class PublicFormat(Enum): + SubjectPublicKeyInfo = "X.509 subjectPublicKeyInfo with PKCS#1" + PKCS1 = "Raw PKCS#1" + OpenSSH = "OpenSSH" + Raw = "Raw" + CompressedPoint = "X9.62 Compressed Point" + UncompressedPoint = "X9.62 Uncompressed Point" + + +class ParameterFormat(Enum): + PKCS3 = "PKCS3" + + +@six.add_metaclass(abc.ABCMeta) +class KeySerializationEncryption(object): + pass + + +@utils.register_interface(KeySerializationEncryption) +class BestAvailableEncryption(object): + def __init__(self, password): + if not isinstance(password, bytes) or len(password) == 0: + raise ValueError("Password must be 1 or more bytes.") + + self.password = password + + +@utils.register_interface(KeySerializationEncryption) +class NoEncryption(object): + pass diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/primitives/serialization/pkcs12.py b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/serialization/pkcs12.py new file mode 100644 index 0000000..98161d5 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/serialization/pkcs12.py @@ -0,0 +1,9 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD 
License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + + +def load_key_and_certificates(data, password, backend): + return backend.load_key_and_certificates_from_pkcs12(data, password) diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/primitives/serialization/ssh.py b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/serialization/ssh.py new file mode 100644 index 0000000..a1d6c8c --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/serialization/ssh.py @@ -0,0 +1,153 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import base64 +import struct + +import six + +from cryptography import utils +from cryptography.exceptions import UnsupportedAlgorithm +from cryptography.hazmat.primitives.asymmetric import dsa, ec, ed25519, rsa + + +def load_ssh_public_key(data, backend): + key_parts = data.split(b' ', 2) + + if len(key_parts) < 2: + raise ValueError( + 'Key is not in the proper format or contains extra data.') + + key_type = key_parts[0] + + if key_type == b'ssh-rsa': + loader = _load_ssh_rsa_public_key + elif key_type == b'ssh-dss': + loader = _load_ssh_dss_public_key + elif key_type in [ + b'ecdsa-sha2-nistp256', b'ecdsa-sha2-nistp384', b'ecdsa-sha2-nistp521', + ]: + loader = _load_ssh_ecdsa_public_key + elif key_type == b'ssh-ed25519': + loader = _load_ssh_ed25519_public_key + else: + raise UnsupportedAlgorithm('Key type is not supported.') + + key_body = key_parts[1] + + try: + decoded_data = base64.b64decode(key_body) + except TypeError: + raise ValueError('Key is not in the proper format.') + + inner_key_type, rest = _ssh_read_next_string(decoded_data) + + if inner_key_type != key_type: + raise ValueError( + 'Key header and key body contain different key type values.' + ) + + return loader(key_type, rest, backend) + + +def _load_ssh_rsa_public_key(key_type, decoded_data, backend): + e, rest = _ssh_read_next_mpint(decoded_data) + n, rest = _ssh_read_next_mpint(rest) + + if rest: + raise ValueError('Key body contains extra bytes.') + + return rsa.RSAPublicNumbers(e, n).public_key(backend) + + +def _load_ssh_dss_public_key(key_type, decoded_data, backend): + p, rest = _ssh_read_next_mpint(decoded_data) + q, rest = _ssh_read_next_mpint(rest) + g, rest = _ssh_read_next_mpint(rest) + y, rest = _ssh_read_next_mpint(rest) + + if rest: + raise ValueError('Key body contains extra bytes.') + + parameter_numbers = dsa.DSAParameterNumbers(p, q, g) + public_numbers = dsa.DSAPublicNumbers(y, parameter_numbers) + + return public_numbers.public_key(backend) + + +def _load_ssh_ecdsa_public_key(expected_key_type, decoded_data, backend): + curve_name, rest = _ssh_read_next_string(decoded_data) + data, rest = _ssh_read_next_string(rest) + + if expected_key_type != b"ecdsa-sha2-" + curve_name: + raise ValueError( + 'Key header and key body contain different key type values.' 
+ ) + + if rest: + raise ValueError('Key body contains extra bytes.') + + curve = { + b"nistp256": ec.SECP256R1, + b"nistp384": ec.SECP384R1, + b"nistp521": ec.SECP521R1, + }[curve_name]() + + if six.indexbytes(data, 0) != 4: + raise NotImplementedError( + "Compressed elliptic curve points are not supported" + ) + + return ec.EllipticCurvePublicKey.from_encoded_point(curve, data) + + +def _load_ssh_ed25519_public_key(expected_key_type, decoded_data, backend): + data, rest = _ssh_read_next_string(decoded_data) + + if rest: + raise ValueError('Key body contains extra bytes.') + + return ed25519.Ed25519PublicKey.from_public_bytes(data) + + +def _ssh_read_next_string(data): + """ + Retrieves the next RFC 4251 string value from the data. + + While the RFC calls these strings, in Python they are bytes objects. + """ + if len(data) < 4: + raise ValueError("Key is not in the proper format") + + str_len, = struct.unpack('>I', data[:4]) + if len(data) < str_len + 4: + raise ValueError("Key is not in the proper format") + + return data[4:4 + str_len], data[4 + str_len:] + + +def _ssh_read_next_mpint(data): + """ + Reads the next mpint from the data. + + Currently, all mpints are interpreted as unsigned. + """ + mpint_data, rest = _ssh_read_next_string(data) + + return ( + utils.int_from_bytes(mpint_data, byteorder='big', signed=False), rest + ) + + +def _ssh_write_string(data): + return struct.pack(">I", len(data)) + data + + +def _ssh_write_mpint(value): + data = utils.int_to_bytes(value) + if six.indexbytes(data, 0) & 0x80: + data = b"\x00" + data + return _ssh_write_string(data) diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/primitives/twofactor/__init__.py b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/twofactor/__init__.py new file mode 100644 index 0000000..e71f9e6 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/twofactor/__init__.py @@ -0,0 +1,9 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + + +class InvalidToken(Exception): + pass diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/primitives/twofactor/hotp.py b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/twofactor/hotp.py new file mode 100644 index 0000000..4ad1bdc --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/twofactor/hotp.py @@ -0,0 +1,68 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
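Editorial note, not part of the patch: load_ssh_public_key() above parses the single-line OpenSSH format ("ssh-rsa AAAA... comment"). A minimal sketch that reads a key from disk; the file path is hypothetical and default_backend() is assumed available.

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives.serialization import load_ssh_public_key

    with open("/home/user/.ssh/id_ed25519.pub", "rb") as f:  # hypothetical path
        public_key = load_ssh_public_key(f.read(), default_backend())
    # public_key is an Ed25519PublicKey, RSAPublicKey, DSAPublicKey or
    # EllipticCurvePublicKey, depending on the key type prefix in the file.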
+ +from __future__ import absolute_import, division, print_function + +import struct + +import six + +from cryptography.exceptions import ( + UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.backends.interfaces import HMACBackend +from cryptography.hazmat.primitives import constant_time, hmac +from cryptography.hazmat.primitives.hashes import SHA1, SHA256, SHA512 +from cryptography.hazmat.primitives.twofactor import InvalidToken +from cryptography.hazmat.primitives.twofactor.utils import _generate_uri + + +class HOTP(object): + def __init__(self, key, length, algorithm, backend, + enforce_key_length=True): + if not isinstance(backend, HMACBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement HMACBackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + if len(key) < 16 and enforce_key_length is True: + raise ValueError("Key length has to be at least 128 bits.") + + if not isinstance(length, six.integer_types): + raise TypeError("Length parameter must be an integer type.") + + if length < 6 or length > 8: + raise ValueError("Length of HOTP has to be between 6 to 8.") + + if not isinstance(algorithm, (SHA1, SHA256, SHA512)): + raise TypeError("Algorithm must be SHA1, SHA256 or SHA512.") + + self._key = key + self._length = length + self._algorithm = algorithm + self._backend = backend + + def generate(self, counter): + truncated_value = self._dynamic_truncate(counter) + hotp = truncated_value % (10 ** self._length) + return "{0:0{1}}".format(hotp, self._length).encode() + + def verify(self, hotp, counter): + if not constant_time.bytes_eq(self.generate(counter), hotp): + raise InvalidToken("Supplied HOTP value does not match.") + + def _dynamic_truncate(self, counter): + ctx = hmac.HMAC(self._key, self._algorithm, self._backend) + ctx.update(struct.pack(">Q", counter)) + hmac_value = ctx.finalize() + + offset = six.indexbytes(hmac_value, len(hmac_value) - 1) & 0b1111 + p = hmac_value[offset:offset + 4] + return struct.unpack(">I", p)[0] & 0x7fffffff + + def get_provisioning_uri(self, account_name, counter, issuer): + return _generate_uri(self, "hotp", account_name, issuer, [ + ("counter", int(counter)), + ]) diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/primitives/twofactor/totp.py b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/twofactor/totp.py new file mode 100644 index 0000000..499f282 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/twofactor/totp.py @@ -0,0 +1,40 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
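Editorial note, not part of the patch: the HOTP class above is the RFC 4226 HMAC-based OTP, and the published test vectors make a convenient smoke test. The RFC 4226 secret is the ASCII string "12345678901234567890"; default_backend() is assumed available.

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives.hashes import SHA1
    from cryptography.hazmat.primitives.twofactor.hotp import HOTP

    key = b"12345678901234567890"              # RFC 4226 test secret
    hotp = HOTP(key, 6, SHA1(), default_backend())
    assert hotp.generate(0) == b"755224"       # first RFC 4226 test vector
    hotp.verify(b"287082", 1)                  # raises InvalidToken on mismatch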
+ +from __future__ import absolute_import, division, print_function + +from cryptography.exceptions import ( + UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.backends.interfaces import HMACBackend +from cryptography.hazmat.primitives import constant_time +from cryptography.hazmat.primitives.twofactor import InvalidToken +from cryptography.hazmat.primitives.twofactor.hotp import HOTP +from cryptography.hazmat.primitives.twofactor.utils import _generate_uri + + +class TOTP(object): + def __init__(self, key, length, algorithm, time_step, backend, + enforce_key_length=True): + if not isinstance(backend, HMACBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement HMACBackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + self._time_step = time_step + self._hotp = HOTP(key, length, algorithm, backend, enforce_key_length) + + def generate(self, time): + counter = int(time / self._time_step) + return self._hotp.generate(counter) + + def verify(self, totp, time): + if not constant_time.bytes_eq(self.generate(time), totp): + raise InvalidToken("Supplied TOTP value does not match.") + + def get_provisioning_uri(self, account_name, issuer): + return _generate_uri(self._hotp, "totp", account_name, issuer, [ + ("period", int(self._time_step)), + ]) diff --git a/packages/wakatime/packages/py27/cryptography/hazmat/primitives/twofactor/utils.py b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/twofactor/utils.py new file mode 100644 index 0000000..0ed8c4c --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/hazmat/primitives/twofactor/utils.py @@ -0,0 +1,30 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import base64 + +from six.moves.urllib.parse import quote, urlencode + + +def _generate_uri(hotp, type_name, account_name, issuer, extra_parameters): + parameters = [ + ("digits", hotp._length), + ("secret", base64.b32encode(hotp._key)), + ("algorithm", hotp._algorithm.name.upper()), + ] + + if issuer is not None: + parameters.append(("issuer", issuer)) + + parameters.extend(extra_parameters) + + uriparts = { + "type": type_name, + "label": ("%s:%s" % (quote(issuer), quote(account_name)) if issuer + else quote(account_name)), + "parameters": urlencode(parameters), + } + return "otpauth://{type}/{label}?{parameters}".format(**uriparts) diff --git a/packages/wakatime/packages/py27/cryptography/utils.py b/packages/wakatime/packages/py27/cryptography/utils.py new file mode 100644 index 0000000..0b36f63 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/utils.py @@ -0,0 +1,173 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc +import binascii +import inspect +import sys +import warnings + + +# We use a UserWarning subclass, instead of DeprecationWarning, because CPython +# decided deprecation warnings should be invisble by default. +class CryptographyDeprecationWarning(UserWarning): + pass + + +# Several APIs were deprecated with no specific end-of-life date because of the +# ubiquity of their use. They should not be removed until we agree on when that +# cycle ends. 
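Editorial note, not part of the patch: TOTP (added just above) simply feeds int(time / time_step) into HOTP as the counter, so the RFC 6238 test vectors follow directly from the RFC 4226 ones. A sketch using the shared SHA-1 test secret, a 30-second step and default_backend():

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives.hashes import SHA1
    from cryptography.hazmat.primitives.twofactor.totp import TOTP

    key = b"12345678901234567890"              # RFC 6238 SHA-1 test secret
    totp = TOTP(key, 8, SHA1(), 30, default_backend())
    assert totp.generate(59) == b"94287082"    # RFC 6238 test vector at T=59
    totp.verify(b"94287082", 59)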
+PersistentlyDeprecated2017 = CryptographyDeprecationWarning +PersistentlyDeprecated2018 = CryptographyDeprecationWarning +DeprecatedIn25 = CryptographyDeprecationWarning +DeprecatedIn27 = CryptographyDeprecationWarning + + +def _check_bytes(name, value): + if not isinstance(value, bytes): + raise TypeError("{} must be bytes".format(name)) + + +def _check_byteslike(name, value): + try: + memoryview(value) + except TypeError: + raise TypeError("{} must be bytes-like".format(name)) + + +def read_only_property(name): + return property(lambda self: getattr(self, name)) + + +def register_interface(iface): + def register_decorator(klass): + verify_interface(iface, klass) + iface.register(klass) + return klass + return register_decorator + + +def register_interface_if(predicate, iface): + def register_decorator(klass): + if predicate: + verify_interface(iface, klass) + iface.register(klass) + return klass + return register_decorator + + +if hasattr(int, "from_bytes"): + int_from_bytes = int.from_bytes +else: + def int_from_bytes(data, byteorder, signed=False): + assert byteorder == 'big' + assert not signed + + return int(binascii.hexlify(data), 16) + + +if hasattr(int, "to_bytes"): + def int_to_bytes(integer, length=None): + return integer.to_bytes( + length or (integer.bit_length() + 7) // 8 or 1, 'big' + ) +else: + def int_to_bytes(integer, length=None): + hex_string = '%x' % integer + if length is None: + n = len(hex_string) + else: + n = length * 2 + return binascii.unhexlify(hex_string.zfill(n + (n & 1))) + + +class InterfaceNotImplemented(Exception): + pass + + +if hasattr(inspect, "signature"): + signature = inspect.signature +else: + signature = inspect.getargspec + + +def verify_interface(iface, klass): + for method in iface.__abstractmethods__: + if not hasattr(klass, method): + raise InterfaceNotImplemented( + "{} is missing a {!r} method".format(klass, method) + ) + if isinstance(getattr(iface, method), abc.abstractproperty): + # Can't properly verify these yet. + continue + sig = signature(getattr(iface, method)) + actual = signature(getattr(klass, method)) + if sig != actual: + raise InterfaceNotImplemented( + "{}.{}'s signature differs from the expected. Expected: " + "{!r}. Received: {!r}".format( + klass, method, sig, actual + ) + ) + + +# No longer needed as of 2.2, but retained because we have external consumers +# who use it. 
+def bit_length(x): + return x.bit_length() + + +class _DeprecatedValue(object): + def __init__(self, value, message, warning_class): + self.value = value + self.message = message + self.warning_class = warning_class + + +class _ModuleWithDeprecations(object): + def __init__(self, module): + self.__dict__["_module"] = module + + def __getattr__(self, attr): + obj = getattr(self._module, attr) + if isinstance(obj, _DeprecatedValue): + warnings.warn(obj.message, obj.warning_class, stacklevel=2) + obj = obj.value + return obj + + def __setattr__(self, attr, value): + setattr(self._module, attr, value) + + def __delattr__(self, attr): + obj = getattr(self._module, attr) + if isinstance(obj, _DeprecatedValue): + warnings.warn(obj.message, obj.warning_class, stacklevel=2) + + delattr(self._module, attr) + + def __dir__(self): + return ["_module"] + dir(self._module) + + +def deprecated(value, module_name, message, warning_class): + module = sys.modules[module_name] + if not isinstance(module, _ModuleWithDeprecations): + sys.modules[module_name] = _ModuleWithDeprecations(module) + return _DeprecatedValue(value, message, warning_class) + + +def cached_property(func): + cached_name = "_cached_{}".format(func) + sentinel = object() + + def inner(instance): + cache = getattr(instance, cached_name, sentinel) + if cache is not sentinel: + return cache + result = func(instance) + setattr(instance, cached_name, result) + return result + return property(inner) diff --git a/packages/wakatime/packages/py27/cryptography/x509/__init__.py b/packages/wakatime/packages/py27/cryptography/x509/__init__.py new file mode 100644 index 0000000..b761e26 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/x509/__init__.py @@ -0,0 +1,189 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
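Editorial note, not part of the patch: deprecated() above works by swapping the owning module for a _ModuleWithDeprecations proxy, so the warning fires on attribute access rather than at definition time. A hypothetical sketch of how a module would mark one of its names (OLD_CONSTANT and the message are made up for illustration):

    from cryptography import utils

    OLD_CONSTANT = utils.deprecated(
        42,
        __name__,
        "OLD_CONSTANT is deprecated; use NEW_CONSTANT instead.",
        utils.CryptographyDeprecationWarning,
    )
    # Reading OLD_CONSTANT through the module now emits the warning and
    # yields 42; code inside this module still sees the _DeprecatedValue
    # wrapper directly.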
+ +from __future__ import absolute_import, division, print_function + +from cryptography.x509 import certificate_transparency +from cryptography.x509.base import ( + Certificate, CertificateBuilder, CertificateRevocationList, + CertificateRevocationListBuilder, + CertificateSigningRequest, CertificateSigningRequestBuilder, + InvalidVersion, RevokedCertificate, RevokedCertificateBuilder, + Version, load_der_x509_certificate, load_der_x509_crl, load_der_x509_csr, + load_pem_x509_certificate, load_pem_x509_crl, load_pem_x509_csr, + random_serial_number, +) +from cryptography.x509.extensions import ( + AccessDescription, AuthorityInformationAccess, + AuthorityKeyIdentifier, BasicConstraints, CRLDistributionPoints, + CRLNumber, CRLReason, CertificateIssuer, CertificatePolicies, + DeltaCRLIndicator, DistributionPoint, DuplicateExtension, ExtendedKeyUsage, + Extension, ExtensionNotFound, ExtensionType, Extensions, FreshestCRL, + GeneralNames, InhibitAnyPolicy, InvalidityDate, IssuerAlternativeName, + IssuingDistributionPoint, KeyUsage, NameConstraints, NoticeReference, + OCSPNoCheck, OCSPNonce, PolicyConstraints, PolicyInformation, + PrecertPoison, PrecertificateSignedCertificateTimestamps, ReasonFlags, + SubjectAlternativeName, SubjectKeyIdentifier, TLSFeature, TLSFeatureType, + UnrecognizedExtension, UserNotice +) +from cryptography.x509.general_name import ( + DNSName, DirectoryName, GeneralName, IPAddress, OtherName, RFC822Name, + RegisteredID, UniformResourceIdentifier, UnsupportedGeneralNameType, + _GENERAL_NAMES +) +from cryptography.x509.name import ( + Name, NameAttribute, RelativeDistinguishedName +) +from cryptography.x509.oid import ( + AuthorityInformationAccessOID, CRLEntryExtensionOID, + CertificatePoliciesOID, ExtendedKeyUsageOID, ExtensionOID, NameOID, + ObjectIdentifier, SignatureAlgorithmOID, _SIG_OIDS_TO_HASH +) + + +OID_AUTHORITY_INFORMATION_ACCESS = ExtensionOID.AUTHORITY_INFORMATION_ACCESS +OID_AUTHORITY_KEY_IDENTIFIER = ExtensionOID.AUTHORITY_KEY_IDENTIFIER +OID_BASIC_CONSTRAINTS = ExtensionOID.BASIC_CONSTRAINTS +OID_CERTIFICATE_POLICIES = ExtensionOID.CERTIFICATE_POLICIES +OID_CRL_DISTRIBUTION_POINTS = ExtensionOID.CRL_DISTRIBUTION_POINTS +OID_EXTENDED_KEY_USAGE = ExtensionOID.EXTENDED_KEY_USAGE +OID_FRESHEST_CRL = ExtensionOID.FRESHEST_CRL +OID_INHIBIT_ANY_POLICY = ExtensionOID.INHIBIT_ANY_POLICY +OID_ISSUER_ALTERNATIVE_NAME = ExtensionOID.ISSUER_ALTERNATIVE_NAME +OID_KEY_USAGE = ExtensionOID.KEY_USAGE +OID_NAME_CONSTRAINTS = ExtensionOID.NAME_CONSTRAINTS +OID_OCSP_NO_CHECK = ExtensionOID.OCSP_NO_CHECK +OID_POLICY_CONSTRAINTS = ExtensionOID.POLICY_CONSTRAINTS +OID_POLICY_MAPPINGS = ExtensionOID.POLICY_MAPPINGS +OID_SUBJECT_ALTERNATIVE_NAME = ExtensionOID.SUBJECT_ALTERNATIVE_NAME +OID_SUBJECT_DIRECTORY_ATTRIBUTES = ExtensionOID.SUBJECT_DIRECTORY_ATTRIBUTES +OID_SUBJECT_INFORMATION_ACCESS = ExtensionOID.SUBJECT_INFORMATION_ACCESS +OID_SUBJECT_KEY_IDENTIFIER = ExtensionOID.SUBJECT_KEY_IDENTIFIER + +OID_DSA_WITH_SHA1 = SignatureAlgorithmOID.DSA_WITH_SHA1 +OID_DSA_WITH_SHA224 = SignatureAlgorithmOID.DSA_WITH_SHA224 +OID_DSA_WITH_SHA256 = SignatureAlgorithmOID.DSA_WITH_SHA256 +OID_ECDSA_WITH_SHA1 = SignatureAlgorithmOID.ECDSA_WITH_SHA1 +OID_ECDSA_WITH_SHA224 = SignatureAlgorithmOID.ECDSA_WITH_SHA224 +OID_ECDSA_WITH_SHA256 = SignatureAlgorithmOID.ECDSA_WITH_SHA256 +OID_ECDSA_WITH_SHA384 = SignatureAlgorithmOID.ECDSA_WITH_SHA384 +OID_ECDSA_WITH_SHA512 = SignatureAlgorithmOID.ECDSA_WITH_SHA512 +OID_RSA_WITH_MD5 = SignatureAlgorithmOID.RSA_WITH_MD5 +OID_RSA_WITH_SHA1 = 
SignatureAlgorithmOID.RSA_WITH_SHA1 +OID_RSA_WITH_SHA224 = SignatureAlgorithmOID.RSA_WITH_SHA224 +OID_RSA_WITH_SHA256 = SignatureAlgorithmOID.RSA_WITH_SHA256 +OID_RSA_WITH_SHA384 = SignatureAlgorithmOID.RSA_WITH_SHA384 +OID_RSA_WITH_SHA512 = SignatureAlgorithmOID.RSA_WITH_SHA512 +OID_RSASSA_PSS = SignatureAlgorithmOID.RSASSA_PSS + +OID_COMMON_NAME = NameOID.COMMON_NAME +OID_COUNTRY_NAME = NameOID.COUNTRY_NAME +OID_DOMAIN_COMPONENT = NameOID.DOMAIN_COMPONENT +OID_DN_QUALIFIER = NameOID.DN_QUALIFIER +OID_EMAIL_ADDRESS = NameOID.EMAIL_ADDRESS +OID_GENERATION_QUALIFIER = NameOID.GENERATION_QUALIFIER +OID_GIVEN_NAME = NameOID.GIVEN_NAME +OID_LOCALITY_NAME = NameOID.LOCALITY_NAME +OID_ORGANIZATIONAL_UNIT_NAME = NameOID.ORGANIZATIONAL_UNIT_NAME +OID_ORGANIZATION_NAME = NameOID.ORGANIZATION_NAME +OID_PSEUDONYM = NameOID.PSEUDONYM +OID_SERIAL_NUMBER = NameOID.SERIAL_NUMBER +OID_STATE_OR_PROVINCE_NAME = NameOID.STATE_OR_PROVINCE_NAME +OID_SURNAME = NameOID.SURNAME +OID_TITLE = NameOID.TITLE + +OID_CLIENT_AUTH = ExtendedKeyUsageOID.CLIENT_AUTH +OID_CODE_SIGNING = ExtendedKeyUsageOID.CODE_SIGNING +OID_EMAIL_PROTECTION = ExtendedKeyUsageOID.EMAIL_PROTECTION +OID_OCSP_SIGNING = ExtendedKeyUsageOID.OCSP_SIGNING +OID_SERVER_AUTH = ExtendedKeyUsageOID.SERVER_AUTH +OID_TIME_STAMPING = ExtendedKeyUsageOID.TIME_STAMPING + +OID_ANY_POLICY = CertificatePoliciesOID.ANY_POLICY +OID_CPS_QUALIFIER = CertificatePoliciesOID.CPS_QUALIFIER +OID_CPS_USER_NOTICE = CertificatePoliciesOID.CPS_USER_NOTICE + +OID_CERTIFICATE_ISSUER = CRLEntryExtensionOID.CERTIFICATE_ISSUER +OID_CRL_REASON = CRLEntryExtensionOID.CRL_REASON +OID_INVALIDITY_DATE = CRLEntryExtensionOID.INVALIDITY_DATE + +OID_CA_ISSUERS = AuthorityInformationAccessOID.CA_ISSUERS +OID_OCSP = AuthorityInformationAccessOID.OCSP + +__all__ = [ + "certificate_transparency", + "load_pem_x509_certificate", + "load_der_x509_certificate", + "load_pem_x509_csr", + "load_der_x509_csr", + "load_pem_x509_crl", + "load_der_x509_crl", + "random_serial_number", + "InvalidVersion", + "DeltaCRLIndicator", + "DuplicateExtension", + "ExtensionNotFound", + "UnsupportedGeneralNameType", + "NameAttribute", + "Name", + "RelativeDistinguishedName", + "ObjectIdentifier", + "ExtensionType", + "Extensions", + "Extension", + "ExtendedKeyUsage", + "FreshestCRL", + "IssuingDistributionPoint", + "TLSFeature", + "TLSFeatureType", + "OCSPNoCheck", + "BasicConstraints", + "CRLNumber", + "KeyUsage", + "AuthorityInformationAccess", + "AccessDescription", + "CertificatePolicies", + "PolicyInformation", + "UserNotice", + "NoticeReference", + "SubjectKeyIdentifier", + "NameConstraints", + "CRLDistributionPoints", + "DistributionPoint", + "ReasonFlags", + "InhibitAnyPolicy", + "SubjectAlternativeName", + "IssuerAlternativeName", + "AuthorityKeyIdentifier", + "GeneralNames", + "GeneralName", + "RFC822Name", + "DNSName", + "UniformResourceIdentifier", + "RegisteredID", + "DirectoryName", + "IPAddress", + "OtherName", + "Certificate", + "CertificateRevocationList", + "CertificateRevocationListBuilder", + "CertificateSigningRequest", + "RevokedCertificate", + "RevokedCertificateBuilder", + "CertificateSigningRequestBuilder", + "CertificateBuilder", + "Version", + "_SIG_OIDS_TO_HASH", + "OID_CA_ISSUERS", + "OID_OCSP", + "_GENERAL_NAMES", + "CertificateIssuer", + "CRLReason", + "InvalidityDate", + "UnrecognizedExtension", + "PolicyConstraints", + "PrecertificateSignedCertificateTimestamps", + "PrecertPoison", + "OCSPNonce", +] diff --git a/packages/wakatime/packages/py27/cryptography/x509/base.py 
b/packages/wakatime/packages/py27/cryptography/x509/base.py new file mode 100644 index 0000000..3983c9b --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/x509/base.py @@ -0,0 +1,758 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc +import datetime +import os +from enum import Enum + +import six + +from cryptography import utils +from cryptography.hazmat.primitives.asymmetric import ( + dsa, ec, ed25519, ed448, rsa +) +from cryptography.x509.extensions import Extension, ExtensionType +from cryptography.x509.name import Name + + +_EARLIEST_UTC_TIME = datetime.datetime(1950, 1, 1) + + +def _reject_duplicate_extension(extension, extensions): + # This is quadratic in the number of extensions + for e in extensions: + if e.oid == extension.oid: + raise ValueError('This extension has already been set.') + + +def _convert_to_naive_utc_time(time): + """Normalizes a datetime to a naive datetime in UTC. + + time -- datetime to normalize. Assumed to be in UTC if not timezone + aware. + """ + if time.tzinfo is not None: + offset = time.utcoffset() + offset = offset if offset else datetime.timedelta() + return time.replace(tzinfo=None) - offset + else: + return time + + +class Version(Enum): + v1 = 0 + v3 = 2 + + +def load_pem_x509_certificate(data, backend): + return backend.load_pem_x509_certificate(data) + + +def load_der_x509_certificate(data, backend): + return backend.load_der_x509_certificate(data) + + +def load_pem_x509_csr(data, backend): + return backend.load_pem_x509_csr(data) + + +def load_der_x509_csr(data, backend): + return backend.load_der_x509_csr(data) + + +def load_pem_x509_crl(data, backend): + return backend.load_pem_x509_crl(data) + + +def load_der_x509_crl(data, backend): + return backend.load_der_x509_crl(data) + + +class InvalidVersion(Exception): + def __init__(self, msg, parsed_version): + super(InvalidVersion, self).__init__(msg) + self.parsed_version = parsed_version + + +@six.add_metaclass(abc.ABCMeta) +class Certificate(object): + @abc.abstractmethod + def fingerprint(self, algorithm): + """ + Returns bytes using digest passed. + """ + + @abc.abstractproperty + def serial_number(self): + """ + Returns certificate serial number + """ + + @abc.abstractproperty + def version(self): + """ + Returns the certificate version + """ + + @abc.abstractmethod + def public_key(self): + """ + Returns the public key + """ + + @abc.abstractproperty + def not_valid_before(self): + """ + Not before time (represented as UTC datetime) + """ + + @abc.abstractproperty + def not_valid_after(self): + """ + Not after time (represented as UTC datetime) + """ + + @abc.abstractproperty + def issuer(self): + """ + Returns the issuer name object. + """ + + @abc.abstractproperty + def subject(self): + """ + Returns the subject name object. + """ + + @abc.abstractproperty + def signature_hash_algorithm(self): + """ + Returns a HashAlgorithm corresponding to the type of the digest signed + in the certificate. + """ + + @abc.abstractproperty + def signature_algorithm_oid(self): + """ + Returns the ObjectIdentifier of the signature algorithm. + """ + + @abc.abstractproperty + def extensions(self): + """ + Returns an Extensions object. + """ + + @abc.abstractproperty + def signature(self): + """ + Returns the signature bytes. 
+ """ + + @abc.abstractproperty + def tbs_certificate_bytes(self): + """ + Returns the tbsCertificate payload bytes as defined in RFC 5280. + """ + + @abc.abstractmethod + def __eq__(self, other): + """ + Checks equality. + """ + + @abc.abstractmethod + def __ne__(self, other): + """ + Checks not equal. + """ + + @abc.abstractmethod + def __hash__(self): + """ + Computes a hash. + """ + + @abc.abstractmethod + def public_bytes(self, encoding): + """ + Serializes the certificate to PEM or DER format. + """ + + +@six.add_metaclass(abc.ABCMeta) +class CertificateRevocationList(object): + @abc.abstractmethod + def public_bytes(self, encoding): + """ + Serializes the CRL to PEM or DER format. + """ + + @abc.abstractmethod + def fingerprint(self, algorithm): + """ + Returns bytes using digest passed. + """ + + @abc.abstractmethod + def get_revoked_certificate_by_serial_number(self, serial_number): + """ + Returns an instance of RevokedCertificate or None if the serial_number + is not in the CRL. + """ + + @abc.abstractproperty + def signature_hash_algorithm(self): + """ + Returns a HashAlgorithm corresponding to the type of the digest signed + in the certificate. + """ + + @abc.abstractproperty + def signature_algorithm_oid(self): + """ + Returns the ObjectIdentifier of the signature algorithm. + """ + + @abc.abstractproperty + def issuer(self): + """ + Returns the X509Name with the issuer of this CRL. + """ + + @abc.abstractproperty + def next_update(self): + """ + Returns the date of next update for this CRL. + """ + + @abc.abstractproperty + def last_update(self): + """ + Returns the date of last update for this CRL. + """ + + @abc.abstractproperty + def extensions(self): + """ + Returns an Extensions object containing a list of CRL extensions. + """ + + @abc.abstractproperty + def signature(self): + """ + Returns the signature bytes. + """ + + @abc.abstractproperty + def tbs_certlist_bytes(self): + """ + Returns the tbsCertList payload bytes as defined in RFC 5280. + """ + + @abc.abstractmethod + def __eq__(self, other): + """ + Checks equality. + """ + + @abc.abstractmethod + def __ne__(self, other): + """ + Checks not equal. + """ + + @abc.abstractmethod + def __len__(self): + """ + Number of revoked certificates in the CRL. + """ + + @abc.abstractmethod + def __getitem__(self, idx): + """ + Returns a revoked certificate (or slice of revoked certificates). + """ + + @abc.abstractmethod + def __iter__(self): + """ + Iterator over the revoked certificates + """ + + @abc.abstractmethod + def is_signature_valid(self, public_key): + """ + Verifies signature of revocation list against given public key. + """ + + +@six.add_metaclass(abc.ABCMeta) +class CertificateSigningRequest(object): + @abc.abstractmethod + def __eq__(self, other): + """ + Checks equality. + """ + + @abc.abstractmethod + def __ne__(self, other): + """ + Checks not equal. + """ + + @abc.abstractmethod + def __hash__(self): + """ + Computes a hash. + """ + + @abc.abstractmethod + def public_key(self): + """ + Returns the public key + """ + + @abc.abstractproperty + def subject(self): + """ + Returns the subject name object. + """ + + @abc.abstractproperty + def signature_hash_algorithm(self): + """ + Returns a HashAlgorithm corresponding to the type of the digest signed + in the certificate. + """ + + @abc.abstractproperty + def signature_algorithm_oid(self): + """ + Returns the ObjectIdentifier of the signature algorithm. 
+ """ + + @abc.abstractproperty + def extensions(self): + """ + Returns the extensions in the signing request. + """ + + @abc.abstractmethod + def public_bytes(self, encoding): + """ + Encodes the request to PEM or DER format. + """ + + @abc.abstractproperty + def signature(self): + """ + Returns the signature bytes. + """ + + @abc.abstractproperty + def tbs_certrequest_bytes(self): + """ + Returns the PKCS#10 CertificationRequestInfo bytes as defined in RFC + 2986. + """ + + @abc.abstractproperty + def is_signature_valid(self): + """ + Verifies signature of signing request. + """ + + +@six.add_metaclass(abc.ABCMeta) +class RevokedCertificate(object): + @abc.abstractproperty + def serial_number(self): + """ + Returns the serial number of the revoked certificate. + """ + + @abc.abstractproperty + def revocation_date(self): + """ + Returns the date of when this certificate was revoked. + """ + + @abc.abstractproperty + def extensions(self): + """ + Returns an Extensions object containing a list of Revoked extensions. + """ + + +class CertificateSigningRequestBuilder(object): + def __init__(self, subject_name=None, extensions=[]): + """ + Creates an empty X.509 certificate request (v1). + """ + self._subject_name = subject_name + self._extensions = extensions + + def subject_name(self, name): + """ + Sets the certificate requestor's distinguished name. + """ + if not isinstance(name, Name): + raise TypeError('Expecting x509.Name object.') + if self._subject_name is not None: + raise ValueError('The subject name may only be set once.') + return CertificateSigningRequestBuilder(name, self._extensions) + + def add_extension(self, extension, critical): + """ + Adds an X.509 extension to the certificate request. + """ + if not isinstance(extension, ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = Extension(extension.oid, critical, extension) + _reject_duplicate_extension(extension, self._extensions) + + return CertificateSigningRequestBuilder( + self._subject_name, self._extensions + [extension] + ) + + def sign(self, private_key, algorithm, backend): + """ + Signs the request using the requestor's private key. + """ + if self._subject_name is None: + raise ValueError("A CertificateSigningRequest must have a subject") + return backend.create_x509_csr(self, private_key, algorithm) + + +class CertificateBuilder(object): + def __init__(self, issuer_name=None, subject_name=None, + public_key=None, serial_number=None, not_valid_before=None, + not_valid_after=None, extensions=[]): + self._version = Version.v3 + self._issuer_name = issuer_name + self._subject_name = subject_name + self._public_key = public_key + self._serial_number = serial_number + self._not_valid_before = not_valid_before + self._not_valid_after = not_valid_after + self._extensions = extensions + + def issuer_name(self, name): + """ + Sets the CA's distinguished name. + """ + if not isinstance(name, Name): + raise TypeError('Expecting x509.Name object.') + if self._issuer_name is not None: + raise ValueError('The issuer name may only be set once.') + return CertificateBuilder( + name, self._subject_name, self._public_key, + self._serial_number, self._not_valid_before, + self._not_valid_after, self._extensions + ) + + def subject_name(self, name): + """ + Sets the requestor's distinguished name. 
+ """ + if not isinstance(name, Name): + raise TypeError('Expecting x509.Name object.') + if self._subject_name is not None: + raise ValueError('The subject name may only be set once.') + return CertificateBuilder( + self._issuer_name, name, self._public_key, + self._serial_number, self._not_valid_before, + self._not_valid_after, self._extensions + ) + + def public_key(self, key): + """ + Sets the requestor's public key (as found in the signing request). + """ + if not isinstance(key, (dsa.DSAPublicKey, rsa.RSAPublicKey, + ec.EllipticCurvePublicKey, + ed25519.Ed25519PublicKey, + ed448.Ed448PublicKey)): + raise TypeError('Expecting one of DSAPublicKey, RSAPublicKey,' + ' EllipticCurvePublicKey, Ed25519PublicKey or' + ' Ed448PublicKey.') + if self._public_key is not None: + raise ValueError('The public key may only be set once.') + return CertificateBuilder( + self._issuer_name, self._subject_name, key, + self._serial_number, self._not_valid_before, + self._not_valid_after, self._extensions + ) + + def serial_number(self, number): + """ + Sets the certificate serial number. + """ + if not isinstance(number, six.integer_types): + raise TypeError('Serial number must be of integral type.') + if self._serial_number is not None: + raise ValueError('The serial number may only be set once.') + if number <= 0: + raise ValueError('The serial number should be positive.') + + # ASN.1 integers are always signed, so most significant bit must be + # zero. + if number.bit_length() >= 160: # As defined in RFC 5280 + raise ValueError('The serial number should not be more than 159 ' + 'bits.') + return CertificateBuilder( + self._issuer_name, self._subject_name, + self._public_key, number, self._not_valid_before, + self._not_valid_after, self._extensions + ) + + def not_valid_before(self, time): + """ + Sets the certificate activation time. + """ + if not isinstance(time, datetime.datetime): + raise TypeError('Expecting datetime object.') + if self._not_valid_before is not None: + raise ValueError('The not valid before may only be set once.') + time = _convert_to_naive_utc_time(time) + if time < _EARLIEST_UTC_TIME: + raise ValueError('The not valid before date must be on or after' + ' 1950 January 1).') + if self._not_valid_after is not None and time > self._not_valid_after: + raise ValueError( + 'The not valid before date must be before the not valid after ' + 'date.' + ) + return CertificateBuilder( + self._issuer_name, self._subject_name, + self._public_key, self._serial_number, time, + self._not_valid_after, self._extensions + ) + + def not_valid_after(self, time): + """ + Sets the certificate expiration time. + """ + if not isinstance(time, datetime.datetime): + raise TypeError('Expecting datetime object.') + if self._not_valid_after is not None: + raise ValueError('The not valid after may only be set once.') + time = _convert_to_naive_utc_time(time) + if time < _EARLIEST_UTC_TIME: + raise ValueError('The not valid after date must be on or after' + ' 1950 January 1.') + if (self._not_valid_before is not None and + time < self._not_valid_before): + raise ValueError( + 'The not valid after date must be after the not valid before ' + 'date.' + ) + return CertificateBuilder( + self._issuer_name, self._subject_name, + self._public_key, self._serial_number, self._not_valid_before, + time, self._extensions + ) + + def add_extension(self, extension, critical): + """ + Adds an X.509 extension to the certificate. 
+ """ + if not isinstance(extension, ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = Extension(extension.oid, critical, extension) + _reject_duplicate_extension(extension, self._extensions) + + return CertificateBuilder( + self._issuer_name, self._subject_name, + self._public_key, self._serial_number, self._not_valid_before, + self._not_valid_after, self._extensions + [extension] + ) + + def sign(self, private_key, algorithm, backend): + """ + Signs the certificate using the CA's private key. + """ + if self._subject_name is None: + raise ValueError("A certificate must have a subject name") + + if self._issuer_name is None: + raise ValueError("A certificate must have an issuer name") + + if self._serial_number is None: + raise ValueError("A certificate must have a serial number") + + if self._not_valid_before is None: + raise ValueError("A certificate must have a not valid before time") + + if self._not_valid_after is None: + raise ValueError("A certificate must have a not valid after time") + + if self._public_key is None: + raise ValueError("A certificate must have a public key") + + return backend.create_x509_certificate(self, private_key, algorithm) + + +class CertificateRevocationListBuilder(object): + def __init__(self, issuer_name=None, last_update=None, next_update=None, + extensions=[], revoked_certificates=[]): + self._issuer_name = issuer_name + self._last_update = last_update + self._next_update = next_update + self._extensions = extensions + self._revoked_certificates = revoked_certificates + + def issuer_name(self, issuer_name): + if not isinstance(issuer_name, Name): + raise TypeError('Expecting x509.Name object.') + if self._issuer_name is not None: + raise ValueError('The issuer name may only be set once.') + return CertificateRevocationListBuilder( + issuer_name, self._last_update, self._next_update, + self._extensions, self._revoked_certificates + ) + + def last_update(self, last_update): + if not isinstance(last_update, datetime.datetime): + raise TypeError('Expecting datetime object.') + if self._last_update is not None: + raise ValueError('Last update may only be set once.') + last_update = _convert_to_naive_utc_time(last_update) + if last_update < _EARLIEST_UTC_TIME: + raise ValueError('The last update date must be on or after' + ' 1950 January 1.') + if self._next_update is not None and last_update > self._next_update: + raise ValueError( + 'The last update date must be before the next update date.' + ) + return CertificateRevocationListBuilder( + self._issuer_name, last_update, self._next_update, + self._extensions, self._revoked_certificates + ) + + def next_update(self, next_update): + if not isinstance(next_update, datetime.datetime): + raise TypeError('Expecting datetime object.') + if self._next_update is not None: + raise ValueError('Last update may only be set once.') + next_update = _convert_to_naive_utc_time(next_update) + if next_update < _EARLIEST_UTC_TIME: + raise ValueError('The last update date must be on or after' + ' 1950 January 1.') + if self._last_update is not None and next_update < self._last_update: + raise ValueError( + 'The next update date must be after the last update date.' + ) + return CertificateRevocationListBuilder( + self._issuer_name, self._last_update, next_update, + self._extensions, self._revoked_certificates + ) + + def add_extension(self, extension, critical): + """ + Adds an X.509 extension to the certificate revocation list. 
+ """ + if not isinstance(extension, ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = Extension(extension.oid, critical, extension) + _reject_duplicate_extension(extension, self._extensions) + return CertificateRevocationListBuilder( + self._issuer_name, self._last_update, self._next_update, + self._extensions + [extension], self._revoked_certificates + ) + + def add_revoked_certificate(self, revoked_certificate): + """ + Adds a revoked certificate to the CRL. + """ + if not isinstance(revoked_certificate, RevokedCertificate): + raise TypeError("Must be an instance of RevokedCertificate") + + return CertificateRevocationListBuilder( + self._issuer_name, self._last_update, + self._next_update, self._extensions, + self._revoked_certificates + [revoked_certificate] + ) + + def sign(self, private_key, algorithm, backend): + if self._issuer_name is None: + raise ValueError("A CRL must have an issuer name") + + if self._last_update is None: + raise ValueError("A CRL must have a last update time") + + if self._next_update is None: + raise ValueError("A CRL must have a next update time") + + return backend.create_x509_crl(self, private_key, algorithm) + + +class RevokedCertificateBuilder(object): + def __init__(self, serial_number=None, revocation_date=None, + extensions=[]): + self._serial_number = serial_number + self._revocation_date = revocation_date + self._extensions = extensions + + def serial_number(self, number): + if not isinstance(number, six.integer_types): + raise TypeError('Serial number must be of integral type.') + if self._serial_number is not None: + raise ValueError('The serial number may only be set once.') + if number <= 0: + raise ValueError('The serial number should be positive') + + # ASN.1 integers are always signed, so most significant bit must be + # zero. 
+ if number.bit_length() >= 160: # As defined in RFC 5280 + raise ValueError('The serial number should not be more than 159 ' + 'bits.') + return RevokedCertificateBuilder( + number, self._revocation_date, self._extensions + ) + + def revocation_date(self, time): + if not isinstance(time, datetime.datetime): + raise TypeError('Expecting datetime object.') + if self._revocation_date is not None: + raise ValueError('The revocation date may only be set once.') + time = _convert_to_naive_utc_time(time) + if time < _EARLIEST_UTC_TIME: + raise ValueError('The revocation date must be on or after' + ' 1950 January 1.') + return RevokedCertificateBuilder( + self._serial_number, time, self._extensions + ) + + def add_extension(self, extension, critical): + if not isinstance(extension, ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = Extension(extension.oid, critical, extension) + _reject_duplicate_extension(extension, self._extensions) + return RevokedCertificateBuilder( + self._serial_number, self._revocation_date, + self._extensions + [extension] + ) + + def build(self, backend): + if self._serial_number is None: + raise ValueError("A revoked certificate must have a serial number") + if self._revocation_date is None: + raise ValueError( + "A revoked certificate must have a revocation date" + ) + + return backend.create_x509_revoked_certificate(self) + + +def random_serial_number(): + return utils.int_from_bytes(os.urandom(20), "big") >> 1 diff --git a/packages/wakatime/packages/py27/cryptography/x509/certificate_transparency.py b/packages/wakatime/packages/py27/cryptography/x509/certificate_transparency.py new file mode 100644 index 0000000..d00fe81 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/x509/certificate_transparency.py @@ -0,0 +1,46 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc +from enum import Enum + +import six + + +class LogEntryType(Enum): + X509_CERTIFICATE = 0 + PRE_CERTIFICATE = 1 + + +class Version(Enum): + v1 = 0 + + +@six.add_metaclass(abc.ABCMeta) +class SignedCertificateTimestamp(object): + @abc.abstractproperty + def version(self): + """ + Returns the SCT version. + """ + + @abc.abstractproperty + def log_id(self): + """ + Returns an identifier indicating which log this SCT is for. + """ + + @abc.abstractproperty + def timestamp(self): + """ + Returns the timestamp for this SCT. + """ + + @abc.abstractproperty + def entry_type(self): + """ + Returns whether this is an SCT for a certificate or pre-certificate. + """ diff --git a/packages/wakatime/packages/py27/cryptography/x509/extensions.py b/packages/wakatime/packages/py27/cryptography/x509/extensions.py new file mode 100644 index 0000000..f60075a --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/x509/extensions.py @@ -0,0 +1,1609 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
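Editor's note: the builder classes vendored above (CertificateSigningRequestBuilder, CertificateBuilder, CertificateRevocationListBuilder, RevokedCertificateBuilder) are deliberately immutable; every setter returns a fresh builder instead of mutating in place, which is why the "may only be set once" checks work and why calls chain. The snippet below is a minimal usage sketch only, assuming the vendored copy behaves like upstream cryptography 2.x (sign() still takes an explicit backend) and that the hazmat RSA and SHA-256 primitives ship alongside this vendored x509 layer; apart from the x509 API itself, none of the names below appear in this diff.

    import datetime

    from cryptography import x509
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.asymmetric import rsa
    from cryptography.x509.oid import NameOID

    # Throwaway key; assumes the vendored hazmat layer is present (not shown in this diff).
    key = rsa.generate_private_key(
        public_exponent=65537, key_size=2048, backend=default_backend()
    )

    name = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, u"example.com")])
    now = datetime.datetime.utcnow()

    # Each setter returns a new CertificateBuilder, so the calls chain.
    cert = (
        x509.CertificateBuilder()
        .subject_name(name)
        .issuer_name(name)  # self-signed: issuer == subject
        .public_key(key.public_key())
        .serial_number(x509.random_serial_number())  # 159-bit random serial, as defined above
        .not_valid_before(now)
        .not_valid_after(now + datetime.timedelta(days=30))
        .add_extension(
            x509.BasicConstraints(ca=True, path_length=None), critical=True
        )
        .sign(key, hashes.SHA256(), default_backend())
    )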
+ +from __future__ import absolute_import, division, print_function + +import abc +import datetime +import hashlib +import ipaddress +import warnings +from enum import Enum + +import six + +from cryptography import utils +from cryptography.hazmat._der import ( + BIT_STRING, DERReader, OBJECT_IDENTIFIER, SEQUENCE +) +from cryptography.hazmat.primitives import constant_time, serialization +from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePublicKey +from cryptography.hazmat.primitives.asymmetric.rsa import RSAPublicKey +from cryptography.x509.certificate_transparency import ( + SignedCertificateTimestamp +) +from cryptography.x509.general_name import GeneralName, IPAddress, OtherName +from cryptography.x509.name import RelativeDistinguishedName +from cryptography.x509.oid import ( + CRLEntryExtensionOID, ExtensionOID, OCSPExtensionOID, ObjectIdentifier, +) + + +def _key_identifier_from_public_key(public_key): + if isinstance(public_key, RSAPublicKey): + data = public_key.public_bytes( + serialization.Encoding.DER, + serialization.PublicFormat.PKCS1, + ) + elif isinstance(public_key, EllipticCurvePublicKey): + data = public_key.public_bytes( + serialization.Encoding.X962, + serialization.PublicFormat.UncompressedPoint + ) + else: + # This is a very slow way to do this. + serialized = public_key.public_bytes( + serialization.Encoding.DER, + serialization.PublicFormat.SubjectPublicKeyInfo + ) + + reader = DERReader(serialized) + with reader.read_single_element(SEQUENCE) as public_key_info: + algorithm = public_key_info.read_element(SEQUENCE) + public_key = public_key_info.read_element(BIT_STRING) + + # Double-check the algorithm structure. + with algorithm: + algorithm.read_element(OBJECT_IDENTIFIER) + if not algorithm.is_empty(): + # Skip the optional parameters field. + algorithm.read_any_element() + + # BIT STRING contents begin with the number of padding bytes added. It + # must be zero for SubjectPublicKeyInfo structures. + if public_key.read_byte() != 0: + raise ValueError('Invalid public key encoding') + + data = public_key.data + + return hashlib.sha1(data).digest() + + +def _make_sequence_methods(field_name): + def len_method(self): + return len(getattr(self, field_name)) + + def iter_method(self): + return iter(getattr(self, field_name)) + + def getitem_method(self, idx): + return getattr(self, field_name)[idx] + + return len_method, iter_method, getitem_method + + +class DuplicateExtension(Exception): + def __init__(self, msg, oid): + super(DuplicateExtension, self).__init__(msg) + self.oid = oid + + +class ExtensionNotFound(Exception): + def __init__(self, msg, oid): + super(ExtensionNotFound, self).__init__(msg) + self.oid = oid + + +@six.add_metaclass(abc.ABCMeta) +class ExtensionType(object): + @abc.abstractproperty + def oid(self): + """ + Returns the oid associated with the given extension type. + """ + + +class Extensions(object): + def __init__(self, extensions): + self._extensions = extensions + + def get_extension_for_oid(self, oid): + for ext in self: + if ext.oid == oid: + return ext + + raise ExtensionNotFound("No {} extension was found".format(oid), oid) + + def get_extension_for_class(self, extclass): + if extclass is UnrecognizedExtension: + raise TypeError( + "UnrecognizedExtension can't be used with " + "get_extension_for_class because more than one instance of the" + " class may be present." 
+ ) + + for ext in self: + if isinstance(ext.value, extclass): + return ext + + raise ExtensionNotFound( + "No {} extension was found".format(extclass), extclass.oid + ) + + __len__, __iter__, __getitem__ = _make_sequence_methods("_extensions") + + def __repr__(self): + return ( + "".format(self._extensions) + ) + + +@utils.register_interface(ExtensionType) +class CRLNumber(object): + oid = ExtensionOID.CRL_NUMBER + + def __init__(self, crl_number): + if not isinstance(crl_number, six.integer_types): + raise TypeError("crl_number must be an integer") + + self._crl_number = crl_number + + def __eq__(self, other): + if not isinstance(other, CRLNumber): + return NotImplemented + + return self.crl_number == other.crl_number + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(self.crl_number) + + def __repr__(self): + return "".format(self.crl_number) + + crl_number = utils.read_only_property("_crl_number") + + +@utils.register_interface(ExtensionType) +class AuthorityKeyIdentifier(object): + oid = ExtensionOID.AUTHORITY_KEY_IDENTIFIER + + def __init__(self, key_identifier, authority_cert_issuer, + authority_cert_serial_number): + if (authority_cert_issuer is None) != ( + authority_cert_serial_number is None + ): + raise ValueError( + "authority_cert_issuer and authority_cert_serial_number " + "must both be present or both None" + ) + + if authority_cert_issuer is not None: + authority_cert_issuer = list(authority_cert_issuer) + if not all( + isinstance(x, GeneralName) for x in authority_cert_issuer + ): + raise TypeError( + "authority_cert_issuer must be a list of GeneralName " + "objects" + ) + + if authority_cert_serial_number is not None and not isinstance( + authority_cert_serial_number, six.integer_types + ): + raise TypeError( + "authority_cert_serial_number must be an integer" + ) + + self._key_identifier = key_identifier + self._authority_cert_issuer = authority_cert_issuer + self._authority_cert_serial_number = authority_cert_serial_number + + @classmethod + def from_issuer_public_key(cls, public_key): + digest = _key_identifier_from_public_key(public_key) + return cls( + key_identifier=digest, + authority_cert_issuer=None, + authority_cert_serial_number=None + ) + + @classmethod + def from_issuer_subject_key_identifier(cls, ski): + if isinstance(ski, SubjectKeyIdentifier): + digest = ski.digest + else: + digest = ski.value.digest + warnings.warn( + "Extension objects are deprecated as arguments to " + "from_issuer_subject_key_identifier and support will be " + "removed soon. 
Please migrate to passing a " + "SubjectKeyIdentifier directly.", + utils.DeprecatedIn27, + stacklevel=2, + ) + + return cls( + key_identifier=digest, + authority_cert_issuer=None, + authority_cert_serial_number=None + ) + + def __repr__(self): + return ( + "".format(self) + ) + + def __eq__(self, other): + if not isinstance(other, AuthorityKeyIdentifier): + return NotImplemented + + return ( + self.key_identifier == other.key_identifier and + self.authority_cert_issuer == other.authority_cert_issuer and + self.authority_cert_serial_number == + other.authority_cert_serial_number + ) + + def __ne__(self, other): + return not self == other + + def __hash__(self): + if self.authority_cert_issuer is None: + aci = None + else: + aci = tuple(self.authority_cert_issuer) + return hash(( + self.key_identifier, aci, self.authority_cert_serial_number + )) + + key_identifier = utils.read_only_property("_key_identifier") + authority_cert_issuer = utils.read_only_property("_authority_cert_issuer") + authority_cert_serial_number = utils.read_only_property( + "_authority_cert_serial_number" + ) + + +@utils.register_interface(ExtensionType) +class SubjectKeyIdentifier(object): + oid = ExtensionOID.SUBJECT_KEY_IDENTIFIER + + def __init__(self, digest): + self._digest = digest + + @classmethod + def from_public_key(cls, public_key): + return cls(_key_identifier_from_public_key(public_key)) + + digest = utils.read_only_property("_digest") + + def __repr__(self): + return "".format(self.digest) + + def __eq__(self, other): + if not isinstance(other, SubjectKeyIdentifier): + return NotImplemented + + return constant_time.bytes_eq(self.digest, other.digest) + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(self.digest) + + +@utils.register_interface(ExtensionType) +class AuthorityInformationAccess(object): + oid = ExtensionOID.AUTHORITY_INFORMATION_ACCESS + + def __init__(self, descriptions): + descriptions = list(descriptions) + if not all(isinstance(x, AccessDescription) for x in descriptions): + raise TypeError( + "Every item in the descriptions list must be an " + "AccessDescription" + ) + + self._descriptions = descriptions + + __len__, __iter__, __getitem__ = _make_sequence_methods("_descriptions") + + def __repr__(self): + return "".format(self._descriptions) + + def __eq__(self, other): + if not isinstance(other, AuthorityInformationAccess): + return NotImplemented + + return self._descriptions == other._descriptions + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(tuple(self._descriptions)) + + +class AccessDescription(object): + def __init__(self, access_method, access_location): + if not isinstance(access_method, ObjectIdentifier): + raise TypeError("access_method must be an ObjectIdentifier") + + if not isinstance(access_location, GeneralName): + raise TypeError("access_location must be a GeneralName") + + self._access_method = access_method + self._access_location = access_location + + def __repr__(self): + return ( + "".format(self) + ) + + def __eq__(self, other): + if not isinstance(other, AccessDescription): + return NotImplemented + + return ( + self.access_method == other.access_method and + self.access_location == other.access_location + ) + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash((self.access_method, self.access_location)) + + access_method = utils.read_only_property("_access_method") + access_location = utils.read_only_property("_access_location") + + 
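Editor's note: SubjectKeyIdentifier.digest is the SHA-1 hash of the encoded public key (computed by _key_identifier_from_public_key at the top of this file), and the non-deprecated path through from_issuer_subject_key_identifier is to pass a SubjectKeyIdentifier directly rather than an Extension. A hedged sketch follows; ca_key and builder are hypothetical (a CA private key and an in-progress CertificateBuilder such as the one in the earlier sketch), and AuthorityInformationAccessOID is assumed to be available from cryptography.x509.oid as in upstream releases.

    from cryptography import x509
    from cryptography.x509.oid import AuthorityInformationAccessOID

    # ca_key and builder are assumed to exist (see the earlier builder sketch).

    # Subject key identifier: SHA-1 of the CA's encoded public key.
    ski = x509.SubjectKeyIdentifier.from_public_key(ca_key.public_key())

    # Authority key identifier derived from the issuer's SKI (the
    # non-deprecated code path in from_issuer_subject_key_identifier).
    aki = x509.AuthorityKeyIdentifier.from_issuer_subject_key_identifier(ski)

    # Authority Information Access pointing clients at an OCSP responder.
    aia = x509.AuthorityInformationAccess([
        x509.AccessDescription(
            AuthorityInformationAccessOID.OCSP,
            x509.UniformResourceIdentifier(u"http://ocsp.example.com"),
        ),
    ])

    builder = (
        builder
        .add_extension(ski, critical=False)
        .add_extension(aki, critical=False)
        .add_extension(aia, critical=False)
    )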
+@utils.register_interface(ExtensionType) +class BasicConstraints(object): + oid = ExtensionOID.BASIC_CONSTRAINTS + + def __init__(self, ca, path_length): + if not isinstance(ca, bool): + raise TypeError("ca must be a boolean value") + + if path_length is not None and not ca: + raise ValueError("path_length must be None when ca is False") + + if ( + path_length is not None and + (not isinstance(path_length, six.integer_types) or path_length < 0) + ): + raise TypeError( + "path_length must be a non-negative integer or None" + ) + + self._ca = ca + self._path_length = path_length + + ca = utils.read_only_property("_ca") + path_length = utils.read_only_property("_path_length") + + def __repr__(self): + return ("").format(self) + + def __eq__(self, other): + if not isinstance(other, BasicConstraints): + return NotImplemented + + return self.ca == other.ca and self.path_length == other.path_length + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash((self.ca, self.path_length)) + + +@utils.register_interface(ExtensionType) +class DeltaCRLIndicator(object): + oid = ExtensionOID.DELTA_CRL_INDICATOR + + def __init__(self, crl_number): + if not isinstance(crl_number, six.integer_types): + raise TypeError("crl_number must be an integer") + + self._crl_number = crl_number + + crl_number = utils.read_only_property("_crl_number") + + def __eq__(self, other): + if not isinstance(other, DeltaCRLIndicator): + return NotImplemented + + return self.crl_number == other.crl_number + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(self.crl_number) + + def __repr__(self): + return "".format(self) + + +@utils.register_interface(ExtensionType) +class CRLDistributionPoints(object): + oid = ExtensionOID.CRL_DISTRIBUTION_POINTS + + def __init__(self, distribution_points): + distribution_points = list(distribution_points) + if not all( + isinstance(x, DistributionPoint) for x in distribution_points + ): + raise TypeError( + "distribution_points must be a list of DistributionPoint " + "objects" + ) + + self._distribution_points = distribution_points + + __len__, __iter__, __getitem__ = _make_sequence_methods( + "_distribution_points" + ) + + def __repr__(self): + return "".format(self._distribution_points) + + def __eq__(self, other): + if not isinstance(other, CRLDistributionPoints): + return NotImplemented + + return self._distribution_points == other._distribution_points + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(tuple(self._distribution_points)) + + +@utils.register_interface(ExtensionType) +class FreshestCRL(object): + oid = ExtensionOID.FRESHEST_CRL + + def __init__(self, distribution_points): + distribution_points = list(distribution_points) + if not all( + isinstance(x, DistributionPoint) for x in distribution_points + ): + raise TypeError( + "distribution_points must be a list of DistributionPoint " + "objects" + ) + + self._distribution_points = distribution_points + + __len__, __iter__, __getitem__ = _make_sequence_methods( + "_distribution_points" + ) + + def __repr__(self): + return "".format(self._distribution_points) + + def __eq__(self, other): + if not isinstance(other, FreshestCRL): + return NotImplemented + + return self._distribution_points == other._distribution_points + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(tuple(self._distribution_points)) + + +class DistributionPoint(object): + def __init__(self, full_name, 
relative_name, reasons, crl_issuer): + if full_name and relative_name: + raise ValueError( + "You cannot provide both full_name and relative_name, at " + "least one must be None." + ) + + if full_name: + full_name = list(full_name) + if not all(isinstance(x, GeneralName) for x in full_name): + raise TypeError( + "full_name must be a list of GeneralName objects" + ) + + if relative_name: + if not isinstance(relative_name, RelativeDistinguishedName): + raise TypeError( + "relative_name must be a RelativeDistinguishedName" + ) + + if crl_issuer: + crl_issuer = list(crl_issuer) + if not all(isinstance(x, GeneralName) for x in crl_issuer): + raise TypeError( + "crl_issuer must be None or a list of general names" + ) + + if reasons and (not isinstance(reasons, frozenset) or not all( + isinstance(x, ReasonFlags) for x in reasons + )): + raise TypeError("reasons must be None or frozenset of ReasonFlags") + + if reasons and ( + ReasonFlags.unspecified in reasons or + ReasonFlags.remove_from_crl in reasons + ): + raise ValueError( + "unspecified and remove_from_crl are not valid reasons in a " + "DistributionPoint" + ) + + if reasons and not crl_issuer and not (full_name or relative_name): + raise ValueError( + "You must supply crl_issuer, full_name, or relative_name when " + "reasons is not None" + ) + + self._full_name = full_name + self._relative_name = relative_name + self._reasons = reasons + self._crl_issuer = crl_issuer + + def __repr__(self): + return ( + "" + .format(self) + ) + + def __eq__(self, other): + if not isinstance(other, DistributionPoint): + return NotImplemented + + return ( + self.full_name == other.full_name and + self.relative_name == other.relative_name and + self.reasons == other.reasons and + self.crl_issuer == other.crl_issuer + ) + + def __ne__(self, other): + return not self == other + + def __hash__(self): + if self.full_name is not None: + fn = tuple(self.full_name) + else: + fn = None + + if self.crl_issuer is not None: + crl_issuer = tuple(self.crl_issuer) + else: + crl_issuer = None + + return hash((fn, self.relative_name, self.reasons, crl_issuer)) + + full_name = utils.read_only_property("_full_name") + relative_name = utils.read_only_property("_relative_name") + reasons = utils.read_only_property("_reasons") + crl_issuer = utils.read_only_property("_crl_issuer") + + +class ReasonFlags(Enum): + unspecified = "unspecified" + key_compromise = "keyCompromise" + ca_compromise = "cACompromise" + affiliation_changed = "affiliationChanged" + superseded = "superseded" + cessation_of_operation = "cessationOfOperation" + certificate_hold = "certificateHold" + privilege_withdrawn = "privilegeWithdrawn" + aa_compromise = "aACompromise" + remove_from_crl = "removeFromCRL" + + +@utils.register_interface(ExtensionType) +class PolicyConstraints(object): + oid = ExtensionOID.POLICY_CONSTRAINTS + + def __init__(self, require_explicit_policy, inhibit_policy_mapping): + if require_explicit_policy is not None and not isinstance( + require_explicit_policy, six.integer_types + ): + raise TypeError( + "require_explicit_policy must be a non-negative integer or " + "None" + ) + + if inhibit_policy_mapping is not None and not isinstance( + inhibit_policy_mapping, six.integer_types + ): + raise TypeError( + "inhibit_policy_mapping must be a non-negative integer or None" + ) + + if inhibit_policy_mapping is None and require_explicit_policy is None: + raise ValueError( + "At least one of require_explicit_policy and " + "inhibit_policy_mapping must not be None" + ) + + 
self._require_explicit_policy = require_explicit_policy + self._inhibit_policy_mapping = inhibit_policy_mapping + + def __repr__(self): + return ( + u"".format(self) + ) + + def __eq__(self, other): + if not isinstance(other, PolicyConstraints): + return NotImplemented + + return ( + self.require_explicit_policy == other.require_explicit_policy and + self.inhibit_policy_mapping == other.inhibit_policy_mapping + ) + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash( + (self.require_explicit_policy, self.inhibit_policy_mapping) + ) + + require_explicit_policy = utils.read_only_property( + "_require_explicit_policy" + ) + inhibit_policy_mapping = utils.read_only_property( + "_inhibit_policy_mapping" + ) + + +@utils.register_interface(ExtensionType) +class CertificatePolicies(object): + oid = ExtensionOID.CERTIFICATE_POLICIES + + def __init__(self, policies): + policies = list(policies) + if not all(isinstance(x, PolicyInformation) for x in policies): + raise TypeError( + "Every item in the policies list must be a " + "PolicyInformation" + ) + + self._policies = policies + + __len__, __iter__, __getitem__ = _make_sequence_methods("_policies") + + def __repr__(self): + return "".format(self._policies) + + def __eq__(self, other): + if not isinstance(other, CertificatePolicies): + return NotImplemented + + return self._policies == other._policies + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(tuple(self._policies)) + + +class PolicyInformation(object): + def __init__(self, policy_identifier, policy_qualifiers): + if not isinstance(policy_identifier, ObjectIdentifier): + raise TypeError("policy_identifier must be an ObjectIdentifier") + + self._policy_identifier = policy_identifier + + if policy_qualifiers: + policy_qualifiers = list(policy_qualifiers) + if not all( + isinstance(x, (six.text_type, UserNotice)) + for x in policy_qualifiers + ): + raise TypeError( + "policy_qualifiers must be a list of strings and/or " + "UserNotice objects or None" + ) + + self._policy_qualifiers = policy_qualifiers + + def __repr__(self): + return ( + "".format(self) + ) + + def __eq__(self, other): + if not isinstance(other, PolicyInformation): + return NotImplemented + + return ( + self.policy_identifier == other.policy_identifier and + self.policy_qualifiers == other.policy_qualifiers + ) + + def __ne__(self, other): + return not self == other + + def __hash__(self): + if self.policy_qualifiers is not None: + pq = tuple(self.policy_qualifiers) + else: + pq = None + + return hash((self.policy_identifier, pq)) + + policy_identifier = utils.read_only_property("_policy_identifier") + policy_qualifiers = utils.read_only_property("_policy_qualifiers") + + +class UserNotice(object): + def __init__(self, notice_reference, explicit_text): + if notice_reference and not isinstance( + notice_reference, NoticeReference + ): + raise TypeError( + "notice_reference must be None or a NoticeReference" + ) + + self._notice_reference = notice_reference + self._explicit_text = explicit_text + + def __repr__(self): + return ( + "".format(self) + ) + + def __eq__(self, other): + if not isinstance(other, UserNotice): + return NotImplemented + + return ( + self.notice_reference == other.notice_reference and + self.explicit_text == other.explicit_text + ) + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash((self.notice_reference, self.explicit_text)) + + notice_reference = 
utils.read_only_property("_notice_reference") + explicit_text = utils.read_only_property("_explicit_text") + + +class NoticeReference(object): + def __init__(self, organization, notice_numbers): + self._organization = organization + notice_numbers = list(notice_numbers) + if not all(isinstance(x, int) for x in notice_numbers): + raise TypeError( + "notice_numbers must be a list of integers" + ) + + self._notice_numbers = notice_numbers + + def __repr__(self): + return ( + "".format(self) + ) + + def __eq__(self, other): + if not isinstance(other, NoticeReference): + return NotImplemented + + return ( + self.organization == other.organization and + self.notice_numbers == other.notice_numbers + ) + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash((self.organization, tuple(self.notice_numbers))) + + organization = utils.read_only_property("_organization") + notice_numbers = utils.read_only_property("_notice_numbers") + + +@utils.register_interface(ExtensionType) +class ExtendedKeyUsage(object): + oid = ExtensionOID.EXTENDED_KEY_USAGE + + def __init__(self, usages): + usages = list(usages) + if not all(isinstance(x, ObjectIdentifier) for x in usages): + raise TypeError( + "Every item in the usages list must be an ObjectIdentifier" + ) + + self._usages = usages + + __len__, __iter__, __getitem__ = _make_sequence_methods("_usages") + + def __repr__(self): + return "".format(self._usages) + + def __eq__(self, other): + if not isinstance(other, ExtendedKeyUsage): + return NotImplemented + + return self._usages == other._usages + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(tuple(self._usages)) + + +@utils.register_interface(ExtensionType) +class OCSPNoCheck(object): + oid = ExtensionOID.OCSP_NO_CHECK + + def __eq__(self, other): + if not isinstance(other, OCSPNoCheck): + return NotImplemented + + return True + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(OCSPNoCheck) + + def __repr__(self): + return "" + + +@utils.register_interface(ExtensionType) +class PrecertPoison(object): + oid = ExtensionOID.PRECERT_POISON + + def __eq__(self, other): + if not isinstance(other, PrecertPoison): + return NotImplemented + + return True + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(PrecertPoison) + + def __repr__(self): + return "" + + +@utils.register_interface(ExtensionType) +class TLSFeature(object): + oid = ExtensionOID.TLS_FEATURE + + def __init__(self, features): + features = list(features) + if ( + not all(isinstance(x, TLSFeatureType) for x in features) or + len(features) == 0 + ): + raise TypeError( + "features must be a list of elements from the TLSFeatureType " + "enum" + ) + + self._features = features + + __len__, __iter__, __getitem__ = _make_sequence_methods("_features") + + def __repr__(self): + return "".format(self) + + def __eq__(self, other): + if not isinstance(other, TLSFeature): + return NotImplemented + + return self._features == other._features + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(tuple(self._features)) + + +class TLSFeatureType(Enum): + # status_request is defined in RFC 6066 and is used for what is commonly + # called OCSP Must-Staple when present in the TLS Feature extension in an + # X.509 certificate. + status_request = 5 + # status_request_v2 is defined in RFC 6961 and allows multiple OCSP + # responses to be provided. 
It is not currently in use by clients or + # servers. + status_request_v2 = 17 + + +_TLS_FEATURE_TYPE_TO_ENUM = dict((x.value, x) for x in TLSFeatureType) + + +@utils.register_interface(ExtensionType) +class InhibitAnyPolicy(object): + oid = ExtensionOID.INHIBIT_ANY_POLICY + + def __init__(self, skip_certs): + if not isinstance(skip_certs, six.integer_types): + raise TypeError("skip_certs must be an integer") + + if skip_certs < 0: + raise ValueError("skip_certs must be a non-negative integer") + + self._skip_certs = skip_certs + + def __repr__(self): + return "".format(self) + + def __eq__(self, other): + if not isinstance(other, InhibitAnyPolicy): + return NotImplemented + + return self.skip_certs == other.skip_certs + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(self.skip_certs) + + skip_certs = utils.read_only_property("_skip_certs") + + +@utils.register_interface(ExtensionType) +class KeyUsage(object): + oid = ExtensionOID.KEY_USAGE + + def __init__(self, digital_signature, content_commitment, key_encipherment, + data_encipherment, key_agreement, key_cert_sign, crl_sign, + encipher_only, decipher_only): + if not key_agreement and (encipher_only or decipher_only): + raise ValueError( + "encipher_only and decipher_only can only be true when " + "key_agreement is true" + ) + + self._digital_signature = digital_signature + self._content_commitment = content_commitment + self._key_encipherment = key_encipherment + self._data_encipherment = data_encipherment + self._key_agreement = key_agreement + self._key_cert_sign = key_cert_sign + self._crl_sign = crl_sign + self._encipher_only = encipher_only + self._decipher_only = decipher_only + + digital_signature = utils.read_only_property("_digital_signature") + content_commitment = utils.read_only_property("_content_commitment") + key_encipherment = utils.read_only_property("_key_encipherment") + data_encipherment = utils.read_only_property("_data_encipherment") + key_agreement = utils.read_only_property("_key_agreement") + key_cert_sign = utils.read_only_property("_key_cert_sign") + crl_sign = utils.read_only_property("_crl_sign") + + @property + def encipher_only(self): + if not self.key_agreement: + raise ValueError( + "encipher_only is undefined unless key_agreement is true" + ) + else: + return self._encipher_only + + @property + def decipher_only(self): + if not self.key_agreement: + raise ValueError( + "decipher_only is undefined unless key_agreement is true" + ) + else: + return self._decipher_only + + def __repr__(self): + try: + encipher_only = self.encipher_only + decipher_only = self.decipher_only + except ValueError: + encipher_only = None + decipher_only = None + + return ("").format( + self, encipher_only, decipher_only) + + def __eq__(self, other): + if not isinstance(other, KeyUsage): + return NotImplemented + + return ( + self.digital_signature == other.digital_signature and + self.content_commitment == other.content_commitment and + self.key_encipherment == other.key_encipherment and + self.data_encipherment == other.data_encipherment and + self.key_agreement == other.key_agreement and + self.key_cert_sign == other.key_cert_sign and + self.crl_sign == other.crl_sign and + self._encipher_only == other._encipher_only and + self._decipher_only == other._decipher_only + ) + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(( + self.digital_signature, self.content_commitment, + self.key_encipherment, self.data_encipherment, + 
self.key_agreement, self.key_cert_sign, + self.crl_sign, self._encipher_only, + self._decipher_only + )) + + +@utils.register_interface(ExtensionType) +class NameConstraints(object): + oid = ExtensionOID.NAME_CONSTRAINTS + + def __init__(self, permitted_subtrees, excluded_subtrees): + if permitted_subtrees is not None: + permitted_subtrees = list(permitted_subtrees) + if not all( + isinstance(x, GeneralName) for x in permitted_subtrees + ): + raise TypeError( + "permitted_subtrees must be a list of GeneralName objects " + "or None" + ) + + self._validate_ip_name(permitted_subtrees) + + if excluded_subtrees is not None: + excluded_subtrees = list(excluded_subtrees) + if not all( + isinstance(x, GeneralName) for x in excluded_subtrees + ): + raise TypeError( + "excluded_subtrees must be a list of GeneralName objects " + "or None" + ) + + self._validate_ip_name(excluded_subtrees) + + if permitted_subtrees is None and excluded_subtrees is None: + raise ValueError( + "At least one of permitted_subtrees and excluded_subtrees " + "must not be None" + ) + + self._permitted_subtrees = permitted_subtrees + self._excluded_subtrees = excluded_subtrees + + def __eq__(self, other): + if not isinstance(other, NameConstraints): + return NotImplemented + + return ( + self.excluded_subtrees == other.excluded_subtrees and + self.permitted_subtrees == other.permitted_subtrees + ) + + def __ne__(self, other): + return not self == other + + def _validate_ip_name(self, tree): + if any(isinstance(name, IPAddress) and not isinstance( + name.value, (ipaddress.IPv4Network, ipaddress.IPv6Network) + ) for name in tree): + raise TypeError( + "IPAddress name constraints must be an IPv4Network or" + " IPv6Network object" + ) + + def __repr__(self): + return ( + u"".format(self) + ) + + def __hash__(self): + if self.permitted_subtrees is not None: + ps = tuple(self.permitted_subtrees) + else: + ps = None + + if self.excluded_subtrees is not None: + es = tuple(self.excluded_subtrees) + else: + es = None + + return hash((ps, es)) + + permitted_subtrees = utils.read_only_property("_permitted_subtrees") + excluded_subtrees = utils.read_only_property("_excluded_subtrees") + + +class Extension(object): + def __init__(self, oid, critical, value): + if not isinstance(oid, ObjectIdentifier): + raise TypeError( + "oid argument must be an ObjectIdentifier instance." 
+ ) + + if not isinstance(critical, bool): + raise TypeError("critical must be a boolean value") + + self._oid = oid + self._critical = critical + self._value = value + + oid = utils.read_only_property("_oid") + critical = utils.read_only_property("_critical") + value = utils.read_only_property("_value") + + def __repr__(self): + return ("").format(self) + + def __eq__(self, other): + if not isinstance(other, Extension): + return NotImplemented + + return ( + self.oid == other.oid and + self.critical == other.critical and + self.value == other.value + ) + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash((self.oid, self.critical, self.value)) + + +class GeneralNames(object): + def __init__(self, general_names): + general_names = list(general_names) + if not all(isinstance(x, GeneralName) for x in general_names): + raise TypeError( + "Every item in the general_names list must be an " + "object conforming to the GeneralName interface" + ) + + self._general_names = general_names + __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names") + + def get_values_for_type(self, type): + # Return the value of each GeneralName, except for OtherName instances + # which we return directly because it has two important properties not + # just one value. + objs = (i for i in self if isinstance(i, type)) + if type != OtherName: + objs = (i.value for i in objs) + return list(objs) + + def __repr__(self): + return "".format(self._general_names) + + def __eq__(self, other): + if not isinstance(other, GeneralNames): + return NotImplemented + + return self._general_names == other._general_names + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(tuple(self._general_names)) + + +@utils.register_interface(ExtensionType) +class SubjectAlternativeName(object): + oid = ExtensionOID.SUBJECT_ALTERNATIVE_NAME + + def __init__(self, general_names): + self._general_names = GeneralNames(general_names) + + __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names") + + def get_values_for_type(self, type): + return self._general_names.get_values_for_type(type) + + def __repr__(self): + return "".format(self._general_names) + + def __eq__(self, other): + if not isinstance(other, SubjectAlternativeName): + return NotImplemented + + return self._general_names == other._general_names + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(self._general_names) + + +@utils.register_interface(ExtensionType) +class IssuerAlternativeName(object): + oid = ExtensionOID.ISSUER_ALTERNATIVE_NAME + + def __init__(self, general_names): + self._general_names = GeneralNames(general_names) + + __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names") + + def get_values_for_type(self, type): + return self._general_names.get_values_for_type(type) + + def __repr__(self): + return "".format(self._general_names) + + def __eq__(self, other): + if not isinstance(other, IssuerAlternativeName): + return NotImplemented + + return self._general_names == other._general_names + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(self._general_names) + + +@utils.register_interface(ExtensionType) +class CertificateIssuer(object): + oid = CRLEntryExtensionOID.CERTIFICATE_ISSUER + + def __init__(self, general_names): + self._general_names = GeneralNames(general_names) + + __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names") + + def 
get_values_for_type(self, type): + return self._general_names.get_values_for_type(type) + + def __repr__(self): + return "".format(self._general_names) + + def __eq__(self, other): + if not isinstance(other, CertificateIssuer): + return NotImplemented + + return self._general_names == other._general_names + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(self._general_names) + + +@utils.register_interface(ExtensionType) +class CRLReason(object): + oid = CRLEntryExtensionOID.CRL_REASON + + def __init__(self, reason): + if not isinstance(reason, ReasonFlags): + raise TypeError("reason must be an element from ReasonFlags") + + self._reason = reason + + def __repr__(self): + return "".format(self._reason) + + def __eq__(self, other): + if not isinstance(other, CRLReason): + return NotImplemented + + return self.reason == other.reason + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(self.reason) + + reason = utils.read_only_property("_reason") + + +@utils.register_interface(ExtensionType) +class InvalidityDate(object): + oid = CRLEntryExtensionOID.INVALIDITY_DATE + + def __init__(self, invalidity_date): + if not isinstance(invalidity_date, datetime.datetime): + raise TypeError("invalidity_date must be a datetime.datetime") + + self._invalidity_date = invalidity_date + + def __repr__(self): + return "".format( + self._invalidity_date + ) + + def __eq__(self, other): + if not isinstance(other, InvalidityDate): + return NotImplemented + + return self.invalidity_date == other.invalidity_date + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(self.invalidity_date) + + invalidity_date = utils.read_only_property("_invalidity_date") + + +@utils.register_interface(ExtensionType) +class PrecertificateSignedCertificateTimestamps(object): + oid = ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS + + def __init__(self, signed_certificate_timestamps): + signed_certificate_timestamps = list(signed_certificate_timestamps) + if not all( + isinstance(sct, SignedCertificateTimestamp) + for sct in signed_certificate_timestamps + ): + raise TypeError( + "Every item in the signed_certificate_timestamps list must be " + "a SignedCertificateTimestamp" + ) + self._signed_certificate_timestamps = signed_certificate_timestamps + + __len__, __iter__, __getitem__ = _make_sequence_methods( + "_signed_certificate_timestamps" + ) + + def __repr__(self): + return ( + "".format( + list(self) + ) + ) + + def __hash__(self): + return hash(tuple(self._signed_certificate_timestamps)) + + def __eq__(self, other): + if not isinstance(other, PrecertificateSignedCertificateTimestamps): + return NotImplemented + + return ( + self._signed_certificate_timestamps == + other._signed_certificate_timestamps + ) + + def __ne__(self, other): + return not self == other + + +@utils.register_interface(ExtensionType) +class OCSPNonce(object): + oid = OCSPExtensionOID.NONCE + + def __init__(self, nonce): + if not isinstance(nonce, bytes): + raise TypeError("nonce must be bytes") + + self._nonce = nonce + + def __eq__(self, other): + if not isinstance(other, OCSPNonce): + return NotImplemented + + return self.nonce == other.nonce + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(self.nonce) + + def __repr__(self): + return "".format(self) + + nonce = utils.read_only_property("_nonce") + + +@utils.register_interface(ExtensionType) +class IssuingDistributionPoint(object): + oid 
= ExtensionOID.ISSUING_DISTRIBUTION_POINT + + def __init__(self, full_name, relative_name, only_contains_user_certs, + only_contains_ca_certs, only_some_reasons, indirect_crl, + only_contains_attribute_certs): + if ( + only_some_reasons and ( + not isinstance(only_some_reasons, frozenset) or not all( + isinstance(x, ReasonFlags) for x in only_some_reasons + ) + ) + ): + raise TypeError( + "only_some_reasons must be None or frozenset of ReasonFlags" + ) + + if only_some_reasons and ( + ReasonFlags.unspecified in only_some_reasons or + ReasonFlags.remove_from_crl in only_some_reasons + ): + raise ValueError( + "unspecified and remove_from_crl are not valid reasons in an " + "IssuingDistributionPoint" + ) + + if not ( + isinstance(only_contains_user_certs, bool) and + isinstance(only_contains_ca_certs, bool) and + isinstance(indirect_crl, bool) and + isinstance(only_contains_attribute_certs, bool) + ): + raise TypeError( + "only_contains_user_certs, only_contains_ca_certs, " + "indirect_crl and only_contains_attribute_certs " + "must all be boolean." + ) + + crl_constraints = [ + only_contains_user_certs, only_contains_ca_certs, + indirect_crl, only_contains_attribute_certs + ] + + if len([x for x in crl_constraints if x]) > 1: + raise ValueError( + "Only one of the following can be set to True: " + "only_contains_user_certs, only_contains_ca_certs, " + "indirect_crl, only_contains_attribute_certs" + ) + + if ( + not any([ + only_contains_user_certs, only_contains_ca_certs, + indirect_crl, only_contains_attribute_certs, full_name, + relative_name, only_some_reasons + ]) + ): + raise ValueError( + "Cannot create empty extension: " + "if only_contains_user_certs, only_contains_ca_certs, " + "indirect_crl, and only_contains_attribute_certs are all False" + ", then either full_name, relative_name, or only_some_reasons " + "must have a value." 
+ ) + + self._only_contains_user_certs = only_contains_user_certs + self._only_contains_ca_certs = only_contains_ca_certs + self._indirect_crl = indirect_crl + self._only_contains_attribute_certs = only_contains_attribute_certs + self._only_some_reasons = only_some_reasons + self._full_name = full_name + self._relative_name = relative_name + + def __repr__(self): + return ( + "".format(self) + ) + + def __eq__(self, other): + if not isinstance(other, IssuingDistributionPoint): + return NotImplemented + + return ( + self.full_name == other.full_name and + self.relative_name == other.relative_name and + self.only_contains_user_certs == other.only_contains_user_certs and + self.only_contains_ca_certs == other.only_contains_ca_certs and + self.only_some_reasons == other.only_some_reasons and + self.indirect_crl == other.indirect_crl and + self.only_contains_attribute_certs == + other.only_contains_attribute_certs + ) + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(( + self.full_name, + self.relative_name, + self.only_contains_user_certs, + self.only_contains_ca_certs, + self.only_some_reasons, + self.indirect_crl, + self.only_contains_attribute_certs, + )) + + full_name = utils.read_only_property("_full_name") + relative_name = utils.read_only_property("_relative_name") + only_contains_user_certs = utils.read_only_property( + "_only_contains_user_certs" + ) + only_contains_ca_certs = utils.read_only_property( + "_only_contains_ca_certs" + ) + only_some_reasons = utils.read_only_property("_only_some_reasons") + indirect_crl = utils.read_only_property("_indirect_crl") + only_contains_attribute_certs = utils.read_only_property( + "_only_contains_attribute_certs" + ) + + +@utils.register_interface(ExtensionType) +class UnrecognizedExtension(object): + def __init__(self, oid, value): + if not isinstance(oid, ObjectIdentifier): + raise TypeError("oid must be an ObjectIdentifier") + self._oid = oid + self._value = value + + oid = utils.read_only_property("_oid") + value = utils.read_only_property("_value") + + def __repr__(self): + return ( + "".format( + self + ) + ) + + def __eq__(self, other): + if not isinstance(other, UnrecognizedExtension): + return NotImplemented + + return self.oid == other.oid and self.value == other.value + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash((self.oid, self.value)) diff --git a/packages/wakatime/packages/py27/cryptography/x509/general_name.py b/packages/wakatime/packages/py27/cryptography/x509/general_name.py new file mode 100644 index 0000000..1233841 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/x509/general_name.py @@ -0,0 +1,360 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
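Editor's note: the Extensions container defined in extensions.py above is what surfaces these classes when a certificate is parsed: get_extension_for_class returns the typed value, get_extension_for_oid returns the generic Extension wrapper, and OIDs the library does not know come back as UnrecognizedExtension. A short, hedged sketch of that read path; cert is a hypothetical x509.Certificate loaded elsewhere (for example with load_pem_x509_certificate), and the KeyUsage lookup assumes the certificate actually carries that extension.

    from cryptography import x509
    from cryptography.x509.oid import ExtensionOID

    # cert is assumed to be an x509.Certificate parsed elsewhere.
    try:
        bc = cert.extensions.get_extension_for_class(x509.BasicConstraints)
        print(bc.critical, bc.value.ca, bc.value.path_length)
    except x509.ExtensionNotFound:
        print("no BasicConstraints extension")

    # Lookup by OID returns the Extension wrapper; .value is the typed object.
    # (Raises ExtensionNotFound if the certificate has no KeyUsage extension.)
    ku = cert.extensions.get_extension_for_oid(ExtensionOID.KEY_USAGE).value
    print(ku.digital_signature, ku.key_cert_sign)
    # Note: ku.encipher_only / ku.decipher_only raise ValueError unless
    # ku.key_agreement is True, per the KeyUsage properties above.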
+ +from __future__ import absolute_import, division, print_function + +import abc +import ipaddress +import warnings +from email.utils import parseaddr + +import six +from six.moves import urllib_parse + +from cryptography import utils +from cryptography.x509.name import Name +from cryptography.x509.oid import ObjectIdentifier + + +_GENERAL_NAMES = { + 0: "otherName", + 1: "rfc822Name", + 2: "dNSName", + 3: "x400Address", + 4: "directoryName", + 5: "ediPartyName", + 6: "uniformResourceIdentifier", + 7: "iPAddress", + 8: "registeredID", +} + + +def _lazy_import_idna(): + # Import idna lazily becase it allocates a decent amount of memory, and + # we're only using it in deprecated paths. + try: + import idna + return idna + except ImportError: + raise ImportError( + "idna is not installed, but a deprecated feature that requires it" + " was used. See: https://cryptography.io/en/latest/faq/#importe" + "rror-idna-is-not-installed" + ) + + +class UnsupportedGeneralNameType(Exception): + def __init__(self, msg, type): + super(UnsupportedGeneralNameType, self).__init__(msg) + self.type = type + + +@six.add_metaclass(abc.ABCMeta) +class GeneralName(object): + @abc.abstractproperty + def value(self): + """ + Return the value of the object + """ + + +@utils.register_interface(GeneralName) +class RFC822Name(object): + def __init__(self, value): + if isinstance(value, six.text_type): + try: + value.encode("ascii") + except UnicodeEncodeError: + value = self._idna_encode(value) + warnings.warn( + "RFC822Name values should be passed as an A-label string. " + "This means unicode characters should be encoded via " + "idna. Support for passing unicode strings (aka U-label) " + "will be removed in a future version.", + utils.PersistentlyDeprecated2017, + stacklevel=2, + ) + else: + raise TypeError("value must be string") + + name, address = parseaddr(value) + if name or not address: + # parseaddr has found a name (e.g. Name ) or the entire + # value is an empty string. + raise ValueError("Invalid rfc822name value") + + self._value = value + + value = utils.read_only_property("_value") + + @classmethod + def _init_without_validation(cls, value): + instance = cls.__new__(cls) + instance._value = value + return instance + + def _idna_encode(self, value): + idna = _lazy_import_idna() + _, address = parseaddr(value) + parts = address.split(u"@") + return parts[0] + "@" + idna.encode(parts[1]).decode("ascii") + + def __repr__(self): + return "".format(self.value) + + def __eq__(self, other): + if not isinstance(other, RFC822Name): + return NotImplemented + + return self.value == other.value + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(self.value) + + +def _idna_encode(value): + idna = _lazy_import_idna() + # Retain prefixes '*.' for common/alt names and '.' for name constraints + for prefix in ['*.', '.']: + if value.startswith(prefix): + value = value[len(prefix):] + return prefix + idna.encode(value).decode("ascii") + return idna.encode(value).decode("ascii") + + +@utils.register_interface(GeneralName) +class DNSName(object): + def __init__(self, value): + if isinstance(value, six.text_type): + try: + value.encode("ascii") + except UnicodeEncodeError: + value = _idna_encode(value) + warnings.warn( + "DNSName values should be passed as an A-label string. " + "This means unicode characters should be encoded via " + "idna. 
Support for passing unicode strings (aka U-label) " + "will be removed in a future version.", + utils.PersistentlyDeprecated2017, + stacklevel=2, + ) + else: + raise TypeError("value must be string") + + self._value = value + + value = utils.read_only_property("_value") + + @classmethod + def _init_without_validation(cls, value): + instance = cls.__new__(cls) + instance._value = value + return instance + + def __repr__(self): + return "".format(self.value) + + def __eq__(self, other): + if not isinstance(other, DNSName): + return NotImplemented + + return self.value == other.value + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(self.value) + + +@utils.register_interface(GeneralName) +class UniformResourceIdentifier(object): + def __init__(self, value): + if isinstance(value, six.text_type): + try: + value.encode("ascii") + except UnicodeEncodeError: + value = self._idna_encode(value) + warnings.warn( + "URI values should be passed as an A-label string. " + "This means unicode characters should be encoded via " + "idna. Support for passing unicode strings (aka U-label) " + " will be removed in a future version.", + utils.PersistentlyDeprecated2017, + stacklevel=2, + ) + else: + raise TypeError("value must be string") + + self._value = value + + value = utils.read_only_property("_value") + + @classmethod + def _init_without_validation(cls, value): + instance = cls.__new__(cls) + instance._value = value + return instance + + def _idna_encode(self, value): + idna = _lazy_import_idna() + parsed = urllib_parse.urlparse(value) + if parsed.port: + netloc = ( + idna.encode(parsed.hostname) + + ":{}".format(parsed.port).encode("ascii") + ).decode("ascii") + else: + netloc = idna.encode(parsed.hostname).decode("ascii") + + # Note that building a URL in this fashion means it should be + # semantically indistinguishable from the original but is not + # guaranteed to be exactly the same. 
+ return urllib_parse.urlunparse(( + parsed.scheme, + netloc, + parsed.path, + parsed.params, + parsed.query, + parsed.fragment + )) + + def __repr__(self): + return "".format(self.value) + + def __eq__(self, other): + if not isinstance(other, UniformResourceIdentifier): + return NotImplemented + + return self.value == other.value + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(self.value) + + +@utils.register_interface(GeneralName) +class DirectoryName(object): + def __init__(self, value): + if not isinstance(value, Name): + raise TypeError("value must be a Name") + + self._value = value + + value = utils.read_only_property("_value") + + def __repr__(self): + return "".format(self.value) + + def __eq__(self, other): + if not isinstance(other, DirectoryName): + return NotImplemented + + return self.value == other.value + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(self.value) + + +@utils.register_interface(GeneralName) +class RegisteredID(object): + def __init__(self, value): + if not isinstance(value, ObjectIdentifier): + raise TypeError("value must be an ObjectIdentifier") + + self._value = value + + value = utils.read_only_property("_value") + + def __repr__(self): + return "".format(self.value) + + def __eq__(self, other): + if not isinstance(other, RegisteredID): + return NotImplemented + + return self.value == other.value + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(self.value) + + +@utils.register_interface(GeneralName) +class IPAddress(object): + def __init__(self, value): + if not isinstance( + value, + ( + ipaddress.IPv4Address, + ipaddress.IPv6Address, + ipaddress.IPv4Network, + ipaddress.IPv6Network + ) + ): + raise TypeError( + "value must be an instance of ipaddress.IPv4Address, " + "ipaddress.IPv6Address, ipaddress.IPv4Network, or " + "ipaddress.IPv6Network" + ) + + self._value = value + + value = utils.read_only_property("_value") + + def __repr__(self): + return "".format(self.value) + + def __eq__(self, other): + if not isinstance(other, IPAddress): + return NotImplemented + + return self.value == other.value + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(self.value) + + +@utils.register_interface(GeneralName) +class OtherName(object): + def __init__(self, type_id, value): + if not isinstance(type_id, ObjectIdentifier): + raise TypeError("type_id must be an ObjectIdentifier") + if not isinstance(value, bytes): + raise TypeError("value must be a binary string") + + self._type_id = type_id + self._value = value + + type_id = utils.read_only_property("_type_id") + value = utils.read_only_property("_value") + + def __repr__(self): + return "".format( + self.type_id, self.value) + + def __eq__(self, other): + if not isinstance(other, OtherName): + return NotImplemented + + return self.type_id == other.type_id and self.value == other.value + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash((self.type_id, self.value)) diff --git a/packages/wakatime/packages/py27/cryptography/x509/name.py b/packages/wakatime/packages/py27/cryptography/x509/name.py new file mode 100644 index 0000000..ca2a175 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/x509/name.py @@ -0,0 +1,259 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. 
See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +from enum import Enum + +import six + +from cryptography import utils +from cryptography.x509.oid import NameOID, ObjectIdentifier + + +class _ASN1Type(Enum): + UTF8String = 12 + NumericString = 18 + PrintableString = 19 + T61String = 20 + IA5String = 22 + UTCTime = 23 + GeneralizedTime = 24 + VisibleString = 26 + UniversalString = 28 + BMPString = 30 + + +_ASN1_TYPE_TO_ENUM = dict((i.value, i) for i in _ASN1Type) +_SENTINEL = object() +_NAMEOID_DEFAULT_TYPE = { + NameOID.COUNTRY_NAME: _ASN1Type.PrintableString, + NameOID.JURISDICTION_COUNTRY_NAME: _ASN1Type.PrintableString, + NameOID.SERIAL_NUMBER: _ASN1Type.PrintableString, + NameOID.DN_QUALIFIER: _ASN1Type.PrintableString, + NameOID.EMAIL_ADDRESS: _ASN1Type.IA5String, + NameOID.DOMAIN_COMPONENT: _ASN1Type.IA5String, +} + +#: Short attribute names from RFC 4514: +#: https://tools.ietf.org/html/rfc4514#page-7 +_NAMEOID_TO_NAME = { + NameOID.COMMON_NAME: 'CN', + NameOID.LOCALITY_NAME: 'L', + NameOID.STATE_OR_PROVINCE_NAME: 'ST', + NameOID.ORGANIZATION_NAME: 'O', + NameOID.ORGANIZATIONAL_UNIT_NAME: 'OU', + NameOID.COUNTRY_NAME: 'C', + NameOID.STREET_ADDRESS: 'STREET', + NameOID.DOMAIN_COMPONENT: 'DC', + NameOID.USER_ID: 'UID', +} + + +def _escape_dn_value(val): + """Escape special characters in RFC4514 Distinguished Name value.""" + + # See https://tools.ietf.org/html/rfc4514#section-2.4 + val = val.replace('\\', '\\\\') + val = val.replace('"', '\\"') + val = val.replace('+', '\\+') + val = val.replace(',', '\\,') + val = val.replace(';', '\\;') + val = val.replace('<', '\\<') + val = val.replace('>', '\\>') + val = val.replace('\0', '\\00') + + if val[0] in ('#', ' '): + val = '\\' + val + if val[-1] == ' ': + val = val[:-1] + '\\ ' + + return val + + +class NameAttribute(object): + def __init__(self, oid, value, _type=_SENTINEL): + if not isinstance(oid, ObjectIdentifier): + raise TypeError( + "oid argument must be an ObjectIdentifier instance." + ) + + if not isinstance(value, six.text_type): + raise TypeError( + "value argument must be a text type." + ) + + if ( + oid == NameOID.COUNTRY_NAME or + oid == NameOID.JURISDICTION_COUNTRY_NAME + ): + if len(value.encode("utf8")) != 2: + raise ValueError( + "Country name must be a 2 character country code" + ) + + if len(value) == 0: + raise ValueError("Value cannot be an empty string") + + # The appropriate ASN1 string type varies by OID and is defined across + # multiple RFCs including 2459, 3280, and 5280. In general UTF8String + # is preferred (2459), but 3280 and 5280 specify several OIDs with + # alternate types. This means when we see the sentinel value we need + # to look up whether the OID has a non-UTF8 type. If it does, set it + # to that. Otherwise, UTF8! + if _type == _SENTINEL: + _type = _NAMEOID_DEFAULT_TYPE.get(oid, _ASN1Type.UTF8String) + + if not isinstance(_type, _ASN1Type): + raise TypeError("_type must be from the _ASN1Type enum") + + self._oid = oid + self._value = value + self._type = _type + + oid = utils.read_only_property("_oid") + value = utils.read_only_property("_value") + + def rfc4514_string(self): + """ + Format as RFC4514 Distinguished Name string. + + Use short attribute name if available, otherwise fall back to OID + dotted string. 
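NameAttribute.__init__ above enforces text values, rejects empty strings, requires two-byte country codes, and picks a default ASN.1 string type per OID. A short sketch of that validation, assuming the upstream cryptography package:

    from cryptography import x509
    from cryptography.x509.oid import NameOID

    x509.NameAttribute(NameOID.COUNTRY_NAME, u"US")       # ok: 2-byte code
    try:
        x509.NameAttribute(NameOID.COUNTRY_NAME, u"USA")  # 3 bytes, rejected
    except ValueError as exc:
        print(exc)  # Country name must be a 2 character country code

    # COUNTRY_NAME, SERIAL_NUMBER, DN_QUALIFIER, etc. default to
    # PrintableString via _NAMEOID_DEFAULT_TYPE; everything else defaults to
    # UTF8String unless an explicit _type is passed.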
+ """ + key = _NAMEOID_TO_NAME.get(self.oid, self.oid.dotted_string) + return '%s=%s' % (key, _escape_dn_value(self.value)) + + def __eq__(self, other): + if not isinstance(other, NameAttribute): + return NotImplemented + + return ( + self.oid == other.oid and + self.value == other.value + ) + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash((self.oid, self.value)) + + def __repr__(self): + return "".format(self) + + +class RelativeDistinguishedName(object): + def __init__(self, attributes): + attributes = list(attributes) + if not attributes: + raise ValueError("a relative distinguished name cannot be empty") + if not all(isinstance(x, NameAttribute) for x in attributes): + raise TypeError("attributes must be an iterable of NameAttribute") + + # Keep list and frozenset to preserve attribute order where it matters + self._attributes = attributes + self._attribute_set = frozenset(attributes) + + if len(self._attribute_set) != len(attributes): + raise ValueError("duplicate attributes are not allowed") + + def get_attributes_for_oid(self, oid): + return [i for i in self if i.oid == oid] + + def rfc4514_string(self): + """ + Format as RFC4514 Distinguished Name string. + + Within each RDN, attributes are joined by '+', although that is rarely + used in certificates. + """ + return '+'.join(attr.rfc4514_string() for attr in self._attributes) + + def __eq__(self, other): + if not isinstance(other, RelativeDistinguishedName): + return NotImplemented + + return self._attribute_set == other._attribute_set + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(self._attribute_set) + + def __iter__(self): + return iter(self._attributes) + + def __len__(self): + return len(self._attributes) + + def __repr__(self): + return "".format(self.rfc4514_string()) + + +class Name(object): + def __init__(self, attributes): + attributes = list(attributes) + if all(isinstance(x, NameAttribute) for x in attributes): + self._attributes = [ + RelativeDistinguishedName([x]) for x in attributes + ] + elif all(isinstance(x, RelativeDistinguishedName) for x in attributes): + self._attributes = attributes + else: + raise TypeError( + "attributes must be a list of NameAttribute" + " or a list RelativeDistinguishedName" + ) + + def rfc4514_string(self): + """ + Format as RFC4514 Distinguished Name string. + For example 'CN=foobar.com,O=Foo Corp,C=US' + + An X.509 name is a two-level structure: a list of sets of attributes. + Each list element is separated by ',' and within each list element, set + elements are separated by '+'. The latter is almost never used in + real world certificates. + """ + return ','.join(attr.rfc4514_string() for attr in self._attributes) + + def get_attributes_for_oid(self, oid): + return [i for i in self if i.oid == oid] + + @property + def rdns(self): + return self._attributes + + def public_bytes(self, backend): + return backend.x509_name_bytes(self) + + def __eq__(self, other): + if not isinstance(other, Name): + return NotImplemented + + return self._attributes == other._attributes + + def __ne__(self, other): + return not self == other + + def __hash__(self): + # TODO: this is relatively expensive, if this looks like a bottleneck + # for you, consider optimizing! 
+ return hash(tuple(self._attributes)) + + def __iter__(self): + for rdn in self._attributes: + for ava in rdn: + yield ava + + def __len__(self): + return sum(len(rdn) for rdn in self._attributes) + + def __repr__(self): + if six.PY2: + return "".format(self.rfc4514_string().encode('utf8')) + else: + return "".format(self.rfc4514_string()) diff --git a/packages/wakatime/packages/py27/cryptography/x509/ocsp.py b/packages/wakatime/packages/py27/cryptography/x509/ocsp.py new file mode 100644 index 0000000..b15063d --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/x509/ocsp.py @@ -0,0 +1,429 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc +import datetime +from enum import Enum + +import six + +from cryptography import x509 +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric import ed25519, ed448 +from cryptography.x509.base import ( + _EARLIEST_UTC_TIME, _convert_to_naive_utc_time, _reject_duplicate_extension +) + + +_OIDS_TO_HASH = { + "1.3.14.3.2.26": hashes.SHA1(), + "2.16.840.1.101.3.4.2.4": hashes.SHA224(), + "2.16.840.1.101.3.4.2.1": hashes.SHA256(), + "2.16.840.1.101.3.4.2.2": hashes.SHA384(), + "2.16.840.1.101.3.4.2.3": hashes.SHA512(), +} + + +class OCSPResponderEncoding(Enum): + HASH = "By Hash" + NAME = "By Name" + + +class OCSPResponseStatus(Enum): + SUCCESSFUL = 0 + MALFORMED_REQUEST = 1 + INTERNAL_ERROR = 2 + TRY_LATER = 3 + SIG_REQUIRED = 5 + UNAUTHORIZED = 6 + + +_RESPONSE_STATUS_TO_ENUM = dict((x.value, x) for x in OCSPResponseStatus) +_ALLOWED_HASHES = ( + hashes.SHA1, hashes.SHA224, hashes.SHA256, + hashes.SHA384, hashes.SHA512 +) + + +def _verify_algorithm(algorithm): + if not isinstance(algorithm, _ALLOWED_HASHES): + raise ValueError( + "Algorithm must be SHA1, SHA224, SHA256, SHA384, or SHA512" + ) + + +class OCSPCertStatus(Enum): + GOOD = 0 + REVOKED = 1 + UNKNOWN = 2 + + +_CERT_STATUS_TO_ENUM = dict((x.value, x) for x in OCSPCertStatus) + + +def load_der_ocsp_request(data): + from cryptography.hazmat.backends.openssl.backend import backend + return backend.load_der_ocsp_request(data) + + +def load_der_ocsp_response(data): + from cryptography.hazmat.backends.openssl.backend import backend + return backend.load_der_ocsp_response(data) + + +class OCSPRequestBuilder(object): + def __init__(self, request=None, extensions=[]): + self._request = request + self._extensions = extensions + + def add_certificate(self, cert, issuer, algorithm): + if self._request is not None: + raise ValueError("Only one certificate can be added to a request") + + _verify_algorithm(algorithm) + if ( + not isinstance(cert, x509.Certificate) or + not isinstance(issuer, x509.Certificate) + ): + raise TypeError("cert and issuer must be a Certificate") + + return OCSPRequestBuilder((cert, issuer, algorithm), self._extensions) + + def add_extension(self, extension, critical): + if not isinstance(extension, x509.ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = x509.Extension(extension.oid, critical, extension) + _reject_duplicate_extension(extension, self._extensions) + + return OCSPRequestBuilder( + self._request, self._extensions + [extension] + ) + + def build(self): + from cryptography.hazmat.backends.openssl.backend import backend + if self._request is None: + raise ValueError("You 
must add a certificate before building") + + return backend.create_ocsp_request(self) + + +class _SingleResponse(object): + def __init__(self, cert, issuer, algorithm, cert_status, this_update, + next_update, revocation_time, revocation_reason): + if ( + not isinstance(cert, x509.Certificate) or + not isinstance(issuer, x509.Certificate) + ): + raise TypeError("cert and issuer must be a Certificate") + + _verify_algorithm(algorithm) + if not isinstance(this_update, datetime.datetime): + raise TypeError("this_update must be a datetime object") + if ( + next_update is not None and + not isinstance(next_update, datetime.datetime) + ): + raise TypeError("next_update must be a datetime object or None") + + self._cert = cert + self._issuer = issuer + self._algorithm = algorithm + self._this_update = this_update + self._next_update = next_update + + if not isinstance(cert_status, OCSPCertStatus): + raise TypeError( + "cert_status must be an item from the OCSPCertStatus enum" + ) + if cert_status is not OCSPCertStatus.REVOKED: + if revocation_time is not None: + raise ValueError( + "revocation_time can only be provided if the certificate " + "is revoked" + ) + if revocation_reason is not None: + raise ValueError( + "revocation_reason can only be provided if the certificate" + " is revoked" + ) + else: + if not isinstance(revocation_time, datetime.datetime): + raise TypeError("revocation_time must be a datetime object") + + revocation_time = _convert_to_naive_utc_time(revocation_time) + if revocation_time < _EARLIEST_UTC_TIME: + raise ValueError('The revocation_time must be on or after' + ' 1950 January 1.') + + if ( + revocation_reason is not None and + not isinstance(revocation_reason, x509.ReasonFlags) + ): + raise TypeError( + "revocation_reason must be an item from the ReasonFlags " + "enum or None" + ) + + self._cert_status = cert_status + self._revocation_time = revocation_time + self._revocation_reason = revocation_reason + + +class OCSPResponseBuilder(object): + def __init__(self, response=None, responder_id=None, certs=None, + extensions=[]): + self._response = response + self._responder_id = responder_id + self._certs = certs + self._extensions = extensions + + def add_response(self, cert, issuer, algorithm, cert_status, this_update, + next_update, revocation_time, revocation_reason): + if self._response is not None: + raise ValueError("Only one response per OCSPResponse.") + + singleresp = _SingleResponse( + cert, issuer, algorithm, cert_status, this_update, next_update, + revocation_time, revocation_reason + ) + return OCSPResponseBuilder( + singleresp, self._responder_id, + self._certs, self._extensions, + ) + + def responder_id(self, encoding, responder_cert): + if self._responder_id is not None: + raise ValueError("responder_id can only be set once") + if not isinstance(responder_cert, x509.Certificate): + raise TypeError("responder_cert must be a Certificate") + if not isinstance(encoding, OCSPResponderEncoding): + raise TypeError( + "encoding must be an element from OCSPResponderEncoding" + ) + + return OCSPResponseBuilder( + self._response, (responder_cert, encoding), + self._certs, self._extensions, + ) + + def certificates(self, certs): + if self._certs is not None: + raise ValueError("certificates may only be set once") + certs = list(certs) + if len(certs) == 0: + raise ValueError("certs must not be an empty list") + if not all(isinstance(x, x509.Certificate) for x in certs): + raise TypeError("certs must be a list of Certificates") + return OCSPResponseBuilder( + 
self._response, self._responder_id, + certs, self._extensions, + ) + + def add_extension(self, extension, critical): + if not isinstance(extension, x509.ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = x509.Extension(extension.oid, critical, extension) + _reject_duplicate_extension(extension, self._extensions) + + return OCSPResponseBuilder( + self._response, self._responder_id, + self._certs, self._extensions + [extension], + ) + + def sign(self, private_key, algorithm): + from cryptography.hazmat.backends.openssl.backend import backend + if self._response is None: + raise ValueError("You must add a response before signing") + if self._responder_id is None: + raise ValueError("You must add a responder_id before signing") + + if isinstance(private_key, + (ed25519.Ed25519PrivateKey, ed448.Ed448PrivateKey)): + if algorithm is not None: + raise ValueError( + "algorithm must be None when signing via ed25519 or ed448" + ) + elif not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError("Algorithm must be a registered hash algorithm.") + + return backend.create_ocsp_response( + OCSPResponseStatus.SUCCESSFUL, self, private_key, algorithm + ) + + @classmethod + def build_unsuccessful(cls, response_status): + from cryptography.hazmat.backends.openssl.backend import backend + if not isinstance(response_status, OCSPResponseStatus): + raise TypeError( + "response_status must be an item from OCSPResponseStatus" + ) + if response_status is OCSPResponseStatus.SUCCESSFUL: + raise ValueError("response_status cannot be SUCCESSFUL") + + return backend.create_ocsp_response(response_status, None, None, None) + + +@six.add_metaclass(abc.ABCMeta) +class OCSPRequest(object): + @abc.abstractproperty + def issuer_key_hash(self): + """ + The hash of the issuer public key + """ + + @abc.abstractproperty + def issuer_name_hash(self): + """ + The hash of the issuer name + """ + + @abc.abstractproperty + def hash_algorithm(self): + """ + The hash algorithm used in the issuer name and key hashes + """ + + @abc.abstractproperty + def serial_number(self): + """ + The serial number of the cert whose status is being checked + """ + @abc.abstractmethod + def public_bytes(self, encoding): + """ + Serializes the request to DER + """ + + @abc.abstractproperty + def extensions(self): + """ + The list of request extensions. Not single request extensions. + """ + + +@six.add_metaclass(abc.ABCMeta) +class OCSPResponse(object): + @abc.abstractproperty + def response_status(self): + """ + The status of the response. This is a value from the OCSPResponseStatus + enumeration + """ + + @abc.abstractproperty + def signature_algorithm_oid(self): + """ + The ObjectIdentifier of the signature algorithm + """ + + @abc.abstractproperty + def signature_hash_algorithm(self): + """ + Returns a HashAlgorithm corresponding to the type of the digest signed + """ + + @abc.abstractproperty + def signature(self): + """ + The signature bytes + """ + + @abc.abstractproperty + def tbs_response_bytes(self): + """ + The tbsResponseData bytes + """ + + @abc.abstractproperty + def certificates(self): + """ + A list of certificates used to help build a chain to verify the OCSP + response. This situation occurs when the OCSP responder uses a delegate + certificate. 
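A sketch of driving the OCSPRequestBuilder defined above: each method returns a new builder, only one certificate may be added, and build() hands off to the OpenSSL backend. Assumes the upstream cryptography package and hypothetical PEM file paths:

    from cryptography import x509
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes, serialization
    from cryptography.x509 import ocsp

    with open("cert.pem", "rb") as f:        # hypothetical path
        cert = x509.load_pem_x509_certificate(f.read(), default_backend())
    with open("issuer.pem", "rb") as f:      # hypothetical path
        issuer = x509.load_pem_x509_certificate(f.read(), default_backend())

    builder = ocsp.OCSPRequestBuilder()
    builder = builder.add_certificate(cert, issuer, hashes.SHA1())
    request = builder.build()
    der_request = request.public_bytes(serialization.Encoding.DER)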
+ """ + + @abc.abstractproperty + def responder_key_hash(self): + """ + The responder's key hash or None + """ + + @abc.abstractproperty + def responder_name(self): + """ + The responder's Name or None + """ + + @abc.abstractproperty + def produced_at(self): + """ + The time the response was produced + """ + + @abc.abstractproperty + def certificate_status(self): + """ + The status of the certificate (an element from the OCSPCertStatus enum) + """ + + @abc.abstractproperty + def revocation_time(self): + """ + The date of when the certificate was revoked or None if not + revoked. + """ + + @abc.abstractproperty + def revocation_reason(self): + """ + The reason the certificate was revoked or None if not specified or + not revoked. + """ + + @abc.abstractproperty + def this_update(self): + """ + The most recent time at which the status being indicated is known by + the responder to have been correct + """ + + @abc.abstractproperty + def next_update(self): + """ + The time when newer information will be available + """ + + @abc.abstractproperty + def issuer_key_hash(self): + """ + The hash of the issuer public key + """ + + @abc.abstractproperty + def issuer_name_hash(self): + """ + The hash of the issuer name + """ + + @abc.abstractproperty + def hash_algorithm(self): + """ + The hash algorithm used in the issuer name and key hashes + """ + + @abc.abstractproperty + def serial_number(self): + """ + The serial number of the cert whose status is being checked + """ + + @abc.abstractproperty + def extensions(self): + """ + The list of response extensions. Not single response extensions. + """ diff --git a/packages/wakatime/packages/py27/cryptography/x509/oid.py b/packages/wakatime/packages/py27/cryptography/x509/oid.py new file mode 100644 index 0000000..c1e5dc5 --- /dev/null +++ b/packages/wakatime/packages/py27/cryptography/x509/oid.py @@ -0,0 +1,232 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import absolute_import, division, print_function + +from cryptography.hazmat._oid import ObjectIdentifier +from cryptography.hazmat.primitives import hashes + + +class ExtensionOID(object): + SUBJECT_DIRECTORY_ATTRIBUTES = ObjectIdentifier("2.5.29.9") + SUBJECT_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.14") + KEY_USAGE = ObjectIdentifier("2.5.29.15") + SUBJECT_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.17") + ISSUER_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.18") + BASIC_CONSTRAINTS = ObjectIdentifier("2.5.29.19") + NAME_CONSTRAINTS = ObjectIdentifier("2.5.29.30") + CRL_DISTRIBUTION_POINTS = ObjectIdentifier("2.5.29.31") + CERTIFICATE_POLICIES = ObjectIdentifier("2.5.29.32") + POLICY_MAPPINGS = ObjectIdentifier("2.5.29.33") + AUTHORITY_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.35") + POLICY_CONSTRAINTS = ObjectIdentifier("2.5.29.36") + EXTENDED_KEY_USAGE = ObjectIdentifier("2.5.29.37") + FRESHEST_CRL = ObjectIdentifier("2.5.29.46") + INHIBIT_ANY_POLICY = ObjectIdentifier("2.5.29.54") + ISSUING_DISTRIBUTION_POINT = ObjectIdentifier("2.5.29.28") + AUTHORITY_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.1") + SUBJECT_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.11") + OCSP_NO_CHECK = ObjectIdentifier("1.3.6.1.5.5.7.48.1.5") + TLS_FEATURE = ObjectIdentifier("1.3.6.1.5.5.7.1.24") + CRL_NUMBER = ObjectIdentifier("2.5.29.20") + DELTA_CRL_INDICATOR = ObjectIdentifier("2.5.29.27") + PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS = ( + ObjectIdentifier("1.3.6.1.4.1.11129.2.4.2") + ) + PRECERT_POISON = ( + ObjectIdentifier("1.3.6.1.4.1.11129.2.4.3") + ) + + +class OCSPExtensionOID(object): + NONCE = ObjectIdentifier("1.3.6.1.5.5.7.48.1.2") + + +class CRLEntryExtensionOID(object): + CERTIFICATE_ISSUER = ObjectIdentifier("2.5.29.29") + CRL_REASON = ObjectIdentifier("2.5.29.21") + INVALIDITY_DATE = ObjectIdentifier("2.5.29.24") + + +class NameOID(object): + COMMON_NAME = ObjectIdentifier("2.5.4.3") + COUNTRY_NAME = ObjectIdentifier("2.5.4.6") + LOCALITY_NAME = ObjectIdentifier("2.5.4.7") + STATE_OR_PROVINCE_NAME = ObjectIdentifier("2.5.4.8") + STREET_ADDRESS = ObjectIdentifier("2.5.4.9") + ORGANIZATION_NAME = ObjectIdentifier("2.5.4.10") + ORGANIZATIONAL_UNIT_NAME = ObjectIdentifier("2.5.4.11") + SERIAL_NUMBER = ObjectIdentifier("2.5.4.5") + SURNAME = ObjectIdentifier("2.5.4.4") + GIVEN_NAME = ObjectIdentifier("2.5.4.42") + TITLE = ObjectIdentifier("2.5.4.12") + GENERATION_QUALIFIER = ObjectIdentifier("2.5.4.44") + X500_UNIQUE_IDENTIFIER = ObjectIdentifier("2.5.4.45") + DN_QUALIFIER = ObjectIdentifier("2.5.4.46") + PSEUDONYM = ObjectIdentifier("2.5.4.65") + USER_ID = ObjectIdentifier("0.9.2342.19200300.100.1.1") + DOMAIN_COMPONENT = ObjectIdentifier("0.9.2342.19200300.100.1.25") + EMAIL_ADDRESS = ObjectIdentifier("1.2.840.113549.1.9.1") + JURISDICTION_COUNTRY_NAME = ObjectIdentifier("1.3.6.1.4.1.311.60.2.1.3") + JURISDICTION_LOCALITY_NAME = ObjectIdentifier("1.3.6.1.4.1.311.60.2.1.1") + JURISDICTION_STATE_OR_PROVINCE_NAME = ObjectIdentifier( + "1.3.6.1.4.1.311.60.2.1.2" + ) + BUSINESS_CATEGORY = ObjectIdentifier("2.5.4.15") + POSTAL_ADDRESS = ObjectIdentifier("2.5.4.16") + POSTAL_CODE = ObjectIdentifier("2.5.4.17") + + +class SignatureAlgorithmOID(object): + RSA_WITH_MD5 = ObjectIdentifier("1.2.840.113549.1.1.4") + RSA_WITH_SHA1 = ObjectIdentifier("1.2.840.113549.1.1.5") + # This is an alternate OID for RSA with SHA1 that is occasionally seen + _RSA_WITH_SHA1 = ObjectIdentifier("1.3.14.3.2.29") + RSA_WITH_SHA224 = ObjectIdentifier("1.2.840.113549.1.1.14") + RSA_WITH_SHA256 
= ObjectIdentifier("1.2.840.113549.1.1.11") + RSA_WITH_SHA384 = ObjectIdentifier("1.2.840.113549.1.1.12") + RSA_WITH_SHA512 = ObjectIdentifier("1.2.840.113549.1.1.13") + RSASSA_PSS = ObjectIdentifier("1.2.840.113549.1.1.10") + ECDSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10045.4.1") + ECDSA_WITH_SHA224 = ObjectIdentifier("1.2.840.10045.4.3.1") + ECDSA_WITH_SHA256 = ObjectIdentifier("1.2.840.10045.4.3.2") + ECDSA_WITH_SHA384 = ObjectIdentifier("1.2.840.10045.4.3.3") + ECDSA_WITH_SHA512 = ObjectIdentifier("1.2.840.10045.4.3.4") + DSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10040.4.3") + DSA_WITH_SHA224 = ObjectIdentifier("2.16.840.1.101.3.4.3.1") + DSA_WITH_SHA256 = ObjectIdentifier("2.16.840.1.101.3.4.3.2") + ED25519 = ObjectIdentifier("1.3.101.112") + ED448 = ObjectIdentifier("1.3.101.113") + + +_SIG_OIDS_TO_HASH = { + SignatureAlgorithmOID.RSA_WITH_MD5: hashes.MD5(), + SignatureAlgorithmOID.RSA_WITH_SHA1: hashes.SHA1(), + SignatureAlgorithmOID._RSA_WITH_SHA1: hashes.SHA1(), + SignatureAlgorithmOID.RSA_WITH_SHA224: hashes.SHA224(), + SignatureAlgorithmOID.RSA_WITH_SHA256: hashes.SHA256(), + SignatureAlgorithmOID.RSA_WITH_SHA384: hashes.SHA384(), + SignatureAlgorithmOID.RSA_WITH_SHA512: hashes.SHA512(), + SignatureAlgorithmOID.ECDSA_WITH_SHA1: hashes.SHA1(), + SignatureAlgorithmOID.ECDSA_WITH_SHA224: hashes.SHA224(), + SignatureAlgorithmOID.ECDSA_WITH_SHA256: hashes.SHA256(), + SignatureAlgorithmOID.ECDSA_WITH_SHA384: hashes.SHA384(), + SignatureAlgorithmOID.ECDSA_WITH_SHA512: hashes.SHA512(), + SignatureAlgorithmOID.DSA_WITH_SHA1: hashes.SHA1(), + SignatureAlgorithmOID.DSA_WITH_SHA224: hashes.SHA224(), + SignatureAlgorithmOID.DSA_WITH_SHA256: hashes.SHA256(), + SignatureAlgorithmOID.ED25519: None, + SignatureAlgorithmOID.ED448: None, +} + + +class ExtendedKeyUsageOID(object): + SERVER_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.1") + CLIENT_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.2") + CODE_SIGNING = ObjectIdentifier("1.3.6.1.5.5.7.3.3") + EMAIL_PROTECTION = ObjectIdentifier("1.3.6.1.5.5.7.3.4") + TIME_STAMPING = ObjectIdentifier("1.3.6.1.5.5.7.3.8") + OCSP_SIGNING = ObjectIdentifier("1.3.6.1.5.5.7.3.9") + ANY_EXTENDED_KEY_USAGE = ObjectIdentifier("2.5.29.37.0") + + +class AuthorityInformationAccessOID(object): + CA_ISSUERS = ObjectIdentifier("1.3.6.1.5.5.7.48.2") + OCSP = ObjectIdentifier("1.3.6.1.5.5.7.48.1") + + +class CertificatePoliciesOID(object): + CPS_QUALIFIER = ObjectIdentifier("1.3.6.1.5.5.7.2.1") + CPS_USER_NOTICE = ObjectIdentifier("1.3.6.1.5.5.7.2.2") + ANY_POLICY = ObjectIdentifier("2.5.29.32.0") + + +_OID_NAMES = { + NameOID.COMMON_NAME: "commonName", + NameOID.COUNTRY_NAME: "countryName", + NameOID.LOCALITY_NAME: "localityName", + NameOID.STATE_OR_PROVINCE_NAME: "stateOrProvinceName", + NameOID.STREET_ADDRESS: "streetAddress", + NameOID.ORGANIZATION_NAME: "organizationName", + NameOID.ORGANIZATIONAL_UNIT_NAME: "organizationalUnitName", + NameOID.SERIAL_NUMBER: "serialNumber", + NameOID.SURNAME: "surname", + NameOID.GIVEN_NAME: "givenName", + NameOID.TITLE: "title", + NameOID.GENERATION_QUALIFIER: "generationQualifier", + NameOID.X500_UNIQUE_IDENTIFIER: "x500UniqueIdentifier", + NameOID.DN_QUALIFIER: "dnQualifier", + NameOID.PSEUDONYM: "pseudonym", + NameOID.USER_ID: "userID", + NameOID.DOMAIN_COMPONENT: "domainComponent", + NameOID.EMAIL_ADDRESS: "emailAddress", + NameOID.JURISDICTION_COUNTRY_NAME: "jurisdictionCountryName", + NameOID.JURISDICTION_LOCALITY_NAME: "jurisdictionLocalityName", + NameOID.JURISDICTION_STATE_OR_PROVINCE_NAME: ( + "jurisdictionStateOrProvinceName" + ), 
+ NameOID.BUSINESS_CATEGORY: "businessCategory", + NameOID.POSTAL_ADDRESS: "postalAddress", + NameOID.POSTAL_CODE: "postalCode", + + SignatureAlgorithmOID.RSA_WITH_MD5: "md5WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA1: "sha1WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA224: "sha224WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA256: "sha256WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA384: "sha384WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA512: "sha512WithRSAEncryption", + SignatureAlgorithmOID.RSASSA_PSS: "RSASSA-PSS", + SignatureAlgorithmOID.ECDSA_WITH_SHA1: "ecdsa-with-SHA1", + SignatureAlgorithmOID.ECDSA_WITH_SHA224: "ecdsa-with-SHA224", + SignatureAlgorithmOID.ECDSA_WITH_SHA256: "ecdsa-with-SHA256", + SignatureAlgorithmOID.ECDSA_WITH_SHA384: "ecdsa-with-SHA384", + SignatureAlgorithmOID.ECDSA_WITH_SHA512: "ecdsa-with-SHA512", + SignatureAlgorithmOID.DSA_WITH_SHA1: "dsa-with-sha1", + SignatureAlgorithmOID.DSA_WITH_SHA224: "dsa-with-sha224", + SignatureAlgorithmOID.DSA_WITH_SHA256: "dsa-with-sha256", + SignatureAlgorithmOID.ED25519: "ed25519", + SignatureAlgorithmOID.ED448: "ed448", + ExtendedKeyUsageOID.SERVER_AUTH: "serverAuth", + ExtendedKeyUsageOID.CLIENT_AUTH: "clientAuth", + ExtendedKeyUsageOID.CODE_SIGNING: "codeSigning", + ExtendedKeyUsageOID.EMAIL_PROTECTION: "emailProtection", + ExtendedKeyUsageOID.TIME_STAMPING: "timeStamping", + ExtendedKeyUsageOID.OCSP_SIGNING: "OCSPSigning", + ExtensionOID.SUBJECT_DIRECTORY_ATTRIBUTES: "subjectDirectoryAttributes", + ExtensionOID.SUBJECT_KEY_IDENTIFIER: "subjectKeyIdentifier", + ExtensionOID.KEY_USAGE: "keyUsage", + ExtensionOID.SUBJECT_ALTERNATIVE_NAME: "subjectAltName", + ExtensionOID.ISSUER_ALTERNATIVE_NAME: "issuerAltName", + ExtensionOID.BASIC_CONSTRAINTS: "basicConstraints", + ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS: ( + "signedCertificateTimestampList" + ), + ExtensionOID.PRECERT_POISON: "ctPoison", + CRLEntryExtensionOID.CRL_REASON: "cRLReason", + CRLEntryExtensionOID.INVALIDITY_DATE: "invalidityDate", + CRLEntryExtensionOID.CERTIFICATE_ISSUER: "certificateIssuer", + ExtensionOID.NAME_CONSTRAINTS: "nameConstraints", + ExtensionOID.CRL_DISTRIBUTION_POINTS: "cRLDistributionPoints", + ExtensionOID.CERTIFICATE_POLICIES: "certificatePolicies", + ExtensionOID.POLICY_MAPPINGS: "policyMappings", + ExtensionOID.AUTHORITY_KEY_IDENTIFIER: "authorityKeyIdentifier", + ExtensionOID.POLICY_CONSTRAINTS: "policyConstraints", + ExtensionOID.EXTENDED_KEY_USAGE: "extendedKeyUsage", + ExtensionOID.FRESHEST_CRL: "freshestCRL", + ExtensionOID.INHIBIT_ANY_POLICY: "inhibitAnyPolicy", + ExtensionOID.ISSUING_DISTRIBUTION_POINT: ( + "issuingDistributionPoint" + ), + ExtensionOID.AUTHORITY_INFORMATION_ACCESS: "authorityInfoAccess", + ExtensionOID.SUBJECT_INFORMATION_ACCESS: "subjectInfoAccess", + ExtensionOID.OCSP_NO_CHECK: "OCSPNoCheck", + ExtensionOID.CRL_NUMBER: "cRLNumber", + ExtensionOID.DELTA_CRL_INDICATOR: "deltaCRLIndicator", + ExtensionOID.TLS_FEATURE: "TLSFeature", + AuthorityInformationAccessOID.OCSP: "OCSP", + AuthorityInformationAccessOID.CA_ISSUERS: "caIssuers", + CertificatePoliciesOID.CPS_QUALIFIER: "id-qt-cps", + CertificatePoliciesOID.CPS_USER_NOTICE: "id-qt-unotice", + OCSPExtensionOID.NONCE: "OCSPNonce", +}
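Finally, a brief sketch of how the OID constants above surface in the rest of the x509 API, assuming the upstream cryptography package and a certificate object cert loaded as in the earlier sketches:

    from cryptography import x509
    from cryptography.x509.oid import ExtensionOID, NameOID

    print(NameOID.COMMON_NAME.dotted_string)  # 2.5.4.3
    print(cert.subject.get_attributes_for_oid(NameOID.COMMON_NAME))

    # Raises x509.ExtensionNotFound if the certificate has no SAN extension.
    ext = cert.extensions.get_extension_for_oid(
        ExtensionOID.SUBJECT_ALTERNATIVE_NAME
    )
    print(ext.value.get_values_for_type(x509.DNSName))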