mirror of https://github.com/tp4a/teleport

add python modules for MySQL.

parent b4be3de0b3
commit 65e22960da

Binary file not shown.
asn1crypto/__init__.py
@@ -0,0 +1,47 @@
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function

from .version import __version__, __version_info__


__all__ = [
    '__version__',
    '__version_info__',
    'load_order',
]


def load_order():
    """
    Returns a list of the module and sub-module names for asn1crypto in
    dependency load order, for the sake of live reloading code

    :return:
        A list of unicode strings of module names, as they would appear in
        sys.modules, ordered by which module should be reloaded first
    """

    return [
        'asn1crypto._errors',
        'asn1crypto._int',
        'asn1crypto._ordereddict',
        'asn1crypto._teletex_codec',
        'asn1crypto._types',
        'asn1crypto._inet',
        'asn1crypto._iri',
        'asn1crypto.version',
        'asn1crypto.pem',
        'asn1crypto.util',
        'asn1crypto.parser',
        'asn1crypto.core',
        'asn1crypto.algos',
        'asn1crypto.keys',
        'asn1crypto.x509',
        'asn1crypto.crl',
        'asn1crypto.csr',
        'asn1crypto.ocsp',
        'asn1crypto.cms',
        'asn1crypto.pdf',
        'asn1crypto.pkcs12',
        'asn1crypto.tsp',
        'asn1crypto',
    ]
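For context, a short usage sketch (not part of this commit, and assuming Python 3 for importlib.reload): load_order() exists so a long-running process can reload the vendored package in dependency order after its files change on disk.

    import importlib
    import sys

    import asn1crypto

    # Reload every asn1crypto module that has been imported, dependencies first
    for name in asn1crypto.load_order():
        if name in sys.modules:
            importlib.reload(sys.modules[name])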
asn1crypto/_errors.py
@@ -0,0 +1,54 @@
# coding: utf-8

"""
Exports the following items:

 - unwrap()
 - APIException()
"""

from __future__ import unicode_literals, division, absolute_import, print_function

import re
import textwrap


class APIException(Exception):
    """
    An exception indicating an API has been removed from asn1crypto
    """

    pass


def unwrap(string, *params):
    """
    Takes a multi-line string and does the following:

     - dedents
     - converts newlines with text before and after into a single line
     - strips leading and trailing whitespace

    :param string:
        The string to format

    :param *params:
        Params to interpolate into the string

    :return:
        The formatted string
    """

    output = textwrap.dedent(string)

    # Unwrap lines, taking into account bulleted lists, ordered lists and
    # underlines consisting of = signs
    if output.find('\n') != -1:
        output = re.sub('(?<=\\S)\n(?=[^ \n\t\\d\\*\\-=])', ' ', output)

    if params:
        output = output % params

    output = output.strip()

    return output
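A quick sketch of what unwrap() does with the triple-quoted messages used throughout these modules (the values here are hypothetical, not from the commit):

    from asn1crypto._errors import unwrap

    message = unwrap(
        '''
        packed_ip must be a byte string, not %s
        ''',
        'int'
    )
    # Dedented and collapsed to one line:
    # "packed_ip must be a byte string, not int"
    print(message)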
asn1crypto/_inet.py
@@ -0,0 +1,170 @@
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function

import socket
import struct

from ._errors import unwrap
from ._types import byte_cls, bytes_to_list, str_cls, type_name


def inet_ntop(address_family, packed_ip):
    """
    Windows compatibility shim for socket.inet_ntop().

    :param address_family:
        socket.AF_INET for IPv4 or socket.AF_INET6 for IPv6

    :param packed_ip:
        A byte string of the network form of an IP address

    :return:
        A unicode string of the IP address
    """

    if address_family not in set([socket.AF_INET, socket.AF_INET6]):
        raise ValueError(unwrap(
            '''
            address_family must be socket.AF_INET (%s) or socket.AF_INET6 (%s),
            not %s
            ''',
            repr(socket.AF_INET),
            repr(socket.AF_INET6),
            repr(address_family)
        ))

    if not isinstance(packed_ip, byte_cls):
        raise TypeError(unwrap(
            '''
            packed_ip must be a byte string, not %s
            ''',
            type_name(packed_ip)
        ))

    required_len = 4 if address_family == socket.AF_INET else 16
    if len(packed_ip) != required_len:
        raise ValueError(unwrap(
            '''
            packed_ip must be %d bytes long - is %d
            ''',
            required_len,
            len(packed_ip)
        ))

    if address_family == socket.AF_INET:
        return '%d.%d.%d.%d' % tuple(bytes_to_list(packed_ip))

    octets = struct.unpack(b'!HHHHHHHH', packed_ip)

    runs_of_zero = {}
    longest_run = 0
    zero_index = None
    for i, octet in enumerate(octets + (-1,)):
        if octet != 0:
            if zero_index is not None:
                length = i - zero_index
                if length not in runs_of_zero:
                    runs_of_zero[length] = zero_index
                longest_run = max(longest_run, length)
                zero_index = None
        elif zero_index is None:
            zero_index = i

    hexed = [hex(o)[2:] for o in octets]

    if longest_run < 2:
        return ':'.join(hexed)

    zero_start = runs_of_zero[longest_run]
    zero_end = zero_start + longest_run

    return ':'.join(hexed[:zero_start]) + '::' + ':'.join(hexed[zero_end:])


def inet_pton(address_family, ip_string):
    """
    Windows compatibility shim for socket.inet_pton().

    :param address_family:
        socket.AF_INET for IPv4 or socket.AF_INET6 for IPv6

    :param ip_string:
        A unicode string of an IP address

    :return:
        A byte string of the network form of the IP address
    """

    if address_family not in set([socket.AF_INET, socket.AF_INET6]):
        raise ValueError(unwrap(
            '''
            address_family must be socket.AF_INET (%s) or socket.AF_INET6 (%s),
            not %s
            ''',
            repr(socket.AF_INET),
            repr(socket.AF_INET6),
            repr(address_family)
        ))

    if not isinstance(ip_string, str_cls):
        raise TypeError(unwrap(
            '''
            ip_string must be a unicode string, not %s
            ''',
            type_name(ip_string)
        ))

    if address_family == socket.AF_INET:
        octets = ip_string.split('.')
        error = len(octets) != 4
        if not error:
            ints = []
            for o in octets:
                o = int(o)
                if o > 255 or o < 0:
                    error = True
                    break
                ints.append(o)

        if error:
            raise ValueError(unwrap(
                '''
                ip_string must be a dotted string with four integers in the
                range of 0 to 255, got %s
                ''',
                repr(ip_string)
            ))

        return struct.pack(b'!BBBB', *ints)

    error = False
    omitted = ip_string.count('::')
    if omitted > 1:
        error = True
    elif omitted == 0:
        octets = ip_string.split(':')
        error = len(octets) != 8
    else:
        begin, end = ip_string.split('::')
        begin_octets = begin.split(':')
        end_octets = end.split(':')
        missing = 8 - len(begin_octets) - len(end_octets)
        octets = begin_octets + (['0'] * missing) + end_octets

    if not error:
        ints = []
        for o in octets:
            o = int(o, 16)
            if o > 65535 or o < 0:
                error = True
                break
            ints.append(o)

        return struct.pack(b'!HHHHHHHH', *ints)

    raise ValueError(unwrap(
        '''
        ip_string must be a valid ipv6 string, got %s
        ''',
        repr(ip_string)
    ))
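A round-trip sketch of the two shims above (not part of the commit); on platforms where socket.inet_pton() exists, the results match the standard library:

    import socket

    from asn1crypto._inet import inet_ntop, inet_pton

    packed = inet_pton(socket.AF_INET6, '2001:db8::1')  # 16-byte network form
    assert inet_ntop(socket.AF_INET6, packed) == '2001:db8::1'

    assert inet_ntop(socket.AF_INET, inet_pton(socket.AF_INET, '10.0.0.1')) == '10.0.0.1'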
asn1crypto/_int.py
@@ -0,0 +1,22 @@
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function


def fill_width(bytes_, width):
    """
    Ensure a byte string representing a positive integer is a specific width
    (in bytes)

    :param bytes_:
        The integer byte string

    :param width:
        The desired width as an integer

    :return:
        A byte string of the width specified
    """

    while len(bytes_) < width:
        bytes_ = b'\x00' + bytes_
    return bytes_
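A minimal illustration of fill_width(), which left-pads a big-endian integer byte string with zero bytes (values are hypothetical):

    from asn1crypto._int import fill_width

    assert fill_width(b'\x01\x02', 4) == b'\x00\x00\x01\x02'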
asn1crypto/_iri.py
@@ -0,0 +1,291 @@
# coding: utf-8

"""
Functions to convert unicode IRIs into ASCII byte string URIs and back. Exports
the following items:

 - iri_to_uri()
 - uri_to_iri()
"""

from __future__ import unicode_literals, division, absolute_import, print_function

from encodings import idna  # noqa
import codecs
import re
import sys

from ._errors import unwrap
from ._types import byte_cls, str_cls, type_name, bytes_to_list, int_types

if sys.version_info < (3,):
    from urlparse import urlsplit, urlunsplit
    from urllib import (
        quote as urlquote,
        unquote as unquote_to_bytes,
    )

else:
    from urllib.parse import (
        quote as urlquote,
        unquote_to_bytes,
        urlsplit,
        urlunsplit,
    )


def iri_to_uri(value, normalize=False):
    """
    Encodes a unicode IRI into an ASCII byte string URI

    :param value:
        A unicode string of an IRI

    :param normalize:
        A bool that controls URI normalization

    :return:
        A byte string of the ASCII-encoded URI
    """

    if not isinstance(value, str_cls):
        raise TypeError(unwrap(
            '''
            value must be a unicode string, not %s
            ''',
            type_name(value)
        ))

    scheme = None
    # Python 2.6 doesn't split properly if the URL doesn't start with http:// or https://
    if sys.version_info < (2, 7) and not value.startswith('http://') and not value.startswith('https://'):
        real_prefix = None
        prefix_match = re.match('^[^:]*://', value)
        if prefix_match:
            real_prefix = prefix_match.group(0)
            value = 'http://' + value[len(real_prefix):]
        parsed = urlsplit(value)
        if real_prefix:
            value = real_prefix + value[7:]
            scheme = _urlquote(real_prefix[:-3])
    else:
        parsed = urlsplit(value)

    if scheme is None:
        scheme = _urlquote(parsed.scheme)
    hostname = parsed.hostname
    if hostname is not None:
        hostname = hostname.encode('idna')
    # RFC 3986 allows userinfo to contain sub-delims
    username = _urlquote(parsed.username, safe='!$&\'()*+,;=')
    password = _urlquote(parsed.password, safe='!$&\'()*+,;=')
    port = parsed.port
    if port is not None:
        port = str_cls(port).encode('ascii')

    netloc = b''
    if username is not None:
        netloc += username
        if password:
            netloc += b':' + password
        netloc += b'@'
    if hostname is not None:
        netloc += hostname
    if port is not None:
        default_http = scheme == b'http' and port == b'80'
        default_https = scheme == b'https' and port == b'443'
        if not normalize or (not default_http and not default_https):
            netloc += b':' + port

    # RFC 3986 allows a path to contain sub-delims, plus "@" and ":"
    path = _urlquote(parsed.path, safe='/!$&\'()*+,;=@:')
    # RFC 3986 allows the query to contain sub-delims, plus "@", ":" , "/" and "?"
    query = _urlquote(parsed.query, safe='/?!$&\'()*+,;=@:')
    # RFC 3986 allows the fragment to contain sub-delims, plus "@", ":" , "/" and "?"
    fragment = _urlquote(parsed.fragment, safe='/?!$&\'()*+,;=@:')

    if normalize and query is None and fragment is None and path == b'/':
        path = None

    # Python 2.7 compat
    if path is None:
        path = ''

    output = urlunsplit((scheme, netloc, path, query, fragment))
    if isinstance(output, str_cls):
        output = output.encode('latin1')
    return output


def uri_to_iri(value):
    """
    Converts an ASCII URI byte string into a unicode IRI

    :param value:
        An ASCII-encoded byte string of the URI

    :return:
        A unicode string of the IRI
    """

    if not isinstance(value, byte_cls):
        raise TypeError(unwrap(
            '''
            value must be a byte string, not %s
            ''',
            type_name(value)
        ))

    parsed = urlsplit(value)

    scheme = parsed.scheme
    if scheme is not None:
        scheme = scheme.decode('ascii')

    username = _urlunquote(parsed.username, remap=[':', '@'])
    password = _urlunquote(parsed.password, remap=[':', '@'])
    hostname = parsed.hostname
    if hostname:
        hostname = hostname.decode('idna')
    port = parsed.port
    if port and not isinstance(port, int_types):
        port = port.decode('ascii')

    netloc = ''
    if username is not None:
        netloc += username
        if password:
            netloc += ':' + password
        netloc += '@'
    if hostname is not None:
        netloc += hostname
    if port is not None:
        netloc += ':' + str_cls(port)

    path = _urlunquote(parsed.path, remap=['/'], preserve=True)
    query = _urlunquote(parsed.query, remap=['&', '='], preserve=True)
    fragment = _urlunquote(parsed.fragment)

    return urlunsplit((scheme, netloc, path, query, fragment))


def _iri_utf8_errors_handler(exc):
    """
    Error handler for decoding UTF-8 parts of a URI into an IRI. Leaves byte
    sequences encoded in %XX format, but as part of a unicode string.

    :param exc:
        The UnicodeDecodeError exception

    :return:
        A 2-element tuple of (replacement unicode string, integer index to
        resume at)
    """

    bytes_as_ints = bytes_to_list(exc.object[exc.start:exc.end])
    replacements = ['%%%02x' % num for num in bytes_as_ints]
    return (''.join(replacements), exc.end)


codecs.register_error('iriutf8', _iri_utf8_errors_handler)


def _urlquote(string, safe=''):
    """
    Quotes a unicode string for use in a URL

    :param string:
        A unicode string

    :param safe:
        A unicode string of character to not encode

    :return:
        None (if string is None) or an ASCII byte string of the quoted string
    """

    if string is None or string == '':
        return None

    # Anything already hex quoted is pulled out of the URL and unquoted if
    # possible
    escapes = []
    if re.search('%[0-9a-fA-F]{2}', string):
        # Try to unquote any percent values, restoring them if they are not
        # valid UTF-8. Also, requote any safe chars since encoded versions of
        # those are functionally different than the unquoted ones.
        def _try_unescape(match):
            byte_string = unquote_to_bytes(match.group(0))
            unicode_string = byte_string.decode('utf-8', 'iriutf8')
            for safe_char in list(safe):
                unicode_string = unicode_string.replace(safe_char, '%%%02x' % ord(safe_char))
            return unicode_string
        string = re.sub('(?:%[0-9a-fA-F]{2})+', _try_unescape, string)

        # Once we have the minimal set of hex quoted values, remove them from
        # the string so that they are not double quoted
        def _extract_escape(match):
            escapes.append(match.group(0).encode('ascii'))
            return '\x00'
        string = re.sub('%[0-9a-fA-F]{2}', _extract_escape, string)

    output = urlquote(string.encode('utf-8'), safe=safe.encode('utf-8'))
    if not isinstance(output, byte_cls):
        output = output.encode('ascii')

    # Restore the existing quoted values that we extracted
    if len(escapes) > 0:
        def _return_escape(_):
            return escapes.pop(0)
        output = re.sub(b'%00', _return_escape, output)

    return output


def _urlunquote(byte_string, remap=None, preserve=None):
    """
    Unquotes a URI portion from a byte string into unicode using UTF-8

    :param byte_string:
        A byte string of the data to unquote

    :param remap:
        A list of characters (as unicode) that should be re-mapped to a
        %XX encoding. This is used when characters are not valid in part of a
        URL.

    :param preserve:
        A bool - indicates that the chars to be remapped if they occur in
        non-hex form, should be preserved. E.g. / for URL path.

    :return:
        A unicode string
    """

    if byte_string is None:
        return byte_string

    if byte_string == b'':
        return ''

    if preserve:
        replacements = ['\x1A', '\x1C', '\x1D', '\x1E', '\x1F']
        preserve_unmap = {}
        for char in remap:
            replacement = replacements.pop(0)
            preserve_unmap[replacement] = char
            byte_string = byte_string.replace(char.encode('ascii'), replacement.encode('ascii'))

    byte_string = unquote_to_bytes(byte_string)

    if remap:
        for char in remap:
            byte_string = byte_string.replace(char.encode('ascii'), ('%%%02x' % ord(char)).encode('ascii'))

    output = byte_string.decode('utf-8', 'iriutf8')

    if preserve:
        for replacement, original in preserve_unmap.items():
            output = output.replace(replacement, original)

    return output
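A usage sketch for the pair of functions above (the host name is illustrative): non-ASCII host labels go through IDNA, the rest of the IRI is percent-encoded, and uri_to_iri() reverses the process:

    from asn1crypto._iri import iri_to_uri, uri_to_iri

    uri = iri_to_uri('https://examplé.com/pâth')
    # b'https://xn--exampl-gva.com/p%C3%A2th'
    assert uri_to_iri(uri) == 'https://examplé.com/pâth'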
asn1crypto/_ordereddict.py
@@ -0,0 +1,135 @@
# Copyright (c) 2009 Raymond Hettinger
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.

import sys

if not sys.version_info < (2, 7):

    from collections import OrderedDict

else:

    from UserDict import DictMixin

    class OrderedDict(dict, DictMixin):

        def __init__(self, *args, **kwds):
            if len(args) > 1:
                raise TypeError('expected at most 1 arguments, got %d' % len(args))
            try:
                self.__end
            except AttributeError:
                self.clear()
            self.update(*args, **kwds)

        def clear(self):
            self.__end = end = []
            end += [None, end, end]  # sentinel node for doubly linked list
            self.__map = {}  # key --> [key, prev, next]
            dict.clear(self)

        def __setitem__(self, key, value):
            if key not in self:
                end = self.__end
                curr = end[1]
                curr[2] = end[1] = self.__map[key] = [key, curr, end]
            dict.__setitem__(self, key, value)

        def __delitem__(self, key):
            dict.__delitem__(self, key)
            key, prev, next_ = self.__map.pop(key)
            prev[2] = next_
            next_[1] = prev

        def __iter__(self):
            end = self.__end
            curr = end[2]
            while curr is not end:
                yield curr[0]
                curr = curr[2]

        def __reversed__(self):
            end = self.__end
            curr = end[1]
            while curr is not end:
                yield curr[0]
                curr = curr[1]

        def popitem(self, last=True):
            if not self:
                raise KeyError('dictionary is empty')
            if last:
                key = reversed(self).next()
            else:
                key = iter(self).next()
            value = self.pop(key)
            return key, value

        def __reduce__(self):
            items = [[k, self[k]] for k in self]
            tmp = self.__map, self.__end
            del self.__map, self.__end
            inst_dict = vars(self).copy()
            self.__map, self.__end = tmp
            if inst_dict:
                return (self.__class__, (items,), inst_dict)
            return self.__class__, (items,)

        def keys(self):
            return list(self)

        setdefault = DictMixin.setdefault
        update = DictMixin.update
        pop = DictMixin.pop
        values = DictMixin.values
        items = DictMixin.items
        iterkeys = DictMixin.iterkeys
        itervalues = DictMixin.itervalues
        iteritems = DictMixin.iteritems

        def __repr__(self):
            if not self:
                return '%s()' % (self.__class__.__name__,)
            return '%s(%r)' % (self.__class__.__name__, self.items())

        def copy(self):
            return self.__class__(self)

        @classmethod
        def fromkeys(cls, iterable, value=None):
            d = cls()
            for key in iterable:
                d[key] = value
            return d

        def __eq__(self, other):
            if isinstance(other, OrderedDict):
                if len(self) != len(other):
                    return False
                for p, q in zip(self.items(), other.items()):
                    if p != q:
                        return False
                return True
            return dict.__eq__(self, other)

        def __ne__(self, other):
            return not self == other
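This backport only takes effect on Python 2.6; on newer versions the import above simply re-exports collections.OrderedDict. A small, illustrative sanity check of the behavior it guarantees:

    from asn1crypto._ordereddict import OrderedDict

    d = OrderedDict()
    d['z'] = 1
    d['a'] = 2
    assert list(d.keys()) == ['z', 'a']  # insertion order, not sort order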
asn1crypto/_teletex_codec.py
@@ -0,0 +1,331 @@
# coding: utf-8

"""
Implementation of the teletex T.61 codec. Exports the following items:

 - register()
"""

from __future__ import unicode_literals, division, absolute_import, print_function

import codecs


class TeletexCodec(codecs.Codec):

    def encode(self, input_, errors='strict'):
        return codecs.charmap_encode(input_, errors, ENCODING_TABLE)

    def decode(self, input_, errors='strict'):
        return codecs.charmap_decode(input_, errors, DECODING_TABLE)


class TeletexIncrementalEncoder(codecs.IncrementalEncoder):

    def encode(self, input_, final=False):
        return codecs.charmap_encode(input_, self.errors, ENCODING_TABLE)[0]


class TeletexIncrementalDecoder(codecs.IncrementalDecoder):

    def decode(self, input_, final=False):
        return codecs.charmap_decode(input_, self.errors, DECODING_TABLE)[0]


class TeletexStreamWriter(TeletexCodec, codecs.StreamWriter):

    pass


class TeletexStreamReader(TeletexCodec, codecs.StreamReader):

    pass


def teletex_search_function(name):
    """
    Search function for teletex codec that is passed to codecs.register()
    """

    if name != 'teletex':
        return None

    return codecs.CodecInfo(
        name='teletex',
        encode=TeletexCodec().encode,
        decode=TeletexCodec().decode,
        incrementalencoder=TeletexIncrementalEncoder,
        incrementaldecoder=TeletexIncrementalDecoder,
        streamreader=TeletexStreamReader,
        streamwriter=TeletexStreamWriter,
    )


def register():
    """
    Registers the teletex codec
    """

    codecs.register(teletex_search_function)


# http://en.wikipedia.org/wiki/ITU_T.61
DECODING_TABLE = (
    '\u0000'
    '\u0001'
    '\u0002'
    '\u0003'
    '\u0004'
    '\u0005'
    '\u0006'
    '\u0007'
    '\u0008'
    '\u0009'
    '\u000A'
    '\u000B'
    '\u000C'
    '\u000D'
    '\u000E'
    '\u000F'
    '\u0010'
    '\u0011'
    '\u0012'
    '\u0013'
    '\u0014'
    '\u0015'
    '\u0016'
    '\u0017'
    '\u0018'
    '\u0019'
    '\u001A'
    '\u001B'
    '\u001C'
    '\u001D'
    '\u001E'
    '\u001F'
    '\u0020'
    '\u0021'
    '\u0022'
    '\ufffe'
    '\ufffe'
    '\u0025'
    '\u0026'
    '\u0027'
    '\u0028'
    '\u0029'
    '\u002A'
    '\u002B'
    '\u002C'
    '\u002D'
    '\u002E'
    '\u002F'
    '\u0030'
    '\u0031'
    '\u0032'
    '\u0033'
    '\u0034'
    '\u0035'
    '\u0036'
    '\u0037'
    '\u0038'
    '\u0039'
    '\u003A'
    '\u003B'
    '\u003C'
    '\u003D'
    '\u003E'
    '\u003F'
    '\u0040'
    '\u0041'
    '\u0042'
    '\u0043'
    '\u0044'
    '\u0045'
    '\u0046'
    '\u0047'
    '\u0048'
    '\u0049'
    '\u004A'
    '\u004B'
    '\u004C'
    '\u004D'
    '\u004E'
    '\u004F'
    '\u0050'
    '\u0051'
    '\u0052'
    '\u0053'
    '\u0054'
    '\u0055'
    '\u0056'
    '\u0057'
    '\u0058'
    '\u0059'
    '\u005A'
    '\u005B'
    '\ufffe'
    '\u005D'
    '\ufffe'
    '\u005F'
    '\ufffe'
    '\u0061'
    '\u0062'
    '\u0063'
    '\u0064'
    '\u0065'
    '\u0066'
    '\u0067'
    '\u0068'
    '\u0069'
    '\u006A'
    '\u006B'
    '\u006C'
    '\u006D'
    '\u006E'
    '\u006F'
    '\u0070'
    '\u0071'
    '\u0072'
    '\u0073'
    '\u0074'
    '\u0075'
    '\u0076'
    '\u0077'
    '\u0078'
    '\u0079'
    '\u007A'
    '\ufffe'
    '\u007C'
    '\ufffe'
    '\ufffe'
    '\u007F'
    '\u0080'
    '\u0081'
    '\u0082'
    '\u0083'
    '\u0084'
    '\u0085'
    '\u0086'
    '\u0087'
    '\u0088'
    '\u0089'
    '\u008A'
    '\u008B'
    '\u008C'
    '\u008D'
    '\u008E'
    '\u008F'
    '\u0090'
    '\u0091'
    '\u0092'
    '\u0093'
    '\u0094'
    '\u0095'
    '\u0096'
    '\u0097'
    '\u0098'
    '\u0099'
    '\u009A'
    '\u009B'
    '\u009C'
    '\u009D'
    '\u009E'
    '\u009F'
    '\u00A0'
    '\u00A1'
    '\u00A2'
    '\u00A3'
    '\u0024'
    '\u00A5'
    '\u0023'
    '\u00A7'
    '\u00A4'
    '\ufffe'
    '\ufffe'
    '\u00AB'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\u00B0'
    '\u00B1'
    '\u00B2'
    '\u00B3'
    '\u00D7'
    '\u00B5'
    '\u00B6'
    '\u00B7'
    '\u00F7'
    '\ufffe'
    '\ufffe'
    '\u00BB'
    '\u00BC'
    '\u00BD'
    '\u00BE'
    '\u00BF'
    '\ufffe'
    '\u0300'
    '\u0301'
    '\u0302'
    '\u0303'
    '\u0304'
    '\u0306'
    '\u0307'
    '\u0308'
    '\ufffe'
    '\u030A'
    '\u0327'
    '\u0332'
    '\u030B'
    '\u0328'
    '\u030C'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\u2126'
    '\u00C6'
    '\u00D0'
    '\u00AA'
    '\u0126'
    '\ufffe'
    '\u0132'
    '\u013F'
    '\u0141'
    '\u00D8'
    '\u0152'
    '\u00BA'
    '\u00DE'
    '\u0166'
    '\u014A'
    '\u0149'
    '\u0138'
    '\u00E6'
    '\u0111'
    '\u00F0'
    '\u0127'
    '\u0131'
    '\u0133'
    '\u0140'
    '\u0142'
    '\u00F8'
    '\u0153'
    '\u00DF'
    '\u00FE'
    '\u0167'
    '\u014B'
    '\ufffe'
)
ENCODING_TABLE = codecs.charmap_build(DECODING_TABLE)
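A small sketch of the codec in use (not part of the commit); per the table above, T.61 swaps '#' and '$' relative to ASCII, placing them at bytes 0xA6 and 0xA4:

    from asn1crypto import _teletex_codec

    _teletex_codec.register()

    assert '#'.encode('teletex') == b'\xa6'
    assert b'\xa4'.decode('teletex') == '$'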
asn1crypto/_types.py
@@ -0,0 +1,46 @@
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function

import inspect
import sys


if sys.version_info < (3,):
    str_cls = unicode  # noqa
    byte_cls = str
    int_types = (int, long)  # noqa

    def bytes_to_list(byte_string):
        return [ord(b) for b in byte_string]

    chr_cls = chr

else:
    str_cls = str
    byte_cls = bytes
    int_types = int

    bytes_to_list = list

    def chr_cls(num):
        return bytes([num])


def type_name(value):
    """
    Returns a user-readable name for the type of an object

    :param value:
        A value to get the type name of

    :return:
        A unicode string of the object's type name
    """

    if inspect.isclass(value):
        cls = value
    else:
        cls = value.__class__
    if cls.__module__ in set(['builtins', '__builtin__']):
        return cls.__name__
    return '%s.%s' % (cls.__module__, cls.__name__)
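An illustrative check of the shims above under Python 3 (not part of the commit):

    import collections

    from asn1crypto._types import bytes_to_list, chr_cls, type_name

    assert bytes_to_list(b'\x01\x02') == [1, 2]
    assert chr_cls(65) == b'A'
    assert type_name(1) == 'int'  # builtins are shown bare
    assert type_name(collections.OrderedDict()) == 'collections.OrderedDict'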
File diff suppressed because it is too large
@ -0,0 +1,961 @@
|
|||
# coding: utf-8
|
||||
|
||||
"""
|
||||
ASN.1 type classes for cryptographic message syntax (CMS). Structures are also
|
||||
compatible with PKCS#7. Exports the following items:
|
||||
|
||||
- AuthenticatedData()
|
||||
- AuthEnvelopedData()
|
||||
- CompressedData()
|
||||
- ContentInfo()
|
||||
- DigestedData()
|
||||
- EncryptedData()
|
||||
- EnvelopedData()
|
||||
- SignedAndEnvelopedData()
|
||||
- SignedData()
|
||||
|
||||
Other type classes are defined that help compose the types listed above.
|
||||
|
||||
Most CMS structures in the wild are formatted as ContentInfo encapsulating one of the other types.
|
||||
"""
|
||||
|
||||
from __future__ import unicode_literals, division, absolute_import, print_function
|
||||
|
||||
try:
|
||||
import zlib
|
||||
except (ImportError):
|
||||
zlib = None
|
||||
|
||||
from .algos import (
|
||||
_ForceNullParameters,
|
||||
DigestAlgorithm,
|
||||
EncryptionAlgorithm,
|
||||
HmacAlgorithm,
|
||||
KdfAlgorithm,
|
||||
RSAESOAEPParams,
|
||||
SignedDigestAlgorithm,
|
||||
)
|
||||
from .core import (
|
||||
Any,
|
||||
BitString,
|
||||
Choice,
|
||||
Enumerated,
|
||||
GeneralizedTime,
|
||||
Integer,
|
||||
ObjectIdentifier,
|
||||
OctetBitString,
|
||||
OctetString,
|
||||
ParsableOctetString,
|
||||
Sequence,
|
||||
SequenceOf,
|
||||
SetOf,
|
||||
UTCTime,
|
||||
UTF8String,
|
||||
)
|
||||
from .crl import CertificateList
|
||||
from .keys import PublicKeyInfo
|
||||
from .ocsp import OCSPResponse
|
||||
from .x509 import Attributes, Certificate, Extensions, GeneralName, GeneralNames, Name
|
||||
|
||||
|
||||
# These structures are taken from
|
||||
# ftp://ftp.rsasecurity.com/pub/pkcs/ascii/pkcs-6.asc
|
||||
|
||||
class ExtendedCertificateInfo(Sequence):
|
||||
_fields = [
|
||||
('version', Integer),
|
||||
('certificate', Certificate),
|
||||
('attributes', Attributes),
|
||||
]
|
||||
|
||||
|
||||
class ExtendedCertificate(Sequence):
|
||||
_fields = [
|
||||
('extended_certificate_info', ExtendedCertificateInfo),
|
||||
('signature_algorithm', SignedDigestAlgorithm),
|
||||
('signature', OctetBitString),
|
||||
]
|
||||
|
||||
|
||||
# These structures are taken from https://tools.ietf.org/html/rfc5652,
|
||||
# https://tools.ietf.org/html/rfc5083, http://tools.ietf.org/html/rfc2315,
|
||||
# https://tools.ietf.org/html/rfc5940, https://tools.ietf.org/html/rfc3274,
|
||||
# https://tools.ietf.org/html/rfc3281
|
||||
|
||||
|
||||
class CMSVersion(Integer):
|
||||
_map = {
|
||||
0: 'v0',
|
||||
1: 'v1',
|
||||
2: 'v2',
|
||||
3: 'v3',
|
||||
4: 'v4',
|
||||
5: 'v5',
|
||||
}
|
||||
|
||||
|
||||
class CMSAttributeType(ObjectIdentifier):
|
||||
_map = {
|
||||
'1.2.840.113549.1.9.3': 'content_type',
|
||||
'1.2.840.113549.1.9.4': 'message_digest',
|
||||
'1.2.840.113549.1.9.5': 'signing_time',
|
||||
'1.2.840.113549.1.9.6': 'counter_signature',
|
||||
# https://tools.ietf.org/html/rfc3161#page-20
|
||||
'1.2.840.113549.1.9.16.2.14': 'signature_time_stamp_token',
|
||||
# https://tools.ietf.org/html/rfc6211#page-5
|
||||
'1.2.840.113549.1.9.52': 'cms_algorithm_protection',
|
||||
# https://docs.microsoft.com/en-us/previous-versions/hh968145(v%3Dvs.85)
|
||||
'1.3.6.1.4.1.311.2.4.1': 'microsoft_nested_signature',
|
||||
# Some places refer to this as SPC_RFC3161_OBJID, others szOID_RFC3161_counterSign.
|
||||
# https://docs.microsoft.com/en-us/windows/win32/api/wincrypt/ns-wincrypt-crypt_algorithm_identifier
|
||||
# refers to szOID_RFC3161_counterSign as "1.2.840.113549.1.9.16.1.4",
|
||||
# but that OID is also called szOID_TIMESTAMP_TOKEN. Because of there being
|
||||
# no canonical source for this OID, we give it our own name
|
||||
'1.3.6.1.4.1.311.3.3.1': 'microsoft_time_stamp_token',
|
||||
}
|
||||
|
||||
|
||||
class Time(Choice):
|
||||
_alternatives = [
|
||||
('utc_time', UTCTime),
|
||||
('generalized_time', GeneralizedTime),
|
||||
]
|
||||
|
||||
|
||||
class ContentType(ObjectIdentifier):
|
||||
_map = {
|
||||
'1.2.840.113549.1.7.1': 'data',
|
||||
'1.2.840.113549.1.7.2': 'signed_data',
|
||||
'1.2.840.113549.1.7.3': 'enveloped_data',
|
||||
'1.2.840.113549.1.7.4': 'signed_and_enveloped_data',
|
||||
'1.2.840.113549.1.7.5': 'digested_data',
|
||||
'1.2.840.113549.1.7.6': 'encrypted_data',
|
||||
'1.2.840.113549.1.9.16.1.2': 'authenticated_data',
|
||||
'1.2.840.113549.1.9.16.1.9': 'compressed_data',
|
||||
'1.2.840.113549.1.9.16.1.23': 'authenticated_enveloped_data',
|
||||
}
|
||||
|
||||
|
||||
class CMSAlgorithmProtection(Sequence):
|
||||
_fields = [
|
||||
('digest_algorithm', DigestAlgorithm),
|
||||
('signature_algorithm', SignedDigestAlgorithm, {'implicit': 1, 'optional': True}),
|
||||
('mac_algorithm', HmacAlgorithm, {'implicit': 2, 'optional': True}),
|
||||
]
|
||||
|
||||
|
||||
class SetOfContentType(SetOf):
|
||||
_child_spec = ContentType
|
||||
|
||||
|
||||
class SetOfOctetString(SetOf):
|
||||
_child_spec = OctetString
|
||||
|
||||
|
||||
class SetOfTime(SetOf):
|
||||
_child_spec = Time
|
||||
|
||||
|
||||
class SetOfAny(SetOf):
|
||||
_child_spec = Any
|
||||
|
||||
|
||||
class SetOfCMSAlgorithmProtection(SetOf):
|
||||
_child_spec = CMSAlgorithmProtection
|
||||
|
||||
|
||||
class CMSAttribute(Sequence):
|
||||
_fields = [
|
||||
('type', CMSAttributeType),
|
||||
('values', None),
|
||||
]
|
||||
|
||||
_oid_specs = {}
|
||||
|
||||
def _values_spec(self):
|
||||
return self._oid_specs.get(self['type'].native, SetOfAny)
|
||||
|
||||
_spec_callbacks = {
|
||||
'values': _values_spec
|
||||
}
|
||||
|
||||
|
||||
class CMSAttributes(SetOf):
|
||||
_child_spec = CMSAttribute
|
||||
|
||||
|
||||
class IssuerSerial(Sequence):
|
||||
_fields = [
|
||||
('issuer', GeneralNames),
|
||||
('serial', Integer),
|
||||
('issuer_uid', OctetBitString, {'optional': True}),
|
||||
]
|
||||
|
||||
|
||||
class AttCertVersion(Integer):
|
||||
_map = {
|
||||
0: 'v1',
|
||||
1: 'v2',
|
||||
}
|
||||
|
||||
|
||||
class AttCertSubject(Choice):
|
||||
_alternatives = [
|
||||
('base_certificate_id', IssuerSerial, {'explicit': 0}),
|
||||
('subject_name', GeneralNames, {'explicit': 1}),
|
||||
]
|
||||
|
||||
|
||||
class AttCertValidityPeriod(Sequence):
|
||||
_fields = [
|
||||
('not_before_time', GeneralizedTime),
|
||||
('not_after_time', GeneralizedTime),
|
||||
]
|
||||
|
||||
|
||||
class AttributeCertificateInfoV1(Sequence):
|
||||
_fields = [
|
||||
('version', AttCertVersion, {'default': 'v1'}),
|
||||
('subject', AttCertSubject),
|
||||
('issuer', GeneralNames),
|
||||
('signature', SignedDigestAlgorithm),
|
||||
('serial_number', Integer),
|
||||
('att_cert_validity_period', AttCertValidityPeriod),
|
||||
('attributes', Attributes),
|
||||
('issuer_unique_id', OctetBitString, {'optional': True}),
|
||||
('extensions', Extensions, {'optional': True}),
|
||||
]
|
||||
|
||||
|
||||
class AttributeCertificateV1(Sequence):
|
||||
_fields = [
|
||||
('ac_info', AttributeCertificateInfoV1),
|
||||
('signature_algorithm', SignedDigestAlgorithm),
|
||||
('signature', OctetBitString),
|
||||
]
|
||||
|
||||
|
||||
class DigestedObjectType(Enumerated):
|
||||
_map = {
|
||||
0: 'public_key',
|
||||
1: 'public_key_cert',
|
||||
2: 'other_objy_types',
|
||||
}
|
||||
|
||||
|
||||
class ObjectDigestInfo(Sequence):
|
||||
_fields = [
|
||||
('digested_object_type', DigestedObjectType),
|
||||
('other_object_type_id', ObjectIdentifier, {'optional': True}),
|
||||
('digest_algorithm', DigestAlgorithm),
|
||||
('object_digest', OctetBitString),
|
||||
]
|
||||
|
||||
|
||||
class Holder(Sequence):
|
||||
_fields = [
|
||||
('base_certificate_id', IssuerSerial, {'implicit': 0, 'optional': True}),
|
||||
('entity_name', GeneralNames, {'implicit': 1, 'optional': True}),
|
||||
('object_digest_info', ObjectDigestInfo, {'implicit': 2, 'optional': True}),
|
||||
]
|
||||
|
||||
|
||||
class V2Form(Sequence):
|
||||
_fields = [
|
||||
('issuer_name', GeneralNames, {'optional': True}),
|
||||
('base_certificate_id', IssuerSerial, {'explicit': 0, 'optional': True}),
|
||||
('object_digest_info', ObjectDigestInfo, {'explicit': 1, 'optional': True}),
|
||||
]
|
||||
|
||||
|
||||
class AttCertIssuer(Choice):
|
||||
_alternatives = [
|
||||
('v1_form', GeneralNames),
|
||||
('v2_form', V2Form, {'explicit': 0}),
|
||||
]
|
||||
|
||||
|
||||
class IetfAttrValue(Choice):
|
||||
_alternatives = [
|
||||
('octets', OctetString),
|
||||
('oid', ObjectIdentifier),
|
||||
('string', UTF8String),
|
||||
]
|
||||
|
||||
|
||||
class IetfAttrValues(SequenceOf):
|
||||
_child_spec = IetfAttrValue
|
||||
|
||||
|
||||
class IetfAttrSyntax(Sequence):
|
||||
_fields = [
|
||||
('policy_authority', GeneralNames, {'implicit': 0, 'optional': True}),
|
||||
('values', IetfAttrValues),
|
||||
]
|
||||
|
||||
|
||||
class SetOfIetfAttrSyntax(SetOf):
|
||||
_child_spec = IetfAttrSyntax
|
||||
|
||||
|
||||
class SvceAuthInfo(Sequence):
|
||||
_fields = [
|
||||
('service', GeneralName),
|
||||
('ident', GeneralName),
|
||||
('auth_info', OctetString, {'optional': True}),
|
||||
]
|
||||
|
||||
|
||||
class SetOfSvceAuthInfo(SetOf):
|
||||
_child_spec = SvceAuthInfo
|
||||
|
||||
|
||||
class RoleSyntax(Sequence):
|
||||
_fields = [
|
||||
('role_authority', GeneralNames, {'implicit': 0, 'optional': True}),
|
||||
('role_name', GeneralName, {'implicit': 1}),
|
||||
]
|
||||
|
||||
|
||||
class SetOfRoleSyntax(SetOf):
|
||||
_child_spec = RoleSyntax
|
||||
|
||||
|
||||
class ClassList(BitString):
|
||||
_map = {
|
||||
0: 'unmarked',
|
||||
1: 'unclassified',
|
||||
2: 'restricted',
|
||||
3: 'confidential',
|
||||
4: 'secret',
|
||||
5: 'top_secret',
|
||||
}
|
||||
|
||||
|
||||
class SecurityCategory(Sequence):
|
||||
_fields = [
|
||||
('type', ObjectIdentifier, {'implicit': 0}),
|
||||
('value', Any, {'implicit': 1}),
|
||||
]
|
||||
|
||||
|
||||
class SetOfSecurityCategory(SetOf):
|
||||
_child_spec = SecurityCategory
|
||||
|
||||
|
||||
class Clearance(Sequence):
|
||||
_fields = [
|
||||
('policy_id', ObjectIdentifier, {'implicit': 0}),
|
||||
('class_list', ClassList, {'implicit': 1, 'default': 'unclassified'}),
|
||||
('security_categories', SetOfSecurityCategory, {'implicit': 2, 'optional': True}),
|
||||
]
|
||||
|
||||
|
||||
class SetOfClearance(SetOf):
|
||||
_child_spec = Clearance
|
||||
|
||||
|
||||
class BigTime(Sequence):
|
||||
_fields = [
|
||||
('major', Integer),
|
||||
('fractional_seconds', Integer),
|
||||
('sign', Integer, {'optional': True}),
|
||||
]
|
||||
|
||||
|
||||
class LeapData(Sequence):
|
||||
_fields = [
|
||||
('leap_time', BigTime),
|
||||
('action', Integer),
|
||||
]
|
||||
|
||||
|
||||
class SetOfLeapData(SetOf):
|
||||
_child_spec = LeapData
|
||||
|
||||
|
||||
class TimingMetrics(Sequence):
|
||||
_fields = [
|
||||
('ntp_time', BigTime),
|
||||
('offset', BigTime),
|
||||
('delay', BigTime),
|
||||
('expiration', BigTime),
|
||||
('leap_event', SetOfLeapData, {'optional': True}),
|
||||
]
|
||||
|
||||
|
||||
class SetOfTimingMetrics(SetOf):
|
||||
_child_spec = TimingMetrics
|
||||
|
||||
|
||||
class TimingPolicy(Sequence):
|
||||
_fields = [
|
||||
('policy_id', SequenceOf, {'spec': ObjectIdentifier}),
|
||||
('max_offset', BigTime, {'explicit': 0, 'optional': True}),
|
||||
('max_delay', BigTime, {'explicit': 1, 'optional': True}),
|
||||
]
|
||||
|
||||
|
||||
class SetOfTimingPolicy(SetOf):
|
||||
_child_spec = TimingPolicy
|
||||
|
||||
|
||||
class AttCertAttributeType(ObjectIdentifier):
|
||||
_map = {
|
||||
'1.3.6.1.5.5.7.10.1': 'authentication_info',
|
||||
'1.3.6.1.5.5.7.10.2': 'access_identity',
|
||||
'1.3.6.1.5.5.7.10.3': 'charging_identity',
|
||||
'1.3.6.1.5.5.7.10.4': 'group',
|
||||
'2.5.4.72': 'role',
|
||||
'2.5.4.55': 'clearance',
|
||||
'1.3.6.1.4.1.601.10.4.1': 'timing_metrics',
|
||||
'1.3.6.1.4.1.601.10.4.2': 'timing_policy',
|
||||
}
|
||||
|
||||
|
||||
class AttCertAttribute(Sequence):
|
||||
_fields = [
|
||||
('type', AttCertAttributeType),
|
||||
('values', None),
|
||||
]
|
||||
|
||||
_oid_specs = {
|
||||
'authentication_info': SetOfSvceAuthInfo,
|
||||
'access_identity': SetOfSvceAuthInfo,
|
||||
'charging_identity': SetOfIetfAttrSyntax,
|
||||
'group': SetOfIetfAttrSyntax,
|
||||
'role': SetOfRoleSyntax,
|
||||
'clearance': SetOfClearance,
|
||||
'timing_metrics': SetOfTimingMetrics,
|
||||
'timing_policy': SetOfTimingPolicy,
|
||||
}
|
||||
|
||||
def _values_spec(self):
|
||||
return self._oid_specs.get(self['type'].native, SetOfAny)
|
||||
|
||||
_spec_callbacks = {
|
||||
'values': _values_spec
|
||||
}
|
||||
|
||||
|
||||
class AttCertAttributes(SequenceOf):
|
||||
_child_spec = AttCertAttribute
|
||||
|
||||
|
||||
class AttributeCertificateInfoV2(Sequence):
|
||||
_fields = [
|
||||
('version', AttCertVersion),
|
||||
('holder', Holder),
|
||||
('issuer', AttCertIssuer),
|
||||
('signature', SignedDigestAlgorithm),
|
||||
('serial_number', Integer),
|
||||
('att_cert_validity_period', AttCertValidityPeriod),
|
||||
('attributes', AttCertAttributes),
|
||||
('issuer_unique_id', OctetBitString, {'optional': True}),
|
||||
('extensions', Extensions, {'optional': True}),
|
||||
]
|
||||
|
||||
|
||||
class AttributeCertificateV2(Sequence):
|
||||
# Handle the situation where a V2 cert is encoded as V1
|
||||
_bad_tag = 1
|
||||
|
||||
_fields = [
|
||||
('ac_info', AttributeCertificateInfoV2),
|
||||
('signature_algorithm', SignedDigestAlgorithm),
|
||||
('signature', OctetBitString),
|
||||
]
|
||||
|
||||
|
||||
class OtherCertificateFormat(Sequence):
|
||||
_fields = [
|
||||
('other_cert_format', ObjectIdentifier),
|
||||
('other_cert', Any),
|
||||
]
|
||||
|
||||
|
||||
class CertificateChoices(Choice):
|
||||
_alternatives = [
|
||||
('certificate', Certificate),
|
||||
('extended_certificate', ExtendedCertificate, {'implicit': 0}),
|
||||
('v1_attr_cert', AttributeCertificateV1, {'implicit': 1}),
|
||||
('v2_attr_cert', AttributeCertificateV2, {'implicit': 2}),
|
||||
('other', OtherCertificateFormat, {'implicit': 3}),
|
||||
]
|
||||
|
||||
def validate(self, class_, tag, contents):
|
||||
"""
|
||||
Ensures that the class and tag specified exist as an alternative. This
|
||||
custom version fixes parsing broken encodings there a V2 attribute
|
||||
# certificate is encoded as a V1
|
||||
|
||||
:param class_:
|
||||
The integer class_ from the encoded value header
|
||||
|
||||
:param tag:
|
||||
The integer tag from the encoded value header
|
||||
|
||||
:param contents:
|
||||
A byte string of the contents of the value - used when the object
|
||||
is explicitly tagged
|
||||
|
||||
:raises:
|
||||
ValueError - when value is not a valid alternative
|
||||
"""
|
||||
|
||||
super(CertificateChoices, self).validate(class_, tag, contents)
|
||||
if self._choice == 2:
|
||||
if AttCertVersion.load(Sequence.load(contents)[0].dump()).native == 'v2':
|
||||
self._choice = 3
|
||||
|
||||
|
||||
class CertificateSet(SetOf):
|
||||
_child_spec = CertificateChoices
|
||||
|
||||
|
||||
class ContentInfo(Sequence):
|
||||
_fields = [
|
||||
('content_type', ContentType),
|
||||
('content', Any, {'explicit': 0, 'optional': True}),
|
||||
]
|
||||
|
||||
_oid_pair = ('content_type', 'content')
|
||||
_oid_specs = {}
|
||||
|
||||
|
||||
class SetOfContentInfo(SetOf):
|
||||
_child_spec = ContentInfo
|
||||
|
||||
|
||||
class EncapsulatedContentInfo(Sequence):
|
||||
_fields = [
|
||||
('content_type', ContentType),
|
||||
('content', ParsableOctetString, {'explicit': 0, 'optional': True}),
|
||||
]
|
||||
|
||||
_oid_pair = ('content_type', 'content')
|
||||
_oid_specs = {}
|
||||
|
||||
|
||||
class IssuerAndSerialNumber(Sequence):
|
||||
_fields = [
|
||||
('issuer', Name),
|
||||
('serial_number', Integer),
|
||||
]
|
||||
|
||||
|
||||
class SignerIdentifier(Choice):
|
||||
_alternatives = [
|
||||
('issuer_and_serial_number', IssuerAndSerialNumber),
|
||||
('subject_key_identifier', OctetString, {'implicit': 0}),
|
||||
]
|
||||
|
||||
|
||||
class DigestAlgorithms(SetOf):
|
||||
_child_spec = DigestAlgorithm
|
||||
|
||||
|
||||
class CertificateRevocationLists(SetOf):
|
||||
_child_spec = CertificateList
|
||||
|
||||
|
||||
class SCVPReqRes(Sequence):
|
||||
_fields = [
|
||||
('request', ContentInfo, {'explicit': 0, 'optional': True}),
|
||||
('response', ContentInfo),
|
||||
]
|
||||
|
||||
|
||||
class OtherRevInfoFormatId(ObjectIdentifier):
|
||||
_map = {
|
||||
'1.3.6.1.5.5.7.16.2': 'ocsp_response',
|
||||
'1.3.6.1.5.5.7.16.4': 'scvp',
|
||||
}
|
||||
|
||||
|
||||
class OtherRevocationInfoFormat(Sequence):
|
||||
_fields = [
|
||||
('other_rev_info_format', OtherRevInfoFormatId),
|
||||
('other_rev_info', Any),
|
||||
]
|
||||
|
||||
_oid_pair = ('other_rev_info_format', 'other_rev_info')
|
||||
_oid_specs = {
|
||||
'ocsp_response': OCSPResponse,
|
||||
'scvp': SCVPReqRes,
|
||||
}
|
||||
|
||||
|
||||
class RevocationInfoChoice(Choice):
|
||||
_alternatives = [
|
||||
('crl', CertificateList),
|
||||
('other', OtherRevocationInfoFormat, {'implicit': 1}),
|
||||
]
|
||||
|
||||
|
||||
class RevocationInfoChoices(SetOf):
|
||||
_child_spec = RevocationInfoChoice
|
||||
|
||||
|
||||
class SignerInfo(Sequence):
|
||||
_fields = [
|
||||
('version', CMSVersion),
|
||||
('sid', SignerIdentifier),
|
||||
('digest_algorithm', DigestAlgorithm),
|
||||
('signed_attrs', CMSAttributes, {'implicit': 0, 'optional': True}),
|
||||
('signature_algorithm', SignedDigestAlgorithm),
|
||||
('signature', OctetString),
|
||||
('unsigned_attrs', CMSAttributes, {'implicit': 1, 'optional': True}),
|
||||
]
|
||||
|
||||
|
||||
class SignerInfos(SetOf):
|
||||
_child_spec = SignerInfo
|
||||
|
||||
|
||||
class SignedData(Sequence):
|
||||
_fields = [
|
||||
('version', CMSVersion),
|
||||
('digest_algorithms', DigestAlgorithms),
|
||||
('encap_content_info', None),
|
||||
('certificates', CertificateSet, {'implicit': 0, 'optional': True}),
|
||||
('crls', RevocationInfoChoices, {'implicit': 1, 'optional': True}),
|
||||
('signer_infos', SignerInfos),
|
||||
]
|
||||
|
||||
def _encap_content_info_spec(self):
|
||||
# If the encap_content_info is version v1, then this could be a PKCS#7
|
||||
# structure, or a CMS structure. CMS wraps the encoded value in an
|
||||
# Octet String tag.
|
||||
|
||||
# If the version is greater than 1, it is definite CMS
|
||||
if self['version'].native != 'v1':
|
||||
return EncapsulatedContentInfo
|
||||
|
||||
# Otherwise, the ContentInfo spec from PKCS#7 will be compatible with
|
||||
# CMS v1 (which only allows Data, an Octet String) and PKCS#7, which
|
||||
# allows Any
|
||||
return ContentInfo
|
||||
|
||||
_spec_callbacks = {
|
||||
'encap_content_info': _encap_content_info_spec
|
||||
}
|
||||
|
||||
|
||||
class OriginatorInfo(Sequence):
|
||||
_fields = [
|
||||
('certs', CertificateSet, {'implicit': 0, 'optional': True}),
|
||||
('crls', RevocationInfoChoices, {'implicit': 1, 'optional': True}),
|
||||
]
|
||||
|
||||
|
||||
class RecipientIdentifier(Choice):
|
||||
_alternatives = [
|
||||
('issuer_and_serial_number', IssuerAndSerialNumber),
|
||||
('subject_key_identifier', OctetString, {'implicit': 0}),
|
||||
]
|
||||
|
||||
|
||||
class KeyEncryptionAlgorithmId(ObjectIdentifier):
|
||||
_map = {
|
||||
'1.2.840.113549.1.1.1': 'rsaes_pkcs1v15',
|
||||
'1.2.840.113549.1.1.7': 'rsaes_oaep',
|
||||
'2.16.840.1.101.3.4.1.5': 'aes128_wrap',
|
||||
'2.16.840.1.101.3.4.1.8': 'aes128_wrap_pad',
|
||||
'2.16.840.1.101.3.4.1.25': 'aes192_wrap',
|
||||
'2.16.840.1.101.3.4.1.28': 'aes192_wrap_pad',
|
||||
'2.16.840.1.101.3.4.1.45': 'aes256_wrap',
|
||||
'2.16.840.1.101.3.4.1.48': 'aes256_wrap_pad',
|
||||
}
|
||||
|
||||
_reverse_map = {
|
||||
'rsa': '1.2.840.113549.1.1.1',
|
||||
'rsaes_pkcs1v15': '1.2.840.113549.1.1.1',
|
||||
'rsaes_oaep': '1.2.840.113549.1.1.7',
|
||||
'aes128_wrap': '2.16.840.1.101.3.4.1.5',
|
||||
'aes128_wrap_pad': '2.16.840.1.101.3.4.1.8',
|
||||
'aes192_wrap': '2.16.840.1.101.3.4.1.25',
|
||||
'aes192_wrap_pad': '2.16.840.1.101.3.4.1.28',
|
||||
'aes256_wrap': '2.16.840.1.101.3.4.1.45',
|
||||
'aes256_wrap_pad': '2.16.840.1.101.3.4.1.48',
|
||||
}
|
||||
|
||||
|
||||
class KeyEncryptionAlgorithm(_ForceNullParameters, Sequence):
|
||||
_fields = [
|
||||
('algorithm', KeyEncryptionAlgorithmId),
|
||||
('parameters', Any, {'optional': True}),
|
||||
]
|
||||
|
||||
_oid_pair = ('algorithm', 'parameters')
|
||||
_oid_specs = {
|
||||
'rsaes_oaep': RSAESOAEPParams,
|
||||
}
|
||||
|
||||
|
||||
class KeyTransRecipientInfo(Sequence):
|
||||
_fields = [
|
||||
('version', CMSVersion),
|
||||
('rid', RecipientIdentifier),
|
||||
('key_encryption_algorithm', KeyEncryptionAlgorithm),
|
||||
('encrypted_key', OctetString),
|
||||
]
|
||||
|
||||
|
||||
class OriginatorIdentifierOrKey(Choice):
|
||||
_alternatives = [
|
||||
('issuer_and_serial_number', IssuerAndSerialNumber),
|
||||
('subject_key_identifier', OctetString, {'implicit': 0}),
|
||||
('originator_key', PublicKeyInfo, {'implicit': 1}),
|
||||
]
|
||||
|
||||
|
||||
class OtherKeyAttribute(Sequence):
|
||||
_fields = [
|
||||
('key_attr_id', ObjectIdentifier),
|
||||
('key_attr', Any),
|
||||
]
|
||||
|
||||
|
||||
class RecipientKeyIdentifier(Sequence):
|
||||
_fields = [
|
||||
('subject_key_identifier', OctetString),
|
||||
('date', GeneralizedTime, {'optional': True}),
|
||||
('other', OtherKeyAttribute, {'optional': True}),
|
||||
]
|
||||
|
||||
|
||||
class KeyAgreementRecipientIdentifier(Choice):
|
||||
_alternatives = [
|
||||
('issuer_and_serial_number', IssuerAndSerialNumber),
|
||||
('r_key_id', RecipientKeyIdentifier, {'implicit': 0}),
|
||||
]
|
||||
|
||||
|
||||
class RecipientEncryptedKey(Sequence):
|
||||
_fields = [
|
||||
('rid', KeyAgreementRecipientIdentifier),
|
||||
('encrypted_key', OctetString),
|
||||
]
|
||||
|
||||
|
||||
class RecipientEncryptedKeys(SequenceOf):
|
||||
_child_spec = RecipientEncryptedKey
|
||||
|
||||
|
||||
class KeyAgreeRecipientInfo(Sequence):
|
||||
_fields = [
|
||||
('version', CMSVersion),
|
||||
('originator', OriginatorIdentifierOrKey, {'explicit': 0}),
|
||||
('ukm', OctetString, {'explicit': 1, 'optional': True}),
|
||||
('key_encryption_algorithm', KeyEncryptionAlgorithm),
|
||||
('recipient_encrypted_keys', RecipientEncryptedKeys),
|
||||
]
|
||||
|
||||
|
||||
class KEKIdentifier(Sequence):
|
||||
_fields = [
|
||||
('key_identifier', OctetString),
|
||||
('date', GeneralizedTime, {'optional': True}),
|
||||
('other', OtherKeyAttribute, {'optional': True}),
|
||||
]
|
||||
|
||||
|
||||
class KEKRecipientInfo(Sequence):
|
||||
_fields = [
|
||||
('version', CMSVersion),
|
||||
('kekid', KEKIdentifier),
|
||||
('key_encryption_algorithm', KeyEncryptionAlgorithm),
|
||||
('encrypted_key', OctetString),
|
||||
]
|
||||
|
||||
|
||||
class PasswordRecipientInfo(Sequence):
|
||||
_fields = [
|
||||
('version', CMSVersion),
|
||||
('key_derivation_algorithm', KdfAlgorithm, {'implicit': 0, 'optional': True}),
|
||||
('key_encryption_algorithm', KeyEncryptionAlgorithm),
|
||||
('encrypted_key', OctetString),
|
||||
]
|
||||
|
||||
|
||||
class OtherRecipientInfo(Sequence):
|
||||
_fields = [
|
||||
('ori_type', ObjectIdentifier),
|
||||
('ori_value', Any),
|
||||
]
|
||||
|
||||
|
||||
class RecipientInfo(Choice):
|
||||
_alternatives = [
|
||||
('ktri', KeyTransRecipientInfo),
|
||||
('kari', KeyAgreeRecipientInfo, {'implicit': 1}),
|
||||
('kekri', KEKRecipientInfo, {'implicit': 2}),
|
||||
('pwri', PasswordRecipientInfo, {'implicit': 3}),
|
||||
('ori', OtherRecipientInfo, {'implicit': 4}),
|
||||
]
|
||||
|
||||
|
||||
class RecipientInfos(SetOf):
|
||||
_child_spec = RecipientInfo
|
||||
|
||||
|
||||
class EncryptedContentInfo(Sequence):
|
||||
_fields = [
|
||||
('content_type', ContentType),
|
||||
('content_encryption_algorithm', EncryptionAlgorithm),
|
||||
('encrypted_content', OctetString, {'implicit': 0, 'optional': True}),
|
||||
]
|
||||
|
||||
|
||||
class EnvelopedData(Sequence):
|
||||
_fields = [
|
||||
('version', CMSVersion),
|
||||
('originator_info', OriginatorInfo, {'implicit': 0, 'optional': True}),
|
||||
('recipient_infos', RecipientInfos),
|
||||
('encrypted_content_info', EncryptedContentInfo),
|
||||
('unprotected_attrs', CMSAttributes, {'implicit': 1, 'optional': True}),
|
||||
]
|
||||
|
||||
|
||||
class SignedAndEnvelopedData(Sequence):
    _fields = [
        ('version', CMSVersion),
        ('recipient_infos', RecipientInfos),
        ('digest_algorithms', DigestAlgorithms),
        ('encrypted_content_info', EncryptedContentInfo),
        ('certificates', CertificateSet, {'implicit': 0, 'optional': True}),
        ('crls', CertificateRevocationLists, {'implicit': 1, 'optional': True}),
        ('signer_infos', SignerInfos),
    ]


class DigestedData(Sequence):
    _fields = [
        ('version', CMSVersion),
        ('digest_algorithm', DigestAlgorithm),
        ('encap_content_info', None),
        ('digest', OctetString),
    ]

    def _encap_content_info_spec(self):
        # If the encap_content_info is version v1, then this could be a PKCS#7
        # structure, or a CMS structure. CMS wraps the encoded value in an
        # Octet String tag.

        # If the version is greater than 1, it is definite CMS
        if self['version'].native != 'v1':
            return EncapsulatedContentInfo

        # Otherwise, the ContentInfo spec from PKCS#7 will be compatible with
        # CMS v1 (which only allows Data, an Octet String) and PKCS#7, which
        # allows Any
        return ContentInfo

    _spec_callbacks = {
        'encap_content_info': _encap_content_info_spec
    }


class EncryptedData(Sequence):
    _fields = [
        ('version', CMSVersion),
        ('encrypted_content_info', EncryptedContentInfo),
        ('unprotected_attrs', CMSAttributes, {'implicit': 1, 'optional': True}),
    ]


class AuthenticatedData(Sequence):
    _fields = [
        ('version', CMSVersion),
        ('originator_info', OriginatorInfo, {'implicit': 0, 'optional': True}),
        ('recipient_infos', RecipientInfos),
        ('mac_algorithm', HmacAlgorithm),
        ('digest_algorithm', DigestAlgorithm, {'implicit': 1, 'optional': True}),
        # This does not require the _spec_callbacks approach of SignedData and
        # DigestedData since AuthenticatedData was not part of PKCS#7
        ('encap_content_info', EncapsulatedContentInfo),
        ('auth_attrs', CMSAttributes, {'implicit': 2, 'optional': True}),
        ('mac', OctetString),
        ('unauth_attrs', CMSAttributes, {'implicit': 3, 'optional': True}),
    ]


class AuthEnvelopedData(Sequence):
    _fields = [
        ('version', CMSVersion),
        ('originator_info', OriginatorInfo, {'implicit': 0, 'optional': True}),
        ('recipient_infos', RecipientInfos),
        ('auth_encrypted_content_info', EncryptedContentInfo),
        ('auth_attrs', CMSAttributes, {'implicit': 1, 'optional': True}),
        ('mac', OctetString),
        ('unauth_attrs', CMSAttributes, {'implicit': 2, 'optional': True}),
    ]


class CompressionAlgorithmId(ObjectIdentifier):
    _map = {
        '1.2.840.113549.1.9.16.3.8': 'zlib',
    }


class CompressionAlgorithm(Sequence):
    _fields = [
        ('algorithm', CompressionAlgorithmId),
        ('parameters', Any, {'optional': True}),
    ]


class CompressedData(Sequence):
    _fields = [
        ('version', CMSVersion),
        ('compression_algorithm', CompressionAlgorithm),
        ('encap_content_info', EncapsulatedContentInfo),
    ]

    _decompressed = None

    @property
    def decompressed(self):
        if self._decompressed is None:
            if zlib is None:
                raise SystemError('The zlib module is not available')
            self._decompressed = zlib.decompress(self['encap_content_info']['content'].native)
        return self._decompressed
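
# Editor's note: a brief usage sketch (illustrative, not part of asn1crypto).
# It assumes `der_bytes` holds a DER-encoded CMS ContentInfo wrapping a
# CompressedData value; the original content can then be recovered with:
#
#     info = ContentInfo.load(der_bytes)
#     if info['content_type'].native == 'compressed_data':
#         original_bytes = info['content'].decompressed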


ContentInfo._oid_specs = {
    'data': OctetString,
    'signed_data': SignedData,
    'enveloped_data': EnvelopedData,
    'signed_and_enveloped_data': SignedAndEnvelopedData,
    'digested_data': DigestedData,
    'encrypted_data': EncryptedData,
    'authenticated_data': AuthenticatedData,
    'compressed_data': CompressedData,
    'authenticated_enveloped_data': AuthEnvelopedData,
}


EncapsulatedContentInfo._oid_specs = {
    'signed_data': SignedData,
    'enveloped_data': EnvelopedData,
    'signed_and_enveloped_data': SignedAndEnvelopedData,
    'digested_data': DigestedData,
    'encrypted_data': EncryptedData,
    'authenticated_data': AuthenticatedData,
    'compressed_data': CompressedData,
    'authenticated_enveloped_data': AuthEnvelopedData,
}


CMSAttribute._oid_specs = {
    'content_type': SetOfContentType,
    'message_digest': SetOfOctetString,
    'signing_time': SetOfTime,
    'counter_signature': SignerInfos,
    'signature_time_stamp_token': SetOfContentInfo,
    'cms_algorithm_protection': SetOfCMSAlgorithmProtection,
    'microsoft_nested_signature': SetOfContentInfo,
    'microsoft_time_stamp_token': SetOfContentInfo,
}
File diff suppressed because it is too large
@@ -0,0 +1,536 @@
# coding: utf-8

"""
ASN.1 type classes for certificate revocation lists (CRL). Exports the
following items:

 - CertificateList()

Other type classes are defined that help compose the types listed above.
"""

from __future__ import unicode_literals, division, absolute_import, print_function

import hashlib

from .algos import SignedDigestAlgorithm
from .core import (
    Boolean,
    Enumerated,
    GeneralizedTime,
    Integer,
    ObjectIdentifier,
    OctetBitString,
    ParsableOctetString,
    Sequence,
    SequenceOf,
)
from .x509 import (
    AuthorityInfoAccessSyntax,
    AuthorityKeyIdentifier,
    CRLDistributionPoints,
    DistributionPointName,
    GeneralNames,
    Name,
    ReasonFlags,
    Time,
)


# The structures in this file are taken from https://tools.ietf.org/html/rfc5280


class Version(Integer):
    _map = {
        0: 'v1',
        1: 'v2',
        2: 'v3',
    }


class IssuingDistributionPoint(Sequence):
    _fields = [
        ('distribution_point', DistributionPointName, {'explicit': 0, 'optional': True}),
        ('only_contains_user_certs', Boolean, {'implicit': 1, 'default': False}),
        ('only_contains_ca_certs', Boolean, {'implicit': 2, 'default': False}),
        ('only_some_reasons', ReasonFlags, {'implicit': 3, 'optional': True}),
        ('indirect_crl', Boolean, {'implicit': 4, 'default': False}),
        ('only_contains_attribute_certs', Boolean, {'implicit': 5, 'default': False}),
    ]


class TBSCertListExtensionId(ObjectIdentifier):
    _map = {
        '2.5.29.18': 'issuer_alt_name',
        '2.5.29.20': 'crl_number',
        '2.5.29.27': 'delta_crl_indicator',
        '2.5.29.28': 'issuing_distribution_point',
        '2.5.29.35': 'authority_key_identifier',
        '2.5.29.46': 'freshest_crl',
        '1.3.6.1.5.5.7.1.1': 'authority_information_access',
    }


class TBSCertListExtension(Sequence):
    _fields = [
        ('extn_id', TBSCertListExtensionId),
        ('critical', Boolean, {'default': False}),
        ('extn_value', ParsableOctetString),
    ]

    _oid_pair = ('extn_id', 'extn_value')
    _oid_specs = {
        'issuer_alt_name': GeneralNames,
        'crl_number': Integer,
        'delta_crl_indicator': Integer,
        'issuing_distribution_point': IssuingDistributionPoint,
        'authority_key_identifier': AuthorityKeyIdentifier,
        'freshest_crl': CRLDistributionPoints,
        'authority_information_access': AuthorityInfoAccessSyntax,
    }


class TBSCertListExtensions(SequenceOf):
    _child_spec = TBSCertListExtension


class CRLReason(Enumerated):
    _map = {
        0: 'unspecified',
        1: 'key_compromise',
        2: 'ca_compromise',
        3: 'affiliation_changed',
        4: 'superseded',
        5: 'cessation_of_operation',
        6: 'certificate_hold',
        8: 'remove_from_crl',
        9: 'privilege_withdrawn',
        10: 'aa_compromise',
    }

    @property
    def human_friendly(self):
        """
        :return:
            A unicode string with a revocation description that is suitable to
            show to end-users. Starts with a lower case letter and is phrased
            in such a way that it makes sense after the phrase "because of" or
            "due to".
        """

        return {
            'unspecified': 'an unspecified reason',
            'key_compromise': 'a compromised key',
            'ca_compromise': 'the CA being compromised',
            'affiliation_changed': 'an affiliation change',
            'superseded': 'certificate supersession',
            'cessation_of_operation': 'a cessation of operation',
            'certificate_hold': 'a certificate hold',
            'remove_from_crl': 'removal from the CRL',
            'privilege_withdrawn': 'privilege withdrawal',
            'aa_compromise': 'the AA being compromised',
        }[self.native]
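
# Editor's note: an illustrative sketch (not part of the library). The
# human_friendly property is meant to be dropped into a sentence, e.g.:
#
#     reason = CRLReason('key_compromise')
#     message = 'The certificate was revoked because of %s.' % reason.human_friendly
#     # -> 'The certificate was revoked because of a compromised key.'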


class CRLEntryExtensionId(ObjectIdentifier):
    _map = {
        '2.5.29.21': 'crl_reason',
        '2.5.29.23': 'hold_instruction_code',
        '2.5.29.24': 'invalidity_date',
        '2.5.29.29': 'certificate_issuer',
    }


class CRLEntryExtension(Sequence):
    _fields = [
        ('extn_id', CRLEntryExtensionId),
        ('critical', Boolean, {'default': False}),
        ('extn_value', ParsableOctetString),
    ]

    _oid_pair = ('extn_id', 'extn_value')
    _oid_specs = {
        'crl_reason': CRLReason,
        'hold_instruction_code': ObjectIdentifier,
        'invalidity_date': GeneralizedTime,
        'certificate_issuer': GeneralNames,
    }


class CRLEntryExtensions(SequenceOf):
    _child_spec = CRLEntryExtension


class RevokedCertificate(Sequence):
    _fields = [
        ('user_certificate', Integer),
        ('revocation_date', Time),
        ('crl_entry_extensions', CRLEntryExtensions, {'optional': True}),
    ]

    _processed_extensions = False
    _critical_extensions = None
    _crl_reason_value = None
    _invalidity_date_value = None
    _certificate_issuer_value = None
    _issuer_name = False

    def _set_extensions(self):
        """
        Sets common named extensions to private attributes and creates a list
        of critical extensions
        """

        self._critical_extensions = set()

        for extension in self['crl_entry_extensions']:
            name = extension['extn_id'].native
            attribute_name = '_%s_value' % name
            if hasattr(self, attribute_name):
                setattr(self, attribute_name, extension['extn_value'].parsed)
            if extension['critical'].native:
                self._critical_extensions.add(name)

        self._processed_extensions = True

    @property
    def critical_extensions(self):
        """
        Returns a set of the names (or OID if not a known extension) of the
        extensions marked as critical

        :return:
            A set of unicode strings
        """

        if not self._processed_extensions:
            self._set_extensions()
        return self._critical_extensions

    @property
    def crl_reason_value(self):
        """
        This extension indicates the reason that a certificate was revoked.

        :return:
            None or a CRLReason object
        """

        if self._processed_extensions is False:
            self._set_extensions()
        return self._crl_reason_value

    @property
    def invalidity_date_value(self):
        """
        This extension indicates the suspected date/time the private key was
        compromised or the certificate became invalid. This would usually be
        before the revocation date, which is when the CA processed the
        revocation.

        :return:
            None or a GeneralizedTime object
        """

        if self._processed_extensions is False:
            self._set_extensions()
        return self._invalidity_date_value

    @property
    def certificate_issuer_value(self):
        """
        This extension indicates the issuer of the certificate in question,
        and is used in indirect CRLs. CRL entries without this extension are
        for certificates issued from the last seen issuer.

        :return:
            None or an x509.GeneralNames object
        """

        if self._processed_extensions is False:
            self._set_extensions()
        return self._certificate_issuer_value

    @property
    def issuer_name(self):
        """
        :return:
            None, or an asn1crypto.x509.Name object for the issuer of the cert
        """

        if self._issuer_name is False:
            self._issuer_name = None
            if self.certificate_issuer_value:
                for general_name in self.certificate_issuer_value:
                    if general_name.name == 'directory_name':
                        self._issuer_name = general_name.chosen
                        break
        return self._issuer_name


class RevokedCertificates(SequenceOf):
    _child_spec = RevokedCertificate


class TbsCertList(Sequence):
    _fields = [
        ('version', Version, {'optional': True}),
        ('signature', SignedDigestAlgorithm),
        ('issuer', Name),
        ('this_update', Time),
        ('next_update', Time, {'optional': True}),
        ('revoked_certificates', RevokedCertificates, {'optional': True}),
        ('crl_extensions', TBSCertListExtensions, {'explicit': 0, 'optional': True}),
    ]


class CertificateList(Sequence):
    _fields = [
        ('tbs_cert_list', TbsCertList),
        ('signature_algorithm', SignedDigestAlgorithm),
        ('signature', OctetBitString),
    ]

    _processed_extensions = False
    _critical_extensions = None
    _issuer_alt_name_value = None
    _crl_number_value = None
    _delta_crl_indicator_value = None
    _issuing_distribution_point_value = None
    _authority_key_identifier_value = None
    _freshest_crl_value = None
    _authority_information_access_value = None
    _issuer_cert_urls = None
    _delta_crl_distribution_points = None
    _sha1 = None
    _sha256 = None

    def _set_extensions(self):
        """
        Sets common named extensions to private attributes and creates a list
        of critical extensions
        """

        self._critical_extensions = set()

        for extension in self['tbs_cert_list']['crl_extensions']:
            name = extension['extn_id'].native
            attribute_name = '_%s_value' % name
            if hasattr(self, attribute_name):
                setattr(self, attribute_name, extension['extn_value'].parsed)
            if extension['critical'].native:
                self._critical_extensions.add(name)

        self._processed_extensions = True

    @property
    def critical_extensions(self):
        """
        Returns a set of the names (or OID if not a known extension) of the
        extensions marked as critical

        :return:
            A set of unicode strings
        """

        if not self._processed_extensions:
            self._set_extensions()
        return self._critical_extensions

    @property
    def issuer_alt_name_value(self):
        """
        This extension allows associating one or more alternative names with
        the issuer of the CRL.

        :return:
            None or an x509.GeneralNames object
        """

        if self._processed_extensions is False:
            self._set_extensions()
        return self._issuer_alt_name_value

    @property
    def crl_number_value(self):
        """
        This extension adds a monotonically increasing number to the CRL and is
        used to distinguish different versions of the CRL.

        :return:
            None or an Integer object
        """

        if self._processed_extensions is False:
            self._set_extensions()
        return self._crl_number_value

    @property
    def delta_crl_indicator_value(self):
        """
        This extension indicates a CRL is a delta CRL, and contains the CRL
        number of the base CRL that it is a delta from.

        :return:
            None or an Integer object
        """

        if self._processed_extensions is False:
            self._set_extensions()
        return self._delta_crl_indicator_value

    @property
    def issuing_distribution_point_value(self):
        """
        This extension includes information about what types of revocations
        and certificates are part of the CRL.

        :return:
            None or an IssuingDistributionPoint object
        """

        if self._processed_extensions is False:
            self._set_extensions()
        return self._issuing_distribution_point_value

    @property
    def authority_key_identifier_value(self):
        """
        This extension helps in identifying the public key with which to
        validate the authenticity of the CRL.

        :return:
            None or an AuthorityKeyIdentifier object
        """

        if self._processed_extensions is False:
            self._set_extensions()
        return self._authority_key_identifier_value

    @property
    def freshest_crl_value(self):
        """
        This extension is used in complete CRLs to indicate where a delta CRL
        may be located.

        :return:
            None or a CRLDistributionPoints object
        """

        if self._processed_extensions is False:
            self._set_extensions()
        return self._freshest_crl_value

    @property
    def authority_information_access_value(self):
        """
        This extension is used to provide a URL with which to download the
        certificate used to sign this CRL.

        :return:
            None or an AuthorityInfoAccessSyntax object
        """

        if self._processed_extensions is False:
            self._set_extensions()
        return self._authority_information_access_value

    @property
    def issuer(self):
        """
        :return:
            An asn1crypto.x509.Name object for the issuer of the CRL
        """

        return self['tbs_cert_list']['issuer']

    @property
    def authority_key_identifier(self):
        """
        :return:
            None or a byte string of the key_identifier from the authority key
            identifier extension
        """

        if not self.authority_key_identifier_value:
            return None

        return self.authority_key_identifier_value['key_identifier'].native

    @property
    def issuer_cert_urls(self):
        """
        :return:
            A list of unicode strings that are URLs that should contain either
            an individual DER-encoded X.509 certificate, or a DER-encoded CMS
            message containing multiple certificates
        """

        if self._issuer_cert_urls is None:
            self._issuer_cert_urls = []
            if self.authority_information_access_value:
                for entry in self.authority_information_access_value:
                    if entry['access_method'].native == 'ca_issuers':
                        location = entry['access_location']
                        if location.name != 'uniform_resource_identifier':
                            continue
                        url = location.native
                        if url.lower()[0:7] == 'http://':
                            self._issuer_cert_urls.append(url)
        return self._issuer_cert_urls

    @property
    def delta_crl_distribution_points(self):
        """
        Returns delta CRL URLs - only applies to complete CRLs

        :return:
            A list of zero or more DistributionPoint objects
        """

        if self._delta_crl_distribution_points is None:
            self._delta_crl_distribution_points = []

            if self.freshest_crl_value is not None:
                for distribution_point in self.freshest_crl_value:
                    distribution_point_name = distribution_point['distribution_point']
                    # RFC 5280 indicates conforming CA should not use the relative form
                    if distribution_point_name.name == 'name_relative_to_crl_issuer':
                        continue
                    # This library is currently only concerned with HTTP-based CRLs
                    for general_name in distribution_point_name.chosen:
                        if general_name.name == 'uniform_resource_identifier':
                            self._delta_crl_distribution_points.append(distribution_point)

        return self._delta_crl_distribution_points

    @property
    def signature(self):
        """
        :return:
            A byte string of the signature
        """

        return self['signature'].native

    @property
    def sha1(self):
        """
        :return:
            The SHA1 hash of the DER-encoded bytes of this certificate list
        """

        if self._sha1 is None:
            self._sha1 = hashlib.sha1(self.dump()).digest()
        return self._sha1

    @property
    def sha256(self):
        """
        :return:
            The SHA-256 hash of the DER-encoded bytes of this certificate list
        """

        if self._sha256 is None:
            self._sha256 = hashlib.sha256(self.dump()).digest()
        return self._sha256
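
# Editor's note: an illustrative sketch (not part of the library). Assuming
# `crl_der` holds the DER bytes of a CRL (unwrap PEM first via asn1crypto.pem)
# and that the optional revoked_certificates field is present, the revoked
# serial numbers and reasons can be walked like this:
#
#     cert_list = CertificateList.load(crl_der)
#     for revoked in cert_list['tbs_cert_list']['revoked_certificates']:
#         serial = revoked['user_certificate'].native
#         reason = revoked.crl_reason_value
#         print(serial, reason.native if reason else 'unspecified')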
@@ -0,0 +1,96 @@
# coding: utf-8

"""
ASN.1 type classes for certificate signing requests (CSR). Exports the
following items:

 - CertificationRequest()

Other type classes are defined that help compose the types listed above.
"""

from __future__ import unicode_literals, division, absolute_import, print_function

from .algos import SignedDigestAlgorithm
from .core import (
    Any,
    Integer,
    ObjectIdentifier,
    OctetBitString,
    Sequence,
    SetOf,
)
from .keys import PublicKeyInfo
from .x509 import DirectoryString, Extensions, Name


# The structures in this file are taken from https://tools.ietf.org/html/rfc2986
# and https://tools.ietf.org/html/rfc2985


class Version(Integer):
    _map = {
        0: 'v1',
    }


class CSRAttributeType(ObjectIdentifier):
    _map = {
        '1.2.840.113549.1.9.7': 'challenge_password',
        '1.2.840.113549.1.9.9': 'extended_certificate_attributes',
        '1.2.840.113549.1.9.14': 'extension_request',
    }


class SetOfDirectoryString(SetOf):
    _child_spec = DirectoryString


class Attribute(Sequence):
    _fields = [
        ('type', ObjectIdentifier),
        ('values', SetOf, {'spec': Any}),
    ]


class SetOfAttributes(SetOf):
    _child_spec = Attribute


class SetOfExtensions(SetOf):
    _child_spec = Extensions


class CRIAttribute(Sequence):
    _fields = [
        ('type', CSRAttributeType),
        ('values', Any),
    ]

    _oid_pair = ('type', 'values')
    _oid_specs = {
        'challenge_password': SetOfDirectoryString,
        'extended_certificate_attributes': SetOfAttributes,
        'extension_request': SetOfExtensions,
    }


class CRIAttributes(SetOf):
    _child_spec = CRIAttribute


class CertificationRequestInfo(Sequence):
    _fields = [
        ('version', Version),
        ('subject', Name),
        ('subject_pk_info', PublicKeyInfo),
        ('attributes', CRIAttributes, {'implicit': 0, 'optional': True}),
    ]


class CertificationRequest(Sequence):
    _fields = [
        ('certification_request_info', CertificationRequestInfo),
        ('signature_algorithm', SignedDigestAlgorithm),
        ('signature', OctetBitString),
    ]
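
# Editor's note: an illustrative sketch (not part of the library). Assuming
# `csr_der` holds a DER-encoded PKCS#10 request, the subject and any requested
# extensions can be read like this:
#
#     csr = CertificationRequest.load(csr_der)
#     info = csr['certification_request_info']
#     print(info['subject'].native)
#     for attr in info['attributes']:
#         if attr['type'].native == 'extension_request':
#             extensions = attr['values'][0]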
File diff suppressed because it is too large
@@ -0,0 +1,703 @@
# coding: utf-8

"""
ASN.1 type classes for the online certificate status protocol (OCSP). Exports
the following items:

 - OCSPRequest()
 - OCSPResponse()

Other type classes are defined that help compose the types listed above.
"""

from __future__ import unicode_literals, division, absolute_import, print_function

from ._errors import unwrap
from .algos import DigestAlgorithm, SignedDigestAlgorithm
from .core import (
    Boolean,
    Choice,
    Enumerated,
    GeneralizedTime,
    IA5String,
    Integer,
    Null,
    ObjectIdentifier,
    OctetBitString,
    OctetString,
    ParsableOctetString,
    Sequence,
    SequenceOf,
)
from .crl import AuthorityInfoAccessSyntax, CRLReason
from .keys import PublicKeyAlgorithm
from .x509 import Certificate, GeneralName, GeneralNames, Name


# The structures in this file are taken from https://tools.ietf.org/html/rfc6960


class Version(Integer):
    _map = {
        0: 'v1'
    }


class CertId(Sequence):
    _fields = [
        ('hash_algorithm', DigestAlgorithm),
        ('issuer_name_hash', OctetString),
        ('issuer_key_hash', OctetString),
        ('serial_number', Integer),
    ]


class ServiceLocator(Sequence):
    _fields = [
        ('issuer', Name),
        ('locator', AuthorityInfoAccessSyntax),
    ]


class RequestExtensionId(ObjectIdentifier):
    _map = {
        '1.3.6.1.5.5.7.48.1.7': 'service_locator',
    }


class RequestExtension(Sequence):
    _fields = [
        ('extn_id', RequestExtensionId),
        ('critical', Boolean, {'default': False}),
        ('extn_value', ParsableOctetString),
    ]

    _oid_pair = ('extn_id', 'extn_value')
    _oid_specs = {
        'service_locator': ServiceLocator,
    }


class RequestExtensions(SequenceOf):
    _child_spec = RequestExtension


class Request(Sequence):
    _fields = [
        ('req_cert', CertId),
        ('single_request_extensions', RequestExtensions, {'explicit': 0, 'optional': True}),
    ]

    _processed_extensions = False
    _critical_extensions = None
    _service_locator_value = None

    def _set_extensions(self):
        """
        Sets common named extensions to private attributes and creates a list
        of critical extensions
        """

        self._critical_extensions = set()

        for extension in self['single_request_extensions']:
            name = extension['extn_id'].native
            attribute_name = '_%s_value' % name
            if hasattr(self, attribute_name):
                setattr(self, attribute_name, extension['extn_value'].parsed)
            if extension['critical'].native:
                self._critical_extensions.add(name)

        self._processed_extensions = True

    @property
    def critical_extensions(self):
        """
        Returns a set of the names (or OID if not a known extension) of the
        extensions marked as critical

        :return:
            A set of unicode strings
        """

        if not self._processed_extensions:
            self._set_extensions()
        return self._critical_extensions

    @property
    def service_locator_value(self):
        """
        This extension is used when communicating with an OCSP responder that
        acts as a proxy for OCSP requests

        :return:
            None or a ServiceLocator object
        """

        if self._processed_extensions is False:
            self._set_extensions()
        return self._service_locator_value


class Requests(SequenceOf):
    _child_spec = Request


class ResponseType(ObjectIdentifier):
    _map = {
        '1.3.6.1.5.5.7.48.1.1': 'basic_ocsp_response',
    }


class AcceptableResponses(SequenceOf):
    _child_spec = ResponseType


class PreferredSignatureAlgorithm(Sequence):
    _fields = [
        ('sig_identifier', SignedDigestAlgorithm),
        ('cert_identifier', PublicKeyAlgorithm, {'optional': True}),
    ]


class PreferredSignatureAlgorithms(SequenceOf):
    _child_spec = PreferredSignatureAlgorithm


class TBSRequestExtensionId(ObjectIdentifier):
    _map = {
        '1.3.6.1.5.5.7.48.1.2': 'nonce',
        '1.3.6.1.5.5.7.48.1.4': 'acceptable_responses',
        '1.3.6.1.5.5.7.48.1.8': 'preferred_signature_algorithms',
    }


class TBSRequestExtension(Sequence):
    _fields = [
        ('extn_id', TBSRequestExtensionId),
        ('critical', Boolean, {'default': False}),
        ('extn_value', ParsableOctetString),
    ]

    _oid_pair = ('extn_id', 'extn_value')
    _oid_specs = {
        'nonce': OctetString,
        'acceptable_responses': AcceptableResponses,
        'preferred_signature_algorithms': PreferredSignatureAlgorithms,
    }


class TBSRequestExtensions(SequenceOf):
    _child_spec = TBSRequestExtension


class TBSRequest(Sequence):
    _fields = [
        ('version', Version, {'explicit': 0, 'default': 'v1'}),
        ('requestor_name', GeneralName, {'explicit': 1, 'optional': True}),
        ('request_list', Requests),
        ('request_extensions', TBSRequestExtensions, {'explicit': 2, 'optional': True}),
    ]


class Certificates(SequenceOf):
    _child_spec = Certificate


class Signature(Sequence):
    _fields = [
        ('signature_algorithm', SignedDigestAlgorithm),
        ('signature', OctetBitString),
        ('certs', Certificates, {'explicit': 0, 'optional': True}),
    ]


class OCSPRequest(Sequence):
    _fields = [
        ('tbs_request', TBSRequest),
        ('optional_signature', Signature, {'explicit': 0, 'optional': True}),
    ]

    _processed_extensions = False
    _critical_extensions = None
    _nonce_value = None
    _acceptable_responses_value = None
    _preferred_signature_algorithms_value = None

    def _set_extensions(self):
        """
        Sets common named extensions to private attributes and creates a list
        of critical extensions
        """

        self._critical_extensions = set()

        for extension in self['tbs_request']['request_extensions']:
            name = extension['extn_id'].native
            attribute_name = '_%s_value' % name
            if hasattr(self, attribute_name):
                setattr(self, attribute_name, extension['extn_value'].parsed)
            if extension['critical'].native:
                self._critical_extensions.add(name)

        self._processed_extensions = True

    @property
    def critical_extensions(self):
        """
        Returns a set of the names (or OID if not a known extension) of the
        extensions marked as critical

        :return:
            A set of unicode strings
        """

        if not self._processed_extensions:
            self._set_extensions()
        return self._critical_extensions

    @property
    def nonce_value(self):
        """
        This extension is used to prevent replay attacks by including a unique,
        random value with each request/response pair

        :return:
            None or an OctetString object
        """

        if self._processed_extensions is False:
            self._set_extensions()
        return self._nonce_value

    @property
    def acceptable_responses_value(self):
        """
        This extension is used to allow the client and server to communicate
        with alternative response formats other than just basic_ocsp_response,
        although no other formats are defined in the standard.

        :return:
            None or an AcceptableResponses object
        """

        if self._processed_extensions is False:
            self._set_extensions()
        return self._acceptable_responses_value

    @property
    def preferred_signature_algorithms_value(self):
        """
        This extension is used by the client to define what signature algorithms
        are preferred, including both the hash algorithm and the public key
        algorithm, with a level of detail down to even the public key algorithm
        parameters, such as curve name.

        :return:
            None or a PreferredSignatureAlgorithms object
        """

        if self._processed_extensions is False:
            self._set_extensions()
        return self._preferred_signature_algorithms_value


class OCSPResponseStatus(Enumerated):
    _map = {
        0: 'successful',
        1: 'malformed_request',
        2: 'internal_error',
        3: 'try_later',
        5: 'sign_required',
        6: 'unauthorized',
    }


class ResponderId(Choice):
    _alternatives = [
        ('by_name', Name, {'explicit': 1}),
        ('by_key', OctetString, {'explicit': 2}),
    ]


# Custom class to return a meaningful .native attribute from CertStatus()
class StatusGood(Null):
    def set(self, value):
        """
        Sets the value of the object

        :param value:
            None or 'good'
        """

        if value is not None and value != 'good' and not isinstance(value, Null):
            raise ValueError(unwrap(
                '''
                value must be one of None, "good", not %s
                ''',
                repr(value)
            ))

        self.contents = b''

    @property
    def native(self):
        return 'good'


# Custom class to return a meaningful .native attribute from CertStatus()
class StatusUnknown(Null):
    def set(self, value):
        """
        Sets the value of the object

        :param value:
            None or 'unknown'
        """

        if value is not None and value != 'unknown' and not isinstance(value, Null):
            raise ValueError(unwrap(
                '''
                value must be one of None, "unknown", not %s
                ''',
                repr(value)
            ))

        self.contents = b''

    @property
    def native(self):
        return 'unknown'


class RevokedInfo(Sequence):
    _fields = [
        ('revocation_time', GeneralizedTime),
        ('revocation_reason', CRLReason, {'explicit': 0, 'optional': True}),
    ]


class CertStatus(Choice):
    _alternatives = [
        ('good', StatusGood, {'implicit': 0}),
        ('revoked', RevokedInfo, {'implicit': 1}),
        ('unknown', StatusUnknown, {'implicit': 2}),
    ]


class CrlId(Sequence):
    _fields = [
        ('crl_url', IA5String, {'explicit': 0, 'optional': True}),
        ('crl_num', Integer, {'explicit': 1, 'optional': True}),
        ('crl_time', GeneralizedTime, {'explicit': 2, 'optional': True}),
    ]


class SingleResponseExtensionId(ObjectIdentifier):
    _map = {
        '1.3.6.1.5.5.7.48.1.3': 'crl',
        '1.3.6.1.5.5.7.48.1.6': 'archive_cutoff',
        # These are CRLEntryExtension values from
        # https://tools.ietf.org/html/rfc5280
        '2.5.29.21': 'crl_reason',
        '2.5.29.24': 'invalidity_date',
        '2.5.29.29': 'certificate_issuer',
        # https://tools.ietf.org/html/rfc6962.html#page-13
        '1.3.6.1.4.1.11129.2.4.5': 'signed_certificate_timestamp_list',
    }


class SingleResponseExtension(Sequence):
    _fields = [
        ('extn_id', SingleResponseExtensionId),
        ('critical', Boolean, {'default': False}),
        ('extn_value', ParsableOctetString),
    ]

    _oid_pair = ('extn_id', 'extn_value')
    _oid_specs = {
        'crl': CrlId,
        'archive_cutoff': GeneralizedTime,
        'crl_reason': CRLReason,
        'invalidity_date': GeneralizedTime,
        'certificate_issuer': GeneralNames,
        'signed_certificate_timestamp_list': OctetString,
    }


class SingleResponseExtensions(SequenceOf):
    _child_spec = SingleResponseExtension


class SingleResponse(Sequence):
    _fields = [
        ('cert_id', CertId),
        ('cert_status', CertStatus),
        ('this_update', GeneralizedTime),
        ('next_update', GeneralizedTime, {'explicit': 0, 'optional': True}),
        ('single_extensions', SingleResponseExtensions, {'explicit': 1, 'optional': True}),
    ]

    _processed_extensions = False
    _critical_extensions = None
    _crl_value = None
    _archive_cutoff_value = None
    _crl_reason_value = None
    _invalidity_date_value = None
    _certificate_issuer_value = None

    def _set_extensions(self):
        """
        Sets common named extensions to private attributes and creates a list
        of critical extensions
        """

        self._critical_extensions = set()

        for extension in self['single_extensions']:
            name = extension['extn_id'].native
            attribute_name = '_%s_value' % name
            if hasattr(self, attribute_name):
                setattr(self, attribute_name, extension['extn_value'].parsed)
            if extension['critical'].native:
                self._critical_extensions.add(name)

        self._processed_extensions = True

    @property
    def critical_extensions(self):
        """
        Returns a set of the names (or OID if not a known extension) of the
        extensions marked as critical

        :return:
            A set of unicode strings
        """

        if not self._processed_extensions:
            self._set_extensions()
        return self._critical_extensions

    @property
    def crl_value(self):
        """
        This extension is used to locate the CRL that a certificate's revocation
        is contained within.

        :return:
            None or a CrlId object
        """

        if self._processed_extensions is False:
            self._set_extensions()
        return self._crl_value

    @property
    def archive_cutoff_value(self):
        """
        This extension is used to indicate the date at which an archived
        (historical) certificate status entry will no longer be available.

        :return:
            None or a GeneralizedTime object
        """

        if self._processed_extensions is False:
            self._set_extensions()
        return self._archive_cutoff_value

    @property
    def crl_reason_value(self):
        """
        This extension indicates the reason that a certificate was revoked.

        :return:
            None or a CRLReason object
        """

        if self._processed_extensions is False:
            self._set_extensions()
        return self._crl_reason_value

    @property
    def invalidity_date_value(self):
        """
        This extension indicates the suspected date/time the private key was
        compromised or the certificate became invalid. This would usually be
        before the revocation date, which is when the CA processed the
        revocation.

        :return:
            None or a GeneralizedTime object
        """

        if self._processed_extensions is False:
            self._set_extensions()
        return self._invalidity_date_value

    @property
    def certificate_issuer_value(self):
        """
        This extension indicates the issuer of the certificate in question.

        :return:
            None or an x509.GeneralNames object
        """

        if self._processed_extensions is False:
            self._set_extensions()
        return self._certificate_issuer_value


class Responses(SequenceOf):
    _child_spec = SingleResponse


class ResponseDataExtensionId(ObjectIdentifier):
    _map = {
        '1.3.6.1.5.5.7.48.1.2': 'nonce',
        '1.3.6.1.5.5.7.48.1.9': 'extended_revoke',
    }


class ResponseDataExtension(Sequence):
    _fields = [
        ('extn_id', ResponseDataExtensionId),
        ('critical', Boolean, {'default': False}),
        ('extn_value', ParsableOctetString),
    ]

    _oid_pair = ('extn_id', 'extn_value')
    _oid_specs = {
        'nonce': OctetString,
        'extended_revoke': Null,
    }


class ResponseDataExtensions(SequenceOf):
    _child_spec = ResponseDataExtension


class ResponseData(Sequence):
    _fields = [
        ('version', Version, {'explicit': 0, 'default': 'v1'}),
        ('responder_id', ResponderId),
        ('produced_at', GeneralizedTime),
        ('responses', Responses),
        ('response_extensions', ResponseDataExtensions, {'explicit': 1, 'optional': True}),
    ]


class BasicOCSPResponse(Sequence):
    _fields = [
        ('tbs_response_data', ResponseData),
        ('signature_algorithm', SignedDigestAlgorithm),
        ('signature', OctetBitString),
        ('certs', Certificates, {'explicit': 0, 'optional': True}),
    ]


class ResponseBytes(Sequence):
    _fields = [
        ('response_type', ResponseType),
        ('response', ParsableOctetString),
    ]

    _oid_pair = ('response_type', 'response')
    _oid_specs = {
        'basic_ocsp_response': BasicOCSPResponse,
    }


class OCSPResponse(Sequence):
    _fields = [
        ('response_status', OCSPResponseStatus),
        ('response_bytes', ResponseBytes, {'explicit': 0, 'optional': True}),
    ]

    _processed_extensions = False
    _critical_extensions = None
    _nonce_value = None
    _extended_revoke_value = None

    def _set_extensions(self):
        """
        Sets common named extensions to private attributes and creates a list
        of critical extensions
        """

        self._critical_extensions = set()

        for extension in self['response_bytes']['response'].parsed['tbs_response_data']['response_extensions']:
            name = extension['extn_id'].native
            attribute_name = '_%s_value' % name
            if hasattr(self, attribute_name):
                setattr(self, attribute_name, extension['extn_value'].parsed)
            if extension['critical'].native:
                self._critical_extensions.add(name)

        self._processed_extensions = True

    @property
    def critical_extensions(self):
        """
        Returns a set of the names (or OID if not a known extension) of the
        extensions marked as critical

        :return:
            A set of unicode strings
        """

        if not self._processed_extensions:
            self._set_extensions()
        return self._critical_extensions

    @property
    def nonce_value(self):
        """
        This extension is used to prevent replay attacks on the request/response
        exchange

        :return:
            None or an OctetString object
        """

        if self._processed_extensions is False:
            self._set_extensions()
        return self._nonce_value

    @property
    def extended_revoke_value(self):
        """
        This extension is used to signal that the responder will return a
        "revoked" status for non-issued certificates.

        :return:
            None or a Null object (if present)
        """

        if self._processed_extensions is False:
            self._set_extensions()
        return self._extended_revoke_value

    @property
    def basic_ocsp_response(self):
        """
        A shortcut into the BasicOCSPResponse sequence

        :return:
            None or an asn1crypto.ocsp.BasicOCSPResponse object
        """

        return self['response_bytes']['response'].parsed

    @property
    def response_data(self):
        """
        A shortcut into the parsed, ResponseData sequence

        :return:
            None or an asn1crypto.ocsp.ResponseData object
        """

        return self['response_bytes']['response'].parsed['tbs_response_data']
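
# Editor's note: an illustrative sketch (not part of the library). Assuming
# `ocsp_der` holds a DER-encoded OCSP response, the overall status and the
# status of the first certificate can be checked like this:
#
#     resp = OCSPResponse.load(ocsp_der)
#     if resp['response_status'].native == 'successful':
#         single = resp.response_data['responses'][0]
#         print(single['cert_status'].name)  # 'good', 'revoked' or 'unknown'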
@@ -0,0 +1,285 @@
# coding: utf-8

"""
Functions for parsing and dumping using the ASN.1 DER encoding. Exports the
following items:

 - emit()
 - parse()
 - peek()

Other type classes are defined that help compose the types listed above.
"""

from __future__ import unicode_literals, division, absolute_import, print_function

import sys

from ._types import byte_cls, chr_cls, type_name
from .util import int_from_bytes, int_to_bytes

_PY2 = sys.version_info <= (3,)
_INSUFFICIENT_DATA_MESSAGE = 'Insufficient data - %s bytes requested but only %s available'


def emit(class_, method, tag, contents):
    """
    Constructs a byte string of an ASN.1 DER-encoded value

    This is typically not useful. Instead, use one of the standard classes from
    asn1crypto.core, or construct a new class with specific fields, and call the
    .dump() method.

    :param class_:
        An integer ASN.1 class value: 0 (universal), 1 (application),
        2 (context), 3 (private)

    :param method:
        An integer ASN.1 method value: 0 (primitive), 1 (constructed)

    :param tag:
        An integer ASN.1 tag value

    :param contents:
        A byte string of the encoded byte contents

    :return:
        A byte string of the ASN.1 DER value (header and contents)
    """

    if not isinstance(class_, int):
        raise TypeError('class_ must be an integer, not %s' % type_name(class_))

    if class_ < 0 or class_ > 3:
        raise ValueError('class_ must be one of 0, 1, 2 or 3, not %s' % class_)

    if not isinstance(method, int):
        raise TypeError('method must be an integer, not %s' % type_name(method))

    if method < 0 or method > 1:
        raise ValueError('method must be 0 or 1, not %s' % method)

    if not isinstance(tag, int):
        raise TypeError('tag must be an integer, not %s' % type_name(tag))

    if tag < 0:
        raise ValueError('tag must be greater than zero, not %s' % tag)

    if not isinstance(contents, byte_cls):
        raise TypeError('contents must be a byte string, not %s' % type_name(contents))

    return _dump_header(class_, method, tag, contents) + contents


def parse(contents, strict=False):
    """
    Parses a byte string of ASN.1 BER/DER-encoded data.

    This is typically not useful. Instead, use one of the standard classes from
    asn1crypto.core, or construct a new class with specific fields, and call the
    .load() class method.

    :param contents:
        A byte string of BER/DER-encoded data

    :param strict:
        A boolean indicating if trailing data should be forbidden - if so, a
        ValueError will be raised when trailing data exists

    :raises:
        ValueError - when the contents do not contain an ASN.1 header or are truncated in some way
        TypeError - when contents is not a byte string

    :return:
        A 6-element tuple:
         - 0: integer class (0 to 3)
         - 1: integer method
         - 2: integer tag
         - 3: byte string header
         - 4: byte string content
         - 5: byte string trailer
    """

    if not isinstance(contents, byte_cls):
        raise TypeError('contents must be a byte string, not %s' % type_name(contents))

    contents_len = len(contents)
    info, consumed = _parse(contents, contents_len)
    if strict and consumed != contents_len:
        raise ValueError('Extra data - %d bytes of trailing data were provided' % (contents_len - consumed))
    return info


def peek(contents):
    """
    Parses a byte string of ASN.1 BER/DER-encoded data to find the length

    This is typically used to look into an encoded value to see how long the
    next chunk of ASN.1-encoded data is. Primarily it is useful when a
    value is a concatenation of multiple values.

    :param contents:
        A byte string of BER/DER-encoded data

    :raises:
        ValueError - when the contents do not contain an ASN.1 header or are truncated in some way
        TypeError - when contents is not a byte string

    :return:
        An integer with the number of bytes occupied by the ASN.1 value
    """

    if not isinstance(contents, byte_cls):
        raise TypeError('contents must be a byte string, not %s' % type_name(contents))

    info, consumed = _parse(contents, len(contents))
    return consumed
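
# Editor's note: an illustrative round trip (not part of the library):
#
#     encoded = emit(0, 0, 2, b'\x01')  # universal, primitive, INTEGER 1
#     class_, method, tag, header, content, trailer = parse(encoded, strict=True)
#     assert (class_, method, tag, content) == (0, 0, 2, b'\x01')
#     assert peek(encoded) == len(encoded)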


def _parse(encoded_data, data_len, pointer=0, lengths_only=False):
    """
    Parses a byte string into component parts

    :param encoded_data:
        A byte string that contains BER-encoded data

    :param data_len:
        The integer length of the encoded data

    :param pointer:
        The index in the byte string to parse from

    :param lengths_only:
        A boolean to cause the call to return a 2-element tuple of the integer
        number of bytes in the header and the integer number of bytes in the
        contents. Internal use only.

    :return:
        A 2-element tuple:
         - 0: A tuple of (class_, method, tag, header, content, trailer)
         - 1: An integer indicating how many bytes were consumed
    """

    if data_len < pointer + 2:
        raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (2, data_len - pointer))

    start = pointer
    first_octet = ord(encoded_data[pointer]) if _PY2 else encoded_data[pointer]
    pointer += 1

    tag = first_octet & 31
    # Base 128 tag number, using the 8th bit as a continuation indicator
    if tag == 31:
        tag = 0
        while True:
            num = ord(encoded_data[pointer]) if _PY2 else encoded_data[pointer]
            pointer += 1
            tag *= 128
            tag += num & 127
            if num >> 7 == 0:
                break

    length_octet = ord(encoded_data[pointer]) if _PY2 else encoded_data[pointer]
    pointer += 1

    if length_octet >> 7 == 0:
        if lengths_only:
            return (pointer, pointer + (length_octet & 127))
        contents_end = pointer + (length_octet & 127)

    else:
        length_octets = length_octet & 127
        if length_octets:
            pointer += length_octets
            contents_end = pointer + int_from_bytes(encoded_data[pointer - length_octets:pointer], signed=False)
            if lengths_only:
                return (pointer, contents_end)

        else:
            # To properly parse indefinite length values, we need to scan forward
            # parsing headers until we find a value with a length of zero. If we
            # just scanned looking for \x00\x00, nested indefinite length values
            # would not work.
            contents_end = pointer
            while contents_end < data_len:
                sub_header_end, contents_end = _parse(encoded_data, data_len, contents_end, lengths_only=True)
                if contents_end == sub_header_end and encoded_data[contents_end - 2:contents_end] == b'\x00\x00':
                    break
            if lengths_only:
                return (pointer, contents_end)
            if contents_end > data_len:
                raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (contents_end, data_len))
            return (
                (
                    first_octet >> 6,
                    (first_octet >> 5) & 1,
                    tag,
                    encoded_data[start:pointer],
                    encoded_data[pointer:contents_end - 2],
                    b'\x00\x00'
                ),
                contents_end
            )

    if contents_end > data_len:
        raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (contents_end, data_len))
    return (
        (
            first_octet >> 6,
            (first_octet >> 5) & 1,
            tag,
            encoded_data[start:pointer],
            encoded_data[pointer:contents_end],
            b''
        ),
        contents_end
    )


def _dump_header(class_, method, tag, contents):
    """
    Constructs the header bytes for an ASN.1 object

    :param class_:
        An integer ASN.1 class value: 0 (universal), 1 (application),
        2 (context), 3 (private)

    :param method:
        An integer ASN.1 method value: 0 (primitive), 1 (constructed)

    :param tag:
        An integer ASN.1 tag value

    :param contents:
        A byte string of the encoded byte contents

    :return:
        A byte string of the ASN.1 DER header
    """

    header = b''

    id_num = 0
    id_num |= class_ << 6
    id_num |= method << 5

    if tag >= 31:
        cont_bit = 0
        while tag > 0:
            header = chr_cls(cont_bit | (tag & 0x7f)) + header
            if not cont_bit:
                cont_bit = 0x80
            tag = tag >> 7
        header = chr_cls(id_num | 31) + header
    else:
        header += chr_cls(id_num | tag)

    length = len(contents)
    if length <= 127:
        header += chr_cls(length)
    else:
        length_bytes = int_to_bytes(length)
        header += chr_cls(0x80 | len(length_bytes))
        header += length_bytes

    return header
@@ -0,0 +1,84 @@
# coding: utf-8

"""
ASN.1 type classes for PDF signature structures. Adds extra OID mapping and
value parsing to asn1crypto.x509.Extension() and asn1crypto.cms.CMSAttribute().
"""

from __future__ import unicode_literals, division, absolute_import, print_function

from .cms import CMSAttributeType, CMSAttribute
from .core import (
    Boolean,
    Integer,
    Null,
    ObjectIdentifier,
    OctetString,
    Sequence,
    SequenceOf,
    SetOf,
)
from .crl import CertificateList
from .ocsp import OCSPResponse
from .x509 import (
    Extension,
    ExtensionId,
    GeneralName,
    KeyPurposeId,
)


class AdobeArchiveRevInfo(Sequence):
    _fields = [
        ('version', Integer)
    ]


class AdobeTimestamp(Sequence):
    _fields = [
        ('version', Integer),
        ('location', GeneralName),
        ('requires_auth', Boolean, {'optional': True, 'default': False}),
    ]


class OtherRevInfo(Sequence):
    _fields = [
        ('type', ObjectIdentifier),
        ('value', OctetString),
    ]


class SequenceOfCertificateList(SequenceOf):
    _child_spec = CertificateList


class SequenceOfOCSPResponse(SequenceOf):
    _child_spec = OCSPResponse


class SequenceOfOtherRevInfo(SequenceOf):
    _child_spec = OtherRevInfo


class RevocationInfoArchival(Sequence):
    _fields = [
        ('crl', SequenceOfCertificateList, {'explicit': 0, 'optional': True}),
        ('ocsp', SequenceOfOCSPResponse, {'explicit': 1, 'optional': True}),
        ('other_rev_info', SequenceOfOtherRevInfo, {'explicit': 2, 'optional': True}),
    ]


class SetOfRevocationInfoArchival(SetOf):
    _child_spec = RevocationInfoArchival


ExtensionId._map['1.2.840.113583.1.1.9.2'] = 'adobe_archive_rev_info'
ExtensionId._map['1.2.840.113583.1.1.9.1'] = 'adobe_timestamp'
ExtensionId._map['1.2.840.113583.1.1.10'] = 'adobe_ppklite_credential'
Extension._oid_specs['adobe_archive_rev_info'] = AdobeArchiveRevInfo
Extension._oid_specs['adobe_timestamp'] = AdobeTimestamp
Extension._oid_specs['adobe_ppklite_credential'] = Null
KeyPurposeId._map['1.2.840.113583.1.1.5'] = 'pdf_signing'
CMSAttributeType._map['1.2.840.113583.1.1.8'] = 'adobe_revocation_info_archival'
CMSAttribute._oid_specs['adobe_revocation_info_archival'] = SetOfRevocationInfoArchival
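
# Editor's note: an illustrative sketch (not part of the library). Once this
# module is imported, the registrations above let the Adobe-specific values
# parse transparently; assuming `signed_attrs` is a cms.CMSAttributes value
# taken from a PDF signature's SignerInfo:
#
#     for attr in signed_attrs:
#         if attr['type'].native == 'adobe_revocation_info_archival':
#             archival = attr['values'][0]
#             crls = archival['crl']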
@@ -0,0 +1,222 @@
# coding: utf-8
|
||||
|
||||
"""
|
||||
Encoding DER to PEM and decoding PEM to DER. Exports the following items:
|
||||
|
||||
- armor()
|
||||
- detect()
|
||||
- unarmor()
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import unicode_literals, division, absolute_import, print_function
|
||||
|
||||
import base64
|
||||
import re
|
||||
import sys
|
||||
|
||||
from ._errors import unwrap
|
||||
from ._types import type_name as _type_name, str_cls, byte_cls
|
||||
|
||||
if sys.version_info < (3,):
|
||||
from cStringIO import StringIO as BytesIO
|
||||
else:
|
||||
from io import BytesIO
|
||||
|
||||
|
||||
def detect(byte_string):
|
||||
"""
|
||||
Detect if a byte string seems to contain a PEM-encoded block
|
||||
|
||||
:param byte_string:
|
||||
A byte string to look through
|
||||
|
||||
:return:
|
||||
A boolean, indicating if a PEM-encoded block is contained in the byte
|
||||
string
|
||||
"""
|
||||
|
||||
if not isinstance(byte_string, byte_cls):
|
||||
raise TypeError(unwrap(
|
||||
'''
|
||||
byte_string must be a byte string, not %s
|
||||
''',
|
||||
_type_name(byte_string)
|
||||
))
|
||||
|
||||
return byte_string.find(b'-----BEGIN') != -1 or byte_string.find(b'---- BEGIN') != -1
|
||||
|
||||
|
||||
def armor(type_name, der_bytes, headers=None):
|
||||
"""
|
||||
Armors a DER-encoded byte string in PEM
|
||||
|
||||
:param type_name:
|
||||
A unicode string that will be capitalized and placed in the header
|
||||
and footer of the block. E.g. "CERTIFICATE", "PRIVATE KEY", etc. This
|
||||
will appear as "-----BEGIN CERTIFICATE-----" and
|
||||
"-----END CERTIFICATE-----".
|
||||
|
||||
:param der_bytes:
|
||||
A byte string to be armored
|
||||
|
||||
:param headers:
|
||||
An OrderedDict of the header lines to write after the BEGIN line
|
||||
|
||||
:return:
|
||||
A byte string of the PEM block
|
||||
"""
|
||||
|
||||
if not isinstance(der_bytes, byte_cls):
|
||||
raise TypeError(unwrap(
|
||||
'''
|
||||
der_bytes must be a byte string, not %s
|
||||
''' % _type_name(der_bytes)
|
||||
))
|
||||
|
||||
if not isinstance(type_name, str_cls):
|
||||
raise TypeError(unwrap(
|
||||
'''
|
||||
type_name must be a unicode string, not %s
|
||||
''',
|
||||
_type_name(type_name)
|
||||
))
|
||||
|
||||
type_name = type_name.upper().encode('ascii')
|
||||
|
||||
output = BytesIO()
|
||||
output.write(b'-----BEGIN ')
|
||||
output.write(type_name)
|
||||
output.write(b'-----\n')
|
||||
if headers:
|
||||
for key in headers:
|
||||
output.write(key.encode('ascii'))
|
||||
output.write(b': ')
|
||||
output.write(headers[key].encode('ascii'))
|
||||
output.write(b'\n')
|
||||
output.write(b'\n')
|
||||
b64_bytes = base64.b64encode(der_bytes)
|
||||
b64_len = len(b64_bytes)
|
||||
i = 0
|
||||
while i < b64_len:
|
||||
output.write(b64_bytes[i:i + 64])
|
||||
output.write(b'\n')
|
||||
i += 64
|
||||
output.write(b'-----END ')
|
||||
output.write(type_name)
|
||||
output.write(b'-----\n')
|
||||
|
||||
return output.getvalue()


def _unarmor(pem_bytes):
    """
    Convert a PEM-encoded byte string into one or more DER-encoded byte strings

    :param pem_bytes:
        A byte string of the PEM-encoded data

    :raises:
        ValueError - when the pem_bytes do not appear to be PEM-encoded bytes

    :return:
        A generator of 3-element tuples in the format: (object_type, headers,
        der_bytes). The object_type is a unicode string of what is between
        "-----BEGIN " and "-----". Examples include: "CERTIFICATE",
        "PUBLIC KEY", "PRIVATE KEY". The headers is a dict containing any lines
        in the form "Name: Value" that are right after the begin line.
    """

    if not isinstance(pem_bytes, byte_cls):
        raise TypeError(unwrap(
            '''
            pem_bytes must be a byte string, not %s
            ''',
            _type_name(pem_bytes)
        ))

    # Valid states include: "trash", "headers", "body"
    state = 'trash'
    headers = {}
    base64_data = b''
    object_type = None

    found_start = False
    found_end = False

    for line in pem_bytes.splitlines(False):
        if line == b'':
            continue

        if state == "trash":
            # Look for a starting line, since some CA cert bundles include the
            # cert info in a parsed format above each PEM block
            type_name_match = re.match(b'^(?:---- |-----)BEGIN ([A-Z0-9 ]+)(?: ----|-----)', line)
            if not type_name_match:
                continue
            object_type = type_name_match.group(1).decode('ascii')

            found_start = True
            state = 'headers'
            continue

        if state == 'headers':
            if line.find(b':') == -1:
                state = 'body'
            else:
                decoded_line = line.decode('ascii')
                name, value = decoded_line.split(':', 1)
                headers[name] = value.strip()
                continue

        if state == 'body':
            if line[0:5] in (b'-----', b'---- '):
                der_bytes = base64.b64decode(base64_data)

                yield (object_type, headers, der_bytes)

                state = 'trash'
                headers = {}
                base64_data = b''
                object_type = None
                found_end = True
                continue

            base64_data += line

    if not found_start or not found_end:
        raise ValueError(unwrap(
            '''
            pem_bytes does not appear to contain PEM-encoded data - no
            BEGIN/END combination found
            '''
        ))


def unarmor(pem_bytes, multiple=False):
    """
    Convert a PEM-encoded byte string into a DER-encoded byte string

    :param pem_bytes:
        A byte string of the PEM-encoded data

    :param multiple:
        If True, function will return a generator

    :raises:
        ValueError - when the pem_bytes do not appear to be PEM-encoded bytes

    :return:
        A 3-element tuple (object_name, headers, der_bytes). The object_name is
        a unicode string of what is between "-----BEGIN " and "-----". Examples
        include: "CERTIFICATE", "PUBLIC KEY", "PRIVATE KEY". The headers is a
        dict containing any lines in the form "Name: Value" that are right
        after the begin line.
    """

    generator = _unarmor(pem_bytes)

    if not multiple:
        return next(generator)

    return generator
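
# Illustrative round trip (a hedged sketch, not part of the original module):
# unarmor() returns a single (type, headers, der) tuple by default, or a
# generator of such tuples when multiple=True, which suits CA bundles.
# bundle_bytes below is a hypothetical byte string of a multi-cert bundle.
#
#     from asn1crypto import pem
#
#     pem_bytes = pem.armor('CERTIFICATE', b'\x30\x03\x02\x01\x01')
#     object_type, headers, der = pem.unarmor(pem_bytes)
#     assert object_type == 'CERTIFICATE' and der == b'\x30\x03\x02\x01\x01'
#
#     for object_type, headers, der in pem.unarmor(bundle_bytes, multiple=True):
#         pass  # one tuple per PEM block in the bundle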

@@ -0,0 +1,193 @@
# coding: utf-8

"""
ASN.1 type classes for PKCS#12 files. Exports the following items:

 - CertBag()
 - CrlBag()
 - Pfx()
 - SafeBag()
 - SecretBag()

Other type classes are defined that help compose the types listed above.
"""

from __future__ import unicode_literals, division, absolute_import, print_function

from .algos import DigestInfo
from .cms import ContentInfo, SignedData
from .core import (
    Any,
    BMPString,
    Integer,
    ObjectIdentifier,
    OctetString,
    ParsableOctetString,
    Sequence,
    SequenceOf,
    SetOf,
)
from .keys import PrivateKeyInfo, EncryptedPrivateKeyInfo
from .x509 import Certificate, KeyPurposeId


# The structures in this file are taken from https://tools.ietf.org/html/rfc7292


class MacData(Sequence):
    _fields = [
        ('mac', DigestInfo),
        ('mac_salt', OctetString),
        ('iterations', Integer, {'default': 1}),
    ]


class Version(Integer):
    _map = {
        3: 'v3'
    }


class AttributeType(ObjectIdentifier):
    _map = {
        # https://tools.ietf.org/html/rfc2985#page-18
        '1.2.840.113549.1.9.20': 'friendly_name',
        '1.2.840.113549.1.9.21': 'local_key_id',
        # https://support.microsoft.com/en-us/kb/287547
        '1.3.6.1.4.1.311.17.1': 'microsoft_local_machine_keyset',
        # https://github.com/frohoff/jdk8u-dev-jdk/blob/master/src/share/classes/sun/security/pkcs12/PKCS12KeyStore.java
        # this is a set of OIDs, representing key usage, the usual value is a SET of one element OID 2.5.29.37.0
        '2.16.840.1.113894.746875.1.1': 'trusted_key_usage',
    }


class SetOfAny(SetOf):
    _child_spec = Any


class SetOfBMPString(SetOf):
    _child_spec = BMPString


class SetOfOctetString(SetOf):
    _child_spec = OctetString


class SetOfKeyPurposeId(SetOf):
    _child_spec = KeyPurposeId


class Attribute(Sequence):
    _fields = [
        ('type', AttributeType),
        ('values', None),
    ]

    _oid_specs = {
        'friendly_name': SetOfBMPString,
        'local_key_id': SetOfOctetString,
        'microsoft_csp_name': SetOfBMPString,
        'trusted_key_usage': SetOfKeyPurposeId,
    }

    def _values_spec(self):
        return self._oid_specs.get(self['type'].native, SetOfAny)

    _spec_callbacks = {
        'values': _values_spec
    }


class Attributes(SetOf):
    _child_spec = Attribute


class Pfx(Sequence):
    _fields = [
        ('version', Version),
        ('auth_safe', ContentInfo),
        ('mac_data', MacData, {'optional': True})
    ]

    _authenticated_safe = None

    @property
    def authenticated_safe(self):
        if self._authenticated_safe is None:
            content = self['auth_safe']['content']
            if isinstance(content, SignedData):
                content = content['content_info']['content']
            self._authenticated_safe = AuthenticatedSafe.load(content.native)
        return self._authenticated_safe


class AuthenticatedSafe(SequenceOf):
    _child_spec = ContentInfo


class BagId(ObjectIdentifier):
    _map = {
        '1.2.840.113549.1.12.10.1.1': 'key_bag',
        '1.2.840.113549.1.12.10.1.2': 'pkcs8_shrouded_key_bag',
        '1.2.840.113549.1.12.10.1.3': 'cert_bag',
        '1.2.840.113549.1.12.10.1.4': 'crl_bag',
        '1.2.840.113549.1.12.10.1.5': 'secret_bag',
        '1.2.840.113549.1.12.10.1.6': 'safe_contents',
    }


class CertId(ObjectIdentifier):
    _map = {
        '1.2.840.113549.1.9.22.1': 'x509',
        '1.2.840.113549.1.9.22.2': 'sdsi',
    }


class CertBag(Sequence):
    _fields = [
        ('cert_id', CertId),
        ('cert_value', ParsableOctetString, {'explicit': 0}),
    ]

    _oid_pair = ('cert_id', 'cert_value')
    _oid_specs = {
        'x509': Certificate,
    }


class CrlBag(Sequence):
    _fields = [
        ('crl_id', ObjectIdentifier),
        ('crl_value', OctetString, {'explicit': 0}),
    ]


class SecretBag(Sequence):
    _fields = [
        ('secret_type_id', ObjectIdentifier),
        ('secret_value', OctetString, {'explicit': 0}),
    ]


class SafeContents(SequenceOf):
    pass


class SafeBag(Sequence):
    _fields = [
        ('bag_id', BagId),
        ('bag_value', Any, {'explicit': 0}),
        ('bag_attributes', Attributes, {'optional': True}),
    ]

    _oid_pair = ('bag_id', 'bag_value')
    _oid_specs = {
        'key_bag': PrivateKeyInfo,
        'pkcs8_shrouded_key_bag': EncryptedPrivateKeyInfo,
        'cert_bag': CertBag,
        'crl_bag': CrlBag,
        'secret_bag': SecretBag,
        'safe_contents': SafeContents
    }


SafeContents._child_spec = SafeBag
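
# Illustrative parsing sketch (hedged, not part of the original module): load
# a DER-encoded PKCS#12 file and walk its bags. pkcs12_der is a hypothetical
# byte string read from a .p12/.pfx file; encrypted SafeContents would
# additionally need decryption, which these type classes do not perform.
#
#     from asn1crypto import pkcs12
#
#     pfx = pkcs12.Pfx.load(pkcs12_der)
#     for content_info in pfx.authenticated_safe:
#         if content_info['content_type'].native == 'data':
#             safe_contents = pkcs12.SafeContents.load(content_info['content'].native)
#             for safe_bag in safe_contents:
#                 print(safe_bag['bag_id'].native)  # e.g. 'cert_bag', 'key_bag'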

@@ -0,0 +1,310 @@
# coding: utf-8

"""
ASN.1 type classes for the time stamp protocol (TSP). Exports the following
items:

 - TimeStampReq()
 - TimeStampResp()

Also adds TimeStampedData() support to asn1crypto.cms.ContentInfo(),
TimeStampedData() and TSTInfo() support to
asn1crypto.cms.EncapsulatedContentInfo() and some oids and value parsers to
asn1crypto.cms.CMSAttribute().

Other type classes are defined that help compose the types listed above.
"""

from __future__ import unicode_literals, division, absolute_import, print_function

from .algos import DigestAlgorithm
from .cms import (
    CMSAttribute,
    CMSAttributeType,
    ContentInfo,
    ContentType,
    EncapsulatedContentInfo,
)
from .core import (
    Any,
    BitString,
    Boolean,
    Choice,
    GeneralizedTime,
    IA5String,
    Integer,
    ObjectIdentifier,
    OctetString,
    Sequence,
    SequenceOf,
    SetOf,
    UTF8String,
)
from .crl import CertificateList
from .x509 import (
    Attributes,
    CertificatePolicies,
    GeneralName,
    GeneralNames,
)


# The structures in this file are based on https://tools.ietf.org/html/rfc3161,
# https://tools.ietf.org/html/rfc4998, https://tools.ietf.org/html/rfc5544,
# https://tools.ietf.org/html/rfc5035, https://tools.ietf.org/html/rfc2634


class Version(Integer):
    _map = {
        0: 'v0',
        1: 'v1',
        2: 'v2',
        3: 'v3',
        4: 'v4',
        5: 'v5',
    }


class MessageImprint(Sequence):
    _fields = [
        ('hash_algorithm', DigestAlgorithm),
        ('hashed_message', OctetString),
    ]


class Accuracy(Sequence):
    _fields = [
        ('seconds', Integer, {'optional': True}),
        ('millis', Integer, {'implicit': 0, 'optional': True}),
        ('micros', Integer, {'implicit': 1, 'optional': True}),
    ]


class Extension(Sequence):
    _fields = [
        ('extn_id', ObjectIdentifier),
        ('critical', Boolean, {'default': False}),
        ('extn_value', OctetString),
    ]


class Extensions(SequenceOf):
    _child_spec = Extension


class TSTInfo(Sequence):
    _fields = [
        ('version', Version),
        ('policy', ObjectIdentifier),
        ('message_imprint', MessageImprint),
        ('serial_number', Integer),
        ('gen_time', GeneralizedTime),
        ('accuracy', Accuracy, {'optional': True}),
        ('ordering', Boolean, {'default': False}),
        ('nonce', Integer, {'optional': True}),
        ('tsa', GeneralName, {'explicit': 0, 'optional': True}),
        ('extensions', Extensions, {'implicit': 1, 'optional': True}),
    ]


class TimeStampReq(Sequence):
    _fields = [
        ('version', Version),
        ('message_imprint', MessageImprint),
        ('req_policy', ObjectIdentifier, {'optional': True}),
        ('nonce', Integer, {'optional': True}),
        ('cert_req', Boolean, {'default': False}),
        ('extensions', Extensions, {'implicit': 0, 'optional': True}),
    ]


class PKIStatus(Integer):
    _map = {
        0: 'granted',
        1: 'granted_with_mods',
        2: 'rejection',
        3: 'waiting',
        4: 'revocation_warning',
        5: 'revocation_notification',
    }


class PKIFreeText(SequenceOf):
    _child_spec = UTF8String


class PKIFailureInfo(BitString):
    _map = {
        0: 'bad_alg',
        2: 'bad_request',
        5: 'bad_data_format',
        14: 'time_not_available',
        15: 'unaccepted_policy',
        16: 'unaccepted_extensions',
        17: 'add_info_not_available',
        25: 'system_failure',
    }


class PKIStatusInfo(Sequence):
    _fields = [
        ('status', PKIStatus),
        ('status_string', PKIFreeText, {'optional': True}),
        ('fail_info', PKIFailureInfo, {'optional': True}),
    ]


class TimeStampResp(Sequence):
    _fields = [
        ('status', PKIStatusInfo),
        ('time_stamp_token', ContentInfo),
    ]


class MetaData(Sequence):
    _fields = [
        ('hash_protected', Boolean),
        ('file_name', UTF8String, {'optional': True}),
        ('media_type', IA5String, {'optional': True}),
        ('other_meta_data', Attributes, {'optional': True}),
    ]


class TimeStampAndCRL(SequenceOf):
    _fields = [
        ('time_stamp', EncapsulatedContentInfo),
        ('crl', CertificateList, {'optional': True}),
    ]


class TimeStampTokenEvidence(SequenceOf):
    _child_spec = TimeStampAndCRL


class DigestAlgorithms(SequenceOf):
    _child_spec = DigestAlgorithm


class EncryptionInfo(Sequence):
    _fields = [
        ('encryption_info_type', ObjectIdentifier),
        ('encryption_info_value', Any),
    ]


class PartialHashtree(SequenceOf):
    _child_spec = OctetString


class PartialHashtrees(SequenceOf):
    _child_spec = PartialHashtree


class ArchiveTimeStamp(Sequence):
    _fields = [
        ('digest_algorithm', DigestAlgorithm, {'implicit': 0, 'optional': True}),
        ('attributes', Attributes, {'implicit': 1, 'optional': True}),
        ('reduced_hashtree', PartialHashtrees, {'implicit': 2, 'optional': True}),
        ('time_stamp', ContentInfo),
    ]


class ArchiveTimeStampSequence(SequenceOf):
    _child_spec = ArchiveTimeStamp


class EvidenceRecord(Sequence):
    _fields = [
        ('version', Version),
        ('digest_algorithms', DigestAlgorithms),
        ('crypto_infos', Attributes, {'implicit': 0, 'optional': True}),
        ('encryption_info', EncryptionInfo, {'implicit': 1, 'optional': True}),
        ('archive_time_stamp_sequence', ArchiveTimeStampSequence),
    ]


class OtherEvidence(Sequence):
    _fields = [
        ('oe_type', ObjectIdentifier),
        ('oe_value', Any),
    ]


class Evidence(Choice):
    _alternatives = [
        ('tst_evidence', TimeStampTokenEvidence, {'implicit': 0}),
        ('ers_evidence', EvidenceRecord, {'implicit': 1}),
        ('other_evidence', OtherEvidence, {'implicit': 2}),
    ]


class TimeStampedData(Sequence):
    _fields = [
        ('version', Version),
        ('data_uri', IA5String, {'optional': True}),
        ('meta_data', MetaData, {'optional': True}),
        ('content', OctetString, {'optional': True}),
        ('temporal_evidence', Evidence),
    ]


class IssuerSerial(Sequence):
    _fields = [
        ('issuer', GeneralNames),
        ('serial_number', Integer),
    ]


class ESSCertID(Sequence):
    _fields = [
        ('cert_hash', OctetString),
        ('issuer_serial', IssuerSerial, {'optional': True}),
    ]


class ESSCertIDs(SequenceOf):
    _child_spec = ESSCertID


class SigningCertificate(Sequence):
    _fields = [
        ('certs', ESSCertIDs),
        ('policies', CertificatePolicies, {'optional': True}),
    ]


class SetOfSigningCertificates(SetOf):
    _child_spec = SigningCertificate


class ESSCertIDv2(Sequence):
    _fields = [
        ('hash_algorithm', DigestAlgorithm, {'default': {'algorithm': 'sha256'}}),
        ('cert_hash', OctetString),
        ('issuer_serial', IssuerSerial, {'optional': True}),
    ]


class ESSCertIDv2s(SequenceOf):
    _child_spec = ESSCertIDv2


class SigningCertificateV2(Sequence):
    _fields = [
        ('certs', ESSCertIDv2s),
        ('policies', CertificatePolicies, {'optional': True}),
    ]


class SetOfSigningCertificatesV2(SetOf):
    _child_spec = SigningCertificateV2


EncapsulatedContentInfo._oid_specs['tst_info'] = TSTInfo
EncapsulatedContentInfo._oid_specs['timestamped_data'] = TimeStampedData
ContentInfo._oid_specs['timestamped_data'] = TimeStampedData
ContentType._map['1.2.840.113549.1.9.16.1.4'] = 'tst_info'
ContentType._map['1.2.840.113549.1.9.16.1.31'] = 'timestamped_data'
CMSAttributeType._map['1.2.840.113549.1.9.16.2.12'] = 'signing_certificate'
CMSAttribute._oid_specs['signing_certificate'] = SetOfSigningCertificates
CMSAttributeType._map['1.2.840.113549.1.9.16.2.47'] = 'signing_certificate_v2'
CMSAttribute._oid_specs['signing_certificate_v2'] = SetOfSigningCertificatesV2
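
# Illustrative request construction (hedged, not part of the original
# module): build the DER for an RFC 3161 timestamp request over a SHA-256
# digest using the TimeStampReq class above.
#
#     import hashlib
#     from asn1crypto import tsp
#
#     digest = hashlib.sha256(b'data to be timestamped').digest()
#     req = tsp.TimeStampReq({
#         'version': 'v1',
#         'message_imprint': {
#             'hash_algorithm': {'algorithm': 'sha256'},
#             'hashed_message': digest,
#         },
#         'cert_req': True,
#     })
#     der = req.dump()  # POST to a TSA as application/timestamp-query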

@@ -0,0 +1,868 @@
# coding: utf-8

"""
Miscellaneous data helpers, including functions for converting integers to and
from bytes and UTC timezone. Exports the following items:

 - OrderedDict()
 - int_from_bytes()
 - int_to_bytes()
 - timezone.utc
 - utc_with_dst
 - create_timezone()
 - inet_ntop()
 - inet_pton()
 - uri_to_iri()
 - iri_to_uri()
"""

from __future__ import unicode_literals, division, absolute_import, print_function

import math
import sys
from datetime import datetime, date, timedelta, tzinfo

from ._errors import unwrap
from ._iri import iri_to_uri, uri_to_iri  # noqa
from ._ordereddict import OrderedDict  # noqa
from ._types import type_name

if sys.platform == 'win32':
    from ._inet import inet_ntop, inet_pton
else:
    from socket import inet_ntop, inet_pton  # noqa


# Python 2
if sys.version_info <= (3,):

    def int_to_bytes(value, signed=False, width=None):
        """
        Converts an integer to a byte string

        :param value:
            The integer to convert

        :param signed:
            If the byte string should be encoded using two's complement

        :param width:
            If None, the minimal possible size (but at least 1),
            otherwise an integer of the byte width for the return value

        :return:
            A byte string
        """

        if value == 0 and width == 0:
            return b''

        # Handle negatives in two's complement
        is_neg = False
        if signed and value < 0:
            is_neg = True
            bits = int(math.ceil(len('%x' % abs(value)) / 2.0) * 8)
            value = (value + (1 << bits)) % (1 << bits)

        hex_str = '%x' % value
        if len(hex_str) & 1:
            hex_str = '0' + hex_str

        output = hex_str.decode('hex')

        if signed and not is_neg and ord(output[0:1]) & 0x80:
            output = b'\x00' + output

        if width is not None:
            if len(output) > width:
                raise OverflowError('int too big to convert')
            if is_neg:
                pad_char = b'\xFF'
            else:
                pad_char = b'\x00'
            output = (pad_char * (width - len(output))) + output
        elif is_neg and ord(output[0:1]) & 0x80 == 0:
            output = b'\xFF' + output

        return output

    def int_from_bytes(value, signed=False):
        """
        Converts a byte string to an integer

        :param value:
            The byte string to convert

        :param signed:
            If the byte string should be interpreted using two's complement

        :return:
            An integer
        """

        if value == b'':
            return 0

        num = long(value.encode("hex"), 16)  # noqa

        if not signed:
            return num

        # Check for sign bit and handle two's complement
        if ord(value[0:1]) & 0x80:
            bit_len = len(value) * 8
            return num - (1 << bit_len)

        return num

    class timezone(tzinfo):  # noqa
        """
        Implements datetime.timezone for py2.
        Only full minute offsets are supported.
        DST is not supported.
        """

        def __init__(self, offset, name=None):
            """
            :param offset:
                A timedelta with this timezone's offset from UTC

            :param name:
                Name of the timezone; if None, generate one.
            """

            if not timedelta(hours=-24) < offset < timedelta(hours=24):
                raise ValueError('Offset must be in [-23:59, 23:59]')

            if offset.seconds % 60 or offset.microseconds:
                raise ValueError('Offset must be full minutes')

            self._offset = offset

            if name is not None:
                self._name = name
            elif not offset:
                self._name = 'UTC'
            else:
                self._name = 'UTC' + _format_offset(offset)

        def __eq__(self, other):
            """
            Compare two timezones

            :param other:
                The other timezone to compare to

            :return:
                A boolean
            """

            if type(other) != timezone:
                return False
            return self._offset == other._offset

        def tzname(self, dt):
            """
            :param dt:
                A datetime object; ignored.

            :return:
                Name of this timezone
            """

            return self._name

        def utcoffset(self, dt):
            """
            :param dt:
                A datetime object; ignored.

            :return:
                A timedelta object with the offset from UTC
            """

            return self._offset

        def dst(self, dt):
            """
            :param dt:
                A datetime object; ignored.

            :return:
                Zero timedelta
            """

            return timedelta(0)

    timezone.utc = timezone(timedelta(0))

# Python 3
else:

    from datetime import timezone  # noqa

    def int_to_bytes(value, signed=False, width=None):
        """
        Converts an integer to a byte string

        :param value:
            The integer to convert

        :param signed:
            If the byte string should be encoded using two's complement

        :param width:
            If None, the minimal possible size (but at least 1),
            otherwise an integer of the byte width for the return value

        :return:
            A byte string
        """

        if width is None:
            if signed:
                if value < 0:
                    bits_required = abs(value + 1).bit_length()
                else:
                    bits_required = value.bit_length()
                if bits_required % 8 == 0:
                    bits_required += 1
            else:
                bits_required = value.bit_length()
            width = math.ceil(bits_required / 8) or 1
        return value.to_bytes(width, byteorder='big', signed=signed)

    def int_from_bytes(value, signed=False):
        """
        Converts a byte string to an integer

        :param value:
            The byte string to convert

        :param signed:
            If the byte string should be interpreted using two's complement

        :return:
            An integer
        """

        return int.from_bytes(value, 'big', signed=signed)
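
    # Illustrative round trip (hedged, not part of the original module):
    # both branches above implement the same contract, big-endian with
    # optional two's complement.
    #
    #     from asn1crypto.util import int_to_bytes, int_from_bytes
    #
    #     int_to_bytes(255)                     # b'\xff'
    #     int_to_bytes(255, signed=True)        # b'\x00\xff' (sign-bit padding)
    #     int_to_bytes(-1, signed=True)         # b'\xff'
    #     int_from_bytes(b'\xff', signed=True)  # -1
    #     int_from_bytes(b'\xff')               # 255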


def _format_offset(off):
    """
    Format a timedelta into "[+-]HH:MM" format or "" for None
    """

    if off is None:
        return ''
    mins = off.days * 24 * 60 + off.seconds // 60
    sign = '-' if mins < 0 else '+'
    return sign + '%02d:%02d' % divmod(abs(mins), 60)


class _UtcWithDst(tzinfo):
    """
    Utc class where dst does not return None; required for astimezone
    """

    def tzname(self, dt):
        return 'UTC'

    def utcoffset(self, dt):
        return timedelta(0)

    def dst(self, dt):
        return timedelta(0)


utc_with_dst = _UtcWithDst()

_timezone_cache = {}


def create_timezone(offset):
    """
    Returns a new datetime.timezone object with the given offset.
    Uses cached objects if possible.

    :param offset:
        A datetime.timedelta object; it needs to be in full minutes and
        between -23:59 and +23:59.

    :return:
        A datetime.timezone object
    """

    try:
        tz = _timezone_cache[offset]
    except KeyError:
        tz = _timezone_cache[offset] = timezone(offset)
    return tz
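
# Illustrative usage (hedged, not part of the original module): build an
# aware datetime with a cached fixed-offset timezone.
#
#     from datetime import datetime, timedelta
#     from asn1crypto.util import create_timezone
#
#     ist = create_timezone(timedelta(hours=5, minutes=30))
#     dt = datetime(2019, 1, 1, 12, 0, tzinfo=ist)
#     # repeated calls with the same offset return the same cached object
#     assert create_timezone(timedelta(hours=5, minutes=30)) is ist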


class extended_date(object):
    """
    A datetime.datetime-like object that represents the year 0. This is just
    to handle 0000-01-01 found in some certificates. Python's datetime does
    not support year 0.

    The proleptic gregorian calendar repeats itself every 400 years. Therefore,
    the simplest way to format is to substitute year 2000.
    """

    def __init__(self, year, month, day):
        """
        :param year:
            The integer 0

        :param month:
            An integer from 1 to 12

        :param day:
            An integer from 1 to 31
        """

        if year != 0:
            raise ValueError('year must be 0')

        self._y2k = date(2000, month, day)

    @property
    def year(self):
        """
        :return:
            The integer 0
        """

        return 0

    @property
    def month(self):
        """
        :return:
            An integer from 1 to 12
        """

        return self._y2k.month

    @property
    def day(self):
        """
        :return:
            An integer from 1 to 31
        """

        return self._y2k.day

    def strftime(self, format):
        """
        Formats the date using strftime()

        :param format:
            A strftime() format string

        :return:
            A str, the formatted date as a unicode string
            in Python 3 and a byte string in Python 2
        """

        # Format the date twice, once with year 2000, once with year 4000.
        # The only differences in the result will be in the millennium.
        # Find them and replace by zeros.
        y2k = self._y2k.strftime(format)
        y4k = self._y2k.replace(year=4000).strftime(format)
        return ''.join('0' if (c2, c4) == ('2', '4') else c2 for c2, c4 in zip(y2k, y4k))

    def isoformat(self):
        """
        Formats the date as %Y-%m-%d

        :return:
            The date formatted to %Y-%m-%d as a unicode string in Python 3
            and a byte string in Python 2
        """

        return self.strftime('0000-%m-%d')

    def replace(self, year=None, month=None, day=None):
        """
        Returns a new datetime.date or asn1crypto.util.extended_date
        object with the specified components replaced

        :return:
            A datetime.date or asn1crypto.util.extended_date object
        """

        if year is None:
            year = self.year
        if month is None:
            month = self.month
        if day is None:
            day = self.day

        if year > 0:
            cls = date
        else:
            cls = extended_date

        return cls(
            year,
            month,
            day
        )

    def __str__(self):
        """
        :return:
            A str representing this extended_date, e.g. "0000-01-01"
        """

        return self.strftime('%Y-%m-%d')

    def __eq__(self, other):
        """
        Compare two extended_date objects

        :param other:
            The other extended_date to compare to

        :return:
            A boolean
        """

        # datetime.date object wouldn't compare equal because it can't be year 0
        if not isinstance(other, self.__class__):
            return False
        return self.__cmp__(other) == 0

    def __ne__(self, other):
        """
        Compare two extended_date objects

        :param other:
            The other extended_date to compare to

        :return:
            A boolean
        """

        return not self.__eq__(other)

    def _comparison_error(self, other):
        raise TypeError(unwrap(
            '''
            An asn1crypto.util.extended_date object can only be compared to
            an asn1crypto.util.extended_date or datetime.date object, not %s
            ''',
            type_name(other)
        ))

    def __cmp__(self, other):
        """
        Compare two extended_date or datetime.date objects

        :param other:
            The other extended_date object to compare to

        :return:
            An integer smaller than, equal to, or larger than 0
        """

        # self is year 0, other is >= year 1
        if isinstance(other, date):
            return -1

        if not isinstance(other, self.__class__):
            self._comparison_error(other)

        if self._y2k < other._y2k:
            return -1
        if self._y2k > other._y2k:
            return 1
        return 0

    def __lt__(self, other):
        return self.__cmp__(other) < 0

    def __le__(self, other):
        return self.__cmp__(other) <= 0

    def __gt__(self, other):
        return self.__cmp__(other) > 0

    def __ge__(self, other):
        return self.__cmp__(other) >= 0
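
# Illustrative usage (hedged, not part of the original module): extended_date
# fills in for datetime.date when a certificate uses the year 0.
#
#     from datetime import date
#     from asn1crypto.util import extended_date
#
#     d = extended_date(0, 1, 1)
#     str(d)                 # '0000-01-01'
#     d < date(1, 1, 1)      # True - year 0 sorts before any real date
#     d.replace(year=2000)   # becomes a plain datetime.date(2000, 1, 1)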


class extended_datetime(object):
    """
    A datetime.datetime-like object that represents the year 0. This is just
    to handle 0000-01-01 found in some certificates. Python's datetime does
    not support year 0.

    The proleptic gregorian calendar repeats itself every 400 years. Therefore,
    the simplest way to format is to substitute year 2000.
    """

    # There are 97 leap days during 400 years.
    DAYS_IN_400_YEARS = 400 * 365 + 97
    DAYS_IN_2000_YEARS = 5 * DAYS_IN_400_YEARS

    def __init__(self, year, *args, **kwargs):
        """
        :param year:
            The integer 0

        :param args:
            Other positional arguments; see datetime.datetime.

        :param kwargs:
            Other keyword arguments; see datetime.datetime.
        """

        if year != 0:
            raise ValueError('year must be 0')

        self._y2k = datetime(2000, *args, **kwargs)

    @property
    def year(self):
        """
        :return:
            The integer 0
        """

        return 0

    @property
    def month(self):
        """
        :return:
            An integer from 1 to 12
        """

        return self._y2k.month

    @property
    def day(self):
        """
        :return:
            An integer from 1 to 31
        """

        return self._y2k.day

    @property
    def hour(self):
        """
        :return:
            An integer from 0 to 23
        """

        return self._y2k.hour

    @property
    def minute(self):
        """
        :return:
            An integer from 0 to 59
        """

        return self._y2k.minute

    @property
    def second(self):
        """
        :return:
            An integer from 0 to 59
        """

        return self._y2k.second

    @property
    def microsecond(self):
        """
        :return:
            An integer from 0 to 999999
        """

        return self._y2k.microsecond

    @property
    def tzinfo(self):
        """
        :return:
            If object is timezone aware, a datetime.tzinfo object, else None.
        """

        return self._y2k.tzinfo

    def utcoffset(self):
        """
        :return:
            If object is timezone aware, a datetime.timedelta object, else None.
        """

        return self._y2k.utcoffset()

    def time(self):
        """
        :return:
            A datetime.time object
        """

        return self._y2k.time()

    def date(self):
        """
        :return:
            An asn1crypto.util.extended_date of the date
        """

        return extended_date(0, self.month, self.day)

    def strftime(self, format):
        """
        Performs strftime(), always returning a str

        :param format:
            A strftime() format string

        :return:
            A str of the formatted datetime
        """

        # Format the datetime twice, once with year 2000, once with year 4000.
        # The only differences in the result will be in the millennium.
        # Find them and replace by zeros.
        y2k = self._y2k.strftime(format)
        y4k = self._y2k.replace(year=4000).strftime(format)
        return ''.join('0' if (c2, c4) == ('2', '4') else c2 for c2, c4 in zip(y2k, y4k))

    def isoformat(self, sep='T'):
        """
        Formats the date as "%Y-%m-%d %H:%M:%S" with the sep param between the
        date and time portions

        :param sep:
            A single character of the separator to place between the date and
            time

        :return:
            The formatted datetime as a unicode string in Python 3 and a byte
            string in Python 2
        """

        s = '0000-%02d-%02d%c%02d:%02d:%02d' % (self.month, self.day, sep, self.hour, self.minute, self.second)
        if self.microsecond:
            s += '.%06d' % self.microsecond
        return s + _format_offset(self.utcoffset())

    def replace(self, year=None, *args, **kwargs):
        """
        Returns a new datetime.datetime or asn1crypto.util.extended_datetime
        object with the specified components replaced

        :param year:
            The new year to substitute. None to keep it.

        :param args:
            Other positional arguments; see datetime.datetime.replace.

        :param kwargs:
            Other keyword arguments; see datetime.datetime.replace.

        :return:
            A datetime.datetime or asn1crypto.util.extended_datetime object
        """

        if year:
            return self._y2k.replace(year, *args, **kwargs)

        return extended_datetime.from_y2k(self._y2k.replace(2000, *args, **kwargs))

    def astimezone(self, tz):
        """
        Convert this extended_datetime to another timezone.

        :param tz:
            A datetime.tzinfo object.

        :return:
            A new extended_datetime or datetime.datetime object
        """

        return extended_datetime.from_y2k(self._y2k.astimezone(tz))

    def timestamp(self):
        """
        Return POSIX timestamp. Only supported in python >= 3.3

        :return:
            A float representing the seconds since 1970-01-01 UTC. This will
            be a negative value.
        """

        return self._y2k.timestamp() - self.DAYS_IN_2000_YEARS * 86400

    def __str__(self):
        """
        :return:
            A str representing this extended_datetime, e.g.
            "0000-01-01 00:00:00.000001-10:00"
        """

        return self.isoformat(sep=' ')

    def __eq__(self, other):
        """
        Compare two extended_datetime objects

        :param other:
            The other extended_datetime to compare to

        :return:
            A boolean
        """

        # Only compare against other datetime or extended_datetime objects
        if not isinstance(other, (self.__class__, datetime)):
            return False

        # Offset-naive and offset-aware datetimes are never the same
        if (self.tzinfo is None) != (other.tzinfo is None):
            return False

        return self.__cmp__(other) == 0

    def __ne__(self, other):
        """
        Compare two extended_datetime objects

        :param other:
            The other extended_datetime to compare to

        :return:
            A boolean
        """

        return not self.__eq__(other)

    def _comparison_error(self, other):
        """
        Raises a TypeError about the other object not being suitable for
        comparison

        :param other:
            The object being compared to
        """

        raise TypeError(unwrap(
            '''
            An asn1crypto.util.extended_datetime object can only be compared to
            an asn1crypto.util.extended_datetime or datetime.datetime object,
            not %s
            ''',
            type_name(other)
        ))

    def __cmp__(self, other):
        """
        Compare two extended_datetime or datetime.datetime objects

        :param other:
            The other extended_datetime or datetime.datetime object to compare to

        :return:
            An integer smaller than, equal to, or larger than 0
        """

        if not isinstance(other, (self.__class__, datetime)):
            self._comparison_error(other)

        if (self.tzinfo is None) != (other.tzinfo is None):
            raise TypeError("can't compare offset-naive and offset-aware datetimes")

        diff = self - other
        zero = timedelta(0)
        if diff < zero:
            return -1
        if diff > zero:
            return 1
        return 0

    def __lt__(self, other):
        return self.__cmp__(other) < 0

    def __le__(self, other):
        return self.__cmp__(other) <= 0

    def __gt__(self, other):
        return self.__cmp__(other) > 0

    def __ge__(self, other):
        return self.__cmp__(other) >= 0

    def __add__(self, other):
        """
        Adds a timedelta

        :param other:
            A datetime.timedelta object to add.

        :return:
            A new extended_datetime or datetime.datetime object.
        """

        return extended_datetime.from_y2k(self._y2k + other)

    def __sub__(self, other):
        """
        Subtracts a timedelta or another datetime.

        :param other:
            A datetime.timedelta or datetime.datetime or extended_datetime
            object to subtract.

        :return:
            If a timedelta is passed, a new extended_datetime or
            datetime.datetime object. Else a datetime.timedelta object.
        """

        if isinstance(other, timedelta):
            return extended_datetime.from_y2k(self._y2k - other)

        if isinstance(other, extended_datetime):
            return self._y2k - other._y2k

        if isinstance(other, datetime):
            return self._y2k - other - timedelta(days=self.DAYS_IN_2000_YEARS)

        return NotImplemented

    def __rsub__(self, other):
        return -(self - other)

    @classmethod
    def from_y2k(cls, value):
        """
        Revert substitution of year 2000.

        :param value:
            A datetime.datetime object which is 2000 years in the future.

        :return:
            A new extended_datetime or datetime.datetime object.
        """

        year = value.year - 2000

        if year > 0:
            new_cls = datetime
        else:
            new_cls = cls

        return new_cls(
            year,
            value.month,
            value.day,
            value.hour,
            value.minute,
            value.second,
            value.microsecond,
            value.tzinfo
        )
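
# Illustrative usage (hedged, not part of the original module): arithmetic on
# an extended_datetime transparently promotes back to datetime.datetime once
# the result leaves year 0.
#
#     from datetime import timedelta
#     from asn1crypto.util import extended_datetime
#
#     edt = extended_datetime(0, 1, 1)
#     str(edt)                  # '0000-01-01 00:00:00'
#     edt + timedelta(days=1)   # still an extended_datetime (year 0)
#     edt.replace(year=2019)    # a plain datetime.datetime(2019, 1, 1, 0, 0)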

@@ -0,0 +1,6 @@
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function


__version__ = '1.2.0'
__version_info__ = (1, 2, 0)
File diff suppressed because it is too large

@@ -0,0 +1,14 @@
__all__ = ['FFI', 'VerificationError', 'VerificationMissing', 'CDefError',
           'FFIError']

from .api import FFI
from .error import CDefError, FFIError, VerificationError, VerificationMissing
from .error import PkgConfigError

__version__ = "1.13.2"
__version_info__ = (1, 13, 2)

# The verifier module file names are based on the CRC32 of a string that
# contains the following version number.  It may be older than __version__
# if nothing is clearly incompatible.
__version_verifier_modules__ = "0.8.6"

@@ -0,0 +1,147 @@
#ifndef CFFI_MESSAGEBOX
# ifdef _MSC_VER
#  define CFFI_MESSAGEBOX 1
# else
#  define CFFI_MESSAGEBOX 0
# endif
#endif


#if CFFI_MESSAGEBOX
/* Windows only: logic to take the Python-CFFI embedding logic
   initialization errors and display them in a background thread
   with MessageBox.  The idea is that if the whole program closes
   as a result of this problem, then likely it is already a console
   program and you can read the stderr output in the console too.
   If it is not a console program, then it will likely show its own
   dialog to complain, or generally not abruptly close, and for this
   case the background thread should stay alive.
*/
static void *volatile _cffi_bootstrap_text;

static PyObject *_cffi_start_error_capture(void)
{
    PyObject *result = NULL;
    PyObject *x, *m, *bi;

    if (InterlockedCompareExchangePointer(&_cffi_bootstrap_text,
                                          (void *)1, NULL) != NULL)
        return (PyObject *)1;

    m = PyImport_AddModule("_cffi_error_capture");
    if (m == NULL)
        goto error;

    result = PyModule_GetDict(m);
    if (result == NULL)
        goto error;

#if PY_MAJOR_VERSION >= 3
    bi = PyImport_ImportModule("builtins");
#else
    bi = PyImport_ImportModule("__builtin__");
#endif
    if (bi == NULL)
        goto error;
    PyDict_SetItemString(result, "__builtins__", bi);
    Py_DECREF(bi);

    x = PyRun_String(
        "import sys\n"
        "class FileLike:\n"
        "  def write(self, x):\n"
        "    try:\n"
        "      of.write(x)\n"
        "    except: pass\n"
        "    self.buf += x\n"
        "fl = FileLike()\n"
        "fl.buf = ''\n"
        "of = sys.stderr\n"
        "sys.stderr = fl\n"
        "def done():\n"
        "  sys.stderr = of\n"
        "  return fl.buf\n",   /* make sure the returned value stays alive */
        Py_file_input,
        result, result);
    Py_XDECREF(x);

 error:
    if (PyErr_Occurred())
    {
        PyErr_WriteUnraisable(Py_None);
        PyErr_Clear();
    }
    return result;
}

#pragma comment(lib, "user32.lib")

static DWORD WINAPI _cffi_bootstrap_dialog(LPVOID ignored)
{
    Sleep(666);    /* may be interrupted if the whole process is closing */
#if PY_MAJOR_VERSION >= 3
    MessageBoxW(NULL, (wchar_t *)_cffi_bootstrap_text,
                L"Python-CFFI error",
                MB_OK | MB_ICONERROR);
#else
    MessageBoxA(NULL, (char *)_cffi_bootstrap_text,
                "Python-CFFI error",
                MB_OK | MB_ICONERROR);
#endif
    _cffi_bootstrap_text = NULL;
    return 0;
}

static void _cffi_stop_error_capture(PyObject *ecap)
{
    PyObject *s;
    void *text;

    if (ecap == (PyObject *)1)
        return;

    if (ecap == NULL)
        goto error;

    s = PyRun_String("done()", Py_eval_input, ecap, ecap);
    if (s == NULL)
        goto error;

    /* Show a dialog box, but in a background thread, and
       never show multiple dialog boxes at once. */
#if PY_MAJOR_VERSION >= 3
    text = PyUnicode_AsWideCharString(s, NULL);
#else
    text = PyString_AsString(s);
#endif

    _cffi_bootstrap_text = text;

    if (text != NULL)
    {
        HANDLE h;
        h = CreateThread(NULL, 0, _cffi_bootstrap_dialog,
                         NULL, 0, NULL);
        if (h != NULL)
            CloseHandle(h);
    }
    /* decref the string, but it should stay alive as 'fl.buf'
       in the small module above.  It will really be freed only if
       we later get another similar error.  So it's a leak of at
       most one copy of the small module.  That's fine for this
       situation which is usually a "fatal error" anyway. */
    Py_DECREF(s);
    PyErr_Clear();
    return;

  error:
    _cffi_bootstrap_text = NULL;
    PyErr_Clear();
}

#else

static PyObject *_cffi_start_error_capture(void) { return NULL; }
static void _cffi_stop_error_capture(PyObject *ecap) { }

#endif

@@ -0,0 +1,308 @@
#define _CFFI_

/* We try to define Py_LIMITED_API before including Python.h.

   Mess: we can only define it if Py_DEBUG, Py_TRACE_REFS and
   Py_REF_DEBUG are not defined.  This is a best-effort approximation:
   we can learn about Py_DEBUG from pyconfig.h, but it is unclear if
   the same works for the other two macros.  Py_DEBUG implies them,
   but not the other way around.

   Issue #350 is still open: on Windows, the code here causes it to link
   with PYTHON36.DLL (for example) instead of PYTHON3.DLL.  A fix was
   attempted in 164e526a5515 and 14ce6985e1c3, but reverted: virtualenv
   does not make PYTHON3.DLL available, and so the "correctly" compiled
   version would not run inside a virtualenv.  We will re-apply the fix
   after virtualenv has been fixed for some time.  For explanation, see
   issue #355.  For a workaround if you want PYTHON3.DLL and don't worry
   about virtualenv, see issue #350.  See also 'py_limited_api' in
   setuptools_ext.py.
*/
#if !defined(_CFFI_USE_EMBEDDING) && !defined(Py_LIMITED_API)
#  include <pyconfig.h>
#  if !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG)
#    define Py_LIMITED_API
#  endif
#endif

#include <Python.h>
#ifdef __cplusplus
extern "C" {
#endif
#include <stddef.h>
#include "parse_c_type.h"

/* this block of #ifs should be kept exactly identical between
   c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py
   and cffi/_cffi_include.h */
#if defined(_MSC_VER)
# include <malloc.h>   /* for alloca() */
# if _MSC_VER < 1600   /* MSVC < 2010 */
   typedef __int8 int8_t;
   typedef __int16 int16_t;
   typedef __int32 int32_t;
   typedef __int64 int64_t;
   typedef unsigned __int8 uint8_t;
   typedef unsigned __int16 uint16_t;
   typedef unsigned __int32 uint32_t;
   typedef unsigned __int64 uint64_t;
   typedef __int8 int_least8_t;
   typedef __int16 int_least16_t;
   typedef __int32 int_least32_t;
   typedef __int64 int_least64_t;
   typedef unsigned __int8 uint_least8_t;
   typedef unsigned __int16 uint_least16_t;
   typedef unsigned __int32 uint_least32_t;
   typedef unsigned __int64 uint_least64_t;
   typedef __int8 int_fast8_t;
   typedef __int16 int_fast16_t;
   typedef __int32 int_fast32_t;
   typedef __int64 int_fast64_t;
   typedef unsigned __int8 uint_fast8_t;
   typedef unsigned __int16 uint_fast16_t;
   typedef unsigned __int32 uint_fast32_t;
   typedef unsigned __int64 uint_fast64_t;
   typedef __int64 intmax_t;
   typedef unsigned __int64 uintmax_t;
# else
#  include <stdint.h>
# endif
# if _MSC_VER < 1800   /* MSVC < 2013 */
#  ifndef __cplusplus
    typedef unsigned char _Bool;
#  endif
# endif
#else
# include <stdint.h>
# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux)
#  include <alloca.h>
# endif
#endif

#ifdef __GNUC__
# define _CFFI_UNUSED_FN  __attribute__((unused))
#else
# define _CFFI_UNUSED_FN  /* nothing */
#endif

#ifdef __cplusplus
# ifndef _Bool
   typedef bool _Bool;   /* semi-hackish: C++ has no _Bool; bool is builtin */
# endif
#endif

/**********  CPython-specific section  **********/
#ifndef PYPY_VERSION


#if PY_MAJOR_VERSION >= 3
# define PyInt_FromLong PyLong_FromLong
#endif

#define _cffi_from_c_double PyFloat_FromDouble
#define _cffi_from_c_float PyFloat_FromDouble
#define _cffi_from_c_long PyInt_FromLong
#define _cffi_from_c_ulong PyLong_FromUnsignedLong
#define _cffi_from_c_longlong PyLong_FromLongLong
#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong
#define _cffi_from_c__Bool PyBool_FromLong

#define _cffi_to_c_double PyFloat_AsDouble
#define _cffi_to_c_float PyFloat_AsDouble

#define _cffi_from_c_int(x, type)                                        \
    (((type)-1) > 0 ? /* unsigned */                                     \
        (sizeof(type) < sizeof(long) ?                                   \
            PyInt_FromLong((long)x) :                                    \
         sizeof(type) == sizeof(long) ?                                  \
            PyLong_FromUnsignedLong((unsigned long)x) :                  \
            PyLong_FromUnsignedLongLong((unsigned long long)x)) :        \
        (sizeof(type) <= sizeof(long) ?                                  \
            PyInt_FromLong((long)x) :                                    \
            PyLong_FromLongLong((long long)x)))

#define _cffi_to_c_int(o, type)                                          \
    ((type)(                                                             \
     sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o)        \
                                         : (type)_cffi_to_c_i8(o)) :     \
     sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o)       \
                                         : (type)_cffi_to_c_i16(o)) :    \
     sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o)       \
                                         : (type)_cffi_to_c_i32(o)) :    \
     sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o)       \
                                         : (type)_cffi_to_c_i64(o)) :    \
     (Py_FatalError("unsupported size for type " #type), (type)0)))

#define _cffi_to_c_i8                                                    \
                 ((int(*)(PyObject *))_cffi_exports[1])
#define _cffi_to_c_u8                                                    \
                 ((int(*)(PyObject *))_cffi_exports[2])
#define _cffi_to_c_i16                                                   \
                 ((int(*)(PyObject *))_cffi_exports[3])
#define _cffi_to_c_u16                                                   \
                 ((int(*)(PyObject *))_cffi_exports[4])
#define _cffi_to_c_i32                                                   \
                 ((int(*)(PyObject *))_cffi_exports[5])
#define _cffi_to_c_u32                                                   \
                 ((unsigned int(*)(PyObject *))_cffi_exports[6])
#define _cffi_to_c_i64                                                   \
                 ((long long(*)(PyObject *))_cffi_exports[7])
#define _cffi_to_c_u64                                                   \
                 ((unsigned long long(*)(PyObject *))_cffi_exports[8])
#define _cffi_to_c_char                                                  \
                 ((int(*)(PyObject *))_cffi_exports[9])
#define _cffi_from_c_pointer                                             \
    ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[10])
#define _cffi_to_c_pointer                                               \
    ((char *(*)(PyObject *, struct _cffi_ctypedescr *))_cffi_exports[11])
#define _cffi_get_struct_layout                                          \
    not used any more
#define _cffi_restore_errno                                              \
    ((void(*)(void))_cffi_exports[13])
#define _cffi_save_errno                                                 \
    ((void(*)(void))_cffi_exports[14])
#define _cffi_from_c_char                                                \
    ((PyObject *(*)(char))_cffi_exports[15])
#define _cffi_from_c_deref                                               \
    ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[16])
#define _cffi_to_c                                                       \
    ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[17])
#define _cffi_from_c_struct                                              \
    ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[18])
#define _cffi_to_c_wchar_t                                               \
    ((_cffi_wchar_t(*)(PyObject *))_cffi_exports[19])
#define _cffi_from_c_wchar_t                                             \
    ((PyObject *(*)(_cffi_wchar_t))_cffi_exports[20])
#define _cffi_to_c_long_double                                           \
    ((long double(*)(PyObject *))_cffi_exports[21])
#define _cffi_to_c__Bool                                                 \
    ((_Bool(*)(PyObject *))_cffi_exports[22])
#define _cffi_prepare_pointer_call_argument                              \
    ((Py_ssize_t(*)(struct _cffi_ctypedescr *,                           \
                    PyObject *, char **))_cffi_exports[23])
#define _cffi_convert_array_from_object                                  \
    ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[24])
#define _CFFI_CPIDX  25
#define _cffi_call_python                                                \
    ((void(*)(struct _cffi_externpy_s *, char *))_cffi_exports[_CFFI_CPIDX])
#define _cffi_to_c_wchar3216_t                                           \
    ((int(*)(PyObject *))_cffi_exports[26])
#define _cffi_from_c_wchar3216_t                                         \
    ((PyObject *(*)(int))_cffi_exports[27])
#define _CFFI_NUM_EXPORTS 28

struct _cffi_ctypedescr;

static void *_cffi_exports[_CFFI_NUM_EXPORTS];

#define _cffi_type(index)   (                           \
    assert((((uintptr_t)_cffi_types[index]) & 1) == 0), \
    (struct _cffi_ctypedescr *)_cffi_types[index])

static PyObject *_cffi_init(const char *module_name, Py_ssize_t version,
                            const struct _cffi_type_context_s *ctx)
{
    PyObject *module, *o_arg, *new_module;
    void *raw[] = {
        (void *)module_name,
        (void *)version,
        (void *)_cffi_exports,
        (void *)ctx,
    };

    module = PyImport_ImportModule("_cffi_backend");
    if (module == NULL)
        goto failure;

    o_arg = PyLong_FromVoidPtr((void *)raw);
    if (o_arg == NULL)
        goto failure;

    new_module = PyObject_CallMethod(
        module, (char *)"_init_cffi_1_0_external_module", (char *)"O", o_arg);

    Py_DECREF(o_arg);
    Py_DECREF(module);
    return new_module;

  failure:
    Py_XDECREF(module);
    return NULL;
}


#ifdef HAVE_WCHAR_H
typedef wchar_t _cffi_wchar_t;
#else
typedef uint16_t _cffi_wchar_t;   /* same random pick as _cffi_backend.c */
#endif

_CFFI_UNUSED_FN static uint16_t _cffi_to_c_char16_t(PyObject *o)
{
    if (sizeof(_cffi_wchar_t) == 2)
        return (uint16_t)_cffi_to_c_wchar_t(o);
    else
        return (uint16_t)_cffi_to_c_wchar3216_t(o);
}

_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char16_t(uint16_t x)
{
    if (sizeof(_cffi_wchar_t) == 2)
        return _cffi_from_c_wchar_t((_cffi_wchar_t)x);
    else
        return _cffi_from_c_wchar3216_t((int)x);
}

_CFFI_UNUSED_FN static int _cffi_to_c_char32_t(PyObject *o)
{
    if (sizeof(_cffi_wchar_t) == 4)
        return (int)_cffi_to_c_wchar_t(o);
    else
        return (int)_cffi_to_c_wchar3216_t(o);
}

_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char32_t(int x)
{
    if (sizeof(_cffi_wchar_t) == 4)
        return _cffi_from_c_wchar_t((_cffi_wchar_t)x);
    else
        return _cffi_from_c_wchar3216_t(x);
}


/**********  end CPython-specific section  **********/
#else
_CFFI_UNUSED_FN
static void (*_cffi_call_python_org)(struct _cffi_externpy_s *, char *);
# define _cffi_call_python  _cffi_call_python_org
#endif


#define _cffi_array_len(array)   (sizeof(array) / sizeof((array)[0]))

#define _cffi_prim_int(size, sign)                                      \
    ((size) == 1 ? ((sign) ? _CFFI_PRIM_INT8  : _CFFI_PRIM_UINT8)  :    \
     (size) == 2 ? ((sign) ? _CFFI_PRIM_INT16 : _CFFI_PRIM_UINT16) :    \
     (size) == 4 ? ((sign) ? _CFFI_PRIM_INT32 : _CFFI_PRIM_UINT32) :    \
     (size) == 8 ? ((sign) ? _CFFI_PRIM_INT64 : _CFFI_PRIM_UINT64) :    \
     _CFFI__UNKNOWN_PRIM)

#define _cffi_prim_float(size)                                          \
    ((size) == sizeof(float) ? _CFFI_PRIM_FLOAT :                       \
     (size) == sizeof(double) ? _CFFI_PRIM_DOUBLE :                     \
     (size) == sizeof(long double) ? _CFFI__UNKNOWN_LONG_DOUBLE :       \
     _CFFI__UNKNOWN_FLOAT_PRIM)

#define _cffi_check_int(got, got_nonpos, expected)      \
    ((got_nonpos) == (expected <= 0) &&                 \
     (got) == (unsigned long long)expected)

#ifdef MS_WIN32
# define _cffi_stdcall  __stdcall
#else
# define _cffi_stdcall  /* nothing */
#endif

#ifdef __cplusplus
}
#endif

@@ -0,0 +1,520 @@

/***** Support code for embedding *****/

#ifdef __cplusplus
extern "C" {
#endif


#if defined(_WIN32)
#  define CFFI_DLLEXPORT  __declspec(dllexport)
#elif defined(__GNUC__)
#  define CFFI_DLLEXPORT  __attribute__((visibility("default")))
#else
#  define CFFI_DLLEXPORT  /* nothing */
#endif


/* There are two global variables of type _cffi_call_python_fnptr:

   * _cffi_call_python, which we declare just below, is the one called
     by ``extern "Python"`` implementations.

   * _cffi_call_python_org, which on CPython is actually part of the
     _cffi_exports[] array, is the function pointer copied from
     _cffi_backend.

   After initialization is complete, both are equal.  However, the
   first one remains equal to &_cffi_start_and_call_python until the
   very end of initialization, when we are (or should be) sure that
   concurrent threads also see a completely initialized world, and
   only then is it changed.
*/
#undef _cffi_call_python
typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *);
static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *);
static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python;


#ifndef _MSC_VER
   /* --- Assuming a GCC not infinitely old --- */
# define cffi_compare_and_swap(l,o,n)  __sync_bool_compare_and_swap(l,o,n)
# define cffi_write_barrier()          __sync_synchronize()
# if !defined(__amd64__) && !defined(__x86_64__) &&   \
     !defined(__i386__) && !defined(__i386)
#   define cffi_read_barrier()         __sync_synchronize()
# else
#   define cffi_read_barrier()         (void)0
# endif
#else
   /* --- Windows threads version --- */
# include <Windows.h>
# define cffi_compare_and_swap(l,o,n)                               \
                          (InterlockedCompareExchangePointer(l,n,o) == (o))
# define cffi_write_barrier()       InterlockedCompareExchange(&_cffi_dummy,0,0)
# define cffi_read_barrier()        (void)0
static volatile LONG _cffi_dummy;
#endif

#ifdef WITH_THREAD
# ifndef _MSC_VER
#  include <pthread.h>
   static pthread_mutex_t _cffi_embed_startup_lock;
# else
   static CRITICAL_SECTION _cffi_embed_startup_lock;
# endif
  static char _cffi_embed_startup_lock_ready = 0;
#endif

static void _cffi_acquire_reentrant_mutex(void)
{
    static void *volatile lock = NULL;

    while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) {
        /* should ideally do a spin loop instruction here, but
           hard to do it portably and doesn't really matter I
           think: pthread_mutex_init() should be very fast, and
           this is only run at start-up anyway. */
    }

#ifdef WITH_THREAD
    if (!_cffi_embed_startup_lock_ready) {
# ifndef _MSC_VER
        pthread_mutexattr_t attr;
        pthread_mutexattr_init(&attr);
        pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE);
        pthread_mutex_init(&_cffi_embed_startup_lock, &attr);
# else
        InitializeCriticalSection(&_cffi_embed_startup_lock);
# endif
        _cffi_embed_startup_lock_ready = 1;
    }
#endif

    while (!cffi_compare_and_swap(&lock, (void *)1, NULL))
        ;

#ifndef _MSC_VER
    pthread_mutex_lock(&_cffi_embed_startup_lock);
#else
    EnterCriticalSection(&_cffi_embed_startup_lock);
#endif
}

static void _cffi_release_reentrant_mutex(void)
{
#ifndef _MSC_VER
    pthread_mutex_unlock(&_cffi_embed_startup_lock);
#else
    LeaveCriticalSection(&_cffi_embed_startup_lock);
#endif
}


/**********  CPython-specific section  **********/
#ifndef PYPY_VERSION

#include "_cffi_errors.h"


#define _cffi_call_python_org  _cffi_exports[_CFFI_CPIDX]

PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void);   /* forward */

static void _cffi_py_initialize(void)
{
    /* XXX use initsigs=0, which "skips initialization registration of
       signal handlers, which might be useful when Python is
       embedded" according to the Python docs.  But review and think
       if it should be a user-controllable setting.

       XXX we should also give a way to write errors to a buffer
       instead of to stderr.

       XXX if importing 'site' fails, CPython (any version) calls
       exit().  Should we try to work around this behavior here?
    */
    Py_InitializeEx(0);
}

static int _cffi_initialize_python(void)
{
    /* This initializes Python, imports _cffi_backend, and then the
       present .dll/.so is set up as a CPython C extension module.
    */
    int result;
    PyGILState_STATE state;
    PyObject *pycode=NULL, *global_dict=NULL, *x;
    PyObject *builtins;

    state = PyGILState_Ensure();

    /* Call the initxxx() function from the present module.  It will
       create and initialize us as a CPython extension module, instead
       of letting the startup Python code do it---it might reimport
|
||||
the same .dll/.so and possibly get confused on some platforms.
|
||||
It might also have trouble locating the .dll/.so again for all
|
||||
I know.
|
||||
*/
|
||||
(void)_CFFI_PYTHON_STARTUP_FUNC();
|
||||
if (PyErr_Occurred())
|
||||
goto error;
|
||||
|
||||
/* Now run the Python code provided to ffi.embedding_init_code().
|
||||
*/
|
||||
pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE,
|
||||
"<init code for '" _CFFI_MODULE_NAME "'>",
|
||||
Py_file_input);
|
||||
if (pycode == NULL)
|
||||
goto error;
|
||||
global_dict = PyDict_New();
|
||||
if (global_dict == NULL)
|
||||
goto error;
|
||||
builtins = PyEval_GetBuiltins();
|
||||
if (builtins == NULL)
|
||||
goto error;
|
||||
if (PyDict_SetItemString(global_dict, "__builtins__", builtins) < 0)
|
||||
goto error;
|
||||
x = PyEval_EvalCode(
|
||||
#if PY_MAJOR_VERSION < 3
|
||||
(PyCodeObject *)
|
||||
#endif
|
||||
pycode, global_dict, global_dict);
|
||||
if (x == NULL)
|
||||
goto error;
|
||||
Py_DECREF(x);
|
||||
|
||||
/* Done! Now if we've been called from
|
||||
_cffi_start_and_call_python() in an ``extern "Python"``, we can
|
||||
only hope that the Python code did correctly set up the
|
||||
corresponding @ffi.def_extern() function. Otherwise, the
|
||||
general logic of ``extern "Python"`` functions (inside the
|
||||
_cffi_backend module) will find that the reference is still
|
||||
missing and print an error.
|
||||
*/
|
||||
result = 0;
|
||||
done:
|
||||
Py_XDECREF(pycode);
|
||||
Py_XDECREF(global_dict);
|
||||
PyGILState_Release(state);
|
||||
return result;
|
||||
|
||||
error:;
|
||||
{
|
||||
/* Print as much information as potentially useful.
|
||||
Debugging load-time failures with embedding is not fun.
|
||||
*/
|
||||
PyObject *ecap;
|
||||
PyObject *exception, *v, *tb, *f, *modules, *mod;
|
||||
PyErr_Fetch(&exception, &v, &tb);
|
||||
ecap = _cffi_start_error_capture();
|
||||
f = PySys_GetObject((char *)"stderr");
|
||||
if (f != NULL && f != Py_None) {
|
||||
PyFile_WriteString(
|
||||
"Failed to initialize the Python-CFFI embedding logic:\n\n", f);
|
||||
}
|
||||
|
||||
if (exception != NULL) {
|
||||
PyErr_NormalizeException(&exception, &v, &tb);
|
||||
PyErr_Display(exception, v, tb);
|
||||
}
|
||||
Py_XDECREF(exception);
|
||||
Py_XDECREF(v);
|
||||
Py_XDECREF(tb);
|
||||
|
||||
if (f != NULL && f != Py_None) {
|
||||
PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME
|
||||
"\ncompiled with cffi version: 1.13.2"
|
||||
"\n_cffi_backend module: ", f);
|
||||
modules = PyImport_GetModuleDict();
|
||||
mod = PyDict_GetItemString(modules, "_cffi_backend");
|
||||
if (mod == NULL) {
|
||||
PyFile_WriteString("not loaded", f);
|
||||
}
|
||||
else {
|
||||
v = PyObject_GetAttrString(mod, "__file__");
|
||||
PyFile_WriteObject(v, f, 0);
|
||||
Py_XDECREF(v);
|
||||
}
|
||||
PyFile_WriteString("\nsys.path: ", f);
|
||||
PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0);
|
||||
PyFile_WriteString("\n\n", f);
|
||||
}
|
||||
_cffi_stop_error_capture(ecap);
|
||||
}
|
||||
result = -1;
|
||||
goto done;
|
||||
}
|
||||
|
||||
PyAPI_DATA(char *) _PyParser_TokenNames[]; /* from CPython */
|
||||
|
||||
static int _cffi_carefully_make_gil(void)
|
||||
{
|
||||
/* This does the basic initialization of Python. It can be called
|
||||
completely concurrently from unrelated threads. It assumes
|
||||
that we don't hold the GIL before (if it exists), and we don't
|
||||
hold it afterwards.
|
||||
|
||||
(What it really does used to be completely different in Python 2
|
||||
and Python 3, with the Python 2 solution avoiding the spin-lock
|
||||
around the Py_InitializeEx() call. However, after recent changes
|
||||
to CPython 2.7 (issue #358) it no longer works. So we use the
|
||||
Python 3 solution everywhere.)
|
||||
|
||||
This initializes Python by calling Py_InitializeEx().
|
||||
Important: this must not be called concurrently at all.
|
||||
So we use a global variable as a simple spin lock. This global
|
||||
variable must be from 'libpythonX.Y.so', not from this
|
||||
cffi-based extension module, because it must be shared from
|
||||
different cffi-based extension modules.
|
||||
|
||||
In Python < 3.8, we choose
|
||||
_PyParser_TokenNames[0] as a completely arbitrary pointer value
|
||||
that is never written to. The default is to point to the
|
||||
string "ENDMARKER". We change it temporarily to point to the
|
||||
next character in that string. (Yes, I know it's REALLY
|
||||
obscure.)
|
||||
|
||||
In Python >= 3.8, this string array is no longer writable, so
|
||||
instead we pick PyCapsuleType.tp_version_tag. We can't change
|
||||
Python < 3.8 because someone might use a mixture of cffi
|
||||
embedded modules, some of which were compiled before this file
|
||||
changed.
|
||||
*/
|
||||
|
||||
#ifdef WITH_THREAD
|
||||
# if PY_VERSION_HEX < 0x03080000
|
||||
char *volatile *lock = (char *volatile *)_PyParser_TokenNames;
|
||||
char *old_value, *locked_value;
|
||||
|
||||
while (1) { /* spin loop */
|
||||
old_value = *lock;
|
||||
locked_value = old_value + 1;
|
||||
if (old_value[0] == 'E') {
|
||||
assert(old_value[1] == 'N');
|
||||
if (cffi_compare_and_swap(lock, old_value, locked_value))
|
||||
break;
|
||||
}
|
||||
else {
|
||||
assert(old_value[0] == 'N');
|
||||
/* should ideally do a spin loop instruction here, but
|
||||
hard to do it portably and doesn't really matter I
|
||||
think: PyEval_InitThreads() should be very fast, and
|
||||
this is only run at start-up anyway. */
|
||||
}
|
||||
}
|
||||
# else
|
||||
int volatile *lock = (int volatile *)&PyCapsule_Type.tp_version_tag;
|
||||
int old_value, locked_value;
|
||||
assert(!(PyCapsule_Type.tp_flags & Py_TPFLAGS_HAVE_VERSION_TAG));
|
||||
|
||||
while (1) { /* spin loop */
|
||||
old_value = *lock;
|
||||
locked_value = -42;
|
||||
if (old_value == 0) {
|
||||
if (cffi_compare_and_swap(lock, old_value, locked_value))
|
||||
break;
|
||||
}
|
||||
else {
|
||||
assert(old_value == locked_value);
|
||||
/* should ideally do a spin loop instruction here, but
|
||||
hard to do it portably and doesn't really matter I
|
||||
think: PyEval_InitThreads() should be very fast, and
|
||||
this is only run at start-up anyway. */
|
||||
}
|
||||
}
|
||||
# endif
|
||||
#endif
|
||||
|
||||
/* call Py_InitializeEx() */
|
||||
if (!Py_IsInitialized()) {
|
||||
_cffi_py_initialize();
|
||||
PyEval_InitThreads();
|
||||
PyEval_SaveThread(); /* release the GIL */
|
||||
/* the returned tstate must be the one that has been stored into the
|
||||
autoTLSkey by _PyGILState_Init() called from Py_Initialize(). */
|
||||
}
|
||||
else {
|
||||
PyGILState_STATE state = PyGILState_Ensure();
|
||||
PyEval_InitThreads();
|
||||
PyGILState_Release(state);
|
||||
}
|
||||
|
||||
#ifdef WITH_THREAD
|
||||
/* release the lock */
|
||||
while (!cffi_compare_and_swap(lock, locked_value, old_value))
|
||||
;
|
||||
#endif
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
/********** end CPython-specific section **********/
|
||||
|
||||
|
||||
#else
|
||||
|
||||
|
||||
/********** PyPy-specific section **********/
|
||||
|
||||
PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]); /* forward */
|
||||
|
||||
static struct _cffi_pypy_init_s {
|
||||
const char *name;
|
||||
void (*func)(const void *[]);
|
||||
const char *code;
|
||||
} _cffi_pypy_init = {
|
||||
_CFFI_MODULE_NAME,
|
||||
(void(*)(const void *[]))_CFFI_PYTHON_STARTUP_FUNC,
|
||||
_CFFI_PYTHON_STARTUP_CODE,
|
||||
};
|
||||
|
||||
extern int pypy_carefully_make_gil(const char *);
|
||||
extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *);
|
||||
|
||||
static int _cffi_carefully_make_gil(void)
|
||||
{
|
||||
return pypy_carefully_make_gil(_CFFI_MODULE_NAME);
|
||||
}
|
||||
|
||||
static int _cffi_initialize_python(void)
|
||||
{
|
||||
return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init);
|
||||
}
|
||||
|
||||
/********** end PyPy-specific section **********/
|
||||
|
||||
|
||||
#endif
|
||||
|
||||
|
||||
#ifdef __GNUC__
|
||||
__attribute__((noinline))
|
||||
#endif
|
||||
static _cffi_call_python_fnptr _cffi_start_python(void)
|
||||
{
|
||||
/* Delicate logic to initialize Python. This function can be
|
||||
called multiple times concurrently, e.g. when the process calls
|
||||
its first ``extern "Python"`` functions in multiple threads at
|
||||
once. It can also be called recursively, in which case we must
|
||||
ignore it. We also have to consider what occurs if several
|
||||
different cffi-based extensions reach this code in parallel
|
||||
threads---it is a different copy of the code, then, and we
|
||||
can't have any shared global variable unless it comes from
|
||||
'libpythonX.Y.so'.
|
||||
|
||||
Idea:
|
||||
|
||||
* _cffi_carefully_make_gil(): "carefully" call
|
||||
PyEval_InitThreads() (possibly with Py_InitializeEx() first).
|
||||
|
||||
* then we use a (local) custom lock to make sure that a call to this
|
||||
cffi-based extension will wait if another call to the *same*
|
||||
extension is running the initialization in another thread.
|
||||
It is reentrant, so that a recursive call will not block, but
|
||||
only one from a different thread.
|
||||
|
||||
* then we grab the GIL and (Python 2) we call Py_InitializeEx().
|
||||
At this point, concurrent calls to Py_InitializeEx() are not
|
||||
possible: we have the GIL.
|
||||
|
||||
* do the rest of the specific initialization, which may
|
||||
temporarily release the GIL but not the custom lock.
|
||||
Only release the custom lock when we are done.
|
||||
*/
|
||||
static char called = 0;
|
||||
|
||||
if (_cffi_carefully_make_gil() != 0)
|
||||
return NULL;
|
||||
|
||||
_cffi_acquire_reentrant_mutex();
|
||||
|
||||
/* Here the GIL exists, but we don't have it. We're only protected
|
||||
from concurrency by the reentrant mutex. */
|
||||
|
||||
/* This file only initializes the embedded module once, the first
|
||||
time this is called, even if there are subinterpreters. */
|
||||
if (!called) {
|
||||
called = 1; /* invoke _cffi_initialize_python() only once,
|
||||
but don't set '_cffi_call_python' right now,
|
||||
otherwise concurrent threads won't call
|
||||
this function at all (we need them to wait) */
|
||||
if (_cffi_initialize_python() == 0) {
|
||||
/* now initialization is finished. Switch to the fast-path. */
|
||||
|
||||
/* We would like nobody to see the new value of
|
||||
'_cffi_call_python' without also seeing the rest of the
|
||||
data initialized. However, this is not possible. But
|
||||
the new value of '_cffi_call_python' is the function
|
||||
'cffi_call_python()' from _cffi_backend. So: */
|
||||
cffi_write_barrier();
|
||||
/* ^^^ we put a write barrier here, and a corresponding
|
||||
read barrier at the start of cffi_call_python(). This
|
||||
ensures that after that read barrier, we see everything
|
||||
done here before the write barrier.
|
||||
*/
|
||||
|
||||
assert(_cffi_call_python_org != NULL);
|
||||
_cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org;
|
||||
}
|
||||
else {
|
||||
/* initialization failed. Reset this to NULL, even if it was
|
||||
already set to some other value. Future calls to
|
||||
_cffi_start_python() are still forced to occur, and will
|
||||
always return NULL from now on. */
|
||||
_cffi_call_python_org = NULL;
|
||||
}
|
||||
}
|
||||
|
||||
_cffi_release_reentrant_mutex();
|
||||
|
||||
return (_cffi_call_python_fnptr)_cffi_call_python_org;
|
||||
}
|
||||
|
||||
static
|
||||
void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args)
|
||||
{
|
||||
_cffi_call_python_fnptr fnptr;
|
||||
int current_err = errno;
|
||||
#ifdef _MSC_VER
|
||||
int current_lasterr = GetLastError();
|
||||
#endif
|
||||
fnptr = _cffi_start_python();
|
||||
if (fnptr == NULL) {
|
||||
fprintf(stderr, "function %s() called, but initialization code "
|
||||
"failed. Returning 0.\n", externpy->name);
|
||||
memset(args, 0, externpy->size_of_result);
|
||||
}
|
||||
#ifdef _MSC_VER
|
||||
SetLastError(current_lasterr);
|
||||
#endif
|
||||
errno = current_err;
|
||||
|
||||
if (fnptr != NULL)
|
||||
fnptr(externpy, args);
|
||||
}
|
||||
|
||||
|
||||
/* The cffi_start_python() function makes sure Python is initialized
|
||||
and our cffi module is set up. It can be called manually from the
|
||||
user C code. The same effect is obtained automatically from any
|
||||
dll-exported ``extern "Python"`` function. This function returns
|
||||
-1 if initialization failed, 0 if all is OK. */
|
||||
_CFFI_UNUSED_FN
|
||||
static int cffi_start_python(void)
|
||||
{
|
||||
if (_cffi_call_python == &_cffi_start_and_call_python) {
|
||||
if (_cffi_start_python() == NULL)
|
||||
return -1;
|
||||
}
|
||||
cffi_read_barrier();
|
||||
return 0;
|
||||
}
|
||||
|
||||
#undef cffi_compare_and_swap
|
||||
#undef cffi_write_barrier
|
||||
#undef cffi_read_barrier
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
|
@ -0,0 +1,961 @@
|
|||
import sys, types
|
||||
from .lock import allocate_lock
|
||||
from .error import CDefError
|
||||
from . import model
|
||||
|
||||
try:
|
||||
callable
|
||||
except NameError:
|
||||
# Python 3.1
|
||||
from collections import Callable
|
||||
callable = lambda x: isinstance(x, Callable)
|
||||
|
||||
try:
|
||||
basestring
|
||||
except NameError:
|
||||
# Python 3.x
|
||||
basestring = str
|
||||
|
||||
_unspecified = object()
|
||||
|
||||
|
||||
|
||||
class FFI(object):
|
||||
r'''
|
||||
The main top-level class that you instantiate once, or once per module.
|
||||
|
||||
Example usage:
|
||||
|
||||
ffi = FFI()
|
||||
ffi.cdef("""
|
||||
int printf(const char *, ...);
|
||||
""")
|
||||
|
||||
C = ffi.dlopen(None) # standard library
|
||||
-or-
|
||||
C = ffi.verify() # use a C compiler: verify the decl above is right
|
||||
|
||||
C.printf("hello, %s!\n", ffi.new("char[]", "world"))
|
||||
'''
|
||||
|
||||
def __init__(self, backend=None):
|
||||
"""Create an FFI instance. The 'backend' argument is used to
|
||||
select a non-default backend, mostly for tests.
|
||||
"""
|
||||
if backend is None:
|
||||
# You need PyPy (>= 2.0 beta), or a CPython (>= 2.6) with
|
||||
# _cffi_backend.so compiled.
|
||||
import _cffi_backend as backend
|
||||
from . import __version__
|
||||
if backend.__version__ != __version__:
|
||||
# bad version! Try to be as explicit as possible.
|
||||
if hasattr(backend, '__file__'):
|
||||
# CPython
|
||||
raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. When we import the top-level '_cffi_backend' extension module, we get version %s, located in %r. The two versions should be equal; check your installation." % (
|
||||
__version__, __file__,
|
||||
backend.__version__, backend.__file__))
|
||||
else:
|
||||
# PyPy
|
||||
raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. This interpreter comes with a built-in '_cffi_backend' module, which is version %s. The two versions should be equal; check your installation." % (
|
||||
__version__, __file__, backend.__version__))
|
||||
# (If you insist you can also try to pass the option
|
||||
# 'backend=backend_ctypes.CTypesBackend()', but don't
|
||||
# rely on it! It's probably not going to work well.)
|
||||
|
||||
from . import cparser
|
||||
self._backend = backend
|
||||
self._lock = allocate_lock()
|
||||
self._parser = cparser.Parser()
|
||||
self._cached_btypes = {}
|
||||
self._parsed_types = types.ModuleType('parsed_types').__dict__
|
||||
self._new_types = types.ModuleType('new_types').__dict__
|
||||
self._function_caches = []
|
||||
self._libraries = []
|
||||
self._cdefsources = []
|
||||
self._included_ffis = []
|
||||
self._windows_unicode = None
|
||||
self._init_once_cache = {}
|
||||
self._cdef_version = None
|
||||
self._embedding = None
|
||||
self._typecache = model.get_typecache(backend)
|
||||
if hasattr(backend, 'set_ffi'):
|
||||
backend.set_ffi(self)
|
||||
for name in list(backend.__dict__):
|
||||
if name.startswith('RTLD_'):
|
||||
setattr(self, name, getattr(backend, name))
|
||||
#
|
||||
with self._lock:
|
||||
self.BVoidP = self._get_cached_btype(model.voidp_type)
|
||||
self.BCharA = self._get_cached_btype(model.char_array_type)
|
||||
if isinstance(backend, types.ModuleType):
|
||||
# _cffi_backend: attach these constants to the class
|
||||
if not hasattr(FFI, 'NULL'):
|
||||
FFI.NULL = self.cast(self.BVoidP, 0)
|
||||
FFI.CData, FFI.CType = backend._get_types()
|
||||
else:
|
||||
# ctypes backend: attach these constants to the instance
|
||||
self.NULL = self.cast(self.BVoidP, 0)
|
||||
self.CData, self.CType = backend._get_types()
|
||||
self.buffer = backend.buffer
|
||||
|
||||
def cdef(self, csource, override=False, packed=False, pack=None):
|
||||
"""Parse the given C source. This registers all declared functions,
|
||||
types, and global variables. The functions and global variables can
|
||||
then be accessed via either 'ffi.dlopen()' or 'ffi.verify()'.
|
||||
The types can be used in 'ffi.new()' and other functions.
|
||||
If 'packed' is specified as True, all structs declared inside this
|
||||
cdef are packed, i.e. laid out without any field alignment at all.
|
||||
Alternatively, 'pack' can be a small integer, and requests for
|
||||
alignment greater than that are ignored (pack=1 is equivalent to
|
||||
packed=True).
|
||||
"""
|
||||
self._cdef(csource, override=override, packed=packed, pack=pack)
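# Editor's note -- an illustrative sketch of cdef() with 'pack', not part
# of the vendored file; with pack=1, fields get no alignment padding:
#
#     ffi = FFI()
#     ffi.cdef("struct point_s { char tag; int x; };", pack=1)
#     assert ffi.sizeof("struct point_s") == 5   # 1 + 4, no padding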
|
||||
|
||||
def embedding_api(self, csource, packed=False, pack=None):
|
||||
self._cdef(csource, packed=packed, pack=pack, dllexport=True)
|
||||
if self._embedding is None:
|
||||
self._embedding = ''
|
||||
|
||||
def _cdef(self, csource, override=False, **options):
|
||||
if not isinstance(csource, str): # unicode, on Python 2
|
||||
if not isinstance(csource, basestring):
|
||||
raise TypeError("cdef() argument must be a string")
|
||||
csource = csource.encode('ascii')
|
||||
with self._lock:
|
||||
self._cdef_version = object()
|
||||
self._parser.parse(csource, override=override, **options)
|
||||
self._cdefsources.append(csource)
|
||||
if override:
|
||||
for cache in self._function_caches:
|
||||
cache.clear()
|
||||
finishlist = self._parser._recomplete
|
||||
if finishlist:
|
||||
self._parser._recomplete = []
|
||||
for tp in finishlist:
|
||||
tp.finish_backend_type(self, finishlist)
|
||||
|
||||
def dlopen(self, name, flags=0):
|
||||
"""Load and return a dynamic library identified by 'name'.
|
||||
The standard C library can be loaded by passing None.
|
||||
Note that functions and types declared by 'ffi.cdef()' are not
|
||||
linked to a particular library, just like C headers; in the
|
||||
library we only look for the actual (untyped) symbols.
|
||||
"""
|
||||
assert isinstance(name, basestring) or name is None
|
||||
with self._lock:
|
||||
lib, function_cache = _make_ffi_library(self, name, flags)
|
||||
self._function_caches.append(function_cache)
|
||||
self._libraries.append(lib)
|
||||
return lib
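# Editor's note -- a minimal dlopen() sketch (assumes a Unix-like system
# where the math library is named "m"; illustrative only):
#
#     ffi = FFI()
#     ffi.cdef("double sqrt(double x);")
#     libm = ffi.dlopen("m")
#     assert libm.sqrt(4.0) == 2.0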
|
||||
|
||||
def dlclose(self, lib):
|
||||
"""Close a library obtained with ffi.dlopen(). After this call,
|
||||
access to functions or variables from the library will fail
|
||||
(possibly with a segmentation fault).
|
||||
"""
|
||||
type(lib).__cffi_close__(lib)
|
||||
|
||||
def _typeof_locked(self, cdecl):
|
||||
# call me with the lock!
|
||||
key = cdecl
|
||||
if key in self._parsed_types:
|
||||
return self._parsed_types[key]
|
||||
#
|
||||
if not isinstance(cdecl, str): # unicode, on Python 2
|
||||
cdecl = cdecl.encode('ascii')
|
||||
#
|
||||
type = self._parser.parse_type(cdecl)
|
||||
really_a_function_type = type.is_raw_function
|
||||
if really_a_function_type:
|
||||
type = type.as_function_pointer()
|
||||
btype = self._get_cached_btype(type)
|
||||
result = btype, really_a_function_type
|
||||
self._parsed_types[key] = result
|
||||
return result
|
||||
|
||||
def _typeof(self, cdecl, consider_function_as_funcptr=False):
|
||||
# string -> ctype object
|
||||
try:
|
||||
result = self._parsed_types[cdecl]
|
||||
except KeyError:
|
||||
with self._lock:
|
||||
result = self._typeof_locked(cdecl)
|
||||
#
|
||||
btype, really_a_function_type = result
|
||||
if really_a_function_type and not consider_function_as_funcptr:
|
||||
raise CDefError("the type %r is a function type, not a "
|
||||
"pointer-to-function type" % (cdecl,))
|
||||
return btype
|
||||
|
||||
def typeof(self, cdecl):
|
||||
"""Parse the C type given as a string and return the
|
||||
corresponding <ctype> object.
|
||||
It can also be used on a 'cdata' instance to get its C type.
|
||||
"""
|
||||
if isinstance(cdecl, basestring):
|
||||
return self._typeof(cdecl)
|
||||
if isinstance(cdecl, self.CData):
|
||||
return self._backend.typeof(cdecl)
|
||||
if isinstance(cdecl, types.BuiltinFunctionType):
|
||||
res = _builtin_function_type(cdecl)
|
||||
if res is not None:
|
||||
return res
|
||||
if (isinstance(cdecl, types.FunctionType)
|
||||
and hasattr(cdecl, '_cffi_base_type')):
|
||||
with self._lock:
|
||||
return self._get_cached_btype(cdecl._cffi_base_type)
|
||||
raise TypeError(type(cdecl))
|
||||
|
||||
def sizeof(self, cdecl):
|
||||
"""Return the size in bytes of the argument. It can be a
|
||||
string naming a C type, or a 'cdata' instance.
|
||||
"""
|
||||
if isinstance(cdecl, basestring):
|
||||
BType = self._typeof(cdecl)
|
||||
return self._backend.sizeof(BType)
|
||||
else:
|
||||
return self._backend.sizeof(cdecl)
|
||||
|
||||
def alignof(self, cdecl):
|
||||
"""Return the natural alignment size in bytes of the C type
|
||||
given as a string.
|
||||
"""
|
||||
if isinstance(cdecl, basestring):
|
||||
cdecl = self._typeof(cdecl)
|
||||
return self._backend.alignof(cdecl)
|
||||
|
||||
def offsetof(self, cdecl, *fields_or_indexes):
|
||||
"""Return the offset of the named field inside the given
|
||||
structure or array, which must be given as a C type name.
|
||||
You can give several field names in case of nested structures.
|
||||
You can also give numeric values which correspond to array
|
||||
items, in case of an array type.
|
||||
"""
|
||||
if isinstance(cdecl, basestring):
|
||||
cdecl = self._typeof(cdecl)
|
||||
return self._typeoffsetof(cdecl, *fields_or_indexes)[1]
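# Editor's note -- illustrative sketch of nested offsetof(); the exact
# offset depends on the platform's int size and padding:
#
#     ffi.cdef("struct pt { int x, y; }; struct rect { struct pt a, b; };")
#     ffi.offsetof("struct rect", "b", "y")   # offset of .b.y, e.g. 12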
|
||||
|
||||
def new(self, cdecl, init=None):
|
||||
"""Allocate an instance according to the specified C type and
|
||||
return a pointer to it. The specified C type must be either a
|
||||
pointer or an array: ``new('X *')`` allocates an X and returns
|
||||
a pointer to it, whereas ``new('X[n]')`` allocates an array of
|
||||
n X'es and returns an array referencing it (which works
|
||||
mostly like a pointer, like in C). You can also use
|
||||
``new('X[]', n)`` to allocate an array of a non-constant
|
||||
length n.
|
||||
|
||||
The memory is initialized following the rules of declaring a
|
||||
global variable in C: by default it is zero-initialized, but
|
||||
an explicit initializer can be given which can be used to
|
||||
fill all or part of the memory.
|
||||
|
||||
When the returned <cdata> object goes out of scope, the memory
|
||||
is freed. In other words the returned <cdata> object has
|
||||
ownership of the value of type 'cdecl' that it points to. This
|
||||
means that the raw data can be used as long as this object is
|
||||
kept alive, but must not be used for a longer time. Be careful
|
||||
about that when copying the pointer to the memory somewhere
|
||||
else, e.g. into another structure.
|
||||
"""
|
||||
if isinstance(cdecl, basestring):
|
||||
cdecl = self._typeof(cdecl)
|
||||
return self._backend.newp(cdecl, init)
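# Editor's note -- illustrative sketch of ffi.new():
#
#     p = ffi.new("int *")                  # one zero-initialized int
#     p[0] = 42
#     a = ffi.new("int[4]", [1, 2, 3, 4])   # initialized array
#     s = ffi.new("char[]", b"hello")       # null-terminated copy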
|
||||
|
||||
def new_allocator(self, alloc=None, free=None,
|
||||
should_clear_after_alloc=True):
|
||||
"""Return a new allocator, i.e. a function that behaves like ffi.new()
|
||||
but uses the provided low-level 'alloc' and 'free' functions.
|
||||
|
||||
'alloc' is called with the size as argument. If it returns NULL, a
|
||||
MemoryError is raised. 'free' is called with the result of 'alloc'
|
||||
as argument. Both can be either Python functions or direct C
|
||||
functions. If 'free' is None, then no free function is called.
|
||||
If both 'alloc' and 'free' are None, the default is used.
|
||||
|
||||
If 'should_clear_after_alloc' is set to False, then the memory
|
||||
returned by 'alloc' is assumed to be already cleared (or you are
|
||||
fine with garbage); otherwise CFFI will clear it.
|
||||
"""
|
||||
compiled_ffi = self._backend.FFI()
|
||||
allocator = compiled_ffi.new_allocator(alloc, free,
|
||||
should_clear_after_alloc)
|
||||
def allocate(cdecl, init=None):
|
||||
if isinstance(cdecl, basestring):
|
||||
cdecl = self._typeof(cdecl)
|
||||
return allocator(cdecl, init)
|
||||
return allocate
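# Editor's note -- a sketch routing allocations through libc; the
# malloc/free declarations are assumptions made for the example, and
# dlopen(None) does not work on Windows under Python 3 (see below):
#
#     ffi.cdef("void *malloc(size_t size); void free(void *ptr);")
#     libc = ffi.dlopen(None)
#     alloc = ffi.new_allocator(libc.malloc, libc.free)
#     buf = alloc("char[256]")   # freed via libc.free when collected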
|
||||
|
||||
def cast(self, cdecl, source):
|
||||
"""Similar to a C cast: returns an instance of the named C
|
||||
type initialized with the given 'source'. The source is
|
||||
cast between integers or pointers of any type.
|
||||
"""
|
||||
if isinstance(cdecl, basestring):
|
||||
cdecl = self._typeof(cdecl)
|
||||
return self._backend.cast(cdecl, source)
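# Editor's note -- illustrative sketch of ffi.cast(), following C
# cast semantics:
#
#     ffi.cast("int", 3.9)                       # <cdata 'int'> holding 3
#     ffi.cast("uintptr_t", ffi.new("int *"))    # pointer as an integer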
|
||||
|
||||
def string(self, cdata, maxlen=-1):
|
||||
"""Return a Python string (or unicode string) from the 'cdata'.
|
||||
If 'cdata' is a pointer or array of characters or bytes, returns
|
||||
the null-terminated string. The returned string extends until
|
||||
the first null character, or at most 'maxlen' characters. If
|
||||
'cdata' is an array then 'maxlen' defaults to its length.
|
||||
|
||||
If 'cdata' is a pointer or array of wchar_t, returns a unicode
|
||||
string following the same rules.
|
||||
|
||||
If 'cdata' is a single character or byte or a wchar_t, returns
|
||||
it as a string or unicode string.
|
||||
|
||||
If 'cdata' is an enum, returns the value of the enumerator as a
|
||||
string, or 'NUMBER' if the value is out of range.
|
||||
"""
|
||||
return self._backend.string(cdata, maxlen)
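# Editor's note -- illustrative sketch of ffi.string():
#
#     buf = ffi.new("char[]", b"hello\0world")
#     ffi.string(buf)        # -> b'hello' (stops at the first null)
#     ffi.string(buf, 3)     # -> b'hel'  (capped by maxlen)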
|
||||
|
||||
def unpack(self, cdata, length):
|
||||
"""Unpack an array of C data of the given length,
|
||||
returning a Python string/unicode/list.
|
||||
|
||||
If 'cdata' is a pointer to 'char', returns a byte string.
|
||||
It does not stop at the first null. This is equivalent to:
|
||||
ffi.buffer(cdata, length)[:]
|
||||
|
||||
If 'cdata' is a pointer to 'wchar_t', returns a unicode string.
|
||||
'length' is measured in wchar_t's; it is not the size in bytes.
|
||||
|
||||
If 'cdata' is a pointer to anything else, returns a list of
|
||||
'length' items. This is a faster equivalent to:
|
||||
[cdata[i] for i in range(length)]
|
||||
"""
|
||||
return self._backend.unpack(cdata, length)
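# Editor's note -- illustrative sketch of ffi.unpack():
#
#     buf = ffi.new("char[]", b"hi\0there")
#     ffi.unpack(buf, 5)     # -> b'hi\x00th' (does not stop at null)
#     arr = ffi.new("int[3]", [7, 8, 9])
#     ffi.unpack(arr, 3)     # -> [7, 8, 9]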
|
||||
|
||||
#def buffer(self, cdata, size=-1):
|
||||
# """Return a read-write buffer object that references the raw C data
|
||||
# pointed to by the given 'cdata'. The 'cdata' must be a pointer or
|
||||
# an array. Can be passed to functions expecting a buffer, or directly
|
||||
# manipulated with:
|
||||
#
|
||||
# buf[:] get a copy of it in a regular string, or
|
||||
# buf[idx] as a single character
|
||||
# buf[:] = ...
|
||||
# buf[idx] = ... change the content
|
||||
# """
|
||||
# note that 'buffer' is a type, set on this instance by __init__
|
||||
|
||||
def from_buffer(self, cdecl, python_buffer=_unspecified,
|
||||
require_writable=False):
|
||||
"""Return a cdata of the given type pointing to the data of the
|
||||
given Python object, which must support the buffer interface.
|
||||
Note that this is not meant to be used on the built-in types
|
||||
str or unicode (you can build 'char[]' arrays explicitly)
|
||||
but only on objects containing large quantities of raw data
|
||||
in some other format, like 'array.array' or numpy arrays.
|
||||
|
||||
The first argument is optional and defaults to 'char[]'.
|
||||
"""
|
||||
if python_buffer is _unspecified:
|
||||
cdecl, python_buffer = self.BCharA, cdecl
|
||||
elif isinstance(cdecl, basestring):
|
||||
cdecl = self._typeof(cdecl)
|
||||
return self._backend.from_buffer(cdecl, python_buffer,
|
||||
require_writable)
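# Editor's note -- illustrative sketch with array.array; the view is
# zero-copy, so 'a' must be kept alive while 'p' is in use:
#
#     import array
#     a = array.array('i', [1, 2, 3])
#     p = ffi.from_buffer("int[]", a)
#     p[2]    # -> 3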
|
||||
|
||||
def memmove(self, dest, src, n):
|
||||
"""ffi.memmove(dest, src, n) copies n bytes of memory from src to dest.
|
||||
|
||||
Like the C function memmove(), the memory areas may overlap;
|
||||
apart from that it behaves like the C function memcpy().
|
||||
|
||||
'src' can be any cdata ptr or array, or any Python buffer object.
|
||||
'dest' can be any cdata ptr or array, or a writable Python buffer
|
||||
object. The size to copy, 'n', is always measured in bytes.
|
||||
|
||||
Unlike other methods, this one supports all Python buffers, including
|
||||
byte strings and bytearrays---but it still does not support
|
||||
non-contiguous buffers.
|
||||
"""
|
||||
return self._backend.memmove(dest, src, n)
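# Editor's note -- illustrative sketch of ffi.memmove():
#
#     dest = ffi.new("char[8]")
#     ffi.memmove(dest, b"abcdef", 6)   # 'src' here is a byte string
#     ffi.string(dest)                  # -> b'abcdef'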
|
||||
|
||||
def callback(self, cdecl, python_callable=None, error=None, onerror=None):
|
||||
"""Return a callback object or a decorator making such a
|
||||
callback object. 'cdecl' must name a C function pointer type.
|
||||
The callback invokes the specified 'python_callable' (which may
|
||||
be provided either directly or via a decorator). Important: the
|
||||
callback object must be manually kept alive for as long as the
|
||||
callback may be invoked from the C level.
|
||||
"""
|
||||
def callback_decorator_wrap(python_callable):
|
||||
if not callable(python_callable):
|
||||
raise TypeError("the 'python_callable' argument "
|
||||
"is not callable")
|
||||
return self._backend.callback(cdecl, python_callable,
|
||||
error, onerror)
|
||||
if isinstance(cdecl, basestring):
|
||||
cdecl = self._typeof(cdecl, consider_function_as_funcptr=True)
|
||||
if python_callable is None:
|
||||
return callback_decorator_wrap # decorator mode
|
||||
else:
|
||||
return callback_decorator_wrap(python_callable) # direct mode
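# Editor's note -- illustrative decorator-mode sketch; the callback
# object must stay referenced for as long as C code may invoke it:
#
#     @ffi.callback("int(int, int)")
#     def add(x, y):
#         return x + y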
|
||||
|
||||
def getctype(self, cdecl, replace_with=''):
|
||||
"""Return a string giving the C type 'cdecl', which may be itself
|
||||
a string or a <ctype> object. If 'replace_with' is given, it gives
|
||||
extra text to append (or insert for more complicated C types), like
|
||||
a variable name, or '*' to actually get the C type 'pointer-to-cdecl'.
|
||||
"""
|
||||
if isinstance(cdecl, basestring):
|
||||
cdecl = self._typeof(cdecl)
|
||||
replace_with = replace_with.strip()
|
||||
if (replace_with.startswith('*')
|
||||
and '&[' in self._backend.getcname(cdecl, '&')):
|
||||
replace_with = '(%s)' % replace_with
|
||||
elif replace_with and not replace_with[0] in '[(':
|
||||
replace_with = ' ' + replace_with
|
||||
return self._backend.getcname(cdecl, replace_with)
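# Editor's note -- illustrative sketch of getctype():
#
#     ffi.getctype("int", "*")      # -> 'int *'
#     ffi.getctype("int[5]", "x")   # -> 'int x[5]'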
|
||||
|
||||
def gc(self, cdata, destructor, size=0):
|
||||
"""Return a new cdata object that points to the same
|
||||
data. Later, when this new cdata object is garbage-collected,
|
||||
'destructor(old_cdata_object)' will be called.
|
||||
|
||||
The optional 'size' gives an estimate of the size, used to
|
||||
trigger the garbage collection more eagerly. So far only used
|
||||
on PyPy. It tells the GC that the returned object keeps alive
|
||||
roughly 'size' bytes of external memory.
|
||||
"""
|
||||
return self._backend.gcp(cdata, destructor, size)
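# Editor's note -- a sketch reusing the hypothetical libc handle from
# the new_allocator() note above:
#
#     raw = libc.malloc(100)
#     p = ffi.gc(raw, libc.free, size=100)   # freed when 'p' is collected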
|
||||
|
||||
def _get_cached_btype(self, type):
|
||||
assert self._lock.acquire(False) is False
|
||||
# call me with the lock!
|
||||
try:
|
||||
BType = self._cached_btypes[type]
|
||||
except KeyError:
|
||||
finishlist = []
|
||||
BType = type.get_cached_btype(self, finishlist)
|
||||
for type in finishlist:
|
||||
type.finish_backend_type(self, finishlist)
|
||||
return BType
|
||||
|
||||
def verify(self, source='', tmpdir=None, **kwargs):
|
||||
"""Verify that the current ffi signatures compile on this
|
||||
machine, and return a dynamic library object. The dynamic
|
||||
library can be used to call functions and access global
|
||||
variables declared in this 'ffi'. The library is compiled
|
||||
by the C compiler: it gives you C-level API compatibility
|
||||
(including calling macros). This is unlike 'ffi.dlopen()',
|
||||
which requires binary compatibility in the signatures.
|
||||
"""
|
||||
from .verifier import Verifier, _caller_dir_pycache
|
||||
#
|
||||
# If set_unicode(True) was called, insert the UNICODE and
|
||||
# _UNICODE macro declarations
|
||||
if self._windows_unicode:
|
||||
self._apply_windows_unicode(kwargs)
|
||||
#
|
||||
# Set the tmpdir here, and not in Verifier.__init__: it picks
|
||||
# up the caller's directory, which we want to be the caller of
|
||||
# ffi.verify(), as opposed to the caller of Verifier().
|
||||
tmpdir = tmpdir or _caller_dir_pycache()
|
||||
#
|
||||
# Make a Verifier() and use it to load the library.
|
||||
self.verifier = Verifier(self, source, tmpdir, **kwargs)
|
||||
lib = self.verifier.load_library()
|
||||
#
|
||||
# Save the loaded library for keep-alive purposes, even
|
||||
# if the caller doesn't keep it alive itself (it should).
|
||||
self._libraries.append(lib)
|
||||
return lib
|
||||
|
||||
def _get_errno(self):
|
||||
return self._backend.get_errno()
|
||||
def _set_errno(self, errno):
|
||||
self._backend.set_errno(errno)
|
||||
errno = property(_get_errno, _set_errno, None,
|
||||
"the value of 'errno' from/to the C calls")
|
||||
|
||||
def getwinerror(self, code=-1):
|
||||
return self._backend.getwinerror(code)
|
||||
|
||||
def _pointer_to(self, ctype):
|
||||
with self._lock:
|
||||
return model.pointer_cache(self, ctype)
|
||||
|
||||
def addressof(self, cdata, *fields_or_indexes):
|
||||
"""Return the address of a <cdata 'struct-or-union'>.
|
||||
If 'fields_or_indexes' are given, returns the address of that
|
||||
field or array item in the structure or array, recursively in
|
||||
case of nested structures.
|
||||
"""
|
||||
try:
|
||||
ctype = self._backend.typeof(cdata)
|
||||
except TypeError:
|
||||
if '__addressof__' in type(cdata).__dict__:
|
||||
return type(cdata).__addressof__(cdata, *fields_or_indexes)
|
||||
raise
|
||||
if fields_or_indexes:
|
||||
ctype, offset = self._typeoffsetof(ctype, *fields_or_indexes)
|
||||
else:
|
||||
if ctype.kind == "pointer":
|
||||
raise TypeError("addressof(pointer)")
|
||||
offset = 0
|
||||
ctypeptr = self._pointer_to(ctype)
|
||||
return self._backend.rawaddressof(ctypeptr, cdata, offset)
|
||||
|
||||
def _typeoffsetof(self, ctype, field_or_index, *fields_or_indexes):
|
||||
ctype, offset = self._backend.typeoffsetof(ctype, field_or_index)
|
||||
for field1 in fields_or_indexes:
|
||||
ctype, offset1 = self._backend.typeoffsetof(ctype, field1, 1)
|
||||
offset += offset1
|
||||
return ctype, offset
|
||||
|
||||
def include(self, ffi_to_include):
|
||||
"""Includes the typedefs, structs, unions and enums defined
|
||||
in another FFI instance. Usage is similar to a #include in C,
|
||||
where a part of the program might include types defined in
|
||||
another part for its own usage. Note that the include()
|
||||
method has no effect on functions, constants and global
|
||||
variables, which must anyway be accessed directly from the
|
||||
lib object returned by the original FFI instance.
|
||||
"""
|
||||
if not isinstance(ffi_to_include, FFI):
|
||||
raise TypeError("ffi.include() expects an argument that is also of"
|
||||
" type cffi.FFI, not %r" % (
|
||||
type(ffi_to_include).__name__,))
|
||||
if ffi_to_include is self:
|
||||
raise ValueError("self.include(self)")
|
||||
with ffi_to_include._lock:
|
||||
with self._lock:
|
||||
self._parser.include(ffi_to_include._parser)
|
||||
self._cdefsources.append('[')
|
||||
self._cdefsources.extend(ffi_to_include._cdefsources)
|
||||
self._cdefsources.append(']')
|
||||
self._included_ffis.append(ffi_to_include)
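# Editor's note -- illustrative sketch of include():
#
#     ffi1 = FFI(); ffi1.cdef("typedef int my_int_t;")
#     ffi2 = FFI(); ffi2.include(ffi1)
#     ffi2.new("my_int_t *")    # typedef from ffi1 is now visible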
|
||||
|
||||
def new_handle(self, x):
|
||||
return self._backend.newp_handle(self.BVoidP, x)
|
||||
|
||||
def from_handle(self, x):
|
||||
return self._backend.from_handle(x)
|
||||
|
||||
def release(self, x):
|
||||
self._backend.release(x)
|
||||
|
||||
def set_unicode(self, enabled_flag):
|
||||
"""Windows: if 'enabled_flag' is True, enable the UNICODE and
|
||||
_UNICODE defines in C, and declare the types like TCHAR and LPCTSTR
|
||||
to be (pointers to) wchar_t. If 'enabled_flag' is False,
|
||||
declare these types to be (pointers to) plain 8-bit characters.
|
||||
This is mostly for backward compatibility; you usually want True.
|
||||
"""
|
||||
if self._windows_unicode is not None:
|
||||
raise ValueError("set_unicode() can only be called once")
|
||||
enabled_flag = bool(enabled_flag)
|
||||
if enabled_flag:
|
||||
self.cdef("typedef wchar_t TBYTE;"
|
||||
"typedef wchar_t TCHAR;"
|
||||
"typedef const wchar_t *LPCTSTR;"
|
||||
"typedef const wchar_t *PCTSTR;"
|
||||
"typedef wchar_t *LPTSTR;"
|
||||
"typedef wchar_t *PTSTR;"
|
||||
"typedef TBYTE *PTBYTE;"
|
||||
"typedef TCHAR *PTCHAR;")
|
||||
else:
|
||||
self.cdef("typedef char TBYTE;"
|
||||
"typedef char TCHAR;"
|
||||
"typedef const char *LPCTSTR;"
|
||||
"typedef const char *PCTSTR;"
|
||||
"typedef char *LPTSTR;"
|
||||
"typedef char *PTSTR;"
|
||||
"typedef TBYTE *PTBYTE;"
|
||||
"typedef TCHAR *PTCHAR;")
|
||||
self._windows_unicode = enabled_flag
|
||||
|
||||
def _apply_windows_unicode(self, kwds):
|
||||
defmacros = kwds.get('define_macros', ())
|
||||
if not isinstance(defmacros, (list, tuple)):
|
||||
raise TypeError("'define_macros' must be a list or tuple")
|
||||
defmacros = list(defmacros) + [('UNICODE', '1'),
|
||||
('_UNICODE', '1')]
|
||||
kwds['define_macros'] = defmacros
|
||||
|
||||
def _apply_embedding_fix(self, kwds):
|
||||
# must include an argument like "-lpython2.7" for the compiler
|
||||
def ensure(key, value):
|
||||
lst = kwds.setdefault(key, [])
|
||||
if value not in lst:
|
||||
lst.append(value)
|
||||
#
|
||||
if '__pypy__' in sys.builtin_module_names:
|
||||
import os
|
||||
if sys.platform == "win32":
|
||||
# we need 'libpypy-c.lib'. Current distributions of
|
||||
# pypy (>= 4.1) contain it as 'libs/python27.lib'.
|
||||
pythonlib = "python{0[0]}{0[1]}".format(sys.version_info)
|
||||
if hasattr(sys, 'prefix'):
|
||||
ensure('library_dirs', os.path.join(sys.prefix, 'libs'))
|
||||
else:
|
||||
# we need 'libpypy-c.{so,dylib}', which should be by
|
||||
# default located in 'sys.prefix/bin' for installed
|
||||
# systems.
|
||||
if sys.version_info < (3,):
|
||||
pythonlib = "pypy-c"
|
||||
else:
|
||||
pythonlib = "pypy3-c"
|
||||
if hasattr(sys, 'prefix'):
|
||||
ensure('library_dirs', os.path.join(sys.prefix, 'bin'))
|
||||
# On uninstalled pypy's, the libpypy-c is typically found in
|
||||
# .../pypy/goal/.
|
||||
if hasattr(sys, 'prefix'):
|
||||
ensure('library_dirs', os.path.join(sys.prefix, 'pypy', 'goal'))
|
||||
else:
|
||||
if sys.platform == "win32":
|
||||
template = "python%d%d"
|
||||
if hasattr(sys, 'gettotalrefcount'):
|
||||
template += '_d'
|
||||
else:
|
||||
try:
|
||||
import sysconfig
|
||||
except ImportError: # 2.6
|
||||
from distutils import sysconfig
|
||||
template = "python%d.%d"
|
||||
if sysconfig.get_config_var('DEBUG_EXT'):
|
||||
template += sysconfig.get_config_var('DEBUG_EXT')
|
||||
pythonlib = (template %
|
||||
(sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
|
||||
if hasattr(sys, 'abiflags'):
|
||||
pythonlib += sys.abiflags
|
||||
ensure('libraries', pythonlib)
|
||||
if sys.platform == "win32":
|
||||
ensure('extra_link_args', '/MANIFEST')
|
||||
|
||||
def set_source(self, module_name, source, source_extension='.c', **kwds):
|
||||
import os
|
||||
if hasattr(self, '_assigned_source'):
|
||||
raise ValueError("set_source() cannot be called several times "
|
||||
"per ffi object")
|
||||
if not isinstance(module_name, basestring):
|
||||
raise TypeError("'module_name' must be a string")
|
||||
if os.sep in module_name or (os.altsep and os.altsep in module_name):
|
||||
raise ValueError("'module_name' must not contain '/': use a dotted "
|
||||
"name to make a 'package.module' location")
|
||||
self._assigned_source = (str(module_name), source,
|
||||
source_extension, kwds)
|
||||
|
||||
def set_source_pkgconfig(self, module_name, pkgconfig_libs, source,
|
||||
source_extension='.c', **kwds):
|
||||
from . import pkgconfig
|
||||
if not isinstance(pkgconfig_libs, list):
|
||||
raise TypeError("the pkgconfig_libs argument must be a list "
|
||||
"of package names")
|
||||
kwds2 = pkgconfig.flags_from_pkgconfig(pkgconfig_libs)
|
||||
pkgconfig.merge_flags(kwds, kwds2)
|
||||
self.set_source(module_name, source, source_extension, **kwds)
|
||||
|
||||
def distutils_extension(self, tmpdir='build', verbose=True):
|
||||
from distutils.dir_util import mkpath
|
||||
from .recompiler import recompile
|
||||
#
|
||||
if not hasattr(self, '_assigned_source'):
|
||||
if hasattr(self, 'verifier'): # fallback, 'tmpdir' ignored
|
||||
return self.verifier.get_extension()
|
||||
raise ValueError("set_source() must be called before"
|
||||
" distutils_extension()")
|
||||
module_name, source, source_extension, kwds = self._assigned_source
|
||||
if source is None:
|
||||
raise TypeError("distutils_extension() is only for C extension "
|
||||
"modules, not for dlopen()-style pure Python "
|
||||
"modules")
|
||||
mkpath(tmpdir)
|
||||
ext, updated = recompile(self, module_name,
|
||||
source, tmpdir=tmpdir, extradir=tmpdir,
|
||||
source_extension=source_extension,
|
||||
call_c_compiler=False, **kwds)
|
||||
if verbose:
|
||||
if updated:
|
||||
sys.stderr.write("regenerated: %r\n" % (ext.sources[0],))
|
||||
else:
|
||||
sys.stderr.write("not modified: %r\n" % (ext.sources[0],))
|
||||
return ext
|
||||
|
||||
def emit_c_code(self, filename):
|
||||
from .recompiler import recompile
|
||||
#
|
||||
if not hasattr(self, '_assigned_source'):
|
||||
raise ValueError("set_source() must be called before emit_c_code()")
|
||||
module_name, source, source_extension, kwds = self._assigned_source
|
||||
if source is None:
|
||||
raise TypeError("emit_c_code() is only for C extension modules, "
|
||||
"not for dlopen()-style pure Python modules")
|
||||
recompile(self, module_name, source,
|
||||
c_file=filename, call_c_compiler=False, **kwds)
|
||||
|
||||
def emit_python_code(self, filename):
|
||||
from .recompiler import recompile
|
||||
#
|
||||
if not hasattr(self, '_assigned_source'):
|
||||
raise ValueError("set_source() must be called before emit_c_code()")
|
||||
module_name, source, source_extension, kwds = self._assigned_source
|
||||
if source is not None:
|
||||
raise TypeError("emit_python_code() is only for dlopen()-style "
|
||||
"pure Python modules, not for C extension modules")
|
||||
recompile(self, module_name, source,
|
||||
c_file=filename, call_c_compiler=False, **kwds)
|
||||
|
||||
def compile(self, tmpdir='.', verbose=0, target=None, debug=None):
|
||||
"""The 'target' argument gives the final file name of the
|
||||
compiled DLL. Use '*' to force distutils' choice, suitable for
|
||||
regular CPython C API modules. Use a file name ending in '.*'
|
||||
to ask for the system's default extension for dynamic libraries
|
||||
(.so/.dll/.dylib).
|
||||
|
||||
The default is '*' when building a non-embedded C API extension,
|
||||
and (module_name + '.*') when building an embedded library.
|
||||
"""
|
||||
from .recompiler import recompile
|
||||
#
|
||||
if not hasattr(self, '_assigned_source'):
|
||||
raise ValueError("set_source() must be called before compile()")
|
||||
module_name, source, source_extension, kwds = self._assigned_source
|
||||
return recompile(self, module_name, source, tmpdir=tmpdir,
|
||||
target=target, source_extension=source_extension,
|
||||
compiler_verbose=verbose, debug=debug, **kwds)
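# Editor's note -- illustrative sketch ("_example" is a made-up module
# name); with the default target='*', distutils picks the file name:
#
#     ffi.set_source("_example", "#include <math.h>")
#     ffi.compile(verbose=True)   # builds _example.so/.pyd via distutils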
|
||||
|
||||
def init_once(self, func, tag):
|
||||
# Read _init_once_cache[tag], which is either (False, lock) if
|
||||
# we're calling the function now in some thread, or (True, result).
|
||||
# Don't call setdefault() in most cases, to avoid allocating and
|
||||
# immediately freeing a lock; but still use setdefault() to avoid
|
||||
# races.
|
||||
try:
|
||||
x = self._init_once_cache[tag]
|
||||
except KeyError:
|
||||
x = self._init_once_cache.setdefault(tag, (False, allocate_lock()))
|
||||
# Common case: we got (True, result), so we return the result.
|
||||
if x[0]:
|
||||
return x[1]
|
||||
# Else, it's a lock. Acquire it to serialize the following tests.
|
||||
with x[1]:
|
||||
# Read again from _init_once_cache the current status.
|
||||
x = self._init_once_cache[tag]
|
||||
if x[0]:
|
||||
return x[1]
|
||||
# Call the function and store the result back.
|
||||
result = func()
|
||||
self._init_once_cache[tag] = (True, result)
|
||||
return result
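# Editor's note -- illustrative sketch: run an expensive setup at most
# once per FFI instance, across threads ('open_db' is hypothetical):
#
#     def open_db():
#         return some_expensive_connection()
#     db = ffi.init_once(open_db, "open-db")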
|
||||
|
||||
def embedding_init_code(self, pysource):
|
||||
if self._embedding:
|
||||
raise ValueError("embedding_init_code() can only be called once")
|
||||
# fix 'pysource' before it gets dumped into the C file:
|
||||
# - remove empty lines at the beginning, so it starts at "line 1"
|
||||
# - dedent, if all non-empty lines are indented
|
||||
# - check for SyntaxErrors
|
||||
import re
|
||||
match = re.match(r'\s*\n', pysource)
|
||||
if match:
|
||||
pysource = pysource[match.end():]
|
||||
lines = pysource.splitlines() or ['']
|
||||
prefix = re.match(r'\s*', lines[0]).group()
|
||||
for i in range(1, len(lines)):
|
||||
line = lines[i]
|
||||
if line.rstrip():
|
||||
while not line.startswith(prefix):
|
||||
prefix = prefix[:-1]
|
||||
i = len(prefix)
|
||||
lines = [line[i:]+'\n' for line in lines]
|
||||
pysource = ''.join(lines)
|
||||
#
|
||||
compile(pysource, "cffi_init", "exec")
|
||||
#
|
||||
self._embedding = pysource
|
||||
|
||||
def def_extern(self, *args, **kwds):
|
||||
raise ValueError("ffi.def_extern() is only available on API-mode FFI "
|
||||
"objects")
|
||||
|
||||
def list_types(self):
|
||||
"""Returns the user type names known to this FFI instance.
|
||||
This returns a tuple containing three lists of names:
|
||||
(typedef_names, names_of_structs, names_of_unions)
|
||||
"""
|
||||
typedefs = []
|
||||
structs = []
|
||||
unions = []
|
||||
for key in self._parser._declarations:
|
||||
if key.startswith('typedef '):
|
||||
typedefs.append(key[8:])
|
||||
elif key.startswith('struct '):
|
||||
structs.append(key[7:])
|
||||
elif key.startswith('union '):
|
||||
unions.append(key[6:])
|
||||
typedefs.sort()
|
||||
structs.sort()
|
||||
unions.sort()
|
||||
return (typedefs, structs, unions)
|
||||
|
||||
|
||||
def _load_backend_lib(backend, name, flags):
|
||||
import os
|
||||
if name is None:
|
||||
if sys.platform != "win32":
|
||||
return backend.load_library(None, flags)
|
||||
name = "c" # Windows: load_library(None) fails, but this works
|
||||
# on Python 2 (backward compatibility hack only)
|
||||
first_error = None
|
||||
if '.' in name or '/' in name or os.sep in name:
|
||||
try:
|
||||
return backend.load_library(name, flags)
|
||||
except OSError as e:
|
||||
first_error = e
|
||||
import ctypes.util
|
||||
path = ctypes.util.find_library(name)
|
||||
if path is None:
|
||||
if name == "c" and sys.platform == "win32" and sys.version_info >= (3,):
|
||||
raise OSError("dlopen(None) cannot work on Windows for Python 3 "
|
||||
"(see http://bugs.python.org/issue23606)")
|
||||
msg = ("ctypes.util.find_library() did not manage "
|
||||
"to locate a library called %r" % (name,))
|
||||
if first_error is not None:
|
||||
msg = "%s. Additionally, %s" % (first_error, msg)
|
||||
raise OSError(msg)
|
||||
return backend.load_library(path, flags)
|
||||
|
||||
def _make_ffi_library(ffi, libname, flags):
|
||||
backend = ffi._backend
|
||||
backendlib = _load_backend_lib(backend, libname, flags)
|
||||
#
|
||||
def accessor_function(name):
|
||||
key = 'function ' + name
|
||||
tp, _ = ffi._parser._declarations[key]
|
||||
BType = ffi._get_cached_btype(tp)
|
||||
value = backendlib.load_function(BType, name)
|
||||
library.__dict__[name] = value
|
||||
#
|
||||
def accessor_variable(name):
|
||||
key = 'variable ' + name
|
||||
tp, _ = ffi._parser._declarations[key]
|
||||
BType = ffi._get_cached_btype(tp)
|
||||
read_variable = backendlib.read_variable
|
||||
write_variable = backendlib.write_variable
|
||||
setattr(FFILibrary, name, property(
|
||||
lambda self: read_variable(BType, name),
|
||||
lambda self, value: write_variable(BType, name, value)))
|
||||
#
|
||||
def addressof_var(name):
|
||||
try:
|
||||
return addr_variables[name]
|
||||
except KeyError:
|
||||
with ffi._lock:
|
||||
if name not in addr_variables:
|
||||
key = 'variable ' + name
|
||||
tp, _ = ffi._parser._declarations[key]
|
||||
BType = ffi._get_cached_btype(tp)
|
||||
if BType.kind != 'array':
|
||||
BType = model.pointer_cache(ffi, BType)
|
||||
p = backendlib.load_function(BType, name)
|
||||
addr_variables[name] = p
|
||||
return addr_variables[name]
|
||||
#
|
||||
def accessor_constant(name):
|
||||
raise NotImplementedError("non-integer constant '%s' cannot be "
|
||||
"accessed from a dlopen() library" % (name,))
|
||||
#
|
||||
def accessor_int_constant(name):
|
||||
library.__dict__[name] = ffi._parser._int_constants[name]
|
||||
#
|
||||
accessors = {}
|
||||
accessors_version = [False]
|
||||
addr_variables = {}
|
||||
#
|
||||
def update_accessors():
|
||||
if accessors_version[0] is ffi._cdef_version:
|
||||
return
|
||||
#
|
||||
for key, (tp, _) in ffi._parser._declarations.items():
|
||||
if not isinstance(tp, model.EnumType):
|
||||
tag, name = key.split(' ', 1)
|
||||
if tag == 'function':
|
||||
accessors[name] = accessor_function
|
||||
elif tag == 'variable':
|
||||
accessors[name] = accessor_variable
|
||||
elif tag == 'constant':
|
||||
accessors[name] = accessor_constant
|
||||
else:
|
||||
for i, enumname in enumerate(tp.enumerators):
|
||||
def accessor_enum(name, tp=tp, i=i):
|
||||
tp.check_not_partial()
|
||||
library.__dict__[name] = tp.enumvalues[i]
|
||||
accessors[enumname] = accessor_enum
|
||||
for name in ffi._parser._int_constants:
|
||||
accessors.setdefault(name, accessor_int_constant)
|
||||
accessors_version[0] = ffi._cdef_version
|
||||
#
|
||||
def make_accessor(name):
|
||||
with ffi._lock:
|
||||
if name in library.__dict__ or name in FFILibrary.__dict__:
|
||||
return # added by another thread while waiting for the lock
|
||||
if name not in accessors:
|
||||
update_accessors()
|
||||
if name not in accessors:
|
||||
raise AttributeError(name)
|
||||
accessors[name](name)
|
||||
#
|
||||
class FFILibrary(object):
|
||||
def __getattr__(self, name):
|
||||
make_accessor(name)
|
||||
return getattr(self, name)
|
||||
def __setattr__(self, name, value):
|
||||
try:
|
||||
property = getattr(self.__class__, name)
|
||||
except AttributeError:
|
||||
make_accessor(name)
|
||||
setattr(self, name, value)
|
||||
else:
|
||||
property.__set__(self, value)
|
||||
def __dir__(self):
|
||||
with ffi._lock:
|
||||
update_accessors()
|
||||
return accessors.keys()
|
||||
def __addressof__(self, name):
|
||||
if name in library.__dict__:
|
||||
return library.__dict__[name]
|
||||
if name in FFILibrary.__dict__:
|
||||
return addressof_var(name)
|
||||
make_accessor(name)
|
||||
if name in library.__dict__:
|
||||
return library.__dict__[name]
|
||||
if name in FFILibrary.__dict__:
|
||||
return addressof_var(name)
|
||||
raise AttributeError("cffi library has no function or "
|
||||
"global variable named '%s'" % (name,))
|
||||
def __cffi_close__(self):
|
||||
backendlib.close_lib()
|
||||
self.__dict__.clear()
|
||||
#
|
||||
if libname is not None:
|
||||
try:
|
||||
if not isinstance(libname, str): # unicode, on Python 2
|
||||
libname = libname.encode('utf-8')
|
||||
FFILibrary.__name__ = 'FFILibrary_%s' % libname
|
||||
except UnicodeError:
|
||||
pass
|
||||
library = FFILibrary()
|
||||
return library, library.__dict__
|
||||
|
||||
def _builtin_function_type(func):
|
||||
# a hack to make at least ffi.typeof(builtin_function) work,
|
||||
# if the builtin function was obtained by 'vengine_cpy'.
|
||||
import sys
|
||||
try:
|
||||
module = sys.modules[func.__module__]
|
||||
ffi = module._cffi_original_ffi
|
||||
types_of_builtin_funcs = module._cffi_types_of_builtin_funcs
|
||||
tp = types_of_builtin_funcs[func]
|
||||
except (KeyError, AttributeError, TypeError):
|
||||
return None
|
||||
else:
|
||||
with ffi._lock:
|
||||
return ffi._get_cached_btype(tp)
|
File diff suppressed because it is too large
|
@ -0,0 +1,187 @@
from .error import VerificationError


class CffiOp(object):
    def __init__(self, op, arg):
        self.op = op
        self.arg = arg

    def as_c_expr(self):
        if self.op is None:
            assert isinstance(self.arg, str)
            return '(_cffi_opcode_t)(%s)' % (self.arg,)
        classname = CLASS_NAME[self.op]
        return '_CFFI_OP(_CFFI_OP_%s, %s)' % (classname, self.arg)

    def as_python_bytes(self):
        if self.op is None and self.arg.isdigit():
            value = int(self.arg)     # non-negative: '-' not in self.arg
            if value >= 2**31:
                raise OverflowError("cannot emit %r: limited to 2**31-1"
                                    % (self.arg,))
            return format_four_bytes(value)
        if isinstance(self.arg, str):
            raise VerificationError("cannot emit to Python: %r" % (self.arg,))
        return format_four_bytes((self.arg << 8) | self.op)

    def __str__(self):
        classname = CLASS_NAME.get(self.op, self.op)
        return '(%s %s)' % (classname, self.arg)


def format_four_bytes(num):
    return '\\x%02X\\x%02X\\x%02X\\x%02X' % (
        (num >> 24) & 0xFF,
        (num >> 16) & 0xFF,
        (num >>  8) & 0xFF,
        (num      ) & 0xFF)


OP_PRIMITIVE       = 1
OP_POINTER         = 3
OP_ARRAY           = 5
OP_OPEN_ARRAY      = 7
OP_STRUCT_UNION    = 9
OP_ENUM            = 11
OP_FUNCTION        = 13
OP_FUNCTION_END    = 15
OP_NOOP            = 17
OP_BITFIELD        = 19
OP_TYPENAME        = 21
OP_CPYTHON_BLTN_V  = 23   # varargs
OP_CPYTHON_BLTN_N  = 25   # noargs
OP_CPYTHON_BLTN_O  = 27   # O  (i.e. a single arg)
OP_CONSTANT        = 29
OP_CONSTANT_INT    = 31
OP_GLOBAL_VAR      = 33
OP_DLOPEN_FUNC     = 35
OP_DLOPEN_CONST    = 37
OP_GLOBAL_VAR_F    = 39
OP_EXTERN_PYTHON   = 41

PRIM_VOID          = 0
PRIM_BOOL          = 1
PRIM_CHAR          = 2
PRIM_SCHAR         = 3
PRIM_UCHAR         = 4
PRIM_SHORT         = 5
PRIM_USHORT        = 6
PRIM_INT           = 7
PRIM_UINT          = 8
PRIM_LONG          = 9
PRIM_ULONG         = 10
PRIM_LONGLONG      = 11
PRIM_ULONGLONG     = 12
PRIM_FLOAT         = 13
PRIM_DOUBLE        = 14
PRIM_LONGDOUBLE    = 15

PRIM_WCHAR         = 16
PRIM_INT8          = 17
PRIM_UINT8         = 18
PRIM_INT16         = 19
PRIM_UINT16        = 20
PRIM_INT32         = 21
PRIM_UINT32        = 22
PRIM_INT64         = 23
PRIM_UINT64        = 24
PRIM_INTPTR        = 25
PRIM_UINTPTR       = 26
PRIM_PTRDIFF       = 27
PRIM_SIZE          = 28
PRIM_SSIZE         = 29
PRIM_INT_LEAST8    = 30
PRIM_UINT_LEAST8   = 31
PRIM_INT_LEAST16   = 32
PRIM_UINT_LEAST16  = 33
PRIM_INT_LEAST32   = 34
PRIM_UINT_LEAST32  = 35
PRIM_INT_LEAST64   = 36
PRIM_UINT_LEAST64  = 37
PRIM_INT_FAST8     = 38
PRIM_UINT_FAST8    = 39
PRIM_INT_FAST16    = 40
PRIM_UINT_FAST16   = 41
PRIM_INT_FAST32    = 42
PRIM_UINT_FAST32   = 43
PRIM_INT_FAST64    = 44
PRIM_UINT_FAST64   = 45
PRIM_INTMAX        = 46
PRIM_UINTMAX       = 47
PRIM_FLOATCOMPLEX  = 48
PRIM_DOUBLECOMPLEX = 49
PRIM_CHAR16        = 50
PRIM_CHAR32        = 51

_NUM_PRIM            = 52
_UNKNOWN_PRIM        = -1
_UNKNOWN_FLOAT_PRIM  = -2
_UNKNOWN_LONG_DOUBLE = -3

_IO_FILE_STRUCT      = -1

PRIMITIVE_TO_INDEX = {
    'char':               PRIM_CHAR,
    'short':              PRIM_SHORT,
    'int':                PRIM_INT,
    'long':               PRIM_LONG,
    'long long':          PRIM_LONGLONG,
    'signed char':        PRIM_SCHAR,
    'unsigned char':      PRIM_UCHAR,
    'unsigned short':     PRIM_USHORT,
    'unsigned int':       PRIM_UINT,
    'unsigned long':      PRIM_ULONG,
    'unsigned long long': PRIM_ULONGLONG,
    'float':              PRIM_FLOAT,
    'double':             PRIM_DOUBLE,
    'long double':        PRIM_LONGDOUBLE,
    'float _Complex':     PRIM_FLOATCOMPLEX,
    'double _Complex':    PRIM_DOUBLECOMPLEX,
    '_Bool':              PRIM_BOOL,
    'wchar_t':            PRIM_WCHAR,
    'char16_t':           PRIM_CHAR16,
    'char32_t':           PRIM_CHAR32,
    'int8_t':             PRIM_INT8,
    'uint8_t':            PRIM_UINT8,
    'int16_t':            PRIM_INT16,
    'uint16_t':           PRIM_UINT16,
    'int32_t':            PRIM_INT32,
    'uint32_t':           PRIM_UINT32,
    'int64_t':            PRIM_INT64,
    'uint64_t':           PRIM_UINT64,
    'intptr_t':           PRIM_INTPTR,
    'uintptr_t':          PRIM_UINTPTR,
    'ptrdiff_t':          PRIM_PTRDIFF,
    'size_t':             PRIM_SIZE,
    'ssize_t':            PRIM_SSIZE,
    'int_least8_t':       PRIM_INT_LEAST8,
    'uint_least8_t':      PRIM_UINT_LEAST8,
    'int_least16_t':      PRIM_INT_LEAST16,
    'uint_least16_t':     PRIM_UINT_LEAST16,
    'int_least32_t':      PRIM_INT_LEAST32,
    'uint_least32_t':     PRIM_UINT_LEAST32,
    'int_least64_t':      PRIM_INT_LEAST64,
    'uint_least64_t':     PRIM_UINT_LEAST64,
    'int_fast8_t':        PRIM_INT_FAST8,
    'uint_fast8_t':       PRIM_UINT_FAST8,
    'int_fast16_t':       PRIM_INT_FAST16,
    'uint_fast16_t':      PRIM_UINT_FAST16,
    'int_fast32_t':       PRIM_INT_FAST32,
    'uint_fast32_t':      PRIM_UINT_FAST32,
    'int_fast64_t':       PRIM_INT_FAST64,
    'uint_fast64_t':      PRIM_UINT_FAST64,
    'intmax_t':           PRIM_INTMAX,
    'uintmax_t':          PRIM_UINTMAX,
    }

F_UNION         = 0x01
F_CHECK_FIELDS  = 0x02
F_PACKED        = 0x04
F_EXTERNAL      = 0x08
F_OPAQUE        = 0x10

G_FLAGS = dict([('_CFFI_' + _key, globals()[_key])
                for _key in ['F_UNION', 'F_CHECK_FIELDS', 'F_PACKED',
                             'F_EXTERNAL', 'F_OPAQUE']])

CLASS_NAME = {}
for _name, _value in list(globals().items()):
    if _name.startswith('OP_') and isinstance(_value, int):
        CLASS_NAME[_value] = _name[3:]
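
A quick illustration (a sketch, not part of this commit; assumes cffi is installed): as_python_bytes() packs an opcode and its argument into four big-endian bytes as (arg << 8) | op, rendered as escaped text.

    # Sketch only: encode OP_POINTER (3) with argument 5.
    # (5 << 8) | 3 == 0x00000503, so the escaped string is '\x00\x00\x05\x03'.
    from cffi.cffi_opcode import CffiOp, OP_POINTER

    op = CffiOp(OP_POINTER, 5)
    assert op.as_python_bytes() == '\\x00\\x00\\x05\\x03'
    print(op)   # (POINTER 5)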
@ -0,0 +1,80 @@
import sys

from . import model
from .error import FFIError


COMMON_TYPES = {}

try:
    # fetch "bool" and all simple Windows types
    from _cffi_backend import _get_common_types
    _get_common_types(COMMON_TYPES)
except ImportError:
    pass

COMMON_TYPES['FILE'] = model.unknown_type('FILE', '_IO_FILE')
COMMON_TYPES['bool'] = '_Bool'    # in case we got ImportError above

for _type in model.PrimitiveType.ALL_PRIMITIVE_TYPES:
    if _type.endswith('_t'):
        COMMON_TYPES[_type] = _type
del _type

_CACHE = {}

def resolve_common_type(parser, commontype):
    try:
        return _CACHE[commontype]
    except KeyError:
        cdecl = COMMON_TYPES.get(commontype, commontype)
        if not isinstance(cdecl, str):
            result, quals = cdecl, 0    # cdecl is already a BaseType
        elif cdecl in model.PrimitiveType.ALL_PRIMITIVE_TYPES:
            result, quals = model.PrimitiveType(cdecl), 0
        elif cdecl == 'set-unicode-needed':
            raise FFIError("The Windows type %r is only available after "
                           "you call ffi.set_unicode()" % (commontype,))
        else:
            if commontype == cdecl:
                raise FFIError(
                    "Unsupported type: %r.  Please look at "
                    "http://cffi.readthedocs.io/en/latest/cdef.html#ffi-cdef-limitations "
                    "and file an issue if you think this type should really "
                    "be supported." % (commontype,))
            result, quals = parser.parse_type_and_quals(cdecl)   # recursive

        assert isinstance(result, model.BaseTypeByIdentity)
        _CACHE[commontype] = result, quals
        return result, quals


# ____________________________________________________________
# extra types for Windows (most of them are in commontypes.c)


def win_common_types():
    return {
        "UNICODE_STRING": model.StructType(
            "_UNICODE_STRING",
            ["Length",
             "MaximumLength",
             "Buffer"],
            [model.PrimitiveType("unsigned short"),
             model.PrimitiveType("unsigned short"),
             model.PointerType(model.PrimitiveType("wchar_t"))],
            [-1, -1, -1]),
        "PUNICODE_STRING": "UNICODE_STRING *",
        "PCUNICODE_STRING": "const UNICODE_STRING *",

        "TBYTE": "set-unicode-needed",
        "TCHAR": "set-unicode-needed",
        "LPCTSTR": "set-unicode-needed",
        "PCTSTR": "set-unicode-needed",
        "LPTSTR": "set-unicode-needed",
        "PTSTR": "set-unicode-needed",
        "PTBYTE": "set-unicode-needed",
        "PTCHAR": "set-unicode-needed",
        }

if sys.platform == 'win32':
    COMMON_TYPES.update(win_common_types())
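
A small sketch of how resolve_common_type() behaves in the simple cases (not part of this commit; private API, assumes cffi is importable). The parser argument is only consulted for compound typedefs, so None suffices for primitives:

    # Sketch only: primitive names resolve without a parser, and are cached.
    from cffi.commontypes import resolve_common_type

    tp, quals = resolve_common_type(None, 'uint32_t')
    print(tp, quals)   # <uint32_t> 0 -- a model.PrimitiveType instance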
@ -0,0 +1,963 @@
from . import model
from .commontypes import COMMON_TYPES, resolve_common_type
from .error import FFIError, CDefError
try:
    from . import _pycparser as pycparser
except ImportError:
    import pycparser
import weakref, re, sys

try:
    if sys.version_info < (3,):
        import thread as _thread
    else:
        import _thread
    lock = _thread.allocate_lock()
except ImportError:
    lock = None

def _workaround_for_static_import_finders():
    # Issue #392: packaging tools like cx_Freeze cannot find these
    # because pycparser uses exec dynamic import.  This is an obscure
    # workaround.  This function is never called.
    import pycparser.yacctab
    import pycparser.lextab

CDEF_SOURCE_STRING = "<cdef source string>"
_r_comment = re.compile(r"/\*.*?\*/|//([^\n\\]|\\.)*?$",
                        re.DOTALL | re.MULTILINE)
_r_define = re.compile(r"^\s*#\s*define\s+([A-Za-z_][A-Za-z_0-9]*)"
                       r"\b((?:[^\n\\]|\\.)*?)$",
                       re.DOTALL | re.MULTILINE)
_r_partial_enum = re.compile(r"=\s*\.\.\.\s*[,}]|\.\.\.\s*\}")
_r_enum_dotdotdot = re.compile(r"__dotdotdot\d+__$")
_r_partial_array = re.compile(r"\[\s*\.\.\.\s*\]")
_r_words = re.compile(r"\w+|\S")
_parser_cache = None
_r_int_literal = re.compile(r"-?0?x?[0-9a-f]+[lu]*$", re.IGNORECASE)
_r_stdcall1 = re.compile(r"\b(__stdcall|WINAPI)\b")
_r_stdcall2 = re.compile(r"[(]\s*(__stdcall|WINAPI)\b")
_r_cdecl = re.compile(r"\b__cdecl\b")
_r_extern_python = re.compile(r'\bextern\s*"'
                              r'(Python|Python\s*\+\s*C|C\s*\+\s*Python)"\s*.')
_r_star_const_space = re.compile(       # matches "* const "
    r"[*]\s*((const|volatile|restrict)\b\s*)+")
_r_int_dotdotdot = re.compile(r"(\b(int|long|short|signed|unsigned|char)\s*)+"
                              r"\.\.\.")
_r_float_dotdotdot = re.compile(r"\b(double|float)\s*\.\.\.")

def _get_parser():
    global _parser_cache
    if _parser_cache is None:
        _parser_cache = pycparser.CParser()
    return _parser_cache

def _workaround_for_old_pycparser(csource):
    # Workaround for a pycparser issue (fixed between pycparser 2.10 and
    # 2.14): "char*const***" gives us a wrong syntax tree, the same as
    # for "char***(*const)".  This means we can't tell the difference
    # afterwards.  But "char(*const(***))" gives us the right syntax
    # tree.  The issue only occurs if there are several stars in
    # sequence with no parenthesis in between, just possibly qualifiers.
    # Attempt to fix it by adding some parentheses in the source: each
    # time we see "* const" or "* const *", we add an opening
    # parenthesis before each star---the hard part is figuring out where
    # to close them.
    parts = []
    while True:
        match = _r_star_const_space.search(csource)
        if not match:
            break
        #print repr(''.join(parts)+csource), '=>',
        parts.append(csource[:match.start()])
        parts.append('('); closing = ')'
        parts.append(match.group())   # e.g. "* const "
        endpos = match.end()
        if csource.startswith('*', endpos):
            parts.append('('); closing += ')'
        level = 0
        i = endpos
        while i < len(csource):
            c = csource[i]
            if c == '(':
                level += 1
            elif c == ')':
                if level == 0:
                    break
                level -= 1
            elif c in ',;=':
                if level == 0:
                    break
            i += 1
        csource = csource[endpos:i] + closing + csource[i:]
        #print repr(''.join(parts)+csource)
    parts.append(csource)
    return ''.join(parts)

def _preprocess_extern_python(csource):
    # input: `extern "Python" int foo(int);` or
    #        `extern "Python" { int foo(int); }`
    # output:
    #     void __cffi_extern_python_start;
    #     int foo(int);
    #     void __cffi_extern_python_stop;
    #
    # input: `extern "Python+C" int foo(int);`
    # output:
    #     void __cffi_extern_python_plus_c_start;
    #     int foo(int);
    #     void __cffi_extern_python_stop;
    parts = []
    while True:
        match = _r_extern_python.search(csource)
        if not match:
            break
        endpos = match.end() - 1
        #print
        #print ''.join(parts)+csource
        #print '=>'
        parts.append(csource[:match.start()])
        if 'C' in match.group(1):
            parts.append('void __cffi_extern_python_plus_c_start; ')
        else:
            parts.append('void __cffi_extern_python_start; ')
        if csource[endpos] == '{':
            # grouping variant
            closing = csource.find('}', endpos)
            if closing < 0:
                raise CDefError("'extern \"Python\" {': no '}' found")
            if csource.find('{', endpos + 1, closing) >= 0:
                raise NotImplementedError("cannot use { } inside a block "
                                          "'extern \"Python\" { ... }'")
            parts.append(csource[endpos+1:closing])
            csource = csource[closing+1:]
        else:
            # non-grouping variant
            semicolon = csource.find(';', endpos)
            if semicolon < 0:
                raise CDefError("'extern \"Python\": no ';' found")
            parts.append(csource[endpos:semicolon+1])
            csource = csource[semicolon+1:]
        parts.append(' void __cffi_extern_python_stop;')
        #print ''.join(parts)+csource
        #print
    parts.append(csource)
    return ''.join(parts)

def _warn_for_string_literal(csource):
    if '"' not in csource:
        return
    for line in csource.splitlines():
        if '"' in line and not line.lstrip().startswith('#'):
            import warnings
            warnings.warn("String literal found in cdef() or type source. "
                          "String literals are ignored here, but you should "
                          "remove them anyway because some character sequences "
                          "confuse pre-parsing.")
            break

def _warn_for_non_extern_non_static_global_variable(decl):
    if not decl.storage:
        import warnings
        warnings.warn("Global variable '%s' in cdef(): for consistency "
                      "with C it should have a storage class specifier "
                      "(usually 'extern')" % (decl.name,))

def _preprocess(csource):
    # Remove comments.  NOTE: this only works because the cdef() section
    # should not contain any string literals!
    csource = _r_comment.sub(' ', csource)
    # Remove the "#define FOO x" lines
    macros = {}
    for match in _r_define.finditer(csource):
        macroname, macrovalue = match.groups()
        macrovalue = macrovalue.replace('\\\n', '').strip()
        macros[macroname] = macrovalue
    csource = _r_define.sub('', csource)
    #
    if pycparser.__version__ < '2.14':
        csource = _workaround_for_old_pycparser(csource)
    #
    # BIG HACK: replace WINAPI or __stdcall with "volatile const".
    # It doesn't make sense for the return type of a function to be
    # "volatile volatile const", so we abuse it to detect __stdcall...
    # Hack number 2 is that "int(volatile *fptr)();" is not valid C
    # syntax, so we place the "volatile" before the opening parenthesis.
    csource = _r_stdcall2.sub(' volatile volatile const(', csource)
    csource = _r_stdcall1.sub(' volatile volatile const ', csource)
    csource = _r_cdecl.sub(' ', csource)
    #
    # Replace `extern "Python"` with start/end markers
    csource = _preprocess_extern_python(csource)
    #
    # Now there should not be any string literal left; warn if we get one
    _warn_for_string_literal(csource)
    #
    # Replace "[...]" with "[__dotdotdotarray__]"
    csource = _r_partial_array.sub('[__dotdotdotarray__]', csource)
    #
    # Replace "...}" with "__dotdotdotNUM__}".  This construction should
    # occur only at the end of enums; at the end of structs we have "...;}"
    # and at the end of vararg functions "...);".  Also replace "=...[,}]"
    # with ",__dotdotdotNUM__[,}]": this occurs in the enums too, when
    # giving an unknown value.
    matches = list(_r_partial_enum.finditer(csource))
    for number, match in enumerate(reversed(matches)):
        p = match.start()
        if csource[p] == '=':
            p2 = csource.find('...', p, match.end())
            assert p2 > p
            csource = '%s,__dotdotdot%d__ %s' % (csource[:p], number,
                                                 csource[p2+3:])
        else:
            assert csource[p:p+3] == '...'
            csource = '%s __dotdotdot%d__ %s' % (csource[:p], number,
                                                 csource[p+3:])
    # Replace "int ..." or "unsigned long int..." with "__dotdotdotint__"
    csource = _r_int_dotdotdot.sub(' __dotdotdotint__ ', csource)
    # Replace "float ..." or "double..." with "__dotdotdotfloat__"
    csource = _r_float_dotdotdot.sub(' __dotdotdotfloat__ ', csource)
    # Replace all remaining "..." with the same name, "__dotdotdot__",
    # which is declared with a typedef for the purpose of C parsing.
    return csource.replace('...', ' __dotdotdot__ '), macros
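
For concreteness, a sketch of what _preprocess() does to a small cdef snippet (not part of this commit; private API, assumes cffi and pycparser are installed): simple #defines are lifted into the returned macro dict and stripped from the source, and comments are blanked out.

    # Sketch only: _preprocess() separates simple #defines from the C source.
    from cffi.cparser import _preprocess

    csource, macros = _preprocess("""
        #define FOO 42
        extern int x;   /* comments are blanked out too */
    """)
    print(macros)            # {'FOO': '42'}
    print('FOO' in csource)  # False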

def _common_type_names(csource):
    # Look in the source for what looks like usages of types from the
    # list of common types.  A "usage" is approximated here as the
    # appearance of the word, minus a "definition" of the type, which
    # is the last word in a "typedef" statement.  Approximative only
    # but should be fine for all the common types.
    look_for_words = set(COMMON_TYPES)
    look_for_words.add(';')
    look_for_words.add(',')
    look_for_words.add('(')
    look_for_words.add(')')
    look_for_words.add('typedef')
    words_used = set()
    is_typedef = False
    paren = 0
    previous_word = ''
    for word in _r_words.findall(csource):
        if word in look_for_words:
            if word == ';':
                if is_typedef:
                    words_used.discard(previous_word)
                    look_for_words.discard(previous_word)
                    is_typedef = False
            elif word == 'typedef':
                is_typedef = True
                paren = 0
            elif word == '(':
                paren += 1
            elif word == ')':
                paren -= 1
            elif word == ',':
                if is_typedef and paren == 0:
                    words_used.discard(previous_word)
                    look_for_words.discard(previous_word)
            else:   # word in COMMON_TYPES
                words_used.add(word)
        previous_word = word
    return words_used


class Parser(object):

    def __init__(self):
        self._declarations = {}
        self._included_declarations = set()
        self._anonymous_counter = 0
        self._structnode2type = weakref.WeakKeyDictionary()
        self._options = {}
        self._int_constants = {}
        self._recomplete = []
        self._uses_new_feature = None

    def _parse(self, csource):
        csource, macros = _preprocess(csource)
        # XXX: for more efficiency we would need to poke into the
        # internals of CParser...  the following registers the
        # typedefs, because their presence or absence influences the
        # parsing itself (but what they are typedef'ed to plays no role)
        ctn = _common_type_names(csource)
        typenames = []
        for name in sorted(self._declarations):
            if name.startswith('typedef '):
                name = name[8:]
                typenames.append(name)
                ctn.discard(name)
        typenames += sorted(ctn)
        #
        csourcelines = []
        csourcelines.append('# 1 "<cdef automatic initialization code>"')
        for typename in typenames:
            csourcelines.append('typedef int %s;' % typename)
        csourcelines.append('typedef int __dotdotdotint__, __dotdotdotfloat__,'
                            ' __dotdotdot__;')
        # this forces pycparser to consider the following in the file
        # called <cdef source string> from line 1
        csourcelines.append('# 1 "%s"' % (CDEF_SOURCE_STRING,))
        csourcelines.append(csource)
        fullcsource = '\n'.join(csourcelines)
        if lock is not None:
            lock.acquire()     # pycparser is not thread-safe...
        try:
            ast = _get_parser().parse(fullcsource)
        except pycparser.c_parser.ParseError as e:
            self.convert_pycparser_error(e, csource)
        finally:
            if lock is not None:
                lock.release()
        # csource will be used to find buggy source text
        return ast, macros, csource

    def _convert_pycparser_error(self, e, csource):
        # xxx look for "<cdef source string>:NUM:" at the start of str(e)
        # and interpret that as a line number.  This will not work if
        # the user gives explicit ``# NUM "FILE"`` directives.
        line = None
        msg = str(e)
        match = re.match(r"%s:(\d+):" % (CDEF_SOURCE_STRING,), msg)
        if match:
            linenum = int(match.group(1), 10)
            csourcelines = csource.splitlines()
            if 1 <= linenum <= len(csourcelines):
                line = csourcelines[linenum-1]
        return line

    def convert_pycparser_error(self, e, csource):
        line = self._convert_pycparser_error(e, csource)

        msg = str(e)
        if line:
            msg = 'cannot parse "%s"\n%s' % (line.strip(), msg)
        else:
            msg = 'parse error\n%s' % (msg,)
        raise CDefError(msg)

    def parse(self, csource, override=False, packed=False, pack=None,
              dllexport=False):
        if packed:
            if packed != True:
                raise ValueError("'packed' should be False or True; use "
                                 "'pack' to give another value")
            if pack:
                raise ValueError("cannot give both 'pack' and 'packed'")
            pack = 1
        elif pack:
            if pack & (pack - 1):
                raise ValueError("'pack' must be a power of two, not %r" %
                                 (pack,))
        else:
            pack = 0
        prev_options = self._options
        try:
            self._options = {'override': override,
                             'packed': pack,
                             'dllexport': dllexport}
            self._internal_parse(csource)
        finally:
            self._options = prev_options

    def _internal_parse(self, csource):
        ast, macros, csource = self._parse(csource)
        # add the macros
        self._process_macros(macros)
        # find the first "__dotdotdot__" and use that as a separator
        # between the repeated typedefs and the real csource
        iterator = iter(ast.ext)
        for decl in iterator:
            if decl.name == '__dotdotdot__':
                break
        else:
            assert 0
        current_decl = None
        #
        try:
            self._inside_extern_python = '__cffi_extern_python_stop'
            for decl in iterator:
                current_decl = decl
                if isinstance(decl, pycparser.c_ast.Decl):
                    self._parse_decl(decl)
                elif isinstance(decl, pycparser.c_ast.Typedef):
                    if not decl.name:
                        raise CDefError("typedef does not declare any name",
                                        decl)
                    quals = 0
                    if (isinstance(decl.type.type, pycparser.c_ast.IdentifierType) and
                            decl.type.type.names[-1].startswith('__dotdotdot')):
                        realtype = self._get_unknown_type(decl)
                    elif (isinstance(decl.type, pycparser.c_ast.PtrDecl) and
                          isinstance(decl.type.type, pycparser.c_ast.TypeDecl) and
                          isinstance(decl.type.type.type,
                                     pycparser.c_ast.IdentifierType) and
                          decl.type.type.type.names[-1].startswith('__dotdotdot')):
                        realtype = self._get_unknown_ptr_type(decl)
                    else:
                        realtype, quals = self._get_type_and_quals(
                            decl.type, name=decl.name, partial_length_ok=True)
                    self._declare('typedef ' + decl.name, realtype, quals=quals)
                elif decl.__class__.__name__ == 'Pragma':
                    pass    # skip pragma, only in pycparser 2.15
                else:
                    raise CDefError("unexpected <%s>: this construct is valid "
                                    "C but not valid in cdef()" %
                                    decl.__class__.__name__, decl)
        except CDefError as e:
            if len(e.args) == 1:
                e.args = e.args + (current_decl,)
            raise
        except FFIError as e:
            msg = self._convert_pycparser_error(e, csource)
            if msg:
                e.args = (e.args[0] + "\n    *** Err: %s" % msg,)
            raise

    def _add_constants(self, key, val):
        if key in self._int_constants:
            if self._int_constants[key] == val:
                return     # ignore identical double declarations
            raise FFIError(
                "multiple declarations of constant: %s" % (key,))
        self._int_constants[key] = val

    def _add_integer_constant(self, name, int_str):
        int_str = int_str.lower().rstrip("ul")
        neg = int_str.startswith('-')
        if neg:
            int_str = int_str[1:]
        # "010" is not a valid octal literal in Python 3
        if (int_str.startswith("0") and int_str != '0'
                and not int_str.startswith("0x")):
            int_str = "0o" + int_str[1:]
        pyvalue = int(int_str, 0)
        if neg:
            pyvalue = -pyvalue
        self._add_constants(name, pyvalue)
        self._declare('macro ' + name, pyvalue)

    def _process_macros(self, macros):
        for key, value in macros.items():
            value = value.strip()
            if _r_int_literal.match(value):
                self._add_integer_constant(key, value)
            elif value == '...':
                self._declare('macro ' + key, value)
            else:
                raise CDefError(
                    'only supports one of the following syntaxes:\n'
                    '  #define %s ...     (literally dot-dot-dot)\n'
                    '  #define %s NUMBER  (with NUMBER an integer'
                    ' constant, decimal/hex/octal)\n'
                    'got:\n'
                    '  #define %s %s'
                    % (key, key, key, value))
    def _declare_function(self, tp, quals, decl):
        tp = self._get_type_pointer(tp, quals)
        if self._options.get('dllexport'):
            tag = 'dllexport_python '
        elif self._inside_extern_python == '__cffi_extern_python_start':
            tag = 'extern_python '
        elif self._inside_extern_python == '__cffi_extern_python_plus_c_start':
            tag = 'extern_python_plus_c '
        else:
            tag = 'function '
        self._declare(tag + decl.name, tp)

    def _parse_decl(self, decl):
        node = decl.type
        if isinstance(node, pycparser.c_ast.FuncDecl):
            tp, quals = self._get_type_and_quals(node, name=decl.name)
            assert isinstance(tp, model.RawFunctionType)
            self._declare_function(tp, quals, decl)
        else:
            if isinstance(node, pycparser.c_ast.Struct):
                self._get_struct_union_enum_type('struct', node)
            elif isinstance(node, pycparser.c_ast.Union):
                self._get_struct_union_enum_type('union', node)
            elif isinstance(node, pycparser.c_ast.Enum):
                self._get_struct_union_enum_type('enum', node)
            elif not decl.name:
                raise CDefError("construct does not declare any variable",
                                decl)
            #
            if decl.name:
                tp, quals = self._get_type_and_quals(node,
                                                     partial_length_ok=True)
                if tp.is_raw_function:
                    self._declare_function(tp, quals, decl)
                elif (tp.is_integer_type() and
                        hasattr(decl, 'init') and
                        hasattr(decl.init, 'value') and
                        _r_int_literal.match(decl.init.value)):
                    self._add_integer_constant(decl.name, decl.init.value)
                elif (tp.is_integer_type() and
                        isinstance(decl.init, pycparser.c_ast.UnaryOp) and
                        decl.init.op == '-' and
                        hasattr(decl.init.expr, 'value') and
                        _r_int_literal.match(decl.init.expr.value)):
                    self._add_integer_constant(decl.name,
                                               '-' + decl.init.expr.value)
                elif (tp is model.void_type and
                      decl.name.startswith('__cffi_extern_python_')):
                    # hack: `extern "Python"` in the C source is replaced
                    # with "void __cffi_extern_python_start;" and
                    # "void __cffi_extern_python_stop;"
                    self._inside_extern_python = decl.name
                else:
                    if self._inside_extern_python != '__cffi_extern_python_stop':
                        raise CDefError(
                            "cannot declare constants or "
                            "variables with 'extern \"Python\"'")
                    if (quals & model.Q_CONST) and not tp.is_array_type:
                        self._declare('constant ' + decl.name, tp, quals=quals)
                    else:
                        _warn_for_non_extern_non_static_global_variable(decl)
                        self._declare('variable ' + decl.name, tp, quals=quals)

    def parse_type(self, cdecl):
        return self.parse_type_and_quals(cdecl)[0]

    def parse_type_and_quals(self, cdecl):
        ast, macros = self._parse('void __dummy(\n%s\n);' % cdecl)[:2]
        assert not macros
        exprnode = ast.ext[-1].type.args.params[0]
        if isinstance(exprnode, pycparser.c_ast.ID):
            raise CDefError("unknown identifier '%s'" % (exprnode.name,))
        return self._get_type_and_quals(exprnode.type)

    def _declare(self, name, obj, included=False, quals=0):
        if name in self._declarations:
            prevobj, prevquals = self._declarations[name]
            if prevobj is obj and prevquals == quals:
                return
            if not self._options.get('override'):
                raise FFIError(
                    "multiple declarations of %s (for interactive usage, "
                    "try cdef(xx, override=True))" % (name,))
        assert '__dotdotdot__' not in name.split()
        self._declarations[name] = (obj, quals)
        if included:
            self._included_declarations.add(obj)

    def _extract_quals(self, type):
        quals = 0
        if isinstance(type, (pycparser.c_ast.TypeDecl,
                             pycparser.c_ast.PtrDecl)):
            if 'const' in type.quals:
                quals |= model.Q_CONST
            if 'volatile' in type.quals:
                quals |= model.Q_VOLATILE
            if 'restrict' in type.quals:
                quals |= model.Q_RESTRICT
        return quals

    def _get_type_pointer(self, type, quals, declname=None):
        if isinstance(type, model.RawFunctionType):
            return type.as_function_pointer()
        if (isinstance(type, model.StructOrUnionOrEnum) and
                type.name.startswith('$') and type.name[1:].isdigit() and
                type.forcename is None and declname is not None):
            return model.NamedPointerType(type, declname, quals)
        return model.PointerType(type, quals)

    def _get_type_and_quals(self, typenode, name=None, partial_length_ok=False):
        # first, dereference typedefs, if we have it already parsed, we're good
        if (isinstance(typenode, pycparser.c_ast.TypeDecl) and
                isinstance(typenode.type, pycparser.c_ast.IdentifierType) and
                len(typenode.type.names) == 1 and
                ('typedef ' + typenode.type.names[0]) in self._declarations):
            tp, quals = self._declarations['typedef ' + typenode.type.names[0]]
            quals |= self._extract_quals(typenode)
            return tp, quals
        #
        if isinstance(typenode, pycparser.c_ast.ArrayDecl):
            # array type
            if typenode.dim is None:
                length = None
            else:
                length = self._parse_constant(
                    typenode.dim, partial_length_ok=partial_length_ok)
            tp, quals = self._get_type_and_quals(typenode.type,
                                partial_length_ok=partial_length_ok)
            return model.ArrayType(tp, length), quals
        #
        if isinstance(typenode, pycparser.c_ast.PtrDecl):
            # pointer type
            itemtype, itemquals = self._get_type_and_quals(typenode.type)
            tp = self._get_type_pointer(itemtype, itemquals, declname=name)
            quals = self._extract_quals(typenode)
            return tp, quals
        #
        if isinstance(typenode, pycparser.c_ast.TypeDecl):
            quals = self._extract_quals(typenode)
            type = typenode.type
            if isinstance(type, pycparser.c_ast.IdentifierType):
                # assume a primitive type.  get it from .names, but reduce
                # synonyms to a single chosen combination
                names = list(type.names)
                if names != ['signed', 'char']:    # keep this unmodified
                    prefixes = {}
                    while names:
                        name = names[0]
                        if name in ('short', 'long', 'signed', 'unsigned'):
                            prefixes[name] = prefixes.get(name, 0) + 1
                            del names[0]
                        else:
                            break
                    # ignore the 'signed' prefix below, and reorder the others
                    newnames = []
                    for prefix in ('unsigned', 'short', 'long'):
                        for i in range(prefixes.get(prefix, 0)):
                            newnames.append(prefix)
                    if not names:
                        names = ['int']    # implicitly
                    if names == ['int']:   # but kill it if 'short' or 'long'
                        if 'short' in prefixes or 'long' in prefixes:
                            names = []
                    names = newnames + names
                ident = ' '.join(names)
                if ident == 'void':
                    return model.void_type, quals
                if ident == '__dotdotdot__':
                    raise FFIError(':%d: bad usage of "..."' %
                                   typenode.coord.line)
                tp0, quals0 = resolve_common_type(self, ident)
                return tp0, (quals | quals0)
            #
            if isinstance(type, pycparser.c_ast.Struct):
                # 'struct foobar'
                tp = self._get_struct_union_enum_type('struct', type, name)
                return tp, quals
            #
            if isinstance(type, pycparser.c_ast.Union):
                # 'union foobar'
                tp = self._get_struct_union_enum_type('union', type, name)
                return tp, quals
            #
            if isinstance(type, pycparser.c_ast.Enum):
                # 'enum foobar'
                tp = self._get_struct_union_enum_type('enum', type, name)
                return tp, quals
        #
        if isinstance(typenode, pycparser.c_ast.FuncDecl):
            # a function type
            return self._parse_function_type(typenode, name), 0
        #
        # nested anonymous structs or unions end up here
        if isinstance(typenode, pycparser.c_ast.Struct):
            return self._get_struct_union_enum_type('struct', typenode, name,
                                                    nested=True), 0
        if isinstance(typenode, pycparser.c_ast.Union):
            return self._get_struct_union_enum_type('union', typenode, name,
                                                    nested=True), 0
        #
        raise FFIError(":%d: bad or unsupported type declaration" %
                       typenode.coord.line)
    def _parse_function_type(self, typenode, funcname=None):
        params = list(getattr(typenode.args, 'params', []))
        for i, arg in enumerate(params):
            if not hasattr(arg, 'type'):
                raise CDefError("%s arg %d: unknown type '%s'"
                    " (if you meant to use the old C syntax of giving"
                    " untyped arguments, it is not supported)"
                    % (funcname or 'in expression', i + 1,
                       getattr(arg, 'name', '?')))
        ellipsis = (
            len(params) > 0 and
            isinstance(params[-1].type, pycparser.c_ast.TypeDecl) and
            isinstance(params[-1].type.type,
                       pycparser.c_ast.IdentifierType) and
            params[-1].type.type.names == ['__dotdotdot__'])
        if ellipsis:
            params.pop()
            if not params:
                raise CDefError(
                    "%s: a function with only '(...)' as argument"
                    " is not correct C" % (funcname or 'in expression'))
        args = [self._as_func_arg(*self._get_type_and_quals(argdeclnode.type))
                for argdeclnode in params]
        if not ellipsis and args == [model.void_type]:
            args = []
        result, quals = self._get_type_and_quals(typenode.type)
        # the 'quals' on the result type are ignored.  HACK: we abuse them
        # to detect __stdcall functions: we textually replace "__stdcall"
        # with "volatile volatile const" above.
        abi = None
        if hasattr(typenode.type, 'quals'):  # else, probable syntax error anyway
            if typenode.type.quals[-3:] == ['volatile', 'volatile', 'const']:
                abi = '__stdcall'
        return model.RawFunctionType(tuple(args), result, ellipsis, abi)

    def _as_func_arg(self, type, quals):
        if isinstance(type, model.ArrayType):
            return model.PointerType(type.item, quals)
        elif isinstance(type, model.RawFunctionType):
            return type.as_function_pointer()
        else:
            return type

    def _get_struct_union_enum_type(self, kind, type, name=None, nested=False):
        # First, a level of caching on the exact 'type' node of the AST.
        # This is obscure, but needed because pycparser "unrolls" declarations
        # such as "typedef struct { } foo_t, *foo_p" and we end up with
        # an AST that is not a tree, but a DAG, with the "type" node of the
        # two branches foo_t and foo_p of the trees being the same node.
        # It's a bit silly but detecting "DAG-ness" in the AST tree seems
        # to be the only way to distinguish this case from two independent
        # structs.  See test_struct_with_two_usages.
        try:
            return self._structnode2type[type]
        except KeyError:
            pass
        #
        # Note that this must handle parsing "struct foo" any number of
        # times and always return the same StructType object.  Additionally,
        # one of these times (not necessarily the first), the fields of
        # the struct can be specified with "struct foo { ...fields... }".
        # If no name is given, then we have to create a new anonymous struct
        # with no caching; in this case, the fields are either specified
        # right now or never.
        #
        force_name = name
        name = type.name
        #
        # get the type or create it if needed
        if name is None:
            # 'force_name' is used to guess a more readable name for
            # anonymous structs, for the common case "typedef struct { } foo".
            if force_name is not None:
                explicit_name = '$%s' % force_name
            else:
                self._anonymous_counter += 1
                explicit_name = '$%d' % self._anonymous_counter
            tp = None
        else:
            explicit_name = name
            key = '%s %s' % (kind, name)
            tp, _ = self._declarations.get(key, (None, None))
        #
        if tp is None:
            if kind == 'struct':
                tp = model.StructType(explicit_name, None, None, None)
            elif kind == 'union':
                tp = model.UnionType(explicit_name, None, None, None)
            elif kind == 'enum':
                if explicit_name == '__dotdotdot__':
                    raise CDefError("Enums cannot be declared with ...")
                tp = self._build_enum_type(explicit_name, type.values)
            else:
                raise AssertionError("kind = %r" % (kind,))
            if name is not None:
                self._declare(key, tp)
        else:
            if kind == 'enum' and type.values is not None:
                raise NotImplementedError(
                    "enum %s: the '{}' declaration should appear on the first "
                    "time the enum is mentioned, not later" % explicit_name)
        if not tp.forcename:
            tp.force_the_name(force_name)
        if tp.forcename and '$' in tp.name:
            self._declare('anonymous %s' % tp.forcename, tp)
        #
        self._structnode2type[type] = tp
        #
        # enums: done here
        if kind == 'enum':
            return tp
        #
        # is there a 'type.decls'?  If yes, then this is the place in the
        # C sources that declare the fields.  If no, then just return the
        # existing type, possibly still incomplete.
        if type.decls is None:
            return tp
        #
        if tp.fldnames is not None:
            raise CDefError("duplicate declaration of struct %s" % name)
        fldnames = []
        fldtypes = []
        fldbitsize = []
        fldquals = []
        for decl in type.decls:
            if (isinstance(decl.type, pycparser.c_ast.IdentifierType) and
                    ''.join(decl.type.names) == '__dotdotdot__'):
                # XXX pycparser is inconsistent: 'names' should be a list
                # of strings, but is sometimes just one string.  Use
                # str.join() as a way to cope with both.
                self._make_partial(tp, nested)
                continue
            if decl.bitsize is None:
                bitsize = -1
            else:
                bitsize = self._parse_constant(decl.bitsize)
            self._partial_length = False
            type, fqual = self._get_type_and_quals(decl.type,
                                                   partial_length_ok=True)
            if self._partial_length:
                self._make_partial(tp, nested)
            if isinstance(type, model.StructType) and type.partial:
                self._make_partial(tp, nested)
            fldnames.append(decl.name or '')
            fldtypes.append(type)
            fldbitsize.append(bitsize)
            fldquals.append(fqual)
        tp.fldnames = tuple(fldnames)
        tp.fldtypes = tuple(fldtypes)
        tp.fldbitsize = tuple(fldbitsize)
        tp.fldquals = tuple(fldquals)
        if fldbitsize != [-1] * len(fldbitsize):
            if isinstance(tp, model.StructType) and tp.partial:
                raise NotImplementedError("%s: using both bitfields and '...;'"
                                          % (tp,))
        tp.packed = self._options.get('packed')
        if tp.completed:    # must be re-completed: it is not opaque any more
            tp.completed = 0
            self._recomplete.append(tp)
        return tp

    def _make_partial(self, tp, nested):
        if not isinstance(tp, model.StructOrUnion):
            raise CDefError("%s cannot be partial" % (tp,))
        if not tp.has_c_name() and not nested:
            raise NotImplementedError("%s is partial but has no C name" % (tp,))
        tp.partial = True

    def _parse_constant(self, exprnode, partial_length_ok=False):
        # for now, limited to expressions that are an immediate number
        # or positive/negative number
        if isinstance(exprnode, pycparser.c_ast.Constant):
            s = exprnode.value
            if '0' <= s[0] <= '9':
                s = s.rstrip('uUlL')
                try:
                    if s.startswith('0'):
                        return int(s, 8)
                    else:
                        return int(s, 10)
                except ValueError:
                    if len(s) > 1:
                        if s.lower()[0:2] == '0x':
                            return int(s, 16)
                        elif s.lower()[0:2] == '0b':
                            return int(s, 2)
                raise CDefError("invalid constant %r" % (s,))
            elif s[0] == "'" and s[-1] == "'" and (
                    len(s) == 3 or (len(s) == 4 and s[1] == "\\")):
                return ord(s[-2])
            else:
                raise CDefError("invalid constant %r" % (s,))
        #
        if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and
                exprnode.op == '+'):
            return self._parse_constant(exprnode.expr)
        #
        if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and
                exprnode.op == '-'):
            return -self._parse_constant(exprnode.expr)
        # load previously defined int constant
        if (isinstance(exprnode, pycparser.c_ast.ID) and
                exprnode.name in self._int_constants):
            return self._int_constants[exprnode.name]
        #
        if (isinstance(exprnode, pycparser.c_ast.ID) and
                exprnode.name == '__dotdotdotarray__'):
            if partial_length_ok:
                self._partial_length = True
                return '...'
            raise FFIError(":%d: unsupported '[...]' here, cannot derive "
                           "the actual array length in this context"
                           % exprnode.coord.line)
        #
        if isinstance(exprnode, pycparser.c_ast.BinaryOp):
            left = self._parse_constant(exprnode.left)
            right = self._parse_constant(exprnode.right)
            if exprnode.op == '+':
                return left + right
            elif exprnode.op == '-':
                return left - right
            elif exprnode.op == '*':
                return left * right
            elif exprnode.op == '/':
                return self._c_div(left, right)
            elif exprnode.op == '%':
                return left - self._c_div(left, right) * right
            elif exprnode.op == '<<':
                return left << right
            elif exprnode.op == '>>':
                return left >> right
            elif exprnode.op == '&':
                return left & right
            elif exprnode.op == '|':
                return left | right
            elif exprnode.op == '^':
                return left ^ right
        #
        raise FFIError(":%d: unsupported expression: expected a "
                       "simple numeric constant" % exprnode.coord.line)

    def _c_div(self, a, b):
        result = a // b
        if ((a < 0) ^ (b < 0)) and (a % b) != 0:
            result += 1
        return result

    def _build_enum_type(self, explicit_name, decls):
        if decls is not None:
            partial = False
            enumerators = []
            enumvalues = []
            nextenumvalue = 0
            for enum in decls.enumerators:
                if _r_enum_dotdotdot.match(enum.name):
                    partial = True
                    continue
                if enum.value is not None:
                    nextenumvalue = self._parse_constant(enum.value)
                enumerators.append(enum.name)
                enumvalues.append(nextenumvalue)
                self._add_constants(enum.name, nextenumvalue)
                nextenumvalue += 1
            enumerators = tuple(enumerators)
            enumvalues = tuple(enumvalues)
            tp = model.EnumType(explicit_name, enumerators, enumvalues)
            tp.partial = partial
        else:   # opaque enum
            tp = model.EnumType(explicit_name, (), ())
        return tp

    def include(self, other):
        for name, (tp, quals) in other._declarations.items():
            if name.startswith('anonymous $enum_$'):
                continue   # fix for test_anonymous_enum_include
            kind = name.split(' ', 1)[0]
            if kind in ('struct', 'union', 'enum', 'anonymous', 'typedef'):
                self._declare(name, tp, included=True, quals=quals)
        for k, v in other._int_constants.items():
            self._add_constants(k, v)

    def _get_unknown_type(self, decl):
        typenames = decl.type.type.names
        if typenames == ['__dotdotdot__']:
            return model.unknown_type(decl.name)

        if typenames == ['__dotdotdotint__']:
            if self._uses_new_feature is None:
                self._uses_new_feature = "'typedef int... %s'" % decl.name
            return model.UnknownIntegerType(decl.name)

        if typenames == ['__dotdotdotfloat__']:
            # note: not for 'long double' so far
            if self._uses_new_feature is None:
                self._uses_new_feature = "'typedef float... %s'" % decl.name
            return model.UnknownFloatType(decl.name)

        raise FFIError(':%d: unsupported usage of "..." in typedef'
                       % decl.coord.line)

    def _get_unknown_ptr_type(self, decl):
        if decl.type.type.type.names == ['__dotdotdot__']:
            return model.unknown_ptr_type(decl.name)
        raise FFIError(':%d: unsupported usage of "..." in typedef'
                       % decl.coord.line)
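
One subtle point above: _parse_constant() evaluates '/' and '%' through _c_div(), which reproduces C's truncation-toward-zero rather than Python's floor division. A standalone sketch of the same rule (hypothetical helper name, mirroring the method above; not part of this commit):

    # Sketch: C-style integer division truncates toward zero.
    def c_div(a, b):
        result = a // b                       # Python floors (toward -inf)
        if ((a < 0) ^ (b < 0)) and (a % b) != 0:
            result += 1                       # correct back toward zero
        return result

    assert (-7) // 2 == -4     # Python semantics
    assert c_div(-7, 2) == -3  # C semantics, as _c_div computes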
@ -0,0 +1,31 @@

class FFIError(Exception):
    __module__ = 'cffi'

class CDefError(Exception):
    __module__ = 'cffi'
    def __str__(self):
        try:
            current_decl = self.args[1]
            filename = current_decl.coord.file
            linenum = current_decl.coord.line
            prefix = '%s:%d: ' % (filename, linenum)
        except (AttributeError, TypeError, IndexError):
            prefix = ''
        return '%s%s' % (prefix, self.args[0])

class VerificationError(Exception):
    """ An error raised when verification fails
    """
    __module__ = 'cffi'

class VerificationMissing(Exception):
    """ An error raised when incomplete structures are passed into
    cdef, but no verification has been done
    """
    __module__ = 'cffi'

class PkgConfigError(Exception):
    """ An error raised for missing modules in pkg-config
    """
    __module__ = 'cffi'
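
A quick sketch of the CDefError.__str__ behavior above (not part of this commit): when a pycparser declaration, or anything with a .coord attribute, is attached as the second argument, the message gains a file:line prefix. The stand-in classes below are hypothetical, only there to satisfy the attribute lookups:

    # Sketch only: exercise the file:line prefix logic of CDefError.__str__.
    from cffi.error import CDefError

    class FakeCoord:                 # hypothetical stand-in for a pycparser coord
        file, line = "<cdef source string>", 12
    class FakeDecl:
        coord = FakeCoord()

    print(CDefError("bad type"))               # bad type
    print(CDefError("bad type", FakeDecl()))   # <cdef source string>:12: bad type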
@ -0,0 +1,127 @@
import sys, os
from .error import VerificationError


LIST_OF_FILE_NAMES = ['sources', 'include_dirs', 'library_dirs',
                      'extra_objects', 'depends']

def get_extension(srcfilename, modname, sources=(), **kwds):
    _hack_at_distutils()
    from distutils.core import Extension
    allsources = [srcfilename]
    for src in sources:
        allsources.append(os.path.normpath(src))
    return Extension(name=modname, sources=allsources, **kwds)

def compile(tmpdir, ext, compiler_verbose=0, debug=None):
    """Compile a C extension module using distutils."""

    _hack_at_distutils()
    saved_environ = os.environ.copy()
    try:
        outputfilename = _build(tmpdir, ext, compiler_verbose, debug)
        outputfilename = os.path.abspath(outputfilename)
    finally:
        # workaround for a distutils bug where some env vars can
        # become longer and longer every time it is used
        for key, value in saved_environ.items():
            if os.environ.get(key) != value:
                os.environ[key] = value
    return outputfilename

def _build(tmpdir, ext, compiler_verbose=0, debug=None):
    # XXX compact but horrible :-(
    from distutils.core import Distribution
    import distutils.errors, distutils.log
    #
    dist = Distribution({'ext_modules': [ext]})
    dist.parse_config_files()
    options = dist.get_option_dict('build_ext')
    if debug is None:
        debug = sys.flags.debug
    options['debug'] = ('ffiplatform', debug)
    options['force'] = ('ffiplatform', True)
    options['build_lib'] = ('ffiplatform', tmpdir)
    options['build_temp'] = ('ffiplatform', tmpdir)
    #
    try:
        old_level = distutils.log.set_threshold(0) or 0
        try:
            distutils.log.set_verbosity(compiler_verbose)
            dist.run_command('build_ext')
            cmd_obj = dist.get_command_obj('build_ext')
            [soname] = cmd_obj.get_outputs()
        finally:
            distutils.log.set_threshold(old_level)
    except (distutils.errors.CompileError,
            distutils.errors.LinkError) as e:
        raise VerificationError('%s: %s' % (e.__class__.__name__, e))
    #
    return soname

try:
    from os.path import samefile
except ImportError:
    def samefile(f1, f2):
        return os.path.abspath(f1) == os.path.abspath(f2)

def maybe_relative_path(path):
    if not os.path.isabs(path):
        return path      # already relative
    dir = path
    names = []
    while True:
        prevdir = dir
        dir, name = os.path.split(prevdir)
        if dir == prevdir or not dir:
            return path     # failed to make it relative
        names.append(name)
        try:
            if samefile(dir, os.curdir):
                names.reverse()
                return os.path.join(*names)
        except OSError:
            pass

# ____________________________________________________________

try:
    int_or_long = (int, long)
    import cStringIO
except NameError:
    int_or_long = int      # Python 3
    import io as cStringIO

def _flatten(x, f):
    if isinstance(x, str):
        f.write('%ds%s' % (len(x), x))
    elif isinstance(x, dict):
        keys = sorted(x.keys())
        f.write('%dd' % len(keys))
        for key in keys:
            _flatten(key, f)
            _flatten(x[key], f)
    elif isinstance(x, (list, tuple)):
        f.write('%dl' % len(x))
        for value in x:
            _flatten(value, f)
    elif isinstance(x, int_or_long):
        f.write('%di' % (x,))
    else:
        raise TypeError(
            "the keywords to verify() contain the unsupported object %r" % (x,))

def flatten(x):
    f = cStringIO.StringIO()
    _flatten(x, f)
    return f.getvalue()

def _hack_at_distutils():
    # Windows-only workaround for some configurations: see
    # https://bugs.python.org/issue23246 (Python 2.7 with
    # a specific MS compiler suite download)
    if sys.platform == "win32":
        try:
            import setuptools    # for side-effects, patches distutils
        except ImportError:
            pass
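
The flatten() helper above serializes the keyword arguments of verify() into a canonical string usable as a cache key. A sketch of its output format (not part of this commit): each record is a length prefix plus a one-letter type tag, and dict keys are sorted so the result is stable.

    # Sketch only: flatten() emits <len><tag> records; dicts sort their keys.
    from cffi.ffiplatform import flatten

    print(flatten({'sources': ['a.c'], 'libraries': ['m']}))
    # -> 2d9slibraries1l1sm7ssources1l3sa.c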
@ -0,0 +1,30 @@
import sys

if sys.version_info < (3,):
    try:
        from thread import allocate_lock
    except ImportError:
        from dummy_thread import allocate_lock
else:
    try:
        from _thread import allocate_lock
    except ImportError:
        from _dummy_thread import allocate_lock


##import sys
##l1 = allocate_lock

##class allocate_lock(object):
##    def __init__(self):
##        self._real = l1()
##    def __enter__(self):
##        for i in range(4, 0, -1):
##            print sys._getframe(i).f_code
##        print
##        return self._real.__enter__()
##    def __exit__(self, *args):
##        return self._real.__exit__(*args)
##    def acquire(self, f):
##        assert f is False
##        return self._real.acquire(f)
@ -0,0 +1,614 @@
import types
import weakref

from .lock import allocate_lock
from .error import CDefError, VerificationError, VerificationMissing

# type qualifiers
Q_CONST    = 0x01
Q_RESTRICT = 0x02
Q_VOLATILE = 0x04

def qualify(quals, replace_with):
    if quals & Q_CONST:
        replace_with = ' const ' + replace_with.lstrip()
    if quals & Q_VOLATILE:
        replace_with = ' volatile ' + replace_with.lstrip()
    if quals & Q_RESTRICT:
        # It seems that __restrict is supported by gcc and msvc.
        # If you hit some different compiler, add a #define in
        # _cffi_include.h for it (and in its copies, documented there)
        replace_with = ' __restrict ' + replace_with.lstrip()
    return replace_with


class BaseTypeByIdentity(object):
    is_array_type = False
    is_raw_function = False

    def get_c_name(self, replace_with='', context='a C file', quals=0):
        result = self.c_name_with_marker
        assert result.count('&') == 1
        # some logic duplication with ffi.getctype()... :-(
        replace_with = replace_with.strip()
        if replace_with:
            if replace_with.startswith('*') and '&[' in result:
                replace_with = '(%s)' % replace_with
            elif not replace_with[0] in '[(':
                replace_with = ' ' + replace_with
        replace_with = qualify(quals, replace_with)
        result = result.replace('&', replace_with)
        if '$' in result:
            raise VerificationError(
                "cannot generate '%s' in %s: unknown type name"
                % (self._get_c_name(), context))
        return result

    def _get_c_name(self):
        return self.c_name_with_marker.replace('&', '')

    def has_c_name(self):
        return '$' not in self._get_c_name()

    def is_integer_type(self):
        return False

    def get_cached_btype(self, ffi, finishlist, can_delay=False):
        try:
            BType = ffi._cached_btypes[self]
        except KeyError:
            BType = self.build_backend_type(ffi, finishlist)
            BType2 = ffi._cached_btypes.setdefault(self, BType)
            assert BType2 is BType
        return BType

    def __repr__(self):
        return '<%s>' % (self._get_c_name(),)

    def _get_items(self):
        return [(name, getattr(self, name)) for name in self._attrs_]


class BaseType(BaseTypeByIdentity):

    def __eq__(self, other):
        return (self.__class__ == other.__class__ and
                self._get_items() == other._get_items())

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        return hash((self.__class__, tuple(self._get_items())))


class VoidType(BaseType):
    _attrs_ = ()

    def __init__(self):
        self.c_name_with_marker = 'void&'

    def build_backend_type(self, ffi, finishlist):
        return global_cache(self, ffi, 'new_void_type')

void_type = VoidType()


class BasePrimitiveType(BaseType):
    def is_complex_type(self):
        return False


class PrimitiveType(BasePrimitiveType):
    _attrs_ = ('name',)

    ALL_PRIMITIVE_TYPES = {
        'char':               'c',
        'short':              'i',
        'int':                'i',
        'long':               'i',
        'long long':          'i',
        'signed char':        'i',
        'unsigned char':      'i',
        'unsigned short':     'i',
        'unsigned int':       'i',
        'unsigned long':      'i',
        'unsigned long long': 'i',
        'float':              'f',
        'double':             'f',
        'long double':        'f',
        'float _Complex':     'j',
        'double _Complex':    'j',
        '_Bool':              'i',
        # the following types are not primitive in the C sense
        'wchar_t':            'c',
        'char16_t':           'c',
        'char32_t':           'c',
        'int8_t':             'i',
        'uint8_t':            'i',
        'int16_t':            'i',
        'uint16_t':           'i',
        'int32_t':            'i',
        'uint32_t':           'i',
        'int64_t':            'i',
        'uint64_t':           'i',
        'int_least8_t':       'i',
        'uint_least8_t':      'i',
        'int_least16_t':      'i',
        'uint_least16_t':     'i',
        'int_least32_t':      'i',
        'uint_least32_t':     'i',
        'int_least64_t':      'i',
        'uint_least64_t':     'i',
        'int_fast8_t':        'i',
        'uint_fast8_t':       'i',
        'int_fast16_t':       'i',
        'uint_fast16_t':      'i',
        'int_fast32_t':       'i',
        'uint_fast32_t':      'i',
        'int_fast64_t':       'i',
        'uint_fast64_t':      'i',
        'intptr_t':           'i',
        'uintptr_t':          'i',
        'intmax_t':           'i',
        'uintmax_t':          'i',
        'ptrdiff_t':          'i',
        'size_t':             'i',
        'ssize_t':            'i',
        }

    def __init__(self, name):
        assert name in self.ALL_PRIMITIVE_TYPES
        self.name = name
        self.c_name_with_marker = name + '&'

    def is_char_type(self):
        return self.ALL_PRIMITIVE_TYPES[self.name] == 'c'
    def is_integer_type(self):
        return self.ALL_PRIMITIVE_TYPES[self.name] == 'i'
    def is_float_type(self):
        return self.ALL_PRIMITIVE_TYPES[self.name] == 'f'
    def is_complex_type(self):
        return self.ALL_PRIMITIVE_TYPES[self.name] == 'j'

    def build_backend_type(self, ffi, finishlist):
        return global_cache(self, ffi, 'new_primitive_type', self.name)


class UnknownIntegerType(BasePrimitiveType):
    _attrs_ = ('name',)

    def __init__(self, name):
        self.name = name
        self.c_name_with_marker = name + '&'

    def is_integer_type(self):
        return True

    def build_backend_type(self, ffi, finishlist):
        raise NotImplementedError("integer type '%s' can only be used after "
                                  "compilation" % self.name)

class UnknownFloatType(BasePrimitiveType):
    _attrs_ = ('name', )

    def __init__(self, name):
        self.name = name
        self.c_name_with_marker = name + '&'

    def build_backend_type(self, ffi, finishlist):
        raise NotImplementedError("float type '%s' can only be used after "
                                  "compilation" % self.name)


class BaseFunctionType(BaseType):
    _attrs_ = ('args', 'result', 'ellipsis', 'abi')

    def __init__(self, args, result, ellipsis, abi=None):
        self.args = args
        self.result = result
        self.ellipsis = ellipsis
        self.abi = abi
        #
        reprargs = [arg._get_c_name() for arg in self.args]
        if self.ellipsis:
            reprargs.append('...')
        reprargs = reprargs or ['void']
        replace_with = self._base_pattern % (', '.join(reprargs),)
        if abi is not None:
            replace_with = replace_with[:1] + abi + ' ' + replace_with[1:]
        self.c_name_with_marker = (
            self.result.c_name_with_marker.replace('&', replace_with))


class RawFunctionType(BaseFunctionType):
    # Corresponds to a C type like 'int(int)', which is the C type of
    # a function, but not a pointer-to-function.  The backend has no
    # notion of such a type; it's used temporarily by parsing.
    _base_pattern = '(&)(%s)'
    is_raw_function = True

    def build_backend_type(self, ffi, finishlist):
        raise CDefError("cannot render the type %r: it is a function "
                        "type, not a pointer-to-function type" % (self,))

    def as_function_pointer(self):
        return FunctionPtrType(self.args, self.result, self.ellipsis, self.abi)


class FunctionPtrType(BaseFunctionType):
    _base_pattern = '(*&)(%s)'

    def build_backend_type(self, ffi, finishlist):
        result = self.result.get_cached_btype(ffi, finishlist)
        args = []
        for tp in self.args:
            args.append(tp.get_cached_btype(ffi, finishlist))
        abi_args = ()
        if self.abi == "__stdcall":
            if not self.ellipsis:    # __stdcall ignored for variadic funcs
                try:
                    abi_args = (ffi._backend.FFI_STDCALL,)
                except AttributeError:
                    pass
        return global_cache(self, ffi, 'new_function_type',
                            tuple(args), result, self.ellipsis, *abi_args)

    def as_raw_function(self):
        return RawFunctionType(self.args, self.result, self.ellipsis, self.abi)


class PointerType(BaseType):
    _attrs_ = ('totype', 'quals')

    def __init__(self, totype, quals=0):
        self.totype = totype
        self.quals = quals
        extra = qualify(quals, " *&")
        if totype.is_array_type:
            extra = "(%s)" % (extra.lstrip(),)
        self.c_name_with_marker = totype.c_name_with_marker.replace('&', extra)

    def build_backend_type(self, ffi, finishlist):
        BItem = self.totype.get_cached_btype(ffi, finishlist, can_delay=True)
        return global_cache(self, ffi, 'new_pointer_type', BItem)

voidp_type = PointerType(void_type)

def ConstPointerType(totype):
    return PointerType(totype, Q_CONST)

const_voidp_type = ConstPointerType(void_type)


class NamedPointerType(PointerType):
    _attrs_ = ('totype', 'name')

    def __init__(self, totype, name, quals=0):
        PointerType.__init__(self, totype, quals)
        self.name = name
        self.c_name_with_marker = name + '&'
|
||||
|
||||
|
||||
class ArrayType(BaseType):
|
||||
_attrs_ = ('item', 'length')
|
||||
is_array_type = True
|
||||
|
||||
def __init__(self, item, length):
|
||||
self.item = item
|
||||
self.length = length
|
||||
#
|
||||
if length is None:
|
||||
brackets = '&[]'
|
||||
elif length == '...':
|
||||
brackets = '&[/*...*/]'
|
||||
else:
|
||||
brackets = '&[%s]' % length
|
||||
self.c_name_with_marker = (
|
||||
self.item.c_name_with_marker.replace('&', brackets))
|
||||
|
||||
def resolve_length(self, newlength):
|
||||
return ArrayType(self.item, newlength)
|
||||
|
||||
def build_backend_type(self, ffi, finishlist):
|
||||
if self.length == '...':
|
||||
raise CDefError("cannot render the type %r: unknown length" %
|
||||
(self,))
|
||||
self.item.get_cached_btype(ffi, finishlist) # force the item BType
|
||||
BPtrItem = PointerType(self.item).get_cached_btype(ffi, finishlist)
|
||||
return global_cache(self, ffi, 'new_array_type', BPtrItem, self.length)
|
||||
|
||||
char_array_type = ArrayType(PrimitiveType('char'), None)
|
||||
|
||||
|
||||
class StructOrUnionOrEnum(BaseTypeByIdentity):
|
||||
_attrs_ = ('name',)
|
||||
forcename = None
|
||||
|
||||
def build_c_name_with_marker(self):
|
||||
name = self.forcename or '%s %s' % (self.kind, self.name)
|
||||
self.c_name_with_marker = name + '&'
|
||||
|
||||
def force_the_name(self, forcename):
|
||||
self.forcename = forcename
|
||||
self.build_c_name_with_marker()
|
||||
|
||||
def get_official_name(self):
|
||||
assert self.c_name_with_marker.endswith('&')
|
||||
return self.c_name_with_marker[:-1]
|
||||
|
||||
|
||||
class StructOrUnion(StructOrUnionOrEnum):
|
||||
fixedlayout = None
|
||||
completed = 0
|
||||
partial = False
|
||||
packed = 0
|
||||
|
||||
def __init__(self, name, fldnames, fldtypes, fldbitsize, fldquals=None):
|
||||
self.name = name
|
||||
self.fldnames = fldnames
|
||||
self.fldtypes = fldtypes
|
||||
self.fldbitsize = fldbitsize
|
||||
self.fldquals = fldquals
|
||||
self.build_c_name_with_marker()
|
||||
|
||||
def anonymous_struct_fields(self):
|
||||
if self.fldtypes is not None:
|
||||
for name, type in zip(self.fldnames, self.fldtypes):
|
||||
if name == '' and isinstance(type, StructOrUnion):
|
||||
yield type
|
||||
|
||||
def enumfields(self, expand_anonymous_struct_union=True):
|
||||
fldquals = self.fldquals
|
||||
if fldquals is None:
|
||||
fldquals = (0,) * len(self.fldnames)
|
||||
for name, type, bitsize, quals in zip(self.fldnames, self.fldtypes,
|
||||
self.fldbitsize, fldquals):
|
||||
if (name == '' and isinstance(type, StructOrUnion)
|
||||
and expand_anonymous_struct_union):
|
||||
# nested anonymous struct/union
|
||||
for result in type.enumfields():
|
||||
yield result
|
||||
else:
|
||||
yield (name, type, bitsize, quals)
|
||||
|
||||
def force_flatten(self):
|
||||
# force the struct or union to have a declaration that lists
|
||||
# directly all fields returned by enumfields(), flattening
|
||||
# nested anonymous structs/unions.
|
||||
names = []
|
||||
types = []
|
||||
bitsizes = []
|
||||
fldquals = []
|
||||
for name, type, bitsize, quals in self.enumfields():
|
||||
names.append(name)
|
||||
types.append(type)
|
||||
bitsizes.append(bitsize)
|
||||
fldquals.append(quals)
|
||||
self.fldnames = tuple(names)
|
||||
self.fldtypes = tuple(types)
|
||||
self.fldbitsize = tuple(bitsizes)
|
||||
self.fldquals = tuple(fldquals)
|
||||
|
||||
def get_cached_btype(self, ffi, finishlist, can_delay=False):
|
||||
BType = StructOrUnionOrEnum.get_cached_btype(self, ffi, finishlist,
|
||||
can_delay)
|
||||
if not can_delay:
|
||||
self.finish_backend_type(ffi, finishlist)
|
||||
return BType
|
||||
|
||||
def finish_backend_type(self, ffi, finishlist):
|
||||
if self.completed:
|
||||
if self.completed != 2:
|
||||
raise NotImplementedError("recursive structure declaration "
|
||||
"for '%s'" % (self.name,))
|
||||
return
|
||||
BType = ffi._cached_btypes[self]
|
||||
#
|
||||
self.completed = 1
|
||||
#
|
||||
if self.fldtypes is None:
|
||||
pass # not completing it: it's an opaque struct
|
||||
#
|
||||
elif self.fixedlayout is None:
|
||||
fldtypes = [tp.get_cached_btype(ffi, finishlist)
|
||||
for tp in self.fldtypes]
|
||||
lst = list(zip(self.fldnames, fldtypes, self.fldbitsize))
|
||||
extra_flags = ()
|
||||
if self.packed:
|
||||
if self.packed == 1:
|
||||
extra_flags = (8,) # SF_PACKED
|
||||
else:
|
||||
extra_flags = (0, self.packed)
|
||||
ffi._backend.complete_struct_or_union(BType, lst, self,
|
||||
-1, -1, *extra_flags)
|
||||
#
|
||||
else:
|
||||
fldtypes = []
|
||||
fieldofs, fieldsize, totalsize, totalalignment = self.fixedlayout
|
||||
for i in range(len(self.fldnames)):
|
||||
fsize = fieldsize[i]
|
||||
ftype = self.fldtypes[i]
|
||||
#
|
||||
if isinstance(ftype, ArrayType) and ftype.length == '...':
|
||||
# fix the length to match the total size
|
||||
BItemType = ftype.item.get_cached_btype(ffi, finishlist)
|
||||
nlen, nrest = divmod(fsize, ffi.sizeof(BItemType))
|
||||
if nrest != 0:
|
||||
self._verification_error(
|
||||
"field '%s.%s' has a bogus size?" % (
|
||||
self.name, self.fldnames[i] or '{}'))
|
||||
ftype = ftype.resolve_length(nlen)
|
||||
self.fldtypes = (self.fldtypes[:i] + (ftype,) +
|
||||
self.fldtypes[i+1:])
|
||||
#
|
||||
BFieldType = ftype.get_cached_btype(ffi, finishlist)
|
||||
if isinstance(ftype, ArrayType) and ftype.length is None:
|
||||
assert fsize == 0
|
||||
else:
|
||||
bitemsize = ffi.sizeof(BFieldType)
|
||||
if bitemsize != fsize:
|
||||
self._verification_error(
|
||||
"field '%s.%s' is declared as %d bytes, but is "
|
||||
"really %d bytes" % (self.name,
|
||||
self.fldnames[i] or '{}',
|
||||
bitemsize, fsize))
|
||||
fldtypes.append(BFieldType)
|
||||
#
|
||||
lst = list(zip(self.fldnames, fldtypes, self.fldbitsize, fieldofs))
|
||||
ffi._backend.complete_struct_or_union(BType, lst, self,
|
||||
totalsize, totalalignment)
|
||||
self.completed = 2
|
||||
|
||||
def _verification_error(self, msg):
|
||||
raise VerificationError(msg)
|
||||
|
||||
def check_not_partial(self):
|
||||
if self.partial and self.fixedlayout is None:
|
||||
raise VerificationMissing(self._get_c_name())
|
||||
|
||||
def build_backend_type(self, ffi, finishlist):
|
||||
self.check_not_partial()
|
||||
finishlist.append(self)
|
||||
#
|
||||
return global_cache(self, ffi, 'new_%s_type' % self.kind,
|
||||
self.get_official_name(), key=self)
|
||||
|
||||
|
||||
class StructType(StructOrUnion):
|
||||
kind = 'struct'
|
||||
|
||||
|
||||
class UnionType(StructOrUnion):
|
||||
kind = 'union'
|
||||
|
||||
|
||||
class EnumType(StructOrUnionOrEnum):
|
||||
kind = 'enum'
|
||||
partial = False
|
||||
partial_resolved = False
|
||||
|
||||
def __init__(self, name, enumerators, enumvalues, baseinttype=None):
|
||||
self.name = name
|
||||
self.enumerators = enumerators
|
||||
self.enumvalues = enumvalues
|
||||
self.baseinttype = baseinttype
|
||||
self.build_c_name_with_marker()
|
||||
|
||||
def force_the_name(self, forcename):
|
||||
StructOrUnionOrEnum.force_the_name(self, forcename)
|
||||
if self.forcename is None:
|
||||
name = self.get_official_name()
|
||||
self.forcename = '$' + name.replace(' ', '_')
|
||||
|
||||
def check_not_partial(self):
|
||||
if self.partial and not self.partial_resolved:
|
||||
raise VerificationMissing(self._get_c_name())
|
||||
|
||||
def build_backend_type(self, ffi, finishlist):
|
||||
self.check_not_partial()
|
||||
base_btype = self.build_baseinttype(ffi, finishlist)
|
||||
return global_cache(self, ffi, 'new_enum_type',
|
||||
self.get_official_name(),
|
||||
self.enumerators, self.enumvalues,
|
||||
base_btype, key=self)
|
||||
|
||||
def build_baseinttype(self, ffi, finishlist):
|
||||
if self.baseinttype is not None:
|
||||
return self.baseinttype.get_cached_btype(ffi, finishlist)
|
||||
#
|
||||
if self.enumvalues:
|
||||
smallest_value = min(self.enumvalues)
|
||||
largest_value = max(self.enumvalues)
|
||||
else:
|
||||
import warnings
|
||||
try:
|
||||
# XXX! The goal is to ensure that the warnings.warn()
|
||||
# will not suppress the warning. We want to get it
|
||||
# several times if we reach this point several times.
|
||||
__warningregistry__.clear()
|
||||
except NameError:
|
||||
pass
|
||||
warnings.warn("%r has no values explicitly defined; "
|
||||
"guessing that it is equivalent to 'unsigned int'"
|
||||
% self._get_c_name())
|
||||
smallest_value = largest_value = 0
|
||||
if smallest_value < 0: # needs a signed type
|
||||
sign = 1
|
||||
candidate1 = PrimitiveType("int")
|
||||
candidate2 = PrimitiveType("long")
|
||||
else:
|
||||
sign = 0
|
||||
candidate1 = PrimitiveType("unsigned int")
|
||||
candidate2 = PrimitiveType("unsigned long")
|
||||
btype1 = candidate1.get_cached_btype(ffi, finishlist)
|
||||
btype2 = candidate2.get_cached_btype(ffi, finishlist)
|
||||
size1 = ffi.sizeof(btype1)
|
||||
size2 = ffi.sizeof(btype2)
|
||||
if (smallest_value >= ((-1) << (8*size1-1)) and
|
||||
largest_value < (1 << (8*size1-sign))):
|
||||
return btype1
|
||||
if (smallest_value >= ((-1) << (8*size2-1)) and
|
||||
largest_value < (1 << (8*size2-sign))):
|
||||
return btype2
|
||||
raise CDefError("%s values don't all fit into either 'long' "
|
||||
"or 'unsigned long'" % self._get_c_name())
|
||||
|
||||
def unknown_type(name, structname=None):
|
||||
if structname is None:
|
||||
structname = '$%s' % name
|
||||
tp = StructType(structname, None, None, None)
|
||||
tp.force_the_name(name)
|
||||
tp.origin = "unknown_type"
|
||||
return tp
|
||||
|
||||
def unknown_ptr_type(name, structname=None):
|
||||
if structname is None:
|
||||
structname = '$$%s' % name
|
||||
tp = StructType(structname, None, None, None)
|
||||
return NamedPointerType(tp, name)
|
||||
|
||||
|
||||
global_lock = allocate_lock()
|
||||
_typecache_cffi_backend = weakref.WeakValueDictionary()
|
||||
|
||||
def get_typecache(backend):
|
||||
# returns _typecache_cffi_backend if backend is the _cffi_backend
|
||||
# module, or type(backend).__typecache if backend is an instance of
|
||||
# CTypesBackend (or some FakeBackend class during tests)
|
||||
if isinstance(backend, types.ModuleType):
|
||||
return _typecache_cffi_backend
|
||||
with global_lock:
|
||||
if not hasattr(type(backend), '__typecache'):
|
||||
type(backend).__typecache = weakref.WeakValueDictionary()
|
||||
return type(backend).__typecache
|
||||
|
||||
def global_cache(srctype, ffi, funcname, *args, **kwds):
|
||||
key = kwds.pop('key', (funcname, args))
|
||||
assert not kwds
|
||||
try:
|
||||
return ffi._typecache[key]
|
||||
except KeyError:
|
||||
pass
|
||||
try:
|
||||
res = getattr(ffi._backend, funcname)(*args)
|
||||
except NotImplementedError as e:
|
||||
raise NotImplementedError("%s: %r: %s" % (funcname, srctype, e))
|
||||
# note that setdefault() on WeakValueDictionary is not atomic
|
||||
# and contains a rare bug (http://bugs.python.org/issue19542);
|
||||
# we have to use a lock and do it ourselves
|
||||
cache = ffi._typecache
|
||||
with global_lock:
|
||||
res1 = cache.get(key)
|
||||
if res1 is None:
|
||||
cache[key] = res
|
||||
return res
|
||||
else:
|
||||
return res1
|
||||
|
||||
def pointer_cache(ffi, BType):
|
||||
return global_cache('?', ffi, 'new_pointer_type', BType)
|
||||
|
||||
def attach_exception_info(e, name):
|
||||
if e.args and type(e.args[0]) is str:
|
||||
e.args = ('%s: %s' % (name, e.args[0]),) + e.args[1:]
|
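The '&' marker seen throughout this file is how the model composes C declarations textually: each type stores its C name with a '&' where the variable name (or the inner declarator) would go, and wrapping types substitute into that marker. A minimal sketch of the mechanism, assuming the vendored package above is importable as cffi.model (the module path is an assumption for illustration):

    from cffi import model

    int_t = model.PrimitiveType('int')     # c_name_with_marker == 'int&'
    ptr_t = model.PointerType(int_t)       # '&' -> ' *&', giving 'int *&'
    arr_t = model.ArrayType(int_t, 10)     # '&' -> '&[10]', giving 'int&[10]'

    assert int_t.c_name_with_marker == 'int&'
    assert ptr_t.c_name_with_marker == 'int *&'
    assert arr_t.c_name_with_marker == 'int&[10]'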
|
@ -0,0 +1,181 @@
|
|||
/* This part is from file 'cffi/parse_c_type.h'.  It is copied at the
   beginning of C sources generated by CFFI's ffi.set_source(). */

typedef void *_cffi_opcode_t;

#define _CFFI_OP(opcode, arg)   (_cffi_opcode_t)(opcode | (((uintptr_t)(arg)) << 8))
#define _CFFI_GETOP(cffi_opcode)    ((unsigned char)(uintptr_t)cffi_opcode)
#define _CFFI_GETARG(cffi_opcode)   (((intptr_t)cffi_opcode) >> 8)

#define _CFFI_OP_PRIMITIVE       1
#define _CFFI_OP_POINTER         3
#define _CFFI_OP_ARRAY           5
#define _CFFI_OP_OPEN_ARRAY      7
#define _CFFI_OP_STRUCT_UNION    9
#define _CFFI_OP_ENUM           11
#define _CFFI_OP_FUNCTION       13
#define _CFFI_OP_FUNCTION_END   15
#define _CFFI_OP_NOOP           17
#define _CFFI_OP_BITFIELD       19
#define _CFFI_OP_TYPENAME       21
#define _CFFI_OP_CPYTHON_BLTN_V 23   // varargs
#define _CFFI_OP_CPYTHON_BLTN_N 25   // noargs
#define _CFFI_OP_CPYTHON_BLTN_O 27   // O  (i.e. a single arg)
#define _CFFI_OP_CONSTANT       29
#define _CFFI_OP_CONSTANT_INT   31
#define _CFFI_OP_GLOBAL_VAR     33
#define _CFFI_OP_DLOPEN_FUNC    35
#define _CFFI_OP_DLOPEN_CONST   37
#define _CFFI_OP_GLOBAL_VAR_F   39
#define _CFFI_OP_EXTERN_PYTHON  41

#define _CFFI_PRIM_VOID          0
#define _CFFI_PRIM_BOOL          1
#define _CFFI_PRIM_CHAR          2
#define _CFFI_PRIM_SCHAR         3
#define _CFFI_PRIM_UCHAR         4
#define _CFFI_PRIM_SHORT         5
#define _CFFI_PRIM_USHORT        6
#define _CFFI_PRIM_INT           7
#define _CFFI_PRIM_UINT          8
#define _CFFI_PRIM_LONG          9
#define _CFFI_PRIM_ULONG        10
#define _CFFI_PRIM_LONGLONG     11
#define _CFFI_PRIM_ULONGLONG    12
#define _CFFI_PRIM_FLOAT        13
#define _CFFI_PRIM_DOUBLE       14
#define _CFFI_PRIM_LONGDOUBLE   15

#define _CFFI_PRIM_WCHAR        16
#define _CFFI_PRIM_INT8         17
#define _CFFI_PRIM_UINT8        18
#define _CFFI_PRIM_INT16        19
#define _CFFI_PRIM_UINT16       20
#define _CFFI_PRIM_INT32        21
#define _CFFI_PRIM_UINT32       22
#define _CFFI_PRIM_INT64        23
#define _CFFI_PRIM_UINT64       24
#define _CFFI_PRIM_INTPTR       25
#define _CFFI_PRIM_UINTPTR      26
#define _CFFI_PRIM_PTRDIFF      27
#define _CFFI_PRIM_SIZE         28
#define _CFFI_PRIM_SSIZE        29
#define _CFFI_PRIM_INT_LEAST8   30
#define _CFFI_PRIM_UINT_LEAST8  31
#define _CFFI_PRIM_INT_LEAST16  32
#define _CFFI_PRIM_UINT_LEAST16 33
#define _CFFI_PRIM_INT_LEAST32  34
#define _CFFI_PRIM_UINT_LEAST32 35
#define _CFFI_PRIM_INT_LEAST64  36
#define _CFFI_PRIM_UINT_LEAST64 37
#define _CFFI_PRIM_INT_FAST8    38
#define _CFFI_PRIM_UINT_FAST8   39
#define _CFFI_PRIM_INT_FAST16   40
#define _CFFI_PRIM_UINT_FAST16  41
#define _CFFI_PRIM_INT_FAST32   42
#define _CFFI_PRIM_UINT_FAST32  43
#define _CFFI_PRIM_INT_FAST64   44
#define _CFFI_PRIM_UINT_FAST64  45
#define _CFFI_PRIM_INTMAX       46
#define _CFFI_PRIM_UINTMAX      47
#define _CFFI_PRIM_FLOATCOMPLEX 48
#define _CFFI_PRIM_DOUBLECOMPLEX 49
#define _CFFI_PRIM_CHAR16       50
#define _CFFI_PRIM_CHAR32       51

#define _CFFI__NUM_PRIM         52
#define _CFFI__UNKNOWN_PRIM           (-1)
#define _CFFI__UNKNOWN_FLOAT_PRIM     (-2)
#define _CFFI__UNKNOWN_LONG_DOUBLE    (-3)

#define _CFFI__IO_FILE_STRUCT         (-1)


struct _cffi_global_s {
    const char *name;
    void *address;
    _cffi_opcode_t type_op;
    void *size_or_direct_fn;  // OP_GLOBAL_VAR: size, or 0 if unknown
                              // OP_CPYTHON_BLTN_*: addr of direct function
};

struct _cffi_getconst_s {
    unsigned long long value;
    const struct _cffi_type_context_s *ctx;
    int gindex;
};

struct _cffi_struct_union_s {
    const char *name;
    int type_index;          // -> _cffi_types, on a OP_STRUCT_UNION
    int flags;               // _CFFI_F_* flags below
    size_t size;
    int alignment;
    int first_field_index;   // -> _cffi_fields array
    int num_fields;
};
#define _CFFI_F_UNION         0x01   // is a union, not a struct
#define _CFFI_F_CHECK_FIELDS  0x02   // complain if fields are not in the
                                     // "standard layout" or if some are missing
#define _CFFI_F_PACKED        0x04   // for CHECK_FIELDS, assume a packed struct
#define _CFFI_F_EXTERNAL      0x08   // in some other ffi.include()
#define _CFFI_F_OPAQUE        0x10   // opaque

struct _cffi_field_s {
    const char *name;
    size_t field_offset;
    size_t field_size;
    _cffi_opcode_t field_type_op;
};

struct _cffi_enum_s {
    const char *name;
    int type_index;          // -> _cffi_types, on a OP_ENUM
    int type_prim;           // _CFFI_PRIM_xxx
    const char *enumerators; // comma-delimited string
};

struct _cffi_typename_s {
    const char *name;
    int type_index;   /* if opaque, points to a possibly artificial
                         OP_STRUCT which is itself opaque */
};

struct _cffi_type_context_s {
    _cffi_opcode_t *types;
    const struct _cffi_global_s *globals;
    const struct _cffi_field_s *fields;
    const struct _cffi_struct_union_s *struct_unions;
    const struct _cffi_enum_s *enums;
    const struct _cffi_typename_s *typenames;
    int num_globals;
    int num_struct_unions;
    int num_enums;
    int num_typenames;
    const char *const *includes;
    int num_types;
    int flags;      /* future extension */
};

struct _cffi_parse_info_s {
    const struct _cffi_type_context_s *ctx;
    _cffi_opcode_t *output;
    unsigned int output_size;
    size_t error_location;
    const char *error_message;
};

struct _cffi_externpy_s {
    const char *name;
    size_t size_of_result;
    void *reserved1, *reserved2;
};

#ifdef _CFFI_INTERNAL
static int parse_c_type(struct _cffi_parse_info_s *info, const char *input);
static int search_in_globals(const struct _cffi_type_context_s *ctx,
                             const char *search, size_t search_len);
static int search_in_struct_unions(const struct _cffi_type_context_s *ctx,
                                   const char *search, size_t search_len);
#endif
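Each _cffi_opcode_t packs an opcode in the low 8 bits and an argument in the remaining bits, exactly as the three macros at the top of this header show. The same packing redone in Python, for illustration only:

    OP_POINTER = 3                      # _CFFI_OP_POINTER

    def cffi_op(opcode, arg):           # mirrors _CFFI_OP
        return (arg << 8) | opcode

    def getop(op):                      # mirrors _CFFI_GETOP
        return op & 0xff

    def getarg(op):                     # mirrors _CFFI_GETARG
        return op >> 8

    op = cffi_op(OP_POINTER, 42)        # "pointer to the type at index 42"
    assert getop(op) == OP_POINTER and getarg(op) == 42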
|
@ -0,0 +1,121 @@
|
|||
# pkg-config, https://www.freedesktop.org/wiki/Software/pkg-config/ integration for cffi
import sys, os, subprocess

from .error import PkgConfigError


def merge_flags(cfg1, cfg2):
    """Merge values from cffi config flags cfg2 into cfg1

    Example:
        merge_flags({"libraries": ["one"]}, {"libraries": ["two"]})
        {"libraries": ["one", "two"]}
    """
    for key, value in cfg2.items():
        if key not in cfg1:
            cfg1[key] = value
        else:
            if not isinstance(cfg1[key], list):
                raise TypeError("cfg1[%r] should be a list of strings" % (key,))
            if not isinstance(value, list):
                raise TypeError("cfg2[%r] should be a list of strings" % (key,))
            cfg1[key].extend(value)
    return cfg1


def call(libname, flag, encoding=sys.getfilesystemencoding()):
    """Calls pkg-config and returns the output if found
    """
    a = ["pkg-config", "--print-errors"]
    a.append(flag)
    a.append(libname)
    try:
        pc = subprocess.Popen(a, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    except EnvironmentError as e:
        raise PkgConfigError("cannot run pkg-config: %s" % (str(e).strip(),))

    bout, berr = pc.communicate()
    if pc.returncode != 0:
        try:
            berr = berr.decode(encoding)
        except Exception:
            pass
        raise PkgConfigError(berr.strip())

    if sys.version_info >= (3,) and not isinstance(bout, str):   # Python 3.x
        try:
            bout = bout.decode(encoding)
        except UnicodeDecodeError:
            raise PkgConfigError("pkg-config %s %s returned bytes that cannot "
                                 "be decoded with encoding %r:\n%r" %
                                 (flag, libname, encoding, bout))

    if os.altsep != '\\' and '\\' in bout:
        raise PkgConfigError("pkg-config %s %s returned an unsupported "
                             "backslash-escaped output:\n%r" %
                             (flag, libname, bout))
    return bout


def flags_from_pkgconfig(libs):
    r"""Return compiler line flags for FFI.set_source based on pkg-config output

    Usage
        ...
        ffibuilder.set_source("_foo", pkgconfig = ["libfoo", "libbar >= 1.8.3"])

    If pkg-config is installed on the build machine, then the arguments
    include_dirs, library_dirs, libraries, define_macros, extra_compile_args
    and extra_link_args are extended with the output of pkg-config for libfoo
    and libbar.

    Raises PkgConfigError in case the pkg-config call fails.
    """

    def get_include_dirs(string):
        return [x[2:] for x in string.split() if x.startswith("-I")]

    def get_library_dirs(string):
        return [x[2:] for x in string.split() if x.startswith("-L")]

    def get_libraries(string):
        return [x[2:] for x in string.split() if x.startswith("-l")]

    # convert -Dfoo=bar to list of tuples [("foo", "bar")] expected by distutils
    def get_macros(string):
        def _macro(x):
            x = x[2:]    # drop "-D"
            if '=' in x:
                return tuple(x.split("=", 1))  # "-Dfoo=bar" => ("foo", "bar")
            else:
                return (x, None)               # "-Dfoo" => ("foo", None)
        return [_macro(x) for x in string.split() if x.startswith("-D")]

    def get_other_cflags(string):
        return [x for x in string.split() if not x.startswith("-I") and
                not x.startswith("-D")]

    def get_other_libs(string):
        return [x for x in string.split() if not x.startswith("-L") and
                not x.startswith("-l")]

    # return kwargs for given libname
    def kwargs(libname):
        fse = sys.getfilesystemencoding()
        all_cflags = call(libname, "--cflags")
        all_libs = call(libname, "--libs")
        return {
            "include_dirs": get_include_dirs(all_cflags),
            "library_dirs": get_library_dirs(all_libs),
            "libraries": get_libraries(all_libs),
            "define_macros": get_macros(all_cflags),
            "extra_compile_args": get_other_cflags(all_cflags),
            "extra_link_args": get_other_libs(all_libs),
        }

    # merge all arguments together
    ret = {}
    for libname in libs:
        lib_flags = kwargs(libname)
        merge_flags(ret, lib_flags)
    return ret
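A hedged usage sketch for the module above; the module path cffi.pkgconfig and the 'libpng' package name are assumptions for illustration, and pkg-config plus the named .pc file must exist on the build machine:

    from cffi.pkgconfig import flags_from_pkgconfig   # assumed module path

    kwds = flags_from_pkgconfig(["libpng"])           # hypothetical library
    # kwds now maps include_dirs, library_dirs, libraries, define_macros,
    # extra_compile_args and extra_link_args to the values pkg-config
    # reported, ready to pass on as keyword arguments to FFI.set_source().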
File diff suppressed because it is too large
|
@ -0,0 +1,217 @@
|
|||
import os
import sys

try:
    basestring
except NameError:
    # Python 3.x
    basestring = str

def error(msg):
    from distutils.errors import DistutilsSetupError
    raise DistutilsSetupError(msg)


def execfile(filename, glob):
    # We use execfile() (here rewritten for Python 3) instead of
    # __import__() to load the build script.  The problem with
    # a normal import is that in some packages, the intermediate
    # __init__.py files may already try to import the file that
    # we are generating.
    with open(filename) as f:
        src = f.read()
    src += '\n'  # Python 2.6 compatibility
    code = compile(src, filename, 'exec')
    exec(code, glob, glob)


def add_cffi_module(dist, mod_spec):
    from cffi.api import FFI

    if not isinstance(mod_spec, basestring):
        error("argument to 'cffi_modules=...' must be a str or a list of str,"
              " not %r" % (type(mod_spec).__name__,))
    mod_spec = str(mod_spec)
    try:
        build_file_name, ffi_var_name = mod_spec.split(':')
    except ValueError:
        error("%r must be of the form 'path/build.py:ffi_variable'" %
              (mod_spec,))
    if not os.path.exists(build_file_name):
        ext = ''
        rewritten = build_file_name.replace('.', '/') + '.py'
        if os.path.exists(rewritten):
            ext = ' (rewrite cffi_modules to [%r])' % (
                rewritten + ':' + ffi_var_name,)
        error("%r does not name an existing file%s" % (build_file_name, ext))

    mod_vars = {'__name__': '__cffi__', '__file__': build_file_name}
    execfile(build_file_name, mod_vars)

    try:
        ffi = mod_vars[ffi_var_name]
    except KeyError:
        error("%r: object %r not found in module" % (mod_spec,
                                                     ffi_var_name))
    if not isinstance(ffi, FFI):
        ffi = ffi()    # maybe it's a function instead of directly an ffi
    if not isinstance(ffi, FFI):
        error("%r is not an FFI instance (got %r)" % (mod_spec,
                                                      type(ffi).__name__))
    if not hasattr(ffi, '_assigned_source'):
        error("%r: the set_source() method was not called" % (mod_spec,))
    module_name, source, source_extension, kwds = ffi._assigned_source
    if ffi._windows_unicode:
        kwds = kwds.copy()
        ffi._apply_windows_unicode(kwds)

    if source is None:
        _add_py_module(dist, ffi, module_name)
    else:
        _add_c_module(dist, ffi, module_name, source, source_extension, kwds)

def _set_py_limited_api(Extension, kwds):
    """
    Add py_limited_api to kwds if setuptools >= 26 is in use.
    Do not alter the setting if it already exists.
    Setuptools takes care of ignoring the flag on Python 2 and PyPy.

    CPython itself should ignore the flag in a debugging version
    (by not listing .abi3.so in the extensions it supports), but
    it doesn't so far, creating troubles.  That's why we check
    for "not hasattr(sys, 'gettotalrefcount')" (the 2.7 compatible equivalent
    of 'd' not in sys.abiflags). (http://bugs.python.org/issue28401)

    On Windows, with CPython <= 3.4, it's better not to use py_limited_api
    because virtualenv *still* doesn't copy PYTHON3.DLL on these versions.
    For now we'll skip py_limited_api on all Windows versions to avoid an
    inconsistent mess.
    """
    if ('py_limited_api' not in kwds and not hasattr(sys, 'gettotalrefcount')
            and sys.platform != 'win32'):
        import setuptools
        try:
            setuptools_major_version = int(setuptools.__version__.partition('.')[0])
            if setuptools_major_version >= 26:
                kwds['py_limited_api'] = True
        except ValueError:  # certain development versions of setuptools
            # If we don't know the version number of setuptools, we
            # try to set 'py_limited_api' anyway.  At worst, we get a
            # warning.
            kwds['py_limited_api'] = True
    return kwds

def _add_c_module(dist, ffi, module_name, source, source_extension, kwds):
    from distutils.core import Extension
    # We are a setuptools extension. Need this build_ext for py_limited_api.
    from setuptools.command.build_ext import build_ext
    from distutils.dir_util import mkpath
    from distutils import log
    from cffi import recompiler

    allsources = ['$PLACEHOLDER']
    allsources.extend(kwds.pop('sources', []))
    kwds = _set_py_limited_api(Extension, kwds)
    ext = Extension(name=module_name, sources=allsources, **kwds)

    def make_mod(tmpdir, pre_run=None):
        c_file = os.path.join(tmpdir, module_name + source_extension)
        log.info("generating cffi module %r" % c_file)
        mkpath(tmpdir)
        # a setuptools-only, API-only hook: called with the "ext" and "ffi"
        # arguments just before we turn the ffi into C code.  To use it,
        # subclass the 'distutils.command.build_ext.build_ext' class and
        # add a method 'def pre_run(self, ext, ffi)'.
        if pre_run is not None:
            pre_run(ext, ffi)
        updated = recompiler.make_c_source(ffi, module_name, source, c_file)
        if not updated:
            log.info("already up-to-date")
        return c_file

    if dist.ext_modules is None:
        dist.ext_modules = []
    dist.ext_modules.append(ext)

    base_class = dist.cmdclass.get('build_ext', build_ext)
    class build_ext_make_mod(base_class):
        def run(self):
            if ext.sources[0] == '$PLACEHOLDER':
                pre_run = getattr(self, 'pre_run', None)
                ext.sources[0] = make_mod(self.build_temp, pre_run)
            base_class.run(self)
    dist.cmdclass['build_ext'] = build_ext_make_mod
    # NB. multiple runs here will create multiple 'build_ext_make_mod'
    # classes.  Even in this case the 'build_ext' command should be
    # run once; but just in case, the logic above does nothing if
    # called again.


def _add_py_module(dist, ffi, module_name):
    from distutils.dir_util import mkpath
    from setuptools.command.build_py import build_py
    from setuptools.command.build_ext import build_ext
    from distutils import log
    from cffi import recompiler

    def generate_mod(py_file):
        log.info("generating cffi module %r" % py_file)
        mkpath(os.path.dirname(py_file))
        updated = recompiler.make_py_source(ffi, module_name, py_file)
        if not updated:
            log.info("already up-to-date")

    base_class = dist.cmdclass.get('build_py', build_py)
    class build_py_make_mod(base_class):
        def run(self):
            base_class.run(self)
            module_path = module_name.split('.')
            module_path[-1] += '.py'
            generate_mod(os.path.join(self.build_lib, *module_path))
        def get_source_files(self):
            # This is called from 'setup.py sdist' only.  Exclude
            # the generated .py module in this case.
            saved_py_modules = self.py_modules
            try:
                if saved_py_modules:
                    self.py_modules = [m for m in saved_py_modules
                                       if m != module_name]
                return base_class.get_source_files(self)
            finally:
                self.py_modules = saved_py_modules
    dist.cmdclass['build_py'] = build_py_make_mod

    # distutils and setuptools have no notion I could find of a
    # generated python module.  If we don't add module_name to
    # dist.py_modules, then things mostly work but there are some
    # combination of options (--root and --record) that will miss
    # the module.  So we add it here, which gives a few apparently
    # harmless warnings about not finding the file outside the
    # build directory.
    # Then we need to hack more in get_source_files(); see above.
    if dist.py_modules is None:
        dist.py_modules = []
    dist.py_modules.append(module_name)

    # the following is only for "build_ext -i"
    base_class_2 = dist.cmdclass.get('build_ext', build_ext)
    class build_ext_make_mod(base_class_2):
        def run(self):
            base_class_2.run(self)
            if self.inplace:
                # from get_ext_fullpath() in distutils/command/build_ext.py
                module_path = module_name.split('.')
                package = '.'.join(module_path[:-1])
                build_py = self.get_finalized_command('build_py')
                package_dir = build_py.get_package_dir(package)
                file_name = module_path[-1] + '.py'
                generate_mod(os.path.join(package_dir, file_name))
    dist.cmdclass['build_ext'] = build_ext_make_mod

def cffi_modules(dist, attr, value):
    assert attr == 'cffi_modules'
    if isinstance(value, basestring):
        value = [value]

    for cffi_module in value:
        add_cffi_module(dist, cffi_module)
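For context, a minimal setup.py that exercises the cffi_modules entry point above; the file and variable names are illustrative, but the 'path/build.py:ffi_variable' format is the one add_cffi_module() parses:

    from setuptools import setup

    setup(
        name="example",
        setup_requires=["cffi>=1.0.0"],
        cffi_modules=["example_build.py:ffibuilder"],  # 'path:ffi_variable'
        install_requires=["cffi>=1.0.0"],
    )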
File diff suppressed because it is too large
|
@ -0,0 +1,675 @@
|
|||
#
|
||||
# DEPRECATED: implementation for ffi.verify()
|
||||
#
|
||||
import sys, os
|
||||
import types
|
||||
|
||||
from . import model
|
||||
from .error import VerificationError
|
||||
|
||||
|
||||
class VGenericEngine(object):
|
||||
_class_key = 'g'
|
||||
_gen_python_module = False
|
||||
|
||||
def __init__(self, verifier):
|
||||
self.verifier = verifier
|
||||
self.ffi = verifier.ffi
|
||||
self.export_symbols = []
|
||||
self._struct_pending_verification = {}
|
||||
|
||||
def patch_extension_kwds(self, kwds):
|
||||
# add 'export_symbols' to the dictionary. Note that we add the
|
||||
# list before filling it. When we fill it, it will thus also show
|
||||
# up in kwds['export_symbols'].
|
||||
kwds.setdefault('export_symbols', self.export_symbols)
|
||||
|
||||
def find_module(self, module_name, path, so_suffixes):
|
||||
for so_suffix in so_suffixes:
|
||||
basename = module_name + so_suffix
|
||||
if path is None:
|
||||
path = sys.path
|
||||
for dirname in path:
|
||||
filename = os.path.join(dirname, basename)
|
||||
if os.path.isfile(filename):
|
||||
return filename
|
||||
|
||||
def collect_types(self):
|
||||
pass # not needed in the generic engine
|
||||
|
||||
def _prnt(self, what=''):
|
||||
self._f.write(what + '\n')
|
||||
|
||||
def write_source_to_f(self):
|
||||
prnt = self._prnt
|
||||
# first paste some standard set of lines that are mostly '#include'
|
||||
prnt(cffimod_header)
|
||||
# then paste the C source given by the user, verbatim.
|
||||
prnt(self.verifier.preamble)
|
||||
#
|
||||
# call generate_gen_xxx_decl(), for every xxx found from
|
||||
# ffi._parser._declarations. This generates all the functions.
|
||||
self._generate('decl')
|
||||
#
|
||||
# on Windows, distutils insists on putting init_cffi_xyz in
|
||||
# 'export_symbols', so instead of fighting it, just give up and
|
||||
# give it one
|
||||
if sys.platform == 'win32':
|
||||
if sys.version_info >= (3,):
|
||||
prefix = 'PyInit_'
|
||||
else:
|
||||
prefix = 'init'
|
||||
modname = self.verifier.get_module_name()
|
||||
prnt("void %s%s(void) { }\n" % (prefix, modname))
|
||||
|
||||
def load_library(self, flags=0):
|
||||
# import it with the CFFI backend
|
||||
backend = self.ffi._backend
|
||||
# needs to make a path that contains '/', on Posix
|
||||
filename = os.path.join(os.curdir, self.verifier.modulefilename)
|
||||
module = backend.load_library(filename, flags)
|
||||
#
|
||||
# call loading_gen_struct() to get the struct layout inferred by
|
||||
# the C compiler
|
||||
self._load(module, 'loading')
|
||||
|
||||
# build the FFILibrary class and instance, this is a module subclass
|
||||
# because modules are expected to have usually-constant-attributes and
|
||||
# in PyPy this means the JIT is able to treat attributes as constant,
|
||||
# which we want.
|
||||
class FFILibrary(types.ModuleType):
|
||||
_cffi_generic_module = module
|
||||
_cffi_ffi = self.ffi
|
||||
_cffi_dir = []
|
||||
def __dir__(self):
|
||||
return FFILibrary._cffi_dir
|
||||
library = FFILibrary("")
|
||||
#
|
||||
# finally, call the loaded_gen_xxx() functions. This will set
|
||||
# up the 'library' object.
|
||||
self._load(module, 'loaded', library=library)
|
||||
return library
|
||||
|
||||
def _get_declarations(self):
|
||||
lst = [(key, tp) for (key, (tp, qual)) in
|
||||
self.ffi._parser._declarations.items()]
|
||||
lst.sort()
|
||||
return lst
|
||||
|
||||
def _generate(self, step_name):
|
||||
for name, tp in self._get_declarations():
|
||||
kind, realname = name.split(' ', 1)
|
||||
try:
|
||||
method = getattr(self, '_generate_gen_%s_%s' % (kind,
|
||||
step_name))
|
||||
except AttributeError:
|
||||
raise VerificationError(
|
||||
"not implemented in verify(): %r" % name)
|
||||
try:
|
||||
method(tp, realname)
|
||||
except Exception as e:
|
||||
model.attach_exception_info(e, name)
|
||||
raise
|
||||
|
||||
def _load(self, module, step_name, **kwds):
|
||||
for name, tp in self._get_declarations():
|
||||
kind, realname = name.split(' ', 1)
|
||||
method = getattr(self, '_%s_gen_%s' % (step_name, kind))
|
||||
try:
|
||||
method(tp, realname, module, **kwds)
|
||||
except Exception as e:
|
||||
model.attach_exception_info(e, name)
|
||||
raise
|
||||
|
||||
def _generate_nothing(self, tp, name):
|
||||
pass
|
||||
|
||||
def _loaded_noop(self, tp, name, module, **kwds):
|
||||
pass
|
||||
|
||||
# ----------
|
||||
# typedefs: generates no code so far
|
||||
|
||||
_generate_gen_typedef_decl = _generate_nothing
|
||||
_loading_gen_typedef = _loaded_noop
|
||||
_loaded_gen_typedef = _loaded_noop
|
||||
|
||||
# ----------
|
||||
# function declarations
|
||||
|
||||
def _generate_gen_function_decl(self, tp, name):
|
||||
assert isinstance(tp, model.FunctionPtrType)
|
||||
if tp.ellipsis:
|
||||
# cannot support vararg functions better than this: check for its
|
||||
# exact type (including the fixed arguments), and build it as a
|
||||
# constant function pointer (no _cffi_f_%s wrapper)
|
||||
self._generate_gen_const(False, name, tp)
|
||||
return
|
||||
prnt = self._prnt
|
||||
numargs = len(tp.args)
|
||||
argnames = []
|
||||
for i, type in enumerate(tp.args):
|
||||
indirection = ''
|
||||
if isinstance(type, model.StructOrUnion):
|
||||
indirection = '*'
|
||||
argnames.append('%sx%d' % (indirection, i))
|
||||
context = 'argument of %s' % name
|
||||
arglist = [type.get_c_name(' %s' % arg, context)
|
||||
for type, arg in zip(tp.args, argnames)]
|
||||
tpresult = tp.result
|
||||
if isinstance(tpresult, model.StructOrUnion):
|
||||
arglist.insert(0, tpresult.get_c_name(' *r', context))
|
||||
tpresult = model.void_type
|
||||
arglist = ', '.join(arglist) or 'void'
|
||||
wrappername = '_cffi_f_%s' % name
|
||||
self.export_symbols.append(wrappername)
|
||||
if tp.abi:
|
||||
abi = tp.abi + ' '
|
||||
else:
|
||||
abi = ''
|
||||
funcdecl = ' %s%s(%s)' % (abi, wrappername, arglist)
|
||||
context = 'result of %s' % name
|
||||
prnt(tpresult.get_c_name(funcdecl, context))
|
||||
prnt('{')
|
||||
#
|
||||
if isinstance(tp.result, model.StructOrUnion):
|
||||
result_code = '*r = '
|
||||
elif not isinstance(tp.result, model.VoidType):
|
||||
result_code = 'return '
|
||||
else:
|
||||
result_code = ''
|
||||
prnt(' %s%s(%s);' % (result_code, name, ', '.join(argnames)))
|
||||
prnt('}')
|
||||
prnt()
|
||||
|
||||
_loading_gen_function = _loaded_noop
|
||||
|
||||
def _loaded_gen_function(self, tp, name, module, library):
|
||||
assert isinstance(tp, model.FunctionPtrType)
|
||||
if tp.ellipsis:
|
||||
newfunction = self._load_constant(False, tp, name, module)
|
||||
else:
|
||||
indirections = []
|
||||
base_tp = tp
|
||||
if (any(isinstance(typ, model.StructOrUnion) for typ in tp.args)
|
||||
or isinstance(tp.result, model.StructOrUnion)):
|
||||
indirect_args = []
|
||||
for i, typ in enumerate(tp.args):
|
||||
if isinstance(typ, model.StructOrUnion):
|
||||
typ = model.PointerType(typ)
|
||||
indirections.append((i, typ))
|
||||
indirect_args.append(typ)
|
||||
indirect_result = tp.result
|
||||
if isinstance(indirect_result, model.StructOrUnion):
|
||||
if indirect_result.fldtypes is None:
|
||||
raise TypeError("'%s' is used as result type, "
|
||||
"but is opaque" % (
|
||||
indirect_result._get_c_name(),))
|
||||
indirect_result = model.PointerType(indirect_result)
|
||||
indirect_args.insert(0, indirect_result)
|
||||
indirections.insert(0, ("result", indirect_result))
|
||||
indirect_result = model.void_type
|
||||
tp = model.FunctionPtrType(tuple(indirect_args),
|
||||
indirect_result, tp.ellipsis)
|
||||
BFunc = self.ffi._get_cached_btype(tp)
|
||||
wrappername = '_cffi_f_%s' % name
|
||||
newfunction = module.load_function(BFunc, wrappername)
|
||||
for i, typ in indirections:
|
||||
newfunction = self._make_struct_wrapper(newfunction, i, typ,
|
||||
base_tp)
|
||||
setattr(library, name, newfunction)
|
||||
type(library)._cffi_dir.append(name)
|
||||
|
||||
def _make_struct_wrapper(self, oldfunc, i, tp, base_tp):
|
||||
backend = self.ffi._backend
|
||||
BType = self.ffi._get_cached_btype(tp)
|
||||
if i == "result":
|
||||
ffi = self.ffi
|
||||
def newfunc(*args):
|
||||
res = ffi.new(BType)
|
||||
oldfunc(res, *args)
|
||||
return res[0]
|
||||
else:
|
||||
def newfunc(*args):
|
||||
args = args[:i] + (backend.newp(BType, args[i]),) + args[i+1:]
|
||||
return oldfunc(*args)
|
||||
newfunc._cffi_base_type = base_tp
|
||||
return newfunc
|
||||
|
||||
# ----------
|
||||
# named structs
|
||||
|
||||
def _generate_gen_struct_decl(self, tp, name):
|
||||
assert name == tp.name
|
||||
self._generate_struct_or_union_decl(tp, 'struct', name)
|
||||
|
||||
def _loading_gen_struct(self, tp, name, module):
|
||||
self._loading_struct_or_union(tp, 'struct', name, module)
|
||||
|
||||
def _loaded_gen_struct(self, tp, name, module, **kwds):
|
||||
self._loaded_struct_or_union(tp)
|
||||
|
||||
def _generate_gen_union_decl(self, tp, name):
|
||||
assert name == tp.name
|
||||
self._generate_struct_or_union_decl(tp, 'union', name)
|
||||
|
||||
def _loading_gen_union(self, tp, name, module):
|
||||
self._loading_struct_or_union(tp, 'union', name, module)
|
||||
|
||||
def _loaded_gen_union(self, tp, name, module, **kwds):
|
||||
self._loaded_struct_or_union(tp)
|
||||
|
||||
def _generate_struct_or_union_decl(self, tp, prefix, name):
|
||||
if tp.fldnames is None:
|
||||
return # nothing to do with opaque structs
|
||||
checkfuncname = '_cffi_check_%s_%s' % (prefix, name)
|
||||
layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
|
||||
cname = ('%s %s' % (prefix, name)).strip()
|
||||
#
|
||||
prnt = self._prnt
|
||||
prnt('static void %s(%s *p)' % (checkfuncname, cname))
|
||||
prnt('{')
|
||||
prnt(' /* only to generate compile-time warnings or errors */')
|
||||
prnt(' (void)p;')
|
||||
for fname, ftype, fbitsize, fqual in tp.enumfields():
|
||||
if (isinstance(ftype, model.PrimitiveType)
|
||||
and ftype.is_integer_type()) or fbitsize >= 0:
|
||||
# accept all integers, but complain on float or double
|
||||
prnt(' (void)((p->%s) << 1);' % fname)
|
||||
else:
|
||||
# only accept exactly the type declared.
|
||||
try:
|
||||
prnt(' { %s = &p->%s; (void)tmp; }' % (
|
||||
ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual),
|
||||
fname))
|
||||
except VerificationError as e:
|
||||
prnt(' /* %s */' % str(e)) # cannot verify it, ignore
|
||||
prnt('}')
|
||||
self.export_symbols.append(layoutfuncname)
|
||||
prnt('intptr_t %s(intptr_t i)' % (layoutfuncname,))
|
||||
prnt('{')
|
||||
prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname)
|
||||
prnt(' static intptr_t nums[] = {')
|
||||
prnt(' sizeof(%s),' % cname)
|
||||
prnt(' offsetof(struct _cffi_aligncheck, y),')
|
||||
for fname, ftype, fbitsize, fqual in tp.enumfields():
|
||||
if fbitsize >= 0:
|
||||
continue # xxx ignore fbitsize for now
|
||||
prnt(' offsetof(%s, %s),' % (cname, fname))
|
||||
if isinstance(ftype, model.ArrayType) and ftype.length is None:
|
||||
prnt(' 0, /* %s */' % ftype._get_c_name())
|
||||
else:
|
||||
prnt(' sizeof(((%s *)0)->%s),' % (cname, fname))
|
||||
prnt(' -1')
|
||||
prnt(' };')
|
||||
prnt(' return nums[i];')
|
||||
prnt(' /* the next line is not executed, but compiled */')
|
||||
prnt(' %s(0);' % (checkfuncname,))
|
||||
prnt('}')
|
||||
prnt()
|
||||
|
||||
def _loading_struct_or_union(self, tp, prefix, name, module):
|
||||
if tp.fldnames is None:
|
||||
return # nothing to do with opaque structs
|
||||
layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
|
||||
#
|
||||
BFunc = self.ffi._typeof_locked("intptr_t(*)(intptr_t)")[0]
|
||||
function = module.load_function(BFunc, layoutfuncname)
|
||||
layout = []
|
||||
num = 0
|
||||
while True:
|
||||
x = function(num)
|
||||
if x < 0: break
|
||||
layout.append(x)
|
||||
num += 1
|
||||
if isinstance(tp, model.StructOrUnion) and tp.partial:
|
||||
# use the function()'s sizes and offsets to guide the
|
||||
# layout of the struct
|
||||
totalsize = layout[0]
|
||||
totalalignment = layout[1]
|
||||
fieldofs = layout[2::2]
|
||||
fieldsize = layout[3::2]
|
||||
tp.force_flatten()
|
||||
assert len(fieldofs) == len(fieldsize) == len(tp.fldnames)
|
||||
tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment
|
||||
else:
|
||||
cname = ('%s %s' % (prefix, name)).strip()
|
||||
self._struct_pending_verification[tp] = layout, cname
|
||||
|
||||
def _loaded_struct_or_union(self, tp):
|
||||
if tp.fldnames is None:
|
||||
return # nothing to do with opaque structs
|
||||
self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered
|
||||
|
||||
if tp in self._struct_pending_verification:
|
||||
# check that the layout sizes and offsets match the real ones
|
||||
def check(realvalue, expectedvalue, msg):
|
||||
if realvalue != expectedvalue:
|
||||
raise VerificationError(
|
||||
"%s (we have %d, but C compiler says %d)"
|
||||
% (msg, expectedvalue, realvalue))
|
||||
ffi = self.ffi
|
||||
BStruct = ffi._get_cached_btype(tp)
|
||||
layout, cname = self._struct_pending_verification.pop(tp)
|
||||
check(layout[0], ffi.sizeof(BStruct), "wrong total size")
|
||||
check(layout[1], ffi.alignof(BStruct), "wrong total alignment")
|
||||
i = 2
|
||||
for fname, ftype, fbitsize, fqual in tp.enumfields():
|
||||
if fbitsize >= 0:
|
||||
continue # xxx ignore fbitsize for now
|
||||
check(layout[i], ffi.offsetof(BStruct, fname),
|
||||
"wrong offset for field %r" % (fname,))
|
||||
if layout[i+1] != 0:
|
||||
BField = ffi._get_cached_btype(ftype)
|
||||
check(layout[i+1], ffi.sizeof(BField),
|
||||
"wrong size for field %r" % (fname,))
|
||||
i += 2
|
||||
assert i == len(layout)
|
||||
|
||||
# ----------
|
||||
# 'anonymous' declarations. These are produced for anonymous structs
|
||||
# or unions; the 'name' is obtained by a typedef.
|
||||
|
||||
def _generate_gen_anonymous_decl(self, tp, name):
|
||||
if isinstance(tp, model.EnumType):
|
||||
self._generate_gen_enum_decl(tp, name, '')
|
||||
else:
|
||||
self._generate_struct_or_union_decl(tp, '', name)
|
||||
|
||||
def _loading_gen_anonymous(self, tp, name, module):
|
||||
if isinstance(tp, model.EnumType):
|
||||
self._loading_gen_enum(tp, name, module, '')
|
||||
else:
|
||||
self._loading_struct_or_union(tp, '', name, module)
|
||||
|
||||
def _loaded_gen_anonymous(self, tp, name, module, **kwds):
|
||||
if isinstance(tp, model.EnumType):
|
||||
self._loaded_gen_enum(tp, name, module, **kwds)
|
||||
else:
|
||||
self._loaded_struct_or_union(tp)
|
||||
|
||||
# ----------
|
||||
# constants, likely declared with '#define'
|
||||
|
||||
def _generate_gen_const(self, is_int, name, tp=None, category='const',
|
||||
check_value=None):
|
||||
prnt = self._prnt
|
||||
funcname = '_cffi_%s_%s' % (category, name)
|
||||
self.export_symbols.append(funcname)
|
||||
if check_value is not None:
|
||||
assert is_int
|
||||
assert category == 'const'
|
||||
prnt('int %s(char *out_error)' % funcname)
|
||||
prnt('{')
|
||||
self._check_int_constant_value(name, check_value)
|
||||
prnt(' return 0;')
|
||||
prnt('}')
|
||||
elif is_int:
|
||||
assert category == 'const'
|
||||
prnt('int %s(long long *out_value)' % funcname)
|
||||
prnt('{')
|
||||
prnt(' *out_value = (long long)(%s);' % (name,))
|
||||
prnt(' return (%s) <= 0;' % (name,))
|
||||
prnt('}')
|
||||
else:
|
||||
assert tp is not None
|
||||
assert check_value is None
|
||||
if category == 'var':
|
||||
ampersand = '&'
|
||||
else:
|
||||
ampersand = ''
|
||||
extra = ''
|
||||
if category == 'const' and isinstance(tp, model.StructOrUnion):
|
||||
extra = 'const *'
|
||||
ampersand = '&'
|
||||
prnt(tp.get_c_name(' %s%s(void)' % (extra, funcname), name))
|
||||
prnt('{')
|
||||
prnt(' return (%s%s);' % (ampersand, name))
|
||||
prnt('}')
|
||||
prnt()
|
||||
|
||||
def _generate_gen_constant_decl(self, tp, name):
|
||||
is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type()
|
||||
self._generate_gen_const(is_int, name, tp)
|
||||
|
||||
_loading_gen_constant = _loaded_noop
|
||||
|
||||
def _load_constant(self, is_int, tp, name, module, check_value=None):
|
||||
funcname = '_cffi_const_%s' % name
|
||||
if check_value is not None:
|
||||
assert is_int
|
||||
self._load_known_int_constant(module, funcname)
|
||||
value = check_value
|
||||
elif is_int:
|
||||
BType = self.ffi._typeof_locked("long long*")[0]
|
||||
BFunc = self.ffi._typeof_locked("int(*)(long long*)")[0]
|
||||
function = module.load_function(BFunc, funcname)
|
||||
p = self.ffi.new(BType)
|
||||
negative = function(p)
|
||||
value = int(p[0])
|
||||
if value < 0 and not negative:
|
||||
BLongLong = self.ffi._typeof_locked("long long")[0]
|
||||
value += (1 << (8*self.ffi.sizeof(BLongLong)))
|
||||
else:
|
||||
assert check_value is None
|
||||
fntypeextra = '(*)(void)'
|
||||
if isinstance(tp, model.StructOrUnion):
|
||||
fntypeextra = '*' + fntypeextra
|
||||
BFunc = self.ffi._typeof_locked(tp.get_c_name(fntypeextra, name))[0]
|
||||
function = module.load_function(BFunc, funcname)
|
||||
value = function()
|
||||
if isinstance(tp, model.StructOrUnion):
|
||||
value = value[0]
|
||||
return value
|
||||
|
||||
def _loaded_gen_constant(self, tp, name, module, library):
|
||||
is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type()
|
||||
value = self._load_constant(is_int, tp, name, module)
|
||||
setattr(library, name, value)
|
||||
type(library)._cffi_dir.append(name)
|
||||
|
||||
# ----------
|
||||
# enums
|
||||
|
||||
def _check_int_constant_value(self, name, value):
|
||||
prnt = self._prnt
|
||||
if value <= 0:
|
||||
prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % (
|
||||
name, name, value))
|
||||
else:
|
||||
prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % (
|
||||
name, name, value))
|
||||
prnt(' char buf[64];')
|
||||
prnt(' if ((%s) <= 0)' % name)
|
||||
prnt(' sprintf(buf, "%%ld", (long)(%s));' % name)
|
||||
prnt(' else')
|
||||
prnt(' sprintf(buf, "%%lu", (unsigned long)(%s));' %
|
||||
name)
|
||||
prnt(' sprintf(out_error, "%s has the real value %s, not %s",')
|
||||
prnt(' "%s", buf, "%d");' % (name[:100], value))
|
||||
prnt(' return -1;')
|
||||
prnt(' }')
|
||||
|
||||
def _load_known_int_constant(self, module, funcname):
|
||||
BType = self.ffi._typeof_locked("char[]")[0]
|
||||
BFunc = self.ffi._typeof_locked("int(*)(char*)")[0]
|
||||
function = module.load_function(BFunc, funcname)
|
||||
p = self.ffi.new(BType, 256)
|
||||
if function(p) < 0:
|
||||
error = self.ffi.string(p)
|
||||
if sys.version_info >= (3,):
|
||||
error = str(error, 'utf-8')
|
||||
            raise VerificationError(error)

    def _enum_funcname(self, prefix, name):
        # "$enum_$1" => "___D_enum____D_1"
        name = name.replace('$', '___D_')
        return '_cffi_e_%s_%s' % (prefix, name)

    def _generate_gen_enum_decl(self, tp, name, prefix='enum'):
        if tp.partial:
            for enumerator in tp.enumerators:
                self._generate_gen_const(True, enumerator)
            return
        #
        funcname = self._enum_funcname(prefix, name)
        self.export_symbols.append(funcname)
        prnt = self._prnt
        prnt('int %s(char *out_error)' % funcname)
        prnt('{')
        for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
            self._check_int_constant_value(enumerator, enumvalue)
        prnt('  return 0;')
        prnt('}')
        prnt()

    def _loading_gen_enum(self, tp, name, module, prefix='enum'):
        if tp.partial:
            enumvalues = [self._load_constant(True, tp, enumerator, module)
                          for enumerator in tp.enumerators]
            tp.enumvalues = tuple(enumvalues)
            tp.partial_resolved = True
        else:
            funcname = self._enum_funcname(prefix, name)
            self._load_known_int_constant(module, funcname)

    def _loaded_gen_enum(self, tp, name, module, library):
        for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
            setattr(library, enumerator, enumvalue)
            type(library)._cffi_dir.append(enumerator)

    # ----------
    # macros: for now only for integers

    def _generate_gen_macro_decl(self, tp, name):
        if tp == '...':
            check_value = None
        else:
            check_value = tp     # an integer
        self._generate_gen_const(True, name, check_value=check_value)

    _loading_gen_macro = _loaded_noop

    def _loaded_gen_macro(self, tp, name, module, library):
        if tp == '...':
            check_value = None
        else:
            check_value = tp     # an integer
        value = self._load_constant(True, tp, name, module,
                                    check_value=check_value)
        setattr(library, name, value)
        type(library)._cffi_dir.append(name)

    # ----------
    # global variables

    def _generate_gen_variable_decl(self, tp, name):
        if isinstance(tp, model.ArrayType):
            if tp.length == '...':
                prnt = self._prnt
                funcname = '_cffi_sizeof_%s' % (name,)
                self.export_symbols.append(funcname)
                prnt("size_t %s(void)" % funcname)
                prnt("{")
                prnt("  return sizeof(%s);" % (name,))
                prnt("}")
            tp_ptr = model.PointerType(tp.item)
            self._generate_gen_const(False, name, tp_ptr)
        else:
            tp_ptr = model.PointerType(tp)
            self._generate_gen_const(False, name, tp_ptr, category='var')

    _loading_gen_variable = _loaded_noop

    def _loaded_gen_variable(self, tp, name, module, library):
        if isinstance(tp, model.ArrayType):   # int a[5] is "constant" in the
                                              # sense that "a=..." is forbidden
            if tp.length == '...':
                funcname = '_cffi_sizeof_%s' % (name,)
                BFunc = self.ffi._typeof_locked('size_t(*)(void)')[0]
                function = module.load_function(BFunc, funcname)
                size = function()
                BItemType = self.ffi._get_cached_btype(tp.item)
                length, rest = divmod(size, self.ffi.sizeof(BItemType))
                if rest != 0:
                    raise VerificationError(
                        "bad size: %r does not seem to be an array of %s" %
                        (name, tp.item))
                tp = tp.resolve_length(length)
            tp_ptr = model.PointerType(tp.item)
            value = self._load_constant(False, tp_ptr, name, module)
            # 'value' is a <cdata 'type *'> which we have to replace with
            # a <cdata 'type[N]'> if the N is actually known
            if tp.length is not None:
                BArray = self.ffi._get_cached_btype(tp)
                value = self.ffi.cast(BArray, value)
            setattr(library, name, value)
            type(library)._cffi_dir.append(name)
            return
        # remove ptr=<cdata 'int *'> from the library instance, and replace
        # it by a property on the class, which reads/writes into ptr[0].
        funcname = '_cffi_var_%s' % name
        BFunc = self.ffi._typeof_locked(tp.get_c_name('*(*)(void)', name))[0]
        function = module.load_function(BFunc, funcname)
        ptr = function()
        def getter(library):
            return ptr[0]
        def setter(library, value):
            ptr[0] = value
        setattr(type(library), name, property(getter, setter))
        type(library)._cffi_dir.append(name)


cffimod_header = r'''
#include <stdio.h>
#include <stddef.h>
#include <stdarg.h>
#include <errno.h>
#include <sys/types.h>   /* XXX for ssize_t on some platforms */

/* this block of #ifs should be kept exactly identical between
   c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py
   and cffi/_cffi_include.h */
#if defined(_MSC_VER)
# include <malloc.h>   /* for alloca() */
# if _MSC_VER < 1600   /* MSVC < 2010 */
   typedef __int8 int8_t;
   typedef __int16 int16_t;
   typedef __int32 int32_t;
   typedef __int64 int64_t;
   typedef unsigned __int8 uint8_t;
   typedef unsigned __int16 uint16_t;
   typedef unsigned __int32 uint32_t;
   typedef unsigned __int64 uint64_t;
   typedef __int8 int_least8_t;
   typedef __int16 int_least16_t;
   typedef __int32 int_least32_t;
   typedef __int64 int_least64_t;
   typedef unsigned __int8 uint_least8_t;
   typedef unsigned __int16 uint_least16_t;
   typedef unsigned __int32 uint_least32_t;
   typedef unsigned __int64 uint_least64_t;
   typedef __int8 int_fast8_t;
   typedef __int16 int_fast16_t;
   typedef __int32 int_fast32_t;
   typedef __int64 int_fast64_t;
   typedef unsigned __int8 uint_fast8_t;
   typedef unsigned __int16 uint_fast16_t;
   typedef unsigned __int32 uint_fast32_t;
   typedef unsigned __int64 uint_fast64_t;
   typedef __int64 intmax_t;
   typedef unsigned __int64 uintmax_t;
# else
#  include <stdint.h>
# endif
# if _MSC_VER < 1800   /* MSVC < 2013 */
#  ifndef __cplusplus
    typedef unsigned char _Bool;
#  endif
# endif
#else
# include <stdint.h>
# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux)
#  include <alloca.h>
# endif
#endif
'''
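A minimal usage sketch, not part of the commit, of what the generic engine above ultimately enables: ffi.verify() compiles the preamble together with the generated _cffi_* helper functions, and macros and global variables then appear on the returned library object. The cdef and preamble strings here are illustrative, not taken from this repository:

import cffi

ffi = cffi.FFI()
ffi.cdef("""
    #define FORTY_TWO ...
    int global_counter;
""")
lib = ffi.verify("""
    #define FORTY_TWO 42
    int global_counter = 7;
""")
assert lib.FORTY_TWO == 42    # integer macro, checked through a generated helper
lib.global_counter += 1       # global variable, read/written via a class property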
@ -0,0 +1,306 @@
#
# DEPRECATED: implementation for ffi.verify()
#
import sys, os, binascii, shutil, io
from . import __version_verifier_modules__
from . import ffiplatform
from .error import VerificationError

if sys.version_info >= (3, 3):
    import importlib.machinery
    def _extension_suffixes():
        return importlib.machinery.EXTENSION_SUFFIXES[:]
else:
    import imp
    def _extension_suffixes():
        return [suffix for suffix, _, type in imp.get_suffixes()
                if type == imp.C_EXTENSION]


if sys.version_info >= (3,):
    NativeIO = io.StringIO
else:
    class NativeIO(io.BytesIO):
        def write(self, s):
            if isinstance(s, unicode):
                s = s.encode('ascii')
            super(NativeIO, self).write(s)


class Verifier(object):

    def __init__(self, ffi, preamble, tmpdir=None, modulename=None,
                 ext_package=None, tag='', force_generic_engine=False,
                 source_extension='.c', flags=None, relative_to=None, **kwds):
        if ffi._parser._uses_new_feature:
            raise VerificationError(
                "feature not supported with ffi.verify(), but only "
                "with ffi.set_source(): %s" % (ffi._parser._uses_new_feature,))
        self.ffi = ffi
        self.preamble = preamble
        if not modulename:
            flattened_kwds = ffiplatform.flatten(kwds)
        vengine_class = _locate_engine_class(ffi, force_generic_engine)
        self._vengine = vengine_class(self)
        self._vengine.patch_extension_kwds(kwds)
        self.flags = flags
        self.kwds = self.make_relative_to(kwds, relative_to)
        #
        if modulename:
            if tag:
                raise TypeError("can't specify both 'modulename' and 'tag'")
        else:
            key = '\x00'.join([sys.version[:3], __version_verifier_modules__,
                               preamble, flattened_kwds] +
                              ffi._cdefsources)
            if sys.version_info >= (3,):
                key = key.encode('utf-8')
            k1 = hex(binascii.crc32(key[0::2]) & 0xffffffff)
            k1 = k1.lstrip('0x').rstrip('L')
            k2 = hex(binascii.crc32(key[1::2]) & 0xffffffff)
            k2 = k2.lstrip('0').rstrip('L')
            modulename = '_cffi_%s_%s%s%s' % (tag, self._vengine._class_key,
                                              k1, k2)
        suffix = _get_so_suffixes()[0]
        self.tmpdir = tmpdir or _caller_dir_pycache()
        self.sourcefilename = os.path.join(self.tmpdir, modulename + source_extension)
        self.modulefilename = os.path.join(self.tmpdir, modulename + suffix)
        self.ext_package = ext_package
        self._has_source = False
        self._has_module = False

    def write_source(self, file=None):
        """Write the C source code.  It is produced in 'self.sourcefilename',
        which can be tweaked beforehand."""
        with self.ffi._lock:
            if self._has_source and file is None:
                raise VerificationError(
                    "source code already written")
            self._write_source(file)

    def compile_module(self):
        """Write the C source code (if not done already) and compile it.
        This produces a dynamic link library in 'self.modulefilename'."""
        with self.ffi._lock:
            if self._has_module:
                raise VerificationError("module already compiled")
            if not self._has_source:
                self._write_source()
            self._compile_module()

    def load_library(self):
        """Get a C module from this Verifier instance.
        Returns an instance of a FFILibrary class that behaves like the
        objects returned by ffi.dlopen(), but that delegates all
        operations to the C module.  If necessary, the C code is written
        and compiled first.
        """
        with self.ffi._lock:
            if not self._has_module:
                self._locate_module()
                if not self._has_module:
                    if not self._has_source:
                        self._write_source()
                    self._compile_module()
            return self._load_library()

    def get_module_name(self):
        basename = os.path.basename(self.modulefilename)
        # kill both the .so extension and the other .'s, as introduced
        # by Python 3: 'basename.cpython-33m.so'
        basename = basename.split('.', 1)[0]
        # and the _d added in Python 2 debug builds --- but try to be
        # conservative and not kill a legitimate _d
        if basename.endswith('_d') and hasattr(sys, 'gettotalrefcount'):
            basename = basename[:-2]
        return basename

    def get_extension(self):
        ffiplatform._hack_at_distutils()  # backward compatibility hack
        if not self._has_source:
            with self.ffi._lock:
                if not self._has_source:
                    self._write_source()
        sourcename = ffiplatform.maybe_relative_path(self.sourcefilename)
        modname = self.get_module_name()
        return ffiplatform.get_extension(sourcename, modname, **self.kwds)

    def generates_python_module(self):
        return self._vengine._gen_python_module

    def make_relative_to(self, kwds, relative_to):
        if relative_to and os.path.dirname(relative_to):
            dirname = os.path.dirname(relative_to)
            kwds = kwds.copy()
            for key in ffiplatform.LIST_OF_FILE_NAMES:
                if key in kwds:
                    lst = kwds[key]
                    if not isinstance(lst, (list, tuple)):
                        raise TypeError("keyword '%s' should be a list or tuple"
                                        % (key,))
                    lst = [os.path.join(dirname, fn) for fn in lst]
                    kwds[key] = lst
        return kwds

    # ----------

    def _locate_module(self):
        if not os.path.isfile(self.modulefilename):
            if self.ext_package:
                try:
                    pkg = __import__(self.ext_package, None, None, ['__doc__'])
                except ImportError:
                    return      # cannot import the package itself, give up
                    # (e.g. it might be called differently before installation)
                path = pkg.__path__
            else:
                path = None
            filename = self._vengine.find_module(self.get_module_name(), path,
                                                 _get_so_suffixes())
            if filename is None:
                return
            self.modulefilename = filename
        self._vengine.collect_types()
        self._has_module = True

    def _write_source_to(self, file):
        self._vengine._f = file
        try:
            self._vengine.write_source_to_f()
        finally:
            del self._vengine._f

    def _write_source(self, file=None):
        if file is not None:
            self._write_source_to(file)
        else:
            # Write our source file to an in memory file.
            f = NativeIO()
            self._write_source_to(f)
            source_data = f.getvalue()

            # Determine if this matches the current file
            if os.path.exists(self.sourcefilename):
                with open(self.sourcefilename, "r") as fp:
                    needs_written = not (fp.read() == source_data)
            else:
                needs_written = True

            # Actually write the file out if it doesn't match
            if needs_written:
                _ensure_dir(self.sourcefilename)
                with open(self.sourcefilename, "w") as fp:
                    fp.write(source_data)

            # Set this flag
            self._has_source = True

    def _compile_module(self):
        # compile this C source
        tmpdir = os.path.dirname(self.sourcefilename)
        outputfilename = ffiplatform.compile(tmpdir, self.get_extension())
        try:
            same = ffiplatform.samefile(outputfilename, self.modulefilename)
        except OSError:
            same = False
        if not same:
            _ensure_dir(self.modulefilename)
            shutil.move(outputfilename, self.modulefilename)
        self._has_module = True

    def _load_library(self):
        assert self._has_module
        if self.flags is not None:
            return self._vengine.load_library(self.flags)
        else:
            return self._vengine.load_library()

# ____________________________________________________________

_FORCE_GENERIC_ENGINE = False      # for tests

def _locate_engine_class(ffi, force_generic_engine):
    if _FORCE_GENERIC_ENGINE:
        force_generic_engine = True
    if not force_generic_engine:
        if '__pypy__' in sys.builtin_module_names:
            force_generic_engine = True
        else:
            try:
                import _cffi_backend
            except ImportError:
                _cffi_backend = '?'
            if ffi._backend is not _cffi_backend:
                force_generic_engine = True
    if force_generic_engine:
        from . import vengine_gen
        return vengine_gen.VGenericEngine
    else:
        from . import vengine_cpy
        return vengine_cpy.VCPythonEngine

# ____________________________________________________________

_TMPDIR = None

def _caller_dir_pycache():
    if _TMPDIR:
        return _TMPDIR
    result = os.environ.get('CFFI_TMPDIR')
    if result:
        return result
    filename = sys._getframe(2).f_code.co_filename
    return os.path.abspath(os.path.join(os.path.dirname(filename),
                                        '__pycache__'))

def set_tmpdir(dirname):
    """Set the temporary directory to use instead of __pycache__."""
    global _TMPDIR
    _TMPDIR = dirname

def cleanup_tmpdir(tmpdir=None, keep_so=False):
    """Clean up the temporary directory by removing all files in it
    called `_cffi_*.{c,so}` as well as the `build` subdirectory."""
    tmpdir = tmpdir or _caller_dir_pycache()
    try:
        filelist = os.listdir(tmpdir)
    except OSError:
        return
    if keep_so:
        suffix = '.c'   # only remove .c files
    else:
        suffix = _get_so_suffixes()[0].lower()
    for fn in filelist:
        if fn.lower().startswith('_cffi_') and (
                fn.lower().endswith(suffix) or fn.lower().endswith('.c')):
            try:
                os.unlink(os.path.join(tmpdir, fn))
            except OSError:
                pass
    clean_dir = [os.path.join(tmpdir, 'build')]
    for dir in clean_dir:
        try:
            for fn in os.listdir(dir):
                fn = os.path.join(dir, fn)
                if os.path.isdir(fn):
                    clean_dir.append(fn)
                else:
                    os.unlink(fn)
        except OSError:
            pass

def _get_so_suffixes():
    suffixes = _extension_suffixes()
    if not suffixes:
        # bah, no C_EXTENSION available.  Occurs on pypy without cpyext
        if sys.platform == 'win32':
            suffixes = [".pyd"]
        else:
            suffixes = [".so"]

    return suffixes

def _ensure_dir(filename):
    dirname = os.path.dirname(filename)
    if dirname and not os.path.isdir(dirname):
        os.makedirs(dirname)
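A hedged sketch, not part of the commit, of driving the Verifier class above by hand instead of going through ffi.verify(); the cdef, preamble and tmpdir values are illustrative:

import cffi
from cffi.verifier import Verifier

ffi = cffi.FFI()
ffi.cdef("int add(int, int);")
v = Verifier(ffi, "int add(int a, int b) { return a + b; }",
             tmpdir="/tmp/_cffi_demo")    # illustrative tmpdir choice
lib = v.load_library()    # locates, or writes + compiles, then dlopens the module
assert lib.add(2, 3) == 5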
@ -0,0 +1,2 @@
from .package_data import __version__
from .core import *
@ -0,0 +1,118 @@
from .core import encode, decode, alabel, ulabel, IDNAError
import codecs
import re

_unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]')

class Codec(codecs.Codec):

    def encode(self, data, errors='strict'):

        if errors != 'strict':
            raise IDNAError("Unsupported error handling \"{0}\"".format(errors))

        if not data:
            return "", 0

        return encode(data), len(data)

    def decode(self, data, errors='strict'):

        if errors != 'strict':
            raise IDNAError("Unsupported error handling \"{0}\"".format(errors))

        if not data:
            return u"", 0

        return decode(data), len(data)

class IncrementalEncoder(codecs.BufferedIncrementalEncoder):
    def _buffer_encode(self, data, errors, final):
        if errors != 'strict':
            raise IDNAError("Unsupported error handling \"{0}\"".format(errors))

        if not data:
            return ("", 0)

        labels = _unicode_dots_re.split(data)
        trailing_dot = u''
        if labels:
            if not labels[-1]:
                trailing_dot = '.'
                del labels[-1]
            elif not final:
                # Keep potentially unfinished label until the next call
                del labels[-1]
                if labels:
                    trailing_dot = '.'

        result = []
        size = 0
        for label in labels:
            result.append(alabel(label))
            if size:
                size += 1
            size += len(label)

        # Join with U+002E
        result = ".".join(result) + trailing_dot
        size += len(trailing_dot)
        return (result, size)

class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
    def _buffer_decode(self, data, errors, final):
        if errors != 'strict':
            raise IDNAError("Unsupported error handling \"{0}\"".format(errors))

        if not data:
            return (u"", 0)

        # IDNA allows decoding to operate on Unicode strings, too.
        if isinstance(data, unicode):
            labels = _unicode_dots_re.split(data)
        else:
            # Must be ASCII string
            data = str(data)
            unicode(data, "ascii")
            labels = data.split(".")

        trailing_dot = u''
        if labels:
            if not labels[-1]:
                trailing_dot = u'.'
                del labels[-1]
            elif not final:
                # Keep potentially unfinished label until the next call
                del labels[-1]
                if labels:
                    trailing_dot = u'.'

        result = []
        size = 0
        for label in labels:
            result.append(ulabel(label))
            if size:
                size += 1
            size += len(label)

        result = u".".join(result) + trailing_dot
        size += len(trailing_dot)
        return (result, size)


class StreamWriter(Codec, codecs.StreamWriter):
    pass

class StreamReader(Codec, codecs.StreamReader):
    pass

def getregentry():
    return codecs.CodecInfo(
        name='idna',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamwriter=StreamWriter,
        streamreader=StreamReader,
    )
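A hedged sketch, not part of the commit, exercising the Codec class above directly (a lookup through the codecs registry under the name 'idna' would normally resolve to the stdlib's builtin IDNA 2003 codec instead):

import idna.codec

codec = idna.codec.Codec()
encoded, consumed = codec.encode(u'\u30c6\u30b9\u30c8.example')   # "テスト.example"
print(encoded)    # b'xn--zckzah.example'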
@ -0,0 +1,12 @@
from .core import *
from .codec import *

def ToASCII(label):
    return encode(label)

def ToUnicode(label):
    return decode(label)

def nameprep(s):
    raise NotImplementedError("IDNA 2008 does not utilise nameprep protocol")
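A hedged sketch of the IDNA2003-style compatibility wrappers defined above:

import idna.compat

print(idna.compat.ToASCII(u'm\u00fcller.example'))      # b'xn--mller-kva.example'
print(idna.compat.ToUnicode(b'xn--mller-kva.example'))  # u'm\u00fcller.example'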
@ -0,0 +1,396 @@
from . import idnadata
import bisect
import unicodedata
import re
import sys
from .intranges import intranges_contain

_virama_combining_class = 9
_alabel_prefix = b'xn--'
_unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]')

if sys.version_info[0] == 3:
    unicode = str
    unichr = chr

class IDNAError(UnicodeError):
    """ Base exception for all IDNA-encoding related problems """
    pass


class IDNABidiError(IDNAError):
    """ Exception when bidirectional requirements are not satisfied """
    pass


class InvalidCodepoint(IDNAError):
    """ Exception when a disallowed or unallocated codepoint is used """
    pass


class InvalidCodepointContext(IDNAError):
    """ Exception when the codepoint is not valid in the context it is used """
    pass


def _combining_class(cp):
    v = unicodedata.combining(unichr(cp))
    if v == 0:
        if not unicodedata.name(unichr(cp)):
            raise ValueError("Unknown character in unicodedata")
    return v

def _is_script(cp, script):
    return intranges_contain(ord(cp), idnadata.scripts[script])

def _punycode(s):
    return s.encode('punycode')

def _unot(s):
    return 'U+{0:04X}'.format(s)


def valid_label_length(label):

    if len(label) > 63:
        return False
    return True


def valid_string_length(label, trailing_dot):

    if len(label) > (254 if trailing_dot else 253):
        return False
    return True


def check_bidi(label, check_ltr=False):

    # Bidi rules should only be applied if string contains RTL characters
    bidi_label = False
    for (idx, cp) in enumerate(label, 1):
        direction = unicodedata.bidirectional(cp)
        if direction == '':
            # String likely comes from a newer version of Unicode
            raise IDNABidiError('Unknown directionality in label {0} at position {1}'.format(repr(label), idx))
        if direction in ['R', 'AL', 'AN']:
            bidi_label = True
    if not bidi_label and not check_ltr:
        return True

    # Bidi rule 1
    direction = unicodedata.bidirectional(label[0])
    if direction in ['R', 'AL']:
        rtl = True
    elif direction == 'L':
        rtl = False
    else:
        raise IDNABidiError('First codepoint in label {0} must be directionality L, R or AL'.format(repr(label)))

    valid_ending = False
    number_type = False
    for (idx, cp) in enumerate(label, 1):
        direction = unicodedata.bidirectional(cp)

        if rtl:
            # Bidi rule 2
            if not direction in ['R', 'AL', 'AN', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']:
                raise IDNABidiError('Invalid direction for codepoint at position {0} in a right-to-left label'.format(idx))
            # Bidi rule 3
            if direction in ['R', 'AL', 'EN', 'AN']:
                valid_ending = True
            elif direction != 'NSM':
                valid_ending = False
            # Bidi rule 4
            if direction in ['AN', 'EN']:
                if not number_type:
                    number_type = direction
                else:
                    if number_type != direction:
                        raise IDNABidiError('Can not mix numeral types in a right-to-left label')
        else:
            # Bidi rule 5
            if not direction in ['L', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']:
                raise IDNABidiError('Invalid direction for codepoint at position {0} in a left-to-right label'.format(idx))
            # Bidi rule 6
            if direction in ['L', 'EN']:
                valid_ending = True
            elif direction != 'NSM':
                valid_ending = False

    if not valid_ending:
        raise IDNABidiError('Label ends with illegal codepoint directionality')

    return True


def check_initial_combiner(label):

    if unicodedata.category(label[0])[0] == 'M':
        raise IDNAError('Label begins with an illegal combining character')
    return True


def check_hyphen_ok(label):

    if label[2:4] == '--':
        raise IDNAError('Label has disallowed hyphens in 3rd and 4th position')
    if label[0] == '-' or label[-1] == '-':
        raise IDNAError('Label must not start or end with a hyphen')
    return True


def check_nfc(label):

    if unicodedata.normalize('NFC', label) != label:
        raise IDNAError('Label must be in Normalization Form C')


def valid_contextj(label, pos):

    cp_value = ord(label[pos])

    if cp_value == 0x200c:

        if pos > 0:
            if _combining_class(ord(label[pos - 1])) == _virama_combining_class:
                return True

        ok = False
        for i in range(pos-1, -1, -1):
            joining_type = idnadata.joining_types.get(ord(label[i]))
            if joining_type == ord('T'):
                continue
            if joining_type in [ord('L'), ord('D')]:
                ok = True
                break

        if not ok:
            return False

        ok = False
        for i in range(pos+1, len(label)):
            joining_type = idnadata.joining_types.get(ord(label[i]))
            if joining_type == ord('T'):
                continue
            if joining_type in [ord('R'), ord('D')]:
                ok = True
                break
        return ok

    if cp_value == 0x200d:

        if pos > 0:
            if _combining_class(ord(label[pos - 1])) == _virama_combining_class:
                return True
        return False

    else:

        return False


def valid_contexto(label, pos, exception=False):

    cp_value = ord(label[pos])

    if cp_value == 0x00b7:
        if 0 < pos < len(label)-1:
            if ord(label[pos - 1]) == 0x006c and ord(label[pos + 1]) == 0x006c:
                return True
        return False

    elif cp_value == 0x0375:
        if pos < len(label)-1 and len(label) > 1:
            return _is_script(label[pos + 1], 'Greek')
        return False

    elif cp_value == 0x05f3 or cp_value == 0x05f4:
        if pos > 0:
            return _is_script(label[pos - 1], 'Hebrew')
        return False

    elif cp_value == 0x30fb:
        for cp in label:
            if cp == u'\u30fb':
                continue
            if _is_script(cp, 'Hiragana') or _is_script(cp, 'Katakana') or _is_script(cp, 'Han'):
                return True
        return False

    elif 0x660 <= cp_value <= 0x669:
        for cp in label:
            if 0x6f0 <= ord(cp) <= 0x06f9:
                return False
        return True

    elif 0x6f0 <= cp_value <= 0x6f9:
        for cp in label:
            if 0x660 <= ord(cp) <= 0x0669:
                return False
        return True


def check_label(label):

    if isinstance(label, (bytes, bytearray)):
        label = label.decode('utf-8')
    if len(label) == 0:
        raise IDNAError('Empty Label')

    check_nfc(label)
    check_hyphen_ok(label)
    check_initial_combiner(label)

    for (pos, cp) in enumerate(label):
        cp_value = ord(cp)
        if intranges_contain(cp_value, idnadata.codepoint_classes['PVALID']):
            continue
        elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTJ']):
            try:
                if not valid_contextj(label, pos):
                    raise InvalidCodepointContext('Joiner {0} not allowed at position {1} in {2}'.format(
                        _unot(cp_value), pos+1, repr(label)))
            except ValueError:
                raise IDNAError('Unknown codepoint adjacent to joiner {0} at position {1} in {2}'.format(
                    _unot(cp_value), pos+1, repr(label)))
        elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTO']):
            if not valid_contexto(label, pos):
                raise InvalidCodepointContext('Codepoint {0} not allowed at position {1} in {2}'.format(_unot(cp_value), pos+1, repr(label)))
        else:
            raise InvalidCodepoint('Codepoint {0} at position {1} of {2} not allowed'.format(_unot(cp_value), pos+1, repr(label)))

    check_bidi(label)


def alabel(label):

    try:
        label = label.encode('ascii')
        ulabel(label)
        if not valid_label_length(label):
            raise IDNAError('Label too long')
        return label
    except UnicodeEncodeError:
        pass

    if not label:
        raise IDNAError('No Input')

    label = unicode(label)
    check_label(label)
    label = _punycode(label)
    label = _alabel_prefix + label

    if not valid_label_length(label):
        raise IDNAError('Label too long')

    return label


def ulabel(label):

    if not isinstance(label, (bytes, bytearray)):
        try:
            label = label.encode('ascii')
        except UnicodeEncodeError:
            check_label(label)
            return label

    label = label.lower()
    if label.startswith(_alabel_prefix):
        label = label[len(_alabel_prefix):]
    else:
        check_label(label)
        return label.decode('ascii')

    label = label.decode('punycode')
    check_label(label)
    return label


def uts46_remap(domain, std3_rules=True, transitional=False):
    """Re-map the characters in the string according to UTS46 processing."""
    from .uts46data import uts46data
    output = u""
    try:
        for pos, char in enumerate(domain):
            code_point = ord(char)
            uts46row = uts46data[code_point if code_point < 256 else
                bisect.bisect_left(uts46data, (code_point, "Z")) - 1]
            status = uts46row[1]
            replacement = uts46row[2] if len(uts46row) == 3 else None
            if (status == "V" or
                    (status == "D" and not transitional) or
                    (status == "3" and not std3_rules and replacement is None)):
                output += char
            elif replacement is not None and (status == "M" or
                    (status == "3" and not std3_rules) or
                    (status == "D" and transitional)):
                output += replacement
            elif status != "I":
                raise IndexError()
        return unicodedata.normalize("NFC", output)
    except IndexError:
        raise InvalidCodepoint(
            "Codepoint {0} not allowed at position {1} in {2}".format(
                _unot(code_point), pos + 1, repr(domain)))


def encode(s, strict=False, uts46=False, std3_rules=False, transitional=False):

    if isinstance(s, (bytes, bytearray)):
        s = s.decode("ascii")
    if uts46:
        s = uts46_remap(s, std3_rules, transitional)
    trailing_dot = False
    result = []
    if strict:
        labels = s.split('.')
    else:
        labels = _unicode_dots_re.split(s)
    if not labels or labels == ['']:
        raise IDNAError('Empty domain')
    if labels[-1] == '':
        del labels[-1]
        trailing_dot = True
    for label in labels:
        s = alabel(label)
        if s:
            result.append(s)
        else:
            raise IDNAError('Empty label')
    if trailing_dot:
        result.append(b'')
    s = b'.'.join(result)
    if not valid_string_length(s, trailing_dot):
        raise IDNAError('Domain too long')
    return s


def decode(s, strict=False, uts46=False, std3_rules=False):

    if isinstance(s, (bytes, bytearray)):
        s = s.decode("ascii")
    if uts46:
        s = uts46_remap(s, std3_rules, False)
    trailing_dot = False
    result = []
    if not strict:
        labels = _unicode_dots_re.split(s)
    else:
        labels = s.split(u'.')
    if not labels or labels == ['']:
        raise IDNAError('Empty domain')
    if not labels[-1]:
        del labels[-1]
        trailing_dot = True
    for label in labels:
        s = ulabel(label)
        if s:
            result.append(s)
        else:
            raise IDNAError('Empty label')
    if trailing_dot:
        result.append(u'')
    return u'.'.join(result)
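A hedged sketch, not part of the commit, of the public encode()/decode() entry points defined above:

import idna

print(idna.encode(u'\u0440\u0444.example'))    # b'xn--p1ai.example'
print(idna.decode(b'xn--p1ai.example'))        # u'\u0440\u0444.example'
# uts46=True first remaps input (e.g. lowercasing) that strict IDNA 2008 rejects:
print(idna.encode(u'K\u00f6nig.example', uts46=True))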
File diff suppressed because it is too large
@ -0,0 +1,53 @@
"""
Given a list of integers, made up of (hopefully) a small number of long runs
of consecutive integers, compute a representation of the form
((start1, end1), (start2, end2) ...). Then answer the question "was x present
in the original list?" in time O(log(# runs)).
"""

import bisect

def intranges_from_list(list_):
    """Represent a list of integers as a sequence of ranges:
    ((start_0, end_0), (start_1, end_1), ...), such that the original
    integers are exactly those x such that start_i <= x < end_i for some i.

    Ranges are encoded as single integers (start << 32 | end), not as tuples.
    """

    sorted_list = sorted(list_)
    ranges = []
    last_write = -1
    for i in range(len(sorted_list)):
        if i+1 < len(sorted_list):
            if sorted_list[i] == sorted_list[i+1]-1:
                continue
        current_range = sorted_list[last_write+1:i+1]
        ranges.append(_encode_range(current_range[0], current_range[-1] + 1))
        last_write = i

    return tuple(ranges)

def _encode_range(start, end):
    return (start << 32) | end

def _decode_range(r):
    return (r >> 32), (r & ((1 << 32) - 1))


def intranges_contain(int_, ranges):
    """Determine if `int_` falls into one of the ranges in `ranges`."""
    tuple_ = _encode_range(int_, 0)
    pos = bisect.bisect_left(ranges, tuple_)
    # we could be immediately ahead of a tuple (start, end)
    # with start < int_ <= end
    if pos > 0:
        left, right = _decode_range(ranges[pos-1])
        if left <= int_ < right:
            return True
    # or we could be immediately behind a tuple (int_, end)
    if pos < len(ranges):
        left, _ = _decode_range(ranges[pos])
        if left == int_:
            return True
    return False
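A hedged sketch of how these helpers behave; core.py uses them to query the codepoint-class tables in idnadata:

from idna.intranges import intranges_from_list, intranges_contain

ranges = intranges_from_list([1, 2, 3, 10, 11, 12, 40])
assert intranges_contain(2, ranges)        # inside the run 1..3
assert not intranges_contain(13, ranges)   # just past the run 10..12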
@ -0,0 +1,2 @@
__version__ = '2.8'
File diff suppressed because it is too large