diff --git a/.gitignore b/.gitignore
index 485dee6..1f1025f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1 +1,2 @@
.idea
+.DS_Store
\ No newline at end of file
diff --git a/ci/defaults.json b/ci/defaults.json
index 081b0b9..16c5d99 100644
--- a/ci/defaults.json
+++ b/ci/defaults.json
@@ -7,18 +7,6 @@
"ParameterValue": "",
"ParameterKey": "ApiSecret"
},
- {
- "ParameterValue": "",
- "ParameterKey": "GitToken"
- },
- {
- "ParameterValue": "",
- "ParameterKey": "OauthKey"
- },
- {
- "ParameterValue": "",
- "ParameterKey": "OauthSecret"
- },
{
"ParameterValue": "",
"ParameterKey": "OutputBucketName"
diff --git a/functions/packages/CreateSSHKey/lambda.zip b/functions/packages/CreateSSHKey/lambda.zip
index de1eda1..750efd1 100644
Binary files a/functions/packages/CreateSSHKey/lambda.zip and b/functions/packages/CreateSSHKey/lambda.zip differ
diff --git a/functions/packages/DeleteBucketContents/lambda.zip b/functions/packages/DeleteBucketContents/lambda.zip
index d021a2d..7c22fdc 100644
Binary files a/functions/packages/DeleteBucketContents/lambda.zip and b/functions/packages/DeleteBucketContents/lambda.zip differ
diff --git a/functions/packages/GitPullS3/lambda.zip b/functions/packages/GitPullS3/lambda.zip
index f21509d..114bad4 100644
Binary files a/functions/packages/GitPullS3/lambda.zip and b/functions/packages/GitPullS3/lambda.zip differ
diff --git a/functions/packages/ZipDl/lambda.zip b/functions/packages/ZipDl/lambda.zip
deleted file mode 100644
index e827f23..0000000
Binary files a/functions/packages/ZipDl/lambda.zip and /dev/null differ
diff --git a/functions/source/CreateSSHKey/.libs_cffi_backend/libffi-ce7fcc27.so.6.0.4 b/functions/source/CreateSSHKey/.libs_cffi_backend/libffi-ce7fcc27.so.6.0.4
deleted file mode 100755
index 0e07e1e..0000000
Binary files a/functions/source/CreateSSHKey/.libs_cffi_backend/libffi-ce7fcc27.so.6.0.4 and /dev/null differ
diff --git a/functions/source/CreateSSHKey/__pycache__/six.cpython-38.pyc b/functions/source/CreateSSHKey/__pycache__/six.cpython-38.pyc
new file mode 100644
index 0000000..6586381
Binary files /dev/null and b/functions/source/CreateSSHKey/__pycache__/six.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/_cffi_backend.cpython-38-x86_64-linux-gnu.so b/functions/source/CreateSSHKey/_cffi_backend.cpython-38-x86_64-linux-gnu.so
new file mode 100755
index 0000000..6494f51
Binary files /dev/null and b/functions/source/CreateSSHKey/_cffi_backend.cpython-38-x86_64-linux-gnu.so differ
diff --git a/functions/source/CreateSSHKey/_cffi_backend.so b/functions/source/CreateSSHKey/_cffi_backend.so
deleted file mode 100755
index 3530303..0000000
Binary files a/functions/source/CreateSSHKey/_cffi_backend.so and /dev/null differ
diff --git a/functions/source/CreateSSHKey/asn1crypto/__init__.py b/functions/source/CreateSSHKey/asn1crypto/__init__.py
deleted file mode 100644
index afdeb43..0000000
--- a/functions/source/CreateSSHKey/asn1crypto/__init__.py
+++ /dev/null
@@ -1,9 +0,0 @@
-# coding: utf-8
-from __future__ import unicode_literals, division, absolute_import, print_function
-
-from .version import __version__, __version_info__
-
-__all__ = [
- '__version__',
- '__version_info__',
-]
diff --git a/functions/source/CreateSSHKey/asn1crypto/_elliptic_curve.py b/functions/source/CreateSSHKey/asn1crypto/_elliptic_curve.py
deleted file mode 100644
index 0ecab2d..0000000
--- a/functions/source/CreateSSHKey/asn1crypto/_elliptic_curve.py
+++ /dev/null
@@ -1,314 +0,0 @@
-# coding: utf-8
-
-"""
-Classes and objects to represent prime-field elliptic curves and points on them.
-Exports the following items:
-
- - PrimeCurve()
- - PrimePoint()
- - SECP192R1_CURVE
- - SECP192R1_BASE_POINT
- - SECP224R1_CURVE
- - SECP224R1_BASE_POINT
- - SECP256R1_CURVE
- - SECP256R1_BASE_POINT
- - SECP384R1_CURVE
- - SECP384R1_BASE_POINT
- - SECP521R1_CURVE
- - SECP521R1_BASE_POINT
-
-The curve constants are all PrimeCurve() objects and the base point constants
-are all PrimePoint() objects.
-
-Some of the following source code is derived from
-http://webpages.charter.net/curryfans/peter/downloads.html, but has been heavily
-modified to fit into this project's lint settings. The original project license
-is listed below:
-
-Copyright (c) 2014 Peter Pearson
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from __future__ import unicode_literals, division, absolute_import, print_function
-
-from ._int import inverse_mod
-
-
-class PrimeCurve():
- """
- Elliptic curve over a prime field. Characteristic two field curves are not
- supported.
- """
-
- def __init__(self, p, a, b):
- """
- The curve of points satisfying y^2 = x^3 + a*x + b (mod p)
-
- :param p:
- The prime number as an integer
-
- :param a:
- The component a as an integer
-
- :param b:
- The component b as an integer
- """
-
- self.p = p
- self.a = a
- self.b = b
-
- def contains(self, point):
- """
- :param point:
- A Point object
-
- :return:
- Boolean if the point is on this curve
- """
-
- y2 = point.y * point.y
- x3 = point.x * point.x * point.x
- return (y2 - (x3 + self.a * point.x + self.b)) % self.p == 0
-
-
-class PrimePoint():
- """
- A point on a prime-field elliptic curve
- """
-
- def __init__(self, curve, x, y, order=None):
- """
- :param curve:
- A PrimeCurve object
-
- :param x:
- The x coordinate of the point as an integer
-
- :param y:
- The y coordinate of the point as an integer
-
- :param order:
- The order of the point, as an integer - optional
- """
-
- self.curve = curve
- self.x = x
- self.y = y
- self.order = order
-
- # self.curve is allowed to be None only for INFINITY:
- if self.curve:
- if not self.curve.contains(self):
- raise ValueError('Invalid EC point')
-
- if self.order:
- if self * self.order != INFINITY:
- raise ValueError('Invalid EC point')
-
- def __cmp__(self, other):
- """
- :param other:
- A PrimePoint object
-
- :return:
- 0 if identical, 1 otherwise
- """
- if self.curve == other.curve and self.x == other.x and self.y == other.y:
- return 0
- else:
- return 1
-
- def __add__(self, other):
- """
- :param other:
- A PrimePoint object
-
- :return:
- A PrimePoint object
- """
-
- # X9.62 B.3:
-
- if other == INFINITY:
- return self
- if self == INFINITY:
- return other
- assert self.curve == other.curve
- if self.x == other.x:
- if (self.y + other.y) % self.curve.p == 0:
- return INFINITY
- else:
- return self.double()
-
- p = self.curve.p
-
- l = ((other.y - self.y) * inverse_mod(other.x - self.x, p)) % p
-
- x3 = (l * l - self.x - other.x) % p
- y3 = (l * (self.x - x3) - self.y) % p
-
- return PrimePoint(self.curve, x3, y3)
-
- def __mul__(self, other):
- """
- :param other:
- An integer to multiply the Point by
-
- :return:
- A PrimePoint object
- """
-
- def leftmost_bit(x):
- assert x > 0
- result = 1
- while result <= x:
- result = 2 * result
- return result // 2
-
- e = other
- if self.order:
- e = e % self.order
- if e == 0:
- return INFINITY
- if self == INFINITY:
- return INFINITY
- assert e > 0
-
- # From X9.62 D.3.2:
-
- e3 = 3 * e
- negative_self = PrimePoint(self.curve, self.x, -self.y, self.order)
- i = leftmost_bit(e3) // 2
- result = self
- # print "Multiplying %s by %d (e3 = %d):" % ( self, other, e3 )
- while i > 1:
- result = result.double()
- if (e3 & i) != 0 and (e & i) == 0:
- result = result + self
- if (e3 & i) == 0 and (e & i) != 0:
- result = result + negative_self
- # print ". . . i = %d, result = %s" % ( i, result )
- i = i // 2
-
- return result
-
- def __rmul__(self, other):
- """
- :param other:
- An integer to multiply the Point by
-
- :return:
- A PrimePoint object
- """
-
- return self * other
-
- def double(self):
- """
- :return:
- A PrimePoint object that is twice this point
- """
-
- # X9.62 B.3:
-
- p = self.curve.p
- a = self.curve.a
-
- l = ((3 * self.x * self.x + a) * inverse_mod(2 * self.y, p)) % p
-
- x3 = (l * l - 2 * self.x) % p
- y3 = (l * (self.x - x3) - self.y) % p
-
- return PrimePoint(self.curve, x3, y3)
-
-
-# This one point is the Point At Infinity for all purposes:
-INFINITY = PrimePoint(None, None, None)
-
-
-# NIST Curve P-192:
-SECP192R1_CURVE = PrimeCurve(
- 6277101735386680763835789423207666416083908700390324961279,
- -3,
- 0x64210519e59c80e70fa7e9ab72243049feb8deecc146b9b1
-)
-SECP192R1_BASE_POINT = PrimePoint(
- SECP192R1_CURVE,
- 0x188da80eb03090f67cbf20eb43a18800f4ff0afd82ff1012,
- 0x07192b95ffc8da78631011ed6b24cdd573f977a11e794811,
- 6277101735386680763835789423176059013767194773182842284081
-)
-
-
-# NIST Curve P-224:
-SECP224R1_CURVE = PrimeCurve(
- 26959946667150639794667015087019630673557916260026308143510066298881,
- -3,
- 0xb4050a850c04b3abf54132565044b0b7d7bfd8ba270b39432355ffb4
-)
-SECP224R1_BASE_POINT = PrimePoint(
- SECP224R1_CURVE,
- 0xb70e0cbd6bb4bf7f321390b94a03c1d356c21122343280d6115c1d21,
- 0xbd376388b5f723fb4c22dfe6cd4375a05a07476444d5819985007e34,
- 26959946667150639794667015087019625940457807714424391721682722368061
-)
-
-
-# NIST Curve P-256:
-SECP256R1_CURVE = PrimeCurve(
- 115792089210356248762697446949407573530086143415290314195533631308867097853951,
- -3,
- 0x5ac635d8aa3a93e7b3ebbd55769886bc651d06b0cc53b0f63bce3c3e27d2604b
-)
-SECP256R1_BASE_POINT = PrimePoint(
- SECP256R1_CURVE,
- 0x6b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c296,
- 0x4fe342e2fe1a7f9b8ee7eb4a7c0f9e162bce33576b315ececbb6406837bf51f5,
- 115792089210356248762697446949407573529996955224135760342422259061068512044369
-)
-
-
-# NIST Curve P-384:
-SECP384R1_CURVE = PrimeCurve(
- 39402006196394479212279040100143613805079739270465446667948293404245721771496870329047266088258938001861606973112319, # noqa
- -3,
- 0xb3312fa7e23ee7e4988e056be3f82d19181d9c6efe8141120314088f5013875ac656398d8a2ed19d2a85c8edd3ec2aef
-)
-SECP384R1_BASE_POINT = PrimePoint(
- SECP384R1_CURVE,
- 0xaa87ca22be8b05378eb1c71ef320ad746e1d3b628ba79b9859f741e082542a385502f25dbf55296c3a545e3872760ab7,
- 0x3617de4a96262c6f5d9e98bf9292dc29f8f41dbd289a147ce9da3113b5f0b8c00a60b1ce1d7e819d7a431d7c90ea0e5f,
- 39402006196394479212279040100143613805079739270465446667946905279627659399113263569398956308152294913554433653942643
-)
-
-
-# NIST Curve P-521:
-SECP521R1_CURVE = PrimeCurve(
- 6864797660130609714981900799081393217269435300143305409394463459185543183397656052122559640661454554977296311391480858037121987999716643812574028291115057151, # noqa
- -3,
- 0x051953eb9618e1c9a1f929a21a0b68540eea2da725b99b315f3b8b489918ef109e156193951ec7e937b1652c0bd3bb1bf073573df883d2c34f1ef451fd46b503f00 # noqa
-)
-SECP521R1_BASE_POINT = PrimePoint(
- SECP521R1_CURVE,
- 0xc6858e06b70404e9cd9e3ecb662395b4429c648139053fb521f828af606b4d3dbaa14b5e77efe75928fe1dc127a2ffa8de3348b3c1856a429bf97e7e31c2e5bd66, # noqa
- 0x11839296a789a3bc0045c8a5fb42c7d1bd998f54449579b446817afbd17273e662c97ee72995ef42640c550b9013fad0761353c7086a272c24088be94769fd16650, # noqa
- 6864797660130609714981900799081393217269435300143305409394463459185543183397655394245057746333217197532963996371363321113864768612440380340372808892707005449 # noqa
-)
diff --git a/functions/source/CreateSSHKey/asn1crypto/_errors.py b/functions/source/CreateSSHKey/asn1crypto/_errors.py
deleted file mode 100644
index cc785a5..0000000
--- a/functions/source/CreateSSHKey/asn1crypto/_errors.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# coding: utf-8
-
-"""
-Helper for formatting exception messages. Exports the following items:
-
- - unwrap()
-"""
-
-from __future__ import unicode_literals, division, absolute_import, print_function
-
-import re
-import textwrap
-
-
-def unwrap(string, *params):
- """
- Takes a multi-line string and does the following:
-
- - dedents
- - converts newlines with text before and after into a single line
- - strips leading and trailing whitespace
-
- :param string:
- The string to format
-
- :param *params:
- Params to interpolate into the string
-
- :return:
- The formatted string
- """
-
- output = textwrap.dedent(string)
-
- # Unwrap lines, taking into account bulleted lists, ordered lists and
- # underlines consisting of = signs
- if output.find('\n') != -1:
- output = re.sub('(?<=\\S)\n(?=[^ \n\t\\d\\*\\-=])', ' ', output)
-
- if params:
- output = output % params
-
- output = output.strip()
-
- return output
diff --git a/functions/source/CreateSSHKey/asn1crypto/_ffi.py b/functions/source/CreateSSHKey/asn1crypto/_ffi.py
deleted file mode 100644
index 2a4f5bf..0000000
--- a/functions/source/CreateSSHKey/asn1crypto/_ffi.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# coding: utf-8
-
-"""
-FFI helper compatibility functions. Exports the following items:
-
- - LibraryNotFoundError
- - FFIEngineError
- - bytes_from_buffer()
- - buffer_from_bytes()
- - null()
-"""
-
-from __future__ import unicode_literals, division, absolute_import, print_function
-
-from ctypes import create_string_buffer
-
-
-def buffer_from_bytes(initializer):
- return create_string_buffer(initializer)
-
-
-def bytes_from_buffer(buffer, maxlen=None):
- return buffer.raw
-
-
-def null():
- return None
-
-
-class LibraryNotFoundError(Exception):
-
- """
- An exception when trying to find a shared library
- """
-
- pass
-
-
-class FFIEngineError(Exception):
-
- """
- An exception when trying to instantiate ctypes or cffi
- """
-
- pass
diff --git a/functions/source/CreateSSHKey/asn1crypto/_inet.py b/functions/source/CreateSSHKey/asn1crypto/_inet.py
deleted file mode 100644
index 045ba56..0000000
--- a/functions/source/CreateSSHKey/asn1crypto/_inet.py
+++ /dev/null
@@ -1,170 +0,0 @@
-# coding: utf-8
-from __future__ import unicode_literals, division, absolute_import, print_function
-
-import socket
-import struct
-
-from ._errors import unwrap
-from ._types import byte_cls, bytes_to_list, str_cls, type_name
-
-
-def inet_ntop(address_family, packed_ip):
- """
- Windows compatibility shim for socket.inet_ntop().
-
- :param address_family:
- socket.AF_INET for IPv4 or socket.AF_INET6 for IPv6
-
- :param packed_ip:
- A byte string of the network form of an IP address
-
- :return:
- A unicode string of the IP address
- """
-
- if address_family not in set([socket.AF_INET, socket.AF_INET6]):
- raise ValueError(unwrap(
- '''
- address_family must be socket.AF_INET (%s) or socket.AF_INET6 (%s),
- not %s
- ''',
- repr(socket.AF_INET),
- repr(socket.AF_INET6),
- repr(address_family)
- ))
-
- if not isinstance(packed_ip, byte_cls):
- raise TypeError(unwrap(
- '''
- packed_ip must be a byte string, not %s
- ''',
- type_name(packed_ip)
- ))
-
- required_len = 4 if address_family == socket.AF_INET else 16
- if len(packed_ip) != required_len:
- raise ValueError(unwrap(
- '''
- packed_ip must be %d bytes long - is %d
- ''',
- required_len,
- len(packed_ip)
- ))
-
- if address_family == socket.AF_INET:
- return '%d.%d.%d.%d' % tuple(bytes_to_list(packed_ip))
-
- octets = struct.unpack(b'!HHHHHHHH', packed_ip)
-
- runs_of_zero = {}
- longest_run = 0
- zero_index = None
- for i, octet in enumerate(octets + (-1,)):
- if octet != 0:
- if zero_index is not None:
- length = i - zero_index
- if length not in runs_of_zero:
- runs_of_zero[length] = zero_index
- longest_run = max(longest_run, length)
- zero_index = None
- elif zero_index is None:
- zero_index = i
-
- hexed = [hex(o)[2:] for o in octets]
-
- if longest_run < 2:
- return ':'.join(hexed)
-
- zero_start = runs_of_zero[longest_run]
- zero_end = zero_start + longest_run
-
- return ':'.join(hexed[:zero_start]) + '::' + ':'.join(hexed[zero_end:])
-
-
-def inet_pton(address_family, ip_string):
- """
- Windows compatibility shim for socket.inet_pton().
-
- :param address_family:
- socket.AF_INET for IPv4 or socket.AF_INET6 for IPv6
-
- :param ip_string:
- A unicode string of an IP address
-
- :return:
- A byte string of the network form of the IP address
- """
-
- if address_family not in set([socket.AF_INET, socket.AF_INET6]):
- raise ValueError(unwrap(
- '''
- address_family must be socket.AF_INET (%s) or socket.AF_INET6 (%s),
- not %s
- ''',
- repr(socket.AF_INET),
- repr(socket.AF_INET6),
- repr(address_family)
- ))
-
- if not isinstance(ip_string, str_cls):
- raise TypeError(unwrap(
- '''
- ip_string must be a unicode string, not %s
- ''',
- type_name(ip_string)
- ))
-
- if address_family == socket.AF_INET:
- octets = ip_string.split('.')
- error = len(octets) != 4
- if not error:
- ints = []
- for o in octets:
- o = int(o)
- if o > 255 or o < 0:
- error = True
- break
- ints.append(o)
-
- if error:
- raise ValueError(unwrap(
- '''
- ip_string must be a dotted string with four integers in the
- range of 0 to 255, got %s
- ''',
- repr(ip_string)
- ))
-
- return struct.pack(b'!BBBB', *ints)
-
- error = False
- omitted = ip_string.count('::')
- if omitted > 1:
- error = True
- elif omitted == 0:
- octets = ip_string.split(':')
- error = len(octets) != 8
- else:
- begin, end = ip_string.split('::')
- begin_octets = begin.split(':')
- end_octets = end.split(':')
- missing = 8 - len(begin_octets) - len(end_octets)
- octets = begin_octets + (['0'] * missing) + end_octets
-
- if not error:
- ints = []
- for o in octets:
- o = int(o, 16)
- if o > 65535 or o < 0:
- error = True
- break
- ints.append(o)
-
- return struct.pack(b'!HHHHHHHH', *ints)
-
- raise ValueError(unwrap(
- '''
- ip_string must be a valid ipv6 string, got %s
- ''',
- repr(ip_string)
- ))
diff --git a/functions/source/CreateSSHKey/asn1crypto/_int.py b/functions/source/CreateSSHKey/asn1crypto/_int.py
deleted file mode 100644
index d0c2319..0000000
--- a/functions/source/CreateSSHKey/asn1crypto/_int.py
+++ /dev/null
@@ -1,159 +0,0 @@
-# coding: utf-8
-
-"""
-Function for calculating the modular inverse. Exports the following items:
-
- - inverse_mod()
-
-Source code is derived from
-http://webpages.charter.net/curryfans/peter/downloads.html, but has been heavily
-modified to fit into this project's lint settings. The original project license
-is listed below:
-
-Copyright (c) 2014 Peter Pearson
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from __future__ import unicode_literals, division, absolute_import, print_function
-
-import math
-import platform
-
-from .util import int_to_bytes, int_from_bytes
-
-# First try to use ctypes with OpenSSL for better performance
-try:
- from ._ffi import (
- buffer_from_bytes,
- bytes_from_buffer,
- FFIEngineError,
- LibraryNotFoundError,
- null,
- )
-
- # Some versions of PyPy have segfault issues, so we just punt on PyPy
- if platform.python_implementation() == 'PyPy':
- raise EnvironmentError()
-
- try:
- from ._perf._big_num_ctypes import libcrypto
-
- def inverse_mod(a, p):
- """
- Compute the modular inverse of a (mod p)
-
- :param a:
- An integer
-
- :param p:
- An integer
-
- :return:
- An integer
- """
-
- ctx = libcrypto.BN_CTX_new()
-
- a_bytes = int_to_bytes(abs(a))
- p_bytes = int_to_bytes(abs(p))
-
- a_buf = buffer_from_bytes(a_bytes)
- a_bn = libcrypto.BN_bin2bn(a_buf, len(a_bytes), null())
- if a < 0:
- libcrypto.BN_set_negative(a_bn, 1)
-
- p_buf = buffer_from_bytes(p_bytes)
- p_bn = libcrypto.BN_bin2bn(p_buf, len(p_bytes), null())
- if p < 0:
- libcrypto.BN_set_negative(p_bn, 1)
-
- r_bn = libcrypto.BN_mod_inverse(null(), a_bn, p_bn, ctx)
- r_len_bits = libcrypto.BN_num_bits(r_bn)
- r_len = int(math.ceil(r_len_bits / 8))
- r_buf = buffer_from_bytes(r_len)
- libcrypto.BN_bn2bin(r_bn, r_buf)
- r_bytes = bytes_from_buffer(r_buf, r_len)
- result = int_from_bytes(r_bytes)
-
- libcrypto.BN_free(a_bn)
- libcrypto.BN_free(p_bn)
- libcrypto.BN_free(r_bn)
- libcrypto.BN_CTX_free(ctx)
-
- return result
- except (LibraryNotFoundError, FFIEngineError):
- raise EnvironmentError()
-
-# If there was an issue using ctypes or OpenSSL, we fall back to pure python
-except (EnvironmentError, ImportError):
-
- def inverse_mod(a, p):
- """
- Compute the modular inverse of a (mod p)
-
- :param a:
- An integer
-
- :param p:
- An integer
-
- :return:
- An integer
- """
-
- if a < 0 or p <= a:
- a = a % p
-
- # From Ferguson and Schneier, roughly:
-
- c, d = a, p
- uc, vc, ud, vd = 1, 0, 0, 1
- while c != 0:
- q, c, d = divmod(d, c) + (c,)
- uc, vc, ud, vd = ud - q * uc, vd - q * vc, uc, vc
-
- # At this point, d is the GCD, and ud*a+vd*p = d.
- # If d == 1, this means that ud is an inverse.
-
- assert d == 1
- if ud > 0:
- return ud
- else:
- return ud + p
-
-
-def fill_width(bytes_, width):
- """
- Ensure a byte string representing a positive integer is a specific width
- (in bytes)
-
- :param bytes_:
- The integer byte string
-
- :param width:
- The desired width as an integer
-
- :return:
- A byte string of the width specified
- """
-
- while len(bytes_) < width:
- bytes_ = b'\x00' + bytes_
- return bytes_
diff --git a/functions/source/CreateSSHKey/asn1crypto/_iri.py b/functions/source/CreateSSHKey/asn1crypto/_iri.py
deleted file mode 100644
index 57ddd40..0000000
--- a/functions/source/CreateSSHKey/asn1crypto/_iri.py
+++ /dev/null
@@ -1,288 +0,0 @@
-# coding: utf-8
-
-"""
-Functions to convert unicode IRIs into ASCII byte string URIs and back. Exports
-the following items:
-
- - iri_to_uri()
- - uri_to_iri()
-"""
-
-from __future__ import unicode_literals, division, absolute_import, print_function
-
-from encodings import idna # noqa
-import codecs
-import re
-import sys
-
-from ._errors import unwrap
-from ._types import byte_cls, str_cls, type_name, bytes_to_list, int_types
-
-if sys.version_info < (3,):
- from urlparse import urlsplit, urlunsplit
- from urllib import (
- quote as urlquote,
- unquote as unquote_to_bytes,
- )
-
-else:
- from urllib.parse import (
- quote as urlquote,
- unquote_to_bytes,
- urlsplit,
- urlunsplit,
- )
-
-
-def iri_to_uri(value):
- """
- Normalizes and encodes a unicode IRI into an ASCII byte string URI
-
- :param value:
- A unicode string of an IRI
-
- :return:
- A byte string of the ASCII-encoded URI
- """
-
- if not isinstance(value, str_cls):
- raise TypeError(unwrap(
- '''
- value must be a unicode string, not %s
- ''',
- type_name(value)
- ))
-
- scheme = None
- # Python 2.6 doesn't split properly if the URL doesn't start with http:// or https://
- if sys.version_info < (2, 7) and not value.startswith('http://') and not value.startswith('https://'):
- real_prefix = None
- prefix_match = re.match('^[^:]*://', value)
- if prefix_match:
- real_prefix = prefix_match.group(0)
- value = 'http://' + value[len(real_prefix):]
- parsed = urlsplit(value)
- if real_prefix:
- value = real_prefix + value[7:]
- scheme = _urlquote(real_prefix[:-3])
- else:
- parsed = urlsplit(value)
-
- if scheme is None:
- scheme = _urlquote(parsed.scheme)
- hostname = parsed.hostname
- if hostname is not None:
- hostname = hostname.encode('idna')
- # RFC 3986 allows userinfo to contain sub-delims
- username = _urlquote(parsed.username, safe='!$&\'()*+,;=')
- password = _urlquote(parsed.password, safe='!$&\'()*+,;=')
- port = parsed.port
- if port is not None:
- port = str_cls(port).encode('ascii')
-
- netloc = b''
- if username is not None:
- netloc += username
- if password:
- netloc += b':' + password
- netloc += b'@'
- if hostname is not None:
- netloc += hostname
- if port is not None:
- default_http = scheme == b'http' and port == b'80'
- default_https = scheme == b'https' and port == b'443'
- if not default_http and not default_https:
- netloc += b':' + port
-
- # RFC 3986 allows a path to contain sub-delims, plus "@" and ":"
- path = _urlquote(parsed.path, safe='/!$&\'()*+,;=@:')
- # RFC 3986 allows the query to contain sub-delims, plus "@", ":" , "/" and "?"
- query = _urlquote(parsed.query, safe='/?!$&\'()*+,;=@:')
- # RFC 3986 allows the fragment to contain sub-delims, plus "@", ":" , "/" and "?"
- fragment = _urlquote(parsed.fragment, safe='/?!$&\'()*+,;=@:')
-
- if query is None and fragment is None and path == b'/':
- path = None
-
- # Python 2.7 compat
- if path is None:
- path = ''
-
- output = urlunsplit((scheme, netloc, path, query, fragment))
- if isinstance(output, str_cls):
- output = output.encode('latin1')
- return output
-
-
-def uri_to_iri(value):
- """
- Converts an ASCII URI byte string into a unicode IRI
-
- :param value:
- An ASCII-encoded byte string of the URI
-
- :return:
- A unicode string of the IRI
- """
-
- if not isinstance(value, byte_cls):
- raise TypeError(unwrap(
- '''
- value must be a byte string, not %s
- ''',
- type_name(value)
- ))
-
- parsed = urlsplit(value)
-
- scheme = parsed.scheme
- if scheme is not None:
- scheme = scheme.decode('ascii')
-
- username = _urlunquote(parsed.username, remap=[':', '@'])
- password = _urlunquote(parsed.password, remap=[':', '@'])
- hostname = parsed.hostname
- if hostname:
- hostname = hostname.decode('idna')
- port = parsed.port
- if port and not isinstance(port, int_types):
- port = port.decode('ascii')
-
- netloc = ''
- if username is not None:
- netloc += username
- if password:
- netloc += ':' + password
- netloc += '@'
- if hostname is not None:
- netloc += hostname
- if port is not None:
- netloc += ':' + str_cls(port)
-
- path = _urlunquote(parsed.path, remap=['/'], preserve=True)
- query = _urlunquote(parsed.query, remap=['&', '='], preserve=True)
- fragment = _urlunquote(parsed.fragment)
-
- return urlunsplit((scheme, netloc, path, query, fragment))
-
-
-def _iri_utf8_errors_handler(exc):
- """
- Error handler for decoding UTF-8 parts of a URI into an IRI. Leaves byte
- sequences encoded in %XX format, but as part of a unicode string.
-
- :param exc:
- The UnicodeDecodeError exception
-
- :return:
- A 2-element tuple of (replacement unicode string, integer index to
- resume at)
- """
-
- bytes_as_ints = bytes_to_list(exc.object[exc.start:exc.end])
- replacements = ['%%%02x' % num for num in bytes_as_ints]
- return (''.join(replacements), exc.end)
-
-
-codecs.register_error('iriutf8', _iri_utf8_errors_handler)
-
-
-def _urlquote(string, safe=''):
- """
- Quotes a unicode string for use in a URL
-
- :param string:
- A unicode string
-
- :param safe:
- A unicode string of characters to not encode
-
- :return:
- None (if string is None) or an ASCII byte string of the quoted string
- """
-
- if string is None or string == '':
- return None
-
- # Anything already hex quoted is pulled out of the URL and unquoted if
- # possible
- escapes = []
- if re.search('%[0-9a-fA-F]{2}', string):
- # Try to unquote any percent values, restoring them if they are not
- # valid UTF-8. Also, requote any safe chars since encoded versions of
- # those are functionally different than the unquoted ones.
- def _try_unescape(match):
- byte_string = unquote_to_bytes(match.group(0))
- unicode_string = byte_string.decode('utf-8', 'iriutf8')
- for safe_char in list(safe):
- unicode_string = unicode_string.replace(safe_char, '%%%02x' % ord(safe_char))
- return unicode_string
- string = re.sub('(?:%[0-9a-fA-F]{2})+', _try_unescape, string)
-
- # Once we have the minimal set of hex quoted values, remove them from
- # the string so that they are not double quoted
- def _extract_escape(match):
- escapes.append(match.group(0).encode('ascii'))
- return '\x00'
- string = re.sub('%[0-9a-fA-F]{2}', _extract_escape, string)
-
- output = urlquote(string.encode('utf-8'), safe=safe.encode('utf-8'))
- if not isinstance(output, byte_cls):
- output = output.encode('ascii')
-
- # Restore the existing quoted values that we extracted
- if len(escapes) > 0:
- def _return_escape(_):
- return escapes.pop(0)
- output = re.sub(b'%00', _return_escape, output)
-
- return output
-
-
-def _urlunquote(byte_string, remap=None, preserve=None):
- """
- Unquotes a URI portion from a byte string into unicode using UTF-8
-
- :param byte_string:
- A byte string of the data to unquote
-
- :param remap:
- A list of characters (as unicode) that should be re-mapped to a
- %XX encoding. This is used when characters are not valid in part of a
- URL.
-
- :param preserve:
- A bool - indicates that the chars to be remapped, if they occur in
- non-hex form, should be preserved. E.g. / for URL path.
-
- :return:
- A unicode string
- """
-
- if byte_string is None:
- return byte_string
-
- if byte_string == b'':
- return ''
-
- if preserve:
- replacements = ['\x1A', '\x1C', '\x1D', '\x1E', '\x1F']
- preserve_unmap = {}
- for char in remap:
- replacement = replacements.pop(0)
- preserve_unmap[replacement] = char
- byte_string = byte_string.replace(char.encode('ascii'), replacement.encode('ascii'))
-
- byte_string = unquote_to_bytes(byte_string)
-
- if remap:
- for char in remap:
- byte_string = byte_string.replace(char.encode('ascii'), ('%%%02x' % ord(char)).encode('ascii'))
-
- output = byte_string.decode('utf-8', 'iriutf8')
-
- if preserve:
- for replacement, original in preserve_unmap.items():
- output = output.replace(replacement, original)
-
- return output
diff --git a/functions/source/CreateSSHKey/asn1crypto/_ordereddict.py b/functions/source/CreateSSHKey/asn1crypto/_ordereddict.py
deleted file mode 100644
index 2f18ab5..0000000
--- a/functions/source/CreateSSHKey/asn1crypto/_ordereddict.py
+++ /dev/null
@@ -1,135 +0,0 @@
-# Copyright (c) 2009 Raymond Hettinger
-#
-# Permission is hereby granted, free of charge, to any person
-# obtaining a copy of this software and associated documentation files
-# (the "Software"), to deal in the Software without restriction,
-# including without limitation the rights to use, copy, modify, merge,
-# publish, distribute, sublicense, and/or sell copies of the Software,
-# and to permit persons to whom the Software is furnished to do so,
-# subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be
-# included in all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
-# OTHER DEALINGS IN THE SOFTWARE.
-
-import sys
-
-if not sys.version_info < (2, 7):
-
- from collections import OrderedDict
-
-else:
-
- from UserDict import DictMixin
-
- class OrderedDict(dict, DictMixin):
-
- def __init__(self, *args, **kwds):
- if len(args) > 1:
- raise TypeError('expected at most 1 arguments, got %d' % len(args))
- try:
- self.__end
- except AttributeError:
- self.clear()
- self.update(*args, **kwds)
-
- def clear(self):
- self.__end = end = []
- end += [None, end, end] # sentinel node for doubly linked list
- self.__map = {} # key --> [key, prev, next]
- dict.clear(self)
-
- def __setitem__(self, key, value):
- if key not in self:
- end = self.__end
- curr = end[1]
- curr[2] = end[1] = self.__map[key] = [key, curr, end]
- dict.__setitem__(self, key, value)
-
- def __delitem__(self, key):
- dict.__delitem__(self, key)
- key, prev, next_ = self.__map.pop(key)
- prev[2] = next_
- next_[1] = prev
-
- def __iter__(self):
- end = self.__end
- curr = end[2]
- while curr is not end:
- yield curr[0]
- curr = curr[2]
-
- def __reversed__(self):
- end = self.__end
- curr = end[1]
- while curr is not end:
- yield curr[0]
- curr = curr[1]
-
- def popitem(self, last=True):
- if not self:
- raise KeyError('dictionary is empty')
- if last:
- key = reversed(self).next()
- else:
- key = iter(self).next()
- value = self.pop(key)
- return key, value
-
- def __reduce__(self):
- items = [[k, self[k]] for k in self]
- tmp = self.__map, self.__end
- del self.__map, self.__end
- inst_dict = vars(self).copy()
- self.__map, self.__end = tmp
- if inst_dict:
- return (self.__class__, (items,), inst_dict)
- return self.__class__, (items,)
-
- def keys(self):
- return list(self)
-
- setdefault = DictMixin.setdefault
- update = DictMixin.update
- pop = DictMixin.pop
- values = DictMixin.values
- items = DictMixin.items
- iterkeys = DictMixin.iterkeys
- itervalues = DictMixin.itervalues
- iteritems = DictMixin.iteritems
-
- def __repr__(self):
- if not self:
- return '%s()' % (self.__class__.__name__,)
- return '%s(%r)' % (self.__class__.__name__, self.items())
-
- def copy(self):
- return self.__class__(self)
-
- @classmethod
- def fromkeys(cls, iterable, value=None):
- d = cls()
- for key in iterable:
- d[key] = value
- return d
-
- def __eq__(self, other):
- if isinstance(other, OrderedDict):
- if len(self) != len(other):
- return False
- for p, q in zip(self.items(), other.items()):
- if p != q:
- return False
- return True
- return dict.__eq__(self, other)
-
- def __ne__(self, other):
- return not self == other
diff --git a/functions/source/CreateSSHKey/asn1crypto/_perf/_big_num_ctypes.py b/functions/source/CreateSSHKey/asn1crypto/_perf/_big_num_ctypes.py
deleted file mode 100644
index 8e37e9b..0000000
--- a/functions/source/CreateSSHKey/asn1crypto/_perf/_big_num_ctypes.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# coding: utf-8
-
-"""
-ctypes interface for BN_mod_inverse() function from OpenSSL. Exports the
-following items:
-
- - libcrypto
- - BN_bn2bin()
- - BN_CTX_free()
- - BN_CTX_new()
- - BN_free()
- - BN_mod_inverse()
- - BN_new()
- - BN_num_bits()
- - BN_set_negative()
-
-Will raise asn1crypto._ffi.LibraryNotFoundError() if libcrypto can not be
-found. Will raise asn1crypto._ffi.FFIEngineError() if there is an error
-interfacing with libcrypto.
-"""
-
-from __future__ import unicode_literals, division, absolute_import, print_function
-
-import sys
-
-from ctypes import CDLL, c_int, c_char_p, c_void_p
-from ctypes.util import find_library
-
-from .._ffi import LibraryNotFoundError, FFIEngineError
-
-
-try:
- # On Python 2, the unicode string here may raise a UnicodeDecodeError as it
- # tries to join a bytestring path to the unicode name "crypto"
- libcrypto_path = find_library(b'crypto' if sys.version_info < (3,) else 'crypto')
- if not libcrypto_path:
- raise LibraryNotFoundError('The library libcrypto could not be found')
-
- libcrypto = CDLL(libcrypto_path)
-
- libcrypto.BN_new.argtypes = []
- libcrypto.BN_new.restype = c_void_p
-
- libcrypto.BN_bin2bn.argtypes = [c_char_p, c_int, c_void_p]
- libcrypto.BN_bin2bn.restype = c_void_p
-
- libcrypto.BN_bn2bin.argtypes = [c_void_p, c_char_p]
- libcrypto.BN_bn2bin.restype = c_int
-
- libcrypto.BN_set_negative.argtypes = [c_void_p, c_int]
- libcrypto.BN_set_negative.restype = None
-
- libcrypto.BN_num_bits.argtypes = [c_void_p]
- libcrypto.BN_num_bits.restype = c_int
-
- libcrypto.BN_free.argtypes = [c_void_p]
- libcrypto.BN_free.restype = None
-
- libcrypto.BN_CTX_new.argtypes = []
- libcrypto.BN_CTX_new.restype = c_void_p
-
- libcrypto.BN_CTX_free.argtypes = [c_void_p]
- libcrypto.BN_CTX_free.restype = None
-
- libcrypto.BN_mod_inverse.argtypes = [c_void_p, c_void_p, c_void_p, c_void_p]
- libcrypto.BN_mod_inverse.restype = c_void_p
-
-except (AttributeError):
- raise FFIEngineError('Error initializing ctypes')
diff --git a/functions/source/CreateSSHKey/asn1crypto/_teletex_codec.py b/functions/source/CreateSSHKey/asn1crypto/_teletex_codec.py
deleted file mode 100644
index b5991aa..0000000
--- a/functions/source/CreateSSHKey/asn1crypto/_teletex_codec.py
+++ /dev/null
@@ -1,331 +0,0 @@
-# coding: utf-8
-
-"""
-Implementation of the teletex T.61 codec. Exports the following items:
-
- - register()
-"""
-
-from __future__ import unicode_literals, division, absolute_import, print_function
-
-import codecs
-
-
-class TeletexCodec(codecs.Codec):
-
- def encode(self, input_, errors='strict'):
- return codecs.charmap_encode(input_, errors, ENCODING_TABLE)
-
- def decode(self, input_, errors='strict'):
- return codecs.charmap_decode(input_, errors, DECODING_TABLE)
-
-
-class TeletexIncrementalEncoder(codecs.IncrementalEncoder):
-
- def encode(self, input_, final=False):
- return codecs.charmap_encode(input_, self.errors, ENCODING_TABLE)[0]
-
-
-class TeletexIncrementalDecoder(codecs.IncrementalDecoder):
-
- def decode(self, input_, final=False):
- return codecs.charmap_decode(input_, self.errors, DECODING_TABLE)[0]
-
-
-class TeletexStreamWriter(TeletexCodec, codecs.StreamWriter):
-
- pass
-
-
-class TeletexStreamReader(TeletexCodec, codecs.StreamReader):
-
- pass
-
-
-def teletex_search_function(name):
- """
- Search function for teletex codec that is passed to codecs.register()
- """
-
- if name != 'teletex':
- return None
-
- return codecs.CodecInfo(
- name='teletex',
- encode=TeletexCodec().encode,
- decode=TeletexCodec().decode,
- incrementalencoder=TeletexIncrementalEncoder,
- incrementaldecoder=TeletexIncrementalDecoder,
- streamreader=TeletexStreamReader,
- streamwriter=TeletexStreamWriter,
- )
-
-
-def register():
- """
- Registers the teletex codec
- """
-
- codecs.register(teletex_search_function)
-
-
-# http://en.wikipedia.org/wiki/ITU_T.61
-DECODING_TABLE = (
- '\u0000'
- '\u0001'
- '\u0002'
- '\u0003'
- '\u0004'
- '\u0005'
- '\u0006'
- '\u0007'
- '\u0008'
- '\u0009'
- '\u000A'
- '\u000B'
- '\u000C'
- '\u000D'
- '\u000E'
- '\u000F'
- '\u0010'
- '\u0011'
- '\u0012'
- '\u0013'
- '\u0014'
- '\u0015'
- '\u0016'
- '\u0017'
- '\u0018'
- '\u0019'
- '\u001A'
- '\u001B'
- '\u001C'
- '\u001D'
- '\u001E'
- '\u001F'
- '\u0020'
- '\u0021'
- '\u0022'
- '\ufffe'
- '\ufffe'
- '\u0025'
- '\u0026'
- '\u0027'
- '\u0028'
- '\u0029'
- '\u002A'
- '\u002B'
- '\u002C'
- '\u002D'
- '\u002E'
- '\u002F'
- '\u0030'
- '\u0031'
- '\u0032'
- '\u0033'
- '\u0034'
- '\u0035'
- '\u0036'
- '\u0037'
- '\u0038'
- '\u0039'
- '\u003A'
- '\u003B'
- '\u003C'
- '\u003D'
- '\u003E'
- '\u003F'
- '\u0040'
- '\u0041'
- '\u0042'
- '\u0043'
- '\u0044'
- '\u0045'
- '\u0046'
- '\u0047'
- '\u0048'
- '\u0049'
- '\u004A'
- '\u004B'
- '\u004C'
- '\u004D'
- '\u004E'
- '\u004F'
- '\u0050'
- '\u0051'
- '\u0052'
- '\u0053'
- '\u0054'
- '\u0055'
- '\u0056'
- '\u0057'
- '\u0058'
- '\u0059'
- '\u005A'
- '\u005B'
- '\ufffe'
- '\u005D'
- '\ufffe'
- '\u005F'
- '\ufffe'
- '\u0061'
- '\u0062'
- '\u0063'
- '\u0064'
- '\u0065'
- '\u0066'
- '\u0067'
- '\u0068'
- '\u0069'
- '\u006A'
- '\u006B'
- '\u006C'
- '\u006D'
- '\u006E'
- '\u006F'
- '\u0070'
- '\u0071'
- '\u0072'
- '\u0073'
- '\u0074'
- '\u0075'
- '\u0076'
- '\u0077'
- '\u0078'
- '\u0079'
- '\u007A'
- '\ufffe'
- '\u007C'
- '\ufffe'
- '\ufffe'
- '\u007F'
- '\u0080'
- '\u0081'
- '\u0082'
- '\u0083'
- '\u0084'
- '\u0085'
- '\u0086'
- '\u0087'
- '\u0088'
- '\u0089'
- '\u008A'
- '\u008B'
- '\u008C'
- '\u008D'
- '\u008E'
- '\u008F'
- '\u0090'
- '\u0091'
- '\u0092'
- '\u0093'
- '\u0094'
- '\u0095'
- '\u0096'
- '\u0097'
- '\u0098'
- '\u0099'
- '\u009A'
- '\u009B'
- '\u009C'
- '\u009D'
- '\u009E'
- '\u009F'
- '\u00A0'
- '\u00A1'
- '\u00A2'
- '\u00A3'
- '\u0024'
- '\u00A5'
- '\u0023'
- '\u00A7'
- '\u00A4'
- '\ufffe'
- '\ufffe'
- '\u00AB'
- '\ufffe'
- '\ufffe'
- '\ufffe'
- '\ufffe'
- '\u00B0'
- '\u00B1'
- '\u00B2'
- '\u00B3'
- '\u00D7'
- '\u00B5'
- '\u00B6'
- '\u00B7'
- '\u00F7'
- '\ufffe'
- '\ufffe'
- '\u00BB'
- '\u00BC'
- '\u00BD'
- '\u00BE'
- '\u00BF'
- '\ufffe'
- '\u0300'
- '\u0301'
- '\u0302'
- '\u0303'
- '\u0304'
- '\u0306'
- '\u0307'
- '\u0308'
- '\ufffe'
- '\u030A'
- '\u0327'
- '\u0332'
- '\u030B'
- '\u0328'
- '\u030C'
- '\ufffe'
- '\ufffe'
- '\ufffe'
- '\ufffe'
- '\ufffe'
- '\ufffe'
- '\ufffe'
- '\ufffe'
- '\ufffe'
- '\ufffe'
- '\ufffe'
- '\ufffe'
- '\ufffe'
- '\ufffe'
- '\ufffe'
- '\ufffe'
- '\u2126'
- '\u00C6'
- '\u00D0'
- '\u00AA'
- '\u0126'
- '\ufffe'
- '\u0132'
- '\u013F'
- '\u0141'
- '\u00D8'
- '\u0152'
- '\u00BA'
- '\u00DE'
- '\u0166'
- '\u014A'
- '\u0149'
- '\u0138'
- '\u00E6'
- '\u0111'
- '\u00F0'
- '\u0127'
- '\u0131'
- '\u0133'
- '\u0140'
- '\u0142'
- '\u00F8'
- '\u0153'
- '\u00DF'
- '\u00FE'
- '\u0167'
- '\u014B'
- '\ufffe'
-)
-ENCODING_TABLE = codecs.charmap_build(DECODING_TABLE)
diff --git a/functions/source/CreateSSHKey/asn1crypto/_types.py b/functions/source/CreateSSHKey/asn1crypto/_types.py
deleted file mode 100644
index b9ca8cc..0000000
--- a/functions/source/CreateSSHKey/asn1crypto/_types.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# coding: utf-8
-from __future__ import unicode_literals, division, absolute_import, print_function
-
-import inspect
-import sys
-
-
-if sys.version_info < (3,):
- str_cls = unicode # noqa
- byte_cls = str
- int_types = (int, long) # noqa
-
- def bytes_to_list(byte_string):
- return [ord(b) for b in byte_string]
-
- chr_cls = chr
-
-else:
- str_cls = str
- byte_cls = bytes
- int_types = int
-
- bytes_to_list = list
-
- def chr_cls(num):
- return bytes([num])
-
-
-def type_name(value):
- """
- Returns a user-readable name for the type of an object
-
- :param value:
- A value to get the type name of
-
- :return:
- A unicode string of the object's type name
- """
-
- if inspect.isclass(value):
- cls = value
- else:
- cls = value.__class__
- if cls.__module__ in set(['builtins', '__builtin__']):
- return cls.__name__
- return '%s.%s' % (cls.__module__, cls.__name__)
diff --git a/functions/source/CreateSSHKey/asn1crypto/algos.py b/functions/source/CreateSSHKey/asn1crypto/algos.py
deleted file mode 100644
index dd4ae5b..0000000
--- a/functions/source/CreateSSHKey/asn1crypto/algos.py
+++ /dev/null
@@ -1,1115 +0,0 @@
-# coding: utf-8
-
-"""
-ASN.1 type classes for various algorithms used in various aspects of public
-key cryptography. Exports the following items:
-
- - AlgorithmIdentifier()
- - DigestAlgorithm()
- - DigestInfo()
- - DSASignature()
- - EncryptionAlgorithm()
- - HmacAlgorithm()
- - KdfAlgorithm()
- - Pkcs5MacAlgorithm()
- - SignedDigestAlgorithm()
-
-Other type classes are defined that help compose the types listed above.
-"""
-
-from __future__ import unicode_literals, division, absolute_import, print_function
-
-from ._errors import unwrap
-from ._int import fill_width
-from .util import int_from_bytes, int_to_bytes
-from .core import (
- Any,
- Choice,
- Integer,
- Null,
- ObjectIdentifier,
- OctetString,
- Sequence,
- Void,
-)
-
-
-# Structures and OIDs in this file are pulled from
-# https://tools.ietf.org/html/rfc3279, https://tools.ietf.org/html/rfc4055,
-# https://tools.ietf.org/html/rfc5758, https://tools.ietf.org/html/rfc7292,
-# http://www.emc.com/collateral/white-papers/h11302-pkcs5v2-1-password-based-cryptography-standard-wp.pdf
-
-class AlgorithmIdentifier(Sequence):
- _fields = [
- ('algorithm', ObjectIdentifier),
- ('parameters', Any, {'optional': True}),
- ]
-
-
-class _ForceNullParameters(object):
- """
- Various structures based on AlgorithmIdentifier require that the parameters
- field be core.Null() for certain OIDs. This mixin ensures that happens.
- """
-
- # The following attribute, plus the parameters spec callback and custom
- # __setitem__ are all to handle a situation where parameters should not be
- # optional and must be Null for certain OIDs. More info at
- # https://tools.ietf.org/html/rfc4055#page-15 and
- # https://tools.ietf.org/html/rfc4055#section-2.1
- _null_algos = set([
- '1.2.840.113549.1.1.1', # rsassa_pkcs1v15 / rsaes_pkcs1v15 / rsa
- '1.2.840.113549.1.1.11', # sha256_rsa
- '1.2.840.113549.1.1.12', # sha384_rsa
- '1.2.840.113549.1.1.13', # sha512_rsa
- '1.2.840.113549.1.1.14', # sha224_rsa
- '1.3.14.3.2.26', # sha1
- '2.16.840.1.101.3.4.2.4', # sha224
- '2.16.840.1.101.3.4.2.1', # sha256
- '2.16.840.1.101.3.4.2.2', # sha384
- '2.16.840.1.101.3.4.2.3', # sha512
- ])
-
- def _parameters_spec(self):
- if self._oid_pair == ('algorithm', 'parameters'):
- algo = self['algorithm'].native
- if algo in self._oid_specs:
- return self._oid_specs[algo]
-
- if self['algorithm'].dotted in self._null_algos:
- return Null
-
- return None
-
- _spec_callbacks = {
- 'parameters': _parameters_spec
- }
-
- # We have to override this since the spec callback uses the value of
- # algorithm to determine the parameter spec, however default values are
- # assigned before setting a field, so a default value can't be based on
- # another field value (unless it is a default also). Thus we have to
- # manually check to see if the algorithm was set and parameters is unset,
- # and then fix the value as appropriate.
- def __setitem__(self, key, value):
- res = super(_ForceNullParameters, self).__setitem__(key, value)
- if key != 'algorithm':
- return res
- if self['algorithm'].dotted not in self._null_algos:
- return res
- if self['parameters'].__class__ != Void:
- return res
- self['parameters'] = Null()
- return res
-
-
-class HmacAlgorithmId(ObjectIdentifier):
- _map = {
- '1.3.14.3.2.10': 'des_mac',
- '1.2.840.113549.2.7': 'sha1',
- '1.2.840.113549.2.8': 'sha224',
- '1.2.840.113549.2.9': 'sha256',
- '1.2.840.113549.2.10': 'sha384',
- '1.2.840.113549.2.11': 'sha512',
- '1.2.840.113549.2.12': 'sha512_224',
- '1.2.840.113549.2.13': 'sha512_256',
- }
-
-
-class HmacAlgorithm(Sequence):
- _fields = [
- ('algorithm', HmacAlgorithmId),
- ('parameters', Any, {'optional': True}),
- ]
-
-
-class DigestAlgorithmId(ObjectIdentifier):
- _map = {
- '1.2.840.113549.2.2': 'md2',
- '1.2.840.113549.2.5': 'md5',
- '1.3.14.3.2.26': 'sha1',
- '2.16.840.1.101.3.4.2.4': 'sha224',
- '2.16.840.1.101.3.4.2.1': 'sha256',
- '2.16.840.1.101.3.4.2.2': 'sha384',
- '2.16.840.1.101.3.4.2.3': 'sha512',
- '2.16.840.1.101.3.4.2.5': 'sha512_224',
- '2.16.840.1.101.3.4.2.6': 'sha512_256',
- }
-
-
-class DigestAlgorithm(_ForceNullParameters, Sequence):
- _fields = [
- ('algorithm', DigestAlgorithmId),
- ('parameters', Any, {'optional': True}),
- ]
-
-
-# This structure is what is signed with a SignedDigestAlgorithm
-class DigestInfo(Sequence):
- _fields = [
- ('digest_algorithm', DigestAlgorithm),
- ('digest', OctetString),
- ]
-
-
-class MaskGenAlgorithmId(ObjectIdentifier):
- _map = {
- '1.2.840.113549.1.1.8': 'mgf1',
- }
-
-
-class MaskGenAlgorithm(Sequence):
- _fields = [
- ('algorithm', MaskGenAlgorithmId),
- ('parameters', Any, {'optional': True}),
- ]
-
- _oid_pair = ('algorithm', 'parameters')
- _oid_specs = {
- 'mgf1': DigestAlgorithm
- }
-
-
-class TrailerField(Integer):
- _map = {
- 1: 'trailer_field_bc',
- }
-
-
-class RSASSAPSSParams(Sequence):
- _fields = [
- (
- 'hash_algorithm',
- DigestAlgorithm,
- {
- 'explicit': 0,
- 'default': {'algorithm': 'sha1'},
- }
- ),
- (
- 'mask_gen_algorithm',
- MaskGenAlgorithm,
- {
- 'explicit': 1,
- 'default': {
- 'algorithm': 'mgf1',
- 'parameters': {'algorithm': 'sha1'},
- },
- }
- ),
- (
- 'salt_length',
- Integer,
- {
- 'explicit': 2,
- 'default': 20,
- }
- ),
- (
- 'trailer_field',
- TrailerField,
- {
- 'explicit': 3,
- 'default': 'trailer_field_bc',
- }
- ),
- ]
-
-
-class SignedDigestAlgorithmId(ObjectIdentifier):
- _map = {
- '1.3.14.3.2.3': 'md5_rsa',
- '1.3.14.3.2.29': 'sha1_rsa',
- '1.3.14.7.2.3.1': 'md2_rsa',
- '1.2.840.113549.1.1.2': 'md2_rsa',
- '1.2.840.113549.1.1.4': 'md5_rsa',
- '1.2.840.113549.1.1.5': 'sha1_rsa',
- '1.2.840.113549.1.1.14': 'sha224_rsa',
- '1.2.840.113549.1.1.11': 'sha256_rsa',
- '1.2.840.113549.1.1.12': 'sha384_rsa',
- '1.2.840.113549.1.1.13': 'sha512_rsa',
- '1.2.840.113549.1.1.10': 'rsassa_pss',
- '1.2.840.10040.4.3': 'sha1_dsa',
- '1.3.14.3.2.13': 'sha1_dsa',
- '1.3.14.3.2.27': 'sha1_dsa',
- '2.16.840.1.101.3.4.3.1': 'sha224_dsa',
- '2.16.840.1.101.3.4.3.2': 'sha256_dsa',
- '1.2.840.10045.4.1': 'sha1_ecdsa',
- '1.2.840.10045.4.3.1': 'sha224_ecdsa',
- '1.2.840.10045.4.3.2': 'sha256_ecdsa',
- '1.2.840.10045.4.3.3': 'sha384_ecdsa',
- '1.2.840.10045.4.3.4': 'sha512_ecdsa',
- # For when the digest is specified elsewhere in a Sequence
- '1.2.840.113549.1.1.1': 'rsassa_pkcs1v15',
- '1.2.840.10040.4.1': 'dsa',
- '1.2.840.10045.4': 'ecdsa',
- }
-
- _reverse_map = {
- 'dsa': '1.2.840.10040.4.1',
- 'ecdsa': '1.2.840.10045.4',
- 'md2_rsa': '1.2.840.113549.1.1.2',
- 'md5_rsa': '1.2.840.113549.1.1.4',
- 'rsassa_pkcs1v15': '1.2.840.113549.1.1.1',
- 'rsassa_pss': '1.2.840.113549.1.1.10',
- 'sha1_dsa': '1.2.840.10040.4.3',
- 'sha1_ecdsa': '1.2.840.10045.4.1',
- 'sha1_rsa': '1.2.840.113549.1.1.5',
- 'sha224_dsa': '2.16.840.1.101.3.4.3.1',
- 'sha224_ecdsa': '1.2.840.10045.4.3.1',
- 'sha224_rsa': '1.2.840.113549.1.1.14',
- 'sha256_dsa': '2.16.840.1.101.3.4.3.2',
- 'sha256_ecdsa': '1.2.840.10045.4.3.2',
- 'sha256_rsa': '1.2.840.113549.1.1.11',
- 'sha384_ecdsa': '1.2.840.10045.4.3.3',
- 'sha384_rsa': '1.2.840.113549.1.1.12',
- 'sha512_ecdsa': '1.2.840.10045.4.3.4',
- 'sha512_rsa': '1.2.840.113549.1.1.13',
- }
-
-
-class SignedDigestAlgorithm(_ForceNullParameters, Sequence):
- _fields = [
- ('algorithm', SignedDigestAlgorithmId),
- ('parameters', Any, {'optional': True}),
- ]
-
- _oid_pair = ('algorithm', 'parameters')
- _oid_specs = {
- 'rsassa_pss': RSASSAPSSParams,
- }
-
- @property
- def signature_algo(self):
- """
- :return:
- A unicode string of "rsassa_pkcs1v15", "rsassa_pss", "dsa" or
- "ecdsa"
- """
-
- algorithm = self['algorithm'].native
-
- algo_map = {
- 'md2_rsa': 'rsassa_pkcs1v15',
- 'md5_rsa': 'rsassa_pkcs1v15',
- 'sha1_rsa': 'rsassa_pkcs1v15',
- 'sha224_rsa': 'rsassa_pkcs1v15',
- 'sha256_rsa': 'rsassa_pkcs1v15',
- 'sha384_rsa': 'rsassa_pkcs1v15',
- 'sha512_rsa': 'rsassa_pkcs1v15',
- 'rsassa_pkcs1v15': 'rsassa_pkcs1v15',
- 'rsassa_pss': 'rsassa_pss',
- 'sha1_dsa': 'dsa',
- 'sha224_dsa': 'dsa',
- 'sha256_dsa': 'dsa',
- 'dsa': 'dsa',
- 'sha1_ecdsa': 'ecdsa',
- 'sha224_ecdsa': 'ecdsa',
- 'sha256_ecdsa': 'ecdsa',
- 'sha384_ecdsa': 'ecdsa',
- 'sha512_ecdsa': 'ecdsa',
- 'ecdsa': 'ecdsa',
- }
- if algorithm in algo_map:
- return algo_map[algorithm]
-
- raise ValueError(unwrap(
- '''
- Signature algorithm not known for %s
- ''',
- algorithm
- ))
-
- @property
- def hash_algo(self):
- """
- :return:
- A unicode string of "md2", "md5", "sha1", "sha224", "sha256",
- "sha384", "sha512", "sha512_224", "sha512_256"
- """
-
- algorithm = self['algorithm'].native
-
- algo_map = {
- 'md2_rsa': 'md2',
- 'md5_rsa': 'md5',
- 'sha1_rsa': 'sha1',
- 'sha224_rsa': 'sha224',
- 'sha256_rsa': 'sha256',
- 'sha384_rsa': 'sha384',
- 'sha512_rsa': 'sha512',
- 'sha1_dsa': 'sha1',
- 'sha224_dsa': 'sha224',
- 'sha256_dsa': 'sha256',
- 'sha1_ecdsa': 'sha1',
- 'sha224_ecdsa': 'sha224',
- 'sha256_ecdsa': 'sha256',
- 'sha384_ecdsa': 'sha384',
- 'sha512_ecdsa': 'sha512',
- }
- if algorithm in algo_map:
- return algo_map[algorithm]
-
- if algorithm == 'rsassa_pss':
- return self['parameters']['hash_algorithm']['algorithm'].native
-
- raise ValueError(unwrap(
- '''
- Hash algorithm not known for %s
- ''',
- algorithm
- ))
-
-
-class Pbkdf2Salt(Choice):
- _alternatives = [
- ('specified', OctetString),
- ('other_source', AlgorithmIdentifier),
- ]
-
-
-class Pbkdf2Params(Sequence):
- _fields = [
- ('salt', Pbkdf2Salt),
- ('iteration_count', Integer),
- ('key_length', Integer, {'optional': True}),
- ('prf', HmacAlgorithm, {'default': {'algorithm': 'sha1'}}),
- ]
-
-
-class KdfAlgorithmId(ObjectIdentifier):
- _map = {
- '1.2.840.113549.1.5.12': 'pbkdf2'
- }
-
-
-class KdfAlgorithm(Sequence):
- _fields = [
- ('algorithm', KdfAlgorithmId),
- ('parameters', Any, {'optional': True}),
- ]
- _oid_pair = ('algorithm', 'parameters')
- _oid_specs = {
- 'pbkdf2': Pbkdf2Params
- }
-
-
-class DHParameters(Sequence):
- """
- Original Name: DHParameter
- Source: ftp://ftp.rsasecurity.com/pub/pkcs/ascii/pkcs-3.asc section 9
- """
-
- _fields = [
- ('p', Integer),
- ('g', Integer),
- ('private_value_length', Integer, {'optional': True}),
- ]
-
-
-class KeyExchangeAlgorithmId(ObjectIdentifier):
- _map = {
- '1.2.840.113549.1.3.1': 'dh',
- }
-
-
-class KeyExchangeAlgorithm(Sequence):
- _fields = [
- ('algorithm', KeyExchangeAlgorithmId),
- ('parameters', Any, {'optional': True}),
- ]
- _oid_pair = ('algorithm', 'parameters')
- _oid_specs = {
- 'dh': DHParameters,
- }
-
-
-class Rc2Params(Sequence):
- _fields = [
- ('rc2_parameter_version', Integer, {'optional': True}),
- ('iv', OctetString),
- ]
-
-
-class Rc5ParamVersion(Integer):
- _map = {
- 16: 'v1-0'
- }
-
-
-class Rc5Params(Sequence):
- _fields = [
- ('version', Rc5ParamVersion),
- ('rounds', Integer),
- ('block_size_in_bits', Integer),
- ('iv', OctetString, {'optional': True}),
- ]
-
-
-class Pbes1Params(Sequence):
- _fields = [
- ('salt', OctetString),
- ('iterations', Integer),
- ]
-
-
-class PSourceAlgorithmId(ObjectIdentifier):
- _map = {
- '1.2.840.113549.1.1.9': 'p_specified',
- }
-
-
-class PSourceAlgorithm(Sequence):
- _fields = [
- ('algorithm', PSourceAlgorithmId),
- ('parameters', Any, {'optional': True}),
- ]
-
- _oid_pair = ('algorithm', 'parameters')
- _oid_specs = {
- 'p_specified': OctetString
- }
-
-
-class RSAESOAEPParams(Sequence):
- _fields = [
- (
- 'hash_algorithm',
- DigestAlgorithm,
- {
- 'explicit': 0,
- 'default': {'algorithm': 'sha1'}
- }
- ),
- (
- 'mask_gen_algorithm',
- MaskGenAlgorithm,
- {
- 'explicit': 1,
- 'default': {
- 'algorithm': 'mgf1',
- 'parameters': {'algorithm': 'sha1'}
- }
- }
- ),
- (
- 'p_source_algorithm',
- PSourceAlgorithm,
- {
- 'explicit': 2,
- 'default': {
- 'algorithm': 'p_specified',
- 'parameters': b''
- }
- }
- ),
- ]
-
-
-class DSASignature(Sequence):
- """
- An ASN.1 class for translating between the OS crypto library's
- representation of an (EC)DSA signature and the ASN.1 structure that is part
- of various RFCs.
-
- Original Name: DSS-Sig-Value
- Source: https://tools.ietf.org/html/rfc3279#section-2.2.2
- """
-
- _fields = [
- ('r', Integer),
- ('s', Integer),
- ]
-
- @classmethod
- def from_p1363(cls, data):
- """
- Reads a signature from a byte string encoding according to IEEE P1363,
- which is used by Microsoft's BCryptSignHash() function.
-
- :param data:
- A byte string from BCryptSignHash()
-
- :return:
- A DSASignature object
- """
-
- r = int_from_bytes(data[0:len(data) // 2])
- s = int_from_bytes(data[len(data) // 2:])
- return cls({'r': r, 's': s})
-
- def to_p1363(self):
- """
- Dumps a signature to a byte string compatible with Microsoft's
- BCryptVerifySignature() function.
-
- :return:
- A byte string compatible with BCryptVerifySignature()
- """
-
- r_bytes = int_to_bytes(self['r'].native)
- s_bytes = int_to_bytes(self['s'].native)
-
- int_byte_length = max(len(r_bytes), len(s_bytes))
- r_bytes = fill_width(r_bytes, int_byte_length)
- s_bytes = fill_width(s_bytes, int_byte_length)
-
- return r_bytes + s_bytes
-
-
-class EncryptionAlgorithmId(ObjectIdentifier):
- _map = {
- '1.3.14.3.2.7': 'des',
- '1.2.840.113549.3.7': 'tripledes_3key',
- '1.2.840.113549.3.2': 'rc2',
- '1.2.840.113549.3.9': 'rc5',
- # From http://csrc.nist.gov/groups/ST/crypto_apps_infra/csor/algorithms.html#AES
- '2.16.840.1.101.3.4.1.1': 'aes128_ecb',
- '2.16.840.1.101.3.4.1.2': 'aes128_cbc',
- '2.16.840.1.101.3.4.1.3': 'aes128_ofb',
- '2.16.840.1.101.3.4.1.4': 'aes128_cfb',
- '2.16.840.1.101.3.4.1.5': 'aes128_wrap',
- '2.16.840.1.101.3.4.1.6': 'aes128_gcm',
- '2.16.840.1.101.3.4.1.7': 'aes128_ccm',
- '2.16.840.1.101.3.4.1.8': 'aes128_wrap_pad',
- '2.16.840.1.101.3.4.1.21': 'aes192_ecb',
- '2.16.840.1.101.3.4.1.22': 'aes192_cbc',
- '2.16.840.1.101.3.4.1.23': 'aes192_ofb',
- '2.16.840.1.101.3.4.1.24': 'aes192_cfb',
- '2.16.840.1.101.3.4.1.25': 'aes192_wrap',
- '2.16.840.1.101.3.4.1.26': 'aes192_gcm',
- '2.16.840.1.101.3.4.1.27': 'aes192_ccm',
- '2.16.840.1.101.3.4.1.28': 'aes192_wrap_pad',
- '2.16.840.1.101.3.4.1.41': 'aes256_ecb',
- '2.16.840.1.101.3.4.1.42': 'aes256_cbc',
- '2.16.840.1.101.3.4.1.43': 'aes256_ofb',
- '2.16.840.1.101.3.4.1.44': 'aes256_cfb',
- '2.16.840.1.101.3.4.1.45': 'aes256_wrap',
- '2.16.840.1.101.3.4.1.46': 'aes256_gcm',
- '2.16.840.1.101.3.4.1.47': 'aes256_ccm',
- '2.16.840.1.101.3.4.1.48': 'aes256_wrap_pad',
- # From PKCS#5
- '1.2.840.113549.1.5.13': 'pbes2',
- '1.2.840.113549.1.5.1': 'pbes1_md2_des',
- '1.2.840.113549.1.5.3': 'pbes1_md5_des',
- '1.2.840.113549.1.5.4': 'pbes1_md2_rc2',
- '1.2.840.113549.1.5.6': 'pbes1_md5_rc2',
- '1.2.840.113549.1.5.10': 'pbes1_sha1_des',
- '1.2.840.113549.1.5.11': 'pbes1_sha1_rc2',
- # From PKCS#12
- '1.2.840.113549.1.12.1.1': 'pkcs12_sha1_rc4_128',
- '1.2.840.113549.1.12.1.2': 'pkcs12_sha1_rc4_40',
- '1.2.840.113549.1.12.1.3': 'pkcs12_sha1_tripledes_3key',
- '1.2.840.113549.1.12.1.4': 'pkcs12_sha1_tripledes_2key',
- '1.2.840.113549.1.12.1.5': 'pkcs12_sha1_rc2_128',
- '1.2.840.113549.1.12.1.6': 'pkcs12_sha1_rc2_40',
- # PKCS#1 v2.2
- '1.2.840.113549.1.1.1': 'rsaes_pkcs1v15',
- '1.2.840.113549.1.1.7': 'rsaes_oaep',
- }
-
-
-class EncryptionAlgorithm(_ForceNullParameters, Sequence):
- _fields = [
- ('algorithm', EncryptionAlgorithmId),
- ('parameters', Any, {'optional': True}),
- ]
-
- _oid_pair = ('algorithm', 'parameters')
- _oid_specs = {
- 'des': OctetString,
- 'tripledes_3key': OctetString,
- 'rc2': Rc2Params,
- 'rc5': Rc5Params,
- 'aes128_cbc': OctetString,
- 'aes192_cbc': OctetString,
- 'aes256_cbc': OctetString,
- 'aes128_ofb': OctetString,
- 'aes192_ofb': OctetString,
- 'aes256_ofb': OctetString,
- # From PKCS#5
- 'pbes1_md2_des': Pbes1Params,
- 'pbes1_md5_des': Pbes1Params,
- 'pbes1_md2_rc2': Pbes1Params,
- 'pbes1_md5_rc2': Pbes1Params,
- 'pbes1_sha1_des': Pbes1Params,
- 'pbes1_sha1_rc2': Pbes1Params,
- # From PKCS#12
- 'pkcs12_sha1_rc4_128': Pbes1Params,
- 'pkcs12_sha1_rc4_40': Pbes1Params,
- 'pkcs12_sha1_tripledes_3key': Pbes1Params,
- 'pkcs12_sha1_tripledes_2key': Pbes1Params,
- 'pkcs12_sha1_rc2_128': Pbes1Params,
- 'pkcs12_sha1_rc2_40': Pbes1Params,
- # PKCS#1 v2.2
- 'rsaes_oaep': RSAESOAEPParams,
- }
-
- @property
- def kdf(self):
- """
- Returns the name of the key derivation function to use.
-
- :return:
- A unicode string of one of the following: "pbkdf1", "pbkdf2",
- "pkcs12_kdf"
- """
-
- encryption_algo = self['algorithm'].native
-
- if encryption_algo == 'pbes2':
- return self['parameters']['key_derivation_func']['algorithm'].native
-
- if encryption_algo.find('.') == -1:
- if encryption_algo.find('_') != -1:
- encryption_algo, _ = encryption_algo.split('_', 1)
-
- if encryption_algo == 'pbes1':
- return 'pbkdf1'
-
- if encryption_algo == 'pkcs12':
- return 'pkcs12_kdf'
-
- raise ValueError(unwrap(
- '''
- Encryption algorithm "%s" does not have a registered key
- derivation function
- ''',
- encryption_algo
- ))
-
- raise ValueError(unwrap(
- '''
- Unrecognized encryption algorithm "%s", can not determine key
- derivation function
- ''',
- encryption_algo
- ))
-
- @property
- def kdf_hmac(self):
- """
- Returns the HMAC algorithm to use with the KDF.
-
- :return:
- A unicode string of one of the following: "md2", "md5", "sha1",
- "sha224", "sha256", "sha384", "sha512"
- """
-
- encryption_algo = self['algorithm'].native
-
- if encryption_algo == 'pbes2':
- return self['parameters']['key_derivation_func']['parameters']['prf']['algorithm'].native
-
- if encryption_algo.find('.') == -1:
- if encryption_algo.find('_') != -1:
- _, hmac_algo, _ = encryption_algo.split('_', 2)
- return hmac_algo
-
- raise ValueError(unwrap(
- '''
- Encryption algorithm "%s" does not have a registered key
- derivation function
- ''',
- encryption_algo
- ))
-
- raise ValueError(unwrap(
- '''
- Unrecognized encryption algorithm "%s", can not determine key
- derivation hmac algorithm
- ''',
- encryption_algo
- ))
-
- @property
- def kdf_salt(self):
- """
- Returns the byte string to use as the salt for the KDF.
-
- :return:
- A byte string
- """
-
- encryption_algo = self['algorithm'].native
-
- if encryption_algo == 'pbes2':
- salt = self['parameters']['key_derivation_func']['parameters']['salt']
-
- if salt.name == 'other_source':
- raise ValueError(unwrap(
- '''
- Can not determine key derivation salt - the
- reserved-for-future-use other source salt choice was
- specified in the PBKDF2 params structure
- '''
- ))
-
- return salt.native
-
- if encryption_algo.find('.') == -1:
- if encryption_algo.find('_') != -1:
- return self['parameters']['salt'].native
-
- raise ValueError(unwrap(
- '''
- Encryption algorithm "%s" does not have a registered key
- derivation function
- ''',
- encryption_algo
- ))
-
- raise ValueError(unwrap(
- '''
- Unrecognized encryption algorithm "%s", can not determine key
- derivation salt
- ''',
- encryption_algo
- ))
-
- @property
- def kdf_iterations(self):
- """
- Returns the number of iterations that should be run via the KDF.
-
- :return:
- An integer
- """
-
- encryption_algo = self['algorithm'].native
-
- if encryption_algo == 'pbes2':
- return self['parameters']['key_derivation_func']['parameters']['iteration_count'].native
-
- if encryption_algo.find('.') == -1:
- if encryption_algo.find('_') != -1:
- return self['parameters']['iterations'].native
-
- raise ValueError(unwrap(
- '''
- Encryption algorithm "%s" does not have a registered key
- derivation function
- ''',
- encryption_algo
- ))
-
- raise ValueError(unwrap(
- '''
- Unrecognized encryption algorithm "%s", can not determine key
- derivation iterations
- ''',
- encryption_algo
- ))
-
- @property
- def key_length(self):
- """
- Returns the key length to pass to the cipher/kdf. The PKCS#5 spec does
- not specify a way to store the RC5 key length, however this tends not
- to be a problem since OpenSSL does not support RC5 in PKCS#8 and OS X
- does not provide an RC5 cipher for use in the Security Transforms
- library.
-
- :raises:
- ValueError - when the key length can not be determined
-
- :return:
- An integer representing the length in bytes
- """
-
- encryption_algo = self['algorithm'].native
-
- if encryption_algo[0:3] == 'aes':
- return {
- 'aes128_': 16,
- 'aes192_': 24,
- 'aes256_': 32,
- }[encryption_algo[0:7]]
-
- cipher_lengths = {
- 'des': 8,
- 'tripledes_3key': 24,
- }
-
- if encryption_algo in cipher_lengths:
- return cipher_lengths[encryption_algo]
-
- if encryption_algo == 'rc2':
- rc2_params = self['parameters'].parsed['encryption_scheme']['parameters'].parsed
- rc2_parameter_version = rc2_params['rc2_parameter_version'].native
-
- # See page 24 of
- # http://www.emc.com/collateral/white-papers/h11302-pkcs5v2-1-password-based-cryptography-standard-wp.pdf
- encoded_key_bits_map = {
- 160: 5, # 40-bit
- 120: 8, # 64-bit
- 58: 16, # 128-bit
- }
-
- if rc2_parameter_version in encoded_key_bits_map:
- return encoded_key_bits_map[rc2_parameter_version]
-
- if rc2_parameter_version >= 256:
- return rc2_parameter_version
-
- if rc2_parameter_version is None:
- return 4 # 32-bit default
-
- raise ValueError(unwrap(
- '''
- Invalid RC2 parameter version found in EncryptionAlgorithm
- parameters
- '''
- ))
-
- if encryption_algo == 'pbes2':
- key_length = self['parameters']['key_derivation_func']['parameters']['key_length'].native
- if key_length is not None:
- return key_length
-
- # If the KDF params don't specify the key size, we can infer it from
- # the encryption scheme for all schemes except for RC5. However, in
- # practical terms, neither OpenSSL nor OS X supports RC5 for PKCS#8
- # so it is unlikely to be an issue that is run into.
-
- return self['parameters']['encryption_scheme'].key_length
-
- if encryption_algo.find('.') == -1:
- return {
- 'pbes1_md2_des': 8,
- 'pbes1_md5_des': 8,
- 'pbes1_md2_rc2': 8,
- 'pbes1_md5_rc2': 8,
- 'pbes1_sha1_des': 8,
- 'pbes1_sha1_rc2': 8,
- 'pkcs12_sha1_rc4_128': 16,
- 'pkcs12_sha1_rc4_40': 5,
- 'pkcs12_sha1_tripledes_3key': 24,
- 'pkcs12_sha1_tripledes_2key': 16,
- 'pkcs12_sha1_rc2_128': 16,
- 'pkcs12_sha1_rc2_40': 5,
- }[encryption_algo]
-
- raise ValueError(unwrap(
- '''
- Unrecognized encryption algorithm "%s"
- ''',
- encryption_algo
- ))
-
- @property
- def encryption_mode(self):
- """
- Returns the name of the encryption mode to use.
-
- :return:
- A unicode string from one of the following: "cbc", "ecb", "ofb",
- "cfb", "wrap", "gcm", "ccm", "wrap_pad"
- """
-
- encryption_algo = self['algorithm'].native
-
- if encryption_algo[0:7] in set(['aes128_', 'aes192_', 'aes256_']):
- return encryption_algo[7:]
-
- if encryption_algo[0:6] == 'pbes1_':
- return 'cbc'
-
- if encryption_algo[0:7] == 'pkcs12_':
- return 'cbc'
-
- if encryption_algo in set(['des', 'tripledes_3key', 'rc2', 'rc5']):
- return 'cbc'
-
- if encryption_algo == 'pbes2':
- return self['parameters']['encryption_scheme'].encryption_mode
-
- raise ValueError(unwrap(
- '''
- Unrecognized encryption algorithm "%s"
- ''',
- encryption_algo
- ))
-
- @property
- def encryption_cipher(self):
- """
- Returns the name of the symmetric encryption cipher to use. The key
- length can be retrieved via the .key_length property to disambiguate
- between different variations of TripleDES, AES, and the RC* ciphers.
-
- :return:
- A unicode string from one of the following: "rc2", "rc5", "des",
- "tripledes", "aes"
- """
-
- encryption_algo = self['algorithm'].native
-
- if encryption_algo[0:7] in set(['aes128_', 'aes192_', 'aes256_']):
- return 'aes'
-
- if encryption_algo in set(['des', 'rc2', 'rc5']):
- return encryption_algo
-
- if encryption_algo == 'tripledes_3key':
- return 'tripledes'
-
- if encryption_algo == 'pbes2':
- return self['parameters']['encryption_scheme'].encryption_cipher
-
- if encryption_algo.find('.') == -1:
- return {
- 'pbes1_md2_des': 'des',
- 'pbes1_md5_des': 'des',
- 'pbes1_md2_rc2': 'rc2',
- 'pbes1_md5_rc2': 'rc2',
- 'pbes1_sha1_des': 'des',
- 'pbes1_sha1_rc2': 'rc2',
- 'pkcs12_sha1_rc4_128': 'rc4',
- 'pkcs12_sha1_rc4_40': 'rc4',
- 'pkcs12_sha1_tripledes_3key': 'tripledes',
- 'pkcs12_sha1_tripledes_2key': 'tripledes',
- 'pkcs12_sha1_rc2_128': 'rc2',
- 'pkcs12_sha1_rc2_40': 'rc2',
- }[encryption_algo]
-
- raise ValueError(unwrap(
- '''
- Unrecognized encryption algorithm "%s"
- ''',
- encryption_algo
- ))
-
- @property
- def encryption_block_size(self):
- """
- Returns the block size of the encryption cipher, in bytes.
-
- :return:
- An integer that is the block size in bytes
- """
-
- encryption_algo = self['algorithm'].native
-
- if encryption_algo[0:7] in set(['aes128_', 'aes192_', 'aes256_']):
- return 16
-
- cipher_map = {
- 'des': 8,
- 'tripledes_3key': 8,
- 'rc2': 8,
- }
- if encryption_algo in cipher_map:
- return cipher_map[encryption_algo]
-
- if encryption_algo == 'rc5':
- return self['parameters'].parsed['block_size_in_bits'].native // 8
-
- if encryption_algo == 'pbes2':
- return self['parameters']['encryption_scheme'].encryption_block_size
-
- if encryption_algo.find('.') == -1:
- return {
- 'pbes1_md2_des': 8,
- 'pbes1_md5_des': 8,
- 'pbes1_md2_rc2': 8,
- 'pbes1_md5_rc2': 8,
- 'pbes1_sha1_des': 8,
- 'pbes1_sha1_rc2': 8,
- 'pkcs12_sha1_rc4_128': 0,
- 'pkcs12_sha1_rc4_40': 0,
- 'pkcs12_sha1_tripledes_3key': 8,
- 'pkcs12_sha1_tripledes_2key': 8,
- 'pkcs12_sha1_rc2_128': 8,
- 'pkcs12_sha1_rc2_40': 8,
- }[encryption_algo]
-
- raise ValueError(unwrap(
- '''
- Unrecognized encryption algorithm "%s"
- ''',
- encryption_algo
- ))
-
- @property
- def encryption_iv(self):
- """
- Returns the byte string of the initialization vector for the encryption
- scheme. Only PBES2 stores the IV in the params. For PBES1, the IV
- is derived from the KDF and this property will return None.
-
- :return:
- A byte string or None
- """
-
- encryption_algo = self['algorithm'].native
-
- if encryption_algo in set(['rc2', 'rc5']):
- return self['parameters'].parsed['iv'].native
-
- # For DES/Triple DES and AES the IV is the entirety of the parameters
- octet_string_iv_oids = set([
- 'des',
- 'tripledes_3key',
- 'aes128_cbc',
- 'aes192_cbc',
- 'aes256_cbc',
- 'aes128_ofb',
- 'aes192_ofb',
- 'aes256_ofb',
- ])
- if encryption_algo in octet_string_iv_oids:
- return self['parameters'].native
-
- if encryption_algo == 'pbes2':
- return self['parameters']['encryption_scheme'].encryption_iv
-
- # All of the PBES1 algos use their KDF to create the IV. For the pbkdf1,
- # the KDF is told to generate a key that is an extra 8 bytes long, and
- # that is used for the IV. For the PKCS#12 KDF, it is called with an id
- # of 2 to generate the IV. In either case, we can't return the IV
- # without knowing the user's password.
- if encryption_algo.find('.') == -1:
- return None
-
- raise ValueError(unwrap(
- '''
- Unrecognized encryption algorithm "%s"
- ''',
- encryption_algo
- ))
-
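-# Usage sketch for the properties above (salt and iteration count are
-# arbitrary placeholders): the helpers expose KDF and cipher details without
-# the caller handling OIDs directly. For a PBES1 scheme such as
-# pbes1_sha1_des:
-#
-#   algo = EncryptionAlgorithm({
-#       'algorithm': 'pbes1_sha1_des',
-#       'parameters': {'salt': b'\x00' * 8, 'iterations': 2048},
-#   })
-#   assert algo.kdf == 'pbkdf1'
-#   assert algo.kdf_hmac == 'sha1'
-#   assert algo.encryption_cipher == 'des'
-#   assert algo.key_length == 8 and algo.encryption_block_size == 8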
-
-class Pbes2Params(Sequence):
- _fields = [
- ('key_derivation_func', KdfAlgorithm),
- ('encryption_scheme', EncryptionAlgorithm),
- ]
-
-
-class Pbmac1Params(Sequence):
- _fields = [
- ('key_derivation_func', KdfAlgorithm),
- ('message_auth_scheme', HmacAlgorithm),
- ]
-
-
-class Pkcs5MacId(ObjectIdentifier):
- _map = {
- '1.2.840.113549.1.5.14': 'pbmac1',
- }
-
-
-class Pkcs5MacAlgorithm(Sequence):
- _fields = [
- ('algorithm', Pkcs5MacId),
- ('parameters', Any),
- ]
-
- _oid_pair = ('algorithm', 'parameters')
- _oid_specs = {
- 'pbmac1': Pbmac1Params,
- }
-
-
-EncryptionAlgorithm._oid_specs['pbes2'] = Pbes2Params
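-
-
-# Usage sketch (der_bytes stands in for a caller-supplied, DER-encoded
-# AlgorithmIdentifier): with 'pbes2' wired to Pbes2Params above, the
-# EncryptionAlgorithm properties follow the nested key_derivation_func /
-# encryption_scheme structure automatically:
-#
-#   algo = EncryptionAlgorithm.load(der_bytes)
-#   if algo['algorithm'].native == 'pbes2':
-#       kdf_name = algo.kdf                    # e.g. 'pbkdf2'
-#       salt, rounds = algo.kdf_salt, algo.kdf_iterations
-#       cipher, length = algo.encryption_cipher, algo.key_length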
diff --git a/functions/source/CreateSSHKey/asn1crypto/cms.py b/functions/source/CreateSSHKey/asn1crypto/cms.py
deleted file mode 100644
index 2964f0a..0000000
--- a/functions/source/CreateSSHKey/asn1crypto/cms.py
+++ /dev/null
@@ -1,930 +0,0 @@
-# coding: utf-8
-
-"""
-ASN.1 type classes for cryptographic message syntax (CMS). Structures are also
-compatible with PKCS#7. Exports the following items:
-
- - AuthenticatedData()
- - AuthEnvelopedData()
- - CompressedData()
- - ContentInfo()
- - DigestedData()
- - EncryptedData()
- - EnvelopedData()
- - SignedAndEnvelopedData()
- - SignedData()
-
-Other type classes are defined that help compose the types listed above.
-"""
-
-from __future__ import unicode_literals, division, absolute_import, print_function
-
-try:
- import zlib
-except (ImportError):
- zlib = None
-
-from .algos import (
- _ForceNullParameters,
- DigestAlgorithm,
- EncryptionAlgorithm,
- HmacAlgorithm,
- KdfAlgorithm,
- SignedDigestAlgorithm,
-)
-from .core import (
- Any,
- BitString,
- Choice,
- Enumerated,
- GeneralizedTime,
- Integer,
- ObjectIdentifier,
- OctetBitString,
- OctetString,
- ParsableOctetString,
- Sequence,
- SequenceOf,
- SetOf,
- UTCTime,
- UTF8String,
-)
-from .crl import CertificateList
-from .keys import PublicKeyInfo
-from .ocsp import OCSPResponse
-from .x509 import Attributes, Certificate, Extensions, GeneralName, GeneralNames, Name
-
-
-# These structures are taken from
-# ftp://ftp.rsasecurity.com/pub/pkcs/ascii/pkcs-6.asc
-
-class ExtendedCertificateInfo(Sequence):
- _fields = [
- ('version', Integer),
- ('certificate', Certificate),
- ('attributes', Attributes),
- ]
-
-
-class ExtendedCertificate(Sequence):
- _fields = [
- ('extended_certificate_info', ExtendedCertificateInfo),
- ('signature_algorithm', SignedDigestAlgorithm),
- ('signature', OctetBitString),
- ]
-
-
-# These structures are taken from https://tools.ietf.org/html/rfc5652,
-# https://tools.ietf.org/html/rfc5083, http://tools.ietf.org/html/rfc2315,
-# https://tools.ietf.org/html/rfc5940, https://tools.ietf.org/html/rfc3274,
-# https://tools.ietf.org/html/rfc3281
-
-
-class CMSVersion(Integer):
- _map = {
- 0: 'v0',
- 1: 'v1',
- 2: 'v2',
- 3: 'v3',
- 4: 'v4',
- 5: 'v5',
- }
-
-
-class CMSAttributeType(ObjectIdentifier):
- _map = {
- '1.2.840.113549.1.9.3': 'content_type',
- '1.2.840.113549.1.9.4': 'message_digest',
- '1.2.840.113549.1.9.5': 'signing_time',
- '1.2.840.113549.1.9.6': 'counter_signature',
- # https://tools.ietf.org/html/rfc3161#page-20
- '1.2.840.113549.1.9.16.2.14': 'signature_time_stamp_token',
- # https://tools.ietf.org/html/rfc6211#page-5
- '1.2.840.113549.1.9.52': 'cms_algorithm_protection',
- }
-
-
-class Time(Choice):
- _alternatives = [
- ('utc_time', UTCTime),
- ('generalized_time', GeneralizedTime),
- ]
-
-
-class ContentType(ObjectIdentifier):
- _map = {
- '1.2.840.113549.1.7.1': 'data',
- '1.2.840.113549.1.7.2': 'signed_data',
- '1.2.840.113549.1.7.3': 'enveloped_data',
- '1.2.840.113549.1.7.4': 'signed_and_enveloped_data',
- '1.2.840.113549.1.7.5': 'digested_data',
- '1.2.840.113549.1.7.6': 'encrypted_data',
- '1.2.840.113549.1.9.16.1.2': 'authenticated_data',
- '1.2.840.113549.1.9.16.1.9': 'compressed_data',
- '1.2.840.113549.1.9.16.1.23': 'authenticated_enveloped_data',
- }
-
-
-class CMSAlgorithmProtection(Sequence):
- _fields = [
- ('digest_algorithm', DigestAlgorithm),
- ('signature_algorithm', SignedDigestAlgorithm, {'implicit': 1, 'optional': True}),
- ('mac_algorithm', HmacAlgorithm, {'implicit': 2, 'optional': True}),
- ]
-
-
-class SetOfContentType(SetOf):
- _child_spec = ContentType
-
-
-class SetOfOctetString(SetOf):
- _child_spec = OctetString
-
-
-class SetOfTime(SetOf):
- _child_spec = Time
-
-
-class SetOfAny(SetOf):
- _child_spec = Any
-
-
-class SetOfCMSAlgorithmProtection(SetOf):
- _child_spec = CMSAlgorithmProtection
-
-
-class CMSAttribute(Sequence):
- _fields = [
- ('type', CMSAttributeType),
- ('values', None),
- ]
-
- _oid_specs = {}
-
- def _values_spec(self):
- return self._oid_specs.get(self['type'].native, SetOfAny)
-
- _spec_callbacks = {
- 'values': _values_spec
- }
-
-
-class CMSAttributes(SetOf):
- _child_spec = CMSAttribute
-
-
-class IssuerSerial(Sequence):
- _fields = [
- ('issuer', GeneralNames),
- ('serial', Integer),
- ('issuer_uid', OctetBitString, {'optional': True}),
- ]
-
-
-class AttCertVersion(Integer):
- _map = {
- 0: 'v1',
- 1: 'v2',
- }
-
-
-class AttCertSubject(Choice):
- _alternatives = [
- ('base_certificate_id', IssuerSerial, {'explicit': 0}),
- ('subject_name', GeneralNames, {'explicit': 1}),
- ]
-
-
-class AttCertValidityPeriod(Sequence):
- _fields = [
- ('not_before_time', GeneralizedTime),
- ('not_after_time', GeneralizedTime),
- ]
-
-
-class AttributeCertificateInfoV1(Sequence):
- _fields = [
- ('version', AttCertVersion, {'default': 'v1'}),
- ('subject', AttCertSubject),
- ('issuer', GeneralNames),
- ('signature', SignedDigestAlgorithm),
- ('serial_number', Integer),
- ('att_cert_validity_period', AttCertValidityPeriod),
- ('attributes', Attributes),
- ('issuer_unique_id', OctetBitString, {'optional': True}),
- ('extensions', Extensions, {'optional': True}),
- ]
-
-
-class AttributeCertificateV1(Sequence):
- _fields = [
- ('ac_info', AttributeCertificateInfoV1),
- ('signature_algorithm', SignedDigestAlgorithm),
- ('signature', OctetBitString),
- ]
-
-
-class DigestedObjectType(Enumerated):
- _map = {
- 0: 'public_key',
- 1: 'public_key_cert',
- 2: 'other_objy_types',
- }
-
-
-class ObjectDigestInfo(Sequence):
- _fields = [
- ('digested_object_type', DigestedObjectType),
- ('other_object_type_id', ObjectIdentifier, {'optional': True}),
- ('digest_algorithm', DigestAlgorithm),
- ('object_digest', OctetBitString),
- ]
-
-
-class Holder(Sequence):
- _fields = [
- ('base_certificate_id', IssuerSerial, {'implicit': 0, 'optional': True}),
- ('entity_name', GeneralNames, {'implicit': 1, 'optional': True}),
- ('object_digest_info', ObjectDigestInfo, {'implicit': 2, 'optional': True}),
- ]
-
-
-class V2Form(Sequence):
- _fields = [
- ('issuer_name', GeneralNames, {'optional': True}),
- ('base_certificate_id', IssuerSerial, {'explicit': 0, 'optional': True}),
- ('object_digest_info', ObjectDigestInfo, {'explicit': 1, 'optional': True}),
- ]
-
-
-class AttCertIssuer(Choice):
- _alternatives = [
- ('v1_form', GeneralNames),
- ('v2_form', V2Form, {'explicit': 0}),
- ]
-
-
-class IetfAttrValue(Choice):
- _alternatives = [
- ('octets', OctetString),
- ('oid', ObjectIdentifier),
- ('string', UTF8String),
- ]
-
-
-class IetfAttrValues(SequenceOf):
- _child_spec = IetfAttrValue
-
-
-class IetfAttrSyntax(Sequence):
- _fields = [
- ('policy_authority', GeneralNames, {'implicit': 0, 'optional': True}),
- ('values', IetfAttrValues),
- ]
-
-
-class SetOfIetfAttrSyntax(SetOf):
- _child_spec = IetfAttrSyntax
-
-
-class SvceAuthInfo(Sequence):
- _fields = [
- ('service', GeneralName),
- ('ident', GeneralName),
- ('auth_info', OctetString, {'optional': True}),
- ]
-
-
-class SetOfSvceAuthInfo(SetOf):
- _child_spec = SvceAuthInfo
-
-
-class RoleSyntax(Sequence):
- _fields = [
- ('role_authority', GeneralNames, {'implicit': 0, 'optional': True}),
- ('role_name', GeneralName, {'implicit': 1}),
- ]
-
-
-class SetOfRoleSyntax(SetOf):
- _child_spec = RoleSyntax
-
-
-class ClassList(BitString):
- _map = {
- 0: 'unmarked',
- 1: 'unclassified',
- 2: 'restricted',
- 3: 'confidential',
- 4: 'secret',
- 5: 'top_secret',
- }
-
-
-class SecurityCategory(Sequence):
- _fields = [
- ('type', ObjectIdentifier, {'implicit': 0}),
- ('value', Any, {'implicit': 1}),
- ]
-
-
-class SetOfSecurityCategory(SetOf):
- _child_spec = SecurityCategory
-
-
-class Clearance(Sequence):
- _fields = [
- ('policy_id', ObjectIdentifier, {'implicit': 0}),
- ('class_list', ClassList, {'implicit': 1, 'default': 'unclassified'}),
- ('security_categories', SetOfSecurityCategory, {'implicit': 2, 'optional': True}),
- ]
-
-
-class SetOfClearance(SetOf):
- _child_spec = Clearance
-
-
-class BigTime(Sequence):
- _fields = [
- ('major', Integer),
- ('fractional_seconds', Integer),
- ('sign', Integer, {'optional': True}),
- ]
-
-
-class LeapData(Sequence):
- _fields = [
- ('leap_time', BigTime),
- ('action', Integer),
- ]
-
-
-class SetOfLeapData(SetOf):
- _child_spec = LeapData
-
-
-class TimingMetrics(Sequence):
- _fields = [
- ('ntp_time', BigTime),
- ('offset', BigTime),
- ('delay', BigTime),
- ('expiration', BigTime),
- ('leap_event', SetOfLeapData, {'optional': True}),
- ]
-
-
-class SetOfTimingMetrics(SetOf):
- _child_spec = TimingMetrics
-
-
-class TimingPolicy(Sequence):
- _fields = [
- ('policy_id', SequenceOf, {'spec': ObjectIdentifier}),
- ('max_offset', BigTime, {'explicit': 0, 'optional': True}),
- ('max_delay', BigTime, {'explicit': 1, 'optional': True}),
- ]
-
-
-class SetOfTimingPolicy(SetOf):
- _child_spec = TimingPolicy
-
-
-class AttCertAttributeType(ObjectIdentifier):
- _map = {
- '1.3.6.1.5.5.7.10.1': 'authentication_info',
- '1.3.6.1.5.5.7.10.2': 'access_identity',
- '1.3.6.1.5.5.7.10.3': 'charging_identity',
- '1.3.6.1.5.5.7.10.4': 'group',
- '2.5.4.72': 'role',
- '2.5.4.55': 'clearance',
- '1.3.6.1.4.1.601.10.4.1': 'timing_metrics',
- '1.3.6.1.4.1.601.10.4.2': 'timing_policy',
- }
-
-
-class AttCertAttribute(Sequence):
- _fields = [
- ('type', AttCertAttributeType),
- ('values', None),
- ]
-
- _oid_specs = {
- 'authentication_info': SetOfSvceAuthInfo,
- 'access_identity': SetOfSvceAuthInfo,
- 'charging_identity': SetOfIetfAttrSyntax,
- 'group': SetOfIetfAttrSyntax,
- 'role': SetOfRoleSyntax,
- 'clearance': SetOfClearance,
- 'timing_metrics': SetOfTimingMetrics,
- 'timing_policy': SetOfTimingPolicy,
- }
-
- def _values_spec(self):
- return self._oid_specs.get(self['type'].native, SetOfAny)
-
- _spec_callbacks = {
- 'values': _values_spec
- }
-
-
-class AttCertAttributes(SequenceOf):
- _child_spec = AttCertAttribute
-
-
-class AttributeCertificateInfoV2(Sequence):
- _fields = [
- ('version', AttCertVersion),
- ('holder', Holder),
- ('issuer', AttCertIssuer),
- ('signature', SignedDigestAlgorithm),
- ('serial_number', Integer),
- ('att_cert_validity_period', AttCertValidityPeriod),
- ('attributes', AttCertAttributes),
- ('issuer_unique_id', OctetBitString, {'optional': True}),
- ('extensions', Extensions, {'optional': True}),
- ]
-
-
-class AttributeCertificateV2(Sequence):
- # Handle the situation where a V2 cert is encoded as V1
- _bad_tag = 1
-
- _fields = [
- ('ac_info', AttributeCertificateInfoV2),
- ('signature_algorithm', SignedDigestAlgorithm),
- ('signature', OctetBitString),
- ]
-
-
-class OtherCertificateFormat(Sequence):
- _fields = [
- ('other_cert_format', ObjectIdentifier),
- ('other_cert', Any),
- ]
-
-
-class CertificateChoices(Choice):
- _alternatives = [
- ('certificate', Certificate),
- ('extended_certificate', ExtendedCertificate, {'implicit': 0}),
- ('v1_attr_cert', AttributeCertificateV1, {'implicit': 1}),
- ('v2_attr_cert', AttributeCertificateV2, {'implicit': 2}),
- ('other', OtherCertificateFormat, {'implicit': 3}),
- ]
-
- def validate(self, class_, tag, contents):
- """
- Ensures that the class and tag specified exist as an alternative. This
- custom version fixes parsing broken encodings where a V2 attribute
- certificate is encoded as a V1
-
- :param class_:
- The integer class_ from the encoded value header
-
- :param tag:
- The integer tag from the encoded value header
-
- :param contents:
- A byte string of the contents of the value - used when the object
- is explicitly tagged
-
- :raises:
- ValueError - when value is not a valid alternative
- """
-
- super(CertificateChoices, self).validate(class_, tag, contents)
- if self._choice == 2:
- if AttCertVersion.load(Sequence.load(contents)[0].dump()).native == 'v2':
- self._choice = 3
-
-
-class CertificateSet(SetOf):
- _child_spec = CertificateChoices
-
-
-class ContentInfo(Sequence):
- _fields = [
- ('content_type', ContentType),
- ('content', Any, {'explicit': 0, 'optional': True}),
- ]
-
- _oid_pair = ('content_type', 'content')
- _oid_specs = {}
-
-
-class SetOfContentInfo(SetOf):
- _child_spec = ContentInfo
-
-
-class EncapsulatedContentInfo(Sequence):
- _fields = [
- ('content_type', ContentType),
- ('content', ParsableOctetString, {'explicit': 0, 'optional': True}),
- ]
-
- _oid_pair = ('content_type', 'content')
- _oid_specs = {}
-
-
-class IssuerAndSerialNumber(Sequence):
- _fields = [
- ('issuer', Name),
- ('serial_number', Integer),
- ]
-
-
-class SignerIdentifier(Choice):
- _alternatives = [
- ('issuer_and_serial_number', IssuerAndSerialNumber),
- ('subject_key_identifier', OctetString, {'implicit': 0}),
- ]
-
-
-class DigestAlgorithms(SetOf):
- _child_spec = DigestAlgorithm
-
-
-class CertificateRevocationLists(SetOf):
- _child_spec = CertificateList
-
-
-class SCVPReqRes(Sequence):
- _fields = [
- ('request', ContentInfo, {'explicit': 0, 'optional': True}),
- ('response', ContentInfo),
- ]
-
-
-class OtherRevInfoFormatId(ObjectIdentifier):
- _map = {
- '1.3.6.1.5.5.7.16.2': 'ocsp_response',
- '1.3.6.1.5.5.7.16.4': 'scvp',
- }
-
-
-class OtherRevocationInfoFormat(Sequence):
- _fields = [
- ('other_rev_info_format', OtherRevInfoFormatId),
- ('other_rev_info', Any),
- ]
-
- _oid_pair = ('other_rev_info_format', 'other_rev_info')
- _oid_specs = {
- 'ocsp_response': OCSPResponse,
- 'scvp': SCVPReqRes,
- }
-
-
-class RevocationInfoChoice(Choice):
- _alternatives = [
- ('crl', CertificateList),
- ('other', OtherRevocationInfoFormat, {'implicit': 1}),
- ]
-
-
-class RevocationInfoChoices(SetOf):
- _child_spec = RevocationInfoChoice
-
-
-class SignerInfo(Sequence):
- _fields = [
- ('version', CMSVersion),
- ('sid', SignerIdentifier),
- ('digest_algorithm', DigestAlgorithm),
- ('signed_attrs', CMSAttributes, {'implicit': 0, 'optional': True}),
- ('signature_algorithm', SignedDigestAlgorithm),
- ('signature', OctetString),
- ('unsigned_attrs', CMSAttributes, {'implicit': 1, 'optional': True}),
- ]
-
-
-class SignerInfos(SetOf):
- _child_spec = SignerInfo
-
-
-class SignedData(Sequence):
- _fields = [
- ('version', CMSVersion),
- ('digest_algorithms', DigestAlgorithms),
- ('encap_content_info', None),
- ('certificates', CertificateSet, {'implicit': 0, 'optional': True}),
- ('crls', RevocationInfoChoices, {'implicit': 1, 'optional': True}),
- ('signer_infos', SignerInfos),
- ]
-
- def _encap_content_info_spec(self):
- # If the encap_content_info is version v1, then this could be a PKCS#7
- # structure, or a CMS structure. CMS wraps the encoded value in an
- # Octet String tag.
-
- # If the version is greater than 1, it is definitely CMS
- if self['version'].native != 'v1':
- return EncapsulatedContentInfo
-
- # Otherwise, the ContentInfo spec from PKCS#7 will be compatible with
- # CMS v1 (which only allows Data, an Octet String) and PKCS#7, which
- # allows Any
- return ContentInfo
-
- _spec_callbacks = {
- 'encap_content_info': _encap_content_info_spec
- }
-
-
-class OriginatorInfo(Sequence):
- _fields = [
- ('certs', CertificateSet, {'implicit': 0, 'optional': True}),
- ('crls', RevocationInfoChoices, {'implicit': 1, 'optional': True}),
- ]
-
-
-class RecipientIdentifier(Choice):
- _alternatives = [
- ('issuer_and_serial_number', IssuerAndSerialNumber),
- ('subject_key_identifier', OctetString, {'implicit': 0}),
- ]
-
-
-class KeyEncryptionAlgorithmId(ObjectIdentifier):
- _map = {
- '1.2.840.113549.1.1.1': 'rsa',
- '2.16.840.1.101.3.4.1.5': 'aes128_wrap',
- '2.16.840.1.101.3.4.1.8': 'aes128_wrap_pad',
- '2.16.840.1.101.3.4.1.25': 'aes192_wrap',
- '2.16.840.1.101.3.4.1.28': 'aes192_wrap_pad',
- '2.16.840.1.101.3.4.1.45': 'aes256_wrap',
- '2.16.840.1.101.3.4.1.48': 'aes256_wrap_pad',
- }
-
-
-class KeyEncryptionAlgorithm(_ForceNullParameters, Sequence):
- _fields = [
- ('algorithm', KeyEncryptionAlgorithmId),
- ('parameters', Any, {'optional': True}),
- ]
-
-
-class KeyTransRecipientInfo(Sequence):
- _fields = [
- ('version', CMSVersion),
- ('rid', RecipientIdentifier),
- ('key_encryption_algorithm', KeyEncryptionAlgorithm),
- ('encrypted_key', OctetString),
- ]
-
-
-class OriginatorIdentifierOrKey(Choice):
- _alternatives = [
- ('issuer_and_serial_number', IssuerAndSerialNumber),
- ('subject_key_identifier', OctetString, {'implicit': 0}),
- ('originator_key', PublicKeyInfo, {'implicit': 1}),
- ]
-
-
-class OtherKeyAttribute(Sequence):
- _fields = [
- ('key_attr_id', ObjectIdentifier),
- ('key_attr', Any),
- ]
-
-
-class RecipientKeyIdentifier(Sequence):
- _fields = [
- ('subject_key_identifier', OctetString),
- ('date', GeneralizedTime, {'optional': True}),
- ('other', OtherKeyAttribute, {'optional': True}),
- ]
-
-
-class KeyAgreementRecipientIdentifier(Choice):
- _alternatives = [
- ('issuer_and_serial_number', IssuerAndSerialNumber),
- ('r_key_id', RecipientKeyIdentifier, {'implicit': 0}),
- ]
-
-
-class RecipientEncryptedKey(Sequence):
- _fields = [
- ('rid', KeyAgreementRecipientIdentifier),
- ('encrypted_key', OctetString),
- ]
-
-
-class RecipientEncryptedKeys(SequenceOf):
- _child_spec = RecipientEncryptedKey
-
-
-class KeyAgreeRecipientInfo(Sequence):
- _fields = [
- ('version', CMSVersion),
- ('originator', OriginatorIdentifierOrKey, {'explicit': 0}),
- ('ukm', OctetString, {'explicit': 1, 'optional': True}),
- ('key_encryption_algorithm', KeyEncryptionAlgorithm),
- ('recipient_encrypted_keys', RecipientEncryptedKeys),
- ]
-
-
-class KEKIdentifier(Sequence):
- _fields = [
- ('key_identifier', OctetString),
- ('date', GeneralizedTime, {'optional': True}),
- ('other', OtherKeyAttribute, {'optional': True}),
- ]
-
-
-class KEKRecipientInfo(Sequence):
- _fields = [
- ('version', CMSVersion),
- ('kekid', KEKIdentifier),
- ('key_encryption_algorithm', KeyEncryptionAlgorithm),
- ('encrypted_key', OctetString),
- ]
-
-
-class PasswordRecipientInfo(Sequence):
- _fields = [
- ('version', CMSVersion),
- ('key_derivation_algorithm', KdfAlgorithm, {'implicit': 0, 'optional': True}),
- ('key_encryption_algorithm', KeyEncryptionAlgorithm),
- ('encrypted_key', OctetString),
- ]
-
-
-class OtherRecipientInfo(Sequence):
- _fields = [
- ('ori_type', ObjectIdentifier),
- ('ori_value', Any),
- ]
-
-
-class RecipientInfo(Choice):
- _alternatives = [
- ('ktri', KeyTransRecipientInfo),
- ('kari', KeyAgreeRecipientInfo, {'implicit': 1}),
- ('kekri', KEKRecipientInfo, {'implicit': 2}),
- ('pwri', PasswordRecipientInfo, {'implicit': 3}),
- ('ori', OtherRecipientInfo, {'implicit': 4}),
- ]
-
-
-class RecipientInfos(SetOf):
- _child_spec = RecipientInfo
-
-
-class EncryptedContentInfo(Sequence):
- _fields = [
- ('content_type', ContentType),
- ('content_encryption_algorithm', EncryptionAlgorithm),
- ('encrypted_content', OctetString, {'implicit': 0, 'optional': True}),
- ]
-
-
-class EnvelopedData(Sequence):
- _fields = [
- ('version', CMSVersion),
- ('originator_info', OriginatorInfo, {'implicit': 0, 'optional': True}),
- ('recipient_infos', RecipientInfos),
- ('encrypted_content_info', EncryptedContentInfo),
- ('unprotected_attrs', CMSAttributes, {'implicit': 1, 'optional': True}),
- ]
-
-
-class SignedAndEnvelopedData(Sequence):
- _fields = [
- ('version', CMSVersion),
- ('recipient_infos', RecipientInfos),
- ('digest_algorithms', DigestAlgorithms),
- ('encrypted_content_info', EncryptedContentInfo),
- ('certificates', CertificateSet, {'implicit': 0, 'optional': True}),
- ('crls', CertificateRevocationLists, {'implicit': 1, 'optional': True}),
- ('signer_infos', SignerInfos),
- ]
-
-
-class DigestedData(Sequence):
- _fields = [
- ('version', CMSVersion),
- ('digest_algorithm', DigestAlgorithm),
- ('encap_content_info', None),
- ('digest', OctetString),
- ]
-
- def _encap_content_info_spec(self):
- # If the encap_content_info is version v1, then this could be a PKCS#7
- # structure, or a CMS structure. CMS wraps the encoded value in an
- # Octet String tag.
-
- # If the version is greater than 1, it is definitely CMS
- if self['version'].native != 'v1':
- return EncapsulatedContentInfo
-
- # Otherwise, the ContentInfo spec from PKCS#7 will be compatible with
- # CMS v1 (which only allows Data, an Octet String) and PKCS#7, which
- # allows Any
- return ContentInfo
-
- _spec_callbacks = {
- 'encap_content_info': _encap_content_info_spec
- }
-
-
-class EncryptedData(Sequence):
- _fields = [
- ('version', CMSVersion),
- ('encrypted_content_info', EncryptedContentInfo),
- ('unprotected_attrs', CMSAttributes, {'implicit': 1, 'optional': True}),
- ]
-
-
-class AuthenticatedData(Sequence):
- _fields = [
- ('version', CMSVersion),
- ('originator_info', OriginatorInfo, {'implicit': 0, 'optional': True}),
- ('recipient_infos', RecipientInfos),
- ('mac_algorithm', HmacAlgorithm),
- ('digest_algorithm', DigestAlgorithm, {'implicit': 1, 'optional': True}),
- # This does not require the _spec_callbacks approach of SignedData and
- # DigestedData since AuthenticatedData was not part of PKCS#7
- ('encap_content_info', EncapsulatedContentInfo),
- ('auth_attrs', CMSAttributes, {'implicit': 2, 'optional': True}),
- ('mac', OctetString),
- ('unauth_attrs', CMSAttributes, {'implicit': 3, 'optional': True}),
- ]
-
-
-class AuthEnvelopedData(Sequence):
- _fields = [
- ('version', CMSVersion),
- ('originator_info', OriginatorInfo, {'implicit': 0, 'optional': True}),
- ('recipient_infos', RecipientInfos),
- ('auth_encrypted_content_info', EncryptedContentInfo),
- ('auth_attrs', CMSAttributes, {'implicit': 1, 'optional': True}),
- ('mac', OctetString),
- ('unauth_attrs', CMSAttributes, {'implicit': 2, 'optional': True}),
- ]
-
-
-class CompressionAlgorithmId(ObjectIdentifier):
- _map = {
- '1.2.840.113549.1.9.16.3.8': 'zlib',
- }
-
-
-class CompressionAlgorithm(Sequence):
- _fields = [
- ('algorithm', CompressionAlgorithmId),
- ('parameters', Any, {'optional': True}),
- ]
-
-
-class CompressedData(Sequence):
- _fields = [
- ('version', CMSVersion),
- ('compression_algorithm', CompressionAlgorithm),
- ('encap_content_info', EncapsulatedContentInfo),
- ]
-
- _decompressed = None
-
- @property
- def decompressed(self):
- if self._decompressed is None:
- if zlib is None:
- raise SystemError('The zlib module is not available')
- self._decompressed = zlib.decompress(self['encap_content_info']['content'].native)
- return self._decompressed
-
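-# Usage sketch for the property above (der_bytes stands in for caller-supplied
-# input): once the ContentInfo spec map below registers 'compressed_data', a
-# compressed CMS payload can be inflated directly. The property simply runs
-# zlib.decompress() on the encapsulated content and raises SystemError if the
-# zlib module is unavailable:
-#
-#   info = ContentInfo.load(der_bytes)
-#   if info['content_type'].native == 'compressed_data':
-#       payload = info['content'].decompressed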
-
-ContentInfo._oid_specs = {
- 'data': OctetString,
- 'signed_data': SignedData,
- 'enveloped_data': EnvelopedData,
- 'signed_and_enveloped_data': SignedAndEnvelopedData,
- 'digested_data': DigestedData,
- 'encrypted_data': EncryptedData,
- 'authenticated_data': AuthenticatedData,
- 'compressed_data': CompressedData,
- 'authenticated_enveloped_data': AuthEnvelopedData,
-}
-
-
-EncapsulatedContentInfo._oid_specs = {
- 'signed_data': SignedData,
- 'enveloped_data': EnvelopedData,
- 'signed_and_enveloped_data': SignedAndEnvelopedData,
- 'digested_data': DigestedData,
- 'encrypted_data': EncryptedData,
- 'authenticated_data': AuthenticatedData,
- 'compressed_data': CompressedData,
- 'authenticated_enveloped_data': AuthEnvelopedData,
-}
-
-
-CMSAttribute._oid_specs = {
- 'content_type': SetOfContentType,
- 'message_digest': SetOfOctetString,
- 'signing_time': SetOfTime,
- 'counter_signature': SignerInfos,
- 'signature_time_stamp_token': SetOfContentInfo,
- 'cms_algorithm_protection': SetOfCMSAlgorithmProtection,
-}
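-
-
-# Usage sketch (der_bytes stands in for caller-supplied input): with the spec
-# registrations above in place, loading a ContentInfo dispatches on the
-# content type, so a signed message can be walked without manual tag handling:
-#
-#   info = ContentInfo.load(der_bytes)
-#   if info['content_type'].native == 'signed_data':
-#       for signer in info['content']['signer_infos']:
-#           digest_name = signer['digest_algorithm']['algorithm'].native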
diff --git a/functions/source/CreateSSHKey/asn1crypto/core.py b/functions/source/CreateSSHKey/asn1crypto/core.py
deleted file mode 100644
index 97eeda3..0000000
--- a/functions/source/CreateSSHKey/asn1crypto/core.py
+++ /dev/null
@@ -1,5234 +0,0 @@
-# coding: utf-8
-
-"""
-ASN.1 type classes for universal types. Exports the following items:
-
- - load()
- - Any()
- - Asn1Value()
- - BitString()
- - BMPString()
- - Boolean()
- - CharacterString()
- - Choice()
- - EmbeddedPdv()
- - Enumerated()
- - GeneralizedTime()
- - GeneralString()
- - GraphicString()
- - IA5String()
- - InstanceOf()
- - Integer()
- - IntegerBitString()
- - IntegerOctetString()
- - Null()
- - NumericString()
- - ObjectDescriptor()
- - ObjectIdentifier()
- - OctetBitString()
- - OctetString()
- - PrintableString()
- - Real()
- - RelativeOid()
- - Sequence()
- - SequenceOf()
- - Set()
- - SetOf()
- - TeletexString()
- - UniversalString()
- - UTCTime()
- - UTF8String()
- - VideotexString()
- - VisibleString()
- - VOID
- - Void()
-
-Other type classes are defined that help compose the types listed above.
-"""
-
-from __future__ import unicode_literals, division, absolute_import, print_function
-
-from datetime import datetime, timedelta
-import binascii
-import copy
-import math
-import re
-import sys
-
-from . import _teletex_codec
-from ._errors import unwrap
-from ._ordereddict import OrderedDict
-from ._types import type_name, str_cls, byte_cls, int_types, chr_cls
-from .parser import _parse, _dump_header
-from .util import int_to_bytes, int_from_bytes, timezone, extended_datetime
-
-if sys.version_info <= (3,):
- from cStringIO import StringIO as BytesIO
-
- range = xrange # noqa
- _PY2 = True
-
-else:
- from io import BytesIO
-
- _PY2 = False
-
-
-_teletex_codec.register()
-
-
-CLASS_NUM_TO_NAME_MAP = {
- 0: 'universal',
- 1: 'application',
- 2: 'context',
- 3: 'private',
-}
-
-CLASS_NAME_TO_NUM_MAP = {
- 'universal': 0,
- 'application': 1,
- 'context': 2,
- 'private': 3,
- 0: 0,
- 1: 1,
- 2: 2,
- 3: 3,
-}
-
-METHOD_NUM_TO_NAME_MAP = {
- 0: 'primitive',
- 1: 'constructed',
-}
-
-
-_OID_RE = re.compile(r'^\d+(\.\d+)*$')
-
-
-# A global tracker to ensure that _setup() is called for every class, even
-# if it has been called for a parent class. This allows different _fields
-# definitions for child classes. Without such a construct, the child classes
-# would just see the parent class attributes and would use them.
-_SETUP_CLASSES = {}
-
-
-def load(encoded_data, strict=False):
- """
- Loads a BER/DER-encoded byte string and constructs a universal object based
- on the tag value:
-
- - 1: Boolean
- - 2: Integer
- - 3: BitString
- - 4: OctetString
- - 5: Null
- - 6: ObjectIdentifier
- - 7: ObjectDescriptor
- - 8: InstanceOf
- - 9: Real
- - 10: Enumerated
- - 11: EmbeddedPdv
- - 12: UTF8String
- - 13: RelativeOid
- - 16: Sequence
- - 17: Set
- - 18: NumericString
- - 19: PrintableString
- - 20: TeletexString
- - 21: VideotexString
- - 22: IA5String
- - 23: UTCTime
- - 24: GeneralizedTime
- - 25: GraphicString
- - 26: VisibleString
- - 27: GeneralString
- - 28: UniversalString
- - 29: CharacterString
- - 30: BMPString
-
- :param encoded_data:
- A byte string of BER or DER-encoded data
-
- :param strict:
- A boolean indicating if trailing data should be forbidden - if so, a
- ValueError will be raised when trailing data exists
-
- :raises:
- ValueError - when strict is True and trailing data is present
- ValueError - when the encoded value uses a tag other than those listed above
- ValueError - when the ASN.1 header length is longer than the data
- TypeError - when encoded_data is not a byte string
-
- :return:
- An instance of the one of the universal classes
- """
-
- return Asn1Value.load(encoded_data, strict=strict)
-
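-# Usage sketch: load() decodes any universal type from the tag alone, so the
-# three bytes below (tag 0x02 INTEGER, length 0x01, content 0x05) come back as
-# an Integer instance:
-#
-#   value = load(b'\x02\x01\x05')
-#   assert isinstance(value, Integer) and value.native == 5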
-
-class Asn1Value(object):
- """
- The basis of all ASN.1 values
- """
-
- # The integer 0 for primitive, 1 for constructed
- method = None
-
- # An integer 0 through 3 - see CLASS_NUM_TO_NAME_MAP for value
- class_ = None
-
- # An integer 1 or greater indicating the tag number
- tag = None
-
- # An alternate tag allowed for this type - used for handling broken
- # structures where a string value is encoded using an incorrect tag
- _bad_tag = None
-
- # If the value has been implicitly tagged
- implicit = False
-
- # If explicitly tagged, a tuple of 2-element tuples containing the
- # class int and tag int, from innermost to outermost
- explicit = None
-
- # The BER/DER header bytes
- _header = None
-
- # Raw encoded value bytes not including class, method, tag, length header
- contents = None
-
- # The BER/DER trailer bytes
- _trailer = b''
-
- # The native python representation of the value - this is not used by
- # some classes since they utilize _bytes or _unicode
- _native = None
-
- @classmethod
- def load(cls, encoded_data, strict=False, **kwargs):
- """
- Loads a BER/DER-encoded byte string using the current class as the spec
-
- :param encoded_data:
- A byte string of BER or DER-encoded data
-
- :param strict:
- A boolean indicating if trailing data should be forbidden - if so, a
- ValueError will be raised when trailing data exists
-
- :return:
- An instance of the current class
- """
-
- if not isinstance(encoded_data, byte_cls):
- raise TypeError('encoded_data must be a byte string, not %s' % type_name(encoded_data))
-
- spec = None
- if cls.tag is not None:
- spec = cls
-
- value, _ = _parse_build(encoded_data, spec=spec, spec_params=kwargs, strict=strict)
- return value
-
- def __init__(self, explicit=None, implicit=None, no_explicit=False, tag_type=None, class_=None, tag=None,
- optional=None, default=None, contents=None):
- """
- The optional parameter is not used, but rather included so we don't
- have to delete it from the parameter dictionary when passing as keyword
- args
-
- :param explicit:
- An int tag number for explicit tagging, or a 2-element tuple of
- class and tag.
-
- :param implicit:
- An int tag number for implicit tagging, or a 2-element tuple of
- class and tag.
-
- :param no_explicit:
- If explicit tagging info should be removed from this instance.
- Used internally to allow constructing the underlying value that
- has been wrapped in an explicit tag.
-
- :param tag_type:
- None for normal values, or one of "implicit", "explicit" for tagged
- values. Deprecated in favor of explicit and implicit params.
-
- :param class_:
- The class for the value - defaults to "universal" if tag_type is
- None, otherwise defaults to "context". Valid values include:
- - "universal"
- - "application"
- - "context"
- - "private"
- Deprecated in favor of explicit and implicit params.
-
- :param tag:
- The integer tag to override - usually this is used with tag_type or
- class_. Deprecated in favor of explicit and implicit params.
-
- :param optional:
- Dummy parameter that allows "optional" key in spec param dicts
-
- :param default:
- The default value to use if the value is currently None
-
- :param contents:
- A byte string of the encoded contents of the value
-
- :raises:
- ValueError - when implicit, explicit, tag_type, class_ or tag are invalid values
- """
-
- try:
- if self.__class__ not in _SETUP_CLASSES:
- cls = self.__class__
- # Allow explicit to be specified as a simple 2-element tuple
- # instead of requiring the user make a nested tuple
- if cls.explicit is not None and isinstance(cls.explicit[0], int_types):
- cls.explicit = (cls.explicit, )
- if hasattr(cls, '_setup'):
- self._setup()
- _SETUP_CLASSES[cls] = True
-
- # Normalize tagging values
- if explicit is not None:
- if isinstance(explicit, int_types):
- if class_ is None:
- class_ = 'context'
- explicit = (class_, explicit)
- # Prevent both explicit and tag_type == 'explicit'
- if tag_type == 'explicit':
- tag_type = None
- tag = None
-
- if implicit is not None:
- if isinstance(implicit, int_types):
- if class_ is None:
- class_ = 'context'
- implicit = (class_, implicit)
- # Prevent both implicit and tag_type == 'implicit'
- if tag_type == 'implicit':
- tag_type = None
- tag = None
-
- # Convert old tag_type API to explicit/implicit params
- if tag_type is not None:
- if class_ is None:
- class_ = 'context'
- if tag_type == 'explicit':
- explicit = (class_, tag)
- elif tag_type == 'implicit':
- implicit = (class_, tag)
- else:
- raise ValueError(unwrap(
- '''
- tag_type must be one of "implicit", "explicit", not %s
- ''',
- repr(tag_type)
- ))
-
- if explicit is not None:
- # Ensure we have a tuple of 2-element tuples
- if len(explicit) == 2 and isinstance(explicit[1], int_types):
- explicit = (explicit, )
- for class_, tag in explicit:
- invalid_class = None
- if isinstance(class_, int_types):
- if class_ not in CLASS_NUM_TO_NAME_MAP:
- invalid_class = class_
- else:
- if class_ not in CLASS_NAME_TO_NUM_MAP:
- invalid_class = class_
- class_ = CLASS_NAME_TO_NUM_MAP[class_]
- if invalid_class is not None:
- raise ValueError(unwrap(
- '''
- explicit class must be one of "universal", "application",
- "context", "private", not %s
- ''',
- repr(invalid_class)
- ))
- if tag is not None:
- if not isinstance(tag, int_types):
- raise TypeError(unwrap(
- '''
- explicit tag must be an integer, not %s
- ''',
- type_name(tag)
- ))
- if self.explicit is None:
- self.explicit = ((class_, tag), )
- else:
- self.explicit = self.explicit + ((class_, tag), )
-
- elif implicit is not None:
- class_, tag = implicit
- if class_ not in CLASS_NAME_TO_NUM_MAP:
- raise ValueError(unwrap(
- '''
- implicit class must be one of "universal", "application",
- "context", "private", not %s
- ''',
- repr(class_)
- ))
- if tag is not None:
- if not isinstance(tag, int_types):
- raise TypeError(unwrap(
- '''
- implicit tag must be an integer, not %s
- ''',
- type_name(tag)
- ))
- self.class_ = CLASS_NAME_TO_NUM_MAP[class_]
- self.tag = tag
- self.implicit = True
- else:
- if class_ is not None:
- if class_ not in CLASS_NUM_TO_NAME_MAP:
- raise ValueError(unwrap(
- '''
- class_ must be one of "universal", "application",
- "context", "private", not %s
- ''',
- repr(class_)
- ))
- self.class_ = CLASS_NAME_TO_NUM_MAP[class_]
-
- if tag is not None:
- self.tag = tag
-
- if no_explicit:
- self.explicit = None
-
- if contents is not None:
- self.contents = contents
-
- elif default is not None:
- self.set(default)
-
- except (ValueError, TypeError) as e:
- args = e.args[1:]
- e.args = (e.args[0] + '\n while constructing %s' % type_name(self),) + args
- raise e
-
- def __str__(self):
- """
- Since str is different in Python 2 and 3, this calls the appropriate
- method, __unicode__() or __bytes__()
-
- :return:
- A unicode string
- """
-
- if _PY2:
- return self.__bytes__()
- else:
- return self.__unicode__()
-
- def __repr__(self):
- """
- :return:
- A unicode string
- """
-
- if _PY2:
- return '<%s %s b%s>' % (type_name(self), id(self), repr(self.dump()))
- else:
- return '<%s %s %s>' % (type_name(self), id(self), repr(self.dump()))
-
- def __bytes__(self):
- """
- A fall-back method for print() in Python 2
-
- :return:
- A byte string of the output of repr()
- """
-
- return self.__repr__().encode('utf-8')
-
- def __unicode__(self):
- """
- A fall-back method for print() in Python 3
-
- :return:
- A unicode string of the output of repr()
- """
-
- return self.__repr__()
-
- def _new_instance(self):
- """
- Constructs a new copy of the current object, preserving any tagging
-
- :return:
- An Asn1Value object
- """
-
- new_obj = self.__class__()
- new_obj.class_ = self.class_
- new_obj.tag = self.tag
- new_obj.implicit = self.implicit
- new_obj.explicit = self.explicit
- return new_obj
-
- def __copy__(self):
- """
- Implements the copy.copy() interface
-
- :return:
- A new shallow copy of the current Asn1Value object
- """
-
- new_obj = self._new_instance()
- new_obj._copy(self, copy.copy)
- return new_obj
-
- def __deepcopy__(self, memo):
- """
- Implements the copy.deepcopy() interface
-
- :param memo:
- A dict for memoization
-
- :return:
- A new deep copy of the current Asn1Value object
- """
-
- new_obj = self._new_instance()
- memo[id(self)] = new_obj
- new_obj._copy(self, copy.deepcopy)
- return new_obj
-
- def copy(self):
- """
- Copies the object, preserving any special tagging from it
-
- :return:
- An Asn1Value object
- """
-
- return copy.deepcopy(self)
-
- def retag(self, tagging, tag=None):
- """
- Copies the object, applying a new tagging to it
-
- :param tagging:
- A dict containing the keys "explicit" and "implicit". Legacy
- API allows a unicode string of "implicit" or "explicit".
-
- :param tag:
- An integer tag number. Only used when tagging is a unicode string.
-
- :return:
- An Asn1Value object
- """
-
- # This is required to preserve the old API
- if not isinstance(tagging, dict):
- tagging = {tagging: tag}
- new_obj = self.__class__(explicit=tagging.get('explicit'), implicit=tagging.get('implicit'))
- new_obj._copy(self, copy.deepcopy)
- return new_obj
-
- def untag(self):
- """
- Copies the object, removing any special tagging from it
-
- :return:
- An Asn1Value object
- """
-
- new_obj = self.__class__()
- new_obj._copy(self, copy.deepcopy)
- return new_obj
-
- def _copy(self, other, copy_func):
- """
- Copies the contents of another Asn1Value object to itself
-
- :param other:
- Another instance of the same class
-
- :param copy_func:
- A reference to copy.copy() or copy.deepcopy() to use when copying
- lists, dicts and objects
- """
-
- if self.__class__ != other.__class__:
- raise TypeError(unwrap(
- '''
- Can not copy values from %s object to %s object
- ''',
- type_name(other),
- type_name(self)
- ))
-
- self.contents = other.contents
- self._native = copy_func(other._native)
-
- def debug(self, nest_level=1):
- """
- Show the binary data and parsed data in a tree structure
- """
-
- prefix = ' ' * nest_level
-
- # This interacts with Any and moves the tag, implicit, explicit, _header,
- # contents, _footer to the parsed value so duplicate data isn't present
- has_parsed = hasattr(self, 'parsed')
-
- _basic_debug(prefix, self)
- if has_parsed:
- self.parsed.debug(nest_level + 2)
- elif hasattr(self, 'chosen'):
- self.chosen.debug(nest_level + 2)
- else:
- if _PY2 and isinstance(self.native, byte_cls):
- print('%s Native: b%s' % (prefix, repr(self.native)))
- else:
- print('%s Native: %s' % (prefix, self.native))
-
- def dump(self, force=False):
- """
- Encodes the value using DER
-
- :param force:
- If the encoded contents already exist, clear them and regenerate
- to ensure they are in DER format instead of BER format
-
- :return:
- A byte string of the DER-encoded value
- """
-
- contents = self.contents
-
- if self._header is None or force:
- if isinstance(self, Constructable) and self._indefinite:
- self.method = 0
-
- header = _dump_header(self.class_, self.method, self.tag, self.contents)
-
- if self.explicit is not None:
- for class_, tag in self.explicit:
- header = _dump_header(class_, 1, tag, header + self.contents) + header
-
- self._header = header
- self._trailer = b''
-
- return self._header + contents
-
-
-class ValueMap():
- """
- Basic functionality that allows for mapping values from ints or OIDs to
- python unicode strings
- """
-
- # A dict from primitive value (int or OID) to unicode string. This needs
- # to be defined in the source code
- _map = None
-
- # A dict from unicode string to int/OID. This is automatically generated
- # from _map the first time it is needed
- _reverse_map = None
-
- def _setup(self):
- """
- Generates _reverse_map from _map
- """
-
- cls = self.__class__
- if cls._map is None or cls._reverse_map is not None:
- return
- cls._reverse_map = {}
- for key, value in cls._map.items():
- cls._reverse_map[value] = key
-
-
-class Castable(object):
- """
- A mixin to handle converting an object between different classes that
- represent the same encoded value, but with different rules for converting
- to and from native Python values
- """
-
- def cast(self, other_class):
- """
- Converts the current object into an object of a different class. The
- new class must use the ASN.1 encoding for the value.
-
- :param other_class:
- The class to instantiate the new object from
-
- :return:
- An instance of the type other_class
- """
-
- if other_class.tag != self.__class__.tag:
- raise TypeError(unwrap(
- '''
- Can not convert a value from %s object to %s object since they
- use different tags: %d versus %d
- ''',
- type_name(other_class),
- type_name(self),
- other_class.tag,
- self.__class__.tag
- ))
-
- new_obj = other_class()
- new_obj.class_ = self.class_
- new_obj.implicit = self.implicit
- new_obj.explicit = self.explicit
- new_obj._header = self._header
- new_obj.contents = self.contents
- new_obj._trailer = self._trailer
- if isinstance(self, Constructable):
- new_obj.method = self.method
- new_obj._indefinite = self._indefinite
- return new_obj
-
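-# Usage sketch for cast() above (the three encoded bytes are an arbitrary
-# example): casting re-wraps the same encoding under a sibling class that
-# shares the tag, changing only how .native interprets the contents:
-#
-#   os_val = OctetString.load(b'\x04\x01\xff')
-#   assert os_val.native == b'\xff'
-#   assert os_val.cast(IntegerOctetString).native == 255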
-
-class Constructable(object):
- """
- A mixin to handle string types that may be constructed from chunks
- contained within an indefinite length BER-encoded container
- """
-
- # Instance attribute indicating if an object was indefinite
- # length when parsed - affects parsing and dumping
- _indefinite = False
-
- # Class attribute that indicates the offset into self.contents
- # that contains the chunks of data to merge
- _chunks_offset = 0
-
- def _merge_chunks(self):
- """
- :return:
- A concatenation of the native values of the contained chunks
- """
-
- if not self._indefinite:
- return self._as_chunk()
-
- pointer = self._chunks_offset
- contents_len = len(self.contents)
- output = None
-
- while pointer < contents_len:
- # We pass the current class as the spec so content semantics are preserved
- sub_value, pointer = _parse_build(self.contents, pointer, spec=self.__class__)
- if output is None:
- output = sub_value._merge_chunks()
- else:
- output += sub_value._merge_chunks()
-
- if output is None:
- return self._as_chunk()
-
- return output
-
- def _as_chunk(self):
- """
- A method to return a chunk of data that can be combined for
- constructed method values
-
- :return:
- A native Python value that can be added together. Examples include
- byte strings, unicode strings or tuples.
- """
-
- if self._chunks_offset == 0:
- return self.contents
- return self.contents[self._chunks_offset:]
-
- def _copy(self, other, copy_func):
- """
- Copies the contents of another Constructable object to itself
-
- :param other:
- Another instance of the same class
-
- :param copy_func:
- A reference to copy.copy() or copy.deepcopy() to use when copying
- lists, dicts and objects
- """
-
- super(Constructable, self)._copy(other, copy_func)
- self.method = other.method
- self._indefinite = other._indefinite
-
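-# Usage sketch (the byte string is a hand-built BER example): some encoders
-# emit strings as indefinite-length constructed values, which this mixin
-# reassembles transparently. The chunks b'\x01\x02' and b'\x03' below merge
-# into a single native value:
-#
-#   ber = b'\x24\x80\x04\x02\x01\x02\x04\x01\x03\x00\x00'
-#   assert OctetString.load(ber).native == b'\x01\x02\x03'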
-
-class Void(Asn1Value):
- """
- A representation of an optional value that is not present. Has .native
- property and .dump() method to be compatible with other value classes.
- """
-
- contents = b''
-
- def __eq__(self, other):
- """
- :param other:
- The other Primitive to compare to
-
- :return:
- A boolean
- """
-
- return other.__class__ == self.__class__
-
- def __nonzero__(self):
- return False
-
- def __len__(self):
- return 0
-
- def __iter__(self):
- return iter(())
-
- @property
- def native(self):
- """
- The native Python datatype representation of this value
-
- :return:
- None
- """
-
- return None
-
- def dump(self, force=False):
- """
- Encodes the value using DER
-
- :param force:
- If the encoded contents already exist, clear them and regenerate
- to ensure they are in DER format instead of BER format
-
- :return:
- A byte string of the DER-encoded value
- """
-
- return b''
-
-
-VOID = Void()
-
-
-class Any(Asn1Value):
- """
- A value class that can contain any value, and allows for easy parsing of
- the underlying encoded value using a spec. This is normally contained in
- a Structure that has an ObjectIdentifier field and _oid_pair and _oid_specs
- defined.
- """
-
- # The parsed value object
- _parsed = None
-
- def __init__(self, value=None, **kwargs):
- """
- Sets the value of the object before passing to Asn1Value.__init__()
-
- :param value:
- An Asn1Value object that will be set as the parsed value
- """
-
- Asn1Value.__init__(self, **kwargs)
-
- try:
- if value is not None:
- if not isinstance(value, Asn1Value):
- raise TypeError(unwrap(
- '''
- value must be an instance of Asn1Value, not %s
- ''',
- type_name(value)
- ))
-
- self._parsed = (value, value.__class__, None)
- self.contents = value.dump()
-
- except (ValueError, TypeError) as e:
- args = e.args[1:]
- e.args = (e.args[0] + '\n while constructing %s' % type_name(self),) + args
- raise e
-
- @property
- def native(self):
- """
- The native Python datatype representation of this value
-
- :return:
- The .native value from the parsed value object
- """
-
- if self._parsed is None:
- self.parse()
-
- return self._parsed[0].native
-
- @property
- def parsed(self):
- """
- Returns the parsed object from .parse()
-
- :return:
- The object returned by .parse()
- """
-
- if self._parsed is None:
- self.parse()
-
- return self._parsed[0]
-
- def parse(self, spec=None, spec_params=None):
- """
- Parses the contents generically, or using a spec with optional params
-
- :param spec:
- A class derived from Asn1Value that defines what class_ and tag the
- value should have, and the semantics of the encoded value. The
- return value will be of this type. If omitted, the encoded value
- will be decoded using the standard universal tag based on the
- encoded tag number.
-
- :param spec_params:
- A dict of params to pass to the spec object
-
- :return:
- An object of the type spec, or if spec is not specified, a child of Asn1Value
- """
-
- if self._parsed is None or self._parsed[1:3] != (spec, spec_params):
- try:
- passed_params = spec_params or {}
- _tag_type_to_explicit_implicit(passed_params)
- if self.explicit is not None:
- if 'explicit' in passed_params:
- passed_params['explicit'] = self.explicit + passed_params['explicit']
- else:
- passed_params['explicit'] = self.explicit
- contents = self._header + self.contents + self._trailer
- parsed_value, _ = _parse_build(
- contents,
- spec=spec,
- spec_params=passed_params
- )
- self._parsed = (parsed_value, spec, spec_params)
-
- # Once we've parsed the Any value, clear any attributes from this object
- # since they are now duplicate
- self.tag = None
- self.explicit = None
- self.implicit = False
- self._header = b''
- self.contents = contents
- self._trailer = b''
-
- except (ValueError, TypeError) as e:
- args = e.args[1:]
- e.args = (e.args[0] + '\n while parsing %s' % type_name(self),) + args
- raise e
- return self._parsed[0]
-
- def _copy(self, other, copy_func):
- """
- Copies the contents of another Any object to itself
-
- :param other:
- Another instance of the same class
-
- :param copy_func:
- A reference to copy.copy() or copy.deepcopy() to use when copying
- lists, dicts and objects
- """
-
- super(Any, self)._copy(other, copy_func)
- self._parsed = copy_func(other._parsed)
-
- def dump(self, force=False):
- """
- Encodes the value using DER
-
- :param force:
- If the encoded contents already exist, clear them and regenerate
- to ensure they are in DER format instead of BER format
-
- :return:
- A byte string of the DER-encoded value
- """
-
- if self._parsed is None:
- self.parse()
-
- return self._parsed[0].dump(force=force)
-
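As the docstring above notes, Any is normally a field inside a structure whose concrete spec is chosen by a sibling ObjectIdentifier via _oid_pair/_oid_specs. A minimal sketch of that pattern, assuming the asn1crypto package is importable; the ParamValue class and the OID 1.2.3.4 are invented purely for illustration.

    from asn1crypto import core

    class ParamValue(core.Sequence):
        _fields = [
            ('param_id', core.ObjectIdentifier),
            ('value', core.Any),
        ]
        # The OID in 'param_id' selects the spec used to interpret 'value'
        _oid_pair = ('param_id', 'value')
        _oid_specs = {'1.2.3.4': core.Integer}

    pv = ParamValue({'param_id': '1.2.3.4', 'value': 5})
    print(pv['value'].native)             # 5

    reparsed = ParamValue.load(pv.dump())
    print(reparsed['param_id'].dotted)    # '1.2.3.4'
    print(reparsed['value'].native)       # expected 5 - decoded as an Integer via _oid_specs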
-
-class Choice(Asn1Value):
- """
- A class to handle when a value may be one of several options
- """
-
- # The index in _alternatives of the validated alternative
- _choice = None
-
- # The name of the chosen alternative
- _name = None
-
- # The Asn1Value object for the chosen alternative
- _parsed = None
-
- # A list of tuples in one of the following forms.
- #
- # Option 1, a unicode string field name and a value class
- #
- # ("name", Asn1ValueClass)
- #
- # Option 2, same as Option 1, but with a dict of class params
- #
- # ("name", Asn1ValueClass, {'explicit': 5})
- _alternatives = None
-
- # A dict that maps tuples of (class_, tag) to an index in _alternatives
- _id_map = None
-
- # A dict that maps alternative names to an index in _alternatives
- _name_map = None
-
- @classmethod
- def load(cls, encoded_data, strict=False, **kwargs):
- """
- Loads a BER/DER-encoded byte string using the current class as the spec
-
- :param encoded_data:
- A byte string of BER or DER encoded data
-
- :param strict:
- A boolean indicating if trailing data should be forbidden - if so, a
- ValueError will be raised when trailing data exists
-
- :return:
- An instance of the current class
- """
-
- if not isinstance(encoded_data, byte_cls):
- raise TypeError('encoded_data must be a byte string, not %s' % type_name(encoded_data))
-
- value, _ = _parse_build(encoded_data, spec=cls, spec_params=kwargs, strict=strict)
- return value
-
- def _setup(self):
- """
- Generates _id_map from _alternatives to allow validating contents
- """
-
- cls = self.__class__
- cls._id_map = {}
- cls._name_map = {}
- for index, info in enumerate(cls._alternatives):
- if len(info) < 3:
- info = info + ({},)
- cls._alternatives[index] = info
- id_ = _build_id_tuple(info[2], info[1])
- cls._id_map[id_] = index
- cls._name_map[info[0]] = index
-
- def __init__(self, name=None, value=None, **kwargs):
- """
- Checks to ensure implicit tagging is not being used since it is
- incompatible with Choice, then forwards on to Asn1Value.__init__()
-
- :param name:
- The name of the alternative to be set - used with value.
- Alternatively this may be a dict with a single key being the name
- and the value being the value, or a two-element tuple of the
- name and the value.
-
- :param value:
- The alternative value to set - used with name
-
- :raises:
- ValueError - when implicit param is passed (or legacy tag_type param is "implicit")
- """
-
- _tag_type_to_explicit_implicit(kwargs)
-
- Asn1Value.__init__(self, **kwargs)
-
- try:
- if kwargs.get('implicit') is not None:
- raise ValueError(unwrap(
- '''
- The Choice type can not be implicitly tagged even if in an
- implicit module - due to its nature any tagging must be
- explicit
- '''
- ))
-
- if name is not None:
- if isinstance(name, dict):
- if len(name) != 1:
- raise ValueError(unwrap(
- '''
- When passing a dict as the "name" argument to %s,
- it must have a single key/value - however %d were
- present
- ''',
- type_name(self),
- len(name)
- ))
- name, value = list(name.items())[0]
-
- if isinstance(name, tuple):
- if len(name) != 2:
- raise ValueError(unwrap(
- '''
- When passing a tuple as the "name" argument to %s,
- it must have two elements, the name and value -
- however %d were present
- ''',
- type_name(self),
- len(name)
- ))
- value = name[1]
- name = name[0]
-
- if name not in self._name_map:
- raise ValueError(unwrap(
- '''
- The name specified, "%s", is not a valid alternative
- for %s
- ''',
- name,
- type_name(self)
- ))
-
- self._choice = self._name_map[name]
- _, spec, params = self._alternatives[self._choice]
-
- if not isinstance(value, spec):
- value = spec(value, **params)
- else:
- value = _fix_tagging(value, params)
- self._parsed = value
-
- except (ValueError, TypeError) as e:
- args = e.args[1:]
- e.args = (e.args[0] + '\n while constructing %s' % type_name(self),) + args
- raise e
-
- @property
- def name(self):
- """
- :return:
- A unicode string of the field name of the chosen alternative
- """
- if not self._name:
- self._name = self._alternatives[self._choice][0]
- return self._name
-
- def parse(self):
- """
- Parses the detected alternative
-
- :return:
- An Asn1Value object of the chosen alternative
- """
-
- if self._parsed is not None:
- return self._parsed
-
- try:
- _, spec, params = self._alternatives[self._choice]
- self._parsed, _ = _parse_build(self.contents, spec=spec, spec_params=params)
- except (ValueError, TypeError) as e:
- args = e.args[1:]
- e.args = (e.args[0] + '\n while parsing %s' % type_name(self),) + args
- raise e
-
- @property
- def chosen(self):
- """
- :return:
- An Asn1Value object of the chosen alternative
- """
-
- return self.parse()
-
- @property
- def native(self):
- """
- The native Python datatype representation of this value
-
- :return:
- The .native value from the contained value object
- """
-
- return self.chosen.native
-
- def validate(self, class_, tag, contents):
- """
- Ensures that the class and tag specified exist as an alternative
-
- :param class_:
- The integer class_ from the encoded value header
-
- :param tag:
- The integer tag from the encoded value header
-
- :param contents:
- A byte string of the contents of the value - used when the object
- is explicitly tagged
-
- :raises:
- ValueError - when value is not a valid alternative
- """
-
- id_ = (class_, tag)
-
- if self.explicit is not None:
- if self.explicit[-1] != id_:
- raise ValueError(unwrap(
- '''
- %s was explicitly tagged, but the value provided does not
- match the class and tag
- ''',
- type_name(self)
- ))
-
- ((class_, _, tag, _, _, _), _) = _parse(contents, len(contents))
- id_ = (class_, tag)
-
- if id_ in self._id_map:
- self._choice = self._id_map[id_]
- return
-
- # This means the Choice was implicitly tagged
- if self.class_ is not None and self.tag is not None:
- if len(self._alternatives) > 1:
- raise ValueError(unwrap(
- '''
- %s was implicitly tagged, but more than one alternative
- exists
- ''',
- type_name(self)
- ))
- if id_ == (self.class_, self.tag):
- self._choice = 0
- return
-
- asn1 = self._format_class_tag(class_, tag)
- asn1s = [self._format_class_tag(pair[0], pair[1]) for pair in self._id_map]
-
- raise ValueError(unwrap(
- '''
- Value %s did not match the class and tag of any of the alternatives
- in %s: %s
- ''',
- asn1,
- type_name(self),
- ', '.join(asn1s)
- ))
-
- def _format_class_tag(self, class_, tag):
- """
- :return:
- A unicode string of a human-friendly representation of the class and tag
- """
-
- return '[%s %s]' % (CLASS_NUM_TO_NAME_MAP[class_].upper(), tag)
-
- def _copy(self, other, copy_func):
- """
- Copies the contents of another Choice object to itself
-
- :param other:
- Another instance of the same class
-
- :param copy_func:
- A reference to copy.copy() or copy.deepcopy() to use when copying
- lists, dicts and objects
- """
-
- super(Choice, self)._copy(other, copy_func)
- self._choice = other._choice
- self._name = other._name
- self._parsed = copy_func(other._parsed)
-
- def dump(self, force=False):
- """
- Encodes the value using DER
-
- :param force:
- If the encoded contents already exist, clear them and regenerate
- to ensure they are in DER format instead of BER format
-
- :return:
- A byte string of the DER-encoded value
- """
-
- self.contents = self.chosen.dump(force=force)
- if self._header is None or force:
- self._header = b''
- if self.explicit is not None:
- for class_, tag in self.explicit:
- self._header = _dump_header(class_, 1, tag, self._header + self.contents) + self._header
- return self._header + self.contents
-
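A minimal sketch of defining and using a Choice, assuming the asn1crypto package is importable; the CountOrLabel class is invented for illustration and mirrors the _alternatives forms described in the comments above.

    from asn1crypto import core

    class CountOrLabel(core.Choice):
        _alternatives = [
            ('count', core.Integer),
            ('label', core.UTF8String),
            ('tagged_label', core.UTF8String, {'explicit': 0}),
        ]

    chosen = CountOrLabel(name='count', value=42)
    print(chosen.name, chosen.native)            # count 42
    encoded = chosen.dump()                      # just the INTEGER encoding - CHOICE adds no tag of its own
    print(CountOrLabel.load(encoded).native)     # 42
    print(CountOrLabel({'label': 'hi'}).native)  # 'hi' - dict form of the "name" argument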
-
-class Concat(object):
- """
- A class that contains two or more encoded child values concatenated
- together. THIS IS NOT PART OF THE ASN.1 SPECIFICATION! This exists to handle
- the x509.TrustedCertificate() class for OpenSSL certificates containing
- extra information.
- """
-
- # A list of the specs of the concatenated values
- _child_specs = None
-
- _children = None
-
- @classmethod
- def load(cls, encoded_data, strict=False):
- """
- Loads a BER/DER-encoded byte string using the current class as the spec
-
- :param encoded_data:
- A byte string of BER or DER encoded data
-
- :param strict:
- A boolean indicating if trailing data should be forbidden - if so, a
- ValueError will be raised when trailing data exists
-
- :return:
- A Concat object
- """
-
- return cls(contents=encoded_data, strict=strict)
-
- def __init__(self, value=None, contents=None, strict=False):
- """
- :param value:
- A native Python datatype to initialize the object value with
-
- :param contents:
- A byte string of the encoded contents of the value
-
- :param strict:
- A boolean indicating if trailing data should be forbidden - if so, a
- ValueError will be raised when trailing data exists in contents
-
- :raises:
- ValueError - when an error occurs with one of the children
- TypeError - when an error occurs with one of the children
- """
-
- if contents is not None:
- try:
- contents_len = len(contents)
- self._children = []
-
- offset = 0
- for spec in self._child_specs:
- if offset < contents_len:
- child_value, offset = _parse_build(contents, pointer=offset, spec=spec)
- else:
- child_value = spec()
- self._children.append(child_value)
-
- if strict and offset != contents_len:
- extra_bytes = contents_len - offset
- raise ValueError('Extra data - %d bytes of trailing data were provided' % extra_bytes)
-
- except (ValueError, TypeError) as e:
- args = e.args[1:]
- e.args = (e.args[0] + '\n while constructing %s' % type_name(self),) + args
- raise e
-
- if value is not None:
- if self._children is None:
- self._children = [None] * len(self._child_specs)
- for index, data in enumerate(value):
- self.__setitem__(index, data)
-
- def __str__(self):
- """
- Since str is different in Python 2 and 3, this calls the appropriate
- method, __unicode__() or __bytes__()
-
- :return:
- A unicode string
- """
-
- if _PY2:
- return self.__bytes__()
- else:
- return self.__unicode__()
-
- def __bytes__(self):
- """
- A byte string of the DER-encoded contents
- """
-
- return self.dump()
-
- def __unicode__(self):
- """
- :return:
- A unicode string
- """
-
- return repr(self)
-
- def __repr__(self):
- """
- :return:
- A unicode string
- """
-
- return '<%s %s %s>' % (type_name(self), id(self), repr(self.dump()))
-
- def __copy__(self):
- """
- Implements the copy.copy() interface
-
- :return:
- A new shallow copy of the Concat object
- """
-
- new_obj = self.__class__()
- new_obj._copy(self, copy.copy)
- return new_obj
-
- def __deepcopy__(self, memo):
- """
- Implements the copy.deepcopy() interface
-
- :param memo:
- A dict for memoization
-
- :return:
- A new deep copy of the Concat object and all child objects
- """
-
- new_obj = self.__class__()
- memo[id(self)] = new_obj
- new_obj._copy(self, copy.deepcopy)
- return new_obj
-
- def copy(self):
- """
- Copies the object
-
- :return:
- A Concat object
- """
-
- return copy.deepcopy(self)
-
- def _copy(self, other, copy_func):
- """
- Copies the contents of another Concat object to itself
-
- :param other:
- Another instance of the same class
-
- :param copy_func:
- A reference to copy.copy() or copy.deepcopy() to use when copying
- lists, dicts and objects
- """
-
- if self.__class__ != other.__class__:
- raise TypeError(unwrap(
- '''
- Can not copy values from %s object to %s object
- ''',
- type_name(other),
- type_name(self)
- ))
-
- self._children = copy_func(other._children)
-
- def debug(self, nest_level=1):
- """
- Show the binary data and parsed data in a tree structure
- """
-
- prefix = ' ' * nest_level
- print('%s%s Object #%s' % (prefix, type_name(self), id(self)))
- print('%s Children:' % (prefix,))
- for child in self._children:
- child.debug(nest_level + 2)
-
- def dump(self, force=False):
- """
- Encodes the value using DER
-
- :param force:
- If the encoded contents already exist, clear them and regenerate
- to ensure they are in DER format instead of BER format
-
- :return:
- A byte string of the DER-encoded value
- """
-
- contents = b''
- for child in self._children:
- contents += child.dump(force=force)
- return contents
-
- @property
- def contents(self):
- """
- :return:
- A byte string of the DER-encoded contents of the children
- """
-
- return self.dump()
-
- def __len__(self):
- """
- :return:
- Integer
- """
-
- return len(self._children)
-
- def __getitem__(self, key):
- """
- Allows accessing children by index
-
- :param key:
- An integer of the child index
-
- :raises:
- KeyError - when an index is invalid
-
- :return:
- The Asn1Value object of the child specified
- """
-
- if key > len(self._child_specs) - 1 or key < 0:
- raise KeyError(unwrap(
- '''
- No child is defined for position %d of %s
- ''',
- key,
- type_name(self)
- ))
-
- return self._children[key]
-
- def __setitem__(self, key, value):
- """
- Allows setting children by index
-
- :param key:
- An integer of the child index
-
- :param value:
- An Asn1Value object to set the child to
-
- :raises:
- KeyError - when an index is invalid
- ValueError - when the value is not an instance of Asn1Value
- """
-
- if key > len(self._child_specs) - 1 or key < 0:
- raise KeyError(unwrap(
- '''
- No child is defined for position %d of %s
- ''',
- key,
- type_name(self)
- ))
-
- if not isinstance(value, Asn1Value):
- raise ValueError(unwrap(
- '''
- Value for child %s of %s is not an instance of
- asn1crypto.core.Asn1Value
- ''',
- key,
- type_name(self)
- ))
-
- self._children[key] = value
-
- def __iter__(self):
- """
- :return:
- An iterator of child values
- """
-
- return iter(self._children)
-
-
-class Primitive(Asn1Value):
- """
- Sets the class_ and method attributes for primitive, universal values
- """
-
- class_ = 0
-
- method = 0
-
- def __init__(self, value=None, default=None, contents=None, **kwargs):
- """
- Sets the value of the object before passing to Asn1Value.__init__()
-
- :param value:
- A native Python datatype to initialize the object value with
-
- :param default:
- The default value if no value is specified
-
- :param contents:
- A byte string of the encoded contents of the value
- """
-
- Asn1Value.__init__(self, **kwargs)
-
- try:
- if contents is not None:
- self.contents = contents
-
- elif value is not None:
- self.set(value)
-
- elif default is not None:
- self.set(default)
-
- except (ValueError, TypeError) as e:
- args = e.args[1:]
- e.args = (e.args[0] + '\n while constructing %s' % type_name(self),) + args
- raise e
-
- def set(self, value):
- """
- Sets the value of the object
-
- :param value:
- A byte string
- """
-
- if not isinstance(value, byte_cls):
- raise TypeError(unwrap(
- '''
- %s value must be a byte string, not %s
- ''',
- type_name(self),
- type_name(value)
- ))
-
- self._native = value
- self.contents = value
- self._header = None
- if self._trailer != b'':
- self._trailer = b''
-
- def dump(self, force=False):
- """
- Encodes the value using DER
-
- :param force:
- If the encoded contents already exist, clear them and regenerate
- to ensure they are in DER format instead of BER format
-
- :return:
- A byte string of the DER-encoded value
- """
-
- if force:
- native = self.native
- self.contents = None
- self.set(native)
-
- return Asn1Value.dump(self)
-
- def __ne__(self, other):
- return not self == other
-
- def __eq__(self, other):
- """
- :param other:
- The other Primitive to compare to
-
- :return:
- A boolean
- """
-
- if not isinstance(other, Primitive):
- return False
-
- if self.contents != other.contents:
- return False
-
- # We compare class tag numbers since object tag numbers could be
- # different due to implicit or explicit tagging
- if self.__class__.tag != other.__class__.tag:
- return False
-
- if self.__class__ == other.__class__ and self.contents == other.contents:
- return True
-
- # If the objects share a common base class that is not too low-level
- # then we can compare the contents
- self_bases = (set(self.__class__.__bases__) | set([self.__class__])) - set([Asn1Value, Primitive, ValueMap])
- other_bases = (set(other.__class__.__bases__) | set([other.__class__])) - set([Asn1Value, Primitive, ValueMap])
- if self_bases & other_bases:
- return self.contents == other.contents
-
- # When tagging is going on, do the extra work of constructing new
- # objects to see if the dumped representation are the same
- if self.implicit or self.explicit or other.implicit or other.explicit:
- return self.untag().dump() == other.untag().dump()
-
- return self.dump() == other.dump()
-
-
-class AbstractString(Constructable, Primitive):
- """
- A base class for all strings that have a known encoding. In general, we do
- not worry ourselves with confirming that the decoded values match a specific
- set of characters, only that they are decoded into a Python unicode string
- """
-
- # The Python encoding name to use when decoding or encoding the contents
- _encoding = 'latin1'
-
- # Instance attribute of (possibly-merged) unicode string
- _unicode = None
-
- def set(self, value):
- """
- Sets the value of the string
-
- :param value:
- A unicode string
- """
-
- if not isinstance(value, str_cls):
- raise TypeError(unwrap(
- '''
- %s value must be a unicode string, not %s
- ''',
- type_name(self),
- type_name(value)
- ))
-
- self._unicode = value
- self.contents = value.encode(self._encoding)
- self._header = None
- if self._indefinite:
- self._indefinite = False
- self.method = 0
- if self._trailer != b'':
- self._trailer = b''
-
- def __unicode__(self):
- """
- :return:
- A unicode string
- """
-
- if self.contents is None:
- return ''
- if self._unicode is None:
- self._unicode = self._merge_chunks().decode(self._encoding)
- return self._unicode
-
- def _copy(self, other, copy_func):
- """
- Copies the contents of another AbstractString object to itself
-
- :param other:
- Another instance of the same class
-
- :param copy_func:
- A reference to copy.copy() or copy.deepcopy() to use when copying
- lists, dicts and objects
- """
-
- super(AbstractString, self)._copy(other, copy_func)
- self._unicode = other._unicode
-
- @property
- def native(self):
- """
- The native Python datatype representation of this value
-
- :return:
- A unicode string or None
- """
-
- if self.contents is None:
- return None
-
- return self.__unicode__()
-
-
-class Boolean(Primitive):
- """
- Represents a boolean in both ASN.1 and Python
- """
-
- tag = 1
-
- def set(self, value):
- """
- Sets the value of the object
-
- :param value:
- True, False or another value that works with bool()
- """
-
- self._native = bool(value)
- self.contents = b'\x00' if not value else b'\xff'
- self._header = None
- if self._trailer != b'':
- self._trailer = b''
-
- # Python 2
- def __nonzero__(self):
- """
- :return:
- True or False
- """
- return self.__bool__()
-
- def __bool__(self):
- """
- :return:
- True or False
- """
- return self.contents != b'\x00'
-
- @property
- def native(self):
- """
- The native Python datatype representation of this value
-
- :return:
- True, False or None
- """
-
- if self.contents is None:
- return None
-
- if self._native is None:
- self._native = self.__bool__()
- return self._native
-
-
-class Integer(Primitive, ValueMap):
- """
- Represents an integer in both ASN.1 and Python
- """
-
- tag = 2
-
- def set(self, value):
- """
- Sets the value of the object
-
- :param value:
- An integer, or a unicode string if _map is set
-
- :raises:
- ValueError - when an invalid value is passed
- """
-
- if isinstance(value, str_cls):
- if self._map is None:
- raise ValueError(unwrap(
- '''
- %s value is a unicode string, but no _map provided
- ''',
- type_name(self)
- ))
-
- if value not in self._reverse_map:
- raise ValueError(unwrap(
- '''
- %s value, %s, is not present in the _map
- ''',
- type_name(self),
- value
- ))
-
- value = self._reverse_map[value]
-
- elif not isinstance(value, int_types):
- raise TypeError(unwrap(
- '''
- %s value must be an integer or unicode string when a name_map
- is provided, not %s
- ''',
- type_name(self),
- type_name(value)
- ))
-
- self._native = self._map[value] if self._map and value in self._map else value
-
- self.contents = int_to_bytes(value, signed=True)
- self._header = None
- if self._trailer != b'':
- self._trailer = b''
-
- def __int__(self):
- """
- :return:
- An integer
- """
- return int_from_bytes(self.contents, signed=True)
-
- @property
- def native(self):
- """
- The native Python datatype representation of this value
-
- :return:
- An integer or None
- """
-
- if self.contents is None:
- return None
-
- if self._native is None:
- self._native = self.__int__()
- if self._map is not None and self._native in self._map:
- self._native = self._map[self._native]
- return self._native
-
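A quick sketch of round-tripping the Boolean and Integer primitives above, including an Integer subclass with a _map of named values; this assumes the asn1crypto package is importable, and the Version class is invented for illustration.

    from asn1crypto import core

    print(core.Boolean(True).dump().hex())                    # '0101ff'
    print(core.Integer(-128).dump().hex())                    # '020180'
    print(core.Integer.load(bytes.fromhex('020180')).native)  # -128

    class Version(core.Integer):
        _map = {0: 'v1', 1: 'v2', 2: 'v3'}

    print(Version('v3').dump().hex())                         # '020102'
    print(Version.load(bytes.fromhex('020102')).native)       # 'v3'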
-
-class BitString(Constructable, Castable, Primitive, ValueMap, object):
- """
- Represents a bit string from ASN.1 as a Python tuple of 1s and 0s
- """
-
- tag = 3
-
- _size = None
-
- # Used with _as_chunk() from Constructable
- _chunk = None
- _chunks_offset = 1
-
- def _setup(self):
- """
- Generates _reverse_map from _map
- """
-
- ValueMap._setup(self)
-
- cls = self.__class__
- if cls._map is not None:
- cls._size = max(self._map.keys()) + 1
-
- def set(self, value):
- """
- Sets the value of the object
-
- :param value:
- An integer or a tuple of integers 0 and 1
-
- :raises:
- ValueError - when an invalid value is passed
- """
-
- if isinstance(value, set):
- if self._map is None:
- raise ValueError(unwrap(
- '''
- %s._map has not been defined
- ''',
- type_name(self)
- ))
-
- bits = [0] * self._size
- self._native = value
- for index in range(0, self._size):
- key = self._map.get(index)
- if key is None:
- continue
- if key in value:
- bits[index] = 1
-
- value = ''.join(map(str_cls, bits))
-
- elif value.__class__ == tuple:
- if self._map is None:
- self._native = value
- else:
- self._native = set()
- for index, bit in enumerate(value):
- if bit:
- name = self._map.get(index, index)
- self._native.add(name)
- value = ''.join(map(str_cls, value))
-
- else:
- raise TypeError(unwrap(
- '''
- %s value must be a tuple of ones and zeros or a set of unicode
- strings, not %s
- ''',
- type_name(self),
- type_name(value)
- ))
-
- self._chunk = None
-
- if self._map is not None:
- if len(value) > self._size:
- raise ValueError(unwrap(
- '''
- %s value must be at most %s bits long, specified was %s long
- ''',
- type_name(self),
- self._size,
- len(value)
- ))
- # A NamedBitList must have its trailing zero bits truncated. See
- # https://www.itu.int/ITU-T/studygroups/com17/languages/X.690-0207.pdf
- # section 11.2,
- # https://tools.ietf.org/html/rfc5280#page-134 and
- # https://www.ietf.org/mail-archive/web/pkix/current/msg10443.html
- value = value.rstrip('0')
- size = len(value)
-
- size_mod = size % 8
- extra_bits = 0
- if size_mod != 0:
- extra_bits = 8 - size_mod
- value += '0' * extra_bits
-
- size_in_bytes = int(math.ceil(size / 8))
-
- if extra_bits:
- extra_bits_byte = int_to_bytes(extra_bits)
- else:
- extra_bits_byte = b'\x00'
-
- if value == '':
- value_bytes = b''
- else:
- value_bytes = int_to_bytes(int(value, 2))
- if len(value_bytes) != size_in_bytes:
- value_bytes = (b'\x00' * (size_in_bytes - len(value_bytes))) + value_bytes
-
- self.contents = extra_bits_byte + value_bytes
- self._header = None
- if self._indefinite:
- self._indefinite = False
- self.method = 0
- if self._trailer != b'':
- self._trailer = b''
-
- def __getitem__(self, key):
- """
- Retrieves a boolean version of one of the bits based on a name from the
- _map
-
- :param key:
- The unicode string of one of the bit names
-
- :raises:
- ValueError - when _map is not set or the key name is invalid
-
- :return:
- A boolean indicating if the bit is set
- """
-
- is_int = isinstance(key, int_types)
- if not is_int:
- if not isinstance(self._map, dict):
- raise ValueError(unwrap(
- '''
- %s._map has not been defined
- ''',
- type_name(self)
- ))
-
- if key not in self._reverse_map:
- raise ValueError(unwrap(
- '''
- %s._map does not contain an entry for "%s"
- ''',
- type_name(self),
- key
- ))
-
- if self._native is None:
- self.native
-
- if self._map is None:
- if len(self._native) >= key + 1:
- return bool(self._native[key])
- return False
-
- if is_int:
- key = self._map.get(key, key)
-
- return key in self._native
-
- def __setitem__(self, key, value):
- """
- Sets one of the bits based on a name from the _map
-
- :param key:
- The unicode string of one of the bit names
-
- :param value:
- A boolean value
-
- :raises:
- ValueError - when _map is not set or the key name is invalid
- """
-
- is_int = isinstance(key, int_types)
- if not is_int:
- if self._map is None:
- raise ValueError(unwrap(
- '''
- %s._map has not been defined
- ''',
- type_name(self)
- ))
-
- if key not in self._reverse_map:
- raise ValueError(unwrap(
- '''
- %s._map does not contain an entry for "%s"
- ''',
- type_name(self),
- key
- ))
-
- if self._native is None:
- self.native
-
- if self._map is None:
- new_native = list(self._native)
- max_key = len(new_native) - 1
- if key > max_key:
- new_native.extend([0] * (key - max_key))
- new_native[key] = 1 if value else 0
- self._native = tuple(new_native)
-
- else:
- if is_int:
- key = self._map.get(key, key)
-
- if value:
- if key not in self._native:
- self._native.add(key)
- else:
- if key in self._native:
- self._native.remove(key)
-
- self.set(self._native)
-
- def _as_chunk(self):
- """
- Allows reconstructing indefinite length values
-
- :return:
- A tuple of integers
- """
-
- extra_bits = int_from_bytes(self.contents[0:1])
- bit_string = '{0:b}'.format(int_from_bytes(self.contents[1:]))
- byte_len = len(self.contents[1:])
- bit_len = len(bit_string)
-
- # Left-pad the bit string to a byte multiple to ensure we didn't
- # lose any zero bits on the left
- mod_bit_len = bit_len % 8
- if mod_bit_len != 0:
- bit_string = ('0' * (8 - mod_bit_len)) + bit_string
- bit_len = len(bit_string)
-
- if bit_len // 8 < byte_len:
- missing_bytes = byte_len - (bit_len // 8)
- bit_string = ('0' * (8 * missing_bytes)) + bit_string
-
- # Trim off the extra bits on the right used to fill the last byte
- if extra_bits > 0:
- bit_string = bit_string[0:0 - extra_bits]
-
- return tuple(map(int, tuple(bit_string)))
-
- @property
- def native(self):
- """
- The native Python datatype representation of this value
-
- :return:
- If a _map is set, a set of names, or if no _map is set, a tuple of
- integers 1 and 0. None if no value.
- """
-
- # For BitString we default the value to be all zeros
- if self.contents is None:
- if self._map is None:
- self.set(())
- else:
- self.set(set())
-
- if self._native is None:
- bits = self._merge_chunks()
- if self._map:
- self._native = set()
- for index, bit in enumerate(bits):
- if bit:
- name = self._map.get(index, index)
- self._native.add(name)
- else:
- self._native = bits
- return self._native
-
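A short sketch of both BitString forms described above - a set of named bits when _map is defined, and a plain tuple of 1s and 0s otherwise. This assumes the asn1crypto package is importable; the KeyUsage subclass and its bit names are illustrative only.

    from asn1crypto import core

    class KeyUsage(core.BitString):
        _map = {
            0: 'digital_signature',
            1: 'non_repudiation',
            2: 'key_encipherment',
        }

    usage = KeyUsage({'digital_signature', 'key_encipherment'})
    print(usage.native)                            # the set of named bits that are set
    print(usage['non_repudiation'])                # False
    print(core.BitString((1, 0, 1)).dump().hex())  # '030205a0' - 5 unused bits in the final byte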
-
-class OctetBitString(Constructable, Castable, Primitive):
- """
- Represents a bit string in ASN.1 as a Python byte string
- """
-
- tag = 3
-
- # Whenever dealing with octet-based bit strings, we really want the
- # bytes, so we just ignore the unused bits portion since it isn't
- # applicable to the current use case
- # unused_bits = struct.unpack('>B', self.contents[0:1])[0]
- _chunks_offset = 1
-
- # Instance attribute of (possibly-merged) byte string
- _bytes = None
-
- def set(self, value):
- """
- Sets the value of the object
-
- :param value:
- A byte string
-
- :raises:
- ValueError - when an invalid value is passed
- """
-
- if not isinstance(value, byte_cls):
- raise TypeError(unwrap(
- '''
- %s value must be a byte string, not %s
- ''',
- type_name(self),
- type_name(value)
- ))
-
- self._bytes = value
- # Set the unused bits to 0
- self.contents = b'\x00' + value
- self._header = None
- if self._indefinite:
- self._indefinite = False
- self.method = 0
- if self._trailer != b'':
- self._trailer = b''
-
- def __bytes__(self):
- """
- :return:
- A byte string
- """
-
- if self.contents is None:
- return b''
- if self._bytes is None:
- self._bytes = self._merge_chunks()
- return self._bytes
-
- def _copy(self, other, copy_func):
- """
- Copies the contents of another OctetBitString object to itself
-
- :param other:
- Another instance of the same class
-
- :param copy_func:
- A reference to copy.copy() or copy.deepcopy() to use when copying
- lists, dicts and objects
- """
-
- super(OctetBitString, self)._copy(other, copy_func)
- self._bytes = other._bytes
-
- @property
- def native(self):
- """
- The native Python datatype representation of this value
-
- :return:
- A byte string or None
- """
-
- if self.contents is None:
- return None
-
- return self.__bytes__()
-
-
-class IntegerBitString(Constructable, Castable, Primitive):
- """
- Represents a bit string in ASN.1 as a Python integer
- """
-
- tag = 3
-
- _chunks_offset = 1
-
- def set(self, value):
- """
- Sets the value of the object
-
- :param value:
- An integer
-
- :raises:
- ValueError - when an invalid value is passed
- """
-
- if not isinstance(value, int_types):
- raise TypeError(unwrap(
- '''
- %s value must be an integer, not %s
- ''',
- type_name(self),
- type_name(value)
- ))
-
- self._native = value
- # Set the unused bits to 0
- self.contents = b'\x00' + int_to_bytes(value, signed=True)
- self._header = None
- if self._indefinite:
- self._indefinite = False
- self.method = 0
- if self._trailer != b'':
- self._trailer = b''
-
- def _as_chunk(self):
- """
- Allows reconstructing indefinite length values
-
- :return:
- A unicode string of bits - 1s and 0s
- """
-
- extra_bits = int_from_bytes(self.contents[0:1])
- bit_string = '{0:b}'.format(int_from_bytes(self.contents[1:]))
-
- # Ensure we have leading zeros since these chunks may be concatenated together
- mod_bit_len = len(bit_string) % 8
- if mod_bit_len != 0:
- bit_string = ('0' * (8 - mod_bit_len)) + bit_string
-
- if extra_bits > 0:
- return bit_string[0:0 - extra_bits]
-
- return bit_string
-
- @property
- def native(self):
- """
- The native Python datatype representation of this value
-
- :return:
- An integer or None
- """
-
- if self.contents is None:
- return None
-
- if self._native is None:
- extra_bits = int_from_bytes(self.contents[0:1])
- # Fast path
- if not self._indefinite and extra_bits == 0:
- self._native = int_from_bytes(self.contents[1:])
- else:
- if self._indefinite and extra_bits > 0:
- raise ValueError('Constructed bit string has extra bits on indefinite container')
- self._native = int(self._merge_chunks(), 2)
- return self._native
-
-
-class OctetString(Constructable, Castable, Primitive):
- """
- Represents a byte string in both ASN.1 and Python
- """
-
- tag = 4
-
- # Instance attribute of (possibly-merged) byte string
- _bytes = None
-
- def set(self, value):
- """
- Sets the value of the object
-
- :param value:
- A byte string
- """
-
- if not isinstance(value, byte_cls):
- raise TypeError(unwrap(
- '''
- %s value must be a byte string, not %s
- ''',
- type_name(self),
- type_name(value)
- ))
-
- self._bytes = value
- self.contents = value
- self._header = None
- if self._indefinite:
- self._indefinite = False
- self.method = 0
- if self._trailer != b'':
- self._trailer = b''
-
- def __bytes__(self):
- """
- :return:
- A byte string
- """
-
- if self.contents is None:
- return b''
- if self._bytes is None:
- self._bytes = self._merge_chunks()
- return self._bytes
-
- def _copy(self, other, copy_func):
- """
- Copies the contents of another OctetString object to itself
-
- :param other:
- Another instance of the same class
-
- :param copy_func:
- A reference to copy.copy() or copy.deepcopy() to use when copying
- lists, dicts and objects
- """
-
- super(OctetString, self)._copy(other, copy_func)
- self._bytes = other._bytes
-
- @property
- def native(self):
- """
- The native Python datatype representation of this value
-
- :return:
- A byte string or None
- """
-
- if self.contents is None:
- return None
-
- return self.__bytes__()
-
-
-class IntegerOctetString(Constructable, Castable, Primitive):
- """
- Represents a byte string in ASN.1 as a Python integer
- """
-
- tag = 4
-
- def set(self, value):
- """
- Sets the value of the object
-
- :param value:
- An integer
-
- :raises:
- ValueError - when an invalid value is passed
- """
-
- if not isinstance(value, int_types):
- raise TypeError(unwrap(
- '''
- %s value must be an integer, not %s
- ''',
- type_name(self),
- type_name(value)
- ))
-
- self._native = value
- self.contents = int_to_bytes(value, signed=False)
- self._header = None
- if self._indefinite:
- self._indefinite = False
- self.method = 0
- if self._trailer != b'':
- self._trailer = b''
-
- @property
- def native(self):
- """
- The native Python datatype representation of this value
-
- :return:
- An integer or None
- """
-
- if self.contents is None:
- return None
-
- if self._native is None:
- self._native = int_from_bytes(self._merge_chunks())
- return self._native
-
-
-class ParsableOctetString(Constructable, Castable, Primitive):
-
- tag = 4
-
- _parsed = None
-
- # Instance attribute of (possibly-merged) byte string
- _bytes = None
-
- def __init__(self, value=None, parsed=None, **kwargs):
- """
- Allows providing a parsed object that will be serialized to get the
- byte string value
-
- :param value:
- A native Python datatype to initialize the object value with
-
- :param parsed:
- If value is None and parsed is an Asn1Value object, it will be
- set as the parsed value, and the value will be obtained by calling
- .dump() on that object.
- """
-
- set_parsed = False
- if value is None and parsed is not None and isinstance(parsed, Asn1Value):
- value = parsed.dump()
- set_parsed = True
-
- Primitive.__init__(self, value=value, **kwargs)
-
- if set_parsed:
- self._parsed = (parsed, parsed.__class__, None)
-
- def set(self, value):
- """
- Sets the value of the object
-
- :param value:
- A byte string
- """
-
- if not isinstance(value, byte_cls):
- raise TypeError(unwrap(
- '''
- %s value must be a byte string, not %s
- ''',
- type_name(self),
- type_name(value)
- ))
-
- self._bytes = value
- self.contents = value
- self._header = None
- if self._indefinite:
- self._indefinite = False
- self.method = 0
- if self._trailer != b'':
- self._trailer = b''
-
- def parse(self, spec=None, spec_params=None):
- """
- Parses the contents generically, or using a spec with optional params
-
- :param spec:
- A class derived from Asn1Value that defines what class_ and tag the
- value should have, and the semantics of the encoded value. The
- return value will be of this type. If omitted, the encoded value
- will be decoded using the standard universal tag based on the
- encoded tag number.
-
- :param spec_params:
- A dict of params to pass to the spec object
-
- :return:
- An object of the type spec, or if spec is not specified, a child of Asn1Value
- """
-
- if self._parsed is None or self._parsed[1:3] != (spec, spec_params):
- parsed_value, _ = _parse_build(self.__bytes__(), spec=spec, spec_params=spec_params)
- self._parsed = (parsed_value, spec, spec_params)
- return self._parsed[0]
-
- def __bytes__(self):
- """
- :return:
- A byte string
- """
-
- if self.contents is None:
- return b''
- if self._bytes is None:
- self._bytes = self._merge_chunks()
- return self._bytes
-
- def _copy(self, other, copy_func):
- """
- Copies the contents of another ParsableOctetString object to itself
-
- :param other:
- Another instance of the same class
-
- :param copy_func:
- A reference to copy.copy() or copy.deepcopy() to use when copying
- lists, dicts and objects
- """
-
- super(ParsableOctetString, self)._copy(other, copy_func)
- self._bytes = other._bytes
- self._parsed = copy_func(other._parsed)
-
- @property
- def native(self):
- """
- The native Python datatype representation of this value
-
- :return:
- A byte string or None
- """
-
- if self.contents is None:
- return None
-
- if self._parsed is not None:
- return self._parsed[0].native
- else:
- return self.__bytes__()
-
- @property
- def parsed(self):
- """
- Returns the parsed object from .parse()
-
- :return:
- The object returned by .parse()
- """
-
- if self._parsed is None:
- self.parse()
-
- return self._parsed[0]
-
- def dump(self, force=False):
- """
- Encodes the value using DER
-
- :param force:
- If the encoded contents already exist, clear them and regenerate
- to ensure they are in DER format instead of BER format
-
- :return:
- A byte string of the DER-encoded value
- """
-
- if force:
- if self._parsed is not None:
- native = self.parsed.dump(force=force)
- else:
- native = self.native
- self.contents = None
- self.set(native)
-
- return Asn1Value.dump(self)
-
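A minimal sketch of the "parsable" behaviour: the octet string carries an encoded inner value that can be re-parsed with a spec. This assumes the asn1crypto package is importable.

    from asn1crypto import core

    inner = core.Integer(7)
    outer = core.ParsableOctetString(parsed=inner)
    print(outer.native)                         # 7 - the parsed value's native form
    print(outer.dump().hex())                   # '0403020107' - OCTET STRING wrapping the INTEGER

    reloaded = core.ParsableOctetString.load(outer.dump())
    print(reloaded.parse(core.Integer).native)  # 7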
-
-class ParsableOctetBitString(ParsableOctetString):
-
- tag = 3
-
- # Whenever dealing with octet-based bit strings, we really want the
- # bytes, so we just ignore the unused bits portion since it isn't
- # applicable to the current use case
- # unused_bits = struct.unpack('>B', self.contents[0:1])[0]
- _chunks_offset = 1
-
- def set(self, value):
- """
- Sets the value of the object
-
- :param value:
- A byte string
-
- :raises:
- ValueError - when an invalid value is passed
- """
-
- if not isinstance(value, byte_cls):
- raise TypeError(unwrap(
- '''
- %s value must be a byte string, not %s
- ''',
- type_name(self),
- type_name(value)
- ))
-
- self._bytes = value
- # Set the unused bits to 0
- self.contents = b'\x00' + value
- self._header = None
- if self._indefinite:
- self._indefinite = False
- self.method = 0
- if self._trailer != b'':
- self._trailer = b''
-
-
-class Null(Primitive):
- """
- Represents a null value in ASN.1 as None in Python
- """
-
- tag = 5
-
- contents = b''
-
- def set(self, value):
- """
- Sets the value of the object
-
- :param value:
- None
- """
-
- self.contents = b''
-
- @property
- def native(self):
- """
- The native Python datatype representation of this value
-
- :return:
- None
- """
-
- return None
-
-
-class ObjectIdentifier(Primitive, ValueMap):
- """
- Represents an object identifier in ASN.1 as a Python unicode dotted
- integer string
- """
-
- tag = 6
-
- # A unicode string of the dotted form of the object identifier
- _dotted = None
-
- @classmethod
- def map(cls, value):
- """
- Converts a dotted unicode string OID into a mapped unicode string
-
- :param value:
- A dotted unicode string OID
-
- :raises:
- ValueError - when no _map dict has been defined on the class
- TypeError - when value is not a unicode string
-
- :return:
- A mapped unicode string
- """
-
- if cls._map is None:
- raise ValueError(unwrap(
- '''
- %s._map has not been defined
- ''',
- type_name(cls)
- ))
-
- if not isinstance(value, str_cls):
- raise TypeError(unwrap(
- '''
- value must be a unicode string, not %s
- ''',
- type_name(value)
- ))
-
- return cls._map.get(value, value)
-
- @classmethod
- def unmap(cls, value):
- """
- Converts a mapped unicode string value into a dotted unicode string OID
-
- :param value:
- A mapped unicode string OR dotted unicode string OID
-
- :raises:
- ValueError - when no _map dict has been defined on the class or the value can't be unmapped
- TypeError - when value is not a unicode string
-
- :return:
- A dotted unicode string OID
- """
-
- if cls not in _SETUP_CLASSES:
- cls()._setup()
- _SETUP_CLASSES[cls] = True
-
- if cls._map is None:
- raise ValueError(unwrap(
- '''
- %s._map has not been defined
- ''',
- type_name(cls)
- ))
-
- if not isinstance(value, str_cls):
- raise TypeError(unwrap(
- '''
- value must be a unicode string, not %s
- ''',
- type_name(value)
- ))
-
- if value in cls._reverse_map:
- return cls._reverse_map[value]
-
- if not _OID_RE.match(value):
- raise ValueError(unwrap(
- '''
- %s._map does not contain an entry for "%s"
- ''',
- type_name(cls),
- value
- ))
-
- return value
-
- def set(self, value):
- """
- Sets the value of the object
-
- :param value:
- A unicode string. May be a dotted integer string, or if _map is
- provided, one of the mapped values.
-
- :raises:
- ValueError - when an invalid value is passed
- """
-
- if not isinstance(value, str_cls):
- raise TypeError(unwrap(
- '''
- %s value must be a unicode string, not %s
- ''',
- type_name(self),
- type_name(value)
- ))
-
- self._native = value
-
- if self._map is not None:
- if value in self._reverse_map:
- value = self._reverse_map[value]
-
- self.contents = b''
- first = None
- for index, part in enumerate(value.split('.')):
- part = int(part)
-
- # The first two parts are merged into a single byte
- if index == 0:
- first = part
- continue
- elif index == 1:
- part = (first * 40) + part
-
- encoded_part = chr_cls(0x7F & part)
- part = part >> 7
- while part > 0:
- encoded_part = chr_cls(0x80 | (0x7F & part)) + encoded_part
- part = part >> 7
- self.contents += encoded_part
-
- self._header = None
- if self._trailer != b'':
- self._trailer = b''
-
- def __unicode__(self):
- """
- :return:
- A unicode string
- """
-
- return self.dotted
-
- @property
- def dotted(self):
- """
- :return:
- A unicode string of the object identifier in dotted notation, thus
- ignoring any mapped value
- """
-
- if self._dotted is None:
- output = []
-
- part = 0
- for byte in self.contents:
- if _PY2:
- byte = ord(byte)
- part = part * 128
- part += byte & 127
- # Last byte in subidentifier has the eighth bit set to 0
- if byte & 0x80 == 0:
- if len(output) == 0:
- output.append(str_cls(part // 40))
- output.append(str_cls(part % 40))
- else:
- output.append(str_cls(part))
- part = 0
-
- self._dotted = '.'.join(output)
- return self._dotted
-
- @property
- def native(self):
- """
- The native Python datatype representation of this value
-
- :return:
- A unicode string or None. If _map is not defined, the unicode string
- is a string of dotted integers. If _map is defined and the dotted
- string is present in the _map, the mapped value is returned.
- """
-
- if self.contents is None:
- return None
-
- if self._native is None:
- self._native = self.dotted
- if self._map is not None and self._native in self._map:
- self._native = self._map[self._native]
- return self._native
-
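A short sketch of the OID encoding implemented above (first two arcs merged, base-128 bytes) and of the _map/_reverse_map lookups. This assumes the asn1crypto package is importable; the AlgorithmId subclass is illustrative, though the OID shown is the standard sha256WithRSAEncryption identifier.

    from asn1crypto import core

    oid = core.ObjectIdentifier('1.2.840.113549.1.1.11')
    print(oid.dump().hex())                               # '06092a864886f70d01010b'
    print(core.ObjectIdentifier.load(oid.dump()).dotted)  # '1.2.840.113549.1.1.11'

    class AlgorithmId(core.ObjectIdentifier):
        _map = {'1.2.840.113549.1.1.11': 'sha256_rsa'}

    print(AlgorithmId('sha256_rsa').dotted)               # '1.2.840.113549.1.1.11'
    print(AlgorithmId.map('1.2.840.113549.1.1.11'))       # 'sha256_rsa'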
-
-class ObjectDescriptor(Primitive):
- """
- Represents an object descriptor from ASN.1 - no Python implementation
- """
-
- tag = 7
-
-
-class InstanceOf(Primitive):
- """
- Represents an instance from ASN.1 - no Python implementation
- """
-
- tag = 8
-
-
-class Real(Primitive):
- """
- Represents a real number from ASN.1 - no Python implementation
- """
-
- tag = 9
-
-
-class Enumerated(Integer):
- """
- Represents an enumerated list of integers from ASN.1 as a Python
- unicode string
- """
-
- tag = 10
-
- def set(self, value):
- """
- Sets the value of the object
-
- :param value:
- An integer or a unicode string from _map
-
- :raises:
- ValueError - when an invalid value is passed
- """
-
- if not isinstance(value, int_types) and not isinstance(value, str_cls):
- raise TypeError(unwrap(
- '''
- %s value must be an integer or a unicode string, not %s
- ''',
- type_name(self),
- type_name(value)
- ))
-
- if isinstance(value, str_cls):
- if value not in self._reverse_map:
- raise ValueError(unwrap(
- '''
- %s value "%s" is not a valid value
- ''',
- type_name(self),
- value
- ))
-
- value = self._reverse_map[value]
-
- elif value not in self._map:
- raise ValueError(unwrap(
- '''
- %s value %s is not a valid value
- ''',
- type_name(self),
- value
- ))
-
- Integer.set(self, value)
-
- @property
- def native(self):
- """
- The native Python datatype representation of this value
-
- :return:
- A unicode string or None
- """
-
- if self.contents is None:
- return None
-
- if self._native is None:
- self._native = self._map[self.__int__()]
- return self._native
-
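A small sketch of an Enumerated subclass with a _map, assuming the asn1crypto package is importable; the Status class and its values are invented for illustration.

    from asn1crypto import core

    class Status(core.Enumerated):
        _map = {0: 'good', 1: 'revoked', 2: 'unknown'}

    print(Status('revoked').dump().hex())               # '0a0101'
    print(Status.load(bytes.fromhex('0a0101')).native)  # 'revoked'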
-
-class UTF8String(AbstractString):
- """
- Represents a UTF-8 string from ASN.1 as a Python unicode string
- """
-
- tag = 12
- _encoding = 'utf-8'
-
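A brief sketch showing how the _encoding attribute drives encoding and decoding for the string classes, assuming the asn1crypto package is importable.

    from asn1crypto import core

    encoded = core.UTF8String('héllo').dump()
    print(encoded.hex())                         # '0c0668c3a96c6c6f' - contents are UTF-8 bytes
    print(core.UTF8String.load(encoded).native)  # 'héllo'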
-
-class RelativeOid(ObjectIdentifier):
- """
- Represents an object identifier in ASN.1 as a Python unicode dotted
- integer string
- """
-
- tag = 13
-
-
-class Sequence(Asn1Value):
- """
- Represents a sequence of fields from ASN.1 as a Python object with a
- dict-like interface
- """
-
- tag = 16
-
- class_ = 0
- method = 1
-
- # A list of child objects, in order of _fields
- children = None
-
- # Sequence overrides .contents to be a property so that the mutated state
- # of child objects can be checked to ensure everything is up-to-date
- _contents = None
-
- # Variable to track if the object has been mutated
- _mutated = False
-
- # A list of tuples in one of the following forms.
- #
- # Option 1, a unicode string field name and a value class
- #
- # ("name", Asn1ValueClass)
- #
- # Option 2, same as Option 1, but with a dict of class params
- #
- # ("name", Asn1ValueClass, {'explicit': 5})
- _fields = []
-
- # A dict with keys being the name of a field and the value being a unicode
- # string of the method name on self to call to get the spec for that field
- _spec_callbacks = None
-
- # A dict that maps unicode string field names to an index in _fields
- _field_map = None
-
- # A list in the same order as _fields that has tuples in the form (class_, tag)
- _field_ids = None
-
- # An optional 2-element tuple that defines the field names of an OID field
- # and the field that the OID should be used to help decode. Works with the
- # _oid_specs attribute.
- _oid_pair = None
-
- # A dict with keys that are unicode string OID values and values that are
- # Asn1Value classes to use for decoding a variable-type field.
- _oid_specs = None
-
- # A 2-element tuple of the indexes in _fields of the OID and value fields
- _oid_nums = None
-
- # Predetermined field specs to optimize away calls to _determine_spec()
- _precomputed_specs = None
-
- def __init__(self, value=None, default=None, **kwargs):
- """
- Allows setting field values before passing everything else along to
- Asn1Value.__init__()
-
- :param value:
- A native Python datatype to initialize the object value with
-
- :param default:
- The default value if no value is specified
- """
-
- Asn1Value.__init__(self, **kwargs)
-
- check_existing = False
- if value is None and default is not None:
- check_existing = True
- if self.children is None:
- if self.contents is None:
- check_existing = False
- else:
- self._parse_children()
- value = default
-
- if value is not None:
- try:
- # Fields are iterated in definition order to allow things like
- # OID-based specs. Otherwise sometimes the value would be processed
- # before the OID field, resulting in invalid value object creation.
- if self._fields:
- keys = [info[0] for info in self._fields]
- unused_keys = set(value.keys())
- else:
- keys = value.keys()
- unused_keys = set(keys)
-
- for key in keys:
- # If we are setting defaults, but a real value has already
- # been set for the field, then skip it
- if check_existing:
- index = self._field_map[key]
- if index < len(self.children) and self.children[index] is not VOID:
- if key in unused_keys:
- unused_keys.remove(key)
- continue
-
- if key in value:
- self.__setitem__(key, value[key])
- unused_keys.remove(key)
-
- if len(unused_keys):
- raise ValueError(unwrap(
- '''
- One or more unknown fields were passed to the constructor
- of %s: %s
- ''',
- type_name(self),
- ', '.join(sorted(list(unused_keys)))
- ))
-
- except (ValueError, TypeError) as e:
- args = e.args[1:]
- e.args = (e.args[0] + '\n while constructing %s' % type_name(self),) + args
- raise e
-
- @property
- def contents(self):
- """
- :return:
- A byte string of the DER-encoded contents of the sequence
- """
-
- if self.children is None:
- return self._contents
-
- if self._is_mutated():
- self._set_contents()
-
- return self._contents
-
- @contents.setter
- def contents(self, value):
- """
- :param value:
- A byte string of the DER-encoded contents of the sequence
- """
-
- self._contents = value
-
- def _is_mutated(self):
- """
- :return:
- A boolean - if the sequence or any children (recursively) have been
- mutated
- """
-
- mutated = self._mutated
- if self.children is not None:
- for child in self.children:
- if isinstance(child, Sequence) or isinstance(child, SequenceOf):
- mutated = mutated or child._is_mutated()
-
- return mutated
-
- def _lazy_child(self, index):
- """
- Builds a child object if the child has only been parsed into a tuple so far
- """
-
- child = self.children[index]
- if child.__class__ == tuple:
- child = self.children[index] = _build(*child)
- return child
-
- def __len__(self):
- """
- :return:
- Integer
- """
- # We inline this check to prevent method invocation each time
- if self.children is None:
- self._parse_children()
-
- return len(self.children)
-
- def __getitem__(self, key):
- """
- Allows accessing fields by name or index
-
- :param key:
- A unicode string of the field name, or an integer of the field index
-
- :raises:
- KeyError - when a field name or index is invalid
-
- :return:
- The Asn1Value object of the field specified
- """
-
- # We inline this check to prevent method invocation each time
- if self.children is None:
- self._parse_children()
-
- if not isinstance(key, int_types):
- if key not in self._field_map:
- raise KeyError(unwrap(
- '''
- No field named "%s" defined for %s
- ''',
- key,
- type_name(self)
- ))
- key = self._field_map[key]
-
- if key >= len(self.children):
- raise KeyError(unwrap(
- '''
- No field numbered %s is present in this %s
- ''',
- key,
- type_name(self)
- ))
-
- try:
- return self._lazy_child(key)
-
- except (ValueError, TypeError) as e:
- args = e.args[1:]
- e.args = (e.args[0] + '\n while parsing %s' % type_name(self),) + args
- raise e
-
- def __setitem__(self, key, value):
- """
- Allows setting fields by name or index
-
- :param key:
- A unicode string of the field name, or an integer of the field index
-
- :param value:
- A native Python datatype to set the field value to. This method will
- construct the appropriate Asn1Value object from _fields.
-
- :raises:
- ValueError - when a field name or index is invalid
- """
-
- # We inline this check to prevent method invocation each time
- if self.children is None:
- self._parse_children()
-
- if not isinstance(key, int_types):
- if key not in self._field_map:
- raise KeyError(unwrap(
- '''
- No field named "%s" defined for %s
- ''',
- key,
- type_name(self)
- ))
- key = self._field_map[key]
-
- field_name, field_spec, value_spec, field_params, _ = self._determine_spec(key)
-
- new_value = self._make_value(field_name, field_spec, value_spec, field_params, value)
-
- invalid_value = False
- if isinstance(new_value, Any):
- invalid_value = new_value.parsed is None
- elif isinstance(new_value, Choice):
- invalid_value = new_value.chosen.contents is None
- else:
- invalid_value = new_value.contents is None
-
- if invalid_value:
- raise ValueError(unwrap(
- '''
- Value for field "%s" of %s is not set
- ''',
- field_name,
- type_name(self)
- ))
-
- self.children[key] = new_value
-
- if self._native is not None:
- self._native[self._fields[key][0]] = self.children[key].native
- self._mutated = True
-
- def __delitem__(self, key):
- """
- Allows deleting optional or default fields by name or index
-
- :param key:
- A unicode string of the field name, or an integer of the field index
-
- :raises:
- ValueError - when a field name or index is invalid, or the field is not optional or defaulted
- """
-
- # We inline this check to prevent method invocation each time
- if self.children is None:
- self._parse_children()
-
- if not isinstance(key, int_types):
- if key not in self._field_map:
- raise KeyError(unwrap(
- '''
- No field named "%s" defined for %s
- ''',
- key,
- type_name(self)
- ))
- key = self._field_map[key]
-
- name, _, params = self._fields[key]
- if not params or ('default' not in params and 'optional' not in params):
- raise ValueError(unwrap(
- '''
- Can not delete the value for the field "%s" of %s since it is
- not optional or defaulted
- ''',
- name,
- type_name(self)
- ))
-
- if 'optional' in params:
- self.children[key] = VOID
- if self._native is not None:
- self._native[name] = None
- else:
- self.__setitem__(key, None)
- self._mutated = True
-
- def __iter__(self):
- """
- :return:
- An iterator of field key names
- """
-
- for info in self._fields:
- yield info[0]
-
- def _set_contents(self, force=False):
- """
- Updates the .contents attribute of the value with the encoded value of
- all of the child objects
-
- :param force:
- Ensure all contents are in DER format instead of possibly using
- cached BER-encoded data
- """
-
- if self.children is None:
- self._parse_children()
-
- contents = BytesIO()
- for index, info in enumerate(self._fields):
- child = self.children[index]
- if child is None:
- child_dump = b''
- elif child.__class__ == tuple:
- if force:
- child_dump = self._lazy_child(index).dump(force=force)
- else:
- child_dump = child[3] + child[4] + child[5]
- else:
- child_dump = child.dump(force=force)
- # Skip values that are the same as the default
- if info[2] and 'default' in info[2]:
- default_value = info[1](**info[2])
- if default_value.dump() == child_dump:
- continue
- contents.write(child_dump)
- self._contents = contents.getvalue()
-
- self._header = None
- if self._trailer != b'':
- self._trailer = b''
-
- def _setup(self):
- """
- Generates _field_map, _field_ids and _oid_nums for use in parsing
- """
-
- cls = self.__class__
- cls._field_map = {}
- cls._field_ids = []
- cls._precomputed_specs = []
- for index, field in enumerate(cls._fields):
- if len(field) < 3:
- field = field + ({},)
- cls._fields[index] = field
- cls._field_map[field[0]] = index
- cls._field_ids.append(_build_id_tuple(field[2], field[1]))
-
- if cls._oid_pair is not None:
- cls._oid_nums = (cls._field_map[cls._oid_pair[0]], cls._field_map[cls._oid_pair[1]])
-
- for index, field in enumerate(cls._fields):
- has_callback = cls._spec_callbacks is not None and field[0] in cls._spec_callbacks
- is_mapped_oid = cls._oid_nums is not None and cls._oid_nums[1] == index
- if has_callback or is_mapped_oid:
- cls._precomputed_specs.append(None)
- else:
- cls._precomputed_specs.append((field[0], field[1], field[1], field[2], None))
-
- def _determine_spec(self, index):
- """
- Determine how a value for a field should be constructed
-
- :param index:
- The field number
-
- :return:
- A tuple containing the following elements:
- - unicode string of the field name
- - Asn1Value class of the field spec
- - Asn1Value class of the value spec
- - None or dict of params to pass to the field spec
- - None or Asn1Value class indicating the value spec was derived from an OID or a spec callback
- """
-
- name, field_spec, field_params = self._fields[index]
- value_spec = field_spec
- spec_override = None
-
- if self._spec_callbacks is not None and name in self._spec_callbacks:
- callback = self._spec_callbacks[name]
- spec_override = callback(self)
- if spec_override:
- # Allow a spec callback to specify both the base spec and
- # the override, for situations such as OctetString and parse_as
- if spec_override.__class__ == tuple and len(spec_override) == 2:
- field_spec, value_spec = spec_override
- if value_spec is None:
- value_spec = field_spec
- spec_override = None
- # When no field spec is specified, use the single returned spec as both the field and value spec
- elif field_spec is None:
- field_spec = spec_override
- value_spec = field_spec
- spec_override = None
- else:
- value_spec = spec_override
-
- elif self._oid_nums is not None and self._oid_nums[1] == index:
- oid = self._lazy_child(self._oid_nums[0]).native
- if oid in self._oid_specs:
- spec_override = self._oid_specs[oid]
- value_spec = spec_override
-
- return (name, field_spec, value_spec, field_params, spec_override)
-
- def _make_value(self, field_name, field_spec, value_spec, field_params, value):
- """
- Constructs an appropriate Asn1Value object for a field
-
- :param field_name:
- A unicode string of the field name
-
- :param field_spec:
- An Asn1Value class that is the field spec
-
- :param value_spec:
- An Asn1Value class that is the value spec
-
- :param field_params:
- None or a dict of params for the field spec
-
- :param value:
- The value to construct an Asn1Value object from
-
- :return:
- An instance of a child class of Asn1Value
- """
-
- if value is None and 'optional' in field_params:
- return VOID
-
- specs_different = field_spec != value_spec
- is_any = issubclass(field_spec, Any)
-
- if issubclass(value_spec, Choice):
- if not isinstance(value, Asn1Value):
- raise ValueError(unwrap(
- '''
- Can not set a native python value to %s, which has the
- choice type of %s - value must be an instance of Asn1Value
- ''',
- field_name,
- type_name(value_spec)
- ))
- if not isinstance(value, value_spec):
- wrapper = value_spec()
- wrapper.validate(value.class_, value.tag, value.contents)
- wrapper._parsed = value
- new_value = wrapper
- else:
- new_value = value
-
- elif isinstance(value, field_spec):
- new_value = value
- if specs_different:
- new_value.parse(value_spec)
-
- elif (not specs_different or is_any) and not isinstance(value, value_spec):
- new_value = value_spec(value, **field_params)
-
- else:
- if isinstance(value, value_spec):
- new_value = value
- else:
- new_value = value_spec(value)
-
- # For when the field is OctetString or OctetBitString with embedded
- # values we need to wrap the value in the field spec to get the
- # appropriate encoded value.
- if specs_different and not is_any:
- wrapper = field_spec(value=new_value.dump(), **field_params)
- wrapper._parsed = (new_value, new_value.__class__, None)
- new_value = wrapper
-
- new_value = _fix_tagging(new_value, field_params)
-
- return new_value
-
- def _parse_children(self, recurse=False):
- """
- Parses the contents and generates Asn1Value objects based on the
- definitions from _fields.
-
- :param recurse:
- If child objects that are Sequence or SequenceOf objects should
- be recursively parsed
-
- :raises:
- ValueError - when an error occurs parsing child objects
- """
-
- cls = self.__class__
- if self._contents is None:
- if self._fields:
- self.children = [VOID] * len(self._fields)
- for index, (_, _, params) in enumerate(self._fields):
- if 'default' in params:
- if cls._precomputed_specs[index]:
- field_name, field_spec, value_spec, field_params, _ = cls._precomputed_specs[index]
- else:
- field_name, field_spec, value_spec, field_params, _ = self._determine_spec(index)
- self.children[index] = self._make_value(field_name, field_spec, value_spec, field_params, None)
- return
-
- try:
- self.children = []
- contents_length = len(self._contents)
- child_pointer = 0
- field = 0
- field_len = len(self._fields)
- parts = None
- again = child_pointer < contents_length
- while again:
- if parts is None:
- parts, child_pointer = _parse(self._contents, contents_length, pointer=child_pointer)
- again = child_pointer < contents_length
-
- if field < field_len:
- _, field_spec, value_spec, field_params, spec_override = (
- cls._precomputed_specs[field] or self._determine_spec(field))
-
- # If the next value is optional or default, allow it to be absent
- if field_params and ('optional' in field_params or 'default' in field_params):
- if self._field_ids[field] != (parts[0], parts[2]) and field_spec != Any:
-
- # See if the value is a valid choice before assuming
- # that we have a missing optional or default value
- choice_match = False
- if issubclass(field_spec, Choice):
- try:
- tester = field_spec(**field_params)
- tester.validate(parts[0], parts[2], parts[4])
- choice_match = True
- except (ValueError):
- pass
-
- if not choice_match:
- if 'optional' in field_params:
- self.children.append(VOID)
- else:
- self.children.append(field_spec(**field_params))
- field += 1
- again = True
- continue
-
- if field_spec is None or (spec_override and issubclass(field_spec, Any)):
- field_spec = value_spec
- spec_override = None
-
- if spec_override:
- child = parts + (field_spec, field_params, value_spec)
- else:
- child = parts + (field_spec, field_params)
-
- # Handle situations where an optional or defaulted field definition is incorrect
- elif field_len > 0 and field + 1 <= field_len:
- missed_fields = []
- prev_field = field - 1
- while prev_field >= 0:
- prev_field_info = self._fields[prev_field]
- if len(prev_field_info) < 3:
- break
- if 'optional' in prev_field_info[2] or 'default' in prev_field_info[2]:
- missed_fields.append(prev_field_info[0])
- prev_field -= 1
- plural = 's' if len(missed_fields) > 1 else ''
- missed_field_names = ', '.join(missed_fields)
- raise ValueError(unwrap(
- '''
- Data for field %s (%s class, %s method, tag %s) does
- not match the field definition%s of %s
- ''',
- field + 1,
- CLASS_NUM_TO_NAME_MAP.get(parts[0]),
- METHOD_NUM_TO_NAME_MAP.get(parts[1]),
- parts[2],
- plural,
- missed_field_names
- ))
-
- else:
- child = parts
-
- if recurse:
- child = _build(*child)
- if isinstance(child, (Sequence, SequenceOf)):
- child._parse_children(recurse=True)
-
- self.children.append(child)
- field += 1
- parts = None
-
- index = len(self.children)
- while index < field_len:
- name, field_spec, field_params = self._fields[index]
- if 'default' in field_params:
- self.children.append(field_spec(**field_params))
- elif 'optional' in field_params:
- self.children.append(VOID)
- else:
- raise ValueError(unwrap(
- '''
- Field "%s" is missing from structure
- ''',
- name
- ))
- index += 1
-
- except (ValueError, TypeError) as e:
- args = e.args[1:]
- e.args = (e.args[0] + '\n while parsing %s' % type_name(self),) + args
- raise e
-
- def spec(self, field_name):
- """
- Determines the spec to use for the field specified. Depending on how
- the spec is determined (_oid_pair or _spec_callbacks), it may be
- necessary to set preceding field values before calling this. Usually
- specs, if dynamic, are controlled by a preceding ObjectIdentifier
- field.
-
- :param field_name:
- A unicode string of the field name to get the spec for
-
- :return:
- A child class of asn1crypto.core.Asn1Value that the field must be
- encoded using
- """
-
- if not isinstance(field_name, str_cls):
- raise TypeError(unwrap(
- '''
- field_name must be a unicode string, not %s
- ''',
- type_name(field_name)
- ))
-
- if self._fields is None:
- raise ValueError(unwrap(
- '''
- Unable to retrieve spec for field %s in the class %s because
- _fields has not been set
- ''',
- repr(field_name),
- type_name(self)
- ))
-
- index = self._field_map[field_name]
- info = self._determine_spec(index)
-
- return info[2]
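- # Illustrative sketch (hypothetical class, OID and spec names): dynamic
- # specs are normally wired up via _oid_pair/_oid_specs, e.g.
- #
- #     class MyExtension(Sequence):
- #         _fields = [
- #             ('extn_id', ObjectIdentifier),
- #             ('extn_value', ParsableOctetString),
- #         ]
- #         _oid_pair = ('extn_id', 'extn_value')
- #         _oid_specs = {'1.2.3': SomeSpec}
- #
- # so .spec('extn_value') returns SomeSpec once 'extn_id' is set to '1.2.3'.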
-
- @property
- def native(self):
- """
- A native Python datatype representation of this value
-
- :return:
- An OrderedDict or None. If an OrderedDict, all child values are
- recursively converted to native representation also.
- """
-
- if self.contents is None:
- return None
-
- if self._native is None:
- if self.children is None:
- self._parse_children(recurse=True)
- try:
- self._native = OrderedDict()
- for index, child in enumerate(self.children):
- if child.__class__ == tuple:
- child = _build(*child)
- self.children[index] = child
- try:
- name = self._fields[index][0]
- except (IndexError):
- name = str_cls(index)
- self._native[name] = child.native
- except (ValueError, TypeError) as e:
- args = e.args[1:]
- e.args = (e.args[0] + '\n while parsing %s' % type_name(self),) + args
- raise e
- return self._native
-
- def _copy(self, other, copy_func):
- """
- Copies the contents of another Sequence object to itself
-
- :param other:
- Another instance of the same class
-
- :param copy_func:
- A reference to copy.copy() or copy.deepcopy() to use when copying
- lists, dicts and objects
- """
-
- super(Sequence, self)._copy(other, copy_func)
- if self.children is not None:
- self.children = []
- for child in other.children:
- if child.__class__ == tuple:
- self.children.append(child)
- else:
- self.children.append(child.copy())
-
- def debug(self, nest_level=1):
- """
- Show the binary data and parsed data in a tree structure
- """
-
- if self.children is None:
- self._parse_children()
-
- prefix = ' ' * nest_level
- _basic_debug(prefix, self)
- for field_name in self:
- child = self._lazy_child(self._field_map[field_name])
- if child is not VOID:
- print('%s Field "%s"' % (prefix, field_name))
- child.debug(nest_level + 3)
-
- def dump(self, force=False):
- """
- Encodes the value using DER
-
- :param force:
- If the encoded contents already exist, clear them and regenerate
- to ensure they are in DER format instead of BER format
-
- :return:
- A byte string of the DER-encoded value
- """
-
- if force:
- self._set_contents(force=force)
-
- if self._fields and self.children is not None:
- for index, (field_name, _, params) in enumerate(self._fields):
- if self.children[index] is not VOID:
- continue
- if 'default' in params or 'optional' in params:
- continue
- raise ValueError(unwrap(
- '''
- Field "%s" is missing from structure
- ''',
- field_name
- ))
-
- return Asn1Value.dump(self)
-
-
-class SequenceOf(Asn1Value):
- """
- Represents a sequence (ordered) of a single type of values from ASN.1 as a
- Python object with a list-like interface
- """
-
- tag = 16
-
- class_ = 0
- method = 1
-
- # A list of child objects
- children = None
-
- # SequenceOf overrides .contents to be a property so that the mutated state
- # of child objects can be checked to ensure everything is up-to-date
- _contents = None
-
- # Variable to track if the object has been mutated
- _mutated = False
-
- # An Asn1Value class to use when parsing children
- _child_spec = None
-
- def __init__(self, value=None, default=None, contents=None, spec=None, **kwargs):
- """
- Allows setting child objects and the _child_spec via the spec parameter
- before passing everything else along to Asn1Value.__init__()
-
- :param value:
- A native Python datatype to initialize the object value with
-
- :param default:
- The default value if no value is specified
-
- :param contents:
- A byte string of the encoded contents of the value
-
- :param spec:
- A class derived from Asn1Value to use to parse children
- """
-
- if spec:
- self._child_spec = spec
-
- Asn1Value.__init__(self, **kwargs)
-
- try:
- if contents is not None:
- self.contents = contents
- else:
- if value is None and default is not None:
- value = default
-
- if value is not None:
- for index, child in enumerate(value):
- self.__setitem__(index, child)
-
- # Make sure a blank list is serialized
- if self.contents is None:
- self._set_contents()
-
- except (ValueError, TypeError) as e:
- args = e.args[1:]
- e.args = (e.args[0] + '\n while constructing %s' % type_name(self),) + args
- raise e
-
- @property
- def contents(self):
- """
- :return:
- A byte string of the DER-encoded contents of the sequence
- """
-
- if self.children is None:
- return self._contents
-
- if self._is_mutated():
- self._set_contents()
-
- return self._contents
-
- @contents.setter
- def contents(self, value):
- """
- :param value:
- A byte string of the DER-encoded contents of the sequence
- """
-
- self._contents = value
-
- def _is_mutated(self):
- """
- :return:
- A boolean - if the sequence or any children (recursively) have been
- mutated
- """
-
- mutated = self._mutated
- if self.children is not None:
- for child in self.children:
- if isinstance(child, Sequence) or isinstance(child, SequenceOf):
- mutated = mutated or child._is_mutated()
-
- return mutated
-
- def _lazy_child(self, index):
- """
- Builds a child object if the child has only been parsed into a tuple so far
- """
-
- child = self.children[index]
- if child.__class__ == tuple:
- child = _build(*child)
- self.children[index] = child
- return child
-
- def _make_value(self, value):
- """
- Constructs a _child_spec value from a native Python data type, or
- an appropriate Asn1Value object
-
- :param value:
- A native Python value, or some child of Asn1Value
-
- :return:
- An object of type _child_spec
- """
-
- if isinstance(value, self._child_spec):
- new_value = value
-
- elif issubclass(self._child_spec, Any):
- if isinstance(value, Asn1Value):
- new_value = value
- else:
- raise ValueError(unwrap(
- '''
- Can not set a native python value to %s where the
- _child_spec is Any - value must be an instance of Asn1Value
- ''',
- type_name(self)
- ))
-
- elif issubclass(self._child_spec, Choice):
- if not isinstance(value, Asn1Value):
- raise ValueError(unwrap(
- '''
- Can not set a native python value to %s where the
- _child_spec is the choice type %s - value must be an
- instance of Asn1Value
- ''',
- type_name(self),
- self._child_spec.__name__
- ))
- if not isinstance(value, self._child_spec):
- wrapper = self._child_spec()
- wrapper.validate(value.class_, value.tag, value.contents)
- wrapper._parsed = value
- value = wrapper
- new_value = value
-
- else:
- return self._child_spec(value=value)
-
- params = {}
- if self._child_spec.explicit:
- params['explicit'] = self._child_spec.explicit
- if self._child_spec.implicit:
- params['implicit'] = (self._child_spec.class_, self._child_spec.tag)
- return _fix_tagging(new_value, params)
-
- def __len__(self):
- """
- :return:
- An integer
- """
- # We inline this check to prevent method invocation each time
- if self.children is None:
- self._parse_children()
-
- return len(self.children)
-
- def __getitem__(self, key):
- """
- Allows accessing children via index
-
- :param key:
- Integer index of child
- """
-
- # We inline this check to prevent method invocation each time
- if self.children is None:
- self._parse_children()
-
- return self._lazy_child(key)
-
- def __setitem__(self, key, value):
- """
- Allows overriding a child via index
-
- :param key:
- Integer index of child
-
- :param value:
- Native python datatype that will be passed to _child_spec to create
- new child object
- """
-
- # We inline this check to prevent method invocation each time
- if self.children is None:
- self._parse_children()
-
- new_value = self._make_value(value)
-
- # If adding at the end, create a space for the new value
- if key == len(self.children):
- self.children.append(None)
- if self._native is not None:
- self._native.append(None)
-
- self.children[key] = new_value
-
- if self._native is not None:
- self._native[key] = self.children[key].native
-
- self._mutated = True
-
- def __delitem__(self, key):
- """
- Allows removing a child via index
-
- :param key:
- Integer index of child
- """
-
- # We inline this check to prevent method invocation each time
- if self.children is None:
- self._parse_children()
-
- self.children.pop(key)
- if self._native is not None:
- self._native.pop(key)
-
- self._mutated = True
-
- def __iter__(self):
- """
- :return:
- An iter() of child objects
- """
-
- # We inline this check to prevent method invocation each time
- if self.children is None:
- self._parse_children()
-
- for index in range(0, len(self.children)):
- yield self._lazy_child(index)
-
- def __contains__(self, item):
- """
- :param item:
- An object of the type cls._child_spec
-
- :return:
- A boolean if the item is contained in this SequenceOf
- """
-
- if item is None or item is VOID:
- return False
-
- if not isinstance(item, self._child_spec):
- raise TypeError(unwrap(
- '''
- Checking membership in %s is only available for instances of
- %s, not %s
- ''',
- type_name(self),
- type_name(self._child_spec),
- type_name(item)
- ))
-
- for child in self:
- if child == item:
- return True
-
- return False
-
- def append(self, value):
- """
- Allows adding a child to the end of the sequence
-
- :param value:
- Native python datatype that will be passed to _child_spec to create
- new child object
- """
-
- # We inline this check to prevent method invocation each time
- if self.children is None:
- self._parse_children()
-
- self.children.append(self._make_value(value))
-
- if self._native is not None:
- self._native.append(self.children[-1].native)
-
- self._mutated = True
-
- def _set_contents(self, force=False):
- """
- Encodes all child objects into the contents for this object
-
- :param force:
- Ensure all contents are in DER format instead of possibly using
- cached BER-encoded data
- """
-
- if self.children is None:
- self._parse_children()
-
- contents = BytesIO()
- for child in self:
- contents.write(child.dump(force=force))
- self._contents = contents.getvalue()
- self._header = None
- if self._trailer != b'':
- self._trailer = b''
-
- def _parse_children(self, recurse=False):
- """
- Parses the contents and generates Asn1Value objects based on the
- definitions from _child_spec.
-
- :param recurse:
- If child objects that are Sequence or SequenceOf objects should
- be recursively parsed
-
- :raises:
- ValueError - when an error occurs parsing child objects
- """
-
- try:
- self.children = []
- if self._contents is None:
- return
- contents_length = len(self._contents)
- child_pointer = 0
- while child_pointer < contents_length:
- parts, child_pointer = _parse(self._contents, contents_length, pointer=child_pointer)
- if self._child_spec:
- child = parts + (self._child_spec,)
- else:
- child = parts
- if recurse:
- child = _build(*child)
- if isinstance(child, (Sequence, SequenceOf)):
- child._parse_children(recurse=True)
- self.children.append(child)
- except (ValueError, TypeError) as e:
- args = e.args[1:]
- e.args = (e.args[0] + '\n while parsing %s' % type_name(self),) + args
- raise e
-
- def spec(self):
- """
- Determines the spec to use for child values.
-
- :return:
- A child class of asn1crypto.core.Asn1Value that child values must be
- encoded using
- """
-
- return self._child_spec
-
- @property
- def native(self):
- """
- A native Python datatype representation of this value
-
- :return:
- A list or None. If a list, all child values are recursively
- converted to native representation also.
- """
-
- if self.contents is None:
- return None
-
- if self._native is None:
- if self.children is None:
- self._parse_children(recurse=True)
- try:
- self._native = [child.native for child in self]
- except (ValueError, TypeError) as e:
- args = e.args[1:]
- e.args = (e.args[0] + '\n while parsing %s' % type_name(self),) + args
- raise e
- return self._native
-
- def _copy(self, other, copy_func):
- """
- Copies the contents of another SequenceOf object to itself
-
- :param other:
- Another instance of the same class
-
- :param copy_func:
- A reference to copy.copy() or copy.deepcopy() to use when copying
- lists, dicts and objects
- """
-
- super(SequenceOf, self)._copy(other, copy_func)
- if self.children is not None:
- self.children = []
- for child in other.children:
- if child.__class__ == tuple:
- self.children.append(child)
- else:
- self.children.append(child.copy())
-
- def debug(self, nest_level=1):
- """
- Show the binary data and parsed data in a tree structure
- """
-
- if self.children is None:
- self._parse_children()
-
- prefix = ' ' * nest_level
- _basic_debug(prefix, self)
- for child in self:
- child.debug(nest_level + 1)
-
- def dump(self, force=False):
- """
- Encodes the value using DER
-
- :param force:
- If the encoded contents already exist, clear them and regenerate
- to ensure they are in DER format instead of BER format
-
- :return:
- A byte string of the DER-encoded value
- """
-
- if force:
- self._set_contents(force=force)
-
- return Asn1Value.dump(self)
-
-
-class Set(Sequence):
- """
- Represents a set of fields (unordered) from ASN.1 as a Python object with a
- dict-like interface
- """
-
- method = 1
- class_ = 0
- tag = 17
-
- # A dict of 2-element tuples in the form (class_, tag) as keys and integers
- # as values that are the index of the field in _fields
- _field_ids = None
-
- def _setup(self):
- """
- Generates _field_map, _field_ids and _oid_nums for use in parsing
- """
-
- cls = self.__class__
- cls._field_map = {}
- cls._field_ids = {}
- cls._precomputed_specs = []
- for index, field in enumerate(cls._fields):
- if len(field) < 3:
- field = field + ({},)
- cls._fields[index] = field
- cls._field_map[field[0]] = index
- cls._field_ids[_build_id_tuple(field[2], field[1])] = index
-
- if cls._oid_pair is not None:
- cls._oid_nums = (cls._field_map[cls._oid_pair[0]], cls._field_map[cls._oid_pair[1]])
-
- for index, field in enumerate(cls._fields):
- has_callback = cls._spec_callbacks is not None and field[0] in cls._spec_callbacks
- is_mapped_oid = cls._oid_nums is not None and cls._oid_nums[1] == index
- if has_callback or is_mapped_oid:
- cls._precomputed_specs.append(None)
- else:
- cls._precomputed_specs.append((field[0], field[1], field[1], field[2], None))
-
- def _parse_children(self, recurse=False):
- """
- Parses the contents and generates Asn1Value objects based on the
- definitions from _fields.
-
- :param recurse:
- If child objects that are Sequence or SequenceOf objects should
- be recursively parsed
-
- :raises:
- ValueError - when an error occurs parsing child objects
- """
-
- cls = self.__class__
- if self._contents is None:
- if self._fields:
- self.children = [VOID] * len(self._fields)
- for index, (_, _, params) in enumerate(self._fields):
- if 'default' in params:
- if cls._precomputed_specs[index]:
- field_name, field_spec, value_spec, field_params, _ = cls._precomputed_specs[index]
- else:
- field_name, field_spec, value_spec, field_params, _ = self._determine_spec(index)
- self.children[index] = self._make_value(field_name, field_spec, value_spec, field_params, None)
- return
-
- try:
- child_map = {}
- contents_length = len(self.contents)
- child_pointer = 0
- seen_field = 0
- while child_pointer < contents_length:
- parts, child_pointer = _parse(self.contents, contents_length, pointer=child_pointer)
-
- id_ = (parts[0], parts[2])
-
- field = self._field_ids.get(id_)
- if field is None:
- raise ValueError(unwrap(
- '''
- Data for field %s (%s class, %s method, tag %s) does
- not match any of the field definitions
- ''',
- seen_field,
- CLASS_NUM_TO_NAME_MAP.get(parts[0]),
- METHOD_NUM_TO_NAME_MAP.get(parts[1]),
- parts[2],
- ))
-
- _, field_spec, value_spec, field_params, spec_override = (
- cls._precomputed_specs[field] or self._determine_spec(field))
-
- if field_spec is None or (spec_override and issubclass(field_spec, Any)):
- field_spec = value_spec
- spec_override = None
-
- if spec_override:
- child = parts + (field_spec, field_params, value_spec)
- else:
- child = parts + (field_spec, field_params)
-
- if recurse:
- child = _build(*child)
- if isinstance(child, (Sequence, SequenceOf)):
- child._parse_children(recurse=True)
-
- child_map[field] = child
- seen_field += 1
-
- total_fields = len(self._fields)
-
- for index in range(0, total_fields):
- if index in child_map:
- continue
-
- name, field_spec, value_spec, field_params, spec_override = (
- cls._precomputed_specs[index] or self._determine_spec(index))
-
- if field_spec is None or (spec_override and issubclass(field_spec, Any)):
- field_spec = value_spec
- spec_override = None
-
- missing = False
-
- if not field_params:
- missing = True
- elif 'optional' not in field_params and 'default' not in field_params:
- missing = True
- elif 'optional' in field_params:
- child_map[index] = VOID
- elif 'default' in field_params:
- child_map[index] = field_spec(**field_params)
-
- if missing:
- raise ValueError(unwrap(
- '''
- Missing required field "%s" from %s
- ''',
- name,
- type_name(self)
- ))
-
- self.children = []
- for index in range(0, total_fields):
- self.children.append(child_map[index])
-
- except (ValueError, TypeError) as e:
- args = e.args[1:]
- e.args = (e.args[0] + '\n while parsing %s' % type_name(self),) + args
- raise e
-
- def _set_contents(self, force=False):
- """
- Encodes all child objects into the contents for this object.
-
- This method is overridden because a Set needs to be encoded by
- removing defaulted fields and then sorting the fields by tag.
-
- :param force:
- Ensure all contents are in DER format instead of possibly using
- cached BER-encoded data
- """
-
- if self.children is None:
- self._parse_children()
-
- child_tag_encodings = []
- for index, child in enumerate(self.children):
- child_encoding = child.dump(force=force)
-
- # Skip encoding defaulted children
- name, spec, field_params = self._fields[index]
- if 'default' in field_params:
- if spec(**field_params).dump() == child_encoding:
- continue
-
- child_tag_encodings.append((child.tag, child_encoding))
- child_tag_encodings.sort(key=lambda ct: ct[0])
-
- self._contents = b''.join([ct[1] for ct in child_tag_encodings])
- self._header = None
- if self._trailer != b'':
- self._trailer = b''
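- # Note (illustrative, not from the original module): this implements the DER
- # rules for SET - children equal to their declared default are omitted and
- # the remaining encodings are ordered by tag number.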
-
-
-class SetOf(SequenceOf):
- """
- Represents a set (unordered) of a single type of values from ASN.1 as a
- Python object with a list-like interface
- """
-
- tag = 17
-
- def _set_contents(self, force=False):
- """
- Encodes all child objects into the contents for this object.
-
- This method is overridden because a SetOf needs to be encoded by
- sorting the child encodings.
-
- :param force:
- Ensure all contents are in DER format instead of possibly using
- cached BER-encoded data
- """
-
- if self.children is None:
- self._parse_children()
-
- child_encodings = []
- for child in self:
- child_encodings.append(child.dump(force=force))
-
- self._contents = b''.join(sorted(child_encodings))
- self._header = None
- if self._trailer != b'':
- self._trailer = b''
-
-
-class EmbeddedPdv(Sequence):
- """
- A sequence structure
- """
-
- tag = 11
-
-
-class NumericString(AbstractString):
- """
- Represents a numeric string from ASN.1 as a Python unicode string
- """
-
- tag = 18
- _encoding = 'latin1'
-
-
-class PrintableString(AbstractString):
- """
- Represents a printable string from ASN.1 as a Python unicode string
- """
-
- tag = 19
- _encoding = 'latin1'
-
-
-class TeletexString(AbstractString):
- """
- Represents a teletex string from ASN.1 as a Python unicode string
- """
-
- tag = 20
- _encoding = 'teletex'
-
-
-class VideotexString(OctetString):
- """
- Represents a videotex string from ASN.1 as a Python byte string
- """
-
- tag = 21
-
-
-class IA5String(AbstractString):
- """
- Represents an IA5 string from ASN.1 as a Python unicode string
- """
-
- tag = 22
- _encoding = 'ascii'
-
-
-class AbstractTime(AbstractString):
- """
- Represents a time from ASN.1 as a Python datetime.datetime object
- """
-
- @property
- def native(self):
- """
- A native Python datatype representation of this value
-
- :return:
- A datetime.datetime object in the UTC timezone or None
- """
-
- if self.contents is None:
- return None
-
- if self._native is None:
- string = str_cls(self)
- has_timezone = re.search('[-\\+]', string)
-
- # We don't know what timezone it is in, or it is UTC because of a Z
- # suffix, so we just assume UTC
- if not has_timezone:
- string = string.rstrip('Z')
- date = self._date_by_len(string)
- self._native = date.replace(tzinfo=timezone.utc)
-
- else:
- # Python 2 doesn't support the %z format code, so we have to manually
- # process the timezone offset.
- date = self._date_by_len(string[0:-5])
-
- hours = int(string[-4:-2])
- minutes = int(string[-2:])
- delta = timedelta(hours=abs(hours), minutes=minutes)
- if hours < 0:
- date -= delta
- else:
- date += delta
-
- self._native = date.replace(tzinfo=timezone.utc)
-
- return self._native
-
-
-class UTCTime(AbstractTime):
- """
- Represents a UTC time from ASN.1 as a Python datetime.datetime object in UTC
- """
-
- tag = 23
-
- def set(self, value):
- """
- Sets the value of the object
-
- :param value:
- A unicode string or a datetime.datetime object
-
- :raises:
- ValueError - when an invalid value is passed
- """
-
- if isinstance(value, datetime):
- value = value.strftime('%y%m%d%H%M%SZ')
- if _PY2:
- value = value.decode('ascii')
-
- AbstractString.set(self, value)
- # Set it to None and let the class take care of converting the next
- # time that .native is called
- self._native = None
-
- def _date_by_len(self, string):
- """
- Parses a date from a string based on its length
-
- :param string:
- A unicode string to parse
-
- :return:
- A datetime.datetime object or a unicode string
- """
-
- strlen = len(string)
-
- year_num = int(string[0:2])
- if year_num < 50:
- prefix = '20'
- else:
- prefix = '19'
-
- if strlen == 10:
- return datetime.strptime(prefix + string, '%Y%m%d%H%M')
-
- if strlen == 12:
- return datetime.strptime(prefix + string, '%Y%m%d%H%M%S')
-
- return string
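- # Illustrative examples (values assumed, not from this module): per the
- # two-digit year pivot above, '490101000000Z' parses to 2049-01-01 00:00:00
- # UTC while '500101000000Z' parses to 1950-01-01 00:00:00 UTC, matching the
- # RFC 5280 rules for UTCTime.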
-
-
-class GeneralizedTime(AbstractTime):
- """
- Represents a generalized time from ASN.1 as a Python datetime.datetime
- object or asn1crypto.util.extended_datetime object in UTC
- """
-
- tag = 24
-
- def set(self, value):
- """
- Sets the value of the object
-
- :param value:
- A unicode string, a datetime.datetime object or an
- asn1crypto.util.extended_datetime object
-
- :raises:
- ValueError - when an invalid value is passed
- """
-
- if isinstance(value, (datetime, extended_datetime)):
- value = value.strftime('%Y%m%d%H%M%SZ')
- if _PY2:
- value = value.decode('ascii')
-
- AbstractString.set(self, value)
- # Set it to None and let the class take care of converting the next
- # time that .native is called
- self._native = None
-
- def _date_by_len(self, string):
- """
- Parses a date from a string based on its length
-
- :param string:
- A unicode string to parse
-
- :return:
- A datetime.datetime object, asn1crypto.util.extended_datetime object or
- a unicode string
- """
-
- strlen = len(string)
-
- date_format = None
- if strlen == 10:
- date_format = '%Y%m%d%H'
- elif strlen == 12:
- date_format = '%Y%m%d%H%M'
- elif strlen == 14:
- date_format = '%Y%m%d%H%M%S'
- elif strlen == 18:
- date_format = '%Y%m%d%H%M%S.%f'
-
- if date_format:
- if len(string) >= 4 and string[0:4] == '0000':
- # datetime can not represent year 0, so parse as year 2000, which shares the same calendar, then substitute year 0 via extended_datetime
- t = datetime.strptime('2000' + string[4:], date_format)
- return extended_datetime(
- 0,
- t.month,
- t.day,
- t.hour,
- t.minute,
- t.second,
- t.microsecond,
- t.tzinfo
- )
- return datetime.strptime(string, date_format)
-
- return string
-
-
-class GraphicString(AbstractString):
- """
- Represents a graphic string from ASN.1 as a Python unicode string
- """
-
- tag = 25
- # This is technically not correct since this type can contain any charset
- _encoding = 'latin1'
-
-
-class VisibleString(AbstractString):
- """
- Represents a visible string from ASN.1 as a Python unicode string
- """
-
- tag = 26
- _encoding = 'latin1'
-
-
-class GeneralString(AbstractString):
- """
- Represents a general string from ASN.1 as a Python unicode string
- """
-
- tag = 27
- # This is technically not correct since this type can contain any charset
- _encoding = 'latin1'
-
-
-class UniversalString(AbstractString):
- """
- Represents a universal string from ASN.1 as a Python unicode string
- """
-
- tag = 28
- _encoding = 'utf-32-be'
-
-
-class CharacterString(AbstractString):
- """
- Represents a character string from ASN.1 as a Python unicode string
- """
-
- tag = 29
- # This is technically not correct since this type can contain any charset
- _encoding = 'latin1'
-
-
-class BMPString(AbstractString):
- """
- Represents a BMP string from ASN.1 as a Python unicode string
- """
-
- tag = 30
- _encoding = 'utf-16-be'
-
-
-def _basic_debug(prefix, self):
- """
- Prints out basic information about an Asn1Value object. Extracted for reuse
- among different classes that customize the debug information.
-
- :param prefix:
- A unicode string of spaces to prefix output line with
-
- :param self:
- The object to print the debugging information about
- """
-
- print('%s%s Object #%s' % (prefix, type_name(self), id(self)))
- if self._header:
- print('%s Header: 0x%s' % (prefix, binascii.hexlify(self._header or b'').decode('utf-8')))
-
- has_header = self.method is not None and self.class_ is not None and self.tag is not None
- if has_header:
- method_name = METHOD_NUM_TO_NAME_MAP.get(self.method)
- class_name = CLASS_NUM_TO_NAME_MAP.get(self.class_)
-
- if self.explicit is not None:
- for class_, tag in self.explicit:
- print(
- '%s %s tag %s (explicitly tagged)' %
- (
- prefix,
- CLASS_NUM_TO_NAME_MAP.get(class_),
- tag
- )
- )
- if has_header:
- print('%s %s %s %s' % (prefix, method_name, class_name, self.tag))
-
- elif self.implicit:
- if has_header:
- print('%s %s %s tag %s (implicitly tagged)' % (prefix, method_name, class_name, self.tag))
-
- elif has_header:
- print('%s %s %s tag %s' % (prefix, method_name, class_name, self.tag))
-
- print('%s Data: 0x%s' % (prefix, binascii.hexlify(self.contents or b'').decode('utf-8')))
-
-
-def _tag_type_to_explicit_implicit(params):
- """
- Converts old-style "tag_type" and "tag" params to "explicit" and "implicit"
-
- :param params:
- A dict of parameters to convert from tag_type/tag to explicit/implicit
- """
-
- if 'tag_type' in params:
- if params['tag_type'] == 'explicit':
- params['explicit'] = (params.get('class', 2), params['tag'])
- elif params['tag_type'] == 'implicit':
- params['implicit'] = (params.get('class', 2), params['tag'])
- del params['tag_type']
- del params['tag']
- if 'class' in params:
- del params['class']
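- # Illustrative example (values assumed): {'tag_type': 'explicit', 'tag': 0}
- # becomes {'explicit': (2, 0)} and {'tag_type': 'implicit', 'class': 1,
- # 'tag': 3} becomes {'implicit': (1, 3)}; class 2 is context-specific.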
-
-
-def _fix_tagging(value, params):
- """
- Checks if a value is properly tagged based on the spec, and re/untags as
- necessary
-
- :param value:
- An Asn1Value object
-
- :param params:
- A dict of spec params
-
- :return:
- An Asn1Value that is properly tagged
- """
-
- _tag_type_to_explicit_implicit(params)
-
- retag = False
- if 'implicit' not in params:
- if value.implicit is not False:
- retag = True
- else:
- if isinstance(params['implicit'], tuple):
- class_, tag = params['implicit']
- else:
- tag = params['implicit']
- class_ = 'context'
- if value.implicit is False:
- retag = True
- elif value.class_ != CLASS_NAME_TO_NUM_MAP[class_] or value.tag != tag:
- retag = True
-
- if params.get('explicit') != value.explicit:
- retag = True
-
- if retag:
- return value.retag(params)
- return value
-
-
-def _build_id_tuple(params, spec):
- """
- Builds a 2-element tuple used to identify fields by grabbing the class_
- and tag from an Asn1Value class and the params dict being passed to it
-
- :param params:
- A dict of params to pass to spec
-
- :param spec:
- An Asn1Value class
-
- :return:
- A 2-element integer tuple in the form (class_, tag)
- """
-
- # Handle situations where the spec is not known at setup time
- if spec is None:
- return (None, None)
-
- required_class = spec.class_
- required_tag = spec.tag
-
- _tag_type_to_explicit_implicit(params)
-
- if 'explicit' in params:
- if isinstance(params['explicit'], tuple):
- required_class, required_tag = params['explicit']
- else:
- required_class = 2
- required_tag = params['explicit']
- elif 'implicit' in params:
- if isinstance(params['implicit'], tuple):
- required_class, required_tag = params['implicit']
- else:
- required_class = 2
- required_tag = params['implicit']
- if required_class is not None and not isinstance(required_class, int_types):
- required_class = CLASS_NAME_TO_NUM_MAP[required_class]
-
- required_class = params.get('class_', required_class)
- required_tag = params.get('tag', required_tag)
-
- return (required_class, required_tag)
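- # Illustrative example (hypothetical usage): for a spec with class_ 0 and
- # tag 1, such as Boolean, _build_id_tuple({}, Boolean) returns (0, 1) while
- # _build_id_tuple({'implicit': (2, 5)}, Boolean) returns (2, 5).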
-
-
-_UNIVERSAL_SPECS = {
- 1: Boolean,
- 2: Integer,
- 3: BitString,
- 4: OctetString,
- 5: Null,
- 6: ObjectIdentifier,
- 7: ObjectDescriptor,
- 8: InstanceOf,
- 9: Real,
- 10: Enumerated,
- 11: EmbeddedPdv,
- 12: UTF8String,
- 13: RelativeOid,
- 16: Sequence,
- 17: Set,
- 18: NumericString,
- 19: PrintableString,
- 20: TeletexString,
- 21: VideotexString,
- 22: IA5String,
- 23: UTCTime,
- 24: GeneralizedTime,
- 25: GraphicString,
- 26: VisibleString,
- 27: GeneralString,
- 28: UniversalString,
- 29: CharacterString,
- 30: BMPString
-}
-
-
-def _build(class_, method, tag, header, contents, trailer, spec=None, spec_params=None, nested_spec=None):
- """
- Builds an Asn1Value object generically, or using a spec with optional params
-
- :param class_:
- An integer representing the ASN.1 class
-
- :param method:
- An integer representing the ASN.1 method
-
- :param tag:
- An integer representing the ASN.1 tag
-
- :param header:
- A byte string of the ASN.1 header (class, method, tag, length)
-
- :param contents:
- A byte string of the ASN.1 value
-
- :param trailer:
- A byte string of any ASN.1 trailer (only used by indefinite length encodings)
-
- :param spec:
- A class derived from Asn1Value that defines what class_ and tag the
- value should have, and the semantics of the encoded value. The
- return value will be of this type. If omitted, the encoded value
- will be decoded using the standard universal tag based on the
- encoded tag number.
-
- :param spec_params:
- A dict of params to pass to the spec object
-
- :param nested_spec:
- For certain Asn1Value classes (such as OctetString and BitString), the
- contents can be further parsed and interpreted as another Asn1Value.
- This parameter controls the spec for that sub-parsing.
-
- :return:
- An object of the type spec, or if not specified, a child of Asn1Value
- """
-
- if spec_params is not None:
- _tag_type_to_explicit_implicit(spec_params)
-
- if header is None:
- return VOID
-
- header_set = False
-
- # If an explicit specification was passed in, make sure it matches
- if spec is not None:
- if spec_params:
- value = spec(contents=contents, **spec_params)
- else:
- value = spec(contents=contents)
-
- if spec is Any:
- pass
-
- elif value.explicit:
- original_explicit = value.explicit
- explicit_info = reversed(original_explicit)
- parsed_class = class_
- parsed_method = method
- parsed_tag = tag
- to_parse = contents
- explicit_header = header
- explicit_trailer = trailer or b''
- for expected_class, expected_tag in explicit_info:
- if parsed_class != expected_class:
- raise ValueError(unwrap(
- '''
- Error parsing %s - explicitly-tagged class should have been
- %s, but %s was found
- ''',
- type_name(value),
- CLASS_NUM_TO_NAME_MAP.get(expected_class),
- CLASS_NUM_TO_NAME_MAP.get(parsed_class, parsed_class)
- ))
- if parsed_method != 1:
- raise ValueError(unwrap(
- '''
- Error parsing %s - explicitly-tagged method should have
- been %s, but %s was found
- ''',
- type_name(value),
- METHOD_NUM_TO_NAME_MAP.get(1),
- METHOD_NUM_TO_NAME_MAP.get(parsed_method, parsed_method)
- ))
- if parsed_tag != expected_tag:
- raise ValueError(unwrap(
- '''
- Error parsing %s - explicitly-tagged tag should have been
- %s, but %s was found
- ''',
- type_name(value),
- expected_tag,
- parsed_tag
- ))
- info, _ = _parse(to_parse, len(to_parse))
- parsed_class, parsed_method, parsed_tag, parsed_header, to_parse, parsed_trailer = info
- explicit_header += parsed_header
- explicit_trailer = parsed_trailer + explicit_trailer
- value = _build(*info, spec=spec, spec_params={'no_explicit': True})
- value._header = explicit_header
- value._trailer = explicit_trailer
- value.explicit = original_explicit
- header_set = True
-
- elif isinstance(value, Choice):
- value.validate(class_, tag, contents)
- try:
- # Force parsing the Choice now
- value.contents = header + value.contents
- header = b''
- value.parse()
- except (ValueError, TypeError) as e:
- args = e.args[1:]
- e.args = (e.args[0] + '\n while parsing %s' % type_name(value),) + args
- raise e
-
- else:
- if class_ != value.class_:
- raise ValueError(unwrap(
- '''
- Error parsing %s - class should have been %s, but %s was
- found
- ''',
- type_name(value),
- CLASS_NUM_TO_NAME_MAP.get(value.class_),
- CLASS_NUM_TO_NAME_MAP.get(class_, class_)
- ))
- if method != value.method:
- # Allow parsing a primitive method as constructed if the value
- # is indefinite length. This is to allow parsing BER.
- ber_indef = method == 1 and value.method == 0 and trailer == b'\x00\x00'
- if not ber_indef or not isinstance(value, Constructable):
- raise ValueError(unwrap(
- '''
- Error parsing %s - method should have been %s, but %s was found
- ''',
- type_name(value),
- METHOD_NUM_TO_NAME_MAP.get(value.method),
- METHOD_NUM_TO_NAME_MAP.get(method, method)
- ))
- else:
- value.method = method
- value._indefinite = True
- if tag != value.tag and tag != value._bad_tag:
- raise ValueError(unwrap(
- '''
- Error parsing %s - tag should have been %s, but %s was found
- ''',
- type_name(value),
- value.tag,
- tag
- ))
-
- # For explicitly tagged, un-speced parsings, we use a generic container
- # since we will be parsing the contents and discarding the outer object
- # anyway a little further on
- elif spec_params and 'explicit' in spec_params:
- original_value = Asn1Value(contents=contents, **spec_params)
- original_explicit = original_value.explicit
-
- to_parse = contents
- explicit_header = header
- explicit_trailer = trailer or b''
- for expected_class, expected_tag in reversed(original_explicit):
- info, _ = _parse(to_parse, len(to_parse))
- _, _, _, parsed_header, to_parse, parsed_trailer = info
- explicit_header += parsed_header
- explicit_trailer = parsed_trailer + explicit_trailer
- value = _build(*info, spec=spec, spec_params={'no_explicit': True})
- value._header = header + value._header
- value._trailer += trailer or b''
- value.explicit = original_explicit
- header_set = True
-
- # If no spec was specified, allow anything and just process what
- # is in the input data
- else:
- if tag not in _UNIVERSAL_SPECS:
- raise ValueError(unwrap(
- '''
- Unknown element - %s class, %s method, tag %s
- ''',
- CLASS_NUM_TO_NAME_MAP.get(class_),
- METHOD_NUM_TO_NAME_MAP.get(method),
- tag
- ))
-
- spec = _UNIVERSAL_SPECS[tag]
-
- value = spec(contents=contents, class_=class_)
- ber_indef = method == 1 and value.method == 0 and trailer == b'\x00\x00'
- if ber_indef and isinstance(value, Constructable):
- value._indefinite = True
- value.method = method
-
- if not header_set:
- value._header = header
- value._trailer = trailer or b''
-
- # Destroy any default value that our contents have overwritten
- value._native = None
-
- if nested_spec:
- try:
- value.parse(nested_spec)
- except (ValueError, TypeError) as e:
- args = e.args[1:]
- e.args = (e.args[0] + '\n while parsing %s' % type_name(value),) + args
- raise e
-
- return value
-
-
-def _parse_build(encoded_data, pointer=0, spec=None, spec_params=None, strict=False):
- """
- Parses a byte string generically, or using a spec with optional params
-
- :param encoded_data:
- A byte string that contains BER-encoded data
-
- :param pointer:
- The index in the byte string to parse from
-
- :param spec:
- A class derived from Asn1Value that defines what class_ and tag the
- value should have, and the semantics of the encoded value. The
- return value will be of this type. If omitted, the encoded value
- will be decoded using the standard universal tag based on the
- encoded tag number.
-
- :param spec_params:
- A dict of params to pass to the spec object
-
- :param strict:
- A boolean indicating if trailing data should be forbidden - if so, a
- ValueError will be raised when trailing data exists
-
- :return:
- A 2-element tuple:
- - 0: An object of the type spec, or if not specified, a child of Asn1Value
- - 1: An integer indicating how many bytes were consumed
- """
-
- encoded_len = len(encoded_data)
- info, new_pointer = _parse(encoded_data, encoded_len, pointer)
- if strict and new_pointer != pointer + encoded_len:
- extra_bytes = pointer + encoded_len - new_pointer
- raise ValueError('Extra data - %d bytes of trailing data were provided' % extra_bytes)
- return (_build(*info, spec=spec, spec_params=spec_params), new_pointer)
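-
-
-# Illustrative usage sketch (not part of the original module; der_bytes is a
-# placeholder for caller-supplied data): this machinery is normally reached
-# through the public API, e.g.
-#
-#     from asn1crypto.core import Sequence
-#     value = Sequence.load(der_bytes)  # parse BER/DER bytes generically
-#     print(value.native)               # OrderedDict of child values
-#     encoded = value.dump()            # re-serialize the value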
diff --git a/functions/source/CreateSSHKey/asn1crypto/crl.py b/functions/source/CreateSSHKey/asn1crypto/crl.py
deleted file mode 100644
index 84cb168..0000000
--- a/functions/source/CreateSSHKey/asn1crypto/crl.py
+++ /dev/null
@@ -1,536 +0,0 @@
-# coding: utf-8
-
-"""
-ASN.1 type classes for certificate revocation lists (CRL). Exports the
-following items:
-
- - CertificateList()
-
-Other type classes are defined that help compose the types listed above.
-"""
-
-from __future__ import unicode_literals, division, absolute_import, print_function
-
-import hashlib
-
-from .algos import SignedDigestAlgorithm
-from .core import (
- Boolean,
- Enumerated,
- GeneralizedTime,
- Integer,
- ObjectIdentifier,
- OctetBitString,
- ParsableOctetString,
- Sequence,
- SequenceOf,
-)
-from .x509 import (
- AuthorityInfoAccessSyntax,
- AuthorityKeyIdentifier,
- CRLDistributionPoints,
- DistributionPointName,
- GeneralNames,
- Name,
- ReasonFlags,
- Time,
-)
-
-
-# The structures in this file are taken from https://tools.ietf.org/html/rfc5280
-
-
-class Version(Integer):
- _map = {
- 0: 'v1',
- 1: 'v2',
- 2: 'v3',
- }
-
-
-class IssuingDistributionPoint(Sequence):
- _fields = [
- ('distribution_point', DistributionPointName, {'explicit': 0, 'optional': True}),
- ('only_contains_user_certs', Boolean, {'implicit': 1, 'default': False}),
- ('only_contains_ca_certs', Boolean, {'implicit': 2, 'default': False}),
- ('only_some_reasons', ReasonFlags, {'implicit': 3, 'optional': True}),
- ('indirect_crl', Boolean, {'implicit': 4, 'default': False}),
- ('only_contains_attribute_certs', Boolean, {'implicit': 5, 'default': False}),
- ]
-
-
-class TBSCertListExtensionId(ObjectIdentifier):
- _map = {
- '2.5.29.18': 'issuer_alt_name',
- '2.5.29.20': 'crl_number',
- '2.5.29.27': 'delta_crl_indicator',
- '2.5.29.28': 'issuing_distribution_point',
- '2.5.29.35': 'authority_key_identifier',
- '2.5.29.46': 'freshest_crl',
- '1.3.6.1.5.5.7.1.1': 'authority_information_access',
- }
-
-
-class TBSCertListExtension(Sequence):
- _fields = [
- ('extn_id', TBSCertListExtensionId),
- ('critical', Boolean, {'default': False}),
- ('extn_value', ParsableOctetString),
- ]
-
- _oid_pair = ('extn_id', 'extn_value')
- _oid_specs = {
- 'issuer_alt_name': GeneralNames,
- 'crl_number': Integer,
- 'delta_crl_indicator': Integer,
- 'issuing_distribution_point': IssuingDistributionPoint,
- 'authority_key_identifier': AuthorityKeyIdentifier,
- 'freshest_crl': CRLDistributionPoints,
- 'authority_information_access': AuthorityInfoAccessSyntax,
- }
-
-
-class TBSCertListExtensions(SequenceOf):
- _child_spec = TBSCertListExtension
-
-
-class CRLReason(Enumerated):
- _map = {
- 0: 'unspecified',
- 1: 'key_compromise',
- 2: 'ca_compromise',
- 3: 'affiliation_changed',
- 4: 'superseded',
- 5: 'cessation_of_operation',
- 6: 'certificate_hold',
- 8: 'remove_from_crl',
- 9: 'privilege_withdrawn',
- 10: 'aa_compromise',
- }
-
- @property
- def human_friendly(self):
- """
- :return:
- A unicode string with revocation description that is suitable to
- show to end-users. Starts with a lower case letter and phrased in
- such a way that it makes sense after the phrase "because of" or
- "due to".
- """
-
- return {
- 'unspecified': 'an unspecified reason',
- 'key_compromise': 'a compromised key',
- 'ca_compromise': 'the CA being compromised',
- 'affiliation_changed': 'an affiliation change',
- 'superseded': 'certificate supersession',
- 'cessation_of_operation': 'a cessation of operation',
- 'certificate_hold': 'a certificate hold',
- 'remove_from_crl': 'removal from the CRL',
- 'privilege_withdrawn': 'privilege withdrawal',
- 'aa_compromise': 'the AA being compromised',
- }[self.native]
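- # Illustrative example (value assumed): CRLReason('key_compromise').native
- # is 'key_compromise' and .human_friendly is 'a compromised key'.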
-
-
-class CRLEntryExtensionId(ObjectIdentifier):
- _map = {
- '2.5.29.21': 'crl_reason',
- '2.5.29.23': 'hold_instruction_code',
- '2.5.29.24': 'invalidity_date',
- '2.5.29.29': 'certificate_issuer',
- }
-
-
-class CRLEntryExtension(Sequence):
- _fields = [
- ('extn_id', CRLEntryExtensionId),
- ('critical', Boolean, {'default': False}),
- ('extn_value', ParsableOctetString),
- ]
-
- _oid_pair = ('extn_id', 'extn_value')
- _oid_specs = {
- 'crl_reason': CRLReason,
- 'hold_instruction_code': ObjectIdentifier,
- 'invalidity_date': GeneralizedTime,
- 'certificate_issuer': GeneralNames,
- }
-
-
-class CRLEntryExtensions(SequenceOf):
- _child_spec = CRLEntryExtension
-
-
-class RevokedCertificate(Sequence):
- _fields = [
- ('user_certificate', Integer),
- ('revocation_date', Time),
- ('crl_entry_extensions', CRLEntryExtensions, {'optional': True}),
- ]
-
- _processed_extensions = False
- _critical_extensions = None
- _crl_reason_value = None
- _invalidity_date_value = None
- _certificate_issuer_value = None
- _issuer_name = False
-
- def _set_extensions(self):
- """
- Sets common named extensions to private attributes and creates a set
- of critical extensions
- """
-
- self._critical_extensions = set()
-
- for extension in self['crl_entry_extensions']:
- name = extension['extn_id'].native
- attribute_name = '_%s_value' % name
- if hasattr(self, attribute_name):
- setattr(self, attribute_name, extension['extn_value'].parsed)
- if extension['critical'].native:
- self._critical_extensions.add(name)
-
- self._processed_extensions = True
-
- @property
- def critical_extensions(self):
- """
- Returns a set of the names (or OID if not a known extension) of the
- extensions marked as critical
-
- :return:
- A set of unicode strings
- """
-
- if not self._processed_extensions:
- self._set_extensions()
- return self._critical_extensions
-
- @property
- def crl_reason_value(self):
- """
- This extension indicates the reason that a certificate was revoked.
-
- :return:
- None or a CRLReason object
- """
-
- if self._processed_extensions is False:
- self._set_extensions()
- return self._crl_reason_value
-
- @property
- def invalidity_date_value(self):
- """
- This extension indicates the suspected date/time the private key was
- compromised or the certificate became invalid. This would usually be
- before the revocation date, which is when the CA processed the
- revocation.
-
- :return:
- None or a GeneralizedTime object
- """
-
- if self._processed_extensions is False:
- self._set_extensions()
- return self._invalidity_date_value
-
- @property
- def certificate_issuer_value(self):
- """
- This extension indicates the issuer of the certificate in question,
- and is used in indirect CRLs. CRL entries without this extension are
- for certificates issued from the last seen issuer.
-
- :return:
- None or an x509.GeneralNames object
- """
-
- if self._processed_extensions is False:
- self._set_extensions()
- return self._certificate_issuer_value
-
- @property
- def issuer_name(self):
- """
- :return:
- None, or an asn1crypto.x509.Name object for the issuer of the cert
- """
-
- if self._issuer_name is False:
- self._issuer_name = None
- if self.certificate_issuer_value:
- for general_name in self.certificate_issuer_value:
- if general_name.name == 'directory_name':
- self._issuer_name = general_name.chosen
- break
- return self._issuer_name
-
-
-class RevokedCertificates(SequenceOf):
- _child_spec = RevokedCertificate
-
-
-class TbsCertList(Sequence):
- _fields = [
- ('version', Version, {'optional': True}),
- ('signature', SignedDigestAlgorithm),
- ('issuer', Name),
- ('this_update', Time),
- ('next_update', Time, {'optional': True}),
- ('revoked_certificates', RevokedCertificates, {'optional': True}),
- ('crl_extensions', TBSCertListExtensions, {'explicit': 0, 'optional': True}),
- ]
-
-
-class CertificateList(Sequence):
- _fields = [
- ('tbs_cert_list', TbsCertList),
- ('signature_algorithm', SignedDigestAlgorithm),
- ('signature', OctetBitString),
- ]
-
- _processed_extensions = False
- _critical_extensions = None
- _issuer_alt_name_value = None
- _crl_number_value = None
- _delta_crl_indicator_value = None
- _issuing_distribution_point_value = None
- _authority_key_identifier_value = None
- _freshest_crl_value = None
- _authority_information_access_value = None
- _issuer_cert_urls = None
- _delta_crl_distribution_points = None
- _sha1 = None
- _sha256 = None
-
- def _set_extensions(self):
- """
- Sets common named extensions to private attributes and creates a set
- of critical extensions
- """
-
- self._critical_extensions = set()
-
- for extension in self['tbs_cert_list']['crl_extensions']:
- name = extension['extn_id'].native
- attribute_name = '_%s_value' % name
- if hasattr(self, attribute_name):
- setattr(self, attribute_name, extension['extn_value'].parsed)
- if extension['critical'].native:
- self._critical_extensions.add(name)
-
- self._processed_extensions = True
-
- @property
- def critical_extensions(self):
- """
- Returns a set of the names (or OID if not a known extension) of the
- extensions marked as critical
-
- :return:
- A set of unicode strings
- """
-
- if not self._processed_extensions:
- self._set_extensions()
- return self._critical_extensions
-
- @property
- def issuer_alt_name_value(self):
- """
- This extension allows associating one or more alternative names with
- the issuer of the CRL.
-
- :return:
- None or an x509.GeneralNames object
- """
-
- if self._processed_extensions is False:
- self._set_extensions()
- return self._issuer_alt_name_value
-
- @property
- def crl_number_value(self):
- """
- This extension adds a monotonically increasing number to the CRL and is
- used to distinguish different versions of the CRL.
-
- :return:
- None or an Integer object
- """
-
- if self._processed_extensions is False:
- self._set_extensions()
- return self._crl_number_value
-
- @property
- def delta_crl_indicator_value(self):
- """
- This extension indicates a CRL is a delta CRL, and contains the CRL
- number of the base CRL that it is a delta from.
-
- :return:
- None or an Integer object
- """
-
- if self._processed_extensions is False:
- self._set_extensions()
- return self._delta_crl_indicator_value
-
- @property
- def issuing_distribution_point_value(self):
- """
- This extension includes information about what types of revocations
- and certificates are part of the CRL.
-
- :return:
- None or an IssuingDistributionPoint object
- """
-
- if self._processed_extensions is False:
- self._set_extensions()
- return self._issuing_distribution_point_value
-
- @property
- def authority_key_identifier_value(self):
- """
- This extension helps in identifying the public key with which to
- validate the authenticity of the CRL.
-
- :return:
- None or an AuthorityKeyIdentifier object
- """
-
- if self._processed_extensions is False:
- self._set_extensions()
- return self._authority_key_identifier_value
-
- @property
- def freshest_crl_value(self):
- """
- This extension is used in complete CRLs to indicate where a delta CRL
- may be located.
-
- :return:
- None or a CRLDistributionPoints object
- """
-
- if self._processed_extensions is False:
- self._set_extensions()
- return self._freshest_crl_value
-
- @property
- def authority_information_access_value(self):
- """
- This extension is used to provide a URL with which to download the
- certificate used to sign this CRL.
-
- :return:
- None or an AuthorityInfoAccessSyntax object
- """
-
- if self._processed_extensions is False:
- self._set_extensions()
- return self._authority_information_access_value
-
- @property
- def issuer(self):
- """
- :return:
- An asn1crypto.x509.Name object for the issuer of the CRL
- """
-
- return self['tbs_cert_list']['issuer']
-
- @property
- def authority_key_identifier(self):
- """
- :return:
- None or a byte string of the key_identifier from the authority key
- identifier extension
- """
-
- if not self.authority_key_identifier_value:
- return None
-
- return self.authority_key_identifier_value['key_identifier'].native
-
- @property
- def issuer_cert_urls(self):
- """
- :return:
- A list of unicode strings that are URLs that should contain either
- an individual DER-encoded X.509 certificate, or a DER-encoded CMS
- message containing multiple certificates
- """
-
- if self._issuer_cert_urls is None:
- self._issuer_cert_urls = []
- if self.authority_information_access_value:
- for entry in self.authority_information_access_value:
- if entry['access_method'].native == 'ca_issuers':
- location = entry['access_location']
- if location.name != 'uniform_resource_identifier':
- continue
- url = location.native
- if url.lower()[0:7] == 'http://':
- self._issuer_cert_urls.append(url)
- return self._issuer_cert_urls
-
- @property
- def delta_crl_distribution_points(self):
- """
- Returns delta CRL URLs - only applies to complete CRLs
-
- :return:
- A list of zero or more DistributionPoint objects
- """
-
- if self._delta_crl_distribution_points is None:
- self._delta_crl_distribution_points = []
-
- if self.freshest_crl_value is not None:
- for distribution_point in self.freshest_crl_value:
- distribution_point_name = distribution_point['distribution_point']
- # RFC 5280 indicates conforming CA should not use the relative form
- if distribution_point_name.name == 'name_relative_to_crl_issuer':
- continue
- # This library is currently only concerned with HTTP-based CRLs
- for general_name in distribution_point_name.chosen:
- if general_name.name == 'uniform_resource_identifier':
- self._delta_crl_distribution_points.append(distribution_point)
-
- return self._delta_crl_distribution_points
-
- @property
- def signature(self):
- """
- :return:
- A byte string of the signature
- """
-
- return self['signature'].native
-
- @property
- def sha1(self):
- """
- :return:
- The SHA1 hash of the DER-encoded bytes of this certificate list
- """
-
- if self._sha1 is None:
- self._sha1 = hashlib.sha1(self.dump()).digest()
- return self._sha1
-
- @property
- def sha256(self):
- """
- :return:
- The SHA-256 hash of the DER-encoded bytes of this certificate list
- """
-
- if self._sha256 is None:
- self._sha256 = hashlib.sha256(self.dump()).digest()
- return self._sha256
diff --git a/functions/source/CreateSSHKey/asn1crypto/csr.py b/functions/source/CreateSSHKey/asn1crypto/csr.py
deleted file mode 100644
index 7ea2848..0000000
--- a/functions/source/CreateSSHKey/asn1crypto/csr.py
+++ /dev/null
@@ -1,96 +0,0 @@
-# coding: utf-8
-
-"""
-ASN.1 type classes for certificate signing requests (CSR). Exports the
-following items:
-
- - CertificationRequest()
-
-Other type classes are defined that help compose the types listed above.
-"""
-
-from __future__ import unicode_literals, division, absolute_import, print_function
-
-from .algos import SignedDigestAlgorithm
-from .core import (
- Any,
- Integer,
- ObjectIdentifier,
- OctetBitString,
- Sequence,
- SetOf,
-)
-from .keys import PublicKeyInfo
-from .x509 import DirectoryString, Extensions, Name
-
-
-# The structures in this file are taken from https://tools.ietf.org/html/rfc2986
-# and https://tools.ietf.org/html/rfc2985
-
-
-class Version(Integer):
- _map = {
- 0: 'v1',
- }
-
-
-class CSRAttributeType(ObjectIdentifier):
- _map = {
- '1.2.840.113549.1.9.7': 'challenge_password',
- '1.2.840.113549.1.9.9': 'extended_certificate_attributes',
- '1.2.840.113549.1.9.14': 'extension_request',
- }
-
-
-class SetOfDirectoryString(SetOf):
- _child_spec = DirectoryString
-
-
-class Attribute(Sequence):
- _fields = [
- ('type', ObjectIdentifier),
- ('values', SetOf, {'spec': Any}),
- ]
-
-
-class SetOfAttributes(SetOf):
- _child_spec = Attribute
-
-
-class SetOfExtensions(SetOf):
- _child_spec = Extensions
-
-
-class CRIAttribute(Sequence):
- _fields = [
- ('type', CSRAttributeType),
- ('values', Any),
- ]
-
- _oid_pair = ('type', 'values')
- _oid_specs = {
- 'challenge_password': SetOfDirectoryString,
- 'extended_certificate_attributes': SetOfAttributes,
- 'extension_request': SetOfExtensions,
- }
-
-
-class CRIAttributes(SetOf):
- _child_spec = CRIAttribute
-
-
-class CertificationRequestInfo(Sequence):
- _fields = [
- ('version', Version),
- ('subject', Name),
- ('subject_pk_info', PublicKeyInfo),
- ('attributes', CRIAttributes, {'implicit': 0, 'optional': True}),
- ]
-
-
-class CertificationRequest(Sequence):
- _fields = [
- ('certification_request_info', CertificationRequestInfo),
- ('signature_algorithm', SignedDigestAlgorithm),
- ('signature', OctetBitString),
- ]
diff --git a/functions/source/CreateSSHKey/asn1crypto/keys.py b/functions/source/CreateSSHKey/asn1crypto/keys.py
deleted file mode 100644
index 3c4870d..0000000
--- a/functions/source/CreateSSHKey/asn1crypto/keys.py
+++ /dev/null
@@ -1,1245 +0,0 @@
-# coding: utf-8
-
-"""
-ASN.1 type classes for public and private keys. Exports the following items:
-
- - DSAPrivateKey()
- - ECPrivateKey()
- - EncryptedPrivateKeyInfo()
- - PrivateKeyInfo()
- - PublicKeyInfo()
- - RSAPrivateKey()
- - RSAPublicKey()
-
-Other type classes are defined that help compose the types listed above.
-"""
-
-from __future__ import unicode_literals, division, absolute_import, print_function
-
-import hashlib
-import math
-
-from ._elliptic_curve import (
- SECP192R1_BASE_POINT,
- SECP224R1_BASE_POINT,
- SECP256R1_BASE_POINT,
- SECP384R1_BASE_POINT,
- SECP521R1_BASE_POINT,
- PrimeCurve,
- PrimePoint,
-)
-from ._errors import unwrap
-from ._types import type_name, str_cls, byte_cls
-from .algos import _ForceNullParameters, DigestAlgorithm, EncryptionAlgorithm
-from .core import (
- Any,
- Asn1Value,
- BitString,
- Choice,
- Integer,
- IntegerOctetString,
- Null,
- ObjectIdentifier,
- OctetBitString,
- OctetString,
- ParsableOctetString,
- ParsableOctetBitString,
- Sequence,
- SequenceOf,
- SetOf,
-)
-from .util import int_from_bytes, int_to_bytes
-
-
-class OtherPrimeInfo(Sequence):
- """
- Source: https://tools.ietf.org/html/rfc3447#page-46
- """
-
- _fields = [
- ('prime', Integer),
- ('exponent', Integer),
- ('coefficient', Integer),
- ]
-
-
-class OtherPrimeInfos(SequenceOf):
- """
- Source: https://tools.ietf.org/html/rfc3447#page-46
- """
-
- _child_spec = OtherPrimeInfo
-
-
-class RSAPrivateKeyVersion(Integer):
- """
- Original Name: Version
- Source: https://tools.ietf.org/html/rfc3447#page-45
- """
-
- _map = {
- 0: 'two-prime',
- 1: 'multi',
- }
-
-
-class RSAPrivateKey(Sequence):
- """
- Source: https://tools.ietf.org/html/rfc3447#page-45
- """
-
- _fields = [
- ('version', RSAPrivateKeyVersion),
- ('modulus', Integer),
- ('public_exponent', Integer),
- ('private_exponent', Integer),
- ('prime1', Integer),
- ('prime2', Integer),
- ('exponent1', Integer),
- ('exponent2', Integer),
- ('coefficient', Integer),
- ('other_prime_infos', OtherPrimeInfos, {'optional': True})
- ]
-
-
-class RSAPublicKey(Sequence):
- """
- Source: https://tools.ietf.org/html/rfc3447#page-44
- """
-
- _fields = [
- ('modulus', Integer),
- ('public_exponent', Integer)
- ]
-
-
-class DSAPrivateKey(Sequence):
- """
- The ASN.1 structure that OpenSSL uses to store a DSA private key that is
-    not part of a PKCS#8 structure. Reverse engineered from the
-    English-language description on the linked OpenSSL documentation page.
-
- Original Name: None
- Source: https://www.openssl.org/docs/apps/dsa.html
- """
-
- _fields = [
- ('version', Integer),
- ('p', Integer),
- ('q', Integer),
- ('g', Integer),
- ('public_key', Integer),
- ('private_key', Integer),
- ]
-
-
-class _ECPoint():
- """
- In both PublicKeyInfo and PrivateKeyInfo, the EC public key is a byte
- string that is encoded as a bit string. This class adds convenience
- methods for converting to and from the byte string to a pair of integers
- that are the X and Y coordinates.
- """
-
- @classmethod
- def from_coords(cls, x, y):
- """
- Creates an ECPoint object from the X and Y integer coordinates of the
- point
-
- :param x:
- The X coordinate, as an integer
-
- :param y:
- The Y coordinate, as an integer
-
- :return:
- An ECPoint object
- """
-
- x_bytes = int(math.ceil(math.log(x, 2) / 8.0))
- y_bytes = int(math.ceil(math.log(y, 2) / 8.0))
-
- num_bytes = max(x_bytes, y_bytes)
-
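-        # SEC 1 uncompressed point encoding: a 0x04 prefix followed by the X
-        # and Y coordinates, each left-padded to the same byte width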
- byte_string = b'\x04'
- byte_string += int_to_bytes(x, width=num_bytes)
- byte_string += int_to_bytes(y, width=num_bytes)
-
- return cls(byte_string)
-
- def to_coords(self):
- """
- Returns the X and Y coordinates for this EC point, as native Python
- integers
-
- :return:
- A 2-element tuple containing integers (X, Y)
- """
-
- data = self.native
- first_byte = data[0:1]
-
- # Uncompressed
- if first_byte == b'\x04':
- remaining = data[1:]
- field_len = len(remaining) // 2
- x = int_from_bytes(remaining[0:field_len])
- y = int_from_bytes(remaining[field_len:])
- return (x, y)
-
- if first_byte not in set([b'\x02', b'\x03']):
- raise ValueError(unwrap(
- '''
- Invalid EC public key - first byte is incorrect
- '''
- ))
-
- raise ValueError(unwrap(
- '''
- Compressed representations of EC public keys are not supported due
- to patent US6252960
- '''
- ))
-
-
-class ECPoint(OctetString, _ECPoint):
-
- pass
-
-
-class ECPointBitString(OctetBitString, _ECPoint):
-
- pass
-
-
-class SpecifiedECDomainVersion(Integer):
- """
- Source: http://www.secg.org/sec1-v2.pdf page 104
- """
- _map = {
- 1: 'ecdpVer1',
- 2: 'ecdpVer2',
- 3: 'ecdpVer3',
- }
-
-
-class FieldType(ObjectIdentifier):
- """
- Original Name: None
- Source: http://www.secg.org/sec1-v2.pdf page 101
- """
-
- _map = {
- '1.2.840.10045.1.1': 'prime_field',
- '1.2.840.10045.1.2': 'characteristic_two_field',
- }
-
-
-class CharacteristicTwoBasis(ObjectIdentifier):
- """
- Original Name: None
- Source: http://www.secg.org/sec1-v2.pdf page 102
- """
-
- _map = {
- '1.2.840.10045.1.2.1.1': 'gn_basis',
- '1.2.840.10045.1.2.1.2': 'tp_basis',
- '1.2.840.10045.1.2.1.3': 'pp_basis',
- }
-
-
-class Pentanomial(Sequence):
- """
- Source: http://www.secg.org/sec1-v2.pdf page 102
- """
-
- _fields = [
- ('k1', Integer),
- ('k2', Integer),
- ('k3', Integer),
- ]
-
-
-class CharacteristicTwo(Sequence):
- """
- Original Name: Characteristic-two
- Source: http://www.secg.org/sec1-v2.pdf page 101
- """
-
- _fields = [
- ('m', Integer),
- ('basis', CharacteristicTwoBasis),
- ('parameters', Any),
- ]
-
- _oid_pair = ('basis', 'parameters')
- _oid_specs = {
- 'gn_basis': Null,
- 'tp_basis': Integer,
- 'pp_basis': Pentanomial,
- }
-
-
-class FieldID(Sequence):
- """
- Source: http://www.secg.org/sec1-v2.pdf page 100
- """
-
- _fields = [
- ('field_type', FieldType),
- ('parameters', Any),
- ]
-
- _oid_pair = ('field_type', 'parameters')
- _oid_specs = {
- 'prime_field': Integer,
- 'characteristic_two_field': CharacteristicTwo,
- }
-
-
-class Curve(Sequence):
- """
- Source: http://www.secg.org/sec1-v2.pdf page 104
- """
-
- _fields = [
- ('a', OctetString),
- ('b', OctetString),
- ('seed', OctetBitString, {'optional': True}),
- ]
-
-
-class SpecifiedECDomain(Sequence):
- """
- Source: http://www.secg.org/sec1-v2.pdf page 103
- """
-
- _fields = [
- ('version', SpecifiedECDomainVersion),
- ('field_id', FieldID),
- ('curve', Curve),
- ('base', ECPoint),
- ('order', Integer),
- ('cofactor', Integer, {'optional': True}),
- ('hash', DigestAlgorithm, {'optional': True}),
- ]
-
-
-class NamedCurve(ObjectIdentifier):
- """
- Various named curves
-
- Original Name: None
- Source: https://tools.ietf.org/html/rfc3279#page-23,
- https://tools.ietf.org/html/rfc5480#page-5
- """
-
- _map = {
- # https://tools.ietf.org/html/rfc3279#page-23
- '1.2.840.10045.3.0.1': 'c2pnb163v1',
- '1.2.840.10045.3.0.2': 'c2pnb163v2',
- '1.2.840.10045.3.0.3': 'c2pnb163v3',
- '1.2.840.10045.3.0.4': 'c2pnb176w1',
- '1.2.840.10045.3.0.5': 'c2tnb191v1',
- '1.2.840.10045.3.0.6': 'c2tnb191v2',
- '1.2.840.10045.3.0.7': 'c2tnb191v3',
- '1.2.840.10045.3.0.8': 'c2onb191v4',
- '1.2.840.10045.3.0.9': 'c2onb191v5',
- '1.2.840.10045.3.0.10': 'c2pnb208w1',
- '1.2.840.10045.3.0.11': 'c2tnb239v1',
- '1.2.840.10045.3.0.12': 'c2tnb239v2',
- '1.2.840.10045.3.0.13': 'c2tnb239v3',
- '1.2.840.10045.3.0.14': 'c2onb239v4',
- '1.2.840.10045.3.0.15': 'c2onb239v5',
- '1.2.840.10045.3.0.16': 'c2pnb272w1',
- '1.2.840.10045.3.0.17': 'c2pnb304w1',
- '1.2.840.10045.3.0.18': 'c2tnb359v1',
- '1.2.840.10045.3.0.19': 'c2pnb368w1',
- '1.2.840.10045.3.0.20': 'c2tnb431r1',
- '1.2.840.10045.3.1.2': 'prime192v2',
- '1.2.840.10045.3.1.3': 'prime192v3',
- '1.2.840.10045.3.1.4': 'prime239v1',
- '1.2.840.10045.3.1.5': 'prime239v2',
- '1.2.840.10045.3.1.6': 'prime239v3',
- # https://tools.ietf.org/html/rfc5480#page-5
- '1.3.132.0.1': 'sect163k1',
- '1.3.132.0.15': 'sect163r2',
- '1.2.840.10045.3.1.1': 'secp192r1',
- '1.3.132.0.33': 'secp224r1',
- '1.3.132.0.26': 'sect233k1',
- '1.2.840.10045.3.1.7': 'secp256r1',
- '1.3.132.0.27': 'sect233r1',
- '1.3.132.0.16': 'sect283k1',
- '1.3.132.0.17': 'sect283r1',
- '1.3.132.0.34': 'secp384r1',
- '1.3.132.0.36': 'sect409k1',
- '1.3.132.0.37': 'sect409r1',
- '1.3.132.0.35': 'secp521r1',
- '1.3.132.0.38': 'sect571k1',
- '1.3.132.0.39': 'sect571r1',
- }
-
-
-class ECDomainParameters(Choice):
- """
- Source: http://www.secg.org/sec1-v2.pdf page 102
- """
-
- _alternatives = [
- ('specified', SpecifiedECDomain),
- ('named', NamedCurve),
- ('implicit_ca', Null),
- ]
-
-
-class ECPrivateKeyVersion(Integer):
- """
- Original Name: None
- Source: http://www.secg.org/sec1-v2.pdf page 108
- """
-
- _map = {
- 1: 'ecPrivkeyVer1',
- }
-
-
-class ECPrivateKey(Sequence):
- """
- Source: http://www.secg.org/sec1-v2.pdf page 108
- """
-
- _fields = [
- ('version', ECPrivateKeyVersion),
- ('private_key', IntegerOctetString),
- ('parameters', ECDomainParameters, {'explicit': 0, 'optional': True}),
- ('public_key', ECPointBitString, {'explicit': 1, 'optional': True}),
- ]
-
-
-class DSAParams(Sequence):
- """
- Parameters for a DSA public or private key
-
- Original Name: Dss-Parms
- Source: https://tools.ietf.org/html/rfc3279#page-9
- """
-
- _fields = [
- ('p', Integer),
- ('q', Integer),
- ('g', Integer),
- ]
-
-
-class Attribute(Sequence):
- """
- Source: https://www.itu.int/rec/dologin_pub.asp?lang=e&id=T-REC-X.501-198811-S!!PDF-E&type=items page 8
- """
-
- _fields = [
- ('type', ObjectIdentifier),
- ('values', SetOf, {'spec': Any}),
- ]
-
-
-class Attributes(SetOf):
- """
- Source: https://tools.ietf.org/html/rfc5208#page-3
- """
-
- _child_spec = Attribute
-
-
-class PrivateKeyAlgorithmId(ObjectIdentifier):
- """
- These OIDs for various public keys are reused when storing private keys
- inside of a PKCS#8 structure
-
- Original Name: None
- Source: https://tools.ietf.org/html/rfc3279
- """
-
- _map = {
- # https://tools.ietf.org/html/rfc3279#page-19
- '1.2.840.113549.1.1.1': 'rsa',
- # https://tools.ietf.org/html/rfc3279#page-18
- '1.2.840.10040.4.1': 'dsa',
- # https://tools.ietf.org/html/rfc3279#page-13
- '1.2.840.10045.2.1': 'ec',
- }
-
-
-class PrivateKeyAlgorithm(_ForceNullParameters, Sequence):
- """
- Original Name: PrivateKeyAlgorithmIdentifier
- Source: https://tools.ietf.org/html/rfc5208#page-3
- """
-
- _fields = [
- ('algorithm', PrivateKeyAlgorithmId),
- ('parameters', Any, {'optional': True}),
- ]
-
- _oid_pair = ('algorithm', 'parameters')
- _oid_specs = {
- 'dsa': DSAParams,
- 'ec': ECDomainParameters,
- }
-
-
-class PrivateKeyInfo(Sequence):
- """
- Source: https://tools.ietf.org/html/rfc5208#page-3
- """
-
- _fields = [
- ('version', Integer),
- ('private_key_algorithm', PrivateKeyAlgorithm),
- ('private_key', ParsableOctetString),
- ('attributes', Attributes, {'implicit': 0, 'optional': True}),
- ]
-
- def _private_key_spec(self):
- algorithm = self['private_key_algorithm']['algorithm'].native
- return {
- 'rsa': RSAPrivateKey,
- 'dsa': Integer,
- 'ec': ECPrivateKey,
- }[algorithm]
-
- _spec_callbacks = {
- 'private_key': _private_key_spec
- }
-
- _algorithm = None
- _bit_size = None
- _public_key = None
- _fingerprint = None
-
- @classmethod
- def wrap(cls, private_key, algorithm):
- """
- Wraps a private key in a PrivateKeyInfo structure
-
- :param private_key:
- A byte string or Asn1Value object of the private key
-
- :param algorithm:
- A unicode string of "rsa", "dsa" or "ec"
-
- :return:
- A PrivateKeyInfo object
- """
-
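-        # Example usage (rsa_key_der being a hypothetical DER-encoded
-        # RSAPrivateKey byte string):
-        #   info = PrivateKeyInfo.wrap(rsa_key_der, 'rsa')
-        #   pkcs8_der = info.dump()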
- if not isinstance(private_key, byte_cls) and not isinstance(private_key, Asn1Value):
- raise TypeError(unwrap(
- '''
- private_key must be a byte string or Asn1Value, not %s
- ''',
- type_name(private_key)
- ))
-
- if algorithm == 'rsa':
- if not isinstance(private_key, RSAPrivateKey):
- private_key = RSAPrivateKey.load(private_key)
- params = Null()
- elif algorithm == 'dsa':
- if not isinstance(private_key, DSAPrivateKey):
- private_key = DSAPrivateKey.load(private_key)
- params = DSAParams()
- params['p'] = private_key['p']
- params['q'] = private_key['q']
- params['g'] = private_key['g']
- public_key = private_key['public_key']
- private_key = private_key['private_key']
- elif algorithm == 'ec':
- if not isinstance(private_key, ECPrivateKey):
- private_key = ECPrivateKey.load(private_key)
- else:
- private_key = private_key.copy()
- params = private_key['parameters']
- del private_key['parameters']
- else:
- raise ValueError(unwrap(
- '''
- algorithm must be one of "rsa", "dsa", "ec", not %s
- ''',
- repr(algorithm)
- ))
-
- private_key_algo = PrivateKeyAlgorithm()
- private_key_algo['algorithm'] = PrivateKeyAlgorithmId(algorithm)
- private_key_algo['parameters'] = params
-
- container = cls()
- container._algorithm = algorithm
- container['version'] = Integer(0)
- container['private_key_algorithm'] = private_key_algo
- container['private_key'] = private_key
-
- # Here we save the DSA public key if possible since it is not contained
- # within the PKCS#8 structure for a DSA key
- if algorithm == 'dsa':
- container._public_key = public_key
-
- return container
-
- def _compute_public_key(self):
- """
- Computes the public key corresponding to the current private key.
-
- :return:
- For RSA keys, an RSAPublicKey object. For DSA keys, an Integer
- object. For EC keys, an ECPointBitString.
- """
-
- if self.algorithm == 'dsa':
- params = self['private_key_algorithm']['parameters']
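-            # The DSA public key is y = g^x mod p, where x is the private key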
- return Integer(pow(
- params['g'].native,
- self['private_key'].parsed.native,
- params['p'].native
- ))
-
- if self.algorithm == 'rsa':
- key = self['private_key'].parsed
- return RSAPublicKey({
- 'modulus': key['modulus'],
- 'public_exponent': key['public_exponent'],
- })
-
- if self.algorithm == 'ec':
- curve_type, details = self.curve
-
- if curve_type == 'implicit_ca':
- raise ValueError(unwrap(
- '''
- Unable to compute public key for EC key using Implicit CA
- parameters
- '''
- ))
-
- if curve_type == 'specified':
- if details['field_id']['field_type'] == 'characteristic_two_field':
- raise ValueError(unwrap(
- '''
- Unable to compute public key for EC key over a
- characteristic two field
- '''
- ))
-
- curve = PrimeCurve(
- details['field_id']['parameters'],
- int_from_bytes(details['curve']['a']),
- int_from_bytes(details['curve']['b'])
- )
- base_x, base_y = self['private_key_algorithm']['parameters'].chosen['base'].to_coords()
- base_point = PrimePoint(curve, base_x, base_y)
-
- elif curve_type == 'named':
- if details not in ('secp192r1', 'secp224r1', 'secp256r1', 'secp384r1', 'secp521r1'):
- raise ValueError(unwrap(
- '''
- Unable to compute public key for EC named curve %s,
- parameters not currently included
- ''',
- details
- ))
-
- base_point = {
- 'secp192r1': SECP192R1_BASE_POINT,
- 'secp224r1': SECP224R1_BASE_POINT,
- 'secp256r1': SECP256R1_BASE_POINT,
- 'secp384r1': SECP384R1_BASE_POINT,
- 'secp521r1': SECP521R1_BASE_POINT,
- }[details]
-
- public_point = base_point * self['private_key'].parsed['private_key'].native
- return ECPointBitString.from_coords(public_point.x, public_point.y)
-
- def unwrap(self):
- """
- Unwraps the private key into an RSAPrivateKey, DSAPrivateKey or
- ECPrivateKey object
-
- :return:
- An RSAPrivateKey, DSAPrivateKey or ECPrivateKey object
- """
-
- if self.algorithm == 'rsa':
- return self['private_key'].parsed
-
- if self.algorithm == 'dsa':
- params = self['private_key_algorithm']['parameters']
- return DSAPrivateKey({
- 'version': 0,
- 'p': params['p'],
- 'q': params['q'],
- 'g': params['g'],
- 'public_key': self.public_key,
- 'private_key': self['private_key'].parsed,
- })
-
- if self.algorithm == 'ec':
- output = self['private_key'].parsed
- output['parameters'] = self['private_key_algorithm']['parameters']
- output['public_key'] = self.public_key
- return output
-
- @property
- def curve(self):
- """
- Returns information about the curve used for an EC key
-
- :raises:
- ValueError - when the key is not an EC key
-
- :return:
- A two-element tuple, with the first element being a unicode string
- of "implicit_ca", "specified" or "named". If the first element is
- "implicit_ca", the second is None. If "specified", the second is
- an OrderedDict that is the native version of SpecifiedECDomain. If
- "named", the second is a unicode string of the curve name.
- """
-
- if self.algorithm != 'ec':
- raise ValueError(unwrap(
- '''
- Only EC keys have a curve, this key is %s
- ''',
- self.algorithm.upper()
- ))
-
- params = self['private_key_algorithm']['parameters']
- chosen = params.chosen
-
- if params.name == 'implicit_ca':
- value = None
- else:
- value = chosen.native
-
- return (params.name, value)
-
- @property
- def hash_algo(self):
- """
- Returns the name of the family of hash algorithms used to generate a
- DSA key
-
- :raises:
- ValueError - when the key is not a DSA key
-
- :return:
- A unicode string of "sha1" or "sha2"
- """
-
- if self.algorithm != 'dsa':
- raise ValueError(unwrap(
- '''
- Only DSA keys are generated using a hash algorithm, this key is
- %s
- ''',
- self.algorithm.upper()
- ))
-
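-        # A 160-bit q (20 bytes) corresponds to the original SHA-1-based DSA
-        # parameters; larger q sizes imply the SHA-2 family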
- byte_len = math.log(self['private_key_algorithm']['parameters']['q'].native, 2) / 8
-
- return 'sha1' if byte_len <= 20 else 'sha2'
-
- @property
- def algorithm(self):
- """
- :return:
- A unicode string of "rsa", "dsa" or "ec"
- """
-
- if self._algorithm is None:
- self._algorithm = self['private_key_algorithm']['algorithm'].native
- return self._algorithm
-
- @property
- def bit_size(self):
- """
- :return:
- The bit size of the private key, as an integer
- """
-
- if self._bit_size is None:
- if self.algorithm == 'rsa':
- prime = self['private_key'].parsed['modulus'].native
- elif self.algorithm == 'dsa':
- prime = self['private_key_algorithm']['parameters']['p'].native
- elif self.algorithm == 'ec':
- prime = self['private_key'].parsed['private_key'].native
- self._bit_size = int(math.ceil(math.log(prime, 2)))
- modulus = self._bit_size % 8
- if modulus != 0:
- self._bit_size += 8 - modulus
- return self._bit_size
-
- @property
- def byte_size(self):
- """
- :return:
- The byte size of the private key, as an integer
- """
-
- return int(math.ceil(self.bit_size / 8))
-
- @property
- def public_key(self):
- """
- :return:
- If an RSA key, an RSAPublicKey object. If a DSA key, an Integer
- object. If an EC key, an ECPointBitString object.
- """
-
- if self._public_key is None:
- if self.algorithm == 'ec':
- key = self['private_key'].parsed
- if key['public_key']:
- self._public_key = key['public_key'].untag()
- else:
- self._public_key = self._compute_public_key()
- else:
- self._public_key = self._compute_public_key()
-
- return self._public_key
-
- @property
- def public_key_info(self):
- """
- :return:
- A PublicKeyInfo object derived from this private key.
- """
-
- return PublicKeyInfo({
- 'algorithm': {
- 'algorithm': self.algorithm,
- 'parameters': self['private_key_algorithm']['parameters']
- },
- 'public_key': self.public_key
- })
-
- @property
- def fingerprint(self):
- """
- Creates a fingerprint that can be compared with a public key to see if
- the two form a pair.
-
- This fingerprint is not compatible with fingerprints generated by any
- other software.
-
- :return:
- A byte string that is a sha256 hash of selected components (based
- on the key type)
- """
-
- if self._fingerprint is None:
- params = self['private_key_algorithm']['parameters']
- key = self['private_key'].parsed
-
- if self.algorithm == 'rsa':
- to_hash = '%d:%d' % (
- key['modulus'].native,
- key['public_exponent'].native,
- )
-
- elif self.algorithm == 'dsa':
- public_key = self.public_key
- to_hash = '%d:%d:%d:%d' % (
- params['p'].native,
- params['q'].native,
- params['g'].native,
- public_key.native,
- )
-
- elif self.algorithm == 'ec':
- public_key = key['public_key'].native
- if public_key is None:
- public_key = self.public_key.native
-
- if params.name == 'named':
- to_hash = '%s:' % params.chosen.native
- to_hash = to_hash.encode('utf-8')
- to_hash += public_key
-
- elif params.name == 'implicit_ca':
- to_hash = public_key
-
- elif params.name == 'specified':
- to_hash = '%s:' % params.chosen['field_id']['parameters'].native
- to_hash = to_hash.encode('utf-8')
- to_hash += b':' + params.chosen['curve']['a'].native
- to_hash += b':' + params.chosen['curve']['b'].native
- to_hash += public_key
-
- if isinstance(to_hash, str_cls):
- to_hash = to_hash.encode('utf-8')
-
- self._fingerprint = hashlib.sha256(to_hash).digest()
-
- return self._fingerprint
-
-
-class EncryptedPrivateKeyInfo(Sequence):
- """
- Source: https://tools.ietf.org/html/rfc5208#page-4
- """
-
- _fields = [
- ('encryption_algorithm', EncryptionAlgorithm),
- ('encrypted_data', OctetString),
- ]
-
-
-# These structures are from https://tools.ietf.org/html/rfc3279
-
-class ValidationParms(Sequence):
- """
- Source: https://tools.ietf.org/html/rfc3279#page-10
- """
-
- _fields = [
- ('seed', BitString),
- ('pgen_counter', Integer),
- ]
-
-
-class DomainParameters(Sequence):
- """
- Source: https://tools.ietf.org/html/rfc3279#page-10
- """
-
- _fields = [
- ('p', Integer),
- ('g', Integer),
- ('q', Integer),
- ('j', Integer, {'optional': True}),
- ('validation_params', ValidationParms, {'optional': True}),
- ]
-
-
-class PublicKeyAlgorithmId(ObjectIdentifier):
- """
- Original Name: None
- Source: https://tools.ietf.org/html/rfc3279
- """
-
- _map = {
- # https://tools.ietf.org/html/rfc3279#page-19
- '1.2.840.113549.1.1.1': 'rsa',
- # https://tools.ietf.org/html/rfc3279#page-18
- '1.2.840.10040.4.1': 'dsa',
- # https://tools.ietf.org/html/rfc3279#page-13
- '1.2.840.10045.2.1': 'ec',
- # https://tools.ietf.org/html/rfc3279#page-10
- '1.2.840.10046.2.1': 'dh',
- }
-
-
-class PublicKeyAlgorithm(_ForceNullParameters, Sequence):
- """
- Original Name: AlgorithmIdentifier
- Source: https://tools.ietf.org/html/rfc5280#page-18
- """
-
- _fields = [
- ('algorithm', PublicKeyAlgorithmId),
- ('parameters', Any, {'optional': True}),
- ]
-
- _oid_pair = ('algorithm', 'parameters')
- _oid_specs = {
- 'dsa': DSAParams,
- 'ec': ECDomainParameters,
- 'dh': DomainParameters,
- }
-
-
-class PublicKeyInfo(Sequence):
- """
- Original Name: SubjectPublicKeyInfo
- Source: https://tools.ietf.org/html/rfc5280#page-17
- """
-
- _fields = [
- ('algorithm', PublicKeyAlgorithm),
- ('public_key', ParsableOctetBitString),
- ]
-
- def _public_key_spec(self):
- algorithm = self['algorithm']['algorithm'].native
- return {
- 'rsa': RSAPublicKey,
- 'dsa': Integer,
-            # We override the field spec with ECPointBitString so that users
-            # can easily decompose the byte string into the constituent X and Y coords
- 'ec': (ECPointBitString, None),
- 'dh': Integer,
- }[algorithm]
-
- _spec_callbacks = {
- 'public_key': _public_key_spec
- }
-
- _algorithm = None
- _bit_size = None
- _fingerprint = None
- _sha1 = None
- _sha256 = None
-
- @classmethod
- def wrap(cls, public_key, algorithm):
- """
- Wraps a public key in a PublicKeyInfo structure
-
- :param public_key:
- A byte string or Asn1Value object of the public key
-
- :param algorithm:
- A unicode string of "rsa"
-
- :return:
- A PublicKeyInfo object
- """
-
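-        # Example usage (rsa_pub_der being a hypothetical DER-encoded
-        # RSAPublicKey byte string):
-        #   info = PublicKeyInfo.wrap(rsa_pub_der, 'rsa')
-        #   spki_der = info.dump()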
- if not isinstance(public_key, byte_cls) and not isinstance(public_key, Asn1Value):
- raise TypeError(unwrap(
- '''
- public_key must be a byte string or Asn1Value, not %s
- ''',
- type_name(public_key)
- ))
-
- if algorithm != 'rsa':
- raise ValueError(unwrap(
- '''
-                algorithm must be "rsa", not %s
- ''',
- repr(algorithm)
- ))
-
- algo = PublicKeyAlgorithm()
- algo['algorithm'] = PublicKeyAlgorithmId(algorithm)
- algo['parameters'] = Null()
-
- container = cls()
- container['algorithm'] = algo
- if isinstance(public_key, Asn1Value):
- public_key = public_key.untag().dump()
- container['public_key'] = ParsableOctetBitString(public_key)
-
- return container
-
- def unwrap(self):
- """
- Unwraps an RSA public key into an RSAPublicKey object. Does not support
- DSA or EC public keys since they do not have an unwrapped form.
-
- :return:
- An RSAPublicKey object
- """
-
- if self.algorithm == 'rsa':
- return self['public_key'].parsed
-
- key_type = self.algorithm.upper()
- a_an = 'an' if key_type == 'EC' else 'a'
- raise ValueError(unwrap(
- '''
- Only RSA public keys may be unwrapped - this key is %s %s public
- key
- ''',
- a_an,
- key_type
- ))
-
- @property
- def curve(self):
- """
- Returns information about the curve used for an EC key
-
- :raises:
- ValueError - when the key is not an EC key
-
- :return:
- A two-element tuple, with the first element being a unicode string
- of "implicit_ca", "specified" or "named". If the first element is
- "implicit_ca", the second is None. If "specified", the second is
- an OrderedDict that is the native version of SpecifiedECDomain. If
- "named", the second is a unicode string of the curve name.
- """
-
- if self.algorithm != 'ec':
- raise ValueError(unwrap(
- '''
- Only EC keys have a curve, this key is %s
- ''',
- self.algorithm.upper()
- ))
-
- params = self['algorithm']['parameters']
- chosen = params.chosen
-
- if params.name == 'implicit_ca':
- value = None
- else:
- value = chosen.native
-
- return (params.name, value)
-
- @property
- def hash_algo(self):
- """
- Returns the name of the family of hash algorithms used to generate a
- DSA key
-
- :raises:
- ValueError - when the key is not a DSA key
-
- :return:
- A unicode string of "sha1" or "sha2" or None if no parameters are
- present
- """
-
- if self.algorithm != 'dsa':
- raise ValueError(unwrap(
- '''
- Only DSA keys are generated using a hash algorithm, this key is
- %s
- ''',
- self.algorithm.upper()
- ))
-
- parameters = self['algorithm']['parameters']
- if parameters.native is None:
- return None
-
- byte_len = math.log(parameters['q'].native, 2) / 8
-
- return 'sha1' if byte_len <= 20 else 'sha2'
-
- @property
- def algorithm(self):
- """
- :return:
- A unicode string of "rsa", "dsa" or "ec"
- """
-
- if self._algorithm is None:
- self._algorithm = self['algorithm']['algorithm'].native
- return self._algorithm
-
- @property
- def bit_size(self):
- """
- :return:
- The bit size of the public key, as an integer
- """
-
- if self._bit_size is None:
- if self.algorithm == 'ec':
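-                # Assumes an uncompressed point (0x04 + X + Y), so each
-                # coordinate occupies half of the remaining bytes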
-                self._bit_size = ((len(self['public_key'].native) - 1) // 2) * 8
- else:
- if self.algorithm == 'rsa':
- prime = self['public_key'].parsed['modulus'].native
- elif self.algorithm == 'dsa':
- prime = self['algorithm']['parameters']['p'].native
- self._bit_size = int(math.ceil(math.log(prime, 2)))
- modulus = self._bit_size % 8
- if modulus != 0:
- self._bit_size += 8 - modulus
-
- return self._bit_size
-
- @property
- def byte_size(self):
- """
- :return:
- The byte size of the public key, as an integer
- """
-
- return int(math.ceil(self.bit_size / 8))
-
- @property
- def sha1(self):
- """
- :return:
- The SHA1 hash of the DER-encoded bytes of this public key info
- """
-
- if self._sha1 is None:
- self._sha1 = hashlib.sha1(byte_cls(self['public_key'])).digest()
- return self._sha1
-
- @property
- def sha256(self):
- """
- :return:
- The SHA-256 hash of the DER-encoded bytes of this public key info
- """
-
- if self._sha256 is None:
- self._sha256 = hashlib.sha256(byte_cls(self['public_key'])).digest()
- return self._sha256
-
- @property
- def fingerprint(self):
- """
- Creates a fingerprint that can be compared with a private key to see if
- the two form a pair.
-
- This fingerprint is not compatible with fingerprints generated by any
- other software.
-
- :return:
- A byte string that is a sha256 hash of selected components (based
- on the key type)
- """
-
- if self._fingerprint is None:
- key_type = self['algorithm']['algorithm'].native
- params = self['algorithm']['parameters']
-
- if key_type == 'rsa':
- key = self['public_key'].parsed
- to_hash = '%d:%d' % (
- key['modulus'].native,
- key['public_exponent'].native,
- )
-
- elif key_type == 'dsa':
- key = self['public_key'].parsed
- to_hash = '%d:%d:%d:%d' % (
- params['p'].native,
- params['q'].native,
- params['g'].native,
- key.native,
- )
-
- elif key_type == 'ec':
- key = self['public_key']
-
- if params.name == 'named':
- to_hash = '%s:' % params.chosen.native
- to_hash = to_hash.encode('utf-8')
- to_hash += key.native
-
- elif params.name == 'implicit_ca':
- to_hash = key.native
-
- elif params.name == 'specified':
- to_hash = '%s:' % params.chosen['field_id']['parameters'].native
- to_hash = to_hash.encode('utf-8')
- to_hash += b':' + params.chosen['curve']['a'].native
- to_hash += b':' + params.chosen['curve']['b'].native
- to_hash += key.native
-
- if isinstance(to_hash, str_cls):
- to_hash = to_hash.encode('utf-8')
-
- self._fingerprint = hashlib.sha256(to_hash).digest()
-
- return self._fingerprint
diff --git a/functions/source/CreateSSHKey/asn1crypto/ocsp.py b/functions/source/CreateSSHKey/asn1crypto/ocsp.py
deleted file mode 100644
index f18d8e8..0000000
--- a/functions/source/CreateSSHKey/asn1crypto/ocsp.py
+++ /dev/null
@@ -1,652 +0,0 @@
-# coding: utf-8
-
-"""
-ASN.1 type classes for the online certificate status protocol (OCSP). Exports
-the following items:
-
- - OCSPRequest()
- - OCSPResponse()
-
-Other type classes are defined that help compose the types listed above.
-"""
-
-from __future__ import unicode_literals, division, absolute_import, print_function
-
-from .algos import DigestAlgorithm, SignedDigestAlgorithm
-from .core import (
- Boolean,
- Choice,
- Enumerated,
- GeneralizedTime,
- IA5String,
- Integer,
- Null,
- ObjectIdentifier,
- OctetBitString,
- OctetString,
- ParsableOctetString,
- Sequence,
- SequenceOf,
-)
-from .crl import AuthorityInfoAccessSyntax, CRLReason
-from .keys import PublicKeyAlgorithm
-from .x509 import Certificate, GeneralName, GeneralNames, Name
-
-
-# The structures in this file are taken from https://tools.ietf.org/html/rfc6960
-
-
-class Version(Integer):
- _map = {
- 0: 'v1'
- }
-
-
-class CertId(Sequence):
- _fields = [
- ('hash_algorithm', DigestAlgorithm),
- ('issuer_name_hash', OctetString),
- ('issuer_key_hash', OctetString),
- ('serial_number', Integer),
- ]
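-    # Per RFC 6960, issuer_name_hash is the hash (using hash_algorithm) of the
-    # issuer's DER-encoded Name, and issuer_key_hash is the hash of the value
-    # of the issuer's public key bit string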
-
-
-class ServiceLocator(Sequence):
- _fields = [
- ('issuer', Name),
- ('locator', AuthorityInfoAccessSyntax),
- ]
-
-
-class RequestExtensionId(ObjectIdentifier):
- _map = {
- '1.3.6.1.5.5.7.48.1.7': 'service_locator',
- }
-
-
-class RequestExtension(Sequence):
- _fields = [
- ('extn_id', RequestExtensionId),
- ('critical', Boolean, {'default': False}),
- ('extn_value', ParsableOctetString),
- ]
-
- _oid_pair = ('extn_id', 'extn_value')
- _oid_specs = {
- 'service_locator': ServiceLocator,
- }
-
-
-class RequestExtensions(SequenceOf):
- _child_spec = RequestExtension
-
-
-class Request(Sequence):
- _fields = [
- ('req_cert', CertId),
- ('single_request_extensions', RequestExtensions, {'explicit': 0, 'optional': True}),
- ]
-
- _processed_extensions = False
- _critical_extensions = None
- _service_locator_value = None
-
- def _set_extensions(self):
- """
- Sets common named extensions to private attributes and creates a list
- of critical extensions
- """
-
- self._critical_extensions = set()
-
- for extension in self['single_request_extensions']:
- name = extension['extn_id'].native
- attribute_name = '_%s_value' % name
- if hasattr(self, attribute_name):
- setattr(self, attribute_name, extension['extn_value'].parsed)
- if extension['critical'].native:
- self._critical_extensions.add(name)
-
- self._processed_extensions = True
-
- @property
- def critical_extensions(self):
- """
- Returns a set of the names (or OID if not a known extension) of the
- extensions marked as critical
-
- :return:
- A set of unicode strings
- """
-
- if not self._processed_extensions:
- self._set_extensions()
- return self._critical_extensions
-
- @property
- def service_locator_value(self):
- """
- This extension is used when communicating with an OCSP responder that
- acts as a proxy for OCSP requests
-
- :return:
- None or a ServiceLocator object
- """
-
- if self._processed_extensions is False:
- self._set_extensions()
- return self._service_locator_value
-
-
-class Requests(SequenceOf):
- _child_spec = Request
-
-
-class ResponseType(ObjectIdentifier):
- _map = {
- '1.3.6.1.5.5.7.48.1.1': 'basic_ocsp_response',
- }
-
-
-class AcceptableResponses(SequenceOf):
- _child_spec = ResponseType
-
-
-class PreferredSignatureAlgorithm(Sequence):
- _fields = [
- ('sig_identifier', SignedDigestAlgorithm),
- ('cert_identifier', PublicKeyAlgorithm, {'optional': True}),
- ]
-
-
-class PreferredSignatureAlgorithms(SequenceOf):
- _child_spec = PreferredSignatureAlgorithm
-
-
-class TBSRequestExtensionId(ObjectIdentifier):
- _map = {
- '1.3.6.1.5.5.7.48.1.2': 'nonce',
- '1.3.6.1.5.5.7.48.1.4': 'acceptable_responses',
- '1.3.6.1.5.5.7.48.1.8': 'preferred_signature_algorithms',
- }
-
-
-class TBSRequestExtension(Sequence):
- _fields = [
- ('extn_id', TBSRequestExtensionId),
- ('critical', Boolean, {'default': False}),
- ('extn_value', ParsableOctetString),
- ]
-
- _oid_pair = ('extn_id', 'extn_value')
- _oid_specs = {
- 'nonce': OctetString,
- 'acceptable_responses': AcceptableResponses,
- 'preferred_signature_algorithms': PreferredSignatureAlgorithms,
- }
-
-
-class TBSRequestExtensions(SequenceOf):
- _child_spec = TBSRequestExtension
-
-
-class TBSRequest(Sequence):
- _fields = [
- ('version', Version, {'explicit': 0, 'default': 'v1'}),
- ('requestor_name', GeneralName, {'explicit': 1, 'optional': True}),
- ('request_list', Requests),
- ('request_extensions', TBSRequestExtensions, {'explicit': 2, 'optional': True}),
- ]
-
-
-class Certificates(SequenceOf):
- _child_spec = Certificate
-
-
-class Signature(Sequence):
- _fields = [
- ('signature_algorithm', SignedDigestAlgorithm),
- ('signature', OctetBitString),
- ('certs', Certificates, {'explicit': 0, 'optional': True}),
- ]
-
-
-class OCSPRequest(Sequence):
- _fields = [
- ('tbs_request', TBSRequest),
- ('optional_signature', Signature, {'explicit': 0, 'optional': True}),
- ]
-
- _processed_extensions = False
- _critical_extensions = None
- _nonce_value = None
- _acceptable_responses_value = None
- _preferred_signature_algorithms_value = None
-
- def _set_extensions(self):
- """
- Sets common named extensions to private attributes and creates a list
- of critical extensions
- """
-
- self._critical_extensions = set()
-
- for extension in self['tbs_request']['request_extensions']:
- name = extension['extn_id'].native
- attribute_name = '_%s_value' % name
- if hasattr(self, attribute_name):
- setattr(self, attribute_name, extension['extn_value'].parsed)
- if extension['critical'].native:
- self._critical_extensions.add(name)
-
- self._processed_extensions = True
-
- @property
- def critical_extensions(self):
- """
- Returns a set of the names (or OID if not a known extension) of the
- extensions marked as critical
-
- :return:
- A set of unicode strings
- """
-
- if not self._processed_extensions:
- self._set_extensions()
- return self._critical_extensions
-
- @property
- def nonce_value(self):
- """
- This extension is used to prevent replay attacks by including a unique,
- random value with each request/response pair
-
- :return:
- None or an OctetString object
- """
-
- if self._processed_extensions is False:
- self._set_extensions()
- return self._nonce_value
-
- @property
- def acceptable_responses_value(self):
- """
- This extension is used to allow the client and server to communicate
- with alternative response formats other than just basic_ocsp_response,
- although no other formats are defined in the standard.
-
- :return:
- None or an AcceptableResponses object
- """
-
- if self._processed_extensions is False:
- self._set_extensions()
- return self._acceptable_responses_value
-
- @property
- def preferred_signature_algorithms_value(self):
- """
- This extension is used by the client to define what signature algorithms
- are preferred, including both the hash algorithm and the public key
- algorithm, with a level of detail down to even the public key algorithm
- parameters, such as curve name.
-
- :return:
- None or a PreferredSignatureAlgorithms object
- """
-
- if self._processed_extensions is False:
- self._set_extensions()
- return self._preferred_signature_algorithms_value
-
-
-class OCSPResponseStatus(Enumerated):
- _map = {
- 0: 'successful',
- 1: 'malformed_request',
- 2: 'internal_error',
- 3: 'try_later',
- 5: 'sign_required',
- 6: 'unauthorized',
- }
-
-
-class ResponderId(Choice):
- _alternatives = [
- ('by_name', Name, {'explicit': 1}),
- ('by_key', OctetString, {'explicit': 2}),
- ]
-
-
-class RevokedInfo(Sequence):
- _fields = [
- ('revocation_time', GeneralizedTime),
- ('revocation_reason', CRLReason, {'explicit': 0, 'optional': True}),
- ]
-
-
-class CertStatus(Choice):
- _alternatives = [
- ('good', Null, {'implicit': 0}),
- ('revoked', RevokedInfo, {'implicit': 1}),
- ('unknown', Null, {'implicit': 2}),
- ]
-
-
-class CrlId(Sequence):
- _fields = [
- ('crl_url', IA5String, {'explicit': 0, 'optional': True}),
- ('crl_num', Integer, {'explicit': 1, 'optional': True}),
- ('crl_time', GeneralizedTime, {'explicit': 2, 'optional': True}),
- ]
-
-
-class SingleResponseExtensionId(ObjectIdentifier):
- _map = {
- '1.3.6.1.5.5.7.48.1.3': 'crl',
- '1.3.6.1.5.5.7.48.1.6': 'archive_cutoff',
- # These are CRLEntryExtension values from
- # https://tools.ietf.org/html/rfc5280
- '2.5.29.21': 'crl_reason',
- '2.5.29.24': 'invalidity_date',
- '2.5.29.29': 'certificate_issuer',
- # https://tools.ietf.org/html/rfc6962.html#page-13
- '1.3.6.1.4.1.11129.2.4.5': 'signed_certificate_timestamp_list',
- }
-
-
-class SingleResponseExtension(Sequence):
- _fields = [
- ('extn_id', SingleResponseExtensionId),
- ('critical', Boolean, {'default': False}),
- ('extn_value', ParsableOctetString),
- ]
-
- _oid_pair = ('extn_id', 'extn_value')
- _oid_specs = {
- 'crl': CrlId,
- 'archive_cutoff': GeneralizedTime,
- 'crl_reason': CRLReason,
- 'invalidity_date': GeneralizedTime,
- 'certificate_issuer': GeneralNames,
- 'signed_certificate_timestamp_list': OctetString,
- }
-
-
-class SingleResponseExtensions(SequenceOf):
- _child_spec = SingleResponseExtension
-
-
-class SingleResponse(Sequence):
- _fields = [
- ('cert_id', CertId),
- ('cert_status', CertStatus),
- ('this_update', GeneralizedTime),
- ('next_update', GeneralizedTime, {'explicit': 0, 'optional': True}),
- ('single_extensions', SingleResponseExtensions, {'explicit': 1, 'optional': True}),
- ]
-
- _processed_extensions = False
- _critical_extensions = None
- _crl_value = None
- _archive_cutoff_value = None
- _crl_reason_value = None
- _invalidity_date_value = None
- _certificate_issuer_value = None
-
- def _set_extensions(self):
- """
- Sets common named extensions to private attributes and creates a list
- of critical extensions
- """
-
- self._critical_extensions = set()
-
- for extension in self['single_extensions']:
- name = extension['extn_id'].native
- attribute_name = '_%s_value' % name
- if hasattr(self, attribute_name):
- setattr(self, attribute_name, extension['extn_value'].parsed)
- if extension['critical'].native:
- self._critical_extensions.add(name)
-
- self._processed_extensions = True
-
- @property
- def critical_extensions(self):
- """
- Returns a set of the names (or OID if not a known extension) of the
- extensions marked as critical
-
- :return:
- A set of unicode strings
- """
-
- if not self._processed_extensions:
- self._set_extensions()
- return self._critical_extensions
-
- @property
- def crl_value(self):
- """
-        This extension is used to locate the CRL that contains this
-        certificate's revocation entry.
-
- :return:
- None or a CrlId object
- """
-
- if self._processed_extensions is False:
- self._set_extensions()
- return self._crl_value
-
- @property
- def archive_cutoff_value(self):
- """
- This extension is used to indicate the date at which an archived
- (historical) certificate status entry will no longer be available.
-
- :return:
- None or a GeneralizedTime object
- """
-
- if self._processed_extensions is False:
- self._set_extensions()
- return self._archive_cutoff_value
-
- @property
- def crl_reason_value(self):
- """
- This extension indicates the reason that a certificate was revoked.
-
- :return:
- None or a CRLReason object
- """
-
- if self._processed_extensions is False:
- self._set_extensions()
- return self._crl_reason_value
-
- @property
- def invalidity_date_value(self):
- """
- This extension indicates the suspected date/time the private key was
- compromised or the certificate became invalid. This would usually be
- before the revocation date, which is when the CA processed the
- revocation.
-
- :return:
- None or a GeneralizedTime object
- """
-
- if self._processed_extensions is False:
- self._set_extensions()
- return self._invalidity_date_value
-
- @property
- def certificate_issuer_value(self):
- """
- This extension indicates the issuer of the certificate in question.
-
- :return:
- None or an x509.GeneralNames object
- """
-
- if self._processed_extensions is False:
- self._set_extensions()
- return self._certificate_issuer_value
-
-
-class Responses(SequenceOf):
- _child_spec = SingleResponse
-
-
-class ResponseDataExtensionId(ObjectIdentifier):
- _map = {
- '1.3.6.1.5.5.7.48.1.2': 'nonce',
- '1.3.6.1.5.5.7.48.1.9': 'extended_revoke',
- }
-
-
-class ResponseDataExtension(Sequence):
- _fields = [
- ('extn_id', ResponseDataExtensionId),
- ('critical', Boolean, {'default': False}),
- ('extn_value', ParsableOctetString),
- ]
-
- _oid_pair = ('extn_id', 'extn_value')
- _oid_specs = {
- 'nonce': OctetString,
- 'extended_revoke': Null,
- }
-
-
-class ResponseDataExtensions(SequenceOf):
- _child_spec = ResponseDataExtension
-
-
-class ResponseData(Sequence):
- _fields = [
- ('version', Version, {'explicit': 0, 'default': 'v1'}),
- ('responder_id', ResponderId),
- ('produced_at', GeneralizedTime),
- ('responses', Responses),
- ('response_extensions', ResponseDataExtensions, {'explicit': 1, 'optional': True}),
- ]
-
-
-class BasicOCSPResponse(Sequence):
- _fields = [
- ('tbs_response_data', ResponseData),
- ('signature_algorithm', SignedDigestAlgorithm),
- ('signature', OctetBitString),
- ('certs', Certificates, {'explicit': 0, 'optional': True}),
- ]
-
-
-class ResponseBytes(Sequence):
- _fields = [
- ('response_type', ResponseType),
- ('response', ParsableOctetString),
- ]
-
- _oid_pair = ('response_type', 'response')
- _oid_specs = {
- 'basic_ocsp_response': BasicOCSPResponse,
- }
-
-
-class OCSPResponse(Sequence):
- _fields = [
- ('response_status', OCSPResponseStatus),
- ('response_bytes', ResponseBytes, {'explicit': 0, 'optional': True}),
- ]
-
- _processed_extensions = False
- _critical_extensions = None
- _nonce_value = None
- _extended_revoke_value = None
-
- def _set_extensions(self):
- """
- Sets common named extensions to private attributes and creates a list
- of critical extensions
- """
-
- self._critical_extensions = set()
-
- for extension in self['response_bytes']['response'].parsed['tbs_response_data']['response_extensions']:
- name = extension['extn_id'].native
- attribute_name = '_%s_value' % name
- if hasattr(self, attribute_name):
- setattr(self, attribute_name, extension['extn_value'].parsed)
- if extension['critical'].native:
- self._critical_extensions.add(name)
-
- self._processed_extensions = True
-
- @property
- def critical_extensions(self):
- """
- Returns a set of the names (or OID if not a known extension) of the
- extensions marked as critical
-
- :return:
- A set of unicode strings
- """
-
- if not self._processed_extensions:
- self._set_extensions()
- return self._critical_extensions
-
- @property
- def nonce_value(self):
- """
- This extension is used to prevent replay attacks on the request/response
- exchange
-
- :return:
- None or an OctetString object
- """
-
- if self._processed_extensions is False:
- self._set_extensions()
- return self._nonce_value
-
- @property
- def extended_revoke_value(self):
- """
- This extension is used to signal that the responder will return a
- "revoked" status for non-issued certificates.
-
- :return:
- None or a Null object (if present)
- """
-
- if self._processed_extensions is False:
- self._set_extensions()
- return self._extended_revoke_value
-
- @property
- def basic_ocsp_response(self):
- """
- A shortcut into the BasicOCSPResponse sequence
-
- :return:
- None or an asn1crypto.ocsp.BasicOCSPResponse object
- """
-
- return self['response_bytes']['response'].parsed
-
- @property
- def response_data(self):
- """
-        A shortcut into the parsed ResponseData sequence
-
- :return:
- None or an asn1crypto.ocsp.ResponseData object
- """
-
- return self['response_bytes']['response'].parsed['tbs_response_data']
diff --git a/functions/source/CreateSSHKey/asn1crypto/parser.py b/functions/source/CreateSSHKey/asn1crypto/parser.py
deleted file mode 100644
index 07f53ab..0000000
--- a/functions/source/CreateSSHKey/asn1crypto/parser.py
+++ /dev/null
@@ -1,289 +0,0 @@
-# coding: utf-8
-
-"""
-Functions for parsing and dumping using the ASN.1 DER encoding. Exports the
-following items:
-
- - emit()
- - parse()
- - peek()
-
-Other type classes are defined that help compose the types listed above.
-"""
-
-from __future__ import unicode_literals, division, absolute_import, print_function
-
-import sys
-
-from ._types import byte_cls, chr_cls, type_name
-from .util import int_from_bytes, int_to_bytes
-
-_PY2 = sys.version_info <= (3,)
-_INSUFFICIENT_DATA_MESSAGE = 'Insufficient data - %s bytes requested but only %s available'
-
-
-def emit(class_, method, tag, contents):
- """
- Constructs a byte string of an ASN.1 DER-encoded value
-
- This is typically not useful. Instead, use one of the standard classes from
- asn1crypto.core, or construct a new class with specific fields, and call the
- .dump() method.
-
- :param class_:
- An integer ASN.1 class value: 0 (universal), 1 (application),
- 2 (context), 3 (private)
-
- :param method:
- An integer ASN.1 method value: 0 (primitive), 1 (constructed)
-
- :param tag:
- An integer ASN.1 tag value
-
- :param contents:
- A byte string of the encoded byte contents
-
- :return:
- A byte string of the ASN.1 DER value (header and contents)
- """
-
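-    # Example: emit(0, 0, 2, b'\x01') returns b'\x02\x01\x01', the DER
-    # encoding of an INTEGER with the value 1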
- if not isinstance(class_, int):
- raise TypeError('class_ must be an integer, not %s' % type_name(class_))
-
- if class_ < 0 or class_ > 3:
- raise ValueError('class_ must be one of 0, 1, 2 or 3, not %s' % class_)
-
- if not isinstance(method, int):
- raise TypeError('method must be an integer, not %s' % type_name(method))
-
- if method < 0 or method > 1:
- raise ValueError('method must be 0 or 1, not %s' % method)
-
- if not isinstance(tag, int):
- raise TypeError('tag must be an integer, not %s' % type_name(tag))
-
- if tag < 0:
- raise ValueError('tag must be greater than zero, not %s' % tag)
-
- if not isinstance(contents, byte_cls):
- raise TypeError('contents must be a byte string, not %s' % type_name(contents))
-
- return _dump_header(class_, method, tag, contents) + contents
-
-
-def parse(contents, strict=False):
- """
- Parses a byte string of ASN.1 BER/DER-encoded data.
-
- This is typically not useful. Instead, use one of the standard classes from
- asn1crypto.core, or construct a new class with specific fields, and call the
- .load() class method.
-
- :param contents:
- A byte string of BER/DER-encoded data
-
- :param strict:
- A boolean indicating if trailing data should be forbidden - if so, a
- ValueError will be raised when trailing data exists
-
- :raises:
- ValueError - when the contents do not contain an ASN.1 header or are truncated in some way
- TypeError - when contents is not a byte string
-
- :return:
- A 6-element tuple:
- - 0: integer class (0 to 3)
- - 1: integer method
- - 2: integer tag
- - 3: byte string header
- - 4: byte string content
- - 5: byte string trailer
- """
-
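-    # Example: parse(b'\x02\x01\x01') returns
-    # (0, 0, 2, b'\x02\x01', b'\x01', b'') - a universal, primitive INTEGER
-    # with the value 1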
- if not isinstance(contents, byte_cls):
- raise TypeError('contents must be a byte string, not %s' % type_name(contents))
-
- contents_len = len(contents)
- info, consumed = _parse(contents, contents_len)
- if strict and consumed != contents_len:
- raise ValueError('Extra data - %d bytes of trailing data were provided' % (contents_len - consumed))
- return info
-
-
-def peek(contents):
- """
- Parses a byte string of ASN.1 BER/DER-encoded data to find the length
-
- This is typically used to look into an encoded value to see how long the
- next chunk of ASN.1-encoded data is. Primarily it is useful when a
- value is a concatenation of multiple values.
-
- :param contents:
- A byte string of BER/DER-encoded data
-
- :raises:
- ValueError - when the contents do not contain an ASN.1 header or are truncated in some way
- TypeError - when contents is not a byte string
-
- :return:
- An integer with the number of bytes occupied by the ASN.1 value
- """
-
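-    # Example: peek(b'\x02\x01\x01\x05\x00') returns 3, since the first
-    # complete value (an INTEGER) occupies three bytes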
- if not isinstance(contents, byte_cls):
- raise TypeError('contents must be a byte string, not %s' % type_name(contents))
-
- info, consumed = _parse(contents, len(contents))
- return consumed
-
-
-def _parse(encoded_data, data_len, pointer=0, lengths_only=False):
- """
- Parses a byte string into component parts
-
- :param encoded_data:
- A byte string that contains BER-encoded data
-
- :param data_len:
- The integer length of the encoded data
-
- :param pointer:
- The index in the byte string to parse from
-
- :param lengths_only:
- A boolean to cause the call to return a 2-element tuple of the integer
- number of bytes in the header and the integer number of bytes in the
- contents. Internal use only.
-
- :return:
- A 2-element tuple:
- - 0: A tuple of (class_, method, tag, header, content, trailer)
- - 1: An integer indicating how many bytes were consumed
- """
-
- if data_len < pointer + 2:
- raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (2, data_len - pointer))
-
- start = pointer
- first_octet = ord(encoded_data[pointer]) if _PY2 else encoded_data[pointer]
- pointer += 1
-
- tag = first_octet & 31
-    # High tag number form: the tag is encoded base-128 over the following
-    # octets, using the 8th bit as a continuation indicator
- if tag == 31:
- tag = 0
- while True:
- num = ord(encoded_data[pointer]) if _PY2 else encoded_data[pointer]
- pointer += 1
- tag *= 128
- tag += num & 127
- if num >> 7 == 0:
- break
-
- length_octet = ord(encoded_data[pointer]) if _PY2 else encoded_data[pointer]
- pointer += 1
-
- if length_octet >> 7 == 0:
- if lengths_only:
- return (pointer, pointer + (length_octet & 127))
- contents_end = pointer + (length_octet & 127)
-
- else:
- length_octets = length_octet & 127
- if length_octets:
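-            # Definite long form: the next length_octets bytes hold the
-            # big-endian content length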
- pointer += length_octets
- contents_end = pointer + int_from_bytes(encoded_data[pointer - length_octets:pointer], signed=False)
- if lengths_only:
- return (pointer, contents_end)
-
- else:
- # To properly parse indefinite length values, we need to scan forward
- # parsing headers until we find a value with a length of zero. If we
- # just scanned looking for \x00\x00, nested indefinite length values
- # would not work.
- contents_end = pointer
- # Unfortunately we need to understand the contents of the data to
- # properly scan forward, which bleeds some representation info into
- # the parser. This condition handles the unused bits byte in
- # constructed bit strings.
- if tag == 3:
- contents_end += 1
- while contents_end < data_len:
- sub_header_end, contents_end = _parse(encoded_data, data_len, contents_end, lengths_only=True)
- if contents_end == sub_header_end and encoded_data[contents_end - 2:contents_end] == b'\x00\x00':
- break
- if lengths_only:
- return (pointer, contents_end)
- if contents_end > data_len:
- raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (contents_end, data_len))
- return (
- (
- first_octet >> 6,
- (first_octet >> 5) & 1,
- tag,
- encoded_data[start:pointer],
- encoded_data[pointer:contents_end - 2],
- b'\x00\x00'
- ),
- contents_end
- )
-
- if contents_end > data_len:
- raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (contents_end, data_len))
- return (
- (
- first_octet >> 6,
- (first_octet >> 5) & 1,
- tag,
- encoded_data[start:pointer],
- encoded_data[pointer:contents_end],
- b''
- ),
- contents_end
- )
-
-
-def _dump_header(class_, method, tag, contents):
- """
- Constructs the header bytes for an ASN.1 object
-
- :param class_:
- An integer ASN.1 class value: 0 (universal), 1 (application),
- 2 (context), 3 (private)
-
- :param method:
- An integer ASN.1 method value: 0 (primitive), 1 (constructed)
-
- :param tag:
- An integer ASN.1 tag value
-
- :param contents:
- A byte string of the encoded byte contents
-
- :return:
- A byte string of the ASN.1 DER header
- """
-
- header = b''
-
- id_num = 0
- id_num |= class_ << 6
- id_num |= method << 5
-
-    if tag >= 31:
-        header += chr_cls(id_num | 31)
-        # High tag number form: emit the tag in base-128, most significant
-        # byte first, with the 8th bit set on every byte except the last
-        tag_bytes = []
-        while tag > 0:
-            tag_bytes.insert(0, tag & 0x7F)
-            tag = tag >> 7
-        for index, tag_byte in enumerate(tag_bytes):
-            continuation_bit = 0x80 if index < len(tag_bytes) - 1 else 0
-            header += chr_cls(continuation_bit | tag_byte)
-    else:
-        header += chr_cls(id_num | tag)
-
- length = len(contents)
- if length <= 127:
- header += chr_cls(length)
- else:
- length_bytes = int_to_bytes(length)
- header += chr_cls(0x80 | len(length_bytes))
- header += length_bytes
-
- return header
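
For reference, the identifier and length layout that _parse() reads and _dump_header() writes can be reproduced in a few standalone lines. This is only an illustrative sketch for low tag numbers on Python 3; it does not use the removed module.

def der_header(class_, method, tag, content_length):
    # Identifier octet: class in bits 7-6, constructed flag in bit 5,
    # tag in bits 4-0. Valid only for low tag numbers (tag < 31).
    header = bytes([(class_ << 6) | (method << 5) | tag])
    if content_length <= 127:
        # Short form: a single length octet
        header += bytes([content_length])
    else:
        # Long form: 0x80 | number of length octets, then the big-endian length
        length_bytes = content_length.to_bytes((content_length.bit_length() + 7) // 8, 'big')
        header += bytes([0x80 | len(length_bytes)]) + length_bytes
    return header

# Universal (0), primitive (0) OCTET STRING (tag 4) with 3 content bytes
assert der_header(0, 0, 4, 3) == b'\x04\x03'
# 300 content bytes require the long form
assert der_header(0, 0, 4, 300) == b'\x04\x82\x01\x2c'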
diff --git a/functions/source/CreateSSHKey/asn1crypto/pdf.py b/functions/source/CreateSSHKey/asn1crypto/pdf.py
deleted file mode 100644
index b72c886..0000000
--- a/functions/source/CreateSSHKey/asn1crypto/pdf.py
+++ /dev/null
@@ -1,84 +0,0 @@
-# coding: utf-8
-
-"""
-ASN.1 type classes for PDF signature structures. Adds extra oid mapping and
-value parsing to asn1crypto.x509.Extension() and asn1crypto.cms.CMSAttribute().
-"""
-
-from __future__ import unicode_literals, division, absolute_import, print_function
-
-from .cms import CMSAttributeType, CMSAttribute
-from .core import (
- Boolean,
- Integer,
- Null,
- ObjectIdentifier,
- OctetString,
- Sequence,
- SequenceOf,
- SetOf,
-)
-from .crl import CertificateList
-from .ocsp import OCSPResponse
-from .x509 import (
- Extension,
- ExtensionId,
- GeneralName,
- KeyPurposeId,
-)
-
-
-class AdobeArchiveRevInfo(Sequence):
- _fields = [
- ('version', Integer)
- ]
-
-
-class AdobeTimestamp(Sequence):
- _fields = [
- ('version', Integer),
- ('location', GeneralName),
- ('requires_auth', Boolean, {'optional': True, 'default': False}),
- ]
-
-
-class OtherRevInfo(Sequence):
- _fields = [
- ('type', ObjectIdentifier),
- ('value', OctetString),
- ]
-
-
-class SequenceOfCertificateList(SequenceOf):
- _child_spec = CertificateList
-
-
-class SequenceOfOCSPResponse(SequenceOf):
- _child_spec = OCSPResponse
-
-
-class SequenceOfOtherRevInfo(SequenceOf):
- _child_spec = OtherRevInfo
-
-
-class RevocationInfoArchival(Sequence):
- _fields = [
- ('crl', SequenceOfCertificateList, {'explicit': 0, 'optional': True}),
- ('ocsp', SequenceOfOCSPResponse, {'explicit': 1, 'optional': True}),
- ('other_rev_info', SequenceOfOtherRevInfo, {'explicit': 2, 'optional': True}),
- ]
-
-
-class SetOfRevocationInfoArchival(SetOf):
- _child_spec = RevocationInfoArchival
-
-
-ExtensionId._map['1.2.840.113583.1.1.9.2'] = 'adobe_archive_rev_info'
-ExtensionId._map['1.2.840.113583.1.1.9.1'] = 'adobe_timestamp'
-ExtensionId._map['1.2.840.113583.1.1.10'] = 'adobe_ppklite_credential'
-Extension._oid_specs['adobe_archive_rev_info'] = AdobeArchiveRevInfo
-Extension._oid_specs['adobe_timestamp'] = AdobeTimestamp
-Extension._oid_specs['adobe_ppklite_credential'] = Null
-KeyPurposeId._map['1.2.840.113583.1.1.5'] = 'pdf_signing'
-CMSAttributeType._map['1.2.840.113583.1.1.8'] = 'adobe_revocation_info_archival'
-CMSAttribute._oid_specs['adobe_revocation_info_archival'] = SetOfRevocationInfoArchival
diff --git a/functions/source/CreateSSHKey/asn1crypto/pem.py b/functions/source/CreateSSHKey/asn1crypto/pem.py
deleted file mode 100644
index 8cb6024..0000000
--- a/functions/source/CreateSSHKey/asn1crypto/pem.py
+++ /dev/null
@@ -1,222 +0,0 @@
-# coding: utf-8
-
-"""
-Encoding DER to PEM and decoding PEM to DER. Exports the following items:
-
- - armor()
- - detect()
- - unarmor()
-
-"""
-
-from __future__ import unicode_literals, division, absolute_import, print_function
-
-import base64
-import re
-import sys
-
-from ._errors import unwrap
-from ._types import type_name, str_cls, byte_cls
-
-if sys.version_info < (3,):
- from cStringIO import StringIO as BytesIO
-else:
- from io import BytesIO
-
-
-def detect(byte_string):
- """
- Detect if a byte string seems to contain a PEM-encoded block
-
- :param byte_string:
- A byte string to look through
-
- :return:
- A boolean, indicating if a PEM-encoded block is contained in the byte
- string
- """
-
- if not isinstance(byte_string, byte_cls):
- raise TypeError(unwrap(
- '''
- byte_string must be a byte string, not %s
- ''',
- type_name(byte_string)
- ))
-
- return byte_string.find(b'-----BEGIN') != -1 or byte_string.find(b'---- BEGIN') != -1
-
-
-def armor(type_name, der_bytes, headers=None):
- """
- Armors a DER-encoded byte string in PEM
-
- :param der_bytes:
- A byte string to be armored
-
- :param type_name:
- A unicode string that will be capitalized and placed in the header
- and footer of the block. E.g. "CERTIFICATE", "PRIVATE KEY", etc. This
- will appear as "-----BEGIN CERTIFICATE-----" and
- "-----END CERTIFICATE-----".
-
- :param headers:
- An OrderedDict of the header lines to write after the BEGIN line
-
- :return:
- A byte string of the PEM block
- """
-
- if not isinstance(der_bytes, byte_cls):
- raise TypeError(unwrap(
- '''
- der_bytes must be a byte string, not %s
- ''' % type_name(der_bytes)
- ))
-
- if not isinstance(type_name, str_cls):
- raise TypeError(unwrap(
- '''
- type_name must be a unicode string, not %s
- ''',
- type_name(type_name)
- ))
-
- type_name = type_name.upper().encode('ascii')
-
- output = BytesIO()
- output.write(b'-----BEGIN ')
- output.write(type_name)
- output.write(b'-----\n')
- if headers:
- for key in headers:
- output.write(key.encode('ascii'))
- output.write(b': ')
- output.write(headers[key].encode('ascii'))
- output.write(b'\n')
- output.write(b'\n')
- b64_bytes = base64.b64encode(der_bytes)
- b64_len = len(b64_bytes)
- i = 0
- while i < b64_len:
- output.write(b64_bytes[i:i + 64])
- output.write(b'\n')
- i += 64
- output.write(b'-----END ')
- output.write(type_name)
- output.write(b'-----\n')
-
- return output.getvalue()
-
-
-def _unarmor(pem_bytes):
- """
- Convert a PEM-encoded byte string into one or more DER-encoded byte strings
-
- :param pem_bytes:
- A byte string of the PEM-encoded data
-
- :raises:
- ValueError - when the pem_bytes do not appear to be PEM-encoded bytes
-
- :return:
- A generator of 3-element tuples in the format: (object_type, headers,
- der_bytes). The object_type is a unicode string of what is between
- "-----BEGIN " and "-----". Examples include: "CERTIFICATE",
- "PUBLIC KEY", "PRIVATE KEY". The headers is a dict containing any lines
- in the form "Name: Value" that are right after the begin line.
- """
-
- if not isinstance(pem_bytes, byte_cls):
- raise TypeError(unwrap(
- '''
- pem_bytes must be a byte string, not %s
- ''',
- type_name(pem_bytes)
- ))
-
- # Valid states include: "trash", "headers", "body"
- state = 'trash'
- headers = {}
- base64_data = b''
- object_type = None
-
- found_start = False
- found_end = False
-
- for line in pem_bytes.splitlines(False):
- if line == b'':
- continue
-
- if state == "trash":
-            # Look for a starting line since some CA cert bundles show the cert
-            # info in a parsed format above each PEM block
- type_name_match = re.match(b'^(?:---- |-----)BEGIN ([A-Z0-9 ]+)(?: ----|-----)', line)
- if not type_name_match:
- continue
- object_type = type_name_match.group(1).decode('ascii')
-
- found_start = True
- state = 'headers'
- continue
-
- if state == 'headers':
- if line.find(b':') == -1:
- state = 'body'
- else:
- decoded_line = line.decode('ascii')
- name, value = decoded_line.split(':', 1)
- headers[name] = value.strip()
- continue
-
- if state == 'body':
- if line[0:5] in (b'-----', b'---- '):
- der_bytes = base64.b64decode(base64_data)
-
- yield (object_type, headers, der_bytes)
-
- state = 'trash'
- headers = {}
- base64_data = b''
- object_type = None
- found_end = True
- continue
-
- base64_data += line
-
- if not found_start or not found_end:
- raise ValueError(unwrap(
- '''
- pem_bytes does not appear to contain PEM-encoded data - no
- BEGIN/END combination found
- '''
- ))
-
-
-def unarmor(pem_bytes, multiple=False):
- """
- Convert a PEM-encoded byte string into a DER-encoded byte string
-
- :param pem_bytes:
- A byte string of the PEM-encoded data
-
- :param multiple:
- If True, function will return a generator
-
- :raises:
- ValueError - when the pem_bytes do not appear to be PEM-encoded bytes
-
- :return:
- A 3-element tuple (object_name, headers, der_bytes). The object_name is
- a unicode string of what is between "-----BEGIN " and "-----". Examples
- include: "CERTIFICATE", "PUBLIC KEY", "PRIVATE KEY". The headers is a
- dict containing any lines in the form "Name: Value" that are right
- after the begin line.
- """
-
- generator = _unarmor(pem_bytes)
-
- if not multiple:
- return next(generator)
-
- return generator
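
A minimal usage sketch of the armor()/unarmor() round trip documented above, assuming the standalone asn1crypto package (rather than this vendored copy) is importable:

from asn1crypto import pem

der_bytes = b'\x30\x03\x02\x01\x01'  # arbitrary DER: SEQUENCE { INTEGER 1 }
pem_bytes = pem.armor('CERTIFICATE REQUEST', der_bytes)

if pem.detect(pem_bytes):
    object_type, headers, decoded = pem.unarmor(pem_bytes)
    assert object_type == 'CERTIFICATE REQUEST'
    assert decoded == der_bytes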
diff --git a/functions/source/CreateSSHKey/asn1crypto/pkcs12.py b/functions/source/CreateSSHKey/asn1crypto/pkcs12.py
deleted file mode 100644
index 7ebcefe..0000000
--- a/functions/source/CreateSSHKey/asn1crypto/pkcs12.py
+++ /dev/null
@@ -1,193 +0,0 @@
-# coding: utf-8
-
-"""
-ASN.1 type classes for PKCS#12 files. Exports the following items:
-
- - CertBag()
- - CrlBag()
- - Pfx()
- - SafeBag()
- - SecretBag()
-
-Other type classes are defined that help compose the types listed above.
-"""
-
-from __future__ import unicode_literals, division, absolute_import, print_function
-
-from .algos import DigestInfo
-from .cms import ContentInfo, SignedData
-from .core import (
- Any,
- BMPString,
- Integer,
- ObjectIdentifier,
- OctetString,
- ParsableOctetString,
- Sequence,
- SequenceOf,
- SetOf,
-)
-from .keys import PrivateKeyInfo, EncryptedPrivateKeyInfo
-from .x509 import Certificate, KeyPurposeId
-
-
-# The structures in this file are taken from https://tools.ietf.org/html/rfc7292
-
-class MacData(Sequence):
- _fields = [
- ('mac', DigestInfo),
- ('mac_salt', OctetString),
- ('iterations', Integer, {'default': 1}),
- ]
-
-
-class Version(Integer):
- _map = {
- 3: 'v3'
- }
-
-
-class AttributeType(ObjectIdentifier):
- _map = {
- # https://tools.ietf.org/html/rfc2985#page-18
- '1.2.840.113549.1.9.20': 'friendly_name',
- '1.2.840.113549.1.9.21': 'local_key_id',
- # https://support.microsoft.com/en-us/kb/287547
- '1.3.6.1.4.1.311.17.1': 'microsoft_local_machine_keyset',
- # https://github.com/frohoff/jdk8u-dev-jdk/blob/master/src/share/classes/sun/security/pkcs12/PKCS12KeyStore.java
- # this is a set of OIDs, representing key usage, the usual value is a SET of one element OID 2.5.29.37.0
- '2.16.840.1.113894.746875.1.1': 'trusted_key_usage',
- }
-
-
-class SetOfAny(SetOf):
- _child_spec = Any
-
-
-class SetOfBMPString(SetOf):
- _child_spec = BMPString
-
-
-class SetOfOctetString(SetOf):
- _child_spec = OctetString
-
-
-class SetOfKeyPurposeId(SetOf):
- _child_spec = KeyPurposeId
-
-
-class Attribute(Sequence):
- _fields = [
- ('type', AttributeType),
- ('values', None),
- ]
-
- _oid_specs = {
- 'friendly_name': SetOfBMPString,
- 'local_key_id': SetOfOctetString,
- 'microsoft_csp_name': SetOfBMPString,
- 'trusted_key_usage': SetOfKeyPurposeId,
- }
-
- def _values_spec(self):
- return self._oid_specs.get(self['type'].native, SetOfAny)
-
- _spec_callbacks = {
- 'values': _values_spec
- }
-
-
-class Attributes(SetOf):
- _child_spec = Attribute
-
-
-class Pfx(Sequence):
- _fields = [
- ('version', Version),
- ('auth_safe', ContentInfo),
- ('mac_data', MacData, {'optional': True})
- ]
-
- _authenticated_safe = None
-
- @property
- def authenticated_safe(self):
- if self._authenticated_safe is None:
- content = self['auth_safe']['content']
- if isinstance(content, SignedData):
- content = content['content_info']['content']
- self._authenticated_safe = AuthenticatedSafe.load(content.native)
- return self._authenticated_safe
-
-
-class AuthenticatedSafe(SequenceOf):
- _child_spec = ContentInfo
-
-
-class BagId(ObjectIdentifier):
- _map = {
- '1.2.840.113549.1.12.10.1.1': 'key_bag',
- '1.2.840.113549.1.12.10.1.2': 'pkcs8_shrouded_key_bag',
- '1.2.840.113549.1.12.10.1.3': 'cert_bag',
- '1.2.840.113549.1.12.10.1.4': 'crl_bag',
- '1.2.840.113549.1.12.10.1.5': 'secret_bag',
- '1.2.840.113549.1.12.10.1.6': 'safe_contents',
- }
-
-
-class CertId(ObjectIdentifier):
- _map = {
- '1.2.840.113549.1.9.22.1': 'x509',
- '1.2.840.113549.1.9.22.2': 'sdsi',
- }
-
-
-class CertBag(Sequence):
- _fields = [
- ('cert_id', CertId),
- ('cert_value', ParsableOctetString, {'explicit': 0}),
- ]
-
- _oid_pair = ('cert_id', 'cert_value')
- _oid_specs = {
- 'x509': Certificate,
- }
-
-
-class CrlBag(Sequence):
- _fields = [
- ('crl_id', ObjectIdentifier),
- ('crl_value', OctetString, {'explicit': 0}),
- ]
-
-
-class SecretBag(Sequence):
- _fields = [
- ('secret_type_id', ObjectIdentifier),
- ('secret_value', OctetString, {'explicit': 0}),
- ]
-
-
-class SafeContents(SequenceOf):
- pass
-
-
-class SafeBag(Sequence):
- _fields = [
- ('bag_id', BagId),
- ('bag_value', Any, {'explicit': 0}),
- ('bag_attributes', Attributes, {'optional': True}),
- ]
-
- _oid_pair = ('bag_id', 'bag_value')
- _oid_specs = {
- 'key_bag': PrivateKeyInfo,
- 'pkcs8_shrouded_key_bag': EncryptedPrivateKeyInfo,
- 'cert_bag': CertBag,
- 'crl_bag': CrlBag,
- 'secret_bag': SecretBag,
- 'safe_contents': SafeContents
- }
-
-
-SafeContents._child_spec = SafeBag
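
A hedged sketch of how the PKCS#12 classes above fit together. p12_der is a hypothetical DER-encoded .p12 payload, the import assumes the standalone asn1crypto package, and only unencrypted ('data') SafeContents are walked; the 'content_type'/'content' field names follow asn1crypto's cms.ContentInfo definition.

from asn1crypto import pkcs12

pfx = pkcs12.Pfx.load(p12_der)  # p12_der: hypothetical DER bytes of a .p12 file
for content_info in pfx.authenticated_safe:
    if content_info['content_type'].native != 'data':
        continue  # encrypted SafeContents would need decryption first
    safe_contents = pkcs12.SafeContents.load(content_info['content'].native)
    for safe_bag in safe_contents:
        # bag_value is resolved to the class mapped in SafeBag._oid_specs above
        print(safe_bag['bag_id'].native, type(safe_bag['bag_value']).__name__)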
diff --git a/functions/source/CreateSSHKey/asn1crypto/tsp.py b/functions/source/CreateSSHKey/asn1crypto/tsp.py
deleted file mode 100644
index bd40810..0000000
--- a/functions/source/CreateSSHKey/asn1crypto/tsp.py
+++ /dev/null
@@ -1,310 +0,0 @@
-# coding: utf-8
-
-"""
-ASN.1 type classes for the time stamp protocol (TSP). Exports the following
-items:
-
- - TimeStampReq()
- - TimeStampResp()
-
-Also adds TimeStampedData() support to asn1crypto.cms.ContentInfo(),
-TimeStampedData() and TSTInfo() support to
-asn1crypto.cms.EncapsulatedContentInfo() and some oids and value parsers to
-asn1crypto.cms.CMSAttribute().
-
-Other type classes are defined that help compose the types listed above.
-"""
-
-from __future__ import unicode_literals, division, absolute_import, print_function
-
-from .algos import DigestAlgorithm
-from .cms import (
- CMSAttribute,
- CMSAttributeType,
- ContentInfo,
- ContentType,
- EncapsulatedContentInfo,
-)
-from .core import (
- Any,
- BitString,
- Boolean,
- Choice,
- GeneralizedTime,
- IA5String,
- Integer,
- ObjectIdentifier,
- OctetString,
- Sequence,
- SequenceOf,
- SetOf,
- UTF8String,
-)
-from .crl import CertificateList
-from .x509 import (
- Attributes,
- CertificatePolicies,
- GeneralName,
- GeneralNames,
-)
-
-
-# The structures in this file are based on https://tools.ietf.org/html/rfc3161,
-# https://tools.ietf.org/html/rfc4998, https://tools.ietf.org/html/rfc5544,
-# https://tools.ietf.org/html/rfc5035, https://tools.ietf.org/html/rfc2634
-
-class Version(Integer):
- _map = {
- 0: 'v0',
- 1: 'v1',
- 2: 'v2',
- 3: 'v3',
- 4: 'v4',
- 5: 'v5',
- }
-
-
-class MessageImprint(Sequence):
- _fields = [
- ('hash_algorithm', DigestAlgorithm),
- ('hashed_message', OctetString),
- ]
-
-
-class Accuracy(Sequence):
- _fields = [
- ('seconds', Integer, {'optional': True}),
- ('millis', Integer, {'implicit': 0, 'optional': True}),
- ('micros', Integer, {'implicit': 1, 'optional': True}),
- ]
-
-
-class Extension(Sequence):
- _fields = [
- ('extn_id', ObjectIdentifier),
- ('critical', Boolean, {'default': False}),
- ('extn_value', OctetString),
- ]
-
-
-class Extensions(SequenceOf):
- _child_spec = Extension
-
-
-class TSTInfo(Sequence):
- _fields = [
- ('version', Version),
- ('policy', ObjectIdentifier),
- ('message_imprint', MessageImprint),
- ('serial_number', Integer),
- ('gen_time', GeneralizedTime),
- ('accuracy', Accuracy, {'optional': True}),
- ('ordering', Boolean, {'default': False}),
- ('nonce', Integer, {'optional': True}),
- ('tsa', GeneralName, {'explicit': 0, 'optional': True}),
- ('extensions', Extensions, {'implicit': 1, 'optional': True}),
- ]
-
-
-class TimeStampReq(Sequence):
- _fields = [
- ('version', Version),
- ('message_imprint', MessageImprint),
- ('req_policy', ObjectIdentifier, {'optional': True}),
- ('nonce', Integer, {'optional': True}),
- ('cert_req', Boolean, {'default': False}),
- ('extensions', Extensions, {'implicit': 0, 'optional': True}),
- ]
-
-
-class PKIStatus(Integer):
- _map = {
- 0: 'granted',
- 1: 'granted_with_mods',
- 2: 'rejection',
- 3: 'waiting',
- 4: 'revocation_warning',
- 5: 'revocation_notification',
- }
-
-
-class PKIFreeText(SequenceOf):
- _child_spec = UTF8String
-
-
-class PKIFailureInfo(BitString):
- _map = {
- 0: 'bad_alg',
- 2: 'bad_request',
- 5: 'bad_data_format',
- 14: 'time_not_available',
- 15: 'unaccepted_policy',
- 16: 'unaccepted_extensions',
- 17: 'add_info_not_available',
- 25: 'system_failure',
- }
-
-
-class PKIStatusInfo(Sequence):
- _fields = [
- ('status', PKIStatus),
- ('status_string', PKIFreeText, {'optional': True}),
- ('fail_info', PKIFailureInfo, {'optional': True}),
- ]
-
-
-class TimeStampResp(Sequence):
- _fields = [
- ('status', PKIStatusInfo),
- ('time_stamp_token', ContentInfo),
- ]
-
-
-class MetaData(Sequence):
- _fields = [
- ('hash_protected', Boolean),
- ('file_name', UTF8String, {'optional': True}),
- ('media_type', IA5String, {'optional': True}),
- ('other_meta_data', Attributes, {'optional': True}),
- ]
-
-
-class TimeStampAndCRL(SequenceOf):
- _fields = [
- ('time_stamp', EncapsulatedContentInfo),
- ('crl', CertificateList, {'optional': True}),
- ]
-
-
-class TimeStampTokenEvidence(SequenceOf):
- _child_spec = TimeStampAndCRL
-
-
-class DigestAlgorithms(SequenceOf):
- _child_spec = DigestAlgorithm
-
-
-class EncryptionInfo(Sequence):
- _fields = [
- ('encryption_info_type', ObjectIdentifier),
- ('encryption_info_value', Any),
- ]
-
-
-class PartialHashtree(SequenceOf):
- _child_spec = OctetString
-
-
-class PartialHashtrees(SequenceOf):
- _child_spec = PartialHashtree
-
-
-class ArchiveTimeStamp(Sequence):
- _fields = [
- ('digest_algorithm', DigestAlgorithm, {'implicit': 0, 'optional': True}),
- ('attributes', Attributes, {'implicit': 1, 'optional': True}),
- ('reduced_hashtree', PartialHashtrees, {'implicit': 2, 'optional': True}),
- ('time_stamp', ContentInfo),
- ]
-
-
-class ArchiveTimeStampSequence(SequenceOf):
- _child_spec = ArchiveTimeStamp
-
-
-class EvidenceRecord(Sequence):
- _fields = [
- ('version', Version),
- ('digest_algorithms', DigestAlgorithms),
- ('crypto_infos', Attributes, {'implicit': 0, 'optional': True}),
- ('encryption_info', EncryptionInfo, {'implicit': 1, 'optional': True}),
- ('archive_time_stamp_sequence', ArchiveTimeStampSequence),
- ]
-
-
-class OtherEvidence(Sequence):
- _fields = [
- ('oe_type', ObjectIdentifier),
- ('oe_value', Any),
- ]
-
-
-class Evidence(Choice):
- _alternatives = [
- ('tst_evidence', TimeStampTokenEvidence, {'implicit': 0}),
- ('ers_evidence', EvidenceRecord, {'implicit': 1}),
- ('other_evidence', OtherEvidence, {'implicit': 2}),
- ]
-
-
-class TimeStampedData(Sequence):
- _fields = [
- ('version', Version),
- ('data_uri', IA5String, {'optional': True}),
- ('meta_data', MetaData, {'optional': True}),
- ('content', OctetString, {'optional': True}),
- ('temporal_evidence', Evidence),
- ]
-
-
-class IssuerSerial(Sequence):
- _fields = [
- ('issuer', GeneralNames),
- ('serial_number', Integer),
- ]
-
-
-class ESSCertID(Sequence):
- _fields = [
- ('cert_hash', OctetString),
- ('issuer_serial', IssuerSerial, {'optional': True}),
- ]
-
-
-class ESSCertIDs(SequenceOf):
- _child_spec = ESSCertID
-
-
-class SigningCertificate(Sequence):
- _fields = [
- ('certs', ESSCertIDs),
- ('policies', CertificatePolicies, {'optional': True}),
- ]
-
-
-class SetOfSigningCertificates(SetOf):
- _child_spec = SigningCertificate
-
-
-class ESSCertIDv2(Sequence):
- _fields = [
- ('hash_algorithm', DigestAlgorithm, {'default': {'algorithm': 'sha256'}}),
- ('cert_hash', OctetString),
- ('issuer_serial', IssuerSerial, {'optional': True}),
- ]
-
-
-class ESSCertIDv2s(SequenceOf):
- _child_spec = ESSCertIDv2
-
-
-class SigningCertificateV2(Sequence):
- _fields = [
- ('certs', ESSCertIDv2s),
- ('policies', CertificatePolicies, {'optional': True}),
- ]
-
-
-class SetOfSigningCertificatesV2(SetOf):
- _child_spec = SigningCertificateV2
-
-
-EncapsulatedContentInfo._oid_specs['tst_info'] = TSTInfo
-EncapsulatedContentInfo._oid_specs['timestamped_data'] = TimeStampedData
-ContentInfo._oid_specs['timestamped_data'] = TimeStampedData
-ContentType._map['1.2.840.113549.1.9.16.1.4'] = 'tst_info'
-ContentType._map['1.2.840.113549.1.9.16.1.31'] = 'timestamped_data'
-CMSAttributeType._map['1.2.840.113549.1.9.16.2.12'] = 'signing_certificate'
-CMSAttribute._oid_specs['signing_certificate'] = SetOfSigningCertificates
-CMSAttributeType._map['1.2.840.113549.1.9.16.2.47'] = 'signing_certificate_v2'
-CMSAttribute._oid_specs['signing_certificate_v2'] = SetOfSigningCertificatesV2
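
A short sketch of reading an RFC 3161 response with the classes above. resp_der is a hypothetical DER-encoded TimeStampResp; the nested field names ('content', 'encap_content_info') follow asn1crypto's cms definitions, so treat this as an assumption rather than a verified path.

from asn1crypto import tsp

response = tsp.TimeStampResp.load(resp_der)  # resp_der: hypothetical DER bytes
status = response['status']['status'].native
if status in ('granted', 'granted_with_mods'):
    token = response['time_stamp_token']               # a cms.ContentInfo
    signed_data = token['content']                      # cms.SignedData
    tst_info = signed_data['encap_content_info']['content'].parsed
    print(status, tst_info['gen_time'].native, tst_info['serial_number'].native)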
diff --git a/functions/source/CreateSSHKey/asn1crypto/util.py b/functions/source/CreateSSHKey/asn1crypto/util.py
deleted file mode 100644
index 2e55ef8..0000000
--- a/functions/source/CreateSSHKey/asn1crypto/util.py
+++ /dev/null
@@ -1,712 +0,0 @@
-# coding: utf-8
-
-"""
-Miscellaneous data helpers, including functions for converting integers to and
-from bytes and UTC timezone. Exports the following items:
-
- - OrderedDict()
- - int_from_bytes()
- - int_to_bytes()
- - timezone.utc
- - inet_ntop()
- - inet_pton()
- - uri_to_iri()
- - iri_to_uri()
-"""
-
-from __future__ import unicode_literals, division, absolute_import, print_function
-
-import math
-import sys
-from datetime import datetime, date, time
-
-from ._errors import unwrap
-from ._iri import iri_to_uri, uri_to_iri # noqa
-from ._ordereddict import OrderedDict # noqa
-from ._types import type_name
-
-if sys.platform == 'win32':
- from ._inet import inet_ntop, inet_pton
-else:
- from socket import inet_ntop, inet_pton # noqa
-
-
-# Python 2
-if sys.version_info <= (3,):
-
- from datetime import timedelta, tzinfo
-
- py2 = True
-
- def int_to_bytes(value, signed=False, width=None):
- """
- Converts an integer to a byte string
-
- :param value:
- The integer to convert
-
- :param signed:
- If the byte string should be encoded using two's complement
-
- :param width:
- None == auto, otherwise an integer of the byte width for the return
- value
-
- :return:
- A byte string
- """
-
- # Handle negatives in two's complement
- is_neg = False
- if signed and value < 0:
- is_neg = True
- bits = int(math.ceil(len('%x' % abs(value)) / 2.0) * 8)
- value = (value + (1 << bits)) % (1 << bits)
-
- hex_str = '%x' % value
- if len(hex_str) & 1:
- hex_str = '0' + hex_str
-
- output = hex_str.decode('hex')
-
- if signed and not is_neg and ord(output[0:1]) & 0x80:
- output = b'\x00' + output
-
- if width is not None:
- if is_neg:
- pad_char = b'\xFF'
- else:
- pad_char = b'\x00'
- output = (pad_char * (width - len(output))) + output
- elif is_neg and ord(output[0:1]) & 0x80 == 0:
- output = b'\xFF' + output
-
- return output
-
- def int_from_bytes(value, signed=False):
- """
- Converts a byte string to an integer
-
- :param value:
- The byte string to convert
-
- :param signed:
- If the byte string should be interpreted using two's complement
-
- :return:
- An integer
- """
-
- if value == b'':
- return 0
-
- num = long(value.encode("hex"), 16) # noqa
-
- if not signed:
- return num
-
- # Check for sign bit and handle two's complement
- if ord(value[0:1]) & 0x80:
- bit_len = len(value) * 8
- return num - (1 << bit_len)
-
- return num
-
- class utc(tzinfo): # noqa
-
- def tzname(self, _):
- return b'UTC+00:00'
-
- def utcoffset(self, _):
- return timedelta(0)
-
- def dst(self, _):
- return timedelta(0)
-
- class timezone(): # noqa
-
- utc = utc()
-
-
-# Python 3
-else:
-
- from datetime import timezone # noqa
-
- py2 = False
-
- def int_to_bytes(value, signed=False, width=None):
- """
- Converts an integer to a byte string
-
- :param value:
- The integer to convert
-
- :param signed:
- If the byte string should be encoded using two's complement
-
- :param width:
- None == auto, otherwise an integer of the byte width for the return
- value
-
- :return:
- A byte string
- """
-
- if width is None:
- if signed:
- if value < 0:
- bits_required = abs(value + 1).bit_length()
- else:
- bits_required = value.bit_length()
- if bits_required % 8 == 0:
- bits_required += 1
- else:
- bits_required = value.bit_length()
- width = math.ceil(bits_required / 8) or 1
- return value.to_bytes(width, byteorder='big', signed=signed)
-
- def int_from_bytes(value, signed=False):
- """
- Converts a byte string to an integer
-
- :param value:
- The byte string to convert
-
- :param signed:
- If the byte string should be interpreted using two's complement
-
- :return:
- An integer
- """
-
- return int.from_bytes(value, 'big', signed=signed)
-
-
-_DAYS_PER_MONTH_YEAR_0 = {
- 1: 31,
- 2: 29, # Year 0 was a leap year
- 3: 31,
- 4: 30,
- 5: 31,
- 6: 30,
- 7: 31,
- 8: 31,
- 9: 30,
- 10: 31,
- 11: 30,
- 12: 31
-}
-
-
-class extended_date(object):
- """
- A datetime.date-like object that can represent the year 0. This is just
- to handle 0000-01-01 found in some certificates.
- """
-
- year = None
- month = None
- day = None
-
- def __init__(self, year, month, day):
- """
- :param year:
- The integer 0
-
- :param month:
- An integer from 1 to 12
-
- :param day:
- An integer from 1 to 31
- """
-
- if year != 0:
- raise ValueError('year must be 0')
-
- if month < 1 or month > 12:
- raise ValueError('month is out of range')
-
- if day < 0 or day > _DAYS_PER_MONTH_YEAR_0[month]:
- raise ValueError('day is out of range')
-
- self.year = year
- self.month = month
- self.day = day
-
- def _format(self, format):
- """
- Performs strftime(), always returning a unicode string
-
- :param format:
- A strftime() format string
-
- :return:
- A unicode string of the formatted date
- """
-
- format = format.replace('%Y', '0000')
- # Year 0 is 1BC and a leap year. Leap years repeat themselves
- # every 28 years. Because of adjustments and the proleptic gregorian
- # calendar, the simplest way to format is to substitute year 2000.
- temp = date(2000, self.month, self.day)
- if '%c' in format:
- c_out = temp.strftime('%c')
- # Handle full years
- c_out = c_out.replace('2000', '0000')
- c_out = c_out.replace('%', '%%')
- format = format.replace('%c', c_out)
- if '%x' in format:
- x_out = temp.strftime('%x')
- # Handle formats such as 08/16/2000 or 16.08.2000
- x_out = x_out.replace('2000', '0000')
- x_out = x_out.replace('%', '%%')
- format = format.replace('%x', x_out)
- return temp.strftime(format)
-
- def isoformat(self):
- """
- Formats the date as %Y-%m-%d
-
- :return:
- The date formatted to %Y-%m-%d as a unicode string in Python 3
- and a byte string in Python 2
- """
-
- return self.strftime('0000-%m-%d')
-
- def strftime(self, format):
- """
- Formats the date using strftime()
-
- :param format:
- The strftime() format string
-
- :return:
- The formatted date as a unicode string in Python 3 and a byte
- string in Python 2
- """
-
- output = self._format(format)
- if py2:
- return output.encode('utf-8')
- return output
-
- def replace(self, year=None, month=None, day=None):
- """
- Returns a new datetime.date or asn1crypto.util.extended_date
- object with the specified components replaced
-
- :return:
- A datetime.date or asn1crypto.util.extended_date object
- """
-
- if year is None:
- year = self.year
- if month is None:
- month = self.month
- if day is None:
- day = self.day
-
- if year > 0:
- cls = date
- else:
- cls = extended_date
-
- return cls(
- year,
- month,
- day
- )
-
- def __str__(self):
- if py2:
- return self.__bytes__()
- else:
- return self.__unicode__()
-
- def __bytes__(self):
- return self.__unicode__().encode('utf-8')
-
- def __unicode__(self):
- return self._format('%Y-%m-%d')
-
- def __eq__(self, other):
- if not isinstance(other, self.__class__):
- return False
- return self.__cmp__(other) == 0
-
- def __ne__(self, other):
- return not self.__eq__(other)
-
- def _comparison_error(self, other):
- raise TypeError(unwrap(
- '''
- An asn1crypto.util.extended_date object can only be compared to
- an asn1crypto.util.extended_date or datetime.date object, not %s
- ''',
- type_name(other)
- ))
-
- def __cmp__(self, other):
- if isinstance(other, date):
- return -1
-
- if not isinstance(other, self.__class__):
- self._comparison_error(other)
-
- st = (
- self.year,
- self.month,
- self.day
- )
- ot = (
- other.year,
- other.month,
- other.day
- )
-
- if st < ot:
- return -1
- if st > ot:
- return 1
- return 0
-
- def __lt__(self, other):
- return self.__cmp__(other) < 0
-
- def __le__(self, other):
- return self.__cmp__(other) <= 0
-
- def __gt__(self, other):
- return self.__cmp__(other) > 0
-
- def __ge__(self, other):
- return self.__cmp__(other) >= 0
-
-
-class extended_datetime(object):
- """
- A datetime.datetime-like object that can represent the year 0. This is just
- to handle 0000-01-01 found in some certificates.
- """
-
- year = None
- month = None
- day = None
- hour = None
- minute = None
- second = None
- microsecond = None
- tzinfo = None
-
- def __init__(self, year, month, day, hour=0, minute=0, second=0, microsecond=0, tzinfo=None):
- """
- :param year:
- The integer 0
-
- :param month:
- An integer from 1 to 12
-
- :param day:
- An integer from 1 to 31
-
- :param hour:
- An integer from 0 to 23
-
- :param minute:
- An integer from 0 to 59
-
- :param second:
- An integer from 0 to 59
-
- :param microsecond:
- An integer from 0 to 999999
- """
-
- if year != 0:
- raise ValueError('year must be 0')
-
- if month < 1 or month > 12:
- raise ValueError('month is out of range')
-
- if day < 0 or day > _DAYS_PER_MONTH_YEAR_0[month]:
- raise ValueError('day is out of range')
-
- if hour < 0 or hour > 23:
- raise ValueError('hour is out of range')
-
- if minute < 0 or minute > 59:
- raise ValueError('minute is out of range')
-
- if second < 0 or second > 59:
- raise ValueError('second is out of range')
-
- if microsecond < 0 or microsecond > 999999:
- raise ValueError('microsecond is out of range')
-
- self.year = year
- self.month = month
- self.day = day
- self.hour = hour
- self.minute = minute
- self.second = second
- self.microsecond = microsecond
- self.tzinfo = tzinfo
-
- def date(self):
- """
- :return:
- An asn1crypto.util.extended_date of the date
- """
-
- return extended_date(self.year, self.month, self.day)
-
- def time(self):
- """
- :return:
- A datetime.time object of the time
- """
-
- return time(self.hour, self.minute, self.second, self.microsecond, self.tzinfo)
-
- def utcoffset(self):
- """
- :return:
- None or a datetime.timedelta() of the offset from UTC
- """
-
- if self.tzinfo is None:
- return None
- return self.tzinfo.utcoffset(self.replace(year=2000))
-
- def dst(self):
- """
- :return:
- None or a datetime.timedelta() of the daylight savings time offset
- """
-
- if self.tzinfo is None:
- return None
- return self.tzinfo.dst(self.replace(year=2000))
-
- def tzname(self):
- """
- :return:
- None or the name of the timezone as a unicode string in Python 3
- and a byte string in Python 2
- """
-
- if self.tzinfo is None:
- return None
- return self.tzinfo.tzname(self.replace(year=2000))
-
- def _format(self, format):
- """
- Performs strftime(), always returning a unicode string
-
- :param format:
- A strftime() format string
-
- :return:
- A unicode string of the formatted datetime
- """
-
- format = format.replace('%Y', '0000')
- # Year 0 is 1BC and a leap year. Leap years repeat themselves
- # every 28 years. Because of adjustments and the proleptic gregorian
- # calendar, the simplest way to format is to substitute year 2000.
- temp = datetime(
- 2000,
- self.month,
- self.day,
- self.hour,
- self.minute,
- self.second,
- self.microsecond,
- self.tzinfo
- )
- if '%c' in format:
- c_out = temp.strftime('%c')
- # Handle full years
- c_out = c_out.replace('2000', '0000')
- c_out = c_out.replace('%', '%%')
- format = format.replace('%c', c_out)
- if '%x' in format:
- x_out = temp.strftime('%x')
- # Handle formats such as 08/16/2000 or 16.08.2000
- x_out = x_out.replace('2000', '0000')
- x_out = x_out.replace('%', '%%')
- format = format.replace('%x', x_out)
- return temp.strftime(format)
-
- def isoformat(self, sep='T'):
- """
- Formats the date as "%Y-%m-%d %H:%M:%S" with the sep param between the
- date and time portions
-
-        :param sep:
- A single character of the separator to place between the date and
- time
-
- :return:
- The formatted datetime as a unicode string in Python 3 and a byte
- string in Python 2
- """
-
- if self.microsecond == 0:
- return self.strftime('0000-%%m-%%d%s%%H:%%M:%%S' % sep)
- return self.strftime('0000-%%m-%%d%s%%H:%%M:%%S.%%f' % sep)
-
- def strftime(self, format):
- """
- Formats the date using strftime()
-
- :param format:
- The strftime() format string
-
- :return:
- The formatted date as a unicode string in Python 3 and a byte
- string in Python 2
- """
-
- output = self._format(format)
- if py2:
- return output.encode('utf-8')
- return output
-
- def replace(self, year=None, month=None, day=None, hour=None, minute=None,
- second=None, microsecond=None, tzinfo=None):
- """
- Returns a new datetime.datetime or asn1crypto.util.extended_datetime
- object with the specified components replaced
-
- :return:
- A datetime.datetime or asn1crypto.util.extended_datetime object
- """
-
- if year is None:
- year = self.year
- if month is None:
- month = self.month
- if day is None:
- day = self.day
- if hour is None:
- hour = self.hour
- if minute is None:
- minute = self.minute
- if second is None:
- second = self.second
- if microsecond is None:
- microsecond = self.microsecond
- if tzinfo is None:
- tzinfo = self.tzinfo
-
- if year > 0:
- cls = datetime
- else:
- cls = extended_datetime
-
- return cls(
- year,
- month,
- day,
- hour,
- minute,
- second,
- microsecond,
- tzinfo
- )
-
- def __str__(self):
- if py2:
- return self.__bytes__()
- else:
- return self.__unicode__()
-
- def __bytes__(self):
- return self.__unicode__().encode('utf-8')
-
- def __unicode__(self):
- format = '%Y-%m-%d %H:%M:%S'
- if self.microsecond != 0:
- format += '.%f'
- return self._format(format)
-
- def __eq__(self, other):
- if not isinstance(other, self.__class__):
- return False
- return self.__cmp__(other) == 0
-
- def __ne__(self, other):
- return not self.__eq__(other)
-
- def _comparison_error(self, other):
- """
- Raises a TypeError about the other object not being suitable for
- comparison
-
- :param other:
- The object being compared to
- """
-
- raise TypeError(unwrap(
- '''
- An asn1crypto.util.extended_datetime object can only be compared to
- an asn1crypto.util.extended_datetime or datetime.datetime object,
- not %s
- ''',
- type_name(other)
- ))
-
- def __cmp__(self, other):
- so = self.utcoffset()
- oo = other.utcoffset()
-
- if (so is not None and oo is None) or (so is None and oo is not None):
- raise TypeError("can't compare offset-naive and offset-aware datetimes")
-
- if isinstance(other, datetime):
- return -1
-
- if not isinstance(other, self.__class__):
- self._comparison_error(other)
-
- st = (
- self.year,
- self.month,
- self.day,
- self.hour,
- self.minute,
- self.second,
- self.microsecond,
- so
- )
- ot = (
- other.year,
- other.month,
- other.day,
- other.hour,
- other.minute,
- other.second,
- other.microsecond,
- oo
- )
-
- if st < ot:
- return -1
- if st > ot:
- return 1
- return 0
-
- def __lt__(self, other):
- return self.__cmp__(other) < 0
-
- def __le__(self, other):
- return self.__cmp__(other) <= 0
-
- def __gt__(self, other):
- return self.__cmp__(other) > 0
-
- def __ge__(self, other):
- return self.__cmp__(other) >= 0
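
The integer helpers documented above can be sanity-checked with a quick round trip (Python 3 branch, assuming the standalone asn1crypto package):

from asn1crypto.util import int_from_bytes, int_to_bytes

assert int_to_bytes(65537) == b'\x01\x00\x01'
assert int_from_bytes(b'\x01\x00\x01') == 65537

# signed=True uses two's complement in both directions
assert int_to_bytes(-1, signed=True) == b'\xff'
assert int_from_bytes(b'\xff', signed=True) == -1

# an explicit width left-pads the result
assert int_to_bytes(1, width=4) == b'\x00\x00\x00\x01'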
diff --git a/functions/source/CreateSSHKey/asn1crypto/version.py b/functions/source/CreateSSHKey/asn1crypto/version.py
deleted file mode 100644
index 31da728..0000000
--- a/functions/source/CreateSSHKey/asn1crypto/version.py
+++ /dev/null
@@ -1,6 +0,0 @@
-# coding: utf-8
-from __future__ import unicode_literals, division, absolute_import, print_function
-
-
-__version__ = '0.23.0'
-__version_info__ = (0, 23, 0)
diff --git a/functions/source/CreateSSHKey/asn1crypto/x509.py b/functions/source/CreateSSHKey/asn1crypto/x509.py
deleted file mode 100644
index 25c1a4c..0000000
--- a/functions/source/CreateSSHKey/asn1crypto/x509.py
+++ /dev/null
@@ -1,2728 +0,0 @@
-# coding: utf-8
-
-"""
-ASN.1 type classes for X.509 certificates. Exports the following items:
-
- - Attributes()
- - Certificate()
- - Extensions()
- - GeneralName()
- - GeneralNames()
- - Name()
-
-Other type classes are defined that help compose the types listed above.
-"""
-
-from __future__ import unicode_literals, division, absolute_import, print_function
-
-from contextlib import contextmanager
-from encodings import idna # noqa
-import hashlib
-import re
-import socket
-import stringprep
-import sys
-import unicodedata
-
-from ._errors import unwrap
-from ._iri import iri_to_uri, uri_to_iri
-from ._ordereddict import OrderedDict
-from ._types import type_name, str_cls, bytes_to_list
-from .algos import AlgorithmIdentifier, SignedDigestAlgorithm
-from .core import (
- Any,
- BitString,
- BMPString,
- Boolean,
- Choice,
- Concat,
- GeneralizedTime,
- GeneralString,
- IA5String,
- Integer,
- Null,
- NumericString,
- ObjectIdentifier,
- OctetBitString,
- OctetString,
- ParsableOctetString,
- PrintableString,
- Sequence,
- SequenceOf,
- Set,
- SetOf,
- TeletexString,
- UniversalString,
- UTCTime,
- UTF8String,
- VisibleString,
- VOID,
-)
-from .keys import PublicKeyInfo
-from .util import int_to_bytes, int_from_bytes, inet_ntop, inet_pton
-
-
-# The structures in this file are taken from https://tools.ietf.org/html/rfc5280
-# and a few other supplementary sources, mostly due to extra supported
-# extension and name OIDs
-
-
-class DNSName(IA5String):
-
- _encoding = 'idna'
- _bad_tag = 19
-
- def __ne__(self, other):
- return not self == other
-
- def __eq__(self, other):
- """
- Equality as defined by https://tools.ietf.org/html/rfc5280#section-7.2
-
- :param other:
- Another DNSName object
-
- :return:
- A boolean
- """
-
- if not isinstance(other, DNSName):
- return False
-
- return self.__unicode__().lower() == other.__unicode__().lower()
-
- def set(self, value):
- """
- Sets the value of the DNS name
-
- :param value:
- A unicode string
- """
-
- if not isinstance(value, str_cls):
- raise TypeError(unwrap(
- '''
- %s value must be a unicode string, not %s
- ''',
- type_name(self),
- type_name(value)
- ))
-
- if value.startswith('.'):
- encoded_value = b'.' + value[1:].encode(self._encoding)
- else:
- encoded_value = value.encode(self._encoding)
-
- self._unicode = value
- self.contents = encoded_value
- self._header = None
- if self._trailer != b'':
- self._trailer = b''
-
-
-class URI(IA5String):
-
- def set(self, value):
- """
- Sets the value of the string
-
- :param value:
- A unicode string
- """
-
- if not isinstance(value, str_cls):
- raise TypeError(unwrap(
- '''
- %s value must be a unicode string, not %s
- ''',
- type_name(self),
- type_name(value)
- ))
-
- self._unicode = value
- self.contents = iri_to_uri(value)
- self._header = None
- if self._trailer != b'':
- self._trailer = b''
-
- def __ne__(self, other):
- return not self == other
-
- def __eq__(self, other):
- """
- Equality as defined by https://tools.ietf.org/html/rfc5280#section-7.4
-
- :param other:
- Another URI object
-
- :return:
- A boolean
- """
-
- if not isinstance(other, URI):
- return False
-
- return iri_to_uri(self.native) == iri_to_uri(other.native)
-
- def __unicode__(self):
- """
- :return:
- A unicode string
- """
-
- if self.contents is None:
- return ''
- if self._unicode is None:
- self._unicode = uri_to_iri(self._merge_chunks())
- return self._unicode
-
-
-class EmailAddress(IA5String):
-
- _contents = None
-
- # If the value has gone through the .set() method, thus normalizing it
- _normalized = False
-
- @property
- def contents(self):
- """
- :return:
- A byte string of the DER-encoded contents of the sequence
- """
-
- return self._contents
-
- @contents.setter
- def contents(self, value):
- """
- :param value:
- A byte string of the DER-encoded contents of the sequence
- """
-
- self._normalized = False
- self._contents = value
-
- def set(self, value):
- """
- Sets the value of the string
-
- :param value:
- A unicode string
- """
-
- if not isinstance(value, str_cls):
- raise TypeError(unwrap(
- '''
- %s value must be a unicode string, not %s
- ''',
- type_name(self),
- type_name(value)
- ))
-
- if value.find('@') != -1:
- mailbox, hostname = value.rsplit('@', 1)
- encoded_value = mailbox.encode('ascii') + b'@' + hostname.encode('idna')
- else:
- encoded_value = value.encode('ascii')
-
- self._normalized = True
- self._unicode = value
- self.contents = encoded_value
- self._header = None
- if self._trailer != b'':
- self._trailer = b''
-
- def __unicode__(self):
- """
- :return:
- A unicode string
- """
-
- if self._unicode is None:
- contents = self._merge_chunks()
- if contents.find(b'@') == -1:
- self._unicode = contents.decode('ascii')
- else:
- mailbox, hostname = contents.rsplit(b'@', 1)
- self._unicode = mailbox.decode('ascii') + '@' + hostname.decode('idna')
- return self._unicode
-
- def __ne__(self, other):
- return not self == other
-
- def __eq__(self, other):
- """
- Equality as defined by https://tools.ietf.org/html/rfc5280#section-7.5
-
- :param other:
- Another EmailAddress object
-
- :return:
- A boolean
- """
-
- if not isinstance(other, EmailAddress):
- return False
-
- if not self._normalized:
- self.set(self.native)
- if not other._normalized:
- other.set(other.native)
-
- if self._contents.find(b'@') == -1 or other._contents.find(b'@') == -1:
- return self._contents == other._contents
-
- other_mailbox, other_hostname = other._contents.rsplit(b'@', 1)
- mailbox, hostname = self._contents.rsplit(b'@', 1)
-
- if mailbox != other_mailbox:
- return False
-
- if hostname.lower() != other_hostname.lower():
- return False
-
- return True
-
-
-class IPAddress(OctetString):
- def parse(self, spec=None, spec_params=None):
- """
- This method is not applicable to IP addresses
- """
-
- raise ValueError(unwrap(
- '''
- IP address values can not be parsed
- '''
- ))
-
- def set(self, value):
- """
- Sets the value of the object
-
- :param value:
- A unicode string containing an IPv4 address, IPv4 address with CIDR,
- an IPv6 address or IPv6 address with CIDR
- """
-
- if not isinstance(value, str_cls):
- raise TypeError(unwrap(
- '''
- %s value must be a unicode string, not %s
- ''',
- type_name(self),
- type_name(value)
- ))
-
- original_value = value
-
- has_cidr = value.find('/') != -1
- cidr = 0
- if has_cidr:
- parts = value.split('/', 1)
- value = parts[0]
- cidr = int(parts[1])
- if cidr < 0:
- raise ValueError(unwrap(
- '''
- %s value contains a CIDR range less than 0
- ''',
- type_name(self)
- ))
-
- if value.find(':') != -1:
- family = socket.AF_INET6
- if cidr > 128:
- raise ValueError(unwrap(
- '''
- %s value contains a CIDR range bigger than 128, the maximum
- value for an IPv6 address
- ''',
- type_name(self)
- ))
- cidr_size = 128
- else:
- family = socket.AF_INET
- if cidr > 32:
- raise ValueError(unwrap(
- '''
- %s value contains a CIDR range bigger than 32, the maximum
- value for an IPv4 address
- ''',
- type_name(self)
- ))
- cidr_size = 32
-
- cidr_bytes = b''
- if has_cidr:
- cidr_mask = '1' * cidr
- cidr_mask += '0' * (cidr_size - len(cidr_mask))
- cidr_bytes = int_to_bytes(int(cidr_mask, 2))
- cidr_bytes = (b'\x00' * ((cidr_size // 8) - len(cidr_bytes))) + cidr_bytes
-
- self._native = original_value
- self.contents = inet_pton(family, value) + cidr_bytes
- self._bytes = self.contents
- self._header = None
- if self._trailer != b'':
- self._trailer = b''
-
- @property
- def native(self):
- """
-        The native Python datatype representation of this value
-
- :return:
- A unicode string or None
- """
-
- if self.contents is None:
- return None
-
- if self._native is None:
- byte_string = self.__bytes__()
- byte_len = len(byte_string)
- cidr_int = None
- if byte_len in set([32, 16]):
- value = inet_ntop(socket.AF_INET6, byte_string[0:16])
- if byte_len > 16:
- cidr_int = int_from_bytes(byte_string[16:])
- elif byte_len in set([8, 4]):
- value = inet_ntop(socket.AF_INET, byte_string[0:4])
- if byte_len > 4:
- cidr_int = int_from_bytes(byte_string[4:])
- if cidr_int is not None:
- cidr_bits = '{0:b}'.format(cidr_int)
- cidr = len(cidr_bits.rstrip('0'))
- value = value + '/' + str_cls(cidr)
- self._native = value
- return self._native
-
- def __ne__(self, other):
- return not self == other
-
- def __eq__(self, other):
- """
- :param other:
- Another IPAddress object
-
- :return:
- A boolean
- """
-
- if not isinstance(other, IPAddress):
- return False
-
- return self.__bytes__() == other.__bytes__()
-
-
-class Attribute(Sequence):
- _fields = [
- ('type', ObjectIdentifier),
- ('values', SetOf, {'spec': Any}),
- ]
-
-
-class Attributes(SequenceOf):
- _child_spec = Attribute
-
-
-class KeyUsage(BitString):
- _map = {
- 0: 'digital_signature',
- 1: 'non_repudiation',
- 2: 'key_encipherment',
- 3: 'data_encipherment',
- 4: 'key_agreement',
- 5: 'key_cert_sign',
- 6: 'crl_sign',
- 7: 'encipher_only',
- 8: 'decipher_only',
- }
-
-
-class PrivateKeyUsagePeriod(Sequence):
- _fields = [
- ('not_before', GeneralizedTime, {'implicit': 0, 'optional': True}),
- ('not_after', GeneralizedTime, {'implicit': 1, 'optional': True}),
- ]
-
-
-class NotReallyTeletexString(TeletexString):
- """
- OpenSSL (and probably some other libraries) puts ISO-8859-1
- into TeletexString instead of ITU T.61. We use Windows-1252 when
- decoding since it is a superset of ISO-8859-1, and less likely to
- cause encoding issues, but we stay strict with encoding to prevent
- us from creating bad data.
- """
-
- _decoding_encoding = 'cp1252'
-
- def __unicode__(self):
- """
- :return:
- A unicode string
- """
-
- if self.contents is None:
- return ''
- if self._unicode is None:
- self._unicode = self._merge_chunks().decode(self._decoding_encoding)
- return self._unicode
-
-
-@contextmanager
-def strict_teletex():
- try:
- NotReallyTeletexString._decoding_encoding = 'teletex'
- yield
- finally:
- NotReallyTeletexString._decoding_encoding = 'cp1252'
-
-
-class DirectoryString(Choice):
- _alternatives = [
- ('teletex_string', NotReallyTeletexString),
- ('printable_string', PrintableString),
- ('universal_string', UniversalString),
- ('utf8_string', UTF8String),
- ('bmp_string', BMPString),
- # This is an invalid/bad alternative, but some broken certs use it
- ('ia5_string', IA5String),
- ]
-
-
-class NameType(ObjectIdentifier):
- _map = {
- '2.5.4.3': 'common_name',
- '2.5.4.4': 'surname',
- '2.5.4.5': 'serial_number',
- '2.5.4.6': 'country_name',
- '2.5.4.7': 'locality_name',
- '2.5.4.8': 'state_or_province_name',
- '2.5.4.9': 'street_address',
- '2.5.4.10': 'organization_name',
- '2.5.4.11': 'organizational_unit_name',
- '2.5.4.12': 'title',
- '2.5.4.15': 'business_category',
- '2.5.4.17': 'postal_code',
- '2.5.4.20': 'telephone_number',
- '2.5.4.41': 'name',
- '2.5.4.42': 'given_name',
- '2.5.4.43': 'initials',
- '2.5.4.44': 'generation_qualifier',
- '2.5.4.45': 'unique_identifier',
- '2.5.4.46': 'dn_qualifier',
- '2.5.4.65': 'pseudonym',
- '2.5.4.97': 'organization_identifier',
- # https://tools.ietf.org/html/rfc2985#page-26
- '1.2.840.113549.1.9.1': 'email_address',
- # Page 10 of https://cabforum.org/wp-content/uploads/EV-V1_5_5.pdf
- '1.3.6.1.4.1.311.60.2.1.1': 'incorporation_locality',
- '1.3.6.1.4.1.311.60.2.1.2': 'incorporation_state_or_province',
- '1.3.6.1.4.1.311.60.2.1.3': 'incorporation_country',
- # https://tools.ietf.org/html/rfc2247#section-4
- '0.9.2342.19200300.100.1.25': 'domain_component',
- # http://www.alvestrand.no/objectid/0.2.262.1.10.7.20.html
- '0.2.262.1.10.7.20': 'name_distinguisher',
- }
-
- # This order is largely based on observed order seen in EV certs from
- # Symantec and DigiCert. Some of the uncommon name-related fields are
- # just placed in what seems like a reasonable order.
- preferred_order = [
- 'incorporation_country',
- 'incorporation_state_or_province',
- 'incorporation_locality',
- 'business_category',
- 'serial_number',
- 'country_name',
- 'postal_code',
- 'state_or_province_name',
- 'locality_name',
- 'street_address',
- 'organization_name',
- 'organizational_unit_name',
- 'title',
- 'common_name',
- 'initials',
- 'generation_qualifier',
- 'surname',
- 'given_name',
- 'name',
- 'pseudonym',
- 'dn_qualifier',
- 'telephone_number',
- 'email_address',
- 'domain_component',
- 'name_distinguisher',
- 'organization_identifier',
- ]
-
- @classmethod
- def preferred_ordinal(cls, attr_name):
- """
- Returns an ordering value for a particular attribute key.
-
- Unrecognized attributes and OIDs will be sorted lexically at the end.
-
- :return:
- An orderable value.
-
- """
-
- attr_name = cls.map(attr_name)
- if attr_name in cls.preferred_order:
- ordinal = cls.preferred_order.index(attr_name)
- else:
- ordinal = len(cls.preferred_order)
-
- return (ordinal, attr_name)
-
- @property
- def human_friendly(self):
- """
- :return:
- A human-friendly unicode string to display to users
- """
-
- return {
- 'common_name': 'Common Name',
- 'surname': 'Surname',
- 'serial_number': 'Serial Number',
- 'country_name': 'Country',
- 'locality_name': 'Locality',
- 'state_or_province_name': 'State/Province',
- 'street_address': 'Street Address',
- 'organization_name': 'Organization',
- 'organizational_unit_name': 'Organizational Unit',
- 'title': 'Title',
- 'business_category': 'Business Category',
- 'postal_code': 'Postal Code',
- 'telephone_number': 'Telephone Number',
- 'name': 'Name',
- 'given_name': 'Given Name',
- 'initials': 'Initials',
- 'generation_qualifier': 'Generation Qualifier',
- 'unique_identifier': 'Unique Identifier',
- 'dn_qualifier': 'DN Qualifier',
- 'pseudonym': 'Pseudonym',
- 'email_address': 'Email Address',
- 'incorporation_locality': 'Incorporation Locality',
- 'incorporation_state_or_province': 'Incorporation State/Province',
- 'incorporation_country': 'Incorporation Country',
- 'domain_component': 'Domain Component',
- 'name_distinguisher': 'Name Distinguisher',
- 'organization_identifier': 'Organization Identifier',
- }.get(self.native, self.native)
-
-
-class NameTypeAndValue(Sequence):
- _fields = [
- ('type', NameType),
- ('value', Any),
- ]
-
- _oid_pair = ('type', 'value')
- _oid_specs = {
- 'common_name': DirectoryString,
- 'surname': DirectoryString,
- 'serial_number': DirectoryString,
- 'country_name': DirectoryString,
- 'locality_name': DirectoryString,
- 'state_or_province_name': DirectoryString,
- 'street_address': DirectoryString,
- 'organization_name': DirectoryString,
- 'organizational_unit_name': DirectoryString,
- 'title': DirectoryString,
- 'business_category': DirectoryString,
- 'postal_code': DirectoryString,
- 'telephone_number': PrintableString,
- 'name': DirectoryString,
- 'given_name': DirectoryString,
- 'initials': DirectoryString,
- 'generation_qualifier': DirectoryString,
- 'unique_identifier': OctetBitString,
- 'dn_qualifier': DirectoryString,
- 'pseudonym': DirectoryString,
- # https://tools.ietf.org/html/rfc2985#page-26
- 'email_address': EmailAddress,
- # Page 10 of https://cabforum.org/wp-content/uploads/EV-V1_5_5.pdf
- 'incorporation_locality': DirectoryString,
- 'incorporation_state_or_province': DirectoryString,
- 'incorporation_country': DirectoryString,
- 'domain_component': DNSName,
- 'name_distinguisher': DirectoryString,
- 'organization_identifier': DirectoryString,
- }
-
- _prepped = None
-
- @property
- def prepped_value(self):
- """
- Returns the value after being processed by the internationalized string
- preparation as specified by RFC 5280
-
- :return:
- A unicode string
- """
-
- if self._prepped is None:
- self._prepped = self._ldap_string_prep(self['value'].native)
- return self._prepped
-
- def __ne__(self, other):
- return not self == other
-
- def __eq__(self, other):
- """
- Equality as defined by https://tools.ietf.org/html/rfc5280#section-7.1
-
- :param other:
- Another NameTypeAndValue object
-
- :return:
- A boolean
- """
-
- if not isinstance(other, NameTypeAndValue):
- return False
-
- if other['type'].native != self['type'].native:
- return False
-
- return other.prepped_value == self.prepped_value
-
- def _ldap_string_prep(self, string):
- """
- Implements the internationalized string preparation algorithm from
- RFC 4518. https://tools.ietf.org/html/rfc4518#section-2
-
- :param string:
- A unicode string to prepare
-
- :return:
- A prepared unicode string, ready for comparison
- """
-
- # Map step
- string = re.sub('[\u00ad\u1806\u034f\u180b-\u180d\ufe0f-\uff00\ufffc]+', '', string)
- string = re.sub('[\u0009\u000a\u000b\u000c\u000d\u0085]', ' ', string)
- if sys.maxunicode == 0xffff:
- # Some installs of Python 2.7 don't support 8-digit unicode escape
- # ranges, so we have to break them into pieces
- # Original was: \U0001D173-\U0001D17A and \U000E0020-\U000E007F
- string = re.sub('\ud834[\udd73-\udd7a]|\udb40[\udc20-\udc7f]|\U000e0001', '', string)
- else:
- string = re.sub('[\U0001D173-\U0001D17A\U000E0020-\U000E007F\U000e0001]', '', string)
- string = re.sub(
- '[\u0000-\u0008\u000e-\u001f\u007f-\u0084\u0086-\u009f\u06dd\u070f\u180e\u200c-\u200f'
- '\u202a-\u202e\u2060-\u2063\u206a-\u206f\ufeff\ufff9-\ufffb]+',
- '',
- string
- )
- string = string.replace('\u200b', '')
- string = re.sub('[\u00a0\u1680\u2000-\u200a\u2028-\u2029\u202f\u205f\u3000]', ' ', string)
-
- string = ''.join(map(stringprep.map_table_b2, string))
-
- # Normalize step
- string = unicodedata.normalize('NFKC', string)
-
- # Prohibit step
- for char in string:
- if stringprep.in_table_a1(char):
- raise ValueError(unwrap(
- '''
- X.509 Name objects may not contain unassigned code points
- '''
- ))
-
- if stringprep.in_table_c8(char):
- raise ValueError(unwrap(
- '''
- X.509 Name objects may not contain change display or
-                    deprecated characters
- '''
- ))
-
- if stringprep.in_table_c3(char):
- raise ValueError(unwrap(
- '''
- X.509 Name objects may not contain private use characters
- '''
- ))
-
- if stringprep.in_table_c4(char):
- raise ValueError(unwrap(
- '''
- X.509 Name objects may not contain non-character code points
- '''
- ))
-
- if stringprep.in_table_c5(char):
- raise ValueError(unwrap(
- '''
- X.509 Name objects may not contain surrogate code points
- '''
- ))
-
- if char == '\ufffd':
- raise ValueError(unwrap(
- '''
- X.509 Name objects may not contain the replacement character
- '''
- ))
-
- # Check bidirectional step - here we ensure that we are not mixing
- # left-to-right and right-to-left text in the string
- has_r_and_al_cat = False
- has_l_cat = False
- for char in string:
- if stringprep.in_table_d1(char):
- has_r_and_al_cat = True
- elif stringprep.in_table_d2(char):
- has_l_cat = True
-
- if has_r_and_al_cat:
- first_is_r_and_al = stringprep.in_table_d1(string[0])
- last_is_r_and_al = stringprep.in_table_d1(string[-1])
-
- if has_l_cat or not first_is_r_and_al or not last_is_r_and_al:
- raise ValueError(unwrap(
- '''
- X.509 Name object contains a malformed bidirectional
- sequence
- '''
- ))
-
- # Insignificant space handling step
- string = ' ' + re.sub(' +', ' ', string).strip() + ' '
-
- return string
-
-
-class RelativeDistinguishedName(SetOf):
- _child_spec = NameTypeAndValue
-
- @property
- def hashable(self):
- """
- :return:
- A unicode string that can be used as a dict key or in a set
- """
-
- output = []
- values = self._get_values(self)
- for key in sorted(values.keys()):
- output.append('%s: %s' % (key, values[key]))
- # Unit separator is used here since the normalization process for
-        # values removes any such character, and the keys are all dotted integers
- # or under_score_words
- return '\x1F'.join(output)
-
- def __ne__(self, other):
- return not self == other
-
- def __eq__(self, other):
- """
- Equality as defined by https://tools.ietf.org/html/rfc5280#section-7.1
-
- :param other:
- Another RelativeDistinguishedName object
-
- :return:
- A boolean
- """
-
- if not isinstance(other, RelativeDistinguishedName):
- return False
-
- if len(self) != len(other):
- return False
-
- self_types = self._get_types(self)
- other_types = self._get_types(other)
-
- if self_types != other_types:
- return False
-
- self_values = self._get_values(self)
- other_values = self._get_values(other)
-
- for type_name_ in self_types:
- if self_values[type_name_] != other_values[type_name_]:
- return False
-
- return True
-
- def _get_types(self, rdn):
- """
- Returns a set of types contained in an RDN
-
- :param rdn:
- A RelativeDistinguishedName object
-
- :return:
- A set object with unicode strings of NameTypeAndValue type field
- values
- """
-
- return set([ntv['type'].native for ntv in rdn])
-
- def _get_values(self, rdn):
- """
- Returns a dict of prepped values contained in an RDN
-
- :param rdn:
- A RelativeDistinguishedName object
-
- :return:
- A dict object with unicode strings of NameTypeAndValue value field
- values that have been prepped for comparison
- """
-
- output = {}
- [output.update([(ntv['type'].native, ntv.prepped_value)]) for ntv in rdn]
- return output
-
-
-class RDNSequence(SequenceOf):
- _child_spec = RelativeDistinguishedName
-
- @property
- def hashable(self):
- """
- :return:
- A unicode string that can be used as a dict key or in a set
- """
-
- # Record separator is used here since the normalization process for
-        # values removes any such character, and the keys are all dotted integers
- # or under_score_words
- return '\x1E'.join(rdn.hashable for rdn in self)
-
- def __ne__(self, other):
- return not self == other
-
- def __eq__(self, other):
- """
- Equality as defined by https://tools.ietf.org/html/rfc5280#section-7.1
-
- :param other:
- Another RDNSequence object
-
- :return:
- A boolean
- """
-
- if not isinstance(other, RDNSequence):
- return False
-
- if len(self) != len(other):
- return False
-
- for index, self_rdn in enumerate(self):
- if other[index] != self_rdn:
- return False
-
- return True
-
-
-class Name(Choice):
- _alternatives = [
- ('', RDNSequence),
- ]
-
- _human_friendly = None
- _sha1 = None
- _sha256 = None
-
- @classmethod
- def build(cls, name_dict, use_printable=False):
- """
- Creates a Name object from a dict of unicode string keys and values.
- The keys should be from NameType._map, or a dotted-integer OID unicode
- string.
-
- :param name_dict:
- A dict of name information, e.g. {"common_name": "Will Bond",
- "country_name": "US", "organization": "Codex Non Sufficit LC"}
-
- :param use_printable:
- A bool - if PrintableString should be used for encoding instead of
- UTF8String. This is for backwards compatibility with old software.
-
- :return:
- An x509.Name object
- """
-
- rdns = []
- if not use_printable:
- encoding_name = 'utf8_string'
- encoding_class = UTF8String
- else:
- encoding_name = 'printable_string'
- encoding_class = PrintableString
-
- # Sort the attributes according to NameType.preferred_order
- name_dict = OrderedDict(
- sorted(
- name_dict.items(),
- key=lambda item: NameType.preferred_ordinal(item[0])
- )
- )
-
- for attribute_name, attribute_value in name_dict.items():
- attribute_name = NameType.map(attribute_name)
- if attribute_name == 'email_address':
- value = EmailAddress(attribute_value)
- elif attribute_name == 'domain_component':
- value = DNSName(attribute_value)
- elif attribute_name in set(['dn_qualifier', 'country_name', 'serial_number']):
- value = DirectoryString(
- name='printable_string',
- value=PrintableString(attribute_value)
- )
- else:
- value = DirectoryString(
- name=encoding_name,
- value=encoding_class(attribute_value)
- )
-
- rdns.append(RelativeDistinguishedName([
- NameTypeAndValue({
- 'type': attribute_name,
- 'value': value
- })
- ]))
-
- return cls(name='', value=RDNSequence(rdns))
-
- @property
- def hashable(self):
- """
- :return:
- A unicode string that can be used as a dict key or in a set
- """
-
- return self.chosen.hashable
-
- def __len__(self):
- return len(self.chosen)
-
- def __ne__(self, other):
- return not self == other
-
- def __eq__(self, other):
- """
- Equality as defined by https://tools.ietf.org/html/rfc5280#section-7.1
-
- :param other:
- Another Name object
-
- :return:
- A boolean
- """
-
- if not isinstance(other, Name):
- return False
- return self.chosen == other.chosen
-
- @property
- def native(self):
- if self._native is None:
- self._native = OrderedDict()
- for rdn in self.chosen.native:
- for type_val in rdn:
- field_name = type_val['type']
- if field_name in self._native:
- existing = self._native[field_name]
- if not isinstance(existing, list):
- existing = self._native[field_name] = [existing]
- existing.append(type_val['value'])
- else:
- self._native[field_name] = type_val['value']
- return self._native
-
- @property
- def human_friendly(self):
- """
- :return:
- A human-friendly unicode string containing the parts of the name
- """
-
- if self._human_friendly is None:
- data = OrderedDict()
- last_field = None
- for rdn in self.chosen:
- for type_val in rdn:
- field_name = type_val['type'].human_friendly
- last_field = field_name
- if field_name in data:
- data[field_name] = [data[field_name]]
- data[field_name].append(type_val['value'])
- else:
- data[field_name] = type_val['value']
- to_join = []
- keys = data.keys()
- if last_field == 'Country':
- keys = reversed(list(keys))
- for key in keys:
- value = data[key]
- native_value = self._recursive_humanize(value)
- to_join.append('%s: %s' % (key, native_value))
-
- has_comma = False
- for element in to_join:
- if element.find(',') != -1:
- has_comma = True
- break
-
- separator = ', ' if not has_comma else '; '
- self._human_friendly = separator.join(to_join[::-1])
-
- return self._human_friendly
-
- def _recursive_humanize(self, value):
- """
- Recursively serializes data compiled from the RDNSequence
-
- :param value:
- An Asn1Value object, or a list of Asn1Value objects
-
- :return:
- A unicode string
- """
-
- if isinstance(value, list):
- return ', '.join(
- reversed([self._recursive_humanize(sub_value) for sub_value in value])
- )
- return value.native
-
- @property
- def sha1(self):
- """
- :return:
- The SHA-1 hash of the DER-encoded bytes of this name
- """
-
- if self._sha1 is None:
- self._sha1 = hashlib.sha1(self.dump()).digest()
- return self._sha1
-
- @property
- def sha256(self):
- """
- :return:
- The SHA-256 hash of the DER-encoded bytes of this name
- """
-
- if self._sha256 is None:
- self._sha256 = hashlib.sha256(self.dump()).digest()
- return self._sha256
-
-
-class AnotherName(Sequence):
- _fields = [
- ('type_id', ObjectIdentifier),
- ('value', Any, {'explicit': 0}),
- ]
-
-
-class CountryName(Choice):
- class_ = 1
- tag = 1
-
- _alternatives = [
- ('x121_dcc_code', NumericString),
- ('iso_3166_alpha2_code', PrintableString),
- ]
-
-
-class AdministrationDomainName(Choice):
- class_ = 1
- tag = 2
-
- _alternatives = [
- ('numeric', NumericString),
- ('printable', PrintableString),
- ]
-
-
-class PrivateDomainName(Choice):
- _alternatives = [
- ('numeric', NumericString),
- ('printable', PrintableString),
- ]
-
-
-class PersonalName(Set):
- _fields = [
- ('surname', PrintableString, {'implicit': 0}),
- ('given_name', PrintableString, {'implicit': 1, 'optional': True}),
- ('initials', PrintableString, {'implicit': 2, 'optional': True}),
- ('generation_qualifier', PrintableString, {'implicit': 3, 'optional': True}),
- ]
-
-
-class TeletexPersonalName(Set):
- _fields = [
- ('surname', TeletexString, {'implicit': 0}),
- ('given_name', TeletexString, {'implicit': 1, 'optional': True}),
- ('initials', TeletexString, {'implicit': 2, 'optional': True}),
- ('generation_qualifier', TeletexString, {'implicit': 3, 'optional': True}),
- ]
-
-
-class OrganizationalUnitNames(SequenceOf):
- _child_spec = PrintableString
-
-
-class TeletexOrganizationalUnitNames(SequenceOf):
- _child_spec = TeletexString
-
-
-class BuiltInStandardAttributes(Sequence):
- _fields = [
- ('country_name', CountryName, {'optional': True}),
- ('administration_domain_name', AdministrationDomainName, {'optional': True}),
- ('network_address', NumericString, {'implicit': 0, 'optional': True}),
- ('terminal_identifier', PrintableString, {'implicit': 1, 'optional': True}),
- ('private_domain_name', PrivateDomainName, {'explicit': 2, 'optional': True}),
- ('organization_name', PrintableString, {'implicit': 3, 'optional': True}),
- ('numeric_user_identifier', NumericString, {'implicit': 4, 'optional': True}),
- ('personal_name', PersonalName, {'implicit': 5, 'optional': True}),
- ('organizational_unit_names', OrganizationalUnitNames, {'implicit': 6, 'optional': True}),
- ]
-
-
-class BuiltInDomainDefinedAttribute(Sequence):
- _fields = [
- ('type', PrintableString),
- ('value', PrintableString),
- ]
-
-
-class BuiltInDomainDefinedAttributes(SequenceOf):
- _child_spec = BuiltInDomainDefinedAttribute
-
-
-class TeletexDomainDefinedAttribute(Sequence):
- _fields = [
- ('type', TeletexString),
- ('value', TeletexString),
- ]
-
-
-class TeletexDomainDefinedAttributes(SequenceOf):
- _child_spec = TeletexDomainDefinedAttribute
-
-
-class PhysicalDeliveryCountryName(Choice):
- _alternatives = [
- ('x121_dcc_code', NumericString),
- ('iso_3166_alpha2_code', PrintableString),
- ]
-
-
-class PostalCode(Choice):
- _alternatives = [
- ('numeric_code', NumericString),
- ('printable_code', PrintableString),
- ]
-
-
-class PDSParameter(Set):
- _fields = [
- ('printable_string', PrintableString, {'optional': True}),
- ('teletex_string', TeletexString, {'optional': True}),
- ]
-
-
-class PrintableAddress(SequenceOf):
- _child_spec = PrintableString
-
-
-class UnformattedPostalAddress(Set):
- _fields = [
- ('printable_address', PrintableAddress, {'optional': True}),
- ('teletex_string', TeletexString, {'optional': True}),
- ]
-
-
-class E1634Address(Sequence):
- _fields = [
- ('number', NumericString, {'implicit': 0}),
- ('sub_address', NumericString, {'implicit': 1, 'optional': True}),
- ]
-
-
-class NAddresses(SetOf):
- _child_spec = OctetString
-
-
-class PresentationAddress(Sequence):
- _fields = [
- ('p_selector', OctetString, {'explicit': 0, 'optional': True}),
- ('s_selector', OctetString, {'explicit': 1, 'optional': True}),
- ('t_selector', OctetString, {'explicit': 2, 'optional': True}),
- ('n_addresses', NAddresses, {'explicit': 3}),
- ]
-
-
-class ExtendedNetworkAddress(Choice):
- _alternatives = [
- ('e163_4_address', E1634Address),
- ('psap_address', PresentationAddress, {'implicit': 0})
- ]
-
-
-class TerminalType(Integer):
- _map = {
- 3: 'telex',
- 4: 'teletex',
- 5: 'g3_facsimile',
- 6: 'g4_facsimile',
- 7: 'ia5_terminal',
- 8: 'videotex',
- }
-
-
-class ExtensionAttributeType(Integer):
- _map = {
- 1: 'common_name',
- 2: 'teletex_common_name',
- 3: 'teletex_organization_name',
- 4: 'teletex_personal_name',
- 5: 'teletex_organization_unit_names',
- 6: 'teletex_domain_defined_attributes',
- 7: 'pds_name',
- 8: 'physical_delivery_country_name',
- 9: 'postal_code',
- 10: 'physical_delivery_office_name',
- 11: 'physical_delivery_office_number',
- 12: 'extension_of_address_components',
- 13: 'physical_delivery_personal_name',
- 14: 'physical_delivery_organization_name',
- 15: 'extension_physical_delivery_address_components',
- 16: 'unformatted_postal_address',
- 17: 'street_address',
- 18: 'post_office_box_address',
- 19: 'poste_restante_address',
- 20: 'unique_postal_name',
- 21: 'local_postal_attributes',
- 22: 'extended_network_address',
- 23: 'terminal_type',
- }
-
-
-class ExtensionAttribute(Sequence):
- _fields = [
- ('extension_attribute_type', ExtensionAttributeType, {'implicit': 0}),
- ('extension_attribute_value', Any, {'explicit': 1}),
- ]
-
- _oid_pair = ('extension_attribute_type', 'extension_attribute_value')
- _oid_specs = {
- 'common_name': PrintableString,
- 'teletex_common_name': TeletexString,
- 'teletex_organization_name': TeletexString,
- 'teletex_personal_name': TeletexPersonalName,
- 'teletex_organization_unit_names': TeletexOrganizationalUnitNames,
- 'teletex_domain_defined_attributes': TeletexDomainDefinedAttributes,
- 'pds_name': PrintableString,
- 'physical_delivery_country_name': PhysicalDeliveryCountryName,
- 'postal_code': PostalCode,
- 'physical_delivery_office_name': PDSParameter,
- 'physical_delivery_office_number': PDSParameter,
- 'extension_of_address_components': PDSParameter,
- 'physical_delivery_personal_name': PDSParameter,
- 'physical_delivery_organization_name': PDSParameter,
- 'extension_physical_delivery_address_components': PDSParameter,
- 'unformatted_postal_address': UnformattedPostalAddress,
- 'street_address': PDSParameter,
- 'post_office_box_address': PDSParameter,
- 'poste_restante_address': PDSParameter,
- 'unique_postal_name': PDSParameter,
- 'local_postal_attributes': PDSParameter,
- 'extended_network_address': ExtendedNetworkAddress,
- 'terminal_type': TerminalType,
- }
-
-
-class ExtensionAttributes(SequenceOf):
- _child_spec = ExtensionAttribute
-
-
-class ORAddress(Sequence):
- _fields = [
- ('built_in_standard_attributes', BuiltInStandardAttributes),
- ('built_in_domain_defined_attributes', BuiltInDomainDefinedAttributes, {'optional': True}),
- ('extension_attributes', ExtensionAttributes, {'optional': True}),
- ]
-
-
-class EDIPartyName(Sequence):
- _fields = [
- ('name_assigner', DirectoryString, {'implicit': 0, 'optional': True}),
- ('party_name', DirectoryString, {'implicit': 1}),
- ]
-
-
-class GeneralName(Choice):
- _alternatives = [
- ('other_name', AnotherName, {'implicit': 0}),
- ('rfc822_name', EmailAddress, {'implicit': 1}),
- ('dns_name', DNSName, {'implicit': 2}),
- ('x400_address', ORAddress, {'implicit': 3}),
- ('directory_name', Name, {'explicit': 4}),
- ('edi_party_name', EDIPartyName, {'implicit': 5}),
- ('uniform_resource_identifier', URI, {'implicit': 6}),
- ('ip_address', IPAddress, {'implicit': 7}),
- ('registered_id', ObjectIdentifier, {'implicit': 8}),
- ]
-
- def __ne__(self, other):
- return not self == other
-
- def __eq__(self, other):
- """
- Does not support other_name, x400_address or edi_party_name
-
- :param other:
- The other GeneralName to compare to
-
- :return:
- A boolean
- """
-
- if self.name in ('other_name', 'x400_address', 'edi_party_name'):
- raise ValueError(unwrap(
- '''
- Comparison is not supported for GeneralName objects of
- choice %s
- ''',
- self.name
- ))
-
- if other.name in ('other_name', 'x400_address', 'edi_party_name'):
- raise ValueError(unwrap(
- '''
- Comparison is not supported for GeneralName objects of choice
- %s''',
- other.name
- ))
-
- if self.name != other.name:
- return False
-
- return self.chosen == other.chosen
-
-
-class GeneralNames(SequenceOf):
- _child_spec = GeneralName
-
-
-class Time(Choice):
- _alternatives = [
- ('utc_time', UTCTime),
- ('general_time', GeneralizedTime),
- ]
-
-
-class Validity(Sequence):
- _fields = [
- ('not_before', Time),
- ('not_after', Time),
- ]
-
-
-class BasicConstraints(Sequence):
- _fields = [
- ('ca', Boolean, {'default': False}),
- ('path_len_constraint', Integer, {'optional': True}),
- ]
-
-
-class AuthorityKeyIdentifier(Sequence):
- _fields = [
- ('key_identifier', OctetString, {'implicit': 0, 'optional': True}),
- ('authority_cert_issuer', GeneralNames, {'implicit': 1, 'optional': True}),
- ('authority_cert_serial_number', Integer, {'implicit': 2, 'optional': True}),
- ]
-
-
-class DistributionPointName(Choice):
- _alternatives = [
- ('full_name', GeneralNames, {'implicit': 0}),
- ('name_relative_to_crl_issuer', RelativeDistinguishedName, {'implicit': 1}),
- ]
-
-
-class ReasonFlags(BitString):
- _map = {
- 0: 'unused',
- 1: 'key_compromise',
- 2: 'ca_compromise',
- 3: 'affiliation_changed',
- 4: 'superseded',
- 5: 'cessation_of_operation',
- 6: 'certificate_hold',
- 7: 'privilege_withdrawn',
- 8: 'aa_compromise',
- }
-
-
-class GeneralSubtree(Sequence):
- _fields = [
- ('base', GeneralName),
- ('minimum', Integer, {'implicit': 0, 'default': 0}),
- ('maximum', Integer, {'implicit': 1, 'optional': True}),
- ]
-
-
-class GeneralSubtrees(SequenceOf):
- _child_spec = GeneralSubtree
-
-
-class NameConstraints(Sequence):
- _fields = [
- ('permitted_subtrees', GeneralSubtrees, {'implicit': 0, 'optional': True}),
- ('excluded_subtrees', GeneralSubtrees, {'implicit': 1, 'optional': True}),
- ]
-
-
-class DistributionPoint(Sequence):
- _fields = [
- ('distribution_point', DistributionPointName, {'explicit': 0, 'optional': True}),
- ('reasons', ReasonFlags, {'implicit': 1, 'optional': True}),
- ('crl_issuer', GeneralNames, {'implicit': 2, 'optional': True}),
- ]
-
- _url = False
-
- @property
- def url(self):
- """
- :return:
- None or a unicode string of the distribution point's URL
- """
-
- if self._url is False:
- self._url = None
- name = self['distribution_point']
- if name.name != 'full_name':
- raise ValueError(unwrap(
- '''
- CRL distribution points that are relative to the issuer are
- not supported
- '''
- ))
-
- for general_name in name.chosen:
- if general_name.name == 'uniform_resource_identifier':
- url = general_name.native
- if url.lower().startswith(('http://', 'https://', 'ldap://', 'ldaps://')):
- self._url = url
- break
-
- return self._url
-
-
-class CRLDistributionPoints(SequenceOf):
- _child_spec = DistributionPoint
-
-
-class DisplayText(Choice):
- _alternatives = [
- ('ia5_string', IA5String),
- ('visible_string', VisibleString),
- ('bmp_string', BMPString),
- ('utf8_string', UTF8String),
- ]
-
-
-class NoticeNumbers(SequenceOf):
- _child_spec = Integer
-
-
-class NoticeReference(Sequence):
- _fields = [
- ('organization', DisplayText),
- ('notice_numbers', NoticeNumbers),
- ]
-
-
-class UserNotice(Sequence):
- _fields = [
- ('notice_ref', NoticeReference, {'optional': True}),
- ('explicit_text', DisplayText, {'optional': True}),
- ]
-
-
-class PolicyQualifierId(ObjectIdentifier):
- _map = {
- '1.3.6.1.5.5.7.2.1': 'certification_practice_statement',
- '1.3.6.1.5.5.7.2.2': 'user_notice',
- }
-
-
-class PolicyQualifierInfo(Sequence):
- _fields = [
- ('policy_qualifier_id', PolicyQualifierId),
- ('qualifier', Any),
- ]
-
- _oid_pair = ('policy_qualifier_id', 'qualifier')
- _oid_specs = {
- 'certification_practice_statement': IA5String,
- 'user_notice': UserNotice,
- }
-
-
-class PolicyQualifierInfos(SequenceOf):
- _child_spec = PolicyQualifierInfo
-
-
-class PolicyIdentifier(ObjectIdentifier):
- _map = {
- '2.5.29.32.0': 'any_policy',
- }
-
-
-class PolicyInformation(Sequence):
- _fields = [
- ('policy_identifier', PolicyIdentifier),
- ('policy_qualifiers', PolicyQualifierInfos, {'optional': True})
- ]
-
-
-class CertificatePolicies(SequenceOf):
- _child_spec = PolicyInformation
-
-
-class PolicyMapping(Sequence):
- _fields = [
- ('issuer_domain_policy', PolicyIdentifier),
- ('subject_domain_policy', PolicyIdentifier),
- ]
-
-
-class PolicyMappings(SequenceOf):
- _child_spec = PolicyMapping
-
-
-class PolicyConstraints(Sequence):
- _fields = [
- ('require_explicit_policy', Integer, {'implicit': 0, 'optional': True}),
- ('inhibit_policy_mapping', Integer, {'implicit': 1, 'optional': True}),
- ]
-
-
-class KeyPurposeId(ObjectIdentifier):
- _map = {
- # https://tools.ietf.org/html/rfc5280#page-45
- '2.5.29.37.0': 'any_extended_key_usage',
- '1.3.6.1.5.5.7.3.1': 'server_auth',
- '1.3.6.1.5.5.7.3.2': 'client_auth',
- '1.3.6.1.5.5.7.3.3': 'code_signing',
- '1.3.6.1.5.5.7.3.4': 'email_protection',
- '1.3.6.1.5.5.7.3.5': 'ipsec_end_system',
- '1.3.6.1.5.5.7.3.6': 'ipsec_tunnel',
- '1.3.6.1.5.5.7.3.7': 'ipsec_user',
- '1.3.6.1.5.5.7.3.8': 'time_stamping',
- '1.3.6.1.5.5.7.3.9': 'ocsp_signing',
- # http://tools.ietf.org/html/rfc3029.html#page-9
- '1.3.6.1.5.5.7.3.10': 'dvcs',
- # http://tools.ietf.org/html/rfc6268.html#page-16
- '1.3.6.1.5.5.7.3.13': 'eap_over_ppp',
- '1.3.6.1.5.5.7.3.14': 'eap_over_lan',
- # https://tools.ietf.org/html/rfc5055#page-76
- '1.3.6.1.5.5.7.3.15': 'scvp_server',
- '1.3.6.1.5.5.7.3.16': 'scvp_client',
- # https://tools.ietf.org/html/rfc4945#page-31
- '1.3.6.1.5.5.7.3.17': 'ipsec_ike',
- # https://tools.ietf.org/html/rfc5415#page-38
- '1.3.6.1.5.5.7.3.18': 'capwap_ac',
- '1.3.6.1.5.5.7.3.19': 'capwap_wtp',
- # https://tools.ietf.org/html/rfc5924#page-8
- '1.3.6.1.5.5.7.3.20': 'sip_domain',
- # https://tools.ietf.org/html/rfc6187#page-7
- '1.3.6.1.5.5.7.3.21': 'secure_shell_client',
- '1.3.6.1.5.5.7.3.22': 'secure_shell_server',
- # https://tools.ietf.org/html/rfc6494#page-7
- '1.3.6.1.5.5.7.3.23': 'send_router',
- '1.3.6.1.5.5.7.3.24': 'send_proxied_router',
- '1.3.6.1.5.5.7.3.25': 'send_owner',
- '1.3.6.1.5.5.7.3.26': 'send_proxied_owner',
- # https://tools.ietf.org/html/rfc6402#page-10
- '1.3.6.1.5.5.7.3.27': 'cmc_ca',
- '1.3.6.1.5.5.7.3.28': 'cmc_ra',
- '1.3.6.1.5.5.7.3.29': 'cmc_archive',
- # https://tools.ietf.org/html/draft-ietf-sidr-bgpsec-pki-profiles-15#page-6
- '1.3.6.1.5.5.7.3.30': 'bgpspec_router',
- # https://msdn.microsoft.com/en-us/library/windows/desktop/aa378132(v=vs.85).aspx
- # and https://support.microsoft.com/en-us/kb/287547
- '1.3.6.1.4.1.311.10.3.1': 'microsoft_trust_list_signing',
- '1.3.6.1.4.1.311.10.3.2': 'microsoft_time_stamp_signing',
- '1.3.6.1.4.1.311.10.3.3': 'microsoft_server_gated',
- '1.3.6.1.4.1.311.10.3.3.1': 'microsoft_serialized',
- '1.3.6.1.4.1.311.10.3.4': 'microsoft_efs',
- '1.3.6.1.4.1.311.10.3.4.1': 'microsoft_efs_recovery',
- '1.3.6.1.4.1.311.10.3.5': 'microsoft_whql',
- '1.3.6.1.4.1.311.10.3.6': 'microsoft_nt5',
- '1.3.6.1.4.1.311.10.3.7': 'microsoft_oem_whql',
- '1.3.6.1.4.1.311.10.3.8': 'microsoft_embedded_nt',
- '1.3.6.1.4.1.311.10.3.9': 'microsoft_root_list_signer',
- '1.3.6.1.4.1.311.10.3.10': 'microsoft_qualified_subordination',
- '1.3.6.1.4.1.311.10.3.11': 'microsoft_key_recovery',
- '1.3.6.1.4.1.311.10.3.12': 'microsoft_document_signing',
- '1.3.6.1.4.1.311.10.3.13': 'microsoft_lifetime_signing',
- '1.3.6.1.4.1.311.10.3.14': 'microsoft_mobile_device_software',
- # https://opensource.apple.com/source
- # - /Security/Security-57031.40.6/Security/libsecurity_keychain/lib/SecPolicy.cpp
- # - /libsecurity_cssm/libsecurity_cssm-36064/lib/oidsalg.c
- '1.2.840.113635.100.1.2': 'apple_x509_basic',
- '1.2.840.113635.100.1.3': 'apple_ssl',
- '1.2.840.113635.100.1.4': 'apple_local_cert_gen',
- '1.2.840.113635.100.1.5': 'apple_csr_gen',
- '1.2.840.113635.100.1.6': 'apple_revocation_crl',
- '1.2.840.113635.100.1.7': 'apple_revocation_ocsp',
- '1.2.840.113635.100.1.8': 'apple_smime',
- '1.2.840.113635.100.1.9': 'apple_eap',
- '1.2.840.113635.100.1.10': 'apple_software_update_signing',
- '1.2.840.113635.100.1.11': 'apple_ipsec',
- '1.2.840.113635.100.1.12': 'apple_ichat',
- '1.2.840.113635.100.1.13': 'apple_resource_signing',
- '1.2.840.113635.100.1.14': 'apple_pkinit_client',
- '1.2.840.113635.100.1.15': 'apple_pkinit_server',
- '1.2.840.113635.100.1.16': 'apple_code_signing',
- '1.2.840.113635.100.1.17': 'apple_package_signing',
- '1.2.840.113635.100.1.18': 'apple_id_validation',
- '1.2.840.113635.100.1.20': 'apple_time_stamping',
- '1.2.840.113635.100.1.21': 'apple_revocation',
- '1.2.840.113635.100.1.22': 'apple_passbook_signing',
- '1.2.840.113635.100.1.23': 'apple_mobile_store',
- '1.2.840.113635.100.1.24': 'apple_escrow_service',
- '1.2.840.113635.100.1.25': 'apple_profile_signer',
- '1.2.840.113635.100.1.26': 'apple_qa_profile_signer',
- '1.2.840.113635.100.1.27': 'apple_test_mobile_store',
- '1.2.840.113635.100.1.28': 'apple_otapki_signer',
- '1.2.840.113635.100.1.29': 'apple_test_otapki_signer',
- '1.2.840.113625.100.1.30': 'apple_id_validation_record_signing_policy',
- '1.2.840.113625.100.1.31': 'apple_smp_encryption',
- '1.2.840.113625.100.1.32': 'apple_test_smp_encryption',
- '1.2.840.113635.100.1.33': 'apple_server_authentication',
- '1.2.840.113635.100.1.34': 'apple_pcs_escrow_service',
- }
-
-
-class ExtKeyUsageSyntax(SequenceOf):
- _child_spec = KeyPurposeId
-
-
-class AccessMethod(ObjectIdentifier):
- _map = {
- '1.3.6.1.5.5.7.48.1': 'ocsp',
- '1.3.6.1.5.5.7.48.2': 'ca_issuers',
- '1.3.6.1.5.5.7.48.3': 'time_stamping',
- '1.3.6.1.5.5.7.48.5': 'ca_repository',
- }
-
-
-class AccessDescription(Sequence):
- _fields = [
- ('access_method', AccessMethod),
- ('access_location', GeneralName),
- ]
-
-
-class AuthorityInfoAccessSyntax(SequenceOf):
- _child_spec = AccessDescription
-
-
-class SubjectInfoAccessSyntax(SequenceOf):
- _child_spec = AccessDescription
-
-
-# https://tools.ietf.org/html/rfc7633
-class Features(SequenceOf):
- _child_spec = Integer
-
-
-class EntrustVersionInfo(Sequence):
- _fields = [
- ('entrust_vers', GeneralString),
- ('entrust_info_flags', BitString)
- ]
-
-
-class NetscapeCertificateType(BitString):
- _map = {
- 0: 'ssl_client',
- 1: 'ssl_server',
- 2: 'email',
- 3: 'object_signing',
- 4: 'reserved',
- 5: 'ssl_ca',
- 6: 'email_ca',
- 7: 'object_signing_ca',
- }
-
-
-class ExtensionId(ObjectIdentifier):
- _map = {
- '2.5.29.9': 'subject_directory_attributes',
- '2.5.29.14': 'key_identifier',
- '2.5.29.15': 'key_usage',
- '2.5.29.16': 'private_key_usage_period',
- '2.5.29.17': 'subject_alt_name',
- '2.5.29.18': 'issuer_alt_name',
- '2.5.29.19': 'basic_constraints',
- '2.5.29.30': 'name_constraints',
- '2.5.29.31': 'crl_distribution_points',
- '2.5.29.32': 'certificate_policies',
- '2.5.29.33': 'policy_mappings',
- '2.5.29.35': 'authority_key_identifier',
- '2.5.29.36': 'policy_constraints',
- '2.5.29.37': 'extended_key_usage',
- '2.5.29.46': 'freshest_crl',
- '2.5.29.54': 'inhibit_any_policy',
- '1.3.6.1.5.5.7.1.1': 'authority_information_access',
- '1.3.6.1.5.5.7.1.11': 'subject_information_access',
- # https://tools.ietf.org/html/rfc7633
- '1.3.6.1.5.5.7.1.24': 'tls_feature',
- '1.3.6.1.5.5.7.48.1.5': 'ocsp_no_check',
- '1.2.840.113533.7.65.0': 'entrust_version_extension',
- '2.16.840.1.113730.1.1': 'netscape_certificate_type',
- # https://tools.ietf.org/html/rfc6962.html#page-14
- '1.3.6.1.4.1.11129.2.4.2': 'signed_certificate_timestamp_list',
- }
-
-
-class Extension(Sequence):
- _fields = [
- ('extn_id', ExtensionId),
- ('critical', Boolean, {'default': False}),
- ('extn_value', ParsableOctetString),
- ]
-
- _oid_pair = ('extn_id', 'extn_value')
- _oid_specs = {
- 'subject_directory_attributes': Attributes,
- 'key_identifier': OctetString,
- 'key_usage': KeyUsage,
- 'private_key_usage_period': PrivateKeyUsagePeriod,
- 'subject_alt_name': GeneralNames,
- 'issuer_alt_name': GeneralNames,
- 'basic_constraints': BasicConstraints,
- 'name_constraints': NameConstraints,
- 'crl_distribution_points': CRLDistributionPoints,
- 'certificate_policies': CertificatePolicies,
- 'policy_mappings': PolicyMappings,
- 'authority_key_identifier': AuthorityKeyIdentifier,
- 'policy_constraints': PolicyConstraints,
- 'extended_key_usage': ExtKeyUsageSyntax,
- 'freshest_crl': CRLDistributionPoints,
- 'inhibit_any_policy': Integer,
- 'authority_information_access': AuthorityInfoAccessSyntax,
- 'subject_information_access': SubjectInfoAccessSyntax,
- 'tls_feature': Features,
- 'ocsp_no_check': Null,
- 'entrust_version_extension': EntrustVersionInfo,
- 'netscape_certificate_type': NetscapeCertificateType,
- 'signed_certificate_timestamp_list': OctetString,
- }
-
-
-class Extensions(SequenceOf):
- _child_spec = Extension
-
-
-class Version(Integer):
- _map = {
- 0: 'v1',
- 1: 'v2',
- 2: 'v3',
- }
-
-
-class TbsCertificate(Sequence):
- _fields = [
- ('version', Version, {'explicit': 0, 'default': 'v1'}),
- ('serial_number', Integer),
- ('signature', SignedDigestAlgorithm),
- ('issuer', Name),
- ('validity', Validity),
- ('subject', Name),
- ('subject_public_key_info', PublicKeyInfo),
- ('issuer_unique_id', OctetBitString, {'implicit': 1, 'optional': True}),
- ('subject_unique_id', OctetBitString, {'implicit': 2, 'optional': True}),
- ('extensions', Extensions, {'explicit': 3, 'optional': True}),
- ]
-
-
-class Certificate(Sequence):
- _fields = [
- ('tbs_certificate', TbsCertificate),
- ('signature_algorithm', SignedDigestAlgorithm),
- ('signature_value', OctetBitString),
- ]
-
- _processed_extensions = False
- _critical_extensions = None
- _subject_directory_attributes_value = None
- _key_identifier_value = None
- _key_usage_value = None
- _subject_alt_name_value = None
- _issuer_alt_name_value = None
- _basic_constraints_value = None
- _name_constraints_value = None
- _crl_distribution_points_value = None
- _certificate_policies_value = None
- _policy_mappings_value = None
- _authority_key_identifier_value = None
- _policy_constraints_value = None
- _freshest_crl_value = None
- _inhibit_any_policy_value = None
- _extended_key_usage_value = None
- _authority_information_access_value = None
- _subject_information_access_value = None
- _tls_feature_value = None
- _ocsp_no_check_value = None
- _issuer_serial = None
- _authority_issuer_serial = False
- _crl_distribution_points = None
- _delta_crl_distribution_points = None
- _valid_domains = None
- _valid_ips = None
- _self_issued = None
- _self_signed = None
- _sha1 = None
- _sha256 = None
-
- def _set_extensions(self):
- """
- Sets common named extensions to private attributes and creates a list
- of critical extensions
- """
-
- self._critical_extensions = set()
-
- for extension in self['tbs_certificate']['extensions']:
- name = extension['extn_id'].native
- attribute_name = '_%s_value' % name
- if hasattr(self, attribute_name):
- setattr(self, attribute_name, extension['extn_value'].parsed)
- if extension['critical'].native:
- self._critical_extensions.add(name)
-
- self._processed_extensions = True
-
- @property
- def critical_extensions(self):
- """
- Returns a set of the names (or OID if not a known extension) of the
- extensions marked as critical
-
- :return:
- A set of unicode strings
- """
-
- if not self._processed_extensions:
- self._set_extensions()
- return self._critical_extensions
-
- @property
- def subject_directory_attributes_value(self):
- """
- This extension is used to contain additional identification attributes
- about the subject.
-
- :return:
- None or an Attributes object
- """
-
- if not self._processed_extensions:
- self._set_extensions()
- return self._subject_directory_attributes_value
-
- @property
- def key_identifier_value(self):
- """
- This extension is used to help in creating certificate validation paths.
- It contains an identifier that should generally, but is not guaranteed
- to, be unique.
-
- :return:
- None or an OctetString object
- """
-
- if not self._processed_extensions:
- self._set_extensions()
- return self._key_identifier_value
-
- @property
- def key_usage_value(self):
- """
- This extension is used to define the purpose of the public key
- contained within the certificate.
-
- :return:
- None or a KeyUsage
- """
-
- if not self._processed_extensions:
- self._set_extensions()
- return self._key_usage_value
-
- @property
- def subject_alt_name_value(self):
- """
- This extension allows for additional names to be associated with the
- subject of the certificate. While it may contain a whole host of
- possible names, it is usually used to allow certificates to be used
- with multiple different domain names.
-
- :return:
- None or a GeneralNames object
- """
-
- if not self._processed_extensions:
- self._set_extensions()
- return self._subject_alt_name_value
-
- @property
- def issuer_alt_name_value(self):
- """
- This extension allows associating one or more alternative names with
- the issuer of the certificate.
-
- :return:
- None or an x509.GeneralNames object
- """
-
- if not self._processed_extensions:
- self._set_extensions()
- return self._issuer_alt_name_value
-
- @property
- def basic_constraints_value(self):
- """
- This extension is used to determine if the subject of the certificate
- is a CA, and if so, the maximum number of intermediate CA certs
- that may follow it before an end-entity certificate is found.
-
- :return:
- None or a BasicConstraints object
- """
-
- if not self._processed_extensions:
- self._set_extensions()
- return self._basic_constraints_value
-
- @property
- def name_constraints_value(self):
- """
- This extension is used in CA certificates to limit the possible
- names of certificates that may be issued.
-
- :return:
- None or a NameConstraints object
- """
-
- if not self._processed_extensions:
- self._set_extensions()
- return self._name_constraints_value
-
- @property
- def crl_distribution_points_value(self):
- """
- This extension is used to help in locating the CRL for this certificate.
-
- :return:
- None or a CRLDistributionPoints object
- """
-
- if not self._processed_extensions:
- self._set_extensions()
- return self._crl_distribution_points_value
-
- @property
- def certificate_policies_value(self):
- """
- This extension defines policies in CA certificates under which
- certificates may be issued. In end-entity certificates, the inclusion
- of a policy indicates the issuance of the certificate follows the
- policy.
-
- :return:
- None or a CertificatePolicies object
- """
-
- if not self._processed_extensions:
- self._set_extensions()
- return self._certificate_policies_value
-
- @property
- def policy_mappings_value(self):
- """
- This extension allows mapping policy OIDs to other OIDs. This is used
- to allow different policies to be treated as equivalent in the process
- of validation.
-
- :return:
- None or a PolicyMappings object
- """
-
- if not self._processed_extensions:
- self._set_extensions()
- return self._policy_mappings_value
-
- @property
- def authority_key_identifier_value(self):
- """
- This extension helps in identifying the public key with which to
- validate the authenticity of the certificate.
-
- :return:
- None or an AuthorityKeyIdentifier object
- """
-
- if not self._processed_extensions:
- self._set_extensions()
- return self._authority_key_identifier_value
-
- @property
- def policy_constraints_value(self):
- """
- This extension is used to control if policy mapping is allowed and
- when policies are required.
-
- :return:
- None or a PolicyConstraints object
- """
-
- if not self._processed_extensions:
- self._set_extensions()
- return self._policy_constraints_value
-
- @property
- def freshest_crl_value(self):
- """
- This extension is used to help locate any available delta CRLs
-
- :return:
- None or a CRLDistributionPoints object
- """
-
- if not self._processed_extensions:
- self._set_extensions()
- return self._freshest_crl_value
-
- @property
- def inhibit_any_policy_value(self):
- """
- This extension is used to prevent mapping of the any policy to
- specific requirements
-
- :return:
- None or an Integer object
- """
-
- if not self._processed_extensions:
- self._set_extensions()
- return self._inhibit_any_policy_value
-
- @property
- def extended_key_usage_value(self):
- """
- This extension is used to define additional purposes for the public key
- beyond what is contained in the key usage extension.
-
- :return:
- None or an ExtKeyUsageSyntax object
- """
-
- if not self._processed_extensions:
- self._set_extensions()
- return self._extended_key_usage_value
-
- @property
- def authority_information_access_value(self):
- """
- This extension is used to locate the CA certificate used to sign this
- certificate, or the OCSP responder for this certificate.
-
- :return:
- None or an AuthorityInfoAccessSyntax object
- """
-
- if not self._processed_extensions:
- self._set_extensions()
- return self._authority_information_access_value
-
- @property
- def subject_information_access_value(self):
- """
- This extension is used to access information about the subject of this
- certificate.
-
- :return:
- None or a SubjectInfoAccessSyntax object
- """
-
- if not self._processed_extensions:
- self._set_extensions()
- return self._subject_information_access_value
-
- @property
- def tls_feature_value(self):
- """
- This extension is used to list the TLS features a server must respond
- with if a client initiates a request supporting them.
-
- :return:
- None or a Features object
- """
-
- if not self._processed_extensions:
- self._set_extensions()
- return self._tls_feature_value
-
- @property
- def ocsp_no_check_value(self):
- """
- This extension is used on certificates of OCSP responders, indicating
- that revocation information for the certificate should never need to
- be verified, thus preventing possible loops in path validation.
-
- :return:
- None or a Null object (if present)
- """
-
- if not self._processed_extensions:
- self._set_extensions()
- return self._ocsp_no_check_value
-
- @property
- def signature(self):
- """
- :return:
- A byte string of the signature
- """
-
- return self['signature_value'].native
-
- @property
- def signature_algo(self):
- """
- :return:
- A unicode string of "rsassa_pkcs1v15", "rsassa_pss", "dsa", "ecdsa"
- """
-
- return self['signature_algorithm'].signature_algo
-
- @property
- def hash_algo(self):
- """
- :return:
- A unicode string of "md2", "md5", "sha1", "sha224", "sha256",
- "sha384", "sha512", "sha512_224", "sha512_256"
- """
-
- return self['signature_algorithm'].hash_algo
-
- @property
- def public_key(self):
- """
- :return:
- The PublicKeyInfo object for this certificate
- """
-
- return self['tbs_certificate']['subject_public_key_info']
-
- @property
- def subject(self):
- """
- :return:
- The Name object for the subject of this certificate
- """
-
- return self['tbs_certificate']['subject']
-
- @property
- def issuer(self):
- """
- :return:
- The Name object for the issuer of this certificate
- """
-
- return self['tbs_certificate']['issuer']
-
- @property
- def serial_number(self):
- """
- :return:
- An integer of the certificate's serial number
- """
-
- return self['tbs_certificate']['serial_number'].native
-
- @property
- def key_identifier(self):
- """
- :return:
- None or a byte string of the certificate's key identifier from the
- key identifier extension
- """
-
- if not self.key_identifier_value:
- return None
-
- return self.key_identifier_value.native
-
- @property
- def issuer_serial(self):
- """
- :return:
- A byte string of the SHA-256 hash of the issuer concatenated with
- the ascii character ":", concatenated with the serial number as
- an ascii string
- """
-
- if self._issuer_serial is None:
- self._issuer_serial = self.issuer.sha256 + b':' + str_cls(self.serial_number).encode('ascii')
- return self._issuer_serial
-
- @property
- def authority_key_identifier(self):
- """
- :return:
- None or a byte string of the key_identifier from the authority key
- identifier extension
- """
-
- if not self.authority_key_identifier_value:
- return None
-
- return self.authority_key_identifier_value['key_identifier'].native
-
- @property
- def authority_issuer_serial(self):
- """
- :return:
- None or a byte string of the SHA-256 hash of the issuer from the
- authority key identifier extension concatenated with the ascii
- character ":", concatenated with the serial number from the
- authority key identifier extension as an ascii string
- """
-
- if self._authority_issuer_serial is False:
- akiv = self.authority_key_identifier_value
- if akiv and akiv['authority_cert_issuer'].native:
- issuer = self.authority_key_identifier_value['authority_cert_issuer'][0].chosen
- # We untag the element since it is tagged via being a choice from GeneralName
- issuer = issuer.untag()
- authority_serial = self.authority_key_identifier_value['authority_cert_serial_number'].native
- self._authority_issuer_serial = issuer.sha256 + b':' + str_cls(authority_serial).encode('ascii')
- else:
- self._authority_issuer_serial = None
- return self._authority_issuer_serial
-
- @property
- def crl_distribution_points(self):
- """
- Returns distribution points for complete CRLs - does not include delta CRLs
-
- :return:
- A list of zero or more DistributionPoint objects
- """
-
- if self._crl_distribution_points is None:
- self._crl_distribution_points = self._get_http_crl_distribution_points(self.crl_distribution_points_value)
- return self._crl_distribution_points
-
- @property
- def delta_crl_distribution_points(self):
- """
- Returns distribution points for delta CRLs - does not include complete CRLs
-
- :return:
- A list of zero or more DistributionPoint objects
- """
-
- if self._delta_crl_distribution_points is None:
- self._delta_crl_distribution_points = self._get_http_crl_distribution_points(self.freshest_crl_value)
- return self._delta_crl_distribution_points
-
- def _get_http_crl_distribution_points(self, crl_distribution_points):
- """
- Fetches the DistributionPoint objects for non-relative, HTTP CRLs
- referenced by the certificate
-
- :param crl_distribution_points:
- A CRLDistributionPoints object to grab the DistributionPoints from
-
- :return:
- A list of zero or more DistributionPoint objects
- """
-
- output = []
-
- if crl_distribution_points is None:
- return []
-
- for distribution_point in crl_distribution_points:
- distribution_point_name = distribution_point['distribution_point']
- if distribution_point_name is VOID:
- continue
- # RFC 5280 indicates conforming CAs should not use the relative form
- if distribution_point_name.name == 'name_relative_to_crl_issuer':
- continue
- # This library is currently only concerned with HTTP-based CRLs
- for general_name in distribution_point_name.chosen:
- if general_name.name == 'uniform_resource_identifier':
- output.append(distribution_point)
-
- return output
-
- @property
- def ocsp_urls(self):
- """
- :return:
- A list of zero or more unicode strings of the OCSP URLs for this
- cert
- """
-
- if not self.authority_information_access_value:
- return []
-
- output = []
- for entry in self.authority_information_access_value:
- if entry['access_method'].native == 'ocsp':
- location = entry['access_location']
- if location.name != 'uniform_resource_identifier':
- continue
- url = location.native
- if url.lower().startswith(('http://', 'https://', 'ldap://', 'ldaps://')):
- output.append(url)
- return output
-
- @property
- def valid_domains(self):
- """
- :return:
- A list of unicode strings of valid domain names for the certificate.
- Wildcard certificates will have a domain in the form: *.example.com
- """
-
- if self._valid_domains is None:
- self._valid_domains = []
-
- # For the subject alt name extension, we can look at the name of
- # the choice selected since it distinguishes between domain names,
- # email addresses, IPs, etc
- if self.subject_alt_name_value:
- for general_name in self.subject_alt_name_value:
- if general_name.name == 'dns_name' and general_name.native not in self._valid_domains:
- self._valid_domains.append(general_name.native)
-
- # If there was no subject alt name extension, and the common name
- # in the subject looks like a domain, that is considered the valid
- # list. This is done because according to
- # https://tools.ietf.org/html/rfc6125#section-6.4.4, the common
- # name should not be used if the subject alt name is present.
- else:
- pattern = re.compile('^(\\*\\.)?(?:[a-zA-Z0-9](?:[a-zA-Z0-9\\-]*[a-zA-Z0-9])?\\.)+[a-zA-Z]{2,}$')
- for rdn in self.subject.chosen:
- for name_type_value in rdn:
- if name_type_value['type'].native == 'common_name':
- value = name_type_value['value'].native
- if pattern.match(value):
- self._valid_domains.append(value)
-
- return self._valid_domains
-
- @property
- def valid_ips(self):
- """
- :return:
- A list of unicode strings of valid IP addresses for the certificate
- """
-
- if self._valid_ips is None:
- self._valid_ips = []
-
- if self.subject_alt_name_value:
- for general_name in self.subject_alt_name_value:
- if general_name.name == 'ip_address':
- self._valid_ips.append(general_name.native)
-
- return self._valid_ips
-
- @property
- def ca(self):
- """
- :return:
- A boolean - if the certificate is marked as a CA
- """
-
- return self.basic_constraints_value and self.basic_constraints_value['ca'].native
-
- @property
- def max_path_length(self):
- """
- :return:
- None or an integer of the maximum path length
- """
-
- if not self.ca:
- return None
- return self.basic_constraints_value['path_len_constraint'].native
-
- @property
- def self_issued(self):
- """
- :return:
- A boolean - if the certificate is self-issued, as defined by RFC
- 5280
- """
-
- if self._self_issued is None:
- self._self_issued = self.subject == self.issuer
- return self._self_issued
-
- @property
- def self_signed(self):
- """
- :return:
- A unicode string of "yes", "no" or "maybe". The "maybe" result will
- be returned if the certificate does not contain a key identifier
- extension, but is issued by the subject. In this case the
- certificate signature will need to be verified using the subject
- public key to determine a "yes" or "no" answer.
- """
-
- if self._self_signed is None:
- self._self_signed = 'no'
- if self.self_issued:
- if self.key_identifier:
- if not self.authority_key_identifier:
- self._self_signed = 'yes'
- elif self.authority_key_identifier == self.key_identifier:
- self._self_signed = 'yes'
- else:
- self._self_signed = 'maybe'
- return self._self_signed
-
- @property
- def sha1(self):
- """
- :return:
- The SHA-1 hash of the DER-encoded bytes of this complete certificate
- """
-
- if self._sha1 is None:
- self._sha1 = hashlib.sha1(self.dump()).digest()
- return self._sha1
-
- @property
- def sha1_fingerprint(self):
- """
- :return:
- A unicode string of the SHA-1 hash, formatted using hex encoding
- with a space between each pair of characters, all uppercase
- """
-
- return ' '.join('%02X' % c for c in bytes_to_list(self.sha1))
-
- @property
- def sha256(self):
- """
- :return:
- The SHA-256 hash of the DER-encoded bytes of this complete
- certificate
- """
-
- if self._sha256 is None:
- self._sha256 = hashlib.sha256(self.dump()).digest()
- return self._sha256
-
- @property
- def sha256_fingerprint(self):
- """
- :return:
- A unicode string of the SHA-256 hash, formatted using hex encoding
- with a space between each pair of characters, all uppercase
- """
-
- return ' '.join('%02X' % c for c in bytes_to_list(self.sha256))
-
- def is_valid_domain_ip(self, domain_ip):
- """
- Check if a domain name or IP address is valid according to the
- certificate
-
- :param domain_ip:
- A unicode string of a domain name or IP address
-
- :return:
- A boolean - if the domain or IP is valid for the certificate
- """
-
- if not isinstance(domain_ip, str_cls):
- raise TypeError(unwrap(
- '''
- domain_ip must be a unicode string, not %s
- ''',
- type_name(domain_ip)
- ))
-
- encoded_domain_ip = domain_ip.encode('idna').decode('ascii').lower()
-
- is_ipv6 = encoded_domain_ip.find(':') != -1
- is_ipv4 = not is_ipv6 and re.match('^\\d+\\.\\d+\\.\\d+\\.\\d+$', encoded_domain_ip)
- is_domain = not is_ipv6 and not is_ipv4
-
- # Handle domain name checks
- if is_domain:
- if not self.valid_domains:
- return False
-
- domain_labels = encoded_domain_ip.split('.')
-
- for valid_domain in self.valid_domains:
- encoded_valid_domain = valid_domain.encode('idna').decode('ascii').lower()
- valid_domain_labels = encoded_valid_domain.split('.')
-
- # The domain must be equal in label length to match
- if len(valid_domain_labels) != len(domain_labels):
- continue
-
- if valid_domain_labels == domain_labels:
- return True
-
- is_wildcard = self._is_wildcard_domain(encoded_valid_domain)
- if is_wildcard and self._is_wildcard_match(domain_labels, valid_domain_labels):
- return True
-
- return False
-
- # Handle IP address checks
- if not self.valid_ips:
- return False
-
- family = socket.AF_INET if is_ipv4 else socket.AF_INET6
- normalized_ip = inet_pton(family, encoded_domain_ip)
-
- for valid_ip in self.valid_ips:
- valid_family = socket.AF_INET if valid_ip.find('.') != -1 else socket.AF_INET6
- normalized_valid_ip = inet_pton(valid_family, valid_ip)
-
- if normalized_valid_ip == normalized_ip:
- return True
-
- return False
-
- def _is_wildcard_domain(self, domain):
- """
- Checks if a domain is a valid wildcard according to
- https://tools.ietf.org/html/rfc6125#section-6.4.3
-
- :param domain:
- A unicode string of the domain name, where any U-labels from an IDN
- have been converted to A-labels
-
- :return:
- A boolean - if the domain is a valid wildcard domain
- """
-
- # The * character must be present for a wildcard match, and if there is
- more than one, it is an invalid wildcard specification
- if domain.count('*') != 1:
- return False
-
- labels = domain.lower().split('.')
-
- if not labels:
- return False
-
- # Wildcards may only appear in the left-most label
- if labels[0].find('*') == -1:
- return False
-
- # Wildcards may not be embedded in an A-label from an IDN
- if labels[0][0:4] == 'xn--':
- return False
-
- return True
-
- def _is_wildcard_match(self, domain_labels, valid_domain_labels):
- """
- Determines if the labels in a domain are a match for labels from a
- wildcard domain pattern
-
- :param domain_labels:
- A list of unicode strings, with A-label form for IDNs, of the labels
- in the domain name to check
-
- :param valid_domain_labels:
- A list of unicode strings, with A-label form for IDNs, of the labels
- in a wildcard domain pattern
-
- :return:
- A boolean - if the domain matches the valid domain
- """
-
- first_domain_label = domain_labels[0]
- other_domain_labels = domain_labels[1:]
-
- wildcard_label = valid_domain_labels[0]
- other_valid_domain_labels = valid_domain_labels[1:]
-
- # The wildcard is only allowed in the first label, so if the
- # subsequent labels are not equal, there is no match
- if other_domain_labels != other_valid_domain_labels:
- return False
-
- if wildcard_label == '*':
- return True
-
- wildcard_regex = re.compile('^' + wildcard_label.replace('*', '.*') + '$')
- if wildcard_regex.match(first_domain_label):
- return True
-
- return False
-
-
-# The structures are taken from the OpenSSL source file x_x509a.c, and specify
-# extra information that is added to X.509 certificates to store trust
-# information about the certificate.
-
-class KeyPurposeIdentifiers(SequenceOf):
- _child_spec = KeyPurposeId
-
-
-class SequenceOfAlgorithmIdentifiers(SequenceOf):
- _child_spec = AlgorithmIdentifier
-
-
-class CertificateAux(Sequence):
- _fields = [
- ('trust', KeyPurposeIdentifiers, {'optional': True}),
- ('reject', KeyPurposeIdentifiers, {'implicit': 0, 'optional': True}),
- ('alias', UTF8String, {'optional': True}),
- ('keyid', OctetString, {'optional': True}),
- ('other', SequenceOfAlgorithmIdentifiers, {'implicit': 1, 'optional': True}),
- ]
-
-
-class TrustedCertificate(Concat):
- _child_specs = [Certificate, CertificateAux]
diff --git a/functions/source/CreateSSHKey/cffi-1.14.2.dist-info/INSTALLER b/functions/source/CreateSSHKey/cffi-1.14.2.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/functions/source/CreateSSHKey/cffi-1.14.2.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/functions/source/CreateSSHKey/cffi-1.14.2.dist-info/LICENSE b/functions/source/CreateSSHKey/cffi-1.14.2.dist-info/LICENSE
new file mode 100644
index 0000000..29225ee
--- /dev/null
+++ b/functions/source/CreateSSHKey/cffi-1.14.2.dist-info/LICENSE
@@ -0,0 +1,26 @@
+
+Except when otherwise stated (look for LICENSE files in directories or
+information at the beginning of each file) all software and
+documentation is licensed as follows:
+
+ The MIT License
+
+ Permission is hereby granted, free of charge, to any person
+ obtaining a copy of this software and associated documentation
+ files (the "Software"), to deal in the Software without
+ restriction, including without limitation the rights to use,
+ copy, modify, merge, publish, distribute, sublicense, and/or
+ sell copies of the Software, and to permit persons to whom the
+ Software is furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included
+ in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+ OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+ THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+ DEALINGS IN THE SOFTWARE.
+
diff --git a/functions/source/CreateSSHKey/cffi-1.14.2.dist-info/METADATA b/functions/source/CreateSSHKey/cffi-1.14.2.dist-info/METADATA
new file mode 100644
index 0000000..a946f44
--- /dev/null
+++ b/functions/source/CreateSSHKey/cffi-1.14.2.dist-info/METADATA
@@ -0,0 +1,37 @@
+Metadata-Version: 2.1
+Name: cffi
+Version: 1.14.2
+Summary: Foreign Function Interface for Python calling C code.
+Home-page: http://cffi.readthedocs.org
+Author: Armin Rigo, Maciej Fijalkowski
+Author-email: python-cffi@googlegroups.com
+License: MIT
+Platform: UNKNOWN
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.2
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: License :: OSI Approved :: MIT License
+Requires-Dist: pycparser
+
+
+CFFI
+====
+
+Foreign Function Interface for Python calling C code.
+Please see the `Documentation <http://cffi.readthedocs.org>`_.
+
+Contact
+-------
+
+`Mailing list `_
+
+
diff --git a/functions/source/CreateSSHKey/cffi-1.14.2.dist-info/RECORD b/functions/source/CreateSSHKey/cffi-1.14.2.dist-info/RECORD
new file mode 100644
index 0000000..bb3dc32
--- /dev/null
+++ b/functions/source/CreateSSHKey/cffi-1.14.2.dist-info/RECORD
@@ -0,0 +1,45 @@
+_cffi_backend.cpython-38-x86_64-linux-gnu.so,sha256=V3Tli3M4hKjo8OVs59Sdo2erFZ3JD8esxa6246WCIXs,886688
+cffi-1.14.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+cffi-1.14.2.dist-info/LICENSE,sha256=BLgPWwd7vtaICM_rreteNSPyqMmpZJXFh72W3x6sKjM,1294
+cffi-1.14.2.dist-info/METADATA,sha256=1BxD7t68xfE0P2uSvkFtLIliUnFPSioTgs5gbFYID2M,1191
+cffi-1.14.2.dist-info/RECORD,,
+cffi-1.14.2.dist-info/WHEEL,sha256=0wGQBSV-BlYX9ESMZyEiLMpXIYwrZGj6McPYyDp_RjA,108
+cffi-1.14.2.dist-info/entry_points.txt,sha256=Q9f5C9IpjYxo0d2PK9eUcnkgxHc9pHWwjEMaANPKNCI,76
+cffi-1.14.2.dist-info/top_level.txt,sha256=rE7WR3rZfNKxWI9-jn6hsHCAl7MDkB-FmuQbxWjFehQ,19
+cffi.libs/libffi-806b1a9d.so.6.0.4,sha256=xgERNdZXaam4q4jB4p5DTqbAIUIhQmoxdgvKHaKyA8Y,46632
+cffi/__init__.py,sha256=yFIGDfk4ufsHPyu8u-ESAbBpFlsjeITeYLc4ECiH1MU,513
+cffi/__pycache__/__init__.cpython-38.pyc,,
+cffi/__pycache__/api.cpython-38.pyc,,
+cffi/__pycache__/backend_ctypes.cpython-38.pyc,,
+cffi/__pycache__/cffi_opcode.cpython-38.pyc,,
+cffi/__pycache__/commontypes.cpython-38.pyc,,
+cffi/__pycache__/cparser.cpython-38.pyc,,
+cffi/__pycache__/error.cpython-38.pyc,,
+cffi/__pycache__/ffiplatform.cpython-38.pyc,,
+cffi/__pycache__/lock.cpython-38.pyc,,
+cffi/__pycache__/model.cpython-38.pyc,,
+cffi/__pycache__/pkgconfig.cpython-38.pyc,,
+cffi/__pycache__/recompiler.cpython-38.pyc,,
+cffi/__pycache__/setuptools_ext.cpython-38.pyc,,
+cffi/__pycache__/vengine_cpy.cpython-38.pyc,,
+cffi/__pycache__/vengine_gen.cpython-38.pyc,,
+cffi/__pycache__/verifier.cpython-38.pyc,,
+cffi/_cffi_errors.h,sha256=6nFQ-4dRQI1bXRoSeqdvyKU33TmutQJB_2fAhWSzdl8,3856
+cffi/_cffi_include.h,sha256=tKnA1rdSoPHp23FnDL1mDGwFo-Uj6fXfA6vA6kcoEUc,14800
+cffi/_embedding.h,sha256=e-RMVKxMlTBdEBOoYdhXAbdP_80A2z1IFz9yWjymWrs,17433
+cffi/api.py,sha256=yxJalIePbr1mz_WxAHokSwyP5CVYde44m-nolHnbJNo,42064
+cffi/backend_ctypes.py,sha256=h5ZIzLc6BFVXnGyc9xPqZWUS7qGy7yFSDqXe68Sa8z4,42454
+cffi/cffi_opcode.py,sha256=v9RdD_ovA8rCtqsC95Ivki5V667rAOhGgs3fb2q9xpM,5724
+cffi/commontypes.py,sha256=QS4uxCDI7JhtTyjh1hlnCA-gynmaszWxJaRRLGkJa1A,2689
+cffi/cparser.py,sha256=rO_1pELRw1gI1DE1m4gi2ik5JMfpxouAACLXpRPlVEA,44231
+cffi/error.py,sha256=v6xTiS4U0kvDcy4h_BDRo5v39ZQuj-IMRYLv5ETddZs,877
+cffi/ffiplatform.py,sha256=HMXqR8ks2wtdsNxGaWpQ_PyqIvtiuos_vf1qKCy-cwg,4046
+cffi/lock.py,sha256=l9TTdwMIMpi6jDkJGnQgE9cvTIR7CAntIJr8EGHt3pY,747
+cffi/model.py,sha256=_GH_UF1Rn9vC4AvmgJm6qj7RUXXG3eqKPc8bPxxyBKE,21768
+cffi/parse_c_type.h,sha256=OdwQfwM9ktq6vlCB43exFQmxDBtj2MBNdK8LYl15tjw,5976
+cffi/pkgconfig.py,sha256=LP1w7vmWvmKwyqLaU1Z243FOWGNQMrgMUZrvgFuOlco,4374
+cffi/recompiler.py,sha256=XtQNxkxXz5Ze5Lz-j9yHZj_JIZqbuZz4_STIOBv4sCU,64061
+cffi/setuptools_ext.py,sha256=RUR17N5f8gpiQBBlXL34P9FtOu1mhHIaAf3WJlg5S4I,8931
+cffi/vengine_cpy.py,sha256=YglN8YS-UaHEv2k2cxgotNWE87dHX20-68EyKoiKUYA,43320
+cffi/vengine_gen.py,sha256=5dX7s1DU6pTBOMI6oTVn_8Bnmru_lj932B6b4v29Hlg,26684
+cffi/verifier.py,sha256=J9Enz2rbJb9CHPqWlWQ5uQESoyr0uc7MNWugchjXBv4,11207
diff --git a/functions/source/CreateSSHKey/cffi-1.14.2.dist-info/WHEEL b/functions/source/CreateSSHKey/cffi-1.14.2.dist-info/WHEEL
new file mode 100644
index 0000000..0796649
--- /dev/null
+++ b/functions/source/CreateSSHKey/cffi-1.14.2.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.34.2)
+Root-Is-Purelib: false
+Tag: cp38-cp38-manylinux1_x86_64
+
diff --git a/functions/source/CreateSSHKey/cffi-1.14.2.dist-info/entry_points.txt b/functions/source/CreateSSHKey/cffi-1.14.2.dist-info/entry_points.txt
new file mode 100644
index 0000000..eee7e0f
--- /dev/null
+++ b/functions/source/CreateSSHKey/cffi-1.14.2.dist-info/entry_points.txt
@@ -0,0 +1,3 @@
+[distutils.setup_keywords]
+cffi_modules = cffi.setuptools_ext:cffi_modules
+
diff --git a/functions/source/CreateSSHKey/cffi-1.14.2.dist-info/top_level.txt b/functions/source/CreateSSHKey/cffi-1.14.2.dist-info/top_level.txt
new file mode 100644
index 0000000..f645779
--- /dev/null
+++ b/functions/source/CreateSSHKey/cffi-1.14.2.dist-info/top_level.txt
@@ -0,0 +1,2 @@
+_cffi_backend
+cffi
diff --git a/functions/source/CreateSSHKey/cffi.libs/libffi-806b1a9d.so.6.0.4 b/functions/source/CreateSSHKey/cffi.libs/libffi-806b1a9d.so.6.0.4
new file mode 100755
index 0000000..13bc481
Binary files /dev/null and b/functions/source/CreateSSHKey/cffi.libs/libffi-806b1a9d.so.6.0.4 differ
diff --git a/functions/source/CreateSSHKey/cffi/__init__.py b/functions/source/CreateSSHKey/cffi/__init__.py
index 0ac46d9..acf959c 100644
--- a/functions/source/CreateSSHKey/cffi/__init__.py
+++ b/functions/source/CreateSSHKey/cffi/__init__.py
@@ -3,9 +3,10 @@
from .api import FFI
from .error import CDefError, FFIError, VerificationError, VerificationMissing
+from .error import PkgConfigError
-__version__ = "1.11.2"
-__version_info__ = (1, 11, 2)
+__version__ = "1.14.2"
+__version_info__ = (1, 14, 2)
# The verifier module file names are based on the CRC32 of a string that
# contains the following version number. It may be older than __version__
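The hunk above bumps the vendored cffi from 1.11.2 to 1.14.2 and re-exports the new PkgConfigError. Since the pure-Python layer and the compiled _cffi_backend extension must agree on the version, a packaging-time sanity check along these lines may be useful (a sketch only, assuming functions/source/CreateSSHKey is on sys.path; it is not part of the patch):

    import cffi
    import _cffi_backend

    # FFI() raises on its own if these disagree, but an explicit check gives
    # a clearer error while assembling the Lambda bundle.
    assert cffi.__version__ == "1.14.2", cffi.__version__
    assert _cffi_backend.__version__ == cffi.__version__
    print("vendored cffi OK:", cffi.__version__)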
diff --git a/functions/source/CreateSSHKey/cffi/__pycache__/__init__.cpython-38.pyc b/functions/source/CreateSSHKey/cffi/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000..8986de1
Binary files /dev/null and b/functions/source/CreateSSHKey/cffi/__pycache__/__init__.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cffi/__pycache__/api.cpython-38.pyc b/functions/source/CreateSSHKey/cffi/__pycache__/api.cpython-38.pyc
new file mode 100644
index 0000000..21b727a
Binary files /dev/null and b/functions/source/CreateSSHKey/cffi/__pycache__/api.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cffi/__pycache__/backend_ctypes.cpython-38.pyc b/functions/source/CreateSSHKey/cffi/__pycache__/backend_ctypes.cpython-38.pyc
new file mode 100644
index 0000000..209cd7c
Binary files /dev/null and b/functions/source/CreateSSHKey/cffi/__pycache__/backend_ctypes.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cffi/__pycache__/cffi_opcode.cpython-38.pyc b/functions/source/CreateSSHKey/cffi/__pycache__/cffi_opcode.cpython-38.pyc
new file mode 100644
index 0000000..c4f27b1
Binary files /dev/null and b/functions/source/CreateSSHKey/cffi/__pycache__/cffi_opcode.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cffi/__pycache__/commontypes.cpython-38.pyc b/functions/source/CreateSSHKey/cffi/__pycache__/commontypes.cpython-38.pyc
new file mode 100644
index 0000000..15373a9
Binary files /dev/null and b/functions/source/CreateSSHKey/cffi/__pycache__/commontypes.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cffi/__pycache__/cparser.cpython-38.pyc b/functions/source/CreateSSHKey/cffi/__pycache__/cparser.cpython-38.pyc
new file mode 100644
index 0000000..d0590ea
Binary files /dev/null and b/functions/source/CreateSSHKey/cffi/__pycache__/cparser.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cffi/__pycache__/error.cpython-38.pyc b/functions/source/CreateSSHKey/cffi/__pycache__/error.cpython-38.pyc
new file mode 100644
index 0000000..17ddc72
Binary files /dev/null and b/functions/source/CreateSSHKey/cffi/__pycache__/error.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cffi/__pycache__/ffiplatform.cpython-38.pyc b/functions/source/CreateSSHKey/cffi/__pycache__/ffiplatform.cpython-38.pyc
new file mode 100644
index 0000000..c5e2f65
Binary files /dev/null and b/functions/source/CreateSSHKey/cffi/__pycache__/ffiplatform.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cffi/__pycache__/lock.cpython-38.pyc b/functions/source/CreateSSHKey/cffi/__pycache__/lock.cpython-38.pyc
new file mode 100644
index 0000000..6cfdf3e
Binary files /dev/null and b/functions/source/CreateSSHKey/cffi/__pycache__/lock.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cffi/__pycache__/model.cpython-38.pyc b/functions/source/CreateSSHKey/cffi/__pycache__/model.cpython-38.pyc
new file mode 100644
index 0000000..5059119
Binary files /dev/null and b/functions/source/CreateSSHKey/cffi/__pycache__/model.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cffi/__pycache__/pkgconfig.cpython-38.pyc b/functions/source/CreateSSHKey/cffi/__pycache__/pkgconfig.cpython-38.pyc
new file mode 100644
index 0000000..9bd0e1b
Binary files /dev/null and b/functions/source/CreateSSHKey/cffi/__pycache__/pkgconfig.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cffi/__pycache__/recompiler.cpython-38.pyc b/functions/source/CreateSSHKey/cffi/__pycache__/recompiler.cpython-38.pyc
new file mode 100644
index 0000000..f7f5036
Binary files /dev/null and b/functions/source/CreateSSHKey/cffi/__pycache__/recompiler.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cffi/__pycache__/setuptools_ext.cpython-38.pyc b/functions/source/CreateSSHKey/cffi/__pycache__/setuptools_ext.cpython-38.pyc
new file mode 100644
index 0000000..9d2b05d
Binary files /dev/null and b/functions/source/CreateSSHKey/cffi/__pycache__/setuptools_ext.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cffi/__pycache__/vengine_cpy.cpython-38.pyc b/functions/source/CreateSSHKey/cffi/__pycache__/vengine_cpy.cpython-38.pyc
new file mode 100644
index 0000000..e9688f2
Binary files /dev/null and b/functions/source/CreateSSHKey/cffi/__pycache__/vengine_cpy.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cffi/__pycache__/vengine_gen.cpython-38.pyc b/functions/source/CreateSSHKey/cffi/__pycache__/vengine_gen.cpython-38.pyc
new file mode 100644
index 0000000..4d03c5e
Binary files /dev/null and b/functions/source/CreateSSHKey/cffi/__pycache__/vengine_gen.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cffi/__pycache__/verifier.cpython-38.pyc b/functions/source/CreateSSHKey/cffi/__pycache__/verifier.cpython-38.pyc
new file mode 100644
index 0000000..7e69bb2
Binary files /dev/null and b/functions/source/CreateSSHKey/cffi/__pycache__/verifier.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cffi/_cffi_errors.h b/functions/source/CreateSSHKey/cffi/_cffi_errors.h
index 60dcc3b..83cdad0 100644
--- a/functions/source/CreateSSHKey/cffi/_cffi_errors.h
+++ b/functions/source/CreateSSHKey/cffi/_cffi_errors.h
@@ -50,7 +50,9 @@ static PyObject *_cffi_start_error_capture(void)
"import sys\n"
"class FileLike:\n"
" def write(self, x):\n"
- " of.write(x)\n"
+ " try:\n"
+ " of.write(x)\n"
+ " except: pass\n"
" self.buf += x\n"
"fl = FileLike()\n"
"fl.buf = ''\n"
diff --git a/functions/source/CreateSSHKey/cffi/_cffi_include.h b/functions/source/CreateSSHKey/cffi/_cffi_include.h
index 4f8ef82..e4c0a67 100644
--- a/functions/source/CreateSSHKey/cffi/_cffi_include.h
+++ b/functions/source/CreateSSHKey/cffi/_cffi_include.h
@@ -7,11 +7,50 @@
we can learn about Py_DEBUG from pyconfig.h, but it is unclear if
the same works for the other two macros. Py_DEBUG implies them,
but not the other way around.
+
+ The implementation is messy (issue #350): on Windows, with _MSC_VER,
+ we have to define Py_LIMITED_API even before including pyconfig.h.
+ In that case, we guess what pyconfig.h will do to the macros above,
+ and check our guess after the #include.
+
+ Note that on Windows, with CPython 3.x, you need >= 3.5 and virtualenv
+ version >= 16.0.0. With older versions of either, you don't get a
+ copy of PYTHON3.DLL in the virtualenv. We can't check the version of
+ CPython *before* we even include pyconfig.h. ffi.set_source() puts
+ a ``#define _CFFI_NO_LIMITED_API'' at the start of this file if it is
+ running on Windows < 3.5, as an attempt at fixing it, but that's
+ arguably wrong because it may not be the target version of Python.
+ Still better than nothing I guess. As another workaround, you can
+ remove the definition of Py_LIMITED_API here.
+
+ See also 'py_limited_api' in cffi/setuptools_ext.py.
*/
#if !defined(_CFFI_USE_EMBEDDING) && !defined(Py_LIMITED_API)
-# include <pyconfig.h>
-# if !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG)
-# define Py_LIMITED_API
+# ifdef _MSC_VER
+# if !defined(_DEBUG) && !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG) && !defined(_CFFI_NO_LIMITED_API)
+# define Py_LIMITED_API
+# endif
+# include <pyconfig.h>
+ /* sanity-check: Py_LIMITED_API will cause crashes if any of these
+ are also defined. Normally, the Python file PC/pyconfig.h does not
+ cause any of these to be defined, with the exception that _DEBUG
+ causes Py_DEBUG. Double-check that. */
+# ifdef Py_LIMITED_API
+# if defined(Py_DEBUG)
+# error "pyconfig.h unexpectedly defines Py_DEBUG, but Py_LIMITED_API is set"
+# endif
+# if defined(Py_TRACE_REFS)
+# error "pyconfig.h unexpectedly defines Py_TRACE_REFS, but Py_LIMITED_API is set"
+# endif
+# if defined(Py_REF_DEBUG)
+# error "pyconfig.h unexpectedly defines Py_REF_DEBUG, but Py_LIMITED_API is set"
+# endif
+# endif
+# else
+# include <pyconfig.h>
+# if !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG) && !defined(_CFFI_NO_LIMITED_API)
+# define Py_LIMITED_API
+# endif
# endif
#endif
@@ -251,14 +290,62 @@ _CFFI_UNUSED_FN static int _cffi_to_c_char32_t(PyObject *o)
return (int)_cffi_to_c_wchar3216_t(o);
}
-_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char32_t(int x)
+_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char32_t(unsigned int x)
{
if (sizeof(_cffi_wchar_t) == 4)
return _cffi_from_c_wchar_t((_cffi_wchar_t)x);
else
- return _cffi_from_c_wchar3216_t(x);
+ return _cffi_from_c_wchar3216_t((int)x);
}
+union _cffi_union_alignment_u {
+ unsigned char m_char;
+ unsigned short m_short;
+ unsigned int m_int;
+ unsigned long m_long;
+ unsigned long long m_longlong;
+ float m_float;
+ double m_double;
+ long double m_longdouble;
+};
+
+struct _cffi_freeme_s {
+ struct _cffi_freeme_s *next;
+ union _cffi_union_alignment_u alignment;
+};
+
+_CFFI_UNUSED_FN static int
+_cffi_convert_array_argument(struct _cffi_ctypedescr *ctptr, PyObject *arg,
+ char **output_data, Py_ssize_t datasize,
+ struct _cffi_freeme_s **freeme)
+{
+ char *p;
+ if (datasize < 0)
+ return -1;
+
+ p = *output_data;
+ if (p == NULL) {
+ struct _cffi_freeme_s *fp = (struct _cffi_freeme_s *)PyObject_Malloc(
+ offsetof(struct _cffi_freeme_s, alignment) + (size_t)datasize);
+ if (fp == NULL)
+ return -1;
+ fp->next = *freeme;
+ *freeme = fp;
+ p = *output_data = (char *)&fp->alignment;
+ }
+ memset((void *)p, 0, (size_t)datasize);
+ return _cffi_convert_array_from_object(p, ctptr, arg);
+}
+
+_CFFI_UNUSED_FN static void
+_cffi_free_array_arguments(struct _cffi_freeme_s *freeme)
+{
+ do {
+ void *p = (void *)freeme;
+ freeme = freeme->next;
+ PyObject_Free(p);
+ } while (freeme != NULL);
+}
/********** end CPython-specific section **********/
#else
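Two things change in _cffi_include.h: the Py_LIMITED_API selection now special-cases MSVC and honours a _CFFI_NO_LIMITED_API escape hatch, and pointer arguments converted from Python objects no longer go through an unbounded alloca() -- anything above 640 bytes is heap-allocated via the new _cffi_convert_array_argument()/_cffi_free_array_arguments() helpers and freed after the call. A minimal API-mode sketch that exercises the large-argument path (the module and function names here are invented for illustration):

    from cffi import FFI

    ffibuilder = FFI()
    ffibuilder.cdef("int csum(int *values, int n);")
    ffibuilder.set_source("_example", """
        static int csum(int *values, int n) {
            int i, total = 0;
            for (i = 0; i < n; i++) total += values[i];
            return total;
        }
    """)

    if __name__ == "__main__":
        ffibuilder.compile(verbose=True)
        from _example import lib
        # Passing a list where 'int *' is expected builds a temporary int[]
        # of 40000 bytes -- far over the 640-byte cutoff, so the generated
        # wrapper now heap-allocates it and frees it after the call returns.
        print(lib.csum(list(range(10000)), 10000))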
diff --git a/functions/source/CreateSSHKey/cffi/_embedding.h b/functions/source/CreateSSHKey/cffi/_embedding.h
index b31f009..207d683 100644
--- a/functions/source/CreateSSHKey/cffi/_embedding.h
+++ b/functions/source/CreateSSHKey/cffi/_embedding.h
@@ -145,33 +145,8 @@ static int _cffi_initialize_python(void)
int result;
PyGILState_STATE state;
PyObject *pycode=NULL, *global_dict=NULL, *x;
+ PyObject *builtins;
-#if PY_MAJOR_VERSION >= 3
- /* see comments in _cffi_carefully_make_gil() about the
- Python2/Python3 difference
- */
-#else
- /* Acquire the GIL. We have no threadstate here. If Python is
- already initialized, it is possible that there is already one
- existing for this thread, but it is not made current now.
- */
- PyEval_AcquireLock();
-
- _cffi_py_initialize();
-
- /* The Py_InitializeEx() sometimes made a threadstate for us, but
- not always. Indeed Py_InitializeEx() could be called and do
- nothing. So do we have a threadstate, or not? We don't know,
- but we can replace it with NULL in all cases.
- */
- (void)PyThreadState_Swap(NULL);
-
- /* Now we can release the GIL and re-acquire immediately using the
- logic of PyGILState(), which handles making or installing the
- correct threadstate.
- */
- PyEval_ReleaseLock();
-#endif
state = PyGILState_Ensure();
/* Call the initxxx() function from the present module. It will
@@ -195,8 +170,10 @@ static int _cffi_initialize_python(void)
global_dict = PyDict_New();
if (global_dict == NULL)
goto error;
- if (PyDict_SetItemString(global_dict, "__builtins__",
- PyThreadState_GET()->interp->builtins) < 0)
+ builtins = PyEval_GetBuiltins();
+ if (builtins == NULL)
+ goto error;
+ if (PyDict_SetItemString(global_dict, "__builtins__", builtins) < 0)
goto error;
x = PyEval_EvalCode(
#if PY_MAJOR_VERSION < 3
@@ -247,7 +224,7 @@ static int _cffi_initialize_python(void)
if (f != NULL && f != Py_None) {
PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME
- "\ncompiled with cffi version: 1.11.2"
+ "\ncompiled with cffi version: 1.14.2"
"\n_cffi_backend module: ", f);
modules = PyImport_GetModuleDict();
mod = PyDict_GetItemString(modules, "_cffi_backend");
@@ -269,7 +246,9 @@ static int _cffi_initialize_python(void)
goto done;
}
+#if PY_VERSION_HEX < 0x03080000
PyAPI_DATA(char *) _PyParser_TokenNames[]; /* from CPython */
+#endif
static int _cffi_carefully_make_gil(void)
{
@@ -278,48 +257,44 @@ static int _cffi_carefully_make_gil(void)
that we don't hold the GIL before (if it exists), and we don't
hold it afterwards.
- What it really does is completely different in Python 2 and
- Python 3.
-
- Python 2
- ========
-
- Initialize the GIL, without initializing the rest of Python,
- by calling PyEval_InitThreads().
+ (What it really does used to be completely different in Python 2
+ and Python 3, with the Python 2 solution avoiding the spin-lock
+ around the Py_InitializeEx() call. However, after recent changes
+ to CPython 2.7 (issue #358) it no longer works. So we use the
+ Python 3 solution everywhere.)
- PyEval_InitThreads() must not be called concurrently at all.
+ This initializes Python by calling Py_InitializeEx().
+ Important: this must not be called concurrently at all.
So we use a global variable as a simple spin lock. This global
variable must be from 'libpythonX.Y.so', not from this
cffi-based extension module, because it must be shared from
- different cffi-based extension modules. We choose
+ different cffi-based extension modules.
+
+ In Python < 3.8, we choose
_PyParser_TokenNames[0] as a completely arbitrary pointer value
that is never written to. The default is to point to the
string "ENDMARKER". We change it temporarily to point to the
next character in that string. (Yes, I know it's REALLY
obscure.)
- Python 3
- ========
-
- In Python 3, PyEval_InitThreads() cannot be called before
- Py_InitializeEx() any more. So this function calls
- Py_InitializeEx() first. It uses the same obscure logic to
- make sure we never call it concurrently.
-
- Arguably, this is less good on the spinlock, because
- Py_InitializeEx() takes much longer to run than
- PyEval_InitThreads(). But I didn't find a way around it.
+ In Python >= 3.8, this string array is no longer writable, so
+ instead we pick PyCapsuleType.tp_version_tag. We can't change
+ Python < 3.8 because someone might use a mixture of cffi
+ embedded modules, some of which were compiled before this file
+ changed.
*/
#ifdef WITH_THREAD
+# if PY_VERSION_HEX < 0x03080000
char *volatile *lock = (char *volatile *)_PyParser_TokenNames;
- char *old_value;
+ char *old_value, *locked_value;
while (1) { /* spin loop */
old_value = *lock;
+ locked_value = old_value + 1;
if (old_value[0] == 'E') {
assert(old_value[1] == 'N');
- if (cffi_compare_and_swap(lock, old_value, old_value + 1))
+ if (cffi_compare_and_swap(lock, old_value, locked_value))
break;
}
else {
@@ -330,35 +305,46 @@ static int _cffi_carefully_make_gil(void)
this is only run at start-up anyway. */
}
}
-#endif
+# else
+ int volatile *lock = (int volatile *)&PyCapsule_Type.tp_version_tag;
+ int old_value, locked_value;
+ assert(!(PyCapsule_Type.tp_flags & Py_TPFLAGS_HAVE_VERSION_TAG));
-#if PY_MAJOR_VERSION >= 3
- /* Python 3: call Py_InitializeEx() */
- {
- PyGILState_STATE state = PyGILState_UNLOCKED;
- if (!Py_IsInitialized())
- _cffi_py_initialize();
- else
- state = PyGILState_Ensure();
+ while (1) { /* spin loop */
+ old_value = *lock;
+ locked_value = -42;
+ if (old_value == 0) {
+ if (cffi_compare_and_swap(lock, old_value, locked_value))
+ break;
+ }
+ else {
+ assert(old_value == locked_value);
+ /* should ideally do a spin loop instruction here, but
+ hard to do it portably and doesn't really matter I
+ think: PyEval_InitThreads() should be very fast, and
+ this is only run at start-up anyway. */
+ }
+ }
+# endif
+#endif
+ /* call Py_InitializeEx() */
+ if (!Py_IsInitialized()) {
+ _cffi_py_initialize();
PyEval_InitThreads();
- PyGILState_Release(state);
+ PyEval_SaveThread(); /* release the GIL */
+ /* the returned tstate must be the one that has been stored into the
+ autoTLSkey by _PyGILState_Init() called from Py_Initialize(). */
}
-#else
- /* Python 2: call PyEval_InitThreads() */
-# ifdef WITH_THREAD
- if (!PyEval_ThreadsInitialized()) {
- PyEval_InitThreads(); /* makes the GIL */
- PyEval_ReleaseLock(); /* then release it */
+ else {
+ PyGILState_STATE state = PyGILState_Ensure();
+ PyEval_InitThreads();
+ PyGILState_Release(state);
}
- /* else: there is already a GIL, but we still needed to do the
- spinlock dance to make sure that we see it as fully ready */
-# endif
-#endif
#ifdef WITH_THREAD
/* release the lock */
- while (!cffi_compare_and_swap(lock, old_value + 1, old_value))
+ while (!cffi_compare_and_swap(lock, locked_value, old_value))
;
#endif
@@ -377,11 +363,11 @@ PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]); /* forward */
static struct _cffi_pypy_init_s {
const char *name;
- void (*func)(const void *[]);
+ void *func; /* function pointer */
const char *code;
} _cffi_pypy_init = {
_CFFI_MODULE_NAME,
- (void(*)(const void *[]))_CFFI_PYTHON_STARTUP_FUNC,
+ _CFFI_PYTHON_STARTUP_FUNC,
_CFFI_PYTHON_STARTUP_CODE,
};
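The _embedding.h changes make initialization uniform across Python versions: the Python 2 path is dropped, builtins come from PyEval_GetBuiltins() instead of reaching into the thread state, and on CPython >= 3.8 the start-up spin lock moves from _PyParser_TokenNames (now read-only) to PyCapsule_Type.tp_version_tag. For context, the kind of embedded module that inlines this header is produced roughly like this (a sketch modelled on the upstream embedding example; the module name and function are placeholders):

    from cffi import FFI

    ffibuilder = FFI()
    ffibuilder.embedding_api("int add(int, int);")
    ffibuilder.set_source("_embedded_demo", "")
    ffibuilder.embedding_init_code("""
        from _embedded_demo import ffi

        @ffi.def_extern()
        def add(x, y):
            return x + y
    """)
    # Builds a shared library whose first C-level call into add() runs the
    # start-up logic patched above.
    ffibuilder.compile(target="libembedded_demo.*", verbose=True)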
diff --git a/functions/source/CreateSSHKey/cffi/api.py b/functions/source/CreateSSHKey/cffi/api.py
index 446e554..999a8ae 100644
--- a/functions/source/CreateSSHKey/cffi/api.py
+++ b/functions/source/CreateSSHKey/cffi/api.py
@@ -16,6 +16,8 @@
# Python 3.x
basestring = str
+_unspecified = object()
+
class FFI(object):
@@ -96,18 +98,21 @@ def __init__(self, backend=None):
self.CData, self.CType = backend._get_types()
self.buffer = backend.buffer
- def cdef(self, csource, override=False, packed=False):
+ def cdef(self, csource, override=False, packed=False, pack=None):
"""Parse the given C source. This registers all declared functions,
types, and global variables. The functions and global variables can
then be accessed via either 'ffi.dlopen()' or 'ffi.verify()'.
The types can be used in 'ffi.new()' and other functions.
If 'packed' is specified as True, all structs declared inside this
cdef are packed, i.e. laid out without any field alignment at all.
+ Alternatively, 'pack' can be a small integer, and requests for
+ alignment greater than that are ignored (pack=1 is equivalent to
+ packed=True).
"""
- self._cdef(csource, override=override, packed=packed)
+ self._cdef(csource, override=override, packed=packed, pack=pack)
- def embedding_api(self, csource, packed=False):
- self._cdef(csource, packed=packed, dllexport=True)
+ def embedding_api(self, csource, packed=False, pack=None):
+ self._cdef(csource, packed=packed, pack=pack, dllexport=True)
if self._embedding is None:
self._embedding = ''
@@ -136,13 +141,24 @@ def dlopen(self, name, flags=0):
linked to a particular library, just like C headers; in the
library we only look for the actual (untyped) symbols.
"""
- assert isinstance(name, basestring) or name is None
+ if not (isinstance(name, basestring) or
+ name is None or
+ isinstance(name, self.CData)):
+ raise TypeError("dlopen(name): name must be a file name, None, "
+ "or an already-opened 'void *' handle")
with self._lock:
lib, function_cache = _make_ffi_library(self, name, flags)
self._function_caches.append(function_cache)
self._libraries.append(lib)
return lib
+ def dlclose(self, lib):
+ """Close a library obtained with ffi.dlopen(). After this call,
+ access to functions or variables from the library will fail
+ (possibly with a segmentation fault).
+ """
+ type(lib).__cffi_close__(lib)
+
def _typeof_locked(self, cdecl):
# call me with the lock!
key = cdecl
@@ -331,15 +347,23 @@ def unpack(self, cdata, length):
# """
# note that 'buffer' is a type, set on this instance by __init__
- def from_buffer(self, python_buffer):
- """Return a that points to the data of the
+ def from_buffer(self, cdecl, python_buffer=_unspecified,
+ require_writable=False):
+ """Return a cdata of the given type pointing to the data of the
given Python object, which must support the buffer interface.
Note that this is not meant to be used on the built-in types
str or unicode (you can build 'char[]' arrays explicitly)
but only on objects containing large quantities of raw data
in some other format, like 'array.array' or numpy arrays.
+
+ The first argument is optional and default to 'char[]'.
"""
- return self._backend.from_buffer(self.BCharA, python_buffer)
+ if python_buffer is _unspecified:
+ cdecl, python_buffer = self.BCharA, cdecl
+ elif isinstance(cdecl, basestring):
+ cdecl = self._typeof(cdecl)
+ return self._backend.from_buffer(cdecl, python_buffer,
+ require_writable)
def memmove(self, dest, src, n):
"""ffi.memmove(dest, src, n) copies n bytes of memory from src to dest.
@@ -519,6 +543,9 @@ def new_handle(self, x):
def from_handle(self, x):
return self._backend.from_handle(x)
+ def release(self, x):
+ self._backend.release(x)
+
def set_unicode(self, enabled_flag):
"""Windows: if 'enabled_flag' is True, enable the UNICODE and
_UNICODE defines in C, and declare the types like TCHAR and LPTCSTR
@@ -569,7 +596,7 @@ def ensure(key, value):
if sys.platform == "win32":
# we need 'libpypy-c.lib'. Current distributions of
# pypy (>= 4.1) contain it as 'libs/python27.lib'.
- pythonlib = "python27"
+ pythonlib = "python{0[0]}{0[1]}".format(sys.version_info)
if hasattr(sys, 'prefix'):
ensure('library_dirs', os.path.join(sys.prefix, 'libs'))
else:
@@ -620,6 +647,16 @@ def set_source(self, module_name, source, source_extension='.c', **kwds):
self._assigned_source = (str(module_name), source,
source_extension, kwds)
+ def set_source_pkgconfig(self, module_name, pkgconfig_libs, source,
+ source_extension='.c', **kwds):
+ from . import pkgconfig
+ if not isinstance(pkgconfig_libs, list):
+ raise TypeError("the pkgconfig_libs argument must be a list "
+ "of package names")
+ kwds2 = pkgconfig.flags_from_pkgconfig(pkgconfig_libs)
+ pkgconfig.merge_flags(kwds, kwds2)
+ self.set_source(module_name, source, source_extension, **kwds)
+
def distutils_extension(self, tmpdir='build', verbose=True):
from distutils.dir_util import mkpath
from .recompiler import recompile
@@ -766,9 +803,9 @@ def list_types(self):
def _load_backend_lib(backend, name, flags):
import os
- if name is None:
- if sys.platform != "win32":
- return backend.load_library(None, flags)
+ if not isinstance(name, basestring):
+ if sys.platform != "win32" or name is not None:
+ return backend.load_library(name, flags)
name = "c" # Windows: load_library(None) fails, but this works
# on Python 2 (backward compatibility hack only)
first_error = None
@@ -898,8 +935,11 @@ def __addressof__(self, name):
return addressof_var(name)
raise AttributeError("cffi library has no function or "
"global variable named '%s'" % (name,))
+ def __cffi_close__(self):
+ backendlib.close_lib()
+ self.__dict__.clear()
#
- if libname is not None:
+ if isinstance(libname, basestring):
try:
if not isinstance(libname, str): # unicode, on Python 2
libname = libname.encode('utf-8')
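The api.py hunks surface most of the user-visible 1.12-1.14 additions: cdef(..., pack=N), ffi.dlclose(), a typed ffi.from_buffer() with require_writable, ffi.release(), and set_source_pkgconfig(). A short ABI-mode sketch touching several of them ("libm.so.6" is only an example library name):

    import array
    from cffi import FFI

    ffi = FFI()

    # pack=2 caps field alignment at 2 bytes (pack=1 == the old packed=True).
    ffi.cdef("struct rec { char tag; double value; };", pack=2)
    assert ffi.sizeof("struct rec") == 10    # 1 + 1 padding + 8

    # from_buffer() now takes an optional C type first and can require a
    # writable buffer; release() frees the view deterministically.
    buf = array.array("i", range(16))
    p = ffi.from_buffer("int[]", buf, require_writable=True)
    p[0] = 42
    ffi.release(p)
    assert buf[0] == 42

    # dlopen()ed libraries can now be closed explicitly.
    ffi.cdef("double cos(double);")
    lib = ffi.dlopen("libm.so.6")
    print(lib.cos(0.0))
    ffi.dlclose(lib)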
diff --git a/functions/source/CreateSSHKey/cffi/backend_ctypes.py b/functions/source/CreateSSHKey/cffi/backend_ctypes.py
index 5ef3c13..e7956a7 100644
--- a/functions/source/CreateSSHKey/cffi/backend_ctypes.py
+++ b/functions/source/CreateSSHKey/cffi/backend_ctypes.py
@@ -403,7 +403,7 @@ def _cast_from(cls, source):
source = _cast_source_to_int(source)
return cls(bool(source))
def __int__(self):
- return self._value
+ return int(self._value)
if kind == 'char':
@classmethod
@@ -636,6 +636,10 @@ def _initialize(blob, init):
if isinstance(init, bytes):
init = [init[i:i+1] for i in range(len(init))]
else:
+ if isinstance(init, CTypesGenericArray):
+ if (len(init) != len(blob) or
+ not isinstance(init, CTypesArray)):
+ raise TypeError("length/type mismatch: %s" % (init,))
init = tuple(init)
if len(init) > len(blob):
raise IndexError("too many initializers")
@@ -730,7 +734,8 @@ def new_union_type(self, name):
return self._new_struct_or_union('union', name, ctypes.Union)
def complete_struct_or_union(self, CTypesStructOrUnion, fields, tp,
- totalsize=-1, totalalignment=-1, sflags=0):
+ totalsize=-1, totalalignment=-1, sflags=0,
+ pack=0):
if totalsize >= 0 or totalalignment >= 0:
raise NotImplementedError("the ctypes backend of CFFI does not support "
"structures completed by verify(); please "
@@ -751,6 +756,8 @@ def complete_struct_or_union(self, CTypesStructOrUnion, fields, tp,
bfield_types[fname] = Ellipsis
if sflags & 8:
struct_or_union._pack_ = 1
+ elif pack:
+ struct_or_union._pack_ = pack
struct_or_union._fields_ = cfields
CTypesStructOrUnion._bfield_types = bfield_types
#
diff --git a/functions/source/CreateSSHKey/cffi/cparser.py b/functions/source/CreateSSHKey/cffi/cparser.py
index f7e2e35..74830e9 100644
--- a/functions/source/CreateSSHKey/cffi/cparser.py
+++ b/functions/source/CreateSSHKey/cffi/cparser.py
@@ -16,12 +16,20 @@
except ImportError:
lock = None
+def _workaround_for_static_import_finders():
+ # Issue #392: packaging tools like cx_Freeze can not find these
+ # because pycparser uses exec dynamic import. This is an obscure
+ # workaround. This function is never called.
+ import pycparser.yacctab
+ import pycparser.lextab
+
CDEF_SOURCE_STRING = ""
_r_comment = re.compile(r"/\*.*?\*/|//([^\n\\]|\\.)*?$",
re.DOTALL | re.MULTILINE)
_r_define = re.compile(r"^\s*#\s*define\s+([A-Za-z_][A-Za-z_0-9]*)"
r"\b((?:[^\n\\]|\\.)*?)$",
re.DOTALL | re.MULTILINE)
+_r_line_directive = re.compile(r"^[ \t]*#[ \t]*(?:line|\d+)\b.*$", re.MULTILINE)
_r_partial_enum = re.compile(r"=\s*\.\.\.\s*[,}]|\.\.\.\s*\}")
_r_enum_dotdotdot = re.compile(r"__dotdotdot\d+__$")
_r_partial_array = re.compile(r"\[\s*\.\.\.\s*\]")
@@ -137,10 +145,56 @@ def _preprocess_extern_python(csource):
parts.append(csource)
return ''.join(parts)
+def _warn_for_string_literal(csource):
+ if '"' not in csource:
+ return
+ for line in csource.splitlines():
+ if '"' in line and not line.lstrip().startswith('#'):
+ import warnings
+ warnings.warn("String literal found in cdef() or type source. "
+ "String literals are ignored here, but you should "
+ "remove them anyway because some character sequences "
+ "confuse pre-parsing.")
+ break
+
+def _warn_for_non_extern_non_static_global_variable(decl):
+ if not decl.storage:
+ import warnings
+ warnings.warn("Global variable '%s' in cdef(): for consistency "
+ "with C it should have a storage class specifier "
+ "(usually 'extern')" % (decl.name,))
+
+def _remove_line_directives(csource):
+ # _r_line_directive matches whole lines, without the final \n, if they
+ # start with '#line' with some spacing allowed, or '#NUMBER'. This
+ # function stores them away and replaces them with exactly the string
+ # '#line@N', where N is the index in the list 'line_directives'.
+ line_directives = []
+ def replace(m):
+ i = len(line_directives)
+ line_directives.append(m.group())
+ return '#line@%d' % i
+ csource = _r_line_directive.sub(replace, csource)
+ return csource, line_directives
+
+def _put_back_line_directives(csource, line_directives):
+ def replace(m):
+ s = m.group()
+ if not s.startswith('#line@'):
+ raise AssertionError("unexpected #line directive "
+ "(should have been processed and removed")
+ return line_directives[int(s[6:])]
+ return _r_line_directive.sub(replace, csource)
+
def _preprocess(csource):
+ # First, remove the lines of the form '#line N "filename"' because
+ # the "filename" part could confuse the rest
+ csource, line_directives = _remove_line_directives(csource)
# Remove comments. NOTE: this only work because the cdef() section
- # should not contain any string literal!
- csource = _r_comment.sub(' ', csource)
+ # should not contain any string literals (except in line directives)!
+ def replace_keeping_newlines(m):
+ return ' ' + m.group().count('\n') * '\n'
+ csource = _r_comment.sub(replace_keeping_newlines, csource)
# Remove the "#define FOO x" lines
macros = {}
for match in _r_define.finditer(csource):
@@ -164,6 +218,9 @@ def _preprocess(csource):
# Replace `extern "Python"` with start/end markers
csource = _preprocess_extern_python(csource)
#
+ # Now there should not be any string literal left; warn if we get one
+ _warn_for_string_literal(csource)
+ #
# Replace "[...]" with "[__dotdotdotarray__]"
csource = _r_partial_array.sub('[__dotdotdotarray__]', csource)
#
@@ -190,7 +247,10 @@ def _preprocess(csource):
csource = _r_float_dotdotdot.sub(' __dotdotdotfloat__ ', csource)
# Replace all remaining "..." with the same name, "__dotdotdot__",
# which is declared with a typedef for the purpose of C parsing.
- return csource.replace('...', ' __dotdotdot__ '), macros
+ csource = csource.replace('...', ' __dotdotdot__ ')
+ # Finally, put back the line directives
+ csource = _put_back_line_directives(csource, line_directives)
+ return csource, macros
def _common_type_names(csource):
# Look in the source for what looks like usages of types from the
@@ -306,11 +366,25 @@ def convert_pycparser_error(self, e, csource):
msg = 'parse error\n%s' % (msg,)
raise CDefError(msg)
- def parse(self, csource, override=False, packed=False, dllexport=False):
+ def parse(self, csource, override=False, packed=False, pack=None,
+ dllexport=False):
+ if packed:
+ if packed != True:
+ raise ValueError("'packed' should be False or True; use "
+ "'pack' to give another value")
+ if pack:
+ raise ValueError("cannot give both 'pack' and 'packed'")
+ pack = 1
+ elif pack:
+ if pack & (pack - 1):
+ raise ValueError("'pack' must be a power of two, not %r" %
+ (pack,))
+ else:
+ pack = 0
prev_options = self._options
try:
self._options = {'override': override,
- 'packed': packed,
+ 'packed': pack,
'dllexport': dllexport}
self._internal_parse(csource)
finally:
@@ -352,7 +426,8 @@ def _internal_parse(self, csource):
realtype = self._get_unknown_ptr_type(decl)
else:
realtype, quals = self._get_type_and_quals(
- decl.type, name=decl.name, partial_length_ok=True)
+ decl.type, name=decl.name, partial_length_ok=True,
+ typedef_example="*(%s *)0" % (decl.name,))
self._declare('typedef ' + decl.name, realtype, quals=quals)
elif decl.__class__.__name__ == 'Pragma':
pass # skip pragma, only in pycparser 2.15
@@ -470,6 +545,7 @@ def _parse_decl(self, decl):
if (quals & model.Q_CONST) and not tp.is_array_type:
self._declare('constant ' + decl.name, tp, quals=quals)
else:
+ _warn_for_non_extern_non_static_global_variable(decl)
self._declare('variable ' + decl.name, tp, quals=quals)
def parse_type(self, cdecl):
@@ -518,7 +594,8 @@ def _get_type_pointer(self, type, quals, declname=None):
return model.NamedPointerType(type, declname, quals)
return model.PointerType(type, quals)
- def _get_type_and_quals(self, typenode, name=None, partial_length_ok=False):
+ def _get_type_and_quals(self, typenode, name=None, partial_length_ok=False,
+ typedef_example=None):
# first, dereference typedefs, if we have it already parsed, we're good
if (isinstance(typenode, pycparser.c_ast.TypeDecl) and
isinstance(typenode.type, pycparser.c_ast.IdentifierType) and
@@ -535,8 +612,18 @@ def _get_type_and_quals(self, typenode, name=None, partial_length_ok=False):
else:
length = self._parse_constant(
typenode.dim, partial_length_ok=partial_length_ok)
+ # a hack: in 'typedef int foo_t[...][...];', don't use '...' as
+ # the length but use directly the C expression that would be
+ # generated by recompiler.py. This lets the typedef be used in
+ # many more places within recompiler.py
+ if typedef_example is not None:
+ if length == '...':
+ length = '_cffi_array_len(%s)' % (typedef_example,)
+ typedef_example = "*" + typedef_example
+ #
tp, quals = self._get_type_and_quals(typenode.type,
- partial_length_ok=partial_length_ok)
+ partial_length_ok=partial_length_ok,
+ typedef_example=typedef_example)
return model.ArrayType(tp, length), quals
#
if isinstance(typenode, pycparser.c_ast.PtrDecl):
@@ -785,12 +872,20 @@ def _parse_constant(self, exprnode, partial_length_ok=False):
# or positive/negative number
if isinstance(exprnode, pycparser.c_ast.Constant):
s = exprnode.value
- if s.startswith('0'):
- if s.startswith('0x') or s.startswith('0X'):
- return int(s, 16)
- return int(s, 8)
- elif '1' <= s[0] <= '9':
- return int(s, 10)
+ if '0' <= s[0] <= '9':
+ s = s.rstrip('uUlL')
+ try:
+ if s.startswith('0'):
+ return int(s, 8)
+ else:
+ return int(s, 10)
+ except ValueError:
+ if len(s) > 1:
+ if s.lower()[0:2] == '0x':
+ return int(s, 16)
+ elif s.lower()[0:2] == '0b':
+ return int(s, 2)
+ raise CDefError("invalid constant %r" % (s,))
elif s[0] == "'" and s[-1] == "'" and (
len(s) == 3 or (len(s) == 4 and s[1] == "\\")):
return ord(s[-2])
@@ -818,19 +913,39 @@ def _parse_constant(self, exprnode, partial_length_ok=False):
"the actual array length in this context"
% exprnode.coord.line)
#
- if (isinstance(exprnode, pycparser.c_ast.BinaryOp) and
- exprnode.op == '+'):
- return (self._parse_constant(exprnode.left) +
- self._parse_constant(exprnode.right))
- #
- if (isinstance(exprnode, pycparser.c_ast.BinaryOp) and
- exprnode.op == '-'):
- return (self._parse_constant(exprnode.left) -
- self._parse_constant(exprnode.right))
+ if isinstance(exprnode, pycparser.c_ast.BinaryOp):
+ left = self._parse_constant(exprnode.left)
+ right = self._parse_constant(exprnode.right)
+ if exprnode.op == '+':
+ return left + right
+ elif exprnode.op == '-':
+ return left - right
+ elif exprnode.op == '*':
+ return left * right
+ elif exprnode.op == '/':
+ return self._c_div(left, right)
+ elif exprnode.op == '%':
+ return left - self._c_div(left, right) * right
+ elif exprnode.op == '<<':
+ return left << right
+ elif exprnode.op == '>>':
+ return left >> right
+ elif exprnode.op == '&':
+ return left & right
+ elif exprnode.op == '|':
+ return left | right
+ elif exprnode.op == '^':
+ return left ^ right
#
raise FFIError(":%d: unsupported expression: expected a "
"simple numeric constant" % exprnode.coord.line)
+ def _c_div(self, a, b):
+ result = a // b
+ if ((a < 0) ^ (b < 0)) and (a % b) != 0:
+ result += 1
+ return result
+
def _build_enum_type(self, explicit_name, decls):
if decls is not None:
partial = False
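Besides keeping #line directives intact and warning about string literals and storage-class-less globals in cdef(), the parser now folds full C integer constant expressions: *, /, %, <<, >>, &, |, ^, with C truncate-toward-zero division and integer suffixes. Declarations like the following, which the old code rejected as "unsupported expression", now parse (a standalone sketch, unrelated to this repository):

    from cffi import FFI

    ffi = FFI()
    ffi.cdef("""
        typedef int  block_t[0x1000 / 16];   /* '/' folds, C-style          */
        typedef char pad_t[(7 / 2) * 2];     /* division truncates toward 0 */
        enum flags { F_LOW = 1 << 3, F_MASK = 0xF0u | 0x0Au };
    """)
    assert ffi.sizeof("block_t") == 256 * ffi.sizeof("int")
    assert ffi.sizeof("pad_t") == 6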
diff --git a/functions/source/CreateSSHKey/cffi/error.py b/functions/source/CreateSSHKey/cffi/error.py
index ec19964..0a27247 100644
--- a/functions/source/CreateSSHKey/cffi/error.py
+++ b/functions/source/CreateSSHKey/cffi/error.py
@@ -1,8 +1,9 @@
class FFIError(Exception):
- pass
+ __module__ = 'cffi'
class CDefError(Exception):
+ __module__ = 'cffi'
def __str__(self):
try:
current_decl = self.args[1]
@@ -16,8 +17,15 @@ def __str__(self):
class VerificationError(Exception):
""" An error raised when verification fails
"""
+ __module__ = 'cffi'
class VerificationMissing(Exception):
""" An error raised when incomplete structures are passed into
cdef, but no verification has been done
"""
+ __module__ = 'cffi'
+
+class PkgConfigError(Exception):
+ """ An error raised for missing modules in pkg-config
+ """
+ __module__ = 'cffi'
diff --git a/functions/source/CreateSSHKey/cffi/model.py b/functions/source/CreateSSHKey/cffi/model.py
index fb30f7d..ad1c176 100644
--- a/functions/source/CreateSSHKey/cffi/model.py
+++ b/functions/source/CreateSSHKey/cffi/model.py
@@ -307,11 +307,14 @@ def __init__(self, item, length):
self.c_name_with_marker = (
self.item.c_name_with_marker.replace('&', brackets))
+ def length_is_unknown(self):
+ return isinstance(self.length, str)
+
def resolve_length(self, newlength):
return ArrayType(self.item, newlength)
def build_backend_type(self, ffi, finishlist):
- if self.length == '...':
+ if self.length_is_unknown():
raise CDefError("cannot render the type %r: unknown length" %
(self,))
self.item.get_cached_btype(ffi, finishlist) # force the item BType
@@ -342,7 +345,7 @@ class StructOrUnion(StructOrUnionOrEnum):
fixedlayout = None
completed = 0
partial = False
- packed = False
+ packed = 0
def __init__(self, name, fldnames, fldtypes, fldbitsize, fldquals=None):
self.name = name
@@ -352,21 +355,20 @@ def __init__(self, name, fldnames, fldtypes, fldbitsize, fldquals=None):
self.fldquals = fldquals
self.build_c_name_with_marker()
- def has_anonymous_struct_fields(self):
- if self.fldtypes is None:
- return False
- for name, type in zip(self.fldnames, self.fldtypes):
- if name == '' and isinstance(type, StructOrUnion):
- return True
- return False
+ def anonymous_struct_fields(self):
+ if self.fldtypes is not None:
+ for name, type in zip(self.fldnames, self.fldtypes):
+ if name == '' and isinstance(type, StructOrUnion):
+ yield type
- def enumfields(self):
+ def enumfields(self, expand_anonymous_struct_union=True):
fldquals = self.fldquals
if fldquals is None:
fldquals = (0,) * len(self.fldnames)
for name, type, bitsize, quals in zip(self.fldnames, self.fldtypes,
self.fldbitsize, fldquals):
- if name == '' and isinstance(type, StructOrUnion):
+ if (name == '' and isinstance(type, StructOrUnion)
+ and expand_anonymous_struct_union):
# nested anonymous struct/union
for result in type.enumfields():
yield result
@@ -415,11 +417,14 @@ def finish_backend_type(self, ffi, finishlist):
fldtypes = [tp.get_cached_btype(ffi, finishlist)
for tp in self.fldtypes]
lst = list(zip(self.fldnames, fldtypes, self.fldbitsize))
- sflags = 0
+ extra_flags = ()
if self.packed:
- sflags = 8 # SF_PACKED
+ if self.packed == 1:
+ extra_flags = (8,) # SF_PACKED
+ else:
+ extra_flags = (0, self.packed)
ffi._backend.complete_struct_or_union(BType, lst, self,
- -1, -1, sflags)
+ -1, -1, *extra_flags)
#
else:
fldtypes = []
@@ -428,7 +433,7 @@ def finish_backend_type(self, ffi, finishlist):
fsize = fieldsize[i]
ftype = self.fldtypes[i]
#
- if isinstance(ftype, ArrayType) and ftype.length == '...':
+ if isinstance(ftype, ArrayType) and ftype.length_is_unknown():
# fix the length to match the total size
BItemType = ftype.item.get_cached_btype(ffi, finishlist)
nlen, nrest = divmod(fsize, ffi.sizeof(BItemType))
diff --git a/functions/source/CreateSSHKey/cffi/pkgconfig.py b/functions/source/CreateSSHKey/cffi/pkgconfig.py
new file mode 100644
index 0000000..5c93f15
--- /dev/null
+++ b/functions/source/CreateSSHKey/cffi/pkgconfig.py
@@ -0,0 +1,121 @@
+# pkg-config, https://www.freedesktop.org/wiki/Software/pkg-config/ integration for cffi
+import sys, os, subprocess
+
+from .error import PkgConfigError
+
+
+def merge_flags(cfg1, cfg2):
+ """Merge values from cffi config flags cfg2 to cf1
+
+ Example:
+ merge_flags({"libraries": ["one"]}, {"libraries": ["two"]})
+ {"libraries": ["one", "two"]}
+ """
+ for key, value in cfg2.items():
+ if key not in cfg1:
+ cfg1[key] = value
+ else:
+ if not isinstance(cfg1[key], list):
+ raise TypeError("cfg1[%r] should be a list of strings" % (key,))
+ if not isinstance(value, list):
+ raise TypeError("cfg2[%r] should be a list of strings" % (key,))
+ cfg1[key].extend(value)
+ return cfg1
+
+
+def call(libname, flag, encoding=sys.getfilesystemencoding()):
+ """Calls pkg-config and returns the output if found
+ """
+ a = ["pkg-config", "--print-errors"]
+ a.append(flag)
+ a.append(libname)
+ try:
+ pc = subprocess.Popen(a, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ except EnvironmentError as e:
+ raise PkgConfigError("cannot run pkg-config: %s" % (str(e).strip(),))
+
+ bout, berr = pc.communicate()
+ if pc.returncode != 0:
+ try:
+ berr = berr.decode(encoding)
+ except Exception:
+ pass
+ raise PkgConfigError(berr.strip())
+
+ if sys.version_info >= (3,) and not isinstance(bout, str): # Python 3.x
+ try:
+ bout = bout.decode(encoding)
+ except UnicodeDecodeError:
+ raise PkgConfigError("pkg-config %s %s returned bytes that cannot "
+ "be decoded with encoding %r:\n%r" %
+ (flag, libname, encoding, bout))
+
+ if os.altsep != '\\' and '\\' in bout:
+ raise PkgConfigError("pkg-config %s %s returned an unsupported "
+ "backslash-escaped output:\n%r" %
+ (flag, libname, bout))
+ return bout
+
+
+def flags_from_pkgconfig(libs):
+ r"""Return compiler line flags for FFI.set_source based on pkg-config output
+
+ Usage
+ ...
+ ffibuilder.set_source("_foo", pkgconfig = ["libfoo", "libbar >= 1.8.3"])
+
+ If pkg-config is installed on build machine, then arguments include_dirs,
+ library_dirs, libraries, define_macros, extra_compile_args and
+ extra_link_args are extended with an output of pkg-config for libfoo and
+ libbar.
+
+ Raises PkgConfigError in case the pkg-config call fails.
+ """
+
+ def get_include_dirs(string):
+ return [x[2:] for x in string.split() if x.startswith("-I")]
+
+ def get_library_dirs(string):
+ return [x[2:] for x in string.split() if x.startswith("-L")]
+
+ def get_libraries(string):
+ return [x[2:] for x in string.split() if x.startswith("-l")]
+
+ # convert -Dfoo=bar to list of tuples [("foo", "bar")] expected by distutils
+ def get_macros(string):
+ def _macro(x):
+ x = x[2:] # drop "-D"
+ if '=' in x:
+ return tuple(x.split("=", 1)) # "-Dfoo=bar" => ("foo", "bar")
+ else:
+ return (x, None) # "-Dfoo" => ("foo", None)
+ return [_macro(x) for x in string.split() if x.startswith("-D")]
+
+ def get_other_cflags(string):
+ return [x for x in string.split() if not x.startswith("-I") and
+ not x.startswith("-D")]
+
+ def get_other_libs(string):
+ return [x for x in string.split() if not x.startswith("-L") and
+ not x.startswith("-l")]
+
+ # return kwargs for given libname
+ def kwargs(libname):
+ fse = sys.getfilesystemencoding()
+ all_cflags = call(libname, "--cflags")
+ all_libs = call(libname, "--libs")
+ return {
+ "include_dirs": get_include_dirs(all_cflags),
+ "library_dirs": get_library_dirs(all_libs),
+ "libraries": get_libraries(all_libs),
+ "define_macros": get_macros(all_cflags),
+ "extra_compile_args": get_other_cflags(all_cflags),
+ "extra_link_args": get_other_libs(all_libs),
+ }
+
+ # merge all arguments together
+ ret = {}
+ for libname in libs:
+ lib_flags = kwargs(libname)
+ merge_flags(ret, lib_flags)
+ return ret
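This new module backs FFI.set_source_pkgconfig() (added in api.py above): each requested package is resolved by shelling out to pkg-config, the --cflags/--libs output is split into distutils-style keyword arguments, and the result is merged into whatever keywords were passed explicitly. A build-script sketch ("libpng" is an arbitrary example of an installed pkg-config package; PkgConfigError is raised if pkg-config or the package is missing):

    from cffi import FFI
    from cffi.error import PkgConfigError

    ffibuilder = FFI()
    ffibuilder.cdef("unsigned int png_access_version_number(void);")
    try:
        ffibuilder.set_source_pkgconfig(
            "_png_example", ["libpng >= 1.6"],
            "#include <png.h>")
    except PkgConfigError as e:
        raise SystemExit("pkg-config lookup failed: %s" % (e,))

    if __name__ == "__main__":
        ffibuilder.compile(verbose=True)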
diff --git a/functions/source/CreateSSHKey/cffi/recompiler.py b/functions/source/CreateSSHKey/cffi/recompiler.py
index 722adb4..1aeae5b 100644
--- a/functions/source/CreateSSHKey/cffi/recompiler.py
+++ b/functions/source/CreateSSHKey/cffi/recompiler.py
@@ -7,6 +7,9 @@
VERSION_EMBEDDED = 0x2701
VERSION_CHAR16CHAR32 = 0x2801
+USE_LIMITED_API = (sys.platform != 'win32' or sys.version_info < (3, 0) or
+ sys.version_info >= (3, 5))
+
class GlobalExpr:
def __init__(self, name, address, type_op, size=0, check_value=0):
@@ -283,6 +286,8 @@ def write_c_source_to_f(self, f, preamble):
prnt = self._prnt
if self.ffi._embedding is not None:
prnt('#define _CFFI_USE_EMBEDDING')
+ if not USE_LIMITED_API:
+ prnt('#define _CFFI_NO_LIMITED_API')
#
# first the '#include' (actually done by inlining the file's content)
lines = self._rel_readlines('_cffi_include.h')
@@ -295,8 +300,9 @@ def write_c_source_to_f(self, f, preamble):
base_module_name = self.module_name.split('.')[-1]
if self.ffi._embedding is not None:
prnt('#define _CFFI_MODULE_NAME "%s"' % (self.module_name,))
- prnt('#define _CFFI_PYTHON_STARTUP_CODE %s' %
- (self._string_literal(self.ffi._embedding),))
+ prnt('static const char _CFFI_PYTHON_STARTUP_CODE[] = {')
+ self._print_string_literal_in_array(self.ffi._embedding)
+ prnt('0 };')
prnt('#ifdef PYPY_VERSION')
prnt('# define _CFFI_PYTHON_STARTUP_FUNC _cffi_pypyinit_%s' % (
base_module_name,))
@@ -559,23 +565,24 @@ def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode):
tovar, tp.get_c_name(''), errvalue))
self._prnt(' %s;' % errcode)
- def _extra_local_variables(self, tp, localvars):
+ def _extra_local_variables(self, tp, localvars, freelines):
if isinstance(tp, model.PointerType):
localvars.add('Py_ssize_t datasize')
+ localvars.add('struct _cffi_freeme_s *large_args_free = NULL')
+ freelines.add('if (large_args_free != NULL)'
+ ' _cffi_free_array_arguments(large_args_free);')
def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode):
self._prnt(' datasize = _cffi_prepare_pointer_call_argument(')
self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % (
self._gettypenum(tp), fromvar, tovar))
self._prnt(' if (datasize != 0) {')
- self._prnt(' if (datasize < 0)')
- self._prnt(' %s;' % errcode)
- self._prnt(' %s = (%s)alloca((size_t)datasize);' % (
+ self._prnt(' %s = ((size_t)datasize) <= 640 ? '
+ '(%s)alloca((size_t)datasize) : NULL;' % (
tovar, tp.get_c_name('')))
- self._prnt(' memset((void *)%s, 0, (size_t)datasize);' % (tovar,))
- self._prnt(' if (_cffi_convert_array_from_object('
- '(char *)%s, _cffi_type(%d), %s) < 0)' % (
- tovar, self._gettypenum(tp), fromvar))
+ self._prnt(' if (_cffi_convert_array_argument(_cffi_type(%d), %s, '
+ '(char **)&%s,' % (self._gettypenum(tp), fromvar, tovar))
+ self._prnt(' datasize, &large_args_free) < 0)')
self._prnt(' %s;' % errcode)
self._prnt(' }')
@@ -698,9 +705,10 @@ def _generate_cpy_function_decl(self, tp, name):
prnt(' %s;' % arg)
#
localvars = set()
+ freelines = set()
for type in tp.args:
- self._extra_local_variables(type, localvars)
- for decl in localvars:
+ self._extra_local_variables(type, localvars, freelines)
+ for decl in sorted(localvars):
prnt(' %s;' % (decl,))
#
if not isinstance(tp.result, model.VoidType):
@@ -708,6 +716,7 @@ def _generate_cpy_function_decl(self, tp, name):
context = 'result of %s' % name
result_decl = ' %s;' % tp.result.get_c_name(' result', context)
prnt(result_decl)
+ prnt(' PyObject *pyresult;')
else:
result_decl = None
result_code = ''
@@ -741,9 +750,14 @@ def _generate_cpy_function_decl(self, tp, name):
if numargs == 0:
prnt(' (void)noarg; /* unused */')
if result_code:
- prnt(' return %s;' %
+ prnt(' pyresult = %s;' %
self._convert_expr_from_c(tp.result, 'result', 'result type'))
+ for freeline in freelines:
+ prnt(' ' + freeline)
+ prnt(' return pyresult;')
else:
+ for freeline in freelines:
+ prnt(' ' + freeline)
prnt(' Py_INCREF(Py_None);')
prnt(' return Py_None;')
prnt('}')
@@ -835,6 +849,10 @@ def _field_type(self, tp_struct, field_name, tp_field):
def _struct_collecttype(self, tp):
self._do_collect_type(tp)
+ if self.target_is_python:
+ # also requires nested anon struct/unions in ABI mode, recursively
+ for fldtype in tp.anonymous_struct_fields():
+ self._struct_collecttype(fldtype)
def _struct_decl(self, tp, cname, approxname):
if tp.fldtypes is None:
@@ -850,8 +868,9 @@ def _struct_decl(self, tp, cname, approxname):
try:
if ftype.is_integer_type() or fbitsize >= 0:
# accept all integers, but complain on float or double
- prnt(" (void)((p->%s) | 0); /* check that '%s.%s' is "
- "an integer */" % (fname, cname, fname))
+ if fname != '':
+ prnt(" (void)((p->%s) | 0); /* check that '%s.%s' is "
+ "an integer */" % (fname, cname, fname))
continue
# only accept exactly the type declared, except that '[]'
# is interpreted as a '*' and so will match any array length.
@@ -883,11 +902,17 @@ def _struct_ctx(self, tp, cname, approxname, named_ptr=None):
named_ptr not in self.ffi._parser._included_declarations)):
if tp.fldtypes is None:
pass # opaque
- elif tp.partial or tp.has_anonymous_struct_fields():
+ elif tp.partial or any(tp.anonymous_struct_fields()):
pass # field layout obtained silently from the C compiler
else:
flags.append("_CFFI_F_CHECK_FIELDS")
if tp.packed:
+ if tp.packed > 1:
+ raise NotImplementedError(
+ "%r is declared with 'pack=%r'; only 0 or 1 are "
+ "supported in API mode (try to use \"...;\", which "
+ "does not require a 'pack' declaration)" %
+ (tp, tp.packed))
flags.append("_CFFI_F_PACKED")
else:
flags.append("_CFFI_F_EXTERNAL")
@@ -895,7 +920,8 @@ def _struct_ctx(self, tp, cname, approxname, named_ptr=None):
flags = '|'.join(flags) or '0'
c_fields = []
if reason_for_not_expanding is None:
- enumfields = list(tp.enumfields())
+ expand_anonymous_struct_union = not self.target_is_python
+ enumfields = list(tp.enumfields(expand_anonymous_struct_union))
for fldname, fldtype, fbitsize, fqual in enumfields:
fldtype = self._field_type(tp, fldname, fldtype)
self._check_not_opaque(fldtype,
@@ -1203,7 +1229,8 @@ def _extern_python_decl(self, tp, name, tag_and_space):
size_of_result = '(int)sizeof(%s)' % (
tp.result.get_c_name('', context),)
prnt('static struct _cffi_externpy_s _cffi_externpy__%s =' % name)
- prnt(' { "%s.%s", %s };' % (self.module_name, name, size_of_result))
+ prnt(' { "%s.%s", %s, 0, 0 };' % (
+ self.module_name, name, size_of_result))
prnt()
#
arguments = []
@@ -1271,17 +1298,32 @@ def _generate_cpy_extern_python_ctx(self, tp, name):
_generate_cpy_extern_python_plus_c_ctx = \
_generate_cpy_extern_python_ctx
- def _string_literal(self, s):
- def _char_repr(c):
- # escape with a '\' the characters '\', '"' or (for trigraphs) '?'
- if c in '\\"?': return '\\' + c
- if ' ' <= c < '\x7F': return c
- if c == '\n': return '\\n'
- return '\\%03o' % ord(c)
- lines = []
- for line in s.splitlines(True) or ['']:
- lines.append('"%s"' % ''.join([_char_repr(c) for c in line]))
- return ' \\\n'.join(lines)
+ def _print_string_literal_in_array(self, s):
+ prnt = self._prnt
+ prnt('// # NB. this is not a string because of a size limit in MSVC')
+ if not isinstance(s, bytes): # unicode
+ s = s.encode('utf-8') # -> bytes
+ else:
+ s.decode('utf-8') # got bytes, check for valid utf-8
+ try:
+ s.decode('ascii')
+ except UnicodeDecodeError:
+ s = b'# -*- encoding: utf8 -*-\n' + s
+ for line in s.splitlines(True):
+ comment = line
+ if type('//') is bytes: # python2
+ line = map(ord, line) # make a list of integers
+ else: # python3
+ # type(line) is bytes, which enumerates like a list of integers
+ comment = ascii(comment)[1:-1]
+ prnt(('// ' + comment).rstrip())
+ printed_line = ''
+ for c in line:
+ if len(printed_line) >= 76:
+ prnt(printed_line)
+ printed_line = ''
+ printed_line += '%d,' % (c,)
+ prnt(printed_line)
# ----------
# emitting the opcodes for individual types
@@ -1527,27 +1569,3 @@ def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True,
else:
return None, updated
-def _verify(ffi, module_name, preamble, *args, **kwds):
- # FOR TESTS ONLY
- from testing.udir import udir
- import imp
- assert module_name not in sys.modules, "module name conflict: %r" % (
- module_name,)
- kwds.setdefault('tmpdir', str(udir))
- outputfilename = recompile(ffi, module_name, preamble, *args, **kwds)
- module = imp.load_dynamic(module_name, outputfilename)
- #
- # hack hack hack: copy all *bound methods* from module.ffi back to the
- # ffi instance. Then calls like ffi.new() will invoke module.ffi.new().
- for name in dir(module.ffi):
- if not name.startswith('_'):
- attr = getattr(module.ffi, name)
- if attr is not getattr(ffi, name, object()):
- setattr(ffi, name, attr)
- def typeof_disabled(*args, **kwds):
- raise NotImplementedError
- ffi._typeof = typeof_disabled
- for name in dir(ffi):
- if not name.startswith('_') and not hasattr(module.ffi, name):
- setattr(ffi, name, NotImplemented)
- return module.lib
diff --git a/functions/source/CreateSSHKey/cffi/setuptools_ext.py b/functions/source/CreateSSHKey/cffi/setuptools_ext.py
index 5b0f296..8fe3614 100644
--- a/functions/source/CreateSSHKey/cffi/setuptools_ext.py
+++ b/functions/source/CreateSSHKey/cffi/setuptools_ext.py
@@ -81,8 +81,16 @@ def _set_py_limited_api(Extension, kwds):
it doesn't so far, creating troubles. That's why we check
for "not hasattr(sys, 'gettotalrefcount')" (the 2.7 compatible equivalent
of 'd' not in sys.abiflags). (http://bugs.python.org/issue28401)
+
+ On Windows, with CPython <= 3.4, it's better not to use py_limited_api
+ because virtualenv *still* doesn't copy PYTHON3.DLL on these versions.
+ Recently (2020) we started shipping only >= 3.5 wheels, though. So
+ we'll give it another try and set py_limited_api on Windows >= 3.5.
"""
- if 'py_limited_api' not in kwds and not hasattr(sys, 'gettotalrefcount'):
+ from cffi import recompiler
+
+ if ('py_limited_api' not in kwds and not hasattr(sys, 'gettotalrefcount')
+ and recompiler.USE_LIMITED_API):
import setuptools
try:
setuptools_major_version = int(setuptools.__version__.partition('.')[0])
@@ -143,8 +151,8 @@ def run(self):
def _add_py_module(dist, ffi, module_name):
from distutils.dir_util import mkpath
- from distutils.command.build_py import build_py
- from distutils.command.build_ext import build_ext
+ from setuptools.command.build_py import build_py
+ from setuptools.command.build_ext import build_ext
from distutils import log
from cffi import recompiler
@@ -162,8 +170,31 @@ def run(self):
module_path = module_name.split('.')
module_path[-1] += '.py'
generate_mod(os.path.join(self.build_lib, *module_path))
+ def get_source_files(self):
+ # This is called from 'setup.py sdist' only. Exclude
+ # the generate .py module in this case.
+ saved_py_modules = self.py_modules
+ try:
+ if saved_py_modules:
+ self.py_modules = [m for m in saved_py_modules
+ if m != module_name]
+ return base_class.get_source_files(self)
+ finally:
+ self.py_modules = saved_py_modules
dist.cmdclass['build_py'] = build_py_make_mod
+ # distutils and setuptools have no notion I could find of a
+ # generated python module. If we don't add module_name to
+ # dist.py_modules, then things mostly work but there are some
+ # combination of options (--root and --record) that will miss
+ # the module. So we add it here, which gives a few apparently
+ # harmless warnings about not finding the file outside the
+ # build directory.
+ # Then we need to hack more in get_source_files(); see above.
+ if dist.py_modules is None:
+ dist.py_modules = []
+ dist.py_modules.append(module_name)
+
# the following is only for "build_ext -i"
base_class_2 = dist.cmdclass.get('build_ext', build_ext)
class build_ext_make_mod(base_class_2):
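setuptools_ext.py now derives py_limited_api from recompiler.USE_LIMITED_API (so abi3 builds are also produced on Windows for CPython >= 3.5), uses the setuptools command classes, and registers the generated .py module in dist.py_modules so --root/--record installs include it. All of this is driven from setup.py through the cffi_modules keyword, roughly as below (a sketch; the project, module, and builder-script names are placeholders):

    # setup.py
    from setuptools import setup

    setup(
        name="example-pkg",
        py_modules=["example"],
        setup_requires=["cffi>=1.14.2"],
        cffi_modules=["example_build.py:ffibuilder"],  # hook handled by setuptools_ext
        install_requires=["cffi>=1.14.2"],
    )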
diff --git a/functions/source/CreateSSHKey/cffi/vengine_cpy.py b/functions/source/CreateSSHKey/cffi/vengine_cpy.py
index 536f11f..6de0df0 100644
--- a/functions/source/CreateSSHKey/cffi/vengine_cpy.py
+++ b/functions/source/CreateSSHKey/cffi/vengine_cpy.py
@@ -275,22 +275,23 @@ def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode):
tovar, tp.get_c_name(''), errvalue))
self._prnt(' %s;' % errcode)
- def _extra_local_variables(self, tp, localvars):
+ def _extra_local_variables(self, tp, localvars, freelines):
if isinstance(tp, model.PointerType):
localvars.add('Py_ssize_t datasize')
+ localvars.add('struct _cffi_freeme_s *large_args_free = NULL')
+ freelines.add('if (large_args_free != NULL)'
+ ' _cffi_free_array_arguments(large_args_free);')
def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode):
self._prnt(' datasize = _cffi_prepare_pointer_call_argument(')
self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % (
self._gettypenum(tp), fromvar, tovar))
self._prnt(' if (datasize != 0) {')
- self._prnt(' if (datasize < 0)')
- self._prnt(' %s;' % errcode)
- self._prnt(' %s = alloca((size_t)datasize);' % (tovar,))
- self._prnt(' memset((void *)%s, 0, (size_t)datasize);' % (tovar,))
- self._prnt(' if (_cffi_convert_array_from_object('
- '(char *)%s, _cffi_type(%d), %s) < 0)' % (
- tovar, self._gettypenum(tp), fromvar))
+ self._prnt(' %s = ((size_t)datasize) <= 640 ? '
+ 'alloca((size_t)datasize) : NULL;' % (tovar,))
+ self._prnt(' if (_cffi_convert_array_argument(_cffi_type(%d), %s, '
+ '(char **)&%s,' % (self._gettypenum(tp), fromvar, tovar))
+ self._prnt(' datasize, &large_args_free) < 0)')
self._prnt(' %s;' % errcode)
self._prnt(' }')
@@ -369,15 +370,17 @@ def _generate_cpy_function_decl(self, tp, name):
prnt(' %s;' % type.get_c_name(' x%d' % i, context))
#
localvars = set()
+ freelines = set()
for type in tp.args:
- self._extra_local_variables(type, localvars)
- for decl in localvars:
+ self._extra_local_variables(type, localvars, freelines)
+ for decl in sorted(localvars):
prnt(' %s;' % (decl,))
#
if not isinstance(tp.result, model.VoidType):
result_code = 'result = '
context = 'result of %s' % name
prnt(' %s;' % tp.result.get_c_name(' result', context))
+ prnt(' PyObject *pyresult;')
else:
result_code = ''
#
@@ -409,9 +412,14 @@ def _generate_cpy_function_decl(self, tp, name):
if numargs == 0:
prnt(' (void)noarg; /* unused */')
if result_code:
- prnt(' return %s;' %
+ prnt(' pyresult = %s;' %
self._convert_expr_from_c(tp.result, 'result', 'result type'))
+ for freeline in freelines:
+ prnt(' ' + freeline)
+ prnt(' return pyresult;')
else:
+ for freeline in freelines:
+ prnt(' ' + freeline)
prnt(' Py_INCREF(Py_None);')
prnt(' return Py_None;')
prnt('}')
@@ -754,7 +762,7 @@ def _generate_cpy_variable_decl(self, tp, name):
if isinstance(tp, model.ArrayType):
tp_ptr = model.PointerType(tp.item)
self._generate_cpy_const(False, name, tp, vartp=tp_ptr,
- size_too = (tp.length == '...'))
+ size_too = tp.length_is_unknown())
else:
tp_ptr = model.PointerType(tp)
self._generate_cpy_const(False, name, tp_ptr, category='var')
@@ -766,7 +774,7 @@ def _loaded_cpy_variable(self, tp, name, module, library):
value = getattr(library, name)
if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the
# sense that "a=..." is forbidden
- if tp.length == '...':
+ if tp.length_is_unknown():
assert isinstance(value, tuple)
(value, size) = value
BItemType = self.ffi._get_cached_btype(tp.item)
@@ -981,6 +989,59 @@ def _generate_setup_custom(self):
return PyBool_FromLong(was_alive);
}
+union _cffi_union_alignment_u {
+ unsigned char m_char;
+ unsigned short m_short;
+ unsigned int m_int;
+ unsigned long m_long;
+ unsigned long long m_longlong;
+ float m_float;
+ double m_double;
+ long double m_longdouble;
+};
+
+struct _cffi_freeme_s {
+ struct _cffi_freeme_s *next;
+ union _cffi_union_alignment_u alignment;
+};
+
+#ifdef __GNUC__
+ __attribute__((unused))
+#endif
+static int _cffi_convert_array_argument(CTypeDescrObject *ctptr, PyObject *arg,
+ char **output_data, Py_ssize_t datasize,
+ struct _cffi_freeme_s **freeme)
+{
+ char *p;
+ if (datasize < 0)
+ return -1;
+
+ p = *output_data;
+ if (p == NULL) {
+ struct _cffi_freeme_s *fp = (struct _cffi_freeme_s *)PyObject_Malloc(
+ offsetof(struct _cffi_freeme_s, alignment) + (size_t)datasize);
+ if (fp == NULL)
+ return -1;
+ fp->next = *freeme;
+ *freeme = fp;
+ p = *output_data = (char *)&fp->alignment;
+ }
+ memset((void *)p, 0, (size_t)datasize);
+ return _cffi_convert_array_from_object(p, ctptr, arg);
+}
+
+#ifdef __GNUC__
+ __attribute__((unused))
+#endif
+static void _cffi_free_array_arguments(struct _cffi_freeme_s *freeme)
+{
+ do {
+ void *p = (void *)freeme;
+ freeme = freeme->next;
+ PyObject_Free(p);
+ } while (freeme != NULL);
+}
+
static int _cffi_init(void)
{
PyObject *module, *c_api_object = NULL;
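
The vengine_cpy change above stops the generated C wrappers from alloca()-ing arbitrarily large array arguments: sizes over 640 bytes are heap-allocated inside _cffi_convert_array_argument, chained into a _cffi_freeme_s list, and released by _cffi_free_array_arguments only after the result has been converted to pyresult. As a rough illustration of the code-generation side only, the sketch below (plain Python, not cffi itself; every identifier except those visible in the hunk is illustrative) mirrors how declaration and cleanup lines are now collected per argument and emitted around the call:

def emit_wrapper(arg_kinds, emit):
    # Collect per-argument local declarations and cleanup lines, as the
    # patched _generate_cpy_function_decl does via _extra_local_variables.
    localvars, freelines = set(), set()
    for kind in arg_kinds:
        if kind == "pointer":  # stands in for isinstance(tp, model.PointerType)
            localvars.add("Py_ssize_t datasize")
            localvars.add("struct _cffi_freeme_s *large_args_free = NULL")
            freelines.add("if (large_args_free != NULL) "
                          "_cffi_free_array_arguments(large_args_free);")
    for decl in sorted(localvars):   # sorted for deterministic output
        emit("  %s;" % decl)
    # ... argument conversion and the actual C call would be emitted here ...
    for line in freelines:           # cleanup runs before the wrapper returns
        emit("  " + line)
    emit("  return pyresult;")

emit_wrapper(["pointer", "int"], print)
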
diff --git a/functions/source/CreateSSHKey/cffi/vengine_gen.py b/functions/source/CreateSSHKey/cffi/vengine_gen.py
index a64ff64..2642152 100644
--- a/functions/source/CreateSSHKey/cffi/vengine_gen.py
+++ b/functions/source/CreateSSHKey/cffi/vengine_gen.py
@@ -565,7 +565,7 @@ def _loaded_gen_macro(self, tp, name, module, library):
def _generate_gen_variable_decl(self, tp, name):
if isinstance(tp, model.ArrayType):
- if tp.length == '...':
+ if tp.length_is_unknown():
prnt = self._prnt
funcname = '_cffi_sizeof_%s' % (name,)
self.export_symbols.append(funcname)
@@ -584,7 +584,7 @@ def _generate_gen_variable_decl(self, tp, name):
def _loaded_gen_variable(self, tp, name, module, library):
if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the
# sense that "a=..." is forbidden
- if tp.length == '...':
+ if tp.length_is_unknown():
funcname = '_cffi_sizeof_%s' % (name,)
BFunc = self.ffi._typeof_locked('size_t(*)(void)')[0]
function = module.load_function(BFunc, funcname)
diff --git a/functions/source/CreateSSHKey/cffi/verifier.py b/functions/source/CreateSSHKey/cffi/verifier.py
index 3cfeecb..59b78c2 100644
--- a/functions/source/CreateSSHKey/cffi/verifier.py
+++ b/functions/source/CreateSSHKey/cffi/verifier.py
@@ -301,7 +301,6 @@ def _get_so_suffixes():
return suffixes
def _ensure_dir(filename):
- try:
- os.makedirs(os.path.dirname(filename))
- except OSError:
- pass
+ dirname = os.path.dirname(filename)
+ if dirname and not os.path.isdir(dirname):
+ os.makedirs(dirname)
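
The new _ensure_dir no longer swallows every OSError: it calls os.makedirs only when the parent directory is actually missing, so genuine failures (for example a permission error on the cache directory) propagate immediately instead of resurfacing later as a confusing compile error. A standalone sketch of the same behaviour, written here purely for illustration rather than imported from cffi:

import os

def ensure_dir(filename):
    # Create the parent directory only when it is absent; unlike the old
    # try/except-OSError version, real errors are not silently ignored.
    dirname = os.path.dirname(filename)
    if dirname and not os.path.isdir(dirname):
        os.makedirs(dirname)

ensure_dir("/tmp/cffi-demo-cache/_demo_module.c")  # hypothetical cache path
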
diff --git a/functions/source/CreateSSHKey/cfnresponse.py b/functions/source/CreateSSHKey/cfnresponse.py
index bb955a5..d73855b 100644
--- a/functions/source/CreateSSHKey/cfnresponse.py
+++ b/functions/source/CreateSSHKey/cfnresponse.py
@@ -1,20 +1,20 @@
-# Copyright 2016 Amazon Web Services, Inc. or its affiliates. All Rights Reserved.
+# Copyright 2020 Amazon Web Services, Inc. or its affiliates. All Rights Reserved.
# This file is licensed to you under the AWS Customer Agreement (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at http://aws.amazon.com/agreement/ .
# This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, express or implied.
# See the License for the specific language governing permissions and limitations under the License.
-from botocore.vendored import requests
+import urllib3
import json
-
+http = urllib3.PoolManager()
SUCCESS = "SUCCESS"
FAILED = "FAILED"
-def send(event, context, responseStatus, responseData, physicalResourceId):
+def send(event, context, responseStatus, responseData, physicalResourceId=None, noEcho=False):
responseUrl = event['ResponseURL']
- print responseUrl
+ print(responseUrl)
responseBody = {}
responseBody['Status'] = responseStatus
@@ -23,21 +23,21 @@ def send(event, context, responseStatus, responseData, physicalResourceId):
responseBody['StackId'] = event['StackId']
responseBody['RequestId'] = event['RequestId']
responseBody['LogicalResourceId'] = event['LogicalResourceId']
+ responseBody['NoEcho'] = noEcho
responseBody['Data'] = responseData
json_responseBody = json.dumps(responseBody)
-
- print "Response body:\n" + json_responseBody
+
+ print("Response body:\n" + json_responseBody)
headers = {
- 'content-type' : '',
+ 'content-type' : '',
'content-length' : str(len(json_responseBody))
}
-
+
try:
- response = requests.put(responseUrl,
- data=json_responseBody,
- headers=headers)
- print "Status code: " + response.reason
+
+ response = http.request('PUT',responseUrl,body=json_responseBody.encode('utf-8'),headers=headers)
+ print("Status code: " + response.reason)
except Exception as e:
- print "send(..) failed executing requests.put(..): " + str(e)
+ print("send(..) failed executing requests.put(..): " + str(e))
diff --git a/functions/source/CreateSSHKey/cryptography-3.1.dist-info/AUTHORS.rst b/functions/source/CreateSSHKey/cryptography-3.1.dist-info/AUTHORS.rst
new file mode 100644
index 0000000..8ba7e0e
--- /dev/null
+++ b/functions/source/CreateSSHKey/cryptography-3.1.dist-info/AUTHORS.rst
@@ -0,0 +1,44 @@
+AUTHORS
+=======
+
+PGP key fingerprints are enclosed in parentheses.
+
+* Alex Gaynor (E27D 4AA0 1651 72CB C5D2 AF2B 125F 5C67 DFE9 4084)
+* Hynek Schlawack (C2A0 4F86 ACE2 8ADC F817 DBB7 AE25 3622 7F69 F181)
+* Donald Stufft
+* Laurens Van Houtven <_@lvh.io> (D9DC 4315 772F 8E91 DD22 B153 DFD1 3DF7 A8DD 569B)
+* Christian Heimes
+* Paul Kehrer (05FD 9FA1 6CF7 5735 0D91 A560 235A E5F1 29F9 ED98)
+* Jarret Raim
+* Alex Stapleton (A1C7 E50B 66DE 39ED C847 9665 8E3C 20D1 9BD9 5C4C)
+* David Reid (0F83 CC87 B32F 482B C726 B58A 9FBF D8F4 DA89 6D74)
+* Matthew Lefkowitz (06AB F638 E878 CD29 1264 18AB 7EC2 8125 0FBC 4A07)
+* Konstantinos Koukopoulos (D6BD 52B6 8C99 A91C E2C8 934D 3300 566B 3A46 726E)
+* Stephen Holsapple
+* Terry Chia
+* Matthew Iversen (2F04 3DCC D6E6 D5AC D262 2E0B C046 E8A8 7452 2973)
+* Mohammed Attia
+* Michael Hart
+* Mark Adams (A18A 7DD3 283C CF2A B0CE FE0E C7A0 5E3F C972 098C)
+* Gregory Haynes (6FB6 44BF 9FD0 EBA2 1CE9 471F B08F 42F9 0DC6 599F)
+* Chelsea Winfree
+* Steven Buss (1FB9 2EC1 CF93 DFD6 B47F F583 B1A5 6C22 290D A4C3)
+* Andre Caron
+* Jiangge Zhang (BBEC 782B 015F 71B1 5FF7 EACA 1A8C AA98 255F 5000)
+* Major Hayden (1BF9 9264 9596 0033 698C 252B 7370 51E0 C101 1FB1)
+* Phoebe Queen (10D4 7741 AB65 50F4 B264 3888 DA40 201A 072B C1FA)
+* Google Inc.
+* Amaury Forgeot d'Arc
+* Dirkjan Ochtman (25BB BAC1 13C1 BFD5 AA59 4A4C 9F96 B929 3038 0381)
+* Maximilian Hils
+* Simo Sorce
+* Thomas Sileo
+* Fraser Tweedale
+* Ofek Lev (FFB6 B92B 30B1 7848 546E 9912 972F E913 DAD5 A46E)
+* Erik Daguerre
+* Aviv Palivoda
+* Chris Wolfe
+* Jeremy Lainé
+* Denis Gladkikh
+* John Pacific (2CF6 0381 B5EF 29B7 D48C 2020 7BB9 71A0 E891 44D9)
+* Marti Raudsepp
diff --git a/functions/source/CreateSSHKey/cryptography-3.1.dist-info/INSTALLER b/functions/source/CreateSSHKey/cryptography-3.1.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/functions/source/CreateSSHKey/cryptography-3.1.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/functions/source/CreateSSHKey/cryptography-3.1.dist-info/LICENSE b/functions/source/CreateSSHKey/cryptography-3.1.dist-info/LICENSE
new file mode 100644
index 0000000..fe5af51
--- /dev/null
+++ b/functions/source/CreateSSHKey/cryptography-3.1.dist-info/LICENSE
@@ -0,0 +1,6 @@
+This software is made available under the terms of *either* of the licenses
+found in LICENSE.APACHE or LICENSE.BSD. Contributions to cryptography are made
+under the terms of *both* these licenses.
+
+The code used in the OpenSSL locking callback and OS random engine is derived
+from CPython, and is licensed under the terms of the PSF License Agreement.
diff --git a/functions/source/CreateSSHKey/cryptography-3.1.dist-info/LICENSE.APACHE b/functions/source/CreateSSHKey/cryptography-3.1.dist-info/LICENSE.APACHE
new file mode 100644
index 0000000..62589ed
--- /dev/null
+++ b/functions/source/CreateSSHKey/cryptography-3.1.dist-info/LICENSE.APACHE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ https://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ https://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/functions/source/CreateSSHKey/cryptography-3.1.dist-info/LICENSE.BSD b/functions/source/CreateSSHKey/cryptography-3.1.dist-info/LICENSE.BSD
new file mode 100644
index 0000000..ec1a29d
--- /dev/null
+++ b/functions/source/CreateSSHKey/cryptography-3.1.dist-info/LICENSE.BSD
@@ -0,0 +1,27 @@
+Copyright (c) Individual contributors.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+ 1. Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+ 3. Neither the name of PyCA Cryptography nor the names of its contributors
+ may be used to endorse or promote products derived from this software
+ without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/functions/source/CreateSSHKey/cryptography-3.1.dist-info/LICENSE.PSF b/functions/source/CreateSSHKey/cryptography-3.1.dist-info/LICENSE.PSF
new file mode 100644
index 0000000..4d3a4f5
--- /dev/null
+++ b/functions/source/CreateSSHKey/cryptography-3.1.dist-info/LICENSE.PSF
@@ -0,0 +1,41 @@
+1. This LICENSE AGREEMENT is between the Python Software Foundation ("PSF"), and
+ the Individual or Organization ("Licensee") accessing and otherwise using Python
+ 2.7.12 software in source or binary form and its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, PSF hereby
+ grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
+ analyze, test, perform and/or display publicly, prepare derivative works,
+ distribute, and otherwise use Python 2.7.12 alone or in any derivative
+ version, provided, however, that PSF's License Agreement and PSF's notice of
+ copyright, i.e., "Copyright © 2001-2016 Python Software Foundation; All Rights
+ Reserved" are retained in Python 2.7.12 alone or in any derivative version
+ prepared by Licensee.
+
+3. In the event Licensee prepares a derivative work that is based on or
+ incorporates Python 2.7.12 or any part thereof, and wants to make the
+ derivative work available to others as provided herein, then Licensee hereby
+ agrees to include in any such work a brief summary of the changes made to Python
+ 2.7.12.
+
+4. PSF is making Python 2.7.12 available to Licensee on an "AS IS" basis.
+ PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF
+ EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND DISCLAIMS ANY REPRESENTATION OR
+ WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE
+ USE OF PYTHON 2.7.12 WILL NOT INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON 2.7.12
+ FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF
+ MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 2.7.12, OR ANY DERIVATIVE
+ THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material breach of
+ its terms and conditions.
+
+7. Nothing in this License Agreement shall be deemed to create any relationship
+ of agency, partnership, or joint venture between PSF and Licensee. This License
+ Agreement does not grant permission to use PSF trademarks or trade name in a
+ trademark sense to endorse or promote products or services of Licensee, or any
+ third party.
+
+8. By copying, installing or otherwise using Python 2.7.12, Licensee agrees
+ to be bound by the terms and conditions of this License Agreement.
diff --git a/functions/source/CreateSSHKey/cryptography-3.1.dist-info/METADATA b/functions/source/CreateSSHKey/cryptography-3.1.dist-info/METADATA
new file mode 100644
index 0000000..32b2e8a
--- /dev/null
+++ b/functions/source/CreateSSHKey/cryptography-3.1.dist-info/METADATA
@@ -0,0 +1,134 @@
+Metadata-Version: 2.1
+Name: cryptography
+Version: 3.1
+Summary: cryptography is a package which provides cryptographic recipes and primitives to Python developers.
+Home-page: https://github.com/pyca/cryptography
+Author: The cryptography developers
+Author-email: cryptography-dev@python.org
+License: BSD or Apache License, Version 2.0
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Natural Language :: English
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Operating System :: POSIX
+Classifier: Operating System :: POSIX :: BSD
+Classifier: Operating System :: POSIX :: Linux
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Security :: Cryptography
+Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*
+Description-Content-Type: text/x-rst
+Requires-Dist: six (>=1.4.1)
+Requires-Dist: cffi (!=1.11.3,>=1.8)
+Requires-Dist: enum34 ; python_version < '3'
+Requires-Dist: ipaddress ; python_version < '3'
+Provides-Extra: docs
+Requires-Dist: sphinx (!=1.8.0,!=3.1.0,!=3.1.1,>=1.6.5) ; extra == 'docs'
+Requires-Dist: sphinx-rtd-theme ; extra == 'docs'
+Provides-Extra: docstest
+Requires-Dist: doc8 ; extra == 'docstest'
+Requires-Dist: pyenchant (>=1.6.11) ; extra == 'docstest'
+Requires-Dist: twine (>=1.12.0) ; extra == 'docstest'
+Requires-Dist: sphinxcontrib-spelling (>=4.0.1) ; extra == 'docstest'
+Provides-Extra: pep8test
+Requires-Dist: black ; extra == 'pep8test'
+Requires-Dist: flake8 ; extra == 'pep8test'
+Requires-Dist: flake8-import-order ; extra == 'pep8test'
+Requires-Dist: pep8-naming ; extra == 'pep8test'
+Provides-Extra: ssh
+Requires-Dist: bcrypt (>=3.1.5) ; extra == 'ssh'
+Provides-Extra: test
+Requires-Dist: pytest (!=3.9.0,!=3.9.1,!=3.9.2,>=3.6.0) ; extra == 'test'
+Requires-Dist: pretend ; extra == 'test'
+Requires-Dist: iso8601 ; extra == 'test'
+Requires-Dist: pytz ; extra == 'test'
+Requires-Dist: hypothesis (!=3.79.2,>=1.11.4) ; extra == 'test'
+
+pyca/cryptography
+=================
+
+.. image:: https://img.shields.io/pypi/v/cryptography.svg
+ :target: https://pypi.org/project/cryptography/
+ :alt: Latest Version
+
+.. image:: https://readthedocs.org/projects/cryptography/badge/?version=latest
+ :target: https://cryptography.io
+ :alt: Latest Docs
+
+.. image:: https://travis-ci.org/pyca/cryptography.svg?branch=master
+ :target: https://travis-ci.org/pyca/cryptography
+
+.. image:: https://github.com/pyca/cryptography/workflows/CI/badge.svg?branch=master
+ :target: https://github.com/pyca/cryptography/actions?query=workflow%3ACI+branch%3Amaster
+
+.. image:: https://codecov.io/github/pyca/cryptography/coverage.svg?branch=master
+ :target: https://codecov.io/github/pyca/cryptography?branch=master
+
+
+``cryptography`` is a package which provides cryptographic recipes and
+primitives to Python developers. Our goal is for it to be your "cryptographic
+standard library". It supports Python 2.7, Python 3.5+, and PyPy 5.4+.
+
+``cryptography`` includes both high level recipes and low level interfaces to
+common cryptographic algorithms such as symmetric ciphers, message digests, and
+key derivation functions. For example, to encrypt something with
+``cryptography``'s high level symmetric encryption recipe:
+
+.. code-block:: pycon
+
+ >>> from cryptography.fernet import Fernet
+ >>> # Put this somewhere safe!
+ >>> key = Fernet.generate_key()
+ >>> f = Fernet(key)
+ >>> token = f.encrypt(b"A really secret message. Not for prying eyes.")
+ >>> token
+ '...'
+ >>> f.decrypt(token)
+ 'A really secret message. Not for prying eyes.'
+
+You can find more information in the `documentation`_.
+
+You can install ``cryptography`` with:
+
+.. code-block:: console
+
+ $ pip install cryptography
+
+For full details see `the installation documentation`_.
+
+Discussion
+~~~~~~~~~~
+
+If you run into bugs, you can file them in our `issue tracker`_.
+
+We maintain a `cryptography-dev`_ mailing list for development discussion.
+
+You can also join ``#cryptography-dev`` on Freenode to ask questions or get
+involved.
+
+Security
+~~~~~~~~
+
+Need to report a security issue? Please consult our `security reporting`_
+documentation.
+
+
+.. _`documentation`: https://cryptography.io/
+.. _`the installation documentation`: https://cryptography.io/en/latest/installation/
+.. _`issue tracker`: https://github.com/pyca/cryptography/issues
+.. _`cryptography-dev`: https://mail.python.org/mailman/listinfo/cryptography-dev
+.. _`security reporting`: https://cryptography.io/en/latest/security/
+
+
diff --git a/functions/source/CreateSSHKey/cryptography-3.1.dist-info/RECORD b/functions/source/CreateSSHKey/cryptography-3.1.dist-info/RECORD
new file mode 100644
index 0000000..d8e355a
--- /dev/null
+++ b/functions/source/CreateSSHKey/cryptography-3.1.dist-info/RECORD
@@ -0,0 +1,179 @@
+cryptography-3.1.dist-info/AUTHORS.rst,sha256=MoKTlP6yOmnLC_KXarHVQP0sItBk11dtZ7LzV0VhNB0,2475
+cryptography-3.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+cryptography-3.1.dist-info/LICENSE,sha256=NUUrVX-rDvsegNftucTlEYuThAgq2qBR3eNCECy53o0,352
+cryptography-3.1.dist-info/LICENSE.APACHE,sha256=qsc7MUj20dcRHbyjIJn2jSbGRMaBOuHk8F9leaomY_4,11360
+cryptography-3.1.dist-info/LICENSE.BSD,sha256=YCxMdILeZHndLpeTzaJ15eY9dz2s0eymiSMqtwCPtPs,1532
+cryptography-3.1.dist-info/LICENSE.PSF,sha256=aT7ApmKzn5laTyUrA6YiKUVHDBtvEsoCkY5O_g32S58,2415
+cryptography-3.1.dist-info/METADATA,sha256=O_PTfImK1O69unFNgjGqF_shxUGKists93PJPJJT1sc,5174
+cryptography-3.1.dist-info/RECORD,,
+cryptography-3.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+cryptography-3.1.dist-info/WHEEL,sha256=idZhaAgCajrx6mqUbq2PISSyVyiBdD7cMlbcSPPaZS8,111
+cryptography-3.1.dist-info/top_level.txt,sha256=rR2wh6A6juD02TBZNJqqonh8x9UP9Sa5Z9Hl1pCPCiM,31
+cryptography/__about__.py,sha256=-u30VU1UeklfxhoD508OQDCzcljI_xLA3qqKrkjmV30,833
+cryptography/__init__.py,sha256=1ciCmVjkvUYdyrPg1tXtLkFPO7dlRJPfDvaAuh5jSG8,1207
+cryptography/__pycache__/__about__.cpython-38.pyc,,
+cryptography/__pycache__/__init__.cpython-38.pyc,,
+cryptography/__pycache__/exceptions.cpython-38.pyc,,
+cryptography/__pycache__/fernet.cpython-38.pyc,,
+cryptography/__pycache__/utils.cpython-38.pyc,,
+cryptography/exceptions.py,sha256=NPtDqIq1lsQ1Gb1BXkjsGIvbMrWMaKCaT8epiSgi010,1259
+cryptography/fernet.py,sha256=sg5RNOCKx9BrPV6wIfyXB9sDWJcw9-GPcPgN4lVmr8w,5980
+cryptography/hazmat/__init__.py,sha256=hEPNQw8dgjIPIn42qaLwXNRLCyTGNZeSvkQb57DPhbs,483
+cryptography/hazmat/__pycache__/__init__.cpython-38.pyc,,
+cryptography/hazmat/__pycache__/_der.cpython-38.pyc,,
+cryptography/hazmat/__pycache__/_oid.cpython-38.pyc,,
+cryptography/hazmat/_der.py,sha256=NkwxQBcrR_KMAZCM3WKidXgx8CHFVU5iBnoFIrhQMQs,5205
+cryptography/hazmat/_oid.py,sha256=3L1KLxAsQJJoy15ZCl0T4I-PU-DVvzGS-ZTdS-PNy14,2432
+cryptography/hazmat/backends/__init__.py,sha256=EEhjIZgqApO7coGuybLXyaEaWIHcdg8oC0i2vxQ4RSI,616
+cryptography/hazmat/backends/__pycache__/__init__.cpython-38.pyc,,
+cryptography/hazmat/backends/__pycache__/interfaces.cpython-38.pyc,,
+cryptography/hazmat/backends/interfaces.py,sha256=GXySHrpGLgeTrjUgxOYtK6viaphO1dDKAOA95JFj_pM,10770
+cryptography/hazmat/backends/openssl/__init__.py,sha256=k4DMe228_hTuB2kY3Lwk62JdI3EmCd7VkV01zJm57ps,336
+cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-38.pyc,,
+cryptography/hazmat/backends/openssl/__pycache__/aead.cpython-38.pyc,,
+cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-38.pyc,,
+cryptography/hazmat/backends/openssl/__pycache__/ciphers.cpython-38.pyc,,
+cryptography/hazmat/backends/openssl/__pycache__/cmac.cpython-38.pyc,,
+cryptography/hazmat/backends/openssl/__pycache__/decode_asn1.cpython-38.pyc,,
+cryptography/hazmat/backends/openssl/__pycache__/dh.cpython-38.pyc,,
+cryptography/hazmat/backends/openssl/__pycache__/dsa.cpython-38.pyc,,
+cryptography/hazmat/backends/openssl/__pycache__/ec.cpython-38.pyc,,
+cryptography/hazmat/backends/openssl/__pycache__/ed25519.cpython-38.pyc,,
+cryptography/hazmat/backends/openssl/__pycache__/ed448.cpython-38.pyc,,
+cryptography/hazmat/backends/openssl/__pycache__/encode_asn1.cpython-38.pyc,,
+cryptography/hazmat/backends/openssl/__pycache__/hashes.cpython-38.pyc,,
+cryptography/hazmat/backends/openssl/__pycache__/hmac.cpython-38.pyc,,
+cryptography/hazmat/backends/openssl/__pycache__/ocsp.cpython-38.pyc,,
+cryptography/hazmat/backends/openssl/__pycache__/poly1305.cpython-38.pyc,,
+cryptography/hazmat/backends/openssl/__pycache__/rsa.cpython-38.pyc,,
+cryptography/hazmat/backends/openssl/__pycache__/utils.cpython-38.pyc,,
+cryptography/hazmat/backends/openssl/__pycache__/x25519.cpython-38.pyc,,
+cryptography/hazmat/backends/openssl/__pycache__/x448.cpython-38.pyc,,
+cryptography/hazmat/backends/openssl/__pycache__/x509.cpython-38.pyc,,
+cryptography/hazmat/backends/openssl/aead.py,sha256=ljOSkI7NXgXi9OyfHjm9J07m3EVHFNm9kfHAIogSWtc,5765
+cryptography/hazmat/backends/openssl/backend.py,sha256=fCsajtK984CXB0wFgtbq6LMTAYz6kSfAunoahRmPRj0,103629
+cryptography/hazmat/backends/openssl/ciphers.py,sha256=2zpsySSbMslmUTveGZAN44UXZcA4WAMoQRglMtzg-OQ,8604
+cryptography/hazmat/backends/openssl/cmac.py,sha256=n34WXNXt-r0trp207u0cSKwGMth8qEiEs2jjgmHNtWE,2855
+cryptography/hazmat/backends/openssl/decode_asn1.py,sha256=AHOzN74MwF3b_fZhQf5_0hB649nS7Z1Dz8tJ6WeD780,32345
+cryptography/hazmat/backends/openssl/dh.py,sha256=1fZn8one2aSla85LIe6vXbf0qoLTDS-B7tYMcrJshnY,10239
+cryptography/hazmat/backends/openssl/dsa.py,sha256=Cp1w1Z6J_PEW-Qd2RAzfC04MU9YxqYOaef57f_QVpYI,10036
+cryptography/hazmat/backends/openssl/ec.py,sha256=HC2Q3drRTdBAJmUiiSs4qdZXCgZ5xhGmha4Qk77RVJY,12085
+cryptography/hazmat/backends/openssl/ed25519.py,sha256=1uIjZ6OC0JJssRF9lMQLlGPCcQf_FGE5voVkaz6RwF8,5670
+cryptography/hazmat/backends/openssl/ed448.py,sha256=mXAHwlMNSP_jQ0hPBLB5GtZUAyZL3MsKuqf8iRTkrk0,5626
+cryptography/hazmat/backends/openssl/encode_asn1.py,sha256=DsIMFa6dpA0gTW9IS2_K2dd0RIgfDXISz7X0hS2OSgY,23553
+cryptography/hazmat/backends/openssl/hashes.py,sha256=7AOmGxZTAiuMpbWLs15HIG2Nr06V-2nM3u91HlY90r0,3169
+cryptography/hazmat/backends/openssl/hmac.py,sha256=6LtwqIFF7HpuhtVEY4Ytjt_EmeVY4eYnDz66iLNb1d4,3015
+cryptography/hazmat/backends/openssl/ocsp.py,sha256=NEGrc30GfPBLbjnt-K3K48-dZK2dEyQa2oCyv7-laMs,14028
+cryptography/hazmat/backends/openssl/poly1305.py,sha256=ZPIuTJ0JoG8XYz-qnbSMUrLG1RVb58gbfZnQ6eVaKbk,2419
+cryptography/hazmat/backends/openssl/rsa.py,sha256=4XONPzjx8ag13HcI7LcAfrBKh2sPOxTyLB3NyrJc4mY,17075
+cryptography/hazmat/backends/openssl/utils.py,sha256=JI8K4BYq7Dwsdm2l-ff1dOw8Kxu4f8dNL1dTwQCrZXg,2304
+cryptography/hazmat/backends/openssl/x25519.py,sha256=-MNAPGS_DZ37-skSn17-gIakFLoJmuNx8PlC8s2-00g,4488
+cryptography/hazmat/backends/openssl/x448.py,sha256=5WH3Rw7kZGLS3EDDVzjrYriAG-tzUnyWetyqMYTiEhA,4011
+cryptography/hazmat/backends/openssl/x509.py,sha256=EMN9qSPW1BVZ1VAOHzgi8oO8idI8iOb0wrWjdrr5FpI,21620
+cryptography/hazmat/bindings/__init__.py,sha256=0wGw2OF9R7fHX7NWENCmrsYigbXHU2ojgn-N4Rkjs9U,246
+cryptography/hazmat/bindings/__pycache__/__init__.cpython-38.pyc,,
+cryptography/hazmat/bindings/_openssl.abi3.so,sha256=Gc5238KajUkDyo-fhNQCh98pTNjpaj9dFgBu2Jy5KVI,6984664
+cryptography/hazmat/bindings/_padding.abi3.so,sha256=O4XAHIvSNpmDJULLcYtTsAOCs8zFKoF-b2Zv-bfayiw,37232
+cryptography/hazmat/bindings/openssl/__init__.py,sha256=0wGw2OF9R7fHX7NWENCmrsYigbXHU2ojgn-N4Rkjs9U,246
+cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-38.pyc,,
+cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-38.pyc,,
+cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-38.pyc,,
+cryptography/hazmat/bindings/openssl/_conditional.py,sha256=E6a6IYDJR5rIG6QxgjRHHBcTjMaAfIFBcdtXBt3y1XU,8620
+cryptography/hazmat/bindings/openssl/binding.py,sha256=AE2PLb-vxYgCo1MI-YGnVf05FYAGPLnPmliDumwO3JE,7368
+cryptography/hazmat/primitives/__init__.py,sha256=0wGw2OF9R7fHX7NWENCmrsYigbXHU2ojgn-N4Rkjs9U,246
+cryptography/hazmat/primitives/__pycache__/__init__.cpython-38.pyc,,
+cryptography/hazmat/primitives/__pycache__/cmac.cpython-38.pyc,,
+cryptography/hazmat/primitives/__pycache__/constant_time.cpython-38.pyc,,
+cryptography/hazmat/primitives/__pycache__/hashes.cpython-38.pyc,,
+cryptography/hazmat/primitives/__pycache__/hmac.cpython-38.pyc,,
+cryptography/hazmat/primitives/__pycache__/keywrap.cpython-38.pyc,,
+cryptography/hazmat/primitives/__pycache__/padding.cpython-38.pyc,,
+cryptography/hazmat/primitives/__pycache__/poly1305.cpython-38.pyc,,
+cryptography/hazmat/primitives/asymmetric/__init__.py,sha256=WhUn3tGxoLAxGAsZHElJ2aOILXSh55AZi04MBudYmQA,1020
+cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-38.pyc,,
+cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-38.pyc,,
+cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-38.pyc,,
+cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-38.pyc,,
+cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-38.pyc,,
+cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-38.pyc,,
+cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-38.pyc,,
+cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-38.pyc,,
+cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-38.pyc,,
+cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-38.pyc,,
+cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-38.pyc,,
+cryptography/hazmat/primitives/asymmetric/dh.py,sha256=ZObmAtofuhW1RZJZNVJ5xl8JPnH2ML-wZjK5GedChHw,5661
+cryptography/hazmat/primitives/asymmetric/dsa.py,sha256=XuE2mUXl-fXi2q7w22qKyiCTFUz-852cFTwV4WOUQgw,7181
+cryptography/hazmat/primitives/asymmetric/ec.py,sha256=2rorlIEXHGkLnI8bbeFKMRr-gJfEipuJigQDQh4xk7w,14006
+cryptography/hazmat/primitives/asymmetric/ed25519.py,sha256=rfImUQH-PcTliuxiF864aSww7dQCWVwZgjPPbDXiGlI,2401
+cryptography/hazmat/primitives/asymmetric/ed448.py,sha256=JyrEHwYF_Ftj_E60t-Gmvm3CGnQSxVbasptZBW84eBk,2328
+cryptography/hazmat/primitives/asymmetric/padding.py,sha256=2pPqBu4dGERtFPHnPRTZ0iRO_XY9hr9RTwlTcr_J5bw,2250
+cryptography/hazmat/primitives/asymmetric/rsa.py,sha256=UYFExBDj-8IT6Z6nc-oF3vqQArdsb2cmsGkLjY72YYQ,10494
+cryptography/hazmat/primitives/asymmetric/utils.py,sha256=w2lQIcKrFvS9D_Ekt7qWed39TXM6hueg72FFrfwIo58,1201
+cryptography/hazmat/primitives/asymmetric/x25519.py,sha256=vrN1jcO6sjbQrc7auIlf2aEvcH3P17cKUuaVXxaTvxI,2277
+cryptography/hazmat/primitives/asymmetric/x448.py,sha256=u3v-L1IJIG2RyLVTh7FMkXh_Y-oVb3HdEj5b1c-JlKk,2255
+cryptography/hazmat/primitives/ciphers/__init__.py,sha256=mi4yR3Fxc4-Au3yX4PyhFNaiFn0yywZKiTzecdI77EI,647
+cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-38.pyc,,
+cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-38.pyc,,
+cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-38.pyc,,
+cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-38.pyc,,
+cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-38.pyc,,
+cryptography/hazmat/primitives/ciphers/aead.py,sha256=NsNjstdbIOAlLddffPPCUr0HZZ5apZ-vE0LEQmHxQxE,6107
+cryptography/hazmat/primitives/ciphers/algorithms.py,sha256=GKFIhvOoqsYscjjP7onl8XnAmOa-kSQ6jiMMS2zeGBM,4225
+cryptography/hazmat/primitives/ciphers/base.py,sha256=vceN5l7yxLWmNTptlzC3gmfFY-K_ANKk4HdNl2Ptz2k,7253
+cryptography/hazmat/primitives/ciphers/modes.py,sha256=_PhdnJHdIb3ePWz8Ul1k1_Ioqc5oLUUexqVadvohqO4,6730
+cryptography/hazmat/primitives/cmac.py,sha256=eJpysDFbc7W6OiplzWKWrL4owy30Cq6Nsao8mzapqbE,2130
+cryptography/hazmat/primitives/constant_time.py,sha256=_x4mrHW-9ihfgY89BwhATFiIuG2_1l-HMkCxmOUkydM,430
+cryptography/hazmat/primitives/hashes.py,sha256=dzL1QcEFj4eElzczo8QmuOeooZ96EFwBy3c-6cpew0w,6315
+cryptography/hazmat/primitives/hmac.py,sha256=AYzTQMDiruKmZKKLR6ceVjX5yQ3mpciWIx__tpNLyr4,2306
+cryptography/hazmat/primitives/kdf/__init__.py,sha256=nod5HjPswjZr8wFp6Tsu6en9blHYF3khgXI5R0zIcnM,771
+cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-38.pyc,,
+cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-38.pyc,,
+cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-38.pyc,,
+cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-38.pyc,,
+cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-38.pyc,,
+cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-38.pyc,,
+cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-38.pyc,,
+cryptography/hazmat/primitives/kdf/concatkdf.py,sha256=gW-xAU6sPE6aZhg_G9ucZ5b_uctSbPcfSpHyyt7Q8MA,4095
+cryptography/hazmat/primitives/kdf/hkdf.py,sha256=SJJQzeQ9OH0t3tUdUq2GT6IQXv9oPLDjulT7wnLTkMg,3598
+cryptography/hazmat/primitives/kdf/kbkdf.py,sha256=awf7zessT-amokp2VBdyW8TWrDnmTXGzHHX4scBO9Uc,5100
+cryptography/hazmat/primitives/kdf/pbkdf2.py,sha256=RYexIlGomzUEU-_QQXTW81rdY5YVZB30XrfnJq8NsIU,2220
+cryptography/hazmat/primitives/kdf/scrypt.py,sha256=C0C3m-gEnlLlAVxzRFdzx1mfDuWs_BkZDoSV2hfahfk,2268
+cryptography/hazmat/primitives/kdf/x963kdf.py,sha256=26-b_ckyUYiqbWM9mZ7FEWbuvR7eTLksIeWQeW1TJ04,2407
+cryptography/hazmat/primitives/keywrap.py,sha256=fF-HA5ETz9RH8s8LB94uDoWRLPvwPkYAC5_Kylej6sA,5730
+cryptography/hazmat/primitives/padding.py,sha256=EwCEIodfnnasyeuwjsoJYVCZsW89gk132dIReLrXlZI,5721
+cryptography/hazmat/primitives/poly1305.py,sha256=NNC1WYiYQGNJ8mblkaHRxBm1PLdaKRzkILocsYH5zgY,1679
+cryptography/hazmat/primitives/serialization/__init__.py,sha256=eLzmqoHgVlPK1aTGiEfpaIrUf9mX5PRrM7IHEc8FeQU,1132
+cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-38.pyc,,
+cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-38.pyc,,
+cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-38.pyc,,
+cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-38.pyc,,
+cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-38.pyc,,
+cryptography/hazmat/primitives/serialization/base.py,sha256=qoZtZDEEcEjdjA2TUB5YASwHIx62jSXt3pQTr_VOnu8,2228
+cryptography/hazmat/primitives/serialization/pkcs12.py,sha256=oJxangAtSSsniXfguLaoPgejVchs-VpCTBdWSW4rF54,1853
+cryptography/hazmat/primitives/serialization/pkcs7.py,sha256=c8I06bEk92WmffmKtwzDlIoURJontXNkgT7xY9PMlbk,555
+cryptography/hazmat/primitives/serialization/ssh.py,sha256=a_FKWuqpHO-RzUBEoBWS5q7WyMZwS56MD92Wr6j3KBA,21682
+cryptography/hazmat/primitives/twofactor/__init__.py,sha256=BWrm3DKDoAa281E7U_nzz8v44OmAiXmlIycFcsehwfE,288
+cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-38.pyc,,
+cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-38.pyc,,
+cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-38.pyc,,
+cryptography/hazmat/primitives/twofactor/__pycache__/utils.cpython-38.pyc,,
+cryptography/hazmat/primitives/twofactor/hotp.py,sha256=2uCTCTHMFmWL9kOjA890F0CVrljsvOjJYISKBup7GyI,2679
+cryptography/hazmat/primitives/twofactor/totp.py,sha256=iJRTxPNWPdsTQHePgSE6KGdRNURTv188VNqpyvBwvBY,1780
+cryptography/hazmat/primitives/twofactor/utils.py,sha256=ZKZSOL2cLsGCsSNfx3kYlYt91A4bcU1w9up2EL1hwaA,982
+cryptography/utils.py,sha256=QpZgLOABfeaDciPlrF-W8giJiOL2AzU6Ajjq6h6WkzY,4745
+cryptography/x509/__init__.py,sha256=1juFH-nvLS7kU0x52VMN7pN6s7H55Y86NqUszaBhhi4,7699
+cryptography/x509/__pycache__/__init__.cpython-38.pyc,,
+cryptography/x509/__pycache__/base.cpython-38.pyc,,
+cryptography/x509/__pycache__/certificate_transparency.cpython-38.pyc,,
+cryptography/x509/__pycache__/extensions.cpython-38.pyc,,
+cryptography/x509/__pycache__/general_name.cpython-38.pyc,,
+cryptography/x509/__pycache__/name.cpython-38.pyc,,
+cryptography/x509/__pycache__/ocsp.cpython-38.pyc,,
+cryptography/x509/__pycache__/oid.cpython-38.pyc,,
+cryptography/x509/base.py,sha256=burWvWUouPiPzmPUzNZUzEe64gR-WMkNyiDpjYCvEc8,26409
+cryptography/x509/certificate_transparency.py,sha256=eJ9lrITdyMn4XsrcVdrTaFVI_RR7mX_VzMZyiaEpbps,1000
+cryptography/x509/extensions.py,sha256=HOwYCKAy-4qK5eWWYB4UnJejC9Ru3FBQMsLXodasR9Y,52924
+cryptography/x509/general_name.py,sha256=nNIG--rJ-TzREkhEq727Fe3tjvxVflW7iPIMjJs6LrI,7942
+cryptography/x509/name.py,sha256=j2khdee8jQBkbZd4RV60ji8V0ZngbsB07i5cnflDBPk,8291
+cryptography/x509/ocsp.py,sha256=nr5Bk3B_b9LaG-1njEmo0f_smAg2B6CU5Wr6wMr81MI,13245
+cryptography/x509/oid.py,sha256=Wp6Y4WMrFa7vsUmV4tbMvPPAl0Iiu4QxQ7on2np94QU,12594
diff --git a/functions/source/CreateSSHKey/asn1crypto/_perf/__init__.py b/functions/source/CreateSSHKey/cryptography-3.1.dist-info/REQUESTED
similarity index 100%
rename from functions/source/CreateSSHKey/asn1crypto/_perf/__init__.py
rename to functions/source/CreateSSHKey/cryptography-3.1.dist-info/REQUESTED
diff --git a/functions/source/CreateSSHKey/cryptography-3.1.dist-info/WHEEL b/functions/source/CreateSSHKey/cryptography-3.1.dist-info/WHEEL
new file mode 100644
index 0000000..9983060
--- /dev/null
+++ b/functions/source/CreateSSHKey/cryptography-3.1.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.35.1)
+Root-Is-Purelib: false
+Tag: cp35-abi3-manylinux2010_x86_64
+
diff --git a/functions/source/CreateSSHKey/cryptography-3.1.dist-info/top_level.txt b/functions/source/CreateSSHKey/cryptography-3.1.dist-info/top_level.txt
new file mode 100644
index 0000000..52ccfc6
--- /dev/null
+++ b/functions/source/CreateSSHKey/cryptography-3.1.dist-info/top_level.txt
@@ -0,0 +1,3 @@
+_openssl
+_padding
+cryptography
diff --git a/functions/source/CreateSSHKey/cryptography/__about__.py b/functions/source/CreateSSHKey/cryptography/__about__.py
index a335c53..fd1b7a5 100644
--- a/functions/source/CreateSSHKey/cryptography/__about__.py
+++ b/functions/source/CreateSSHKey/cryptography/__about__.py
@@ -5,19 +5,27 @@
from __future__ import absolute_import, division, print_function
__all__ = [
- "__title__", "__summary__", "__uri__", "__version__", "__author__",
- "__email__", "__license__", "__copyright__",
+ "__title__",
+ "__summary__",
+ "__uri__",
+ "__version__",
+ "__author__",
+ "__email__",
+ "__license__",
+ "__copyright__",
]
__title__ = "cryptography"
-__summary__ = ("cryptography is a package which provides cryptographic recipes"
- " and primitives to Python developers.")
+__summary__ = (
+ "cryptography is a package which provides cryptographic recipes"
+ " and primitives to Python developers."
+)
__uri__ = "https://github.com/pyca/cryptography"
-__version__ = "2.1.2"
+__version__ = "3.1"
__author__ = "The cryptography developers"
__email__ = "cryptography-dev@python.org"
__license__ = "BSD or Apache License, Version 2.0"
-__copyright__ = "Copyright 2013-2017 {0}".format(__author__)
+__copyright__ = "Copyright 2013-2019 {}".format(__author__)
diff --git a/functions/source/CreateSSHKey/cryptography/__init__.py b/functions/source/CreateSSHKey/cryptography/__init__.py
index e5c34a2..f16efce 100644
--- a/functions/source/CreateSSHKey/cryptography/__init__.py
+++ b/functions/source/CreateSSHKey/cryptography/__init__.py
@@ -8,20 +8,41 @@
import warnings
from cryptography.__about__ import (
- __author__, __copyright__, __email__, __license__, __summary__, __title__,
- __uri__, __version__
+ __author__,
+ __copyright__,
+ __email__,
+ __license__,
+ __summary__,
+ __title__,
+ __uri__,
+ __version__,
)
+from cryptography.utils import CryptographyDeprecationWarning
__all__ = [
- "__title__", "__summary__", "__uri__", "__version__", "__author__",
- "__email__", "__license__", "__copyright__",
+ "__title__",
+ "__summary__",
+ "__uri__",
+ "__version__",
+ "__author__",
+ "__email__",
+ "__license__",
+ "__copyright__",
]
-if sys.version_info[:2] == (2, 6):
+if sys.version_info[0] == 2:
warnings.warn(
- "Python 2.6 is no longer supported by the Python core team, please "
- "upgrade your Python. The next version of cryptography will drop "
- "support for Python 2.6",
- DeprecationWarning
+ "Python 2 is no longer supported by the Python core team. Support for "
+ "it is now deprecated in cryptography, and will be removed in a "
+ "future release.",
+ CryptographyDeprecationWarning,
+ stacklevel=2,
+ )
+if sys.version_info[:2] == (3, 5):
+ warnings.warn(
+ "Python 3.5 support will be dropped in the next release of"
+ "cryptography. Please upgrade your Python.",
+ CryptographyDeprecationWarning,
+ stacklevel=2,
)
diff --git a/functions/source/CreateSSHKey/cryptography/__pycache__/__about__.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/__pycache__/__about__.cpython-38.pyc
new file mode 100644
index 0000000..26d1e0c
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/__pycache__/__about__.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/__pycache__/__init__.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000..9e70483
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/__pycache__/__init__.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/__pycache__/exceptions.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/__pycache__/exceptions.cpython-38.pyc
new file mode 100644
index 0000000..ca5d0d7
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/__pycache__/exceptions.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/__pycache__/fernet.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/__pycache__/fernet.cpython-38.pyc
new file mode 100644
index 0000000..300806a
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/__pycache__/fernet.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/__pycache__/utils.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/__pycache__/utils.cpython-38.pyc
new file mode 100644
index 0000000..b07e0aa
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/__pycache__/utils.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/exceptions.py b/functions/source/CreateSSHKey/cryptography/exceptions.py
index 648cf9d..1d52d7d 100644
--- a/functions/source/CreateSSHKey/cryptography/exceptions.py
+++ b/functions/source/CreateSSHKey/cryptography/exceptions.py
@@ -19,6 +19,7 @@ class _Reasons(Enum):
UNSUPPORTED_X509 = 8
UNSUPPORTED_EXCHANGE_ALGORITHM = 9
UNSUPPORTED_DIFFIE_HELLMAN = 10
+ UNSUPPORTED_MAC = 11
class UnsupportedAlgorithm(Exception):
diff --git a/functions/source/CreateSSHKey/cryptography/fernet.py b/functions/source/CreateSSHKey/cryptography/fernet.py
index 99eb10e..00c2528 100644
--- a/functions/source/CreateSSHKey/cryptography/fernet.py
+++ b/functions/source/CreateSSHKey/cryptography/fernet.py
@@ -12,8 +12,9 @@
import six
+from cryptography import utils
from cryptography.exceptions import InvalidSignature
-from cryptography.hazmat.backends import default_backend
+from cryptography.hazmat.backends import _get_backend
from cryptography.hazmat.primitives import hashes, padding
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
from cryptography.hazmat.primitives.hmac import HMAC
@@ -28,8 +29,7 @@ class InvalidToken(Exception):
class Fernet(object):
def __init__(self, key, backend=None):
- if backend is None:
- backend = default_backend()
+ backend = _get_backend(backend)
key = base64.urlsafe_b64decode(key)
if len(key) != 32:
@@ -46,13 +46,14 @@ def generate_key(cls):
return base64.urlsafe_b64encode(os.urandom(32))
def encrypt(self, data):
- current_time = int(time.time())
+ return self.encrypt_at_time(data, int(time.time()))
+
+ def encrypt_at_time(self, data, current_time):
iv = os.urandom(16)
return self._encrypt_from_parts(data, current_time, iv)
def _encrypt_from_parts(self, data, current_time, iv):
- if not isinstance(data, bytes):
- raise TypeError("data must be bytes.")
+ utils._check_bytes("data", data)
padder = padding.PKCS7(algorithms.AES.block_size).padder()
padded_data = padder.update(data) + padder.finalize()
@@ -71,11 +72,26 @@ def _encrypt_from_parts(self, data, current_time, iv):
return base64.urlsafe_b64encode(basic_parts + hmac)
def decrypt(self, token, ttl=None):
- if not isinstance(token, bytes):
- raise TypeError("token must be bytes.")
-
- current_time = int(time.time())
+ timestamp, data = Fernet._get_unverified_token_data(token)
+ return self._decrypt_data(data, timestamp, ttl, int(time.time()))
+ def decrypt_at_time(self, token, ttl, current_time):
+ if ttl is None:
+ raise ValueError(
+ "decrypt_at_time() can only be used with a non-None ttl"
+ )
+ timestamp, data = Fernet._get_unverified_token_data(token)
+ return self._decrypt_data(data, timestamp, ttl, current_time)
+
+ def extract_timestamp(self, token):
+ timestamp, data = Fernet._get_unverified_token_data(token)
+ # Verify the token was not tampered with.
+ self._verify_signature(data)
+ return timestamp
+
+ @staticmethod
+ def _get_unverified_token_data(token):
+ utils._check_bytes("token", token)
try:
data = base64.urlsafe_b64decode(token)
except (TypeError, binascii.Error):
@@ -85,16 +101,12 @@ def decrypt(self, token, ttl=None):
raise InvalidToken
try:
- timestamp, = struct.unpack(">Q", data[1:9])
+ (timestamp,) = struct.unpack(">Q", data[1:9])
except struct.error:
raise InvalidToken
- if ttl is not None:
- if timestamp + ttl < current_time:
- raise InvalidToken
-
- if current_time + _MAX_CLOCK_SKEW < timestamp:
- raise InvalidToken
+ return timestamp, data
+ def _verify_signature(self, data):
h = HMAC(self._signing_key, hashes.SHA256(), backend=self._backend)
h.update(data[:-32])
try:
@@ -102,6 +114,16 @@ def decrypt(self, token, ttl=None):
except InvalidSignature:
raise InvalidToken
+ def _decrypt_data(self, data, timestamp, ttl, current_time):
+ if ttl is not None:
+ if timestamp + ttl < current_time:
+ raise InvalidToken
+
+ if current_time + _MAX_CLOCK_SKEW < timestamp:
+ raise InvalidToken
+
+ self._verify_signature(data)
+
iv = data[9:25]
ciphertext = data[25:-32]
decryptor = Cipher(
@@ -132,7 +154,24 @@ def __init__(self, fernets):
self._fernets = fernets
def encrypt(self, msg):
- return self._fernets[0].encrypt(msg)
+ return self.encrypt_at_time(msg, int(time.time()))
+
+ def encrypt_at_time(self, msg, current_time):
+ return self._fernets[0].encrypt_at_time(msg, current_time)
+
+ def rotate(self, msg):
+ timestamp, data = Fernet._get_unverified_token_data(msg)
+ for f in self._fernets:
+ try:
+ p = f._decrypt_data(data, timestamp, None, None)
+ break
+ except InvalidToken:
+ pass
+ else:
+ raise InvalidToken
+
+ iv = os.urandom(16)
+ return self._fernets[0]._encrypt_from_parts(p, timestamp, iv)
def decrypt(self, msg, ttl=None):
for f in self._fernets:
@@ -141,3 +180,11 @@ def decrypt(self, msg, ttl=None):
except InvalidToken:
pass
raise InvalidToken
+
+ def decrypt_at_time(self, msg, ttl, current_time):
+ for f in self._fernets:
+ try:
+ return f.decrypt_at_time(msg, ttl, current_time)
+ except InvalidToken:
+ pass
+ raise InvalidToken
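
The fernet.py update that ships with cryptography 3.1 adds the time-injectable encrypt_at_time/decrypt_at_time variants, extract_timestamp, and MultiFernet.rotate for key rotation. A short, self-contained sketch of the rotation flow exposed by the hunk above (keys are generated on the spot purely for illustration):

from cryptography.fernet import Fernet, MultiFernet

old_key = Fernet.generate_key()
new_key = Fernet.generate_key()
token = Fernet(old_key).encrypt(b"ssh key material")

# List the new key first: rotate() re-encrypts under self._fernets[0] while
# still accepting tokens issued under any of the listed keys.
mf = MultiFernet([Fernet(new_key), Fernet(old_key)])
rotated = mf.rotate(token)

assert Fernet(new_key).decrypt(rotated) == b"ssh key material"
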
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/__pycache__/__init__.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000..0fdba65
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/__pycache__/__init__.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/__pycache__/_der.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/__pycache__/_der.cpython-38.pyc
new file mode 100644
index 0000000..61062fc
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/__pycache__/_der.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/__pycache__/_oid.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/__pycache__/_oid.cpython-38.pyc
new file mode 100644
index 0000000..ccd0edc
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/__pycache__/_oid.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/_der.py b/functions/source/CreateSSHKey/cryptography/hazmat/_der.py
new file mode 100644
index 0000000..462b911
--- /dev/null
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/_der.py
@@ -0,0 +1,156 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import six
+
+from cryptography.utils import int_from_bytes, int_to_bytes
+
+
+# This module contains a lightweight DER encoder and decoder. See X.690 for the
+# specification. This module intentionally does not implement the more complex
+# BER encoding, only DER.
+#
+# Note this implementation treats an element's constructed bit as part of the
+# tag. This is fine for DER, where the bit is always computable from the type.
+
+
+CONSTRUCTED = 0x20
+CONTEXT_SPECIFIC = 0x80
+
+INTEGER = 0x02
+BIT_STRING = 0x03
+OCTET_STRING = 0x04
+NULL = 0x05
+OBJECT_IDENTIFIER = 0x06
+SEQUENCE = 0x10 | CONSTRUCTED
+SET = 0x11 | CONSTRUCTED
+PRINTABLE_STRING = 0x13
+UTC_TIME = 0x17
+GENERALIZED_TIME = 0x18
+
+
+class DERReader(object):
+ def __init__(self, data):
+ self.data = memoryview(data)
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, tb):
+ if exc_value is None:
+ self.check_empty()
+
+ def is_empty(self):
+ return len(self.data) == 0
+
+ def check_empty(self):
+ if not self.is_empty():
+ raise ValueError("Invalid DER input: trailing data")
+
+ def read_byte(self):
+ if len(self.data) < 1:
+ raise ValueError("Invalid DER input: insufficient data")
+ ret = six.indexbytes(self.data, 0)
+ self.data = self.data[1:]
+ return ret
+
+ def read_bytes(self, n):
+ if len(self.data) < n:
+ raise ValueError("Invalid DER input: insufficient data")
+ ret = self.data[:n]
+ self.data = self.data[n:]
+ return ret
+
+ def read_any_element(self):
+ tag = self.read_byte()
+ # Tag numbers 31 or higher are stored in multiple bytes. No supported
+ # ASN.1 types use such tags, so reject these.
+ if tag & 0x1F == 0x1F:
+ raise ValueError("Invalid DER input: unexpected high tag number")
+ length_byte = self.read_byte()
+ if length_byte & 0x80 == 0:
+ # If the high bit is clear, the first length byte is the length.
+ length = length_byte
+ else:
+ # If the high bit is set, the first length byte encodes the length
+ # of the length.
+ length_byte &= 0x7F
+ if length_byte == 0:
+ raise ValueError(
+ "Invalid DER input: indefinite length form is not allowed "
+ "in DER"
+ )
+ length = 0
+ for i in range(length_byte):
+ length <<= 8
+ length |= self.read_byte()
+ if length == 0:
+ raise ValueError(
+ "Invalid DER input: length was not minimally-encoded"
+ )
+ if length < 0x80:
+ # If the length could have been encoded in short form, it must
+ # not use long form.
+ raise ValueError(
+ "Invalid DER input: length was not minimally-encoded"
+ )
+ body = self.read_bytes(length)
+ return tag, DERReader(body)
+
+ def read_element(self, expected_tag):
+ tag, body = self.read_any_element()
+ if tag != expected_tag:
+ raise ValueError("Invalid DER input: unexpected tag")
+ return body
+
+ def read_single_element(self, expected_tag):
+ with self:
+ return self.read_element(expected_tag)
+
+ def read_optional_element(self, expected_tag):
+ if len(self.data) > 0 and six.indexbytes(self.data, 0) == expected_tag:
+ return self.read_element(expected_tag)
+ return None
+
+ def as_integer(self):
+ if len(self.data) == 0:
+ raise ValueError("Invalid DER input: empty integer contents")
+ first = six.indexbytes(self.data, 0)
+ if first & 0x80 == 0x80:
+ raise ValueError("Negative DER integers are not supported")
+ # The first 9 bits must not all be zero or all be ones. Otherwise, the
+ # encoding should have been one byte shorter.
+ if len(self.data) > 1:
+ second = six.indexbytes(self.data, 1)
+ if first == 0 and second & 0x80 == 0:
+ raise ValueError(
+ "Invalid DER input: integer not minimally-encoded"
+ )
+ return int_from_bytes(self.data, "big")
+
+
+def encode_der_integer(x):
+ if not isinstance(x, six.integer_types):
+ raise ValueError("Value must be an integer")
+ if x < 0:
+ raise ValueError("Negative integers are not supported")
+ n = x.bit_length() // 8 + 1
+ return int_to_bytes(x, n)
+
+
+def encode_der(tag, *children):
+ length = 0
+ for child in children:
+ length += len(child)
+ chunks = [six.int2byte(tag)]
+ if length < 0x80:
+ chunks.append(six.int2byte(length))
+ else:
+ length_bytes = int_to_bytes(length)
+ chunks.append(six.int2byte(0x80 | len(length_bytes)))
+ chunks.append(length_bytes)
+ chunks.extend(children)
+ return b"".join(chunks)
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/_oid.py b/functions/source/CreateSSHKey/cryptography/hazmat/_oid.py
new file mode 100644
index 0000000..de2771a
--- /dev/null
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/_oid.py
@@ -0,0 +1,77 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+from cryptography import utils
+
+
+class ObjectIdentifier(object):
+ def __init__(self, dotted_string):
+ self._dotted_string = dotted_string
+
+ nodes = self._dotted_string.split(".")
+ intnodes = []
+
+ # There must be at least 2 nodes, the first node must be 0..2, and
+ # if less than 2, the second node cannot have a value outside the
+ # range 0..39. All nodes must be integers.
+ for node in nodes:
+ try:
+ node_value = int(node, 10)
+ except ValueError:
+ raise ValueError(
+ "Malformed OID: %s (non-integer nodes)"
+ % (self._dotted_string)
+ )
+ if node_value < 0:
+ raise ValueError(
+ "Malformed OID: %s (negative-integer nodes)"
+ % (self._dotted_string)
+ )
+ intnodes.append(node_value)
+
+ if len(nodes) < 2:
+ raise ValueError(
+ "Malformed OID: %s (insufficient number of nodes)"
+ % (self._dotted_string)
+ )
+
+ if intnodes[0] > 2:
+ raise ValueError(
+ "Malformed OID: %s (first node outside valid range)"
+ % (self._dotted_string)
+ )
+
+ if intnodes[0] < 2 and intnodes[1] >= 40:
+ raise ValueError(
+ "Malformed OID: %s (second node outside valid range)"
+ % (self._dotted_string)
+ )
+
+ def __eq__(self, other):
+ if not isinstance(other, ObjectIdentifier):
+ return NotImplemented
+
+ return self.dotted_string == other.dotted_string
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __repr__(self):
+ return "".format(
+ self.dotted_string, self._name
+ )
+
+ def __hash__(self):
+ return hash(self.dotted_string)
+
+ @property
+ def _name(self):
+ # Lazy import to avoid an import cycle
+ from cryptography.x509.oid import _OID_NAMES
+
+ return _OID_NAMES.get(self, "Unknown OID")
+
+ dotted_string = utils.read_only_property("_dotted_string")
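
ObjectIdentifier now lives in hazmat/_oid.py and validates its dotted string on construction. A quick illustration via the public re-export (the error text comes from the checks above):

    from cryptography.x509.oid import ObjectIdentifier

    ObjectIdentifier("2.5.29.19")       # basicConstraints: parses fine

    try:
        ObjectIdentifier("1.40.1")      # second node must be <= 39 when the first is 0 or 1
    except ValueError as exc:
        print(exc)                      # Malformed OID: 1.40.1 (second node outside valid range)
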
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/__init__.py b/functions/source/CreateSSHKey/cryptography/hazmat/backends/__init__.py
index 565bde7..1563936 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/backends/__init__.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/backends/__init__.py
@@ -13,6 +13,14 @@ def default_backend():
if _default_backend is None:
from cryptography.hazmat.backends.openssl.backend import backend
+
_default_backend = backend
return _default_backend
+
+
+def _get_backend(backend):
+ if backend is None:
+ return default_backend()
+ else:
+ return backend
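
_get_backend() is a small private helper that lets functions accept backend=None and fall back to the shared OpenSSL backend. A sketch of the behaviour, noting that _get_backend is internal and may change:

    from cryptography.hazmat.backends import _get_backend, default_backend

    explicit = default_backend()
    assert _get_backend(None) is default_backend()   # None -> process-wide default
    assert _get_backend(explicit) is explicit        # explicit backend passes through
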
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/__pycache__/__init__.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/backends/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000..ca2cdc5
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/backends/__pycache__/__init__.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/__pycache__/interfaces.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/backends/__pycache__/interfaces.cpython-38.pyc
new file mode 100644
index 0000000..1bcd313
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/backends/__pycache__/interfaces.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/interfaces.py b/functions/source/CreateSSHKey/cryptography/hazmat/backends/interfaces.py
index 0a476b9..418980a 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/backends/interfaces.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/backends/interfaces.py
@@ -57,7 +57,7 @@ def hmac_supported(self, algorithm):
@abc.abstractmethod
def create_hmac_ctx(self, key, algorithm):
"""
- Create a MACContext for calculating a message authentication code.
+ Create a context for calculating a message authentication code.
"""
@@ -72,7 +72,7 @@ def cmac_algorithm_supported(self, algorithm):
@abc.abstractmethod
def create_cmac_ctx(self, algorithm):
"""
- Create a MACContext for calculating a message authentication code.
+ Create a context for calculating a message authentication code.
"""
@@ -86,8 +86,9 @@ def pbkdf2_hmac_supported(self, algorithm):
"""
@abc.abstractmethod
- def derive_pbkdf2_hmac(self, algorithm, length, salt, iterations,
- key_material):
+ def derive_pbkdf2_hmac(
+ self, algorithm, length, salt, iterations, key_material
+ ):
"""
Return length bytes derived from provided PBKDF2 parameters.
"""
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000..12add65
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/aead.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/aead.cpython-38.pyc
new file mode 100644
index 0000000..f60b78f
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/aead.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-38.pyc
new file mode 100644
index 0000000..6c9bafb
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/ciphers.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/ciphers.cpython-38.pyc
new file mode 100644
index 0000000..ca53095
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/ciphers.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/cmac.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/cmac.cpython-38.pyc
new file mode 100644
index 0000000..a19bf88
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/cmac.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/decode_asn1.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/decode_asn1.cpython-38.pyc
new file mode 100644
index 0000000..7113fd8
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/decode_asn1.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/dh.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/dh.cpython-38.pyc
new file mode 100644
index 0000000..7b5419e
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/dh.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/dsa.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/dsa.cpython-38.pyc
new file mode 100644
index 0000000..961d78d
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/dsa.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/ec.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/ec.cpython-38.pyc
new file mode 100644
index 0000000..4c74572
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/ec.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/ed25519.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/ed25519.cpython-38.pyc
new file mode 100644
index 0000000..7ad5517
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/ed25519.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/ed448.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/ed448.cpython-38.pyc
new file mode 100644
index 0000000..aa9cd1a
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/ed448.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/encode_asn1.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/encode_asn1.cpython-38.pyc
new file mode 100644
index 0000000..ed0b247
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/encode_asn1.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/hashes.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/hashes.cpython-38.pyc
new file mode 100644
index 0000000..0b8bd01
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/hashes.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/hmac.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/hmac.cpython-38.pyc
new file mode 100644
index 0000000..7ea8ed4
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/hmac.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/ocsp.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/ocsp.cpython-38.pyc
new file mode 100644
index 0000000..8ebb69b
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/ocsp.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/poly1305.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/poly1305.cpython-38.pyc
new file mode 100644
index 0000000..72306ac
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/poly1305.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/rsa.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/rsa.cpython-38.pyc
new file mode 100644
index 0000000..874dd1f
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/rsa.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/utils.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/utils.cpython-38.pyc
new file mode 100644
index 0000000..35436b2
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/utils.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/x25519.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/x25519.cpython-38.pyc
new file mode 100644
index 0000000..218182f
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/x25519.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/x448.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/x448.cpython-38.pyc
new file mode 100644
index 0000000..b21645b
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/x448.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/x509.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/x509.cpython-38.pyc
new file mode 100644
index 0000000..850ea30
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/__pycache__/x509.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/aead.py b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/aead.py
index 9cec3e2..4494916 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/aead.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/aead.py
@@ -13,15 +13,18 @@
def _aead_cipher_name(cipher):
from cryptography.hazmat.primitives.ciphers.aead import (
- AESCCM, AESGCM, ChaCha20Poly1305
+ AESCCM,
+ AESGCM,
+ ChaCha20Poly1305,
)
+
if isinstance(cipher, ChaCha20Poly1305):
return b"chacha20-poly1305"
elif isinstance(cipher, AESCCM):
- return "aes-{0}-ccm".format(len(cipher._key) * 8).encode("ascii")
+ return "aes-{}-ccm".format(len(cipher._key) * 8).encode("ascii")
else:
assert isinstance(cipher, AESGCM)
- return "aes-{0}-gcm".format(len(cipher._key) * 8).encode("ascii")
+ return "aes-{}-gcm".format(len(cipher._key) * 8).encode("ascii")
def _aead_setup(backend, cipher_name, key, nonce, tag, tag_len, operation):
@@ -30,18 +33,21 @@ def _aead_setup(backend, cipher_name, key, nonce, tag, tag_len, operation):
ctx = backend._lib.EVP_CIPHER_CTX_new()
ctx = backend._ffi.gc(ctx, backend._lib.EVP_CIPHER_CTX_free)
res = backend._lib.EVP_CipherInit_ex(
- ctx, evp_cipher,
+ ctx,
+ evp_cipher,
backend._ffi.NULL,
backend._ffi.NULL,
backend._ffi.NULL,
- int(operation == _ENCRYPT)
+ int(operation == _ENCRYPT),
)
backend.openssl_assert(res != 0)
res = backend._lib.EVP_CIPHER_CTX_set_key_length(ctx, len(key))
backend.openssl_assert(res != 0)
res = backend._lib.EVP_CIPHER_CTX_ctrl(
- ctx, backend._lib.EVP_CTRL_AEAD_SET_IVLEN, len(nonce),
- backend._ffi.NULL
+ ctx,
+ backend._lib.EVP_CTRL_AEAD_SET_IVLEN,
+ len(nonce),
+ backend._ffi.NULL,
)
backend.openssl_assert(res != 0)
if operation == _DECRYPT:
@@ -49,18 +55,21 @@ def _aead_setup(backend, cipher_name, key, nonce, tag, tag_len, operation):
ctx, backend._lib.EVP_CTRL_AEAD_SET_TAG, len(tag), tag
)
backend.openssl_assert(res != 0)
- else:
+ elif cipher_name.endswith(b"-ccm"):
res = backend._lib.EVP_CIPHER_CTX_ctrl(
ctx, backend._lib.EVP_CTRL_AEAD_SET_TAG, tag_len, backend._ffi.NULL
)
+ backend.openssl_assert(res != 0)
+ nonce_ptr = backend._ffi.from_buffer(nonce)
+ key_ptr = backend._ffi.from_buffer(key)
res = backend._lib.EVP_CipherInit_ex(
ctx,
backend._ffi.NULL,
backend._ffi.NULL,
- key,
- nonce,
- int(operation == _ENCRYPT)
+ key_ptr,
+ nonce_ptr,
+ int(operation == _ENCRYPT),
)
backend.openssl_assert(res != 0)
return ctx
@@ -69,11 +78,7 @@ def _aead_setup(backend, cipher_name, key, nonce, tag, tag_len, operation):
def _set_length(backend, ctx, data_len):
intptr = backend._ffi.new("int *")
res = backend._lib.EVP_CipherUpdate(
- ctx,
- backend._ffi.NULL,
- intptr,
- backend._ffi.NULL,
- data_len
+ ctx, backend._ffi.NULL, intptr, backend._ffi.NULL, data_len
)
backend.openssl_assert(res != 0)
@@ -96,6 +101,7 @@ def _process_data(backend, ctx, data):
def _encrypt(backend, cipher, nonce, data, associated_data, tag_length):
from cryptography.hazmat.primitives.ciphers.aead import AESCCM
+
cipher_name = _aead_cipher_name(cipher)
ctx = _aead_setup(
backend, cipher_name, cipher._key, nonce, None, tag_length, _ENCRYPT
@@ -123,6 +129,7 @@ def _encrypt(backend, cipher, nonce, data, associated_data, tag_length):
def _decrypt(backend, cipher, nonce, data, associated_data, tag_length):
from cryptography.hazmat.primitives.ciphers.aead import AESCCM
+
if len(data) < tag_length:
raise InvalidTag
tag = data[-tag_length:]
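
The aead.py changes are mostly reformatting plus CCM-specific tag setup and from_buffer() pointers for the key and nonce. The public AEAD classes that drive this code are unchanged; for context (values are illustrative):

    import os
    from cryptography.hazmat.primitives.ciphers.aead import AESGCM

    key = AESGCM.generate_key(bit_length=128)
    aesgcm = AESGCM(key)
    nonce = os.urandom(12)   # 96-bit nonce, the recommended size for GCM

    ct = aesgcm.encrypt(nonce, b"secret payload", b"associated data")
    assert aesgcm.decrypt(nonce, ct, b"associated data") == b"secret payload"
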
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/backend.py b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/backend.py
index 6abf4ec..97c7fd0 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/backend.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/backend.py
@@ -4,71 +4,165 @@
from __future__ import absolute_import, division, print_function
-import base64
-import calendar
import collections
import contextlib
import itertools
+import warnings
from contextlib import contextmanager
import six
+from six.moves import range
from cryptography import utils, x509
from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
+from cryptography.hazmat._der import (
+ INTEGER,
+ NULL,
+ SEQUENCE,
+ encode_der,
+ encode_der_integer,
+)
from cryptography.hazmat.backends.interfaces import (
- CMACBackend, CipherBackend, DERSerializationBackend, DHBackend, DSABackend,
- EllipticCurveBackend, HMACBackend, HashBackend, PBKDF2HMACBackend,
- PEMSerializationBackend, RSABackend, ScryptBackend, X509Backend
+ CMACBackend,
+ CipherBackend,
+ DERSerializationBackend,
+ DHBackend,
+ DSABackend,
+ EllipticCurveBackend,
+ HMACBackend,
+ HashBackend,
+ PBKDF2HMACBackend,
+ PEMSerializationBackend,
+ RSABackend,
+ ScryptBackend,
+ X509Backend,
)
from cryptography.hazmat.backends.openssl import aead
from cryptography.hazmat.backends.openssl.ciphers import _CipherContext
from cryptography.hazmat.backends.openssl.cmac import _CMACContext
-from cryptography.hazmat.backends.openssl.decode_asn1 import _Integers
+from cryptography.hazmat.backends.openssl.decode_asn1 import (
+ _CRL_ENTRY_REASON_ENUM_TO_CODE,
+ _CRL_EXTENSION_HANDLERS,
+ _EXTENSION_HANDLERS_BASE,
+ _EXTENSION_HANDLERS_SCT,
+ _OCSP_BASICRESP_EXTENSION_HANDLERS,
+ _OCSP_REQ_EXTENSION_HANDLERS,
+ _OCSP_SINGLERESP_EXTENSION_HANDLERS_SCT,
+ _REVOKED_EXTENSION_HANDLERS,
+ _X509ExtensionParser,
+)
from cryptography.hazmat.backends.openssl.dh import (
- _DHParameters, _DHPrivateKey, _DHPublicKey,
- _dh_params_dup
+ _DHParameters,
+ _DHPrivateKey,
+ _DHPublicKey,
+ _dh_params_dup,
)
from cryptography.hazmat.backends.openssl.dsa import (
- _DSAParameters, _DSAPrivateKey, _DSAPublicKey
+ _DSAParameters,
+ _DSAPrivateKey,
+ _DSAPublicKey,
)
from cryptography.hazmat.backends.openssl.ec import (
- _EllipticCurvePrivateKey, _EllipticCurvePublicKey
+ _EllipticCurvePrivateKey,
+ _EllipticCurvePublicKey,
+)
+from cryptography.hazmat.backends.openssl.ed25519 import (
+ _Ed25519PrivateKey,
+ _Ed25519PublicKey,
+)
+from cryptography.hazmat.backends.openssl.ed448 import (
+ _ED448_KEY_SIZE,
+ _Ed448PrivateKey,
+ _Ed448PublicKey,
)
from cryptography.hazmat.backends.openssl.encode_asn1 import (
_CRL_ENTRY_EXTENSION_ENCODE_HANDLERS,
- _CRL_EXTENSION_ENCODE_HANDLERS, _EXTENSION_ENCODE_HANDLERS,
- _encode_asn1_int_gc, _encode_asn1_str_gc, _encode_name_gc, _txt2obj_gc,
+ _CRL_EXTENSION_ENCODE_HANDLERS,
+ _EXTENSION_ENCODE_HANDLERS,
+ _OCSP_BASICRESP_EXTENSION_ENCODE_HANDLERS,
+ _OCSP_REQUEST_EXTENSION_ENCODE_HANDLERS,
+ _encode_asn1_int_gc,
+ _encode_asn1_str_gc,
+ _encode_name_gc,
+ _txt2obj_gc,
)
from cryptography.hazmat.backends.openssl.hashes import _HashContext
from cryptography.hazmat.backends.openssl.hmac import _HMACContext
+from cryptography.hazmat.backends.openssl.ocsp import (
+ _OCSPRequest,
+ _OCSPResponse,
+)
+from cryptography.hazmat.backends.openssl.poly1305 import (
+ _POLY1305_KEY_SIZE,
+ _Poly1305Context,
+)
from cryptography.hazmat.backends.openssl.rsa import (
- _RSAPrivateKey, _RSAPublicKey
+ _RSAPrivateKey,
+ _RSAPublicKey,
)
from cryptography.hazmat.backends.openssl.x25519 import (
- _X25519PrivateKey, _X25519PublicKey
+ _X25519PrivateKey,
+ _X25519PublicKey,
+)
+from cryptography.hazmat.backends.openssl.x448 import (
+ _X448PrivateKey,
+ _X448PublicKey,
)
from cryptography.hazmat.backends.openssl.x509 import (
- _Certificate, _CertificateRevocationList,
- _CertificateSigningRequest, _RevokedCertificate
+ _Certificate,
+ _CertificateRevocationList,
+ _CertificateSigningRequest,
+ _RevokedCertificate,
)
from cryptography.hazmat.bindings.openssl import binding
from cryptography.hazmat.primitives import hashes, serialization
-from cryptography.hazmat.primitives.asymmetric import dsa, ec, rsa
+from cryptography.hazmat.primitives.asymmetric import (
+ dsa,
+ ec,
+ ed25519,
+ ed448,
+ rsa,
+)
from cryptography.hazmat.primitives.asymmetric.padding import (
- MGF1, OAEP, PKCS1v15, PSS
+ MGF1,
+ OAEP,
+ PKCS1v15,
+ PSS,
)
from cryptography.hazmat.primitives.ciphers.algorithms import (
- AES, ARC4, Blowfish, CAST5, Camellia, ChaCha20, IDEA, SEED, TripleDES
+ AES,
+ ARC4,
+ Blowfish,
+ CAST5,
+ Camellia,
+ ChaCha20,
+ IDEA,
+ SEED,
+ TripleDES,
)
from cryptography.hazmat.primitives.ciphers.modes import (
- CBC, CFB, CFB8, CTR, ECB, GCM, OFB, XTS
+ CBC,
+ CFB,
+ CFB8,
+ CTR,
+ ECB,
+ GCM,
+ OFB,
+ XTS,
)
from cryptography.hazmat.primitives.kdf import scrypt
+from cryptography.hazmat.primitives.serialization import ssh
+from cryptography.x509 import ocsp
_MemoryBIO = collections.namedtuple("_MemoryBIO", ["bio", "char_ptr"])
+# Not actually supported, just used as a marker for some serialization tests.
+class _RC2(object):
+ pass
+
+
@utils.register_interface(CipherBackend)
@utils.register_interface(CMACBackend)
@utils.register_interface(DERSerializationBackend)
@@ -88,39 +182,93 @@ class Backend(object):
"""
OpenSSL API binding interfaces.
"""
+
name = "openssl"
+ # FIPS has opinions about acceptable algorithms and key sizes, but the
+ # disallowed algorithms are still present in OpenSSL. They just error if
+ # you try to use them. To avoid that we allowlist the algorithms in
+ # FIPS 140-3. This isn't ideal, but FIPS 140-3 is trash so here we are.
+ _fips_aead = {
+ b"aes-128-ccm",
+ b"aes-192-ccm",
+ b"aes-256-ccm",
+ b"aes-128-gcm",
+ b"aes-192-gcm",
+ b"aes-256-gcm",
+ }
+ _fips_ciphers = (AES, TripleDES)
+ _fips_hashes = (
+ hashes.SHA1,
+ hashes.SHA224,
+ hashes.SHA256,
+ hashes.SHA384,
+ hashes.SHA512,
+ hashes.SHA512_224,
+ hashes.SHA512_256,
+ hashes.SHA3_224,
+ hashes.SHA3_256,
+ hashes.SHA3_384,
+ hashes.SHA3_512,
+ hashes.SHAKE128,
+ hashes.SHAKE256,
+ )
+ _fips_rsa_min_key_size = 2048
+ _fips_rsa_min_public_exponent = 65537
+ _fips_dsa_min_modulus = 1 << 2048
+ _fips_dh_min_key_size = 2048
+ _fips_dh_min_modulus = 1 << _fips_dh_min_key_size
+
def __init__(self):
self._binding = binding.Binding()
self._ffi = self._binding.ffi
self._lib = self._binding.lib
+ self._fips_enabled = self._is_fips_enabled()
self._cipher_registry = {}
self._register_default_ciphers()
- self.activate_osrandom_engine()
+ self._register_x509_ext_parsers()
+ self._register_x509_encoders()
+ if self._fips_enabled and self._lib.CRYPTOGRAPHY_NEEDS_OSRANDOM_ENGINE:
+ warnings.warn(
+ "OpenSSL FIPS mode is enabled. Can't enable DRBG fork safety.",
+ UserWarning,
+ )
+ else:
+ self.activate_osrandom_engine()
self._dh_types = [self._lib.EVP_PKEY_DH]
if self._lib.Cryptography_HAS_EVP_PKEY_DHX:
self._dh_types.append(self._lib.EVP_PKEY_DHX)
- def openssl_assert(self, ok):
- return binding._openssl_assert(self._lib, ok)
+ def openssl_assert(self, ok, errors=None):
+ return binding._openssl_assert(self._lib, ok, errors=errors)
+
+ def _is_fips_enabled(self):
+ fips_mode = getattr(self._lib, "FIPS_mode", lambda: 0)
+ mode = fips_mode()
+ if mode == 0:
+ # OpenSSL without FIPS pushes an error on the error stack
+ self._lib.ERR_clear_error()
+ return bool(mode)
def activate_builtin_random(self):
- # Obtain a new structural reference.
- e = self._lib.ENGINE_get_default_RAND()
- if e != self._ffi.NULL:
- self._lib.ENGINE_unregister_RAND(e)
- # Reset the RNG to use the new engine.
- self._lib.RAND_cleanup()
- # decrement the structural reference from get_default_RAND
- res = self._lib.ENGINE_finish(e)
- self.openssl_assert(res == 1)
+ if self._lib.CRYPTOGRAPHY_NEEDS_OSRANDOM_ENGINE:
+ # Obtain a new structural reference.
+ e = self._lib.ENGINE_get_default_RAND()
+ if e != self._ffi.NULL:
+ self._lib.ENGINE_unregister_RAND(e)
+ # Reset the RNG to use the built-in.
+ res = self._lib.RAND_set_rand_method(self._ffi.NULL)
+ self.openssl_assert(res == 1)
+ # decrement the structural reference from get_default_RAND
+ res = self._lib.ENGINE_finish(e)
+ self.openssl_assert(res == 1)
@contextlib.contextmanager
def _get_osurandom_engine(self):
# Fetches an engine by id and returns it. This creates a structural
# reference.
- e = self._lib.ENGINE_by_id(self._binding._osrandom_engine_id)
+ e = self._lib.ENGINE_by_id(self._lib.Cryptography_osrandom_engine_id)
self.openssl_assert(e != self._ffi.NULL)
# Initialize the engine for use. This adds a functional reference.
res = self._lib.ENGINE_init(e)
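
Reviewer note on the FIPS plumbing above: when _is_fips_enabled() reports FIPS mode, hash_supported() and cipher_supported() consult the _fips_hashes/_fips_ciphers allowlists before ever asking OpenSSL. A rough sketch of the observable effect (backend._fips_enabled is private and shown only for illustration):

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes

    backend = default_backend()
    print(backend._fips_enabled)               # False on a stock OpenSSL build

    # SHA256 is allowlisted either way; with FIPS enabled, MD5 would report
    # False here even though libcrypto still contains the implementation.
    print(backend.hash_supported(hashes.SHA256()))
    print(backend.hash_supported(hashes.MD5()))
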
@@ -137,30 +285,32 @@ def _get_osurandom_engine(self):
self.openssl_assert(res == 1)
def activate_osrandom_engine(self):
- # Unregister and free the current engine.
- self.activate_builtin_random()
- with self._get_osurandom_engine() as e:
- # Set the engine as the default RAND provider.
- res = self._lib.ENGINE_set_default_RAND(e)
+ if self._lib.CRYPTOGRAPHY_NEEDS_OSRANDOM_ENGINE:
+ # Unregister and free the current engine.
+ self.activate_builtin_random()
+ with self._get_osurandom_engine() as e:
+ # Set the engine as the default RAND provider.
+ res = self._lib.ENGINE_set_default_RAND(e)
+ self.openssl_assert(res == 1)
+ # Reset the RNG to use the engine
+ res = self._lib.RAND_set_rand_method(self._ffi.NULL)
self.openssl_assert(res == 1)
- # Reset the RNG to use the new engine.
- self._lib.RAND_cleanup()
def osrandom_engine_implementation(self):
buf = self._ffi.new("char[]", 64)
with self._get_osurandom_engine() as e:
- res = self._lib.ENGINE_ctrl_cmd(e, b"get_implementation",
- len(buf), buf,
- self._ffi.NULL, 0)
+ res = self._lib.ENGINE_ctrl_cmd(
+ e, b"get_implementation", len(buf), buf, self._ffi.NULL, 0
+ )
self.openssl_assert(res > 0)
- return self._ffi.string(buf).decode('ascii')
+ return self._ffi.string(buf).decode("ascii")
def openssl_version_text(self):
"""
Friendly string name of the loaded OpenSSL library. This is not
necessarily the same version as it was compiled against.
- Example: OpenSSL 1.0.1e 11 Feb 2013
+ Example: OpenSSL 1.1.1d 10 Sep 2019
"""
return self._ffi.string(
self._lib.OpenSSL_version(self._lib.OPENSSL_VERSION)
@@ -172,20 +322,28 @@ def openssl_version_number(self):
def create_hmac_ctx(self, key, algorithm):
return _HMACContext(self, key, algorithm)
- def _build_openssl_digest_name(self, algorithm):
+ def _evp_md_from_algorithm(self, algorithm):
if algorithm.name == "blake2b" or algorithm.name == "blake2s":
- alg = "{0}{1}".format(
+ alg = "{}{}".format(
algorithm.name, algorithm.digest_size * 8
).encode("ascii")
else:
alg = algorithm.name.encode("ascii")
- return alg
+ evp_md = self._lib.EVP_get_digestbyname(alg)
+ return evp_md
+
+ def _evp_md_non_null_from_algorithm(self, algorithm):
+ evp_md = self._evp_md_from_algorithm(algorithm)
+ self.openssl_assert(evp_md != self._ffi.NULL)
+ return evp_md
def hash_supported(self, algorithm):
- name = self._build_openssl_digest_name(algorithm)
- digest = self._lib.EVP_get_digestbyname(name)
- return digest != self._ffi.NULL
+ if self._fips_enabled and not isinstance(algorithm, self._fips_hashes):
+ return False
+
+ evp_md = self._evp_md_from_algorithm(algorithm)
+ return evp_md != self._ffi.NULL
def hmac_supported(self, algorithm):
return self.hash_supported(algorithm)
@@ -194,6 +352,8 @@ def create_hash_ctx(self, algorithm):
return _HashContext(self, algorithm)
def cipher_supported(self, cipher, mode):
+ if self._fips_enabled and not isinstance(cipher, self._fips_ciphers):
+ return False
try:
adapter = self._cipher_registry[type(cipher), type(mode)]
except KeyError:
@@ -203,8 +363,10 @@ def cipher_supported(self, cipher, mode):
def register_cipher_adapter(self, cipher_cls, mode_cls, adapter):
if (cipher_cls, mode_cls) in self._cipher_registry:
- raise ValueError("Duplicate registration for: {0} {1}.".format(
- cipher_cls, mode_cls)
+ raise ValueError(
+ "Duplicate registration for: {} {}.".format(
+ cipher_cls, mode_cls
+ )
)
self._cipher_registry[cipher_cls, mode_cls] = adapter
@@ -213,36 +375,28 @@ def _register_default_ciphers(self):
self.register_cipher_adapter(
AES,
mode_cls,
- GetCipherByName("{cipher.name}-{cipher.key_size}-{mode.name}")
+ GetCipherByName("{cipher.name}-{cipher.key_size}-{mode.name}"),
)
for mode_cls in [CBC, CTR, ECB, OFB, CFB]:
self.register_cipher_adapter(
Camellia,
mode_cls,
- GetCipherByName("{cipher.name}-{cipher.key_size}-{mode.name}")
+ GetCipherByName("{cipher.name}-{cipher.key_size}-{mode.name}"),
)
for mode_cls in [CBC, CFB, CFB8, OFB]:
self.register_cipher_adapter(
- TripleDES,
- mode_cls,
- GetCipherByName("des-ede3-{mode.name}")
+ TripleDES, mode_cls, GetCipherByName("des-ede3-{mode.name}")
)
self.register_cipher_adapter(
- TripleDES,
- ECB,
- GetCipherByName("des-ede3")
+ TripleDES, ECB, GetCipherByName("des-ede3")
)
for mode_cls in [CBC, CFB, OFB, ECB]:
self.register_cipher_adapter(
- Blowfish,
- mode_cls,
- GetCipherByName("bf-{mode.name}")
+ Blowfish, mode_cls, GetCipherByName("bf-{mode.name}")
)
for mode_cls in [CBC, CFB, OFB, ECB]:
self.register_cipher_adapter(
- SEED,
- mode_cls,
- GetCipherByName("seed-{mode.name}")
+ SEED, mode_cls, GetCipherByName("seed-{mode.name}")
)
for cipher_cls, mode_cls in itertools.product(
[CAST5, IDEA],
@@ -251,20 +405,84 @@ def _register_default_ciphers(self):
self.register_cipher_adapter(
cipher_cls,
mode_cls,
- GetCipherByName("{cipher.name}-{mode.name}")
+ GetCipherByName("{cipher.name}-{mode.name}"),
)
+ self.register_cipher_adapter(ARC4, type(None), GetCipherByName("rc4"))
+ # We don't actually support RC2, this is just used by some tests.
+ self.register_cipher_adapter(_RC2, type(None), GetCipherByName("rc2"))
self.register_cipher_adapter(
- ARC4,
- type(None),
- GetCipherByName("rc4")
- )
- self.register_cipher_adapter(
- ChaCha20,
- type(None),
- GetCipherByName("chacha20")
+ ChaCha20, type(None), GetCipherByName("chacha20")
)
self.register_cipher_adapter(AES, XTS, _get_xts_cipher)
+ def _register_x509_ext_parsers(self):
+ ext_handlers = _EXTENSION_HANDLERS_BASE.copy()
+ # All revoked extensions are valid single response extensions, see:
+ # https://tools.ietf.org/html/rfc6960#section-4.4.5
+ singleresp_handlers = _REVOKED_EXTENSION_HANDLERS.copy()
+
+ if self._lib.Cryptography_HAS_SCT:
+ ext_handlers.update(_EXTENSION_HANDLERS_SCT)
+ singleresp_handlers.update(_OCSP_SINGLERESP_EXTENSION_HANDLERS_SCT)
+
+ self._certificate_extension_parser = _X509ExtensionParser(
+ self,
+ ext_count=self._lib.X509_get_ext_count,
+ get_ext=self._lib.X509_get_ext,
+ handlers=ext_handlers,
+ )
+ self._csr_extension_parser = _X509ExtensionParser(
+ self,
+ ext_count=self._lib.sk_X509_EXTENSION_num,
+ get_ext=self._lib.sk_X509_EXTENSION_value,
+ handlers=ext_handlers,
+ )
+ self._revoked_cert_extension_parser = _X509ExtensionParser(
+ self,
+ ext_count=self._lib.X509_REVOKED_get_ext_count,
+ get_ext=self._lib.X509_REVOKED_get_ext,
+ handlers=_REVOKED_EXTENSION_HANDLERS,
+ )
+ self._crl_extension_parser = _X509ExtensionParser(
+ self,
+ ext_count=self._lib.X509_CRL_get_ext_count,
+ get_ext=self._lib.X509_CRL_get_ext,
+ handlers=_CRL_EXTENSION_HANDLERS,
+ )
+ self._ocsp_req_ext_parser = _X509ExtensionParser(
+ self,
+ ext_count=self._lib.OCSP_REQUEST_get_ext_count,
+ get_ext=self._lib.OCSP_REQUEST_get_ext,
+ handlers=_OCSP_REQ_EXTENSION_HANDLERS,
+ )
+ self._ocsp_basicresp_ext_parser = _X509ExtensionParser(
+ self,
+ ext_count=self._lib.OCSP_BASICRESP_get_ext_count,
+ get_ext=self._lib.OCSP_BASICRESP_get_ext,
+ handlers=_OCSP_BASICRESP_EXTENSION_HANDLERS,
+ )
+ self._ocsp_singleresp_ext_parser = _X509ExtensionParser(
+ self,
+ ext_count=self._lib.OCSP_SINGLERESP_get_ext_count,
+ get_ext=self._lib.OCSP_SINGLERESP_get_ext,
+ handlers=singleresp_handlers,
+ )
+
+ def _register_x509_encoders(self):
+ self._extension_encode_handlers = _EXTENSION_ENCODE_HANDLERS.copy()
+ self._crl_extension_encode_handlers = (
+ _CRL_EXTENSION_ENCODE_HANDLERS.copy()
+ )
+ self._crl_entry_extension_encode_handlers = (
+ _CRL_ENTRY_EXTENSION_ENCODE_HANDLERS.copy()
+ )
+ self._ocsp_request_extension_encode_handlers = (
+ _OCSP_REQUEST_EXTENSION_ENCODE_HANDLERS.copy()
+ )
+ self._ocsp_basicresp_extension_encode_handlers = (
+ _OCSP_BASICRESP_EXTENSION_ENCODE_HANDLERS.copy()
+ )
+
def create_symmetric_encryption_ctx(self, cipher, mode):
return _CipherContext(self, cipher, mode, _CipherContext._ENCRYPT)
@@ -274,21 +492,21 @@ def create_symmetric_decryption_ctx(self, cipher, mode):
def pbkdf2_hmac_supported(self, algorithm):
return self.hmac_supported(algorithm)
- def derive_pbkdf2_hmac(self, algorithm, length, salt, iterations,
- key_material):
+ def derive_pbkdf2_hmac(
+ self, algorithm, length, salt, iterations, key_material
+ ):
buf = self._ffi.new("unsigned char[]", length)
- evp_md = self._lib.EVP_get_digestbyname(
- algorithm.name.encode("ascii"))
- self.openssl_assert(evp_md != self._ffi.NULL)
+ evp_md = self._evp_md_non_null_from_algorithm(algorithm)
+ key_material_ptr = self._ffi.from_buffer(key_material)
res = self._lib.PKCS5_PBKDF2_HMAC(
- key_material,
+ key_material_ptr,
len(key_material),
salt,
len(salt),
iterations,
evp_md,
length,
- buf
+ buf,
)
self.openssl_assert(res == 1)
return self._ffi.buffer(buf)[:]
@@ -296,17 +514,23 @@ def derive_pbkdf2_hmac(self, algorithm, length, salt, iterations,
def _consume_errors(self):
return binding._consume_errors(self._lib)
+ def _consume_errors_with_text(self):
+ return binding._consume_errors_with_text(self._lib)
+
def _bn_to_int(self, bn):
assert bn != self._ffi.NULL
- if six.PY3:
+ if not six.PY2:
# Python 3 has constant time from_bytes, so use that.
bn_num_bytes = self._lib.BN_num_bytes(bn)
bin_ptr = self._ffi.new("unsigned char[]", bn_num_bytes)
bin_len = self._lib.BN_bn2bin(bn, bin_ptr)
# A zero length means the BN has value 0
self.openssl_assert(bin_len >= 0)
- return int.from_bytes(self._ffi.buffer(bin_ptr)[:bin_len], "big")
+ val = int.from_bytes(self._ffi.buffer(bin_ptr)[:bin_len], "big")
+ if self._lib.BN_is_negative(bn):
+ val = -val
+ return val
else:
# Under Python 2 the best we can do is hex()
hex_cdata = self._lib.BN_bn2hex(bn)
@@ -327,7 +551,7 @@ def _int_to_bn(self, num, bn=None):
if bn is None:
bn = self._ffi.NULL
- if six.PY3:
+ if not six.PY2:
# Python 3 has constant time to_bytes, so use that.
binary = num.to_bytes(int(num.bit_length() / 8.0 + 1), "big")
@@ -365,8 +589,11 @@ def generate_rsa_private_key(self, public_exponent, key_size):
return _RSAPrivateKey(self, rsa_cdata, evp_pkey)
def generate_rsa_parameters_supported(self, public_exponent, key_size):
- return (public_exponent >= 3 and public_exponent & 1 != 0 and
- key_size >= 512)
+ return (
+ public_exponent >= 3
+ and public_exponent & 1 != 0
+ and key_size >= 512
+ )
def load_rsa_private_numbers(self, numbers):
rsa._check_private_key_components(
@@ -377,7 +604,7 @@ def load_rsa_private_numbers(self, numbers):
numbers.dmq1,
numbers.iqmp,
numbers.public_numbers.e,
- numbers.public_numbers.n
+ numbers.public_numbers.n,
)
rsa_cdata = self._lib.RSA_new()
self.openssl_assert(rsa_cdata != self._ffi.NULL)
@@ -434,13 +661,11 @@ def _bytes_to_bio(self, data):
The char* is the storage for the BIO and it must stay alive until the
BIO is finished with.
"""
- data_char_p = self._ffi.new("char[]", data)
- bio = self._lib.BIO_new_mem_buf(
- data_char_p, len(data)
- )
+ data_ptr = self._ffi.from_buffer(data)
+ bio = self._lib.BIO_new_mem_buf(data_ptr, len(data))
self.openssl_assert(bio != self._ffi.NULL)
- return _MemoryBIO(self._ffi.gc(bio, self._lib.BIO_free), data_char_p)
+ return _MemoryBIO(self._ffi.gc(bio, self._lib.BIO_free), data_ptr)
def _create_mem_bio_gc(self):
"""
@@ -492,6 +717,18 @@ def _evp_pkey_to_private_key(self, evp_pkey):
self.openssl_assert(dh_cdata != self._ffi.NULL)
dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free)
return _DHPrivateKey(self, dh_cdata, evp_pkey)
+ elif key_type == getattr(self._lib, "EVP_PKEY_ED25519", None):
+ # EVP_PKEY_ED25519 is not present in OpenSSL < 1.1.1
+ return _Ed25519PrivateKey(self, evp_pkey)
+ elif key_type == getattr(self._lib, "EVP_PKEY_X448", None):
+ # EVP_PKEY_X448 is not present in OpenSSL < 1.1.1
+ return _X448PrivateKey(self, evp_pkey)
+ elif key_type == getattr(self._lib, "EVP_PKEY_X25519", None):
+ # EVP_PKEY_X25519 is not present in OpenSSL < 1.1.0
+ return _X25519PrivateKey(self, evp_pkey)
+ elif key_type == getattr(self._lib, "EVP_PKEY_ED448", None):
+ # EVP_PKEY_ED448 is not present in OpenSSL < 1.1.1
+ return _Ed448PrivateKey(self, evp_pkey)
else:
raise UnsupportedAlgorithm("Unsupported key type.")
@@ -523,19 +760,32 @@ def _evp_pkey_to_public_key(self, evp_pkey):
self.openssl_assert(dh_cdata != self._ffi.NULL)
dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free)
return _DHPublicKey(self, dh_cdata, evp_pkey)
+ elif key_type == getattr(self._lib, "EVP_PKEY_ED25519", None):
+ # EVP_PKEY_ED25519 is not present in OpenSSL < 1.1.1
+ return _Ed25519PublicKey(self, evp_pkey)
+ elif key_type == getattr(self._lib, "EVP_PKEY_X448", None):
+ # EVP_PKEY_X448 is not present in OpenSSL < 1.1.1
+ return _X448PublicKey(self, evp_pkey)
+ elif key_type == getattr(self._lib, "EVP_PKEY_X25519", None):
+ # EVP_PKEY_X25519 is not present in OpenSSL < 1.1.0
+ return _X25519PublicKey(self, evp_pkey)
+ elif key_type == getattr(self._lib, "EVP_PKEY_ED448", None):
+ # EVP_PKEY_ED448 is not present in OpenSSL < 1.1.1
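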
+ return _Ed448PublicKey(self, evp_pkey)
else:
raise UnsupportedAlgorithm("Unsupported key type.")
def _oaep_hash_supported(self, algorithm):
if self._lib.Cryptography_HAS_RSA_OAEP_MD:
return isinstance(
- algorithm, (
+ algorithm,
+ (
hashes.SHA1,
hashes.SHA224,
hashes.SHA256,
hashes.SHA384,
hashes.SHA512,
- )
+ ),
)
else:
return isinstance(algorithm, hashes.SHA1)
@@ -547,27 +797,34 @@ def rsa_padding_supported(self, padding):
return self.hash_supported(padding._mgf._algorithm)
elif isinstance(padding, OAEP) and isinstance(padding._mgf, MGF1):
return (
- self._oaep_hash_supported(padding._mgf._algorithm) and
- self._oaep_hash_supported(padding._algorithm) and
- (
- (padding._label is None or len(padding._label) == 0) or
- self._lib.Cryptography_HAS_RSA_OAEP_LABEL == 1
+ self._oaep_hash_supported(padding._mgf._algorithm)
+ and self._oaep_hash_supported(padding._algorithm)
+ and (
+ (padding._label is None or len(padding._label) == 0)
+ or self._lib.Cryptography_HAS_RSA_OAEP_LABEL == 1
)
)
else:
return False
def generate_dsa_parameters(self, key_size):
- if key_size not in (1024, 2048, 3072):
- raise ValueError("Key size must be 1024 or 2048 or 3072 bits.")
+ if key_size not in (1024, 2048, 3072, 4096):
+ raise ValueError(
+ "Key size must be 1024, 2048, 3072, or 4096 bits."
+ )
ctx = self._lib.DSA_new()
self.openssl_assert(ctx != self._ffi.NULL)
ctx = self._ffi.gc(ctx, self._lib.DSA_free)
res = self._lib.DSA_generate_parameters_ex(
- ctx, key_size, self._ffi.NULL, 0,
- self._ffi.NULL, self._ffi.NULL, self._ffi.NULL
+ ctx,
+ key_size,
+ self._ffi.NULL,
+ 0,
+ self._ffi.NULL,
+ self._ffi.NULL,
+ self._ffi.NULL,
)
self.openssl_assert(res == 1)
@@ -663,23 +920,37 @@ def cmac_algorithm_supported(self, algorithm):
def create_cmac_ctx(self, algorithm):
return _CMACContext(self, algorithm)
- def create_x509_csr(self, builder, private_key, algorithm):
- if not isinstance(algorithm, hashes.HashAlgorithm):
- raise TypeError('Algorithm must be a registered hash algorithm.')
-
- if (
- isinstance(algorithm, hashes.MD5) and not
- isinstance(private_key, rsa.RSAPrivateKey)
+ def _x509_check_signature_params(self, private_key, algorithm):
+ if isinstance(
+ private_key, (ed25519.Ed25519PrivateKey, ed448.Ed448PrivateKey)
+ ):
+ if algorithm is not None:
+ raise ValueError(
+ "algorithm must be None when signing via ed25519 or ed448"
+ )
+ elif not isinstance(
+ private_key,
+ (rsa.RSAPrivateKey, dsa.DSAPrivateKey, ec.EllipticCurvePrivateKey),
+ ):
+ raise TypeError(
+ "Key must be an rsa, dsa, ec, ed25519, or ed448 private key."
+ )
+ elif not isinstance(algorithm, hashes.HashAlgorithm):
+ raise TypeError("Algorithm must be a registered hash algorithm.")
+ elif isinstance(algorithm, hashes.MD5) and not isinstance(
+ private_key, rsa.RSAPrivateKey
):
raise ValueError(
- "MD5 is not a supported hash algorithm for EC/DSA CSRs"
+ "MD5 hash algorithm is only supported with RSA keys"
)
+ def create_x509_csr(self, builder, private_key, algorithm):
+ if not isinstance(builder, x509.CertificateSigningRequestBuilder):
+ raise TypeError("Builder type mismatch.")
+ self._x509_check_signature_params(private_key, algorithm)
+
# Resolve the signature algorithm.
- evp_md = self._lib.EVP_get_digestbyname(
- algorithm.name.encode('ascii')
- )
- self.openssl_assert(evp_md != self._ffi.NULL)
+ evp_md = self._evp_md_x509_null_if_eddsa(private_key, algorithm)
# Create an empty request.
x509_req = self._lib.X509_REQ_new()
@@ -698,69 +969,64 @@ def create_x509_csr(self, builder, private_key, algorithm):
# Set subject public key.
public_key = private_key.public_key()
- res = self._lib.X509_REQ_set_pubkey(
- x509_req, public_key._evp_pkey
- )
+ res = self._lib.X509_REQ_set_pubkey(x509_req, public_key._evp_pkey)
self.openssl_assert(res == 1)
# Add extensions.
sk_extension = self._lib.sk_X509_EXTENSION_new_null()
self.openssl_assert(sk_extension != self._ffi.NULL)
sk_extension = self._ffi.gc(
- sk_extension, self._lib.sk_X509_EXTENSION_free
+ sk_extension,
+ lambda x: self._lib.sk_X509_EXTENSION_pop_free(
+ x,
+ self._ffi.addressof(
+ self._lib._original_lib, "X509_EXTENSION_free"
+ ),
+ ),
)
- # gc is not necessary for CSRs, as sk_X509_EXTENSION_free
- # will release all the X509_EXTENSIONs.
+ # Don't GC individual extensions because the memory is owned by
+ # sk_extensions and will be freed along with it.
self._create_x509_extensions(
extensions=builder._extensions,
- handlers=_EXTENSION_ENCODE_HANDLERS,
+ handlers=self._extension_encode_handlers,
x509_obj=sk_extension,
add_func=self._lib.sk_X509_EXTENSION_insert,
- gc=False
+ gc=False,
)
res = self._lib.X509_REQ_add_extensions(x509_req, sk_extension)
self.openssl_assert(res == 1)
- # Sign the request using the requester's private key.
- res = self._lib.X509_REQ_sign(
- x509_req, private_key._evp_pkey, evp_md
- )
- if res == 0:
- errors = self._consume_errors()
- self.openssl_assert(
- errors[0]._lib_reason_match(
- self._lib.ERR_LIB_RSA,
- self._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY
- )
+ # Add attributes (all bytes encoded as ASN1 UTF8_STRING)
+ for attr_oid, attr_val in builder._attributes:
+ obj = _txt2obj_gc(self, attr_oid.dotted_string)
+ res = self._lib.X509_REQ_add1_attr_by_OBJ(
+ x509_req,
+ obj,
+ x509.name._ASN1Type.UTF8String.value,
+ attr_val,
+ len(attr_val),
)
+ self.openssl_assert(res == 1)
- raise ValueError("Digest too big for RSA key")
+ # Sign the request using the requester's private key.
+ res = self._lib.X509_REQ_sign(x509_req, private_key._evp_pkey, evp_md)
+ if res == 0:
+ errors = self._consume_errors_with_text()
+ raise ValueError("Signing failed", errors)
return _CertificateSigningRequest(self, x509_req)
def create_x509_certificate(self, builder, private_key, algorithm):
if not isinstance(builder, x509.CertificateBuilder):
- raise TypeError('Builder type mismatch.')
- if not isinstance(algorithm, hashes.HashAlgorithm):
- raise TypeError('Algorithm must be a registered hash algorithm.')
-
- if (
- isinstance(algorithm, hashes.MD5) and not
- isinstance(private_key, rsa.RSAPrivateKey)
- ):
- raise ValueError(
- "MD5 is not a supported hash algorithm for EC/DSA certificates"
- )
+ raise TypeError("Builder type mismatch.")
+ self._x509_check_signature_params(private_key, algorithm)
# Resolve the signature algorithm.
- evp_md = self._lib.EVP_get_digestbyname(
- algorithm.name.encode('ascii')
- )
- self.openssl_assert(evp_md != self._ffi.NULL)
+ evp_md = self._evp_md_x509_null_if_eddsa(private_key, algorithm)
# Create an empty certificate.
x509_cert = self._lib.X509_new()
- x509_cert = self._ffi.gc(x509_cert, backend._lib.X509_free)
+ x509_cert = self._ffi.gc(x509_cert, self._lib.X509_free)
# Set the x509 version.
res = self._lib.X509_set_version(x509_cert, builder._version.value)
@@ -784,28 +1050,22 @@ def create_x509_certificate(self, builder, private_key, algorithm):
self.openssl_assert(res == 1)
# Set the "not before" time.
- res = self._lib.ASN1_TIME_set(
- self._lib.X509_get_notBefore(x509_cert),
- calendar.timegm(builder._not_valid_before.timetuple())
+ self._set_asn1_time(
+ self._lib.X509_getm_notBefore(x509_cert), builder._not_valid_before
)
- if res == self._ffi.NULL:
- self._raise_time_set_error()
# Set the "not after" time.
- res = self._lib.ASN1_TIME_set(
- self._lib.X509_get_notAfter(x509_cert),
- calendar.timegm(builder._not_valid_after.timetuple())
+ self._set_asn1_time(
+ self._lib.X509_getm_notAfter(x509_cert), builder._not_valid_after
)
- if res == self._ffi.NULL:
- self._raise_time_set_error()
# Add extensions.
self._create_x509_extensions(
extensions=builder._extensions,
- handlers=_EXTENSION_ENCODE_HANDLERS,
+ handlers=self._extension_encode_handlers,
x509_obj=x509_cert,
add_func=self._lib.X509_add_ext,
- gc=True
+ gc=True,
)
# Set the issuer name.
@@ -815,56 +1075,47 @@ def create_x509_certificate(self, builder, private_key, algorithm):
self.openssl_assert(res == 1)
# Sign the certificate with the issuer's private key.
- res = self._lib.X509_sign(
- x509_cert, private_key._evp_pkey, evp_md
- )
+ res = self._lib.X509_sign(x509_cert, private_key._evp_pkey, evp_md)
if res == 0:
- errors = self._consume_errors()
- self.openssl_assert(
- errors[0]._lib_reason_match(
- self._lib.ERR_LIB_RSA,
- self._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY
- )
- )
- raise ValueError("Digest too big for RSA key")
+ errors = self._consume_errors_with_text()
+ raise ValueError("Signing failed", errors)
return _Certificate(self, x509_cert)
- def _raise_time_set_error(self):
- errors = self._consume_errors()
- self.openssl_assert(
- errors[0]._lib_reason_match(
- self._lib.ERR_LIB_ASN1,
- self._lib.ASN1_R_ERROR_GETTING_TIME
- )
- )
- raise ValueError(
- "Invalid time. This error can occur if you set a time too far in "
- "the future on Windows."
- )
+ def _evp_md_x509_null_if_eddsa(self, private_key, algorithm):
+ if isinstance(
+ private_key, (ed25519.Ed25519PrivateKey, ed448.Ed448PrivateKey)
+ ):
+ # OpenSSL requires us to pass NULL for EVP_MD for ed25519/ed448
+ return self._ffi.NULL
+ else:
+ return self._evp_md_non_null_from_algorithm(algorithm)
+
+ def _set_asn1_time(self, asn1_time, time):
+ if time.year >= 2050:
+ asn1_str = time.strftime("%Y%m%d%H%M%SZ").encode("ascii")
+ else:
+ asn1_str = time.strftime("%y%m%d%H%M%SZ").encode("ascii")
+ res = self._lib.ASN1_TIME_set_string(asn1_time, asn1_str)
+ self.openssl_assert(res == 1)
+
+ def _create_asn1_time(self, time):
+ asn1_time = self._lib.ASN1_TIME_new()
+ self.openssl_assert(asn1_time != self._ffi.NULL)
+ asn1_time = self._ffi.gc(asn1_time, self._lib.ASN1_TIME_free)
+ self._set_asn1_time(asn1_time, time)
+ return asn1_time
def create_x509_crl(self, builder, private_key, algorithm):
if not isinstance(builder, x509.CertificateRevocationListBuilder):
- raise TypeError('Builder type mismatch.')
- if not isinstance(algorithm, hashes.HashAlgorithm):
- raise TypeError('Algorithm must be a registered hash algorithm.')
+ raise TypeError("Builder type mismatch.")
+ self._x509_check_signature_params(private_key, algorithm)
- if (
- isinstance(algorithm, hashes.MD5) and not
- isinstance(private_key, rsa.RSAPrivateKey)
- ):
- raise ValueError(
- "MD5 is not a supported hash algorithm for EC/DSA CRLs"
- )
-
- evp_md = self._lib.EVP_get_digestbyname(
- algorithm.name.encode('ascii')
- )
- self.openssl_assert(evp_md != self._ffi.NULL)
+ evp_md = self._evp_md_x509_null_if_eddsa(private_key, algorithm)
# Create an empty CRL.
x509_crl = self._lib.X509_CRL_new()
- x509_crl = self._ffi.gc(x509_crl, backend._lib.X509_CRL_free)
+ x509_crl = self._ffi.gc(x509_crl, self._lib.X509_CRL_free)
# Set the x509 CRL version. We only support v2 (integer value 1).
res = self._lib.X509_CRL_set_version(x509_crl, 1)
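
The calendar/ASN1_TIME_set path is replaced by _set_asn1_time()/_create_asn1_time(), which format the time string directly: two-digit years (UTCTime) through 2049 and four-digit years (GeneralizedTime) from 2050 on, as RFC 5280 requires. A standalone sketch of that formatting rule (the helper name here is made up for illustration):

    from datetime import datetime

    def asn1_time_string(t):
        # Mirrors _set_asn1_time above: UTCTime (YYMMDDHHMMSSZ) before 2050,
        # GeneralizedTime (YYYYMMDDHHMMSSZ) from 2050 onward.
        fmt = "%Y%m%d%H%M%SZ" if t.year >= 2050 else "%y%m%d%H%M%SZ"
        return t.strftime(fmt).encode("ascii")

    assert asn1_time_string(datetime(2030, 1, 2, 3, 4, 5)) == b"300102030405Z"
    assert asn1_time_string(datetime(2051, 1, 2, 3, 4, 5)) == b"20510102030405Z"
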
@@ -877,64 +1128,45 @@ def create_x509_crl(self, builder, private_key, algorithm):
self.openssl_assert(res == 1)
# Set the last update time.
- last_update = self._lib.ASN1_TIME_set(
- self._ffi.NULL, calendar.timegm(builder._last_update.timetuple())
- )
- self.openssl_assert(last_update != self._ffi.NULL)
- last_update = self._ffi.gc(last_update, self._lib.ASN1_TIME_free)
+ last_update = self._create_asn1_time(builder._last_update)
res = self._lib.X509_CRL_set_lastUpdate(x509_crl, last_update)
self.openssl_assert(res == 1)
# Set the next update time.
- next_update = self._lib.ASN1_TIME_set(
- self._ffi.NULL, calendar.timegm(builder._next_update.timetuple())
- )
- self.openssl_assert(next_update != self._ffi.NULL)
- next_update = self._ffi.gc(next_update, self._lib.ASN1_TIME_free)
+ next_update = self._create_asn1_time(builder._next_update)
res = self._lib.X509_CRL_set_nextUpdate(x509_crl, next_update)
self.openssl_assert(res == 1)
# Add extensions.
self._create_x509_extensions(
extensions=builder._extensions,
- handlers=_CRL_EXTENSION_ENCODE_HANDLERS,
+ handlers=self._crl_extension_encode_handlers,
x509_obj=x509_crl,
add_func=self._lib.X509_CRL_add_ext,
- gc=True
+ gc=True,
)
# add revoked certificates
for revoked_cert in builder._revoked_certificates:
# Duplicating because the X509_CRL takes ownership and will free
# this memory when X509_CRL_free is called.
- revoked = self._lib.Cryptography_X509_REVOKED_dup(
- revoked_cert._x509_revoked
- )
+ revoked = self._lib.X509_REVOKED_dup(revoked_cert._x509_revoked)
self.openssl_assert(revoked != self._ffi.NULL)
res = self._lib.X509_CRL_add0_revoked(x509_crl, revoked)
self.openssl_assert(res == 1)
- res = self._lib.X509_CRL_sign(
- x509_crl, private_key._evp_pkey, evp_md
- )
+ res = self._lib.X509_CRL_sign(x509_crl, private_key._evp_pkey, evp_md)
if res == 0:
- errors = self._consume_errors()
- self.openssl_assert(
- errors[0]._lib_reason_match(
- self._lib.ERR_LIB_RSA,
- self._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY
- )
- )
- raise ValueError("Digest too big for RSA key")
+ errors = self._consume_errors_with_text()
+ raise ValueError("Signing failed", errors)
return _CertificateRevocationList(self, x509_crl)
- def _create_x509_extensions(self, extensions, handlers, x509_obj,
- add_func, gc):
+ def _create_x509_extensions(
+ self, extensions, handlers, x509_obj, add_func, gc
+ ):
for i, extension in enumerate(extensions):
- x509_extension = self._create_x509_extension(
- handlers, extension
- )
+ x509_extension = self._create_x509_extension(handlers, extension)
self.openssl_assert(x509_extension != self._ffi.NULL)
if gc:
@@ -952,34 +1184,41 @@ def _create_raw_x509_extension(self, extension, value):
def _create_x509_extension(self, handlers, extension):
if isinstance(extension.value, x509.UnrecognizedExtension):
- value = _encode_asn1_str_gc(
- self, extension.value.value, len(extension.value.value)
- )
+ value = _encode_asn1_str_gc(self, extension.value.value)
return self._create_raw_x509_extension(extension, value)
elif isinstance(extension.value, x509.TLSFeature):
- asn1 = _Integers([x.value for x in extension.value]).dump()
- value = _encode_asn1_str_gc(self, asn1, len(asn1))
+ asn1 = encode_der(
+ SEQUENCE,
+ *[
+ encode_der(INTEGER, encode_der_integer(x.value))
+ for x in extension.value
+ ]
+ )
+ value = _encode_asn1_str_gc(self, asn1)
+ return self._create_raw_x509_extension(extension, value)
+ elif isinstance(extension.value, x509.PrecertPoison):
+ value = _encode_asn1_str_gc(self, encode_der(NULL))
return self._create_raw_x509_extension(extension, value)
else:
try:
encode = handlers[extension.oid]
except KeyError:
raise NotImplementedError(
- 'Extension not supported: {0}'.format(extension.oid)
+ "Extension not supported: {}".format(extension.oid)
)
ext_struct = encode(self, extension.value)
nid = self._lib.OBJ_txt2nid(
extension.oid.dotted_string.encode("ascii")
)
- backend.openssl_assert(nid != self._lib.NID_undef)
+ self.openssl_assert(nid != self._lib.NID_undef)
return self._lib.X509V3_EXT_i2d(
nid, 1 if extension.critical else 0, ext_struct
)
def create_x509_revoked_certificate(self, builder):
if not isinstance(builder, x509.RevokedCertificateBuilder):
- raise TypeError('Builder type mismatch.')
+ raise TypeError("Builder type mismatch.")
x509_revoked = self._lib.X509_REVOKED_new()
self.openssl_assert(x509_revoked != self._ffi.NULL)
@@ -989,21 +1228,16 @@ def create_x509_revoked_certificate(self, builder):
x509_revoked, serial_number
)
self.openssl_assert(res == 1)
- rev_date = self._lib.ASN1_TIME_set(
- self._ffi.NULL,
- calendar.timegm(builder._revocation_date.timetuple())
- )
- self.openssl_assert(rev_date != self._ffi.NULL)
- rev_date = self._ffi.gc(rev_date, self._lib.ASN1_TIME_free)
+ rev_date = self._create_asn1_time(builder._revocation_date)
res = self._lib.X509_REVOKED_set_revocationDate(x509_revoked, rev_date)
self.openssl_assert(res == 1)
# add CRL entry extensions
self._create_x509_extensions(
extensions=builder._extensions,
- handlers=_CRL_ENTRY_EXTENSION_ENCODE_HANDLERS,
+ handlers=self._crl_entry_extension_encode_handlers,
x509_obj=x509_revoked,
add_func=self._lib.X509_REVOKED_add_ext,
- gc=True
+ gc=True,
)
return _RevokedCertificate(self, None, x509_revoked)
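The two hunks above route CRL and revoked-certificate timestamps through the shared _create_asn1_time helper and switch the extension handlers to instance attributes. For orientation, a minimal sketch of the public x509 builder API that ends up in create_x509_crl / create_x509_revoked_certificate (key size, names and dates are illustrative only, not part of this change):

    import datetime
    from cryptography import x509
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes, serialization
    from cryptography.hazmat.primitives.asymmetric import rsa
    from cryptography.x509.oid import NameOID

    issuer_key = rsa.generate_private_key(65537, 2048, default_backend())
    now = datetime.datetime.utcnow()
    revoked = (
        x509.RevokedCertificateBuilder()
        .serial_number(12345)
        .revocation_date(now)
        .build(default_backend())
    )
    crl = (
        x509.CertificateRevocationListBuilder()
        .issuer_name(x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, u"Example CA")]))
        .last_update(now)
        .next_update(now + datetime.timedelta(days=7))
        .add_revoked_certificate(revoked)
        .sign(issuer_key, hashes.SHA256(), default_backend())
    )
    pem_crl = crl.public_bytes(serialization.Encoding.PEM)  # PEM-encoded CRL bytes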
@@ -1044,7 +1278,8 @@ def load_pem_parameters(self, data):
mem_bio = self._bytes_to_bio(data)
# only DH is supported currently
dh_cdata = self._lib.PEM_read_bio_DHparams(
- mem_bio.bio, self._ffi.NULL, self._ffi.NULL, self._ffi.NULL)
+ mem_bio.bio, self._ffi.NULL, self._ffi.NULL, self._ffi.NULL
+ )
if dh_cdata != self._ffi.NULL:
dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free)
return _DHParameters(self, dh_cdata)
@@ -1109,9 +1344,7 @@ def load_der_public_key(self, data):
def load_der_parameters(self, data):
mem_bio = self._bytes_to_bio(data)
- dh_cdata = self._lib.d2i_DHparams_bio(
- mem_bio.bio, self._ffi.NULL
- )
+ dh_cdata = self._lib.d2i_DHparams_bio(mem_bio.bio, self._ffi.NULL)
if dh_cdata != self._ffi.NULL:
dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free)
return _DHParameters(self, dh_cdata)
@@ -1136,7 +1369,10 @@ def load_pem_x509_certificate(self, data):
)
if x509 == self._ffi.NULL:
self._consume_errors()
- raise ValueError("Unable to load certificate")
+ raise ValueError(
+ "Unable to load certificate. See https://cryptography.io/en/la"
+ "test/faq/#why-can-t-i-import-my-pem-file for more details."
+ )
x509 = self._ffi.gc(x509, self._lib.X509_free)
return _Certificate(self, x509)
@@ -1158,7 +1394,10 @@ def load_pem_x509_crl(self, data):
)
if x509_crl == self._ffi.NULL:
self._consume_errors()
- raise ValueError("Unable to load CRL")
+ raise ValueError(
+ "Unable to load CRL. See https://cryptography.io/en/la"
+ "test/faq/#why-can-t-i-import-my-pem-file for more details."
+ )
x509_crl = self._ffi.gc(x509_crl, self._lib.X509_CRL_free)
return _CertificateRevocationList(self, x509_crl)
@@ -1180,7 +1419,10 @@ def load_pem_x509_csr(self, data):
)
if x509_req == self._ffi.NULL:
self._consume_errors()
- raise ValueError("Unable to load request")
+ raise ValueError(
+ "Unable to load request. See https://cryptography.io/en/la"
+ "test/faq/#why-can-t-i-import-my-pem-file for more details."
+ )
x509_req = self._ffi.gc(x509_req, self._lib.X509_REQ_free)
return _CertificateSigningRequest(self, x509_req)
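The three PEM loaders above now point at the project FAQ when parsing fails. A short sketch of the calling side, assuming cert.pem is a placeholder path:

    from cryptography import x509
    from cryptography.hazmat.backends import default_backend

    with open("cert.pem", "rb") as f:  # placeholder path
        pem_data = f.read()
    try:
        cert = x509.load_pem_x509_certificate(pem_data, default_backend())
    except ValueError as exc:
        # Passing a key, CSR or DER blob here now fails with the FAQ-linked message.
        print(exc)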
@@ -1198,13 +1440,11 @@ def load_der_x509_csr(self, data):
def _load_key(self, openssl_read_func, convert_func, data, password):
mem_bio = self._bytes_to_bio(data)
- if password is not None and not isinstance(password, bytes):
- raise TypeError("Password must be bytes")
-
userdata = self._ffi.new("CRYPTOGRAPHY_PASSWORD_DATA *")
if password is not None:
- password_buf = self._ffi.new("char []", password)
- userdata.password = password_buf
+ utils._check_byteslike("password", password)
+ password_ptr = self._ffi.from_buffer(password)
+ userdata.password = password_ptr
userdata.length = len(password)
evp_pkey = openssl_read_func(
@@ -1227,7 +1467,7 @@ def _load_key(self, openssl_read_func, convert_func, data, password):
else:
assert userdata.error == -2
raise ValueError(
- "Passwords longer than {0} bytes are not supported "
+ "Passwords longer than {} bytes are not supported "
"by this backend.".format(userdata.maxsize - 1)
)
else:
@@ -1237,12 +1477,12 @@ def _load_key(self, openssl_read_func, convert_func, data, password):
if password is not None and userdata.called == 0:
raise TypeError(
- "Password was given but private key is not encrypted.")
+ "Password was given but private key is not encrypted."
+ )
assert (
- (password is not None and userdata.called == 1) or
- password is None
- )
+ password is not None and userdata.called == 1
+ ) or password is None
return convert_func(evp_pkey)
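_load_key now accepts any bytes-like password via utils._check_byteslike and ffi.from_buffer, so a caller can hand in a mutable buffer and wipe it afterwards. A sketch, with encrypted_key.pem as a placeholder path:

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import serialization

    password = bytearray(b"correct horse")          # mutable, can be zeroed after use
    with open("encrypted_key.pem", "rb") as f:      # placeholder path
        private_key = serialization.load_pem_private_key(
            f.read(), password, default_backend()
        )
    for i in range(len(password)):                  # best-effort wipe of the passphrase
        password[i] = 0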
@@ -1252,32 +1492,28 @@ def _handle_key_loading_error(self):
if not errors:
raise ValueError("Could not deserialize key data.")
- elif (
- errors[0]._lib_reason_match(
- self._lib.ERR_LIB_EVP, self._lib.EVP_R_BAD_DECRYPT
- ) or errors[0]._lib_reason_match(
- self._lib.ERR_LIB_PKCS12,
- self._lib.PKCS12_R_PKCS12_CIPHERFINAL_ERROR
- )
+ elif errors[0]._lib_reason_match(
+ self._lib.ERR_LIB_EVP, self._lib.EVP_R_BAD_DECRYPT
+ ) or errors[0]._lib_reason_match(
+ self._lib.ERR_LIB_PKCS12,
+ self._lib.PKCS12_R_PKCS12_CIPHERFINAL_ERROR,
):
raise ValueError("Bad decrypt. Incorrect password?")
- elif (
- errors[0]._lib_reason_match(
- self._lib.ERR_LIB_EVP, self._lib.EVP_R_UNKNOWN_PBE_ALGORITHM
- ) or errors[0]._lib_reason_match(
- self._lib.ERR_LIB_PEM, self._lib.PEM_R_UNSUPPORTED_ENCRYPTION
- )
+ elif errors[0]._lib_reason_match(
+ self._lib.ERR_LIB_EVP, self._lib.EVP_R_UNKNOWN_PBE_ALGORITHM
+ ) or errors[0]._lib_reason_match(
+ self._lib.ERR_LIB_PEM, self._lib.PEM_R_UNSUPPORTED_ENCRYPTION
):
raise UnsupportedAlgorithm(
"PEM data is encrypted with an unsupported cipher",
- _Reasons.UNSUPPORTED_CIPHER
+ _Reasons.UNSUPPORTED_CIPHER,
)
elif any(
error._lib_reason_match(
self._lib.ERR_LIB_EVP,
- self._lib.EVP_R_UNSUPPORTED_PRIVATE_KEY_ALGORITHM
+ self._lib.EVP_R_UNSUPPORTED_PRIVATE_KEY_ALGORITHM,
)
for error in errors
):
@@ -1297,21 +1533,14 @@ def elliptic_curve_supported(self, curve):
except UnsupportedAlgorithm:
curve_nid = self._lib.NID_undef
- ctx = self._lib.EC_GROUP_new_by_curve_name(curve_nid)
+ group = self._lib.EC_GROUP_new_by_curve_name(curve_nid)
- if ctx == self._ffi.NULL:
- errors = self._consume_errors()
- self.openssl_assert(
- curve_nid == self._lib.NID_undef or
- errors[0]._lib_reason_match(
- self._lib.ERR_LIB_EC,
- self._lib.EC_R_UNKNOWN_GROUP
- )
- )
+ if group == self._ffi.NULL:
+ self._consume_errors()
return False
else:
self.openssl_assert(curve_nid != self._lib.NID_undef)
- self._lib.EC_GROUP_free(ctx)
+ self._lib.EC_GROUP_free(group)
return True
def elliptic_curve_signature_algorithm_supported(
@@ -1329,11 +1558,7 @@ def generate_elliptic_curve_private_key(self, curve):
"""
if self.elliptic_curve_supported(curve):
- curve_nid = self._elliptic_curve_to_nid(curve)
-
- ec_cdata = self._lib.EC_KEY_new_by_curve_name(curve_nid)
- self.openssl_assert(ec_cdata != self._ffi.NULL)
- ec_cdata = self._ffi.gc(ec_cdata, self._lib.EC_KEY_free)
+ ec_cdata = self._ec_key_new_by_curve(curve)
res = self._lib.EC_KEY_generate_key(ec_cdata)
self.openssl_assert(res == 1)
@@ -1343,50 +1568,60 @@ def generate_elliptic_curve_private_key(self, curve):
return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey)
else:
raise UnsupportedAlgorithm(
- "Backend object does not support {0}.".format(curve.name),
- _Reasons.UNSUPPORTED_ELLIPTIC_CURVE
+ "Backend object does not support {}.".format(curve.name),
+ _Reasons.UNSUPPORTED_ELLIPTIC_CURVE,
)
def load_elliptic_curve_private_numbers(self, numbers):
public = numbers.public_numbers
- curve_nid = self._elliptic_curve_to_nid(public.curve)
-
- ec_cdata = self._lib.EC_KEY_new_by_curve_name(curve_nid)
- self.openssl_assert(ec_cdata != self._ffi.NULL)
- ec_cdata = self._ffi.gc(ec_cdata, self._lib.EC_KEY_free)
-
- ec_cdata = self._ec_key_set_public_key_affine_coordinates(
- ec_cdata, public.x, public.y)
+ ec_cdata = self._ec_key_new_by_curve(public.curve)
private_value = self._ffi.gc(
- self._int_to_bn(numbers.private_value), self._lib.BN_free
+ self._int_to_bn(numbers.private_value), self._lib.BN_clear_free
)
res = self._lib.EC_KEY_set_private_key(ec_cdata, private_value)
self.openssl_assert(res == 1)
+
+ ec_cdata = self._ec_key_set_public_key_affine_coordinates(
+ ec_cdata, public.x, public.y
+ )
+
evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata)
return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey)
def load_elliptic_curve_public_numbers(self, numbers):
- curve_nid = self._elliptic_curve_to_nid(numbers.curve)
-
- ec_cdata = self._lib.EC_KEY_new_by_curve_name(curve_nid)
- self.openssl_assert(ec_cdata != self._ffi.NULL)
- ec_cdata = self._ffi.gc(ec_cdata, self._lib.EC_KEY_free)
-
+ ec_cdata = self._ec_key_new_by_curve(numbers.curve)
ec_cdata = self._ec_key_set_public_key_affine_coordinates(
- ec_cdata, numbers.x, numbers.y)
+ ec_cdata, numbers.x, numbers.y
+ )
evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata)
return _EllipticCurvePublicKey(self, ec_cdata, evp_pkey)
- def derive_elliptic_curve_private_key(self, private_value, curve):
- curve_nid = self._elliptic_curve_to_nid(curve)
+ def load_elliptic_curve_public_bytes(self, curve, point_bytes):
+ ec_cdata = self._ec_key_new_by_curve(curve)
+ group = self._lib.EC_KEY_get0_group(ec_cdata)
+ self.openssl_assert(group != self._ffi.NULL)
+ point = self._lib.EC_POINT_new(group)
+ self.openssl_assert(point != self._ffi.NULL)
+ point = self._ffi.gc(point, self._lib.EC_POINT_free)
+ with self._tmp_bn_ctx() as bn_ctx:
+ res = self._lib.EC_POINT_oct2point(
+ group, point, point_bytes, len(point_bytes), bn_ctx
+ )
+ if res != 1:
+ self._consume_errors()
+ raise ValueError("Invalid public bytes for the given curve")
- ec_cdata = self._lib.EC_KEY_new_by_curve_name(curve_nid)
- self.openssl_assert(ec_cdata != self._ffi.NULL)
- ec_cdata = self._ffi.gc(ec_cdata, self._lib.EC_KEY_free)
+ res = self._lib.EC_KEY_set_public_key(ec_cdata, point)
+ self.openssl_assert(res == 1)
+ evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata)
+ return _EllipticCurvePublicKey(self, ec_cdata, evp_pkey)
+
+ def derive_elliptic_curve_private_key(self, private_value, curve):
+ ec_cdata = self._ec_key_new_by_curve(curve)
get_func, group = self._ec_key_determine_group_get_func(ec_cdata)
@@ -1395,11 +1630,12 @@ def derive_elliptic_curve_private_key(self, private_value, curve):
point = self._ffi.gc(point, self._lib.EC_POINT_free)
value = self._int_to_bn(private_value)
- value = self._ffi.gc(value, self._lib.BN_free)
+ value = self._ffi.gc(value, self._lib.BN_clear_free)
with self._tmp_bn_ctx() as bn_ctx:
- res = self._lib.EC_POINT_mul(group, point, value, self._ffi.NULL,
- self._ffi.NULL, bn_ctx)
+ res = self._lib.EC_POINT_mul(
+ group, point, value, self._ffi.NULL, self._ffi.NULL, bn_ctx
+ )
self.openssl_assert(res == 1)
bn_x = self._lib.BN_CTX_get(bn_ctx)
@@ -1410,18 +1646,176 @@ def derive_elliptic_curve_private_key(self, private_value, curve):
res = self._lib.EC_KEY_set_public_key(ec_cdata, point)
self.openssl_assert(res == 1)
- res = self._lib.EC_KEY_set_private_key(
- ec_cdata, self._int_to_bn(private_value))
+ private = self._int_to_bn(private_value)
+ private = self._ffi.gc(private, self._lib.BN_clear_free)
+ res = self._lib.EC_KEY_set_private_key(ec_cdata, private)
self.openssl_assert(res == 1)
evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata)
return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey)
+ def _ec_key_new_by_curve(self, curve):
+ curve_nid = self._elliptic_curve_to_nid(curve)
+ return self._ec_key_new_by_curve_nid(curve_nid)
+
+ def _ec_key_new_by_curve_nid(self, curve_nid):
+ ec_cdata = self._lib.EC_KEY_new_by_curve_name(curve_nid)
+ self.openssl_assert(ec_cdata != self._ffi.NULL)
+ # Setting the ASN.1 flag to OPENSSL_EC_NAMED_CURVE is
+ # only necessary on OpenSSL 1.0.2t/u. Once we drop support for 1.0.2
+ # we can remove this as it's done automatically when getting an EC_KEY
+ # from new_by_curve_name
+ # CRYPTOGRAPHY_OPENSSL_102U_OR_GREATER
+ self._lib.EC_KEY_set_asn1_flag(
+ ec_cdata, backend._lib.OPENSSL_EC_NAMED_CURVE
+ )
+ return self._ffi.gc(ec_cdata, self._lib.EC_KEY_free)
+
+ def load_der_ocsp_request(self, data):
+ mem_bio = self._bytes_to_bio(data)
+ request = self._lib.d2i_OCSP_REQUEST_bio(mem_bio.bio, self._ffi.NULL)
+ if request == self._ffi.NULL:
+ self._consume_errors()
+ raise ValueError("Unable to load OCSP request")
+
+ request = self._ffi.gc(request, self._lib.OCSP_REQUEST_free)
+ return _OCSPRequest(self, request)
+
+ def load_der_ocsp_response(self, data):
+ mem_bio = self._bytes_to_bio(data)
+ response = self._lib.d2i_OCSP_RESPONSE_bio(mem_bio.bio, self._ffi.NULL)
+ if response == self._ffi.NULL:
+ self._consume_errors()
+ raise ValueError("Unable to load OCSP response")
+
+ response = self._ffi.gc(response, self._lib.OCSP_RESPONSE_free)
+ return _OCSPResponse(self, response)
+
+ def create_ocsp_request(self, builder):
+ ocsp_req = self._lib.OCSP_REQUEST_new()
+ self.openssl_assert(ocsp_req != self._ffi.NULL)
+ ocsp_req = self._ffi.gc(ocsp_req, self._lib.OCSP_REQUEST_free)
+ cert, issuer, algorithm = builder._request
+ evp_md = self._evp_md_non_null_from_algorithm(algorithm)
+ certid = self._lib.OCSP_cert_to_id(evp_md, cert._x509, issuer._x509)
+ self.openssl_assert(certid != self._ffi.NULL)
+ onereq = self._lib.OCSP_request_add0_id(ocsp_req, certid)
+ self.openssl_assert(onereq != self._ffi.NULL)
+ self._create_x509_extensions(
+ extensions=builder._extensions,
+ handlers=self._ocsp_request_extension_encode_handlers,
+ x509_obj=ocsp_req,
+ add_func=self._lib.OCSP_REQUEST_add_ext,
+ gc=True,
+ )
+ return _OCSPRequest(self, ocsp_req)
+
+ def _create_ocsp_basic_response(self, builder, private_key, algorithm):
+ self._x509_check_signature_params(private_key, algorithm)
+
+ basic = self._lib.OCSP_BASICRESP_new()
+ self.openssl_assert(basic != self._ffi.NULL)
+ basic = self._ffi.gc(basic, self._lib.OCSP_BASICRESP_free)
+ evp_md = self._evp_md_non_null_from_algorithm(
+ builder._response._algorithm
+ )
+ certid = self._lib.OCSP_cert_to_id(
+ evp_md,
+ builder._response._cert._x509,
+ builder._response._issuer._x509,
+ )
+ self.openssl_assert(certid != self._ffi.NULL)
+ certid = self._ffi.gc(certid, self._lib.OCSP_CERTID_free)
+ if builder._response._revocation_reason is None:
+ reason = -1
+ else:
+ reason = _CRL_ENTRY_REASON_ENUM_TO_CODE[
+ builder._response._revocation_reason
+ ]
+ if builder._response._revocation_time is None:
+ rev_time = self._ffi.NULL
+ else:
+ rev_time = self._create_asn1_time(
+ builder._response._revocation_time
+ )
+
+ next_update = self._ffi.NULL
+ if builder._response._next_update is not None:
+ next_update = self._create_asn1_time(
+ builder._response._next_update
+ )
+
+ this_update = self._create_asn1_time(builder._response._this_update)
+
+ res = self._lib.OCSP_basic_add1_status(
+ basic,
+ certid,
+ builder._response._cert_status.value,
+ reason,
+ rev_time,
+ this_update,
+ next_update,
+ )
+ self.openssl_assert(res != self._ffi.NULL)
+ # okay, now sign the basic structure
+ evp_md = self._evp_md_x509_null_if_eddsa(private_key, algorithm)
+ responder_cert, responder_encoding = builder._responder_id
+ flags = self._lib.OCSP_NOCERTS
+ if responder_encoding is ocsp.OCSPResponderEncoding.HASH:
+ flags |= self._lib.OCSP_RESPID_KEY
+
+ if builder._certs is not None:
+ for cert in builder._certs:
+ res = self._lib.OCSP_basic_add1_cert(basic, cert._x509)
+ self.openssl_assert(res == 1)
+
+ self._create_x509_extensions(
+ extensions=builder._extensions,
+ handlers=self._ocsp_basicresp_extension_encode_handlers,
+ x509_obj=basic,
+ add_func=self._lib.OCSP_BASICRESP_add_ext,
+ gc=True,
+ )
+
+ res = self._lib.OCSP_basic_sign(
+ basic,
+ responder_cert._x509,
+ private_key._evp_pkey,
+ evp_md,
+ self._ffi.NULL,
+ flags,
+ )
+ if res != 1:
+ errors = self._consume_errors_with_text()
+ raise ValueError(
+ "Error while signing. responder_cert must be signed "
+ "by private_key",
+ errors,
+ )
+
+ return basic
+
+ def create_ocsp_response(
+ self, response_status, builder, private_key, algorithm
+ ):
+ if response_status is ocsp.OCSPResponseStatus.SUCCESSFUL:
+ basic = self._create_ocsp_basic_response(
+ builder, private_key, algorithm
+ )
+ else:
+ basic = self._ffi.NULL
+
+ ocsp_resp = self._lib.OCSP_response_create(
+ response_status.value, basic
+ )
+ self.openssl_assert(ocsp_resp != self._ffi.NULL)
+ ocsp_resp = self._ffi.gc(ocsp_resp, self._lib.OCSP_RESPONSE_free)
+ return _OCSPResponse(self, ocsp_resp)
+
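The OCSP request/response plumbing added above is driven by the builders in cryptography.x509.ocsp. A request-side sketch, with cert.pem and issuer.pem as placeholder paths:

    from cryptography import x509
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes, serialization
    from cryptography.x509 import ocsp

    with open("cert.pem", "rb") as f:        # placeholder path
        cert = x509.load_pem_x509_certificate(f.read(), default_backend())
    with open("issuer.pem", "rb") as f:      # placeholder path
        issuer = x509.load_pem_x509_certificate(f.read(), default_backend())

    request = (
        ocsp.OCSPRequestBuilder()
        .add_certificate(cert, issuer, hashes.SHA1())  # SHA1 is the conventional CertID hash
        .build()
    )
    der_request = request.public_bytes(serialization.Encoding.DER)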
def elliptic_curve_exchange_algorithm_supported(self, algorithm, curve):
- return (
- self.elliptic_curve_supported(curve) and
- isinstance(algorithm, ec.ECDH)
+ return self.elliptic_curve_supported(curve) and isinstance(
+ algorithm, ec.ECDH
)
def _ec_cdata_to_evp_pkey(self, ec_cdata):
@@ -1435,18 +1829,15 @@ def _elliptic_curve_to_nid(self, curve):
Get the NID for a curve name.
"""
- curve_aliases = {
- "secp192r1": "prime192v1",
- "secp256r1": "prime256v1"
- }
+ curve_aliases = {"secp192r1": "prime192v1", "secp256r1": "prime256v1"}
curve_name = curve_aliases.get(curve.name, curve.name)
curve_nid = self._lib.OBJ_sn2nid(curve_name.encode())
if curve_nid == self._lib.NID_undef:
raise UnsupportedAlgorithm(
- "{0} is not a supported elliptic curve".format(curve.name),
- _Reasons.UNSUPPORTED_ELLIPTIC_CURVE
+ "{} is not a supported elliptic curve".format(curve.name),
+ _Reasons.UNSUPPORTED_ELLIPTIC_CURVE,
)
return curve_nid
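Curve-name resolution above still maps secp192r1/secp256r1 to OpenSSL's prime192v1/prime256v1 short names before the NID lookup. A small sketch of the public EC API that lands in these helpers and in derive_elliptic_curve_private_key shown earlier in this diff:

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives.asymmetric import ec

    private_key = ec.generate_private_key(ec.SECP256R1(), default_backend())
    derived = ec.derive_private_key(
        private_key.private_numbers().private_value,
        ec.SECP256R1(),
        default_backend(),
    )
    assert derived.private_numbers() == private_key.private_numbers()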
@@ -1509,33 +1900,32 @@ def _ec_key_set_public_key_affine_coordinates(self, ctx, x, y):
return ctx
- def _private_key_bytes(self, encoding, format, encryption_algorithm,
- evp_pkey, cdata):
+ def _private_key_bytes(
+ self, encoding, format, encryption_algorithm, key, evp_pkey, cdata
+ ):
+ # validate argument types
+ if not isinstance(encoding, serialization.Encoding):
+ raise TypeError("encoding must be an item from the Encoding enum")
if not isinstance(format, serialization.PrivateFormat):
raise TypeError(
"format must be an item from the PrivateFormat enum"
)
-
- if not isinstance(encryption_algorithm,
- serialization.KeySerializationEncryption):
+ if not isinstance(
+ encryption_algorithm, serialization.KeySerializationEncryption
+ ):
raise TypeError(
"Encryption algorithm must be a KeySerializationEncryption "
"instance"
)
+ # validate password
if isinstance(encryption_algorithm, serialization.NoEncryption):
password = b""
- passlen = 0
- evp_cipher = self._ffi.NULL
- elif isinstance(encryption_algorithm,
- serialization.BestAvailableEncryption):
- # This is a curated value that we will update over time.
- evp_cipher = self._lib.EVP_get_cipherbyname(
- b"aes-256-cbc"
- )
+ elif isinstance(
+ encryption_algorithm, serialization.BestAvailableEncryption
+ ):
password = encryption_algorithm.password
- passlen = len(password)
- if passlen > 1023:
+ if len(password) > 1023:
raise ValueError(
"Passwords longer than 1023 bytes are not supported by "
"this backend"
@@ -1543,161 +1933,156 @@ def _private_key_bytes(self, encoding, format, encryption_algorithm,
else:
raise ValueError("Unsupported encryption type")
- key_type = self._lib.EVP_PKEY_id(evp_pkey)
- if encoding is serialization.Encoding.PEM:
- if format is serialization.PrivateFormat.PKCS8:
+ # PKCS8 + PEM/DER
+ if format is serialization.PrivateFormat.PKCS8:
+ if encoding is serialization.Encoding.PEM:
write_bio = self._lib.PEM_write_bio_PKCS8PrivateKey
- key = evp_pkey
+ elif encoding is serialization.Encoding.DER:
+ write_bio = self._lib.i2d_PKCS8PrivateKey_bio
else:
- assert format is serialization.PrivateFormat.TraditionalOpenSSL
+ raise ValueError("Unsupported encoding for PKCS8")
+ return self._private_key_bytes_via_bio(
+ write_bio, evp_pkey, password
+ )
+
+ # TraditionalOpenSSL + PEM/DER
+ if format is serialization.PrivateFormat.TraditionalOpenSSL:
+ if self._fips_enabled and not isinstance(
+ encryption_algorithm, serialization.NoEncryption
+ ):
+ raise ValueError(
+ "Encrypted traditional OpenSSL format is not "
+ "supported in FIPS mode."
+ )
+ key_type = self._lib.EVP_PKEY_id(evp_pkey)
+
+ if encoding is serialization.Encoding.PEM:
if key_type == self._lib.EVP_PKEY_RSA:
write_bio = self._lib.PEM_write_bio_RSAPrivateKey
elif key_type == self._lib.EVP_PKEY_DSA:
write_bio = self._lib.PEM_write_bio_DSAPrivateKey
- else:
- assert key_type == self._lib.EVP_PKEY_EC
+ elif key_type == self._lib.EVP_PKEY_EC:
write_bio = self._lib.PEM_write_bio_ECPrivateKey
+ else:
+ raise ValueError(
+ "Unsupported key type for TraditionalOpenSSL"
+ )
+ return self._private_key_bytes_via_bio(
+ write_bio, cdata, password
+ )
- key = cdata
- elif encoding is serialization.Encoding.DER:
- if format is serialization.PrivateFormat.TraditionalOpenSSL:
- if not isinstance(
- encryption_algorithm, serialization.NoEncryption
- ):
+ if encoding is serialization.Encoding.DER:
+ if password:
raise ValueError(
"Encryption is not supported for DER encoded "
"traditional OpenSSL keys"
)
+ if key_type == self._lib.EVP_PKEY_RSA:
+ write_bio = self._lib.i2d_RSAPrivateKey_bio
+ elif key_type == self._lib.EVP_PKEY_EC:
+ write_bio = self._lib.i2d_ECPrivateKey_bio
+ elif key_type == self._lib.EVP_PKEY_DSA:
+ write_bio = self._lib.i2d_DSAPrivateKey_bio
+ else:
+ raise ValueError(
+ "Unsupported key type for TraditionalOpenSSL"
+ )
+ return self._bio_func_output(write_bio, cdata)
- return self._private_key_bytes_traditional_der(key_type, cdata)
- else:
- assert format is serialization.PrivateFormat.PKCS8
- write_bio = self._lib.i2d_PKCS8PrivateKey_bio
- key = evp_pkey
+ raise ValueError("Unsupported encoding for TraditionalOpenSSL")
+
+ # OpenSSH + PEM
+ if format is serialization.PrivateFormat.OpenSSH:
+ if encoding is serialization.Encoding.PEM:
+ return ssh.serialize_ssh_private_key(key, password)
+
+ raise ValueError(
+ "OpenSSH private key format can only be used"
+ " with PEM encoding"
+ )
+
+ # Anything that key-specific code was supposed to handle earlier,
+ # like Raw.
+ raise ValueError("format is invalid with this key")
+
+ def _private_key_bytes_via_bio(self, write_bio, evp_pkey, password):
+ if not password:
+ evp_cipher = self._ffi.NULL
else:
- raise TypeError("encoding must be an item from the Encoding enum")
+ # This is a curated value that we will update over time.
+ evp_cipher = self._lib.EVP_get_cipherbyname(b"aes-256-cbc")
- bio = self._create_mem_bio_gc()
- res = write_bio(
- bio,
- key,
+ return self._bio_func_output(
+ write_bio,
+ evp_pkey,
evp_cipher,
password,
- passlen,
+ len(password),
+ self._ffi.NULL,
self._ffi.NULL,
- self._ffi.NULL
)
- self.openssl_assert(res == 1)
- return self._read_mem_bio(bio)
-
- def _private_key_bytes_traditional_der(self, key_type, cdata):
- if key_type == self._lib.EVP_PKEY_RSA:
- write_bio = self._lib.i2d_RSAPrivateKey_bio
- elif key_type == self._lib.EVP_PKEY_EC:
- write_bio = self._lib.i2d_ECPrivateKey_bio
- else:
- self.openssl_assert(key_type == self._lib.EVP_PKEY_DSA)
- write_bio = self._lib.i2d_DSAPrivateKey_bio
+ def _bio_func_output(self, write_bio, *args):
bio = self._create_mem_bio_gc()
- res = write_bio(bio, cdata)
+ res = write_bio(bio, *args)
self.openssl_assert(res == 1)
return self._read_mem_bio(bio)
def _public_key_bytes(self, encoding, format, key, evp_pkey, cdata):
if not isinstance(encoding, serialization.Encoding):
raise TypeError("encoding must be an item from the Encoding enum")
+ if not isinstance(format, serialization.PublicFormat):
+ raise TypeError(
+ "format must be an item from the PublicFormat enum"
+ )
- if (
- format is serialization.PublicFormat.OpenSSH or
- encoding is serialization.Encoding.OpenSSH
- ):
- if (
- format is not serialization.PublicFormat.OpenSSH or
- encoding is not serialization.Encoding.OpenSSH
- ):
- raise ValueError(
- "OpenSSH format must be used with OpenSSH encoding"
- )
- return self._openssh_public_key_bytes(key)
- elif format is serialization.PublicFormat.SubjectPublicKeyInfo:
+ # SubjectPublicKeyInfo + PEM/DER
+ if format is serialization.PublicFormat.SubjectPublicKeyInfo:
if encoding is serialization.Encoding.PEM:
write_bio = self._lib.PEM_write_bio_PUBKEY
- else:
- assert encoding is serialization.Encoding.DER
+ elif encoding is serialization.Encoding.DER:
write_bio = self._lib.i2d_PUBKEY_bio
+ else:
+ raise ValueError(
+ "SubjectPublicKeyInfo works only with PEM or DER encoding"
+ )
+ return self._bio_func_output(write_bio, evp_pkey)
- key = evp_pkey
- elif format is serialization.PublicFormat.PKCS1:
+ # PKCS1 + PEM/DER
+ if format is serialization.PublicFormat.PKCS1:
# Only RSA is supported here.
- assert self._lib.EVP_PKEY_id(evp_pkey) == self._lib.EVP_PKEY_RSA
+ key_type = self._lib.EVP_PKEY_id(evp_pkey)
+ if key_type != self._lib.EVP_PKEY_RSA:
+ raise ValueError("PKCS1 format is supported only for RSA keys")
+
if encoding is serialization.Encoding.PEM:
write_bio = self._lib.PEM_write_bio_RSAPublicKey
- else:
- assert encoding is serialization.Encoding.DER
+ elif encoding is serialization.Encoding.DER:
write_bio = self._lib.i2d_RSAPublicKey_bio
+ else:
+ raise ValueError("PKCS1 works only with PEM or DER encoding")
+ return self._bio_func_output(write_bio, cdata)
- key = cdata
- else:
- raise TypeError(
- "format must be an item from the PublicFormat enum"
- )
-
- bio = self._create_mem_bio_gc()
- res = write_bio(bio, key)
- self.openssl_assert(res == 1)
- return self._read_mem_bio(bio)
+ # OpenSSH + OpenSSH
+ if format is serialization.PublicFormat.OpenSSH:
+ if encoding is serialization.Encoding.OpenSSH:
+ return ssh.serialize_ssh_public_key(key)
- def _openssh_public_key_bytes(self, key):
- if isinstance(key, rsa.RSAPublicKey):
- public_numbers = key.public_numbers()
- return b"ssh-rsa " + base64.b64encode(
- serialization._ssh_write_string(b"ssh-rsa") +
- serialization._ssh_write_mpint(public_numbers.e) +
- serialization._ssh_write_mpint(public_numbers.n)
- )
- elif isinstance(key, dsa.DSAPublicKey):
- public_numbers = key.public_numbers()
- parameter_numbers = public_numbers.parameter_numbers
- return b"ssh-dss " + base64.b64encode(
- serialization._ssh_write_string(b"ssh-dss") +
- serialization._ssh_write_mpint(parameter_numbers.p) +
- serialization._ssh_write_mpint(parameter_numbers.q) +
- serialization._ssh_write_mpint(parameter_numbers.g) +
- serialization._ssh_write_mpint(public_numbers.y)
- )
- else:
- assert isinstance(key, ec.EllipticCurvePublicKey)
- public_numbers = key.public_numbers()
- try:
- curve_name = {
- ec.SECP256R1: b"nistp256",
- ec.SECP384R1: b"nistp384",
- ec.SECP521R1: b"nistp521",
- }[type(public_numbers.curve)]
- except KeyError:
- raise ValueError(
- "Only SECP256R1, SECP384R1, and SECP521R1 curves are "
- "supported by the SSH public key format"
- )
- return b"ecdsa-sha2-" + curve_name + b" " + base64.b64encode(
- serialization._ssh_write_string(b"ecdsa-sha2-" + curve_name) +
- serialization._ssh_write_string(curve_name) +
- serialization._ssh_write_string(public_numbers.encode_point())
+ raise ValueError(
+ "OpenSSH format must be used with OpenSSH encoding"
)
+ # Anything that key-specific code was supposed to handle earlier,
+ # like Raw, CompressedPoint, UncompressedPoint
+ raise ValueError("format is invalid with this key")
+
def _parameter_bytes(self, encoding, format, cdata):
if encoding is serialization.Encoding.OpenSSH:
- raise TypeError(
- "OpenSSH encoding is not supported"
- )
+ raise TypeError("OpenSSH encoding is not supported")
# Only DH is supported here currently.
q = self._ffi.new("BIGNUM **")
- self._lib.DH_get0_pqg(cdata,
- self._ffi.NULL,
- q,
- self._ffi.NULL)
+ self._lib.DH_get0_pqg(cdata, self._ffi.NULL, q, self._ffi.NULL)
if encoding is serialization.Encoding.PEM:
if q[0] != self._ffi.NULL:
write_bio = self._lib.PEM_write_bio_DHxparams
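The large hunk above reorganises _private_key_bytes and _public_key_bytes around explicit format/encoding branches and the _bio_func_output helper, and hands OpenSSH serialization off to the ssh module. From the caller's side nothing changes; a sketch:

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import serialization
    from cryptography.hazmat.primitives.asymmetric import rsa

    key = rsa.generate_private_key(65537, 2048, default_backend())
    pem_pkcs8 = key.private_bytes(
        serialization.Encoding.PEM,
        serialization.PrivateFormat.PKCS8,
        serialization.BestAvailableEncryption(b"passphrase"),
    )
    openssh_pub = key.public_key().public_bytes(
        serialization.Encoding.OpenSSH,
        serialization.PublicFormat.OpenSSH,
    )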
@@ -1728,10 +2113,7 @@ def generate_dh_parameters(self, generator, key_size):
dh_param_cdata = self._ffi.gc(dh_param_cdata, self._lib.DH_free)
res = self._lib.DH_generate_parameters_ex(
- dh_param_cdata,
- key_size,
- generator,
- self._ffi.NULL
+ dh_param_cdata, key_size, generator, self._ffi.NULL
)
self.openssl_assert(res == 1)
@@ -1755,7 +2137,8 @@ def generate_dh_private_key(self, parameters):
def generate_dh_private_key_and_parameters(self, generator, key_size):
return self.generate_dh_private_key(
- self.generate_dh_parameters(generator, key_size))
+ self.generate_dh_parameters(generator, key_size)
+ )
def load_dh_private_numbers(self, numbers):
parameter_numbers = numbers.public_numbers.parameter_numbers
@@ -1794,12 +2177,10 @@ def load_dh_private_numbers(self, numbers):
# the key to the attacker in exchange for having the full key space
# available. See: https://crypto.stackexchange.com/questions/12961
if codes[0] != 0 and not (
- parameter_numbers.g == 2 and
- codes[0] ^ self._lib.DH_NOT_SUITABLE_GENERATOR == 0
+ parameter_numbers.g == 2
+ and codes[0] ^ self._lib.DH_NOT_SUITABLE_GENERATOR == 0
):
- raise ValueError(
- "DH private numbers did not pass safety checks."
- )
+ raise ValueError("DH private numbers did not pass safety checks.")
evp_pkey = self._dh_cdata_to_evp_pkey(dh_cdata)
@@ -1887,19 +2268,27 @@ def x509_name_bytes(self, name):
return self._ffi.buffer(pp[0], res)[:]
def x25519_load_public_bytes(self, data):
+ # When we drop support for CRYPTOGRAPHY_OPENSSL_LESS_THAN_111 we can
+ # switch this to EVP_PKEY_new_raw_public_key
+ if len(data) != 32:
+ raise ValueError("An X25519 public key is 32 bytes long")
+
evp_pkey = self._create_evp_pkey_gc()
res = self._lib.EVP_PKEY_set_type(evp_pkey, self._lib.NID_X25519)
- backend.openssl_assert(res == 1)
+ self.openssl_assert(res == 1)
res = self._lib.EVP_PKEY_set1_tls_encodedpoint(
evp_pkey, data, len(data)
)
- backend.openssl_assert(res == 1)
+ self.openssl_assert(res == 1)
return _X25519PublicKey(self, evp_pkey)
def x25519_load_private_bytes(self, data):
+ # When we drop support for CRYPTOGRAPHY_OPENSSL_LESS_THAN_111 we can
+ # switch this to EVP_PKEY_new_raw_private_key and drop the
+ # zeroed_bytearray garbage.
# OpenSSL only has facilities for loading PKCS8 formatted private
# keys using the algorithm identifiers specified in
- # https://tools.ietf.org/html/draft-ietf-curdle-pkix-03.
+ # https://tools.ietf.org/html/draft-ietf-curdle-pkix-09.
# This is the standard PKCS8 prefix for a 32 byte X25519 key.
# The form is:
# 0:d=0 hl=2 l= 46 cons: SEQUENCE
@@ -1910,21 +2299,27 @@ def x25519_load_private_bytes(self, data):
# Of course there's a bit more complexity. In reality OCTET STRING
# contains an OCTET STRING of length 32! So the last two bytes here
# are \x04\x20, which is an OCTET STRING of length 32.
+ if len(data) != 32:
+ raise ValueError("An X25519 private key is 32 bytes long")
+
pkcs8_prefix = b'0.\x02\x01\x000\x05\x06\x03+en\x04"\x04 '
- bio = self._bytes_to_bio(pkcs8_prefix + data)
- evp_pkey = backend._lib.d2i_PrivateKey_bio(bio.bio, self._ffi.NULL)
+ with self._zeroed_bytearray(48) as ba:
+ ba[0:16] = pkcs8_prefix
+ ba[16:] = data
+ bio = self._bytes_to_bio(ba)
+ evp_pkey = self._lib.d2i_PrivateKey_bio(bio.bio, self._ffi.NULL)
+
self.openssl_assert(evp_pkey != self._ffi.NULL)
evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
+ self.openssl_assert(
+ self._lib.EVP_PKEY_id(evp_pkey) == self._lib.EVP_PKEY_X25519
+ )
return _X25519PrivateKey(self, evp_pkey)
- def x25519_generate_key(self):
- evp_pkey_ctx = self._lib.EVP_PKEY_CTX_new_id(
- self._lib.NID_X25519, self._ffi.NULL
- )
+ def _evp_pkey_keygen_gc(self, nid):
+ evp_pkey_ctx = self._lib.EVP_PKEY_CTX_new_id(nid, self._ffi.NULL)
self.openssl_assert(evp_pkey_ctx != self._ffi.NULL)
- evp_pkey_ctx = self._ffi.gc(
- evp_pkey_ctx, self._lib.EVP_PKEY_CTX_free
- )
+ evp_pkey_ctx = self._ffi.gc(evp_pkey_ctx, self._lib.EVP_PKEY_CTX_free)
res = self._lib.EVP_PKEY_keygen_init(evp_pkey_ctx)
self.openssl_assert(res == 1)
evp_ppkey = self._ffi.new("EVP_PKEY **")
@@ -1932,25 +2327,368 @@ def x25519_generate_key(self):
self.openssl_assert(res == 1)
self.openssl_assert(evp_ppkey[0] != self._ffi.NULL)
evp_pkey = self._ffi.gc(evp_ppkey[0], self._lib.EVP_PKEY_free)
+ return evp_pkey
+
+ def x25519_generate_key(self):
+ evp_pkey = self._evp_pkey_keygen_gc(self._lib.NID_X25519)
return _X25519PrivateKey(self, evp_pkey)
def x25519_supported(self):
+ if self._fips_enabled:
+ return False
return self._lib.CRYPTOGRAPHY_OPENSSL_110_OR_GREATER
+ def x448_load_public_bytes(self, data):
+ if len(data) != 56:
+ raise ValueError("An X448 public key is 56 bytes long")
+
+ evp_pkey = self._lib.EVP_PKEY_new_raw_public_key(
+ self._lib.NID_X448, self._ffi.NULL, data, len(data)
+ )
+ self.openssl_assert(evp_pkey != self._ffi.NULL)
+ evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
+ return _X448PublicKey(self, evp_pkey)
+
+ def x448_load_private_bytes(self, data):
+ if len(data) != 56:
+ raise ValueError("An X448 private key is 56 bytes long")
+
+ data_ptr = self._ffi.from_buffer(data)
+ evp_pkey = self._lib.EVP_PKEY_new_raw_private_key(
+ self._lib.NID_X448, self._ffi.NULL, data_ptr, len(data)
+ )
+ self.openssl_assert(evp_pkey != self._ffi.NULL)
+ evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
+ return _X448PrivateKey(self, evp_pkey)
+
+ def x448_generate_key(self):
+ evp_pkey = self._evp_pkey_keygen_gc(self._lib.NID_X448)
+ return _X448PrivateKey(self, evp_pkey)
+
+ def x448_supported(self):
+ if self._fips_enabled:
+ return False
+ return not self._lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_111
+
+ def ed25519_supported(self):
+ if self._fips_enabled:
+ return False
+ return not self._lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_111B
+
+ def ed25519_load_public_bytes(self, data):
+ utils._check_bytes("data", data)
+
+ if len(data) != ed25519._ED25519_KEY_SIZE:
+ raise ValueError("An Ed25519 public key is 32 bytes long")
+
+ evp_pkey = self._lib.EVP_PKEY_new_raw_public_key(
+ self._lib.NID_ED25519, self._ffi.NULL, data, len(data)
+ )
+ self.openssl_assert(evp_pkey != self._ffi.NULL)
+ evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
+
+ return _Ed25519PublicKey(self, evp_pkey)
+
+ def ed25519_load_private_bytes(self, data):
+ if len(data) != ed25519._ED25519_KEY_SIZE:
+ raise ValueError("An Ed25519 private key is 32 bytes long")
+
+ utils._check_byteslike("data", data)
+ data_ptr = self._ffi.from_buffer(data)
+ evp_pkey = self._lib.EVP_PKEY_new_raw_private_key(
+ self._lib.NID_ED25519, self._ffi.NULL, data_ptr, len(data)
+ )
+ self.openssl_assert(evp_pkey != self._ffi.NULL)
+ evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
+
+ return _Ed25519PrivateKey(self, evp_pkey)
+
+ def ed25519_generate_key(self):
+ evp_pkey = self._evp_pkey_keygen_gc(self._lib.NID_ED25519)
+ return _Ed25519PrivateKey(self, evp_pkey)
+
+ def ed448_supported(self):
+ if self._fips_enabled:
+ return False
+ return not self._lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_111B
+
+ def ed448_load_public_bytes(self, data):
+ utils._check_bytes("data", data)
+ if len(data) != _ED448_KEY_SIZE:
+ raise ValueError("An Ed448 public key is 57 bytes long")
+
+ evp_pkey = self._lib.EVP_PKEY_new_raw_public_key(
+ self._lib.NID_ED448, self._ffi.NULL, data, len(data)
+ )
+ self.openssl_assert(evp_pkey != self._ffi.NULL)
+ evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
+
+ return _Ed448PublicKey(self, evp_pkey)
+
+ def ed448_load_private_bytes(self, data):
+ utils._check_byteslike("data", data)
+ if len(data) != _ED448_KEY_SIZE:
+ raise ValueError("An Ed448 private key is 57 bytes long")
+
+ data_ptr = self._ffi.from_buffer(data)
+ evp_pkey = self._lib.EVP_PKEY_new_raw_private_key(
+ self._lib.NID_ED448, self._ffi.NULL, data_ptr, len(data)
+ )
+ self.openssl_assert(evp_pkey != self._ffi.NULL)
+ evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
+
+ return _Ed448PrivateKey(self, evp_pkey)
+
+ def ed448_generate_key(self):
+ evp_pkey = self._evp_pkey_keygen_gc(self._lib.NID_ED448)
+ return _Ed448PrivateKey(self, evp_pkey)
+
def derive_scrypt(self, key_material, salt, length, n, r, p):
buf = self._ffi.new("unsigned char[]", length)
+ key_material_ptr = self._ffi.from_buffer(key_material)
res = self._lib.EVP_PBE_scrypt(
- key_material, len(key_material), salt, len(salt), n, r, p,
- scrypt._MEM_LIMIT, buf, length
+ key_material_ptr,
+ len(key_material),
+ salt,
+ len(salt),
+ n,
+ r,
+ p,
+ scrypt._MEM_LIMIT,
+ buf,
+ length,
)
- self.openssl_assert(res == 1)
+ if res != 1:
+ errors = self._consume_errors_with_text()
+ # memory required formula explained here:
+ # https://blog.filippo.io/the-scrypt-parameters/
+ min_memory = 128 * n * r // (1024 ** 2)
+ raise MemoryError(
+ "Not enough memory to derive key. These parameters require"
+ " {} MB of memory.".format(min_memory),
+ errors,
+ )
return self._ffi.buffer(buf)[:]
def aead_cipher_supported(self, cipher):
cipher_name = aead._aead_cipher_name(cipher)
- return (
- self._lib.EVP_get_cipherbyname(cipher_name) != self._ffi.NULL
+ if self._fips_enabled and cipher_name not in self._fips_aead:
+ return False
+ return self._lib.EVP_get_cipherbyname(cipher_name) != self._ffi.NULL
+
+ @contextlib.contextmanager
+ def _zeroed_bytearray(self, length):
+ """
+ This method creates a bytearray, which we copy data into (hopefully
+ also from a mutable buffer that can be dynamically erased!), and then
+ zero when we're done.
+ """
+ ba = bytearray(length)
+ try:
+ yield ba
+ finally:
+ self._zero_data(ba, length)
+
+ def _zero_data(self, data, length):
+ # We clear things this way because at the moment we're not
+ # sure of a better way that can guarantee it overwrites the
+ # memory of a bytearray and doesn't just replace the underlying char *.
+ for i in range(length):
+ data[i] = 0
+
+ @contextlib.contextmanager
+ def _zeroed_null_terminated_buf(self, data):
+ """
+ This method takes bytes, which can be a bytestring or a mutable
+ buffer like a bytearray, and yields a null-terminated version of that
+ data. This is required because PKCS12_parse doesn't take a length with
+ its password char * and ffi.from_buffer doesn't provide null
+ termination. So, to support zeroing the data via bytearray we
+ need to build this ridiculous construct that copies the memory, but
+ zeroes it after use.
+ """
+ if data is None:
+ yield self._ffi.NULL
+ else:
+ data_len = len(data)
+ buf = self._ffi.new("char[]", data_len + 1)
+ self._ffi.memmove(buf, data, data_len)
+ try:
+ yield buf
+ finally:
+ # Cast to a uint8_t * so we can assign by integer
+ self._zero_data(self._ffi.cast("uint8_t *", buf), data_len)
+
+ def load_key_and_certificates_from_pkcs12(self, data, password):
+ if password is not None:
+ utils._check_byteslike("password", password)
+
+ bio = self._bytes_to_bio(data)
+ p12 = self._lib.d2i_PKCS12_bio(bio.bio, self._ffi.NULL)
+ if p12 == self._ffi.NULL:
+ self._consume_errors()
+ raise ValueError("Could not deserialize PKCS12 data")
+
+ p12 = self._ffi.gc(p12, self._lib.PKCS12_free)
+ evp_pkey_ptr = self._ffi.new("EVP_PKEY **")
+ x509_ptr = self._ffi.new("X509 **")
+ sk_x509_ptr = self._ffi.new("Cryptography_STACK_OF_X509 **")
+ with self._zeroed_null_terminated_buf(password) as password_buf:
+ res = self._lib.PKCS12_parse(
+ p12, password_buf, evp_pkey_ptr, x509_ptr, sk_x509_ptr
+ )
+
+ if res == 0:
+ self._consume_errors()
+ raise ValueError("Invalid password or PKCS12 data")
+
+ cert = None
+ key = None
+ additional_certificates = []
+
+ if evp_pkey_ptr[0] != self._ffi.NULL:
+ evp_pkey = self._ffi.gc(evp_pkey_ptr[0], self._lib.EVP_PKEY_free)
+ key = self._evp_pkey_to_private_key(evp_pkey)
+
+ if x509_ptr[0] != self._ffi.NULL:
+ x509 = self._ffi.gc(x509_ptr[0], self._lib.X509_free)
+ cert = _Certificate(self, x509)
+
+ if sk_x509_ptr[0] != self._ffi.NULL:
+ sk_x509 = self._ffi.gc(sk_x509_ptr[0], self._lib.sk_X509_free)
+ num = self._lib.sk_X509_num(sk_x509_ptr[0])
+ for i in range(num):
+ x509 = self._lib.sk_X509_value(sk_x509, i)
+ self.openssl_assert(x509 != self._ffi.NULL)
+ x509 = self._ffi.gc(x509, self._lib.X509_free)
+ additional_certificates.append(_Certificate(self, x509))
+
+ return (key, cert, additional_certificates)
+
+ def serialize_key_and_certificates_to_pkcs12(
+ self, name, key, cert, cas, encryption_algorithm
+ ):
+ password = None
+ if name is not None:
+ utils._check_bytes("name", name)
+
+ if isinstance(encryption_algorithm, serialization.NoEncryption):
+ nid_cert = -1
+ nid_key = -1
+ pkcs12_iter = 0
+ mac_iter = 0
+ elif isinstance(
+ encryption_algorithm, serialization.BestAvailableEncryption
+ ):
+ # PKCS12 encryption is hopeless trash and can never be fixed.
+ # This is the least terrible option.
+ nid_cert = self._lib.NID_pbe_WithSHA1And3_Key_TripleDES_CBC
+ nid_key = self._lib.NID_pbe_WithSHA1And3_Key_TripleDES_CBC
+ # At least we can set this higher than OpenSSL's default
+ pkcs12_iter = 20000
+ # mac_iter chosen for compatibility reasons, see:
+ # https://www.openssl.org/docs/man1.1.1/man3/PKCS12_create.html
+ # Did we mention how lousy PKCS12 encryption is?
+ mac_iter = 1
+ password = encryption_algorithm.password
+ else:
+ raise ValueError("Unsupported key encryption type")
+
+ if cas is None or len(cas) == 0:
+ sk_x509 = self._ffi.NULL
+ else:
+ sk_x509 = self._lib.sk_X509_new_null()
+ sk_x509 = self._ffi.gc(sk_x509, self._lib.sk_X509_free)
+
+ # reverse the list when building the stack so that they're encoded
+ # in the order they were originally provided. it is a mystery
+ for ca in reversed(cas):
+ res = self._lib.sk_X509_push(sk_x509, ca._x509)
+ backend.openssl_assert(res >= 1)
+
+ with self._zeroed_null_terminated_buf(password) as password_buf:
+ with self._zeroed_null_terminated_buf(name) as name_buf:
+ p12 = self._lib.PKCS12_create(
+ password_buf,
+ name_buf,
+ key._evp_pkey if key else self._ffi.NULL,
+ cert._x509 if cert else self._ffi.NULL,
+ sk_x509,
+ nid_key,
+ nid_cert,
+ pkcs12_iter,
+ mac_iter,
+ 0,
+ )
+
+ self.openssl_assert(p12 != self._ffi.NULL)
+ p12 = self._ffi.gc(p12, self._lib.PKCS12_free)
+
+ bio = self._create_mem_bio_gc()
+ res = self._lib.i2d_PKCS12_bio(bio, p12)
+ self.openssl_assert(res > 0)
+ return self._read_mem_bio(bio)
+
+ def poly1305_supported(self):
+ if self._fips_enabled:
+ return False
+ return self._lib.Cryptography_HAS_POLY1305 == 1
+
+ def create_poly1305_ctx(self, key):
+ utils._check_byteslike("key", key)
+ if len(key) != _POLY1305_KEY_SIZE:
+ raise ValueError("A poly1305 key is 32 bytes long")
+
+ return _Poly1305Context(self, key)
+
+ def load_pem_pkcs7_certificates(self, data):
+ utils._check_bytes("data", data)
+ bio = self._bytes_to_bio(data)
+ p7 = self._lib.PEM_read_bio_PKCS7(
+ bio.bio, self._ffi.NULL, self._ffi.NULL, self._ffi.NULL
)
+ if p7 == self._ffi.NULL:
+ self._consume_errors()
+ raise ValueError("Unable to parse PKCS7 data")
+
+ p7 = self._ffi.gc(p7, self._lib.PKCS7_free)
+ return self._load_pkcs7_certificates(p7)
+
+ def load_der_pkcs7_certificates(self, data):
+ utils._check_bytes("data", data)
+ bio = self._bytes_to_bio(data)
+ p7 = self._lib.d2i_PKCS7_bio(bio.bio, self._ffi.NULL)
+ if p7 == self._ffi.NULL:
+ self._consume_errors()
+ raise ValueError("Unable to parse PKCS7 data")
+
+ p7 = self._ffi.gc(p7, self._lib.PKCS7_free)
+ return self._load_pkcs7_certificates(p7)
+
+ def _load_pkcs7_certificates(self, p7):
+ nid = self._lib.OBJ_obj2nid(p7.type)
+ self.openssl_assert(nid != self._lib.NID_undef)
+ if nid != self._lib.NID_pkcs7_signed:
+ raise UnsupportedAlgorithm(
+ "Only basic signed structures are currently supported. NID"
+ " for this data was {}".format(nid),
+ _Reasons.UNSUPPORTED_SERIALIZATION,
+ )
+
+ sk_x509 = p7.d.sign.cert
+ num = self._lib.sk_X509_num(sk_x509)
+ certs = []
+ for i in range(num):
+ x509 = self._lib.sk_X509_value(sk_x509, i)
+ self.openssl_assert(x509 != self._ffi.NULL)
+ res = self._lib.X509_up_ref(x509)
+ # When OpenSSL is less than 1.1.0 up_ref returns the current
+ # refcount. On 1.1.0+ it returns 1 for success.
+ self.openssl_assert(res >= 1)
+ x509 = self._ffi.gc(x509, self._lib.X509_free)
+ certs.append(_Certificate(self, x509))
+
+ return certs
class GetCipherByName(object):
@@ -1963,7 +2701,7 @@ def __call__(self, backend, cipher, mode):
def _get_xts_cipher(backend, cipher, mode):
- cipher_name = "aes-{0}-xts".format(cipher.key_size // 2)
+ cipher_name = "aes-{}-xts".format(cipher.key_size // 2)
return backend._lib.EVP_get_cipherbyname(cipher_name.encode("ascii"))
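Besides the formatting churn, the backend above gains raw-key Ed25519/Ed448/X448 support, scrypt error reporting, PKCS#12 and PKCS#7 loading, and Poly1305. The simplest of these to exercise from the public API is Ed25519 signing; a sketch:

    from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PrivateKey

    private_key = Ed25519PrivateKey.generate()        # goes through ed25519_generate_key above
    signature = private_key.sign(b"payload")
    # verify() returns None on success and raises InvalidSignature otherwise.
    private_key.public_key().verify(signature, b"payload")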
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/ciphers.py b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/ciphers.py
index 8e55e28..171605a 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/ciphers.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/ciphers.py
@@ -17,6 +17,7 @@
class _CipherContext(object):
_ENCRYPT = 1
_DECRYPT = 0
+ _MAX_CHUNK_SIZE = 2 ** 31
def __init__(self, backend, cipher, mode, operation):
self._backend = backend
@@ -40,37 +41,45 @@ def __init__(self, backend, cipher, mode, operation):
adapter = registry[type(cipher), type(mode)]
except KeyError:
raise UnsupportedAlgorithm(
- "cipher {0} in {1} mode is not supported "
+ "cipher {} in {} mode is not supported "
"by this backend.".format(
- cipher.name, mode.name if mode else mode),
- _Reasons.UNSUPPORTED_CIPHER
+ cipher.name, mode.name if mode else mode
+ ),
+ _Reasons.UNSUPPORTED_CIPHER,
)
evp_cipher = adapter(self._backend, cipher, mode)
if evp_cipher == self._backend._ffi.NULL:
- raise UnsupportedAlgorithm(
- "cipher {0} in {1} mode is not supported "
- "by this backend.".format(
- cipher.name, mode.name if mode else mode),
- _Reasons.UNSUPPORTED_CIPHER
- )
+ msg = "cipher {0.name} ".format(cipher)
+ if mode is not None:
+ msg += "in {0.name} mode ".format(mode)
+ msg += (
+ "is not supported by this backend (Your version of OpenSSL "
+ "may be too old. Current version: {}.)"
+ ).format(self._backend.openssl_version_text())
+ raise UnsupportedAlgorithm(msg, _Reasons.UNSUPPORTED_CIPHER)
if isinstance(mode, modes.ModeWithInitializationVector):
- iv_nonce = mode.initialization_vector
+ iv_nonce = self._backend._ffi.from_buffer(
+ mode.initialization_vector
+ )
elif isinstance(mode, modes.ModeWithTweak):
- iv_nonce = mode.tweak
+ iv_nonce = self._backend._ffi.from_buffer(mode.tweak)
elif isinstance(mode, modes.ModeWithNonce):
- iv_nonce = mode.nonce
+ iv_nonce = self._backend._ffi.from_buffer(mode.nonce)
elif isinstance(cipher, modes.ModeWithNonce):
- iv_nonce = cipher.nonce
+ iv_nonce = self._backend._ffi.from_buffer(cipher.nonce)
else:
iv_nonce = self._backend._ffi.NULL
# begin init with cipher and operation type
- res = self._backend._lib.EVP_CipherInit_ex(ctx, evp_cipher,
- self._backend._ffi.NULL,
- self._backend._ffi.NULL,
- self._backend._ffi.NULL,
- operation)
+ res = self._backend._lib.EVP_CipherInit_ex(
+ ctx,
+ evp_cipher,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ operation,
+ )
self._backend.openssl_assert(res != 0)
# set the key length to handle variable key ciphers
res = self._backend._lib.EVP_CIPHER_CTX_set_key_length(
@@ -79,35 +88,30 @@ def __init__(self, backend, cipher, mode, operation):
self._backend.openssl_assert(res != 0)
if isinstance(mode, modes.GCM):
res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
- ctx, self._backend._lib.EVP_CTRL_AEAD_SET_IVLEN,
- len(iv_nonce), self._backend._ffi.NULL
+ ctx,
+ self._backend._lib.EVP_CTRL_AEAD_SET_IVLEN,
+ len(iv_nonce),
+ self._backend._ffi.NULL,
)
self._backend.openssl_assert(res != 0)
if mode.tag is not None:
res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
- ctx, self._backend._lib.EVP_CTRL_AEAD_SET_TAG,
- len(mode.tag), mode.tag
+ ctx,
+ self._backend._lib.EVP_CTRL_AEAD_SET_TAG,
+ len(mode.tag),
+ mode.tag,
)
self._backend.openssl_assert(res != 0)
self._tag = mode.tag
- elif (
- self._operation == self._DECRYPT and
- self._backend._lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_102 and
- not self._backend._lib.CRYPTOGRAPHY_IS_LIBRESSL
- ):
- raise NotImplementedError(
- "delayed passing of GCM tag requires OpenSSL >= 1.0.2."
- " To use this feature please update OpenSSL"
- )
# pass key/iv
res = self._backend._lib.EVP_CipherInit_ex(
ctx,
self._backend._ffi.NULL,
self._backend._ffi.NULL,
- cipher.key,
+ self._backend._ffi.from_buffer(cipher.key),
iv_nonce,
- operation
+ operation,
)
self._backend.openssl_assert(res != 0)
# We purposely disable padding here as it's handled higher up in the
@@ -121,34 +125,38 @@ def update(self, data):
return bytes(buf[:n])
def update_into(self, data, buf):
- if len(buf) < (len(data) + self._block_size_bytes - 1):
+ total_data_len = len(data)
+ if len(buf) < (total_data_len + self._block_size_bytes - 1):
raise ValueError(
- "buffer must be at least {0} bytes for this "
+ "buffer must be at least {} bytes for this "
"payload".format(len(data) + self._block_size_bytes - 1)
)
- buf = self._backend._ffi.cast(
- "unsigned char *", self._backend._ffi.from_buffer(buf)
- )
+ data_processed = 0
+ total_out = 0
outlen = self._backend._ffi.new("int *")
- res = self._backend._lib.EVP_CipherUpdate(self._ctx, buf, outlen,
- data, len(data))
- self._backend.openssl_assert(res != 0)
- return outlen[0]
+ baseoutbuf = self._backend._ffi.from_buffer(buf)
+ baseinbuf = self._backend._ffi.from_buffer(data)
- def finalize(self):
- # OpenSSL 1.0.1 on Ubuntu 12.04 (and possibly other distributions)
- # appears to have a bug where you must make at least one call to update
- # even if you are only using authenticate_additional_data or the
- # GCM tag will be wrong. An (empty) call to update resolves this
- # and is harmless for all other versions of OpenSSL.
- if isinstance(self._mode, modes.GCM):
- self.update(b"")
+ while data_processed != total_data_len:
+ outbuf = baseoutbuf + total_out
+ inbuf = baseinbuf + data_processed
+ inlen = min(self._MAX_CHUNK_SIZE, total_data_len - data_processed)
+ res = self._backend._lib.EVP_CipherUpdate(
+ self._ctx, outbuf, outlen, inbuf, inlen
+ )
+ self._backend.openssl_assert(res != 0)
+ data_processed += inlen
+ total_out += outlen[0]
+
+ return total_out
+
+ def finalize(self):
if (
- self._operation == self._DECRYPT and
- isinstance(self._mode, modes.ModeWithAuthenticationTag) and
- self.tag is None
+ self._operation == self._DECRYPT
+ and isinstance(self._mode, modes.ModeWithAuthenticationTag)
+ and self.tag is None
):
raise ValueError(
"Authentication tag must be provided when decrypting."
@@ -166,45 +174,44 @@ def finalize(self):
self._backend.openssl_assert(
errors[0]._lib_reason_match(
self._backend._lib.ERR_LIB_EVP,
- self._backend._lib.EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH
- ) or errors[0]._lib_reason_match(
- self._backend._lib.ERR_LIB_EVP,
- self._backend._lib.EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH
- )
+ self._backend._lib.EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH,
+ ),
+ errors=errors,
)
raise ValueError(
"The length of the provided data is not a multiple of "
"the block length."
)
- if (isinstance(self._mode, modes.GCM) and
- self._operation == self._ENCRYPT):
+ if (
+ isinstance(self._mode, modes.GCM)
+ and self._operation == self._ENCRYPT
+ ):
tag_buf = self._backend._ffi.new(
"unsigned char[]", self._block_size_bytes
)
res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
- self._ctx, self._backend._lib.EVP_CTRL_AEAD_GET_TAG,
- self._block_size_bytes, tag_buf
+ self._ctx,
+ self._backend._lib.EVP_CTRL_AEAD_GET_TAG,
+ self._block_size_bytes,
+ tag_buf,
)
self._backend.openssl_assert(res != 0)
self._tag = self._backend._ffi.buffer(tag_buf)[:]
res = self._backend._lib.EVP_CIPHER_CTX_cleanup(self._ctx)
self._backend.openssl_assert(res == 1)
- return self._backend._ffi.buffer(buf)[:outlen[0]]
+ return self._backend._ffi.buffer(buf)[: outlen[0]]
def finalize_with_tag(self, tag):
- if (
- self._backend._lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_102 and
- not self._backend._lib.CRYPTOGRAPHY_IS_LIBRESSL
- ):
- raise NotImplementedError(
- "finalize_with_tag requires OpenSSL >= 1.0.2. To use this "
- "method please update OpenSSL"
+ if len(tag) < self._mode._min_tag_length:
+ raise ValueError(
+ "Authentication tag must be {} bytes or longer.".format(
+ self._mode._min_tag_length
+ )
)
res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
- self._ctx, self._backend._lib.EVP_CTRL_AEAD_SET_TAG,
- len(tag), tag
+ self._ctx, self._backend._lib.EVP_CTRL_AEAD_SET_TAG, len(tag), tag
)
self._backend.openssl_assert(res != 0)
self._tag = tag
@@ -213,7 +220,11 @@ def finalize_with_tag(self, tag):
def authenticate_additional_data(self, data):
outlen = self._backend._ffi.new("int *")
res = self._backend._lib.EVP_CipherUpdate(
- self._ctx, self._backend._ffi.NULL, outlen, data, len(data)
+ self._ctx,
+ self._backend._ffi.NULL,
+ outlen,
+ self._backend._ffi.from_buffer(data),
+ len(data),
)
self._backend.openssl_assert(res != 0)
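The ciphers.py changes above chunk update_into at 2**31 bytes, take keys/IVs/AAD via ffi.from_buffer, and enforce a minimum tag length in finalize_with_tag. An AES-GCM round trip that touches those paths; key and nonce values are illustrative:

    import os
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

    key, nonce = os.urandom(32), os.urandom(12)
    data = b"a" * 1024

    encryptor = Cipher(algorithms.AES(key), modes.GCM(nonce), default_backend()).encryptor()
    buf = bytearray(len(data) + 15)          # update_into needs len(data) + block_size - 1 bytes
    n = encryptor.update_into(data, buf)
    ciphertext = bytes(buf[:n]) + encryptor.finalize()

    decryptor = Cipher(algorithms.AES(key), modes.GCM(nonce), default_backend()).decryptor()
    plaintext = decryptor.update(ciphertext) + decryptor.finalize_with_tag(encryptor.tag)
    assert plaintext == data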
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/cmac.py b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/cmac.py
index 5919017..195fc23 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/cmac.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/cmac.py
@@ -7,18 +7,21 @@
from cryptography import utils
from cryptography.exceptions import (
- InvalidSignature, UnsupportedAlgorithm, _Reasons
+ InvalidSignature,
+ UnsupportedAlgorithm,
+ _Reasons,
)
-from cryptography.hazmat.primitives import constant_time, mac
+from cryptography.hazmat.primitives import constant_time
from cryptography.hazmat.primitives.ciphers.modes import CBC
-@utils.register_interface(mac.MACContext)
class _CMACContext(object):
def __init__(self, backend, algorithm, ctx=None):
if not backend.cmac_algorithm_supported(algorithm):
- raise UnsupportedAlgorithm("This backend does not support CMAC.",
- _Reasons.UNSUPPORTED_CIPHER)
+ raise UnsupportedAlgorithm(
+ "This backend does not support CMAC.",
+ _Reasons.UNSUPPORTED_CIPHER,
+ )
self._backend = backend
self._key = algorithm.key
@@ -36,10 +39,15 @@ def __init__(self, backend, algorithm, ctx=None):
self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
ctx = self._backend._ffi.gc(ctx, self._backend._lib.CMAC_CTX_free)
- self._backend._lib.CMAC_Init(
- ctx, self._key, len(self._key),
- evp_cipher, self._backend._ffi.NULL
+ key_ptr = self._backend._ffi.from_buffer(self._key)
+ res = self._backend._lib.CMAC_Init(
+ ctx,
+ key_ptr,
+ len(self._key),
+ evp_cipher,
+ self._backend._ffi.NULL,
)
+ self._backend.openssl_assert(res == 1)
self._ctx = ctx
@@ -52,9 +60,7 @@ def update(self, data):
def finalize(self):
buf = self._backend._ffi.new("unsigned char[]", self._output_length)
length = self._backend._ffi.new("size_t *", self._output_length)
- res = self._backend._lib.CMAC_Final(
- self._ctx, buf, length
- )
+ res = self._backend._lib.CMAC_Final(self._ctx, buf, length)
self._backend.openssl_assert(res == 1)
self._ctx = None
@@ -66,13 +72,9 @@ def copy(self):
copied_ctx = self._backend._ffi.gc(
copied_ctx, self._backend._lib.CMAC_CTX_free
)
- res = self._backend._lib.CMAC_CTX_copy(
- copied_ctx, self._ctx
- )
+ res = self._backend._lib.CMAC_CTX_copy(copied_ctx, self._ctx)
self._backend.openssl_assert(res == 1)
- return _CMACContext(
- self._backend, self._algorithm, ctx=copied_ctx
- )
+ return _CMACContext(self._backend, self._algorithm, ctx=copied_ctx)
def verify(self, signature):
digest = self.finalize()
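cmac.py now checks the CMAC_Init return value and passes the key through ffi.from_buffer, while dropping the MACContext interface registration. Caller-side usage is unchanged; a sketch:

    import os
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import cmac
    from cryptography.hazmat.primitives.ciphers import algorithms

    key = os.urandom(32)
    c = cmac.CMAC(algorithms.AES(key), default_backend())
    c.update(b"message to authenticate")
    tag = c.finalize()                        # 16-byte AES-CMAC tag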
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/decode_asn1.py b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/decode_asn1.py
index 24eb55b..279b00c 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/decode_asn1.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/decode_asn1.py
@@ -7,20 +7,20 @@
import datetime
import ipaddress
-from asn1crypto.core import Integer, SequenceOf
+import six
from cryptography import x509
+from cryptography.hazmat._der import DERReader, INTEGER, NULL, SEQUENCE
from cryptography.x509.extensions import _TLS_FEATURE_TYPE_TO_ENUM
from cryptography.x509.name import _ASN1_TYPE_TO_ENUM
from cryptography.x509.oid import (
- CRLEntryExtensionOID, CertificatePoliciesOID, ExtensionOID
+ CRLEntryExtensionOID,
+ CertificatePoliciesOID,
+ ExtensionOID,
+ OCSPExtensionOID,
)
-class _Integers(SequenceOf):
- _child_spec = Integer
-
-
def _obj2txt(backend, obj):
# Set to 80 on the recommendation of
# https://www.openssl.org/docs/crypto/OBJ_nid2ln.html#return_values
@@ -66,7 +66,7 @@ def _decode_x509_name(backend, x509_name):
attribute = _decode_x509_name_entry(backend, entry)
set_id = backend._lib.Cryptography_X509_NAME_ENTRY_set(entry)
if set_id != prev_set_id:
- attributes.append(set([attribute]))
+ attributes.append({attribute})
else:
# is in the same RDN a previous entry
attributes[-1].add(attribute)
@@ -121,10 +121,10 @@ def _decode_general_name(backend, gn):
# netmask. To handle this we convert the netmask to integer, then
# find the first 0 bit, which will be the prefix. If another 1
# bit is present after that the netmask is invalid.
- base = ipaddress.ip_address(data[:data_len // 2])
- netmask = ipaddress.ip_address(data[data_len // 2:])
+ base = ipaddress.ip_address(data[: data_len // 2])
+ netmask = ipaddress.ip_address(data[data_len // 2 :])
bits = bin(int(netmask))[2:]
- prefix = bits.find('0')
+ prefix = bits.find("0")
# If no 0 bits are found it is a /32 or /128
if prefix == -1:
prefix = len(bits)
@@ -132,7 +132,7 @@ def _decode_general_name(backend, gn):
if "1" in bits[prefix:]:
raise ValueError("Invalid netmask")
- ip = ipaddress.ip_network(base.exploded + u"/{0}".format(prefix))
+ ip = ipaddress.ip_network(base.exploded + u"/{}".format(prefix))
else:
ip = ipaddress.ip_address(data)
@@ -157,10 +157,10 @@ def _decode_general_name(backend, gn):
else:
# x400Address or ediPartyName
raise x509.UnsupportedGeneralNameType(
- "{0} is not a supported type".format(
+ "{} is not a supported type".format(
x509._GENERAL_NAMES.get(gn.type, gn.type)
),
- gn.type
+ gn.type,
)
@@ -181,61 +181,79 @@ def _decode_delta_crl_indicator(backend, ext):
class _X509ExtensionParser(object):
- def __init__(self, ext_count, get_ext, handlers):
+ def __init__(self, backend, ext_count, get_ext, handlers):
self.ext_count = ext_count
self.get_ext = get_ext
self.handlers = handlers
+ self._backend = backend
- def parse(self, backend, x509_obj):
+ def parse(self, x509_obj):
extensions = []
seen_oids = set()
- for i in range(self.ext_count(backend, x509_obj)):
- ext = self.get_ext(backend, x509_obj, i)
- backend.openssl_assert(ext != backend._ffi.NULL)
- crit = backend._lib.X509_EXTENSION_get_critical(ext)
+ for i in range(self.ext_count(x509_obj)):
+ ext = self.get_ext(x509_obj, i)
+ self._backend.openssl_assert(ext != self._backend._ffi.NULL)
+ crit = self._backend._lib.X509_EXTENSION_get_critical(ext)
critical = crit == 1
oid = x509.ObjectIdentifier(
- _obj2txt(backend, backend._lib.X509_EXTENSION_get_object(ext))
+ _obj2txt(
+ self._backend,
+ self._backend._lib.X509_EXTENSION_get_object(ext),
+ )
)
if oid in seen_oids:
raise x509.DuplicateExtension(
- "Duplicate {0} extension found".format(oid), oid
+ "Duplicate {} extension found".format(oid), oid
)
- # This OID is only supported in OpenSSL 1.1.0+ but we want
- # to support it in all versions of OpenSSL so we decode it
+ # These OIDs are only supported in OpenSSL 1.1.0+ but we want
+ # to support them in all versions of OpenSSL so we decode them
# ourselves.
if oid == ExtensionOID.TLS_FEATURE:
- data = backend._lib.X509_EXTENSION_get_data(ext)
- parsed = _Integers.load(_asn1_string_to_bytes(backend, data))
+ # The extension contents are a SEQUENCE OF INTEGERs.
+ data = self._backend._lib.X509_EXTENSION_get_data(ext)
+ data_bytes = _asn1_string_to_bytes(self._backend, data)
+ features = DERReader(data_bytes).read_single_element(SEQUENCE)
+ parsed = []
+ while not features.is_empty():
+ parsed.append(features.read_element(INTEGER).as_integer())
+ # Map the features to their enum value.
value = x509.TLSFeature(
- [_TLS_FEATURE_TYPE_TO_ENUM[x.native] for x in parsed]
+ [_TLS_FEATURE_TYPE_TO_ENUM[x] for x in parsed]
)
extensions.append(x509.Extension(oid, critical, value))
seen_oids.add(oid)
continue
+ elif oid == ExtensionOID.PRECERT_POISON:
+ data = self._backend._lib.X509_EXTENSION_get_data(ext)
+ # The contents of the extension must be an ASN.1 NULL.
+ reader = DERReader(_asn1_string_to_bytes(self._backend, data))
+ reader.read_single_element(NULL).check_empty()
+ extensions.append(
+ x509.Extension(oid, critical, x509.PrecertPoison())
+ )
+ seen_oids.add(oid)
+ continue
try:
handler = self.handlers[oid]
except KeyError:
# Dump the DER payload into an UnrecognizedExtension object
- data = backend._lib.X509_EXTENSION_get_data(ext)
- backend.openssl_assert(data != backend._ffi.NULL)
- der = backend._ffi.buffer(data.data, data.length)[:]
+ data = self._backend._lib.X509_EXTENSION_get_data(ext)
+ self._backend.openssl_assert(data != self._backend._ffi.NULL)
+ der = self._backend._ffi.buffer(data.data, data.length)[:]
unrecognized = x509.UnrecognizedExtension(oid, der)
- extensions.append(
- x509.Extension(oid, critical, unrecognized)
- )
+ extensions.append(x509.Extension(oid, critical, unrecognized))
else:
- ext_data = backend._lib.X509V3_EXT_d2i(ext)
- if ext_data == backend._ffi.NULL:
- backend._consume_errors()
+ ext_data = self._backend._lib.X509V3_EXT_d2i(ext)
+ if ext_data == self._backend._ffi.NULL:
+ self._backend._consume_errors()
raise ValueError(
- "The {0} extension is invalid and can't be "
+ "The {} extension is invalid and can't be "
"parsed".format(oid)
)
- value = handler(backend, ext_data)
+ value = handler(self._backend, ext_data)
extensions.append(x509.Extension(oid, critical, value))
seen_oids.add(oid)
@@ -257,16 +275,12 @@ def _decode_certificate_policies(backend, cp):
qnum = backend._lib.sk_POLICYQUALINFO_num(pi.qualifiers)
qualifiers = []
for j in range(qnum):
- pqi = backend._lib.sk_POLICYQUALINFO_value(
- pi.qualifiers, j
- )
- pqualid = x509.ObjectIdentifier(
- _obj2txt(backend, pqi.pqualid)
- )
+ pqi = backend._lib.sk_POLICYQUALINFO_value(pi.qualifiers, j)
+ pqualid = x509.ObjectIdentifier(_obj2txt(backend, pqi.pqualid))
if pqualid == CertificatePoliciesOID.CPS_QUALIFIER:
cpsuri = backend._ffi.buffer(
pqi.d.cpsuri.data, pqi.d.cpsuri.length
- )[:].decode('ascii')
+ )[:].decode("ascii")
qualifiers.append(cpsuri)
else:
assert pqualid == CertificatePoliciesOID.CPS_USER_NOTICE
@@ -275,9 +289,7 @@ def _decode_certificate_policies(backend, cp):
)
qualifiers.append(user_notice)
- certificate_policies.append(
- x509.PolicyInformation(oid, qualifiers)
- )
+ certificate_policies.append(x509.PolicyInformation(oid, qualifiers))
return x509.CertificatePolicies(certificate_policies)
@@ -290,13 +302,9 @@ def _decode_user_notice(backend, un):
explicit_text = _asn1_string_to_utf8(backend, un.exptext)
if un.noticeref != backend._ffi.NULL:
- organization = _asn1_string_to_utf8(
- backend, un.noticeref.organization
- )
+ organization = _asn1_string_to_utf8(backend, un.noticeref.organization)
- num = backend._lib.sk_ASN1_INTEGER_num(
- un.noticeref.noticenos
- )
+ num = backend._lib.sk_ASN1_INTEGER_num(un.noticeref.noticenos)
notice_numbers = []
for i in range(num):
asn1_int = backend._lib.sk_ASN1_INTEGER_value(
@@ -305,9 +313,7 @@ def _decode_user_notice(backend, un):
notice_num = _asn1_integer_to_int(backend, asn1_int)
notice_numbers.append(notice_num)
- notice_reference = x509.NoticeReference(
- organization, notice_numbers
- )
+ notice_reference = x509.NoticeReference(organization, notice_numbers)
return x509.UserNotice(notice_reference, explicit_text)
@@ -350,9 +356,7 @@ def _decode_authority_key_identifier(backend, akid):
)[:]
if akid.issuer != backend._ffi.NULL:
- authority_cert_issuer = _decode_general_names(
- backend, akid.issuer
- )
+ authority_cert_issuer = _decode_general_names(backend, akid.issuer)
authority_cert_serial_number = _asn1_integer_to_int_or_none(
backend, akid.serial
@@ -363,22 +367,40 @@ def _decode_authority_key_identifier(backend, akid):
)
-def _decode_authority_information_access(backend, aia):
- aia = backend._ffi.cast("Cryptography_STACK_OF_ACCESS_DESCRIPTION *", aia)
- aia = backend._ffi.gc(aia, backend._lib.sk_ACCESS_DESCRIPTION_free)
- num = backend._lib.sk_ACCESS_DESCRIPTION_num(aia)
+def _decode_information_access(backend, ia):
+ ia = backend._ffi.cast("Cryptography_STACK_OF_ACCESS_DESCRIPTION *", ia)
+ ia = backend._ffi.gc(
+ ia,
+ lambda x: backend._lib.sk_ACCESS_DESCRIPTION_pop_free(
+ x,
+ backend._ffi.addressof(
+ backend._lib._original_lib, "ACCESS_DESCRIPTION_free"
+ ),
+ ),
+ )
+ num = backend._lib.sk_ACCESS_DESCRIPTION_num(ia)
access_descriptions = []
for i in range(num):
- ad = backend._lib.sk_ACCESS_DESCRIPTION_value(aia, i)
+ ad = backend._lib.sk_ACCESS_DESCRIPTION_value(ia, i)
backend.openssl_assert(ad.method != backend._ffi.NULL)
oid = x509.ObjectIdentifier(_obj2txt(backend, ad.method))
backend.openssl_assert(ad.location != backend._ffi.NULL)
gn = _decode_general_name(backend, ad.location)
access_descriptions.append(x509.AccessDescription(oid, gn))
+ return access_descriptions
+
+
+def _decode_authority_information_access(backend, aia):
+ access_descriptions = _decode_information_access(backend, aia)
return x509.AuthorityInformationAccess(access_descriptions)
+def _decode_subject_information_access(backend, aia):
+ access_descriptions = _decode_information_access(backend, aia)
+ return x509.SubjectInformationAccess(access_descriptions)
+
+
def _decode_key_usage(backend, bit_string):
bit_string = backend._ffi.cast("ASN1_BIT_STRING *", bit_string)
bit_string = backend._ffi.gc(bit_string, backend._lib.ASN1_BIT_STRING_free)
@@ -401,7 +423,7 @@ def _decode_key_usage(backend, bit_string):
key_cert_sign,
crl_sign,
encipher_only,
- decipher_only
+ decipher_only,
)
@@ -450,6 +472,35 @@ def _decode_general_subtrees(backend, stack_subtrees):
return subtrees
+def _decode_issuing_dist_point(backend, idp):
+ idp = backend._ffi.cast("ISSUING_DIST_POINT *", idp)
+ idp = backend._ffi.gc(idp, backend._lib.ISSUING_DIST_POINT_free)
+ if idp.distpoint != backend._ffi.NULL:
+ full_name, relative_name = _decode_distpoint(backend, idp.distpoint)
+ else:
+ full_name = None
+ relative_name = None
+
+ only_user = idp.onlyuser == 255
+ only_ca = idp.onlyCA == 255
+ indirect_crl = idp.indirectCRL == 255
+ only_attr = idp.onlyattr == 255
+ if idp.onlysomereasons != backend._ffi.NULL:
+ only_some_reasons = _decode_reasons(backend, idp.onlysomereasons)
+ else:
+ only_some_reasons = None
+
+ return x509.IssuingDistributionPoint(
+ full_name,
+ relative_name,
+ only_user,
+ only_ca,
+ only_some_reasons,
+ indirect_crl,
+ only_attr,
+ )
+
+
def _decode_policy_constraints(backend, pc):
pc = backend._ffi.cast("POLICY_CONSTRAINTS *", pc)
pc = backend._ffi.gc(pc, backend._lib.POLICY_CONSTRAINTS_free)
@@ -498,44 +549,7 @@ def _decode_dist_points(backend, cdps):
reasons = None
cdp = backend._lib.sk_DIST_POINT_value(cdps, i)
if cdp.reasons != backend._ffi.NULL:
- # We will check each bit from RFC 5280
- # ReasonFlags ::= BIT STRING {
- # unused (0),
- # keyCompromise (1),
- # cACompromise (2),
- # affiliationChanged (3),
- # superseded (4),
- # cessationOfOperation (5),
- # certificateHold (6),
- # privilegeWithdrawn (7),
- # aACompromise (8) }
- reasons = []
- get_bit = backend._lib.ASN1_BIT_STRING_get_bit
- if get_bit(cdp.reasons, 1):
- reasons.append(x509.ReasonFlags.key_compromise)
-
- if get_bit(cdp.reasons, 2):
- reasons.append(x509.ReasonFlags.ca_compromise)
-
- if get_bit(cdp.reasons, 3):
- reasons.append(x509.ReasonFlags.affiliation_changed)
-
- if get_bit(cdp.reasons, 4):
- reasons.append(x509.ReasonFlags.superseded)
-
- if get_bit(cdp.reasons, 5):
- reasons.append(x509.ReasonFlags.cessation_of_operation)
-
- if get_bit(cdp.reasons, 6):
- reasons.append(x509.ReasonFlags.certificate_hold)
-
- if get_bit(cdp.reasons, 7):
- reasons.append(x509.ReasonFlags.privilege_withdrawn)
-
- if get_bit(cdp.reasons, 8):
- reasons.append(x509.ReasonFlags.aa_compromise)
-
- reasons = frozenset(reasons)
+ reasons = _decode_reasons(backend, cdp.reasons)
if cdp.CRLissuer != backend._ffi.NULL:
crl_issuer = _decode_general_names(backend, cdp.CRLissuer)
@@ -543,32 +557,9 @@ def _decode_dist_points(backend, cdps):
# Certificates may have a crl_issuer/reasons and no distribution
# point so make sure it's not null.
if cdp.distpoint != backend._ffi.NULL:
- # Type 0 is fullName, there is no #define for it in the code.
- if cdp.distpoint.type == _DISTPOINT_TYPE_FULLNAME:
- full_name = _decode_general_names(
- backend, cdp.distpoint.name.fullname
- )
- # OpenSSL code doesn't test for a specific type for
- # relativename, everything that isn't fullname is considered
- # relativename. Per RFC 5280:
- #
- # DistributionPointName ::= CHOICE {
- # fullName [0] GeneralNames,
- # nameRelativeToCRLIssuer [1] RelativeDistinguishedName }
- else:
- rns = cdp.distpoint.name.relativename
- rnum = backend._lib.sk_X509_NAME_ENTRY_num(rns)
- attributes = set()
- for i in range(rnum):
- rn = backend._lib.sk_X509_NAME_ENTRY_value(
- rns, i
- )
- backend.openssl_assert(rn != backend._ffi.NULL)
- attributes.add(
- _decode_x509_name_entry(backend, rn)
- )
-
- relative_name = x509.RelativeDistinguishedName(attributes)
+ full_name, relative_name = _decode_distpoint(
+ backend, cdp.distpoint
+ )
dist_points.append(
x509.DistributionPoint(
@@ -579,6 +570,63 @@ def _decode_dist_points(backend, cdps):
return dist_points
+# ReasonFlags ::= BIT STRING {
+# unused (0),
+# keyCompromise (1),
+# cACompromise (2),
+# affiliationChanged (3),
+# superseded (4),
+# cessationOfOperation (5),
+# certificateHold (6),
+# privilegeWithdrawn (7),
+# aACompromise (8) }
+_REASON_BIT_MAPPING = {
+ 1: x509.ReasonFlags.key_compromise,
+ 2: x509.ReasonFlags.ca_compromise,
+ 3: x509.ReasonFlags.affiliation_changed,
+ 4: x509.ReasonFlags.superseded,
+ 5: x509.ReasonFlags.cessation_of_operation,
+ 6: x509.ReasonFlags.certificate_hold,
+ 7: x509.ReasonFlags.privilege_withdrawn,
+ 8: x509.ReasonFlags.aa_compromise,
+}
+
+
+def _decode_reasons(backend, reasons):
+ # We will check each bit from RFC 5280
+ enum_reasons = []
+ for bit_position, reason in six.iteritems(_REASON_BIT_MAPPING):
+ if backend._lib.ASN1_BIT_STRING_get_bit(reasons, bit_position):
+ enum_reasons.append(reason)
+
+ return frozenset(enum_reasons)
+
+
+def _decode_distpoint(backend, distpoint):
+ if distpoint.type == _DISTPOINT_TYPE_FULLNAME:
+ full_name = _decode_general_names(backend, distpoint.name.fullname)
+ return full_name, None
+
+ # OpenSSL code doesn't test for a specific type for
+ # relativename, everything that isn't fullname is considered
+ # relativename. Per RFC 5280:
+ #
+ # DistributionPointName ::= CHOICE {
+ # fullName [0] GeneralNames,
+ # nameRelativeToCRLIssuer [1] RelativeDistinguishedName }
+ rns = distpoint.name.relativename
+ rnum = backend._lib.sk_X509_NAME_ENTRY_num(rns)
+ attributes = set()
+ for i in range(rnum):
+ rn = backend._lib.sk_X509_NAME_ENTRY_value(rns, i)
+ backend.openssl_assert(rn != backend._ffi.NULL)
+ attributes.add(_decode_x509_name_entry(backend, rn))
+
+ relative_name = x509.RelativeDistinguishedName(attributes)
+
+ return None, relative_name
+
+
def _decode_crl_distribution_points(backend, cdps):
dist_points = _decode_dist_points(backend, cdps)
return x509.CRLDistributionPoints(dist_points)
@@ -596,10 +644,11 @@ def _decode_inhibit_any_policy(backend, asn1_int):
return x509.InhibitAnyPolicy(skip_certs)
-def _decode_precert_signed_certificate_timestamps(backend, asn1_scts):
+def _decode_scts(backend, asn1_scts):
from cryptography.hazmat.backends.openssl.x509 import (
- _SignedCertificateTimestamp
+ _SignedCertificateTimestamp,
)
+
asn1_scts = backend._ffi.cast("Cryptography_STACK_OF_SCT *", asn1_scts)
asn1_scts = backend._ffi.gc(asn1_scts, backend._lib.SCT_LIST_free)
@@ -608,7 +657,17 @@ def _decode_precert_signed_certificate_timestamps(backend, asn1_scts):
sct = backend._lib.sk_SCT_value(asn1_scts, i)
scts.append(_SignedCertificateTimestamp(backend, asn1_scts, sct))
- return x509.PrecertificateSignedCertificateTimestamps(scts)
+ return scts
+
+
+def _decode_precert_signed_certificate_timestamps(backend, asn1_scts):
+ return x509.PrecertificateSignedCertificateTimestamps(
+ _decode_scts(backend, asn1_scts)
+ )
+
+
+def _decode_signed_certificate_timestamps(backend, asn1_scts):
+ return x509.SignedCertificateTimestamps(_decode_scts(backend, asn1_scts))
# CRLReason ::= ENUMERATED {
@@ -647,7 +706,7 @@ def _decode_precert_signed_certificate_timestamps(backend, asn1_scts):
x509.ReasonFlags.certificate_hold: 6,
x509.ReasonFlags.remove_from_crl: 8,
x509.ReasonFlags.privilege_withdrawn: 9,
- x509.ReasonFlags.aa_compromise: 10
+ x509.ReasonFlags.aa_compromise: 10,
}
@@ -659,13 +718,11 @@ def _decode_crl_reason(backend, enum):
try:
return x509.CRLReason(_CRL_ENTRY_REASON_CODE_TO_ENUM[code])
except KeyError:
- raise ValueError("Unsupported reason code: {0}".format(code))
+ raise ValueError("Unsupported reason code: {}".format(code))
def _decode_invalidity_date(backend, inv_date):
- generalized_time = backend._ffi.cast(
- "ASN1_GENERALIZEDTIME *", inv_date
- )
+ generalized_time = backend._ffi.cast("ASN1_GENERALIZEDTIME *", inv_date)
generalized_time = backend._ffi.gc(
generalized_time, backend._lib.ASN1_GENERALIZEDTIME_free
)
@@ -719,14 +776,14 @@ def _asn1_string_to_utf8(backend, asn1_string):
res = backend._lib.ASN1_STRING_to_UTF8(buf, asn1_string)
if res == -1:
raise ValueError(
- "Unsupported ASN1 string type. Type: {0}".format(asn1_string.type)
+ "Unsupported ASN1 string type. Type: {}".format(asn1_string.type)
)
backend.openssl_assert(buf[0] != backend._ffi.NULL)
buf = backend._ffi.gc(
buf, lambda buffer: backend._lib.OPENSSL_free(buffer[0])
)
- return backend._ffi.buffer(buf[0], res)[:].decode('utf8')
+ return backend._ffi.buffer(buf[0], res)[:].decode("utf8")
def _parse_asn1_time(backend, asn1_time):
@@ -734,7 +791,13 @@ def _parse_asn1_time(backend, asn1_time):
generalized_time = backend._lib.ASN1_TIME_to_generalizedtime(
asn1_time, backend._ffi.NULL
)
- backend.openssl_assert(generalized_time != backend._ffi.NULL)
+ if generalized_time == backend._ffi.NULL:
+ raise ValueError(
+ "Couldn't parse ASN.1 time as generalizedtime {!r}".format(
+ _asn1_string_to_bytes(backend, asn1_time)
+ )
+ )
+
generalized_time = backend._ffi.gc(
generalized_time, backend._lib.ASN1_GENERALIZEDTIME_free
)
@@ -748,7 +811,13 @@ def _parse_asn1_generalized_time(backend, generalized_time):
return datetime.datetime.strptime(time, "%Y%m%d%H%M%SZ")
-_EXTENSION_HANDLERS_NO_SCT = {
+def _decode_nonce(backend, nonce):
+ nonce = backend._ffi.cast("ASN1_OCTET_STRING *", nonce)
+ nonce = backend._ffi.gc(nonce, backend._lib.ASN1_OCTET_STRING_free)
+ return x509.OCSPNonce(_asn1_string_to_bytes(backend, nonce))
+
+
+_EXTENSION_HANDLERS_BASE = {
ExtensionOID.BASIC_CONSTRAINTS: _decode_basic_constraints,
ExtensionOID.SUBJECT_KEY_IDENTIFIER: _decode_subject_key_identifier,
ExtensionOID.KEY_USAGE: _decode_key_usage,
@@ -758,6 +827,9 @@ def _parse_asn1_generalized_time(backend, generalized_time):
ExtensionOID.AUTHORITY_INFORMATION_ACCESS: (
_decode_authority_information_access
),
+ ExtensionOID.SUBJECT_INFORMATION_ACCESS: (
+ _decode_subject_information_access
+ ),
ExtensionOID.CERTIFICATE_POLICIES: _decode_certificate_policies,
ExtensionOID.CRL_DISTRIBUTION_POINTS: _decode_crl_distribution_points,
ExtensionOID.FRESHEST_CRL: _decode_freshest_crl,
@@ -767,11 +839,11 @@ def _parse_asn1_generalized_time(backend, generalized_time):
ExtensionOID.NAME_CONSTRAINTS: _decode_name_constraints,
ExtensionOID.POLICY_CONSTRAINTS: _decode_policy_constraints,
}
-_EXTENSION_HANDLERS = _EXTENSION_HANDLERS_NO_SCT.copy()
-_EXTENSION_HANDLERS[
- ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS
-] = _decode_precert_signed_certificate_timestamps
-
+_EXTENSION_HANDLERS_SCT = {
+ ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS: (
+ _decode_precert_signed_certificate_timestamps
+ )
+}
_REVOKED_EXTENSION_HANDLERS = {
CRLEntryExtensionOID.CRL_REASON: _decode_crl_reason,
@@ -787,34 +859,20 @@ def _parse_asn1_generalized_time(backend, generalized_time):
ExtensionOID.AUTHORITY_INFORMATION_ACCESS: (
_decode_authority_information_access
),
+ ExtensionOID.ISSUING_DISTRIBUTION_POINT: _decode_issuing_dist_point,
+ ExtensionOID.FRESHEST_CRL: _decode_freshest_crl,
}
-_CERTIFICATE_EXTENSION_PARSER_NO_SCT = _X509ExtensionParser(
- ext_count=lambda backend, x: backend._lib.X509_get_ext_count(x),
- get_ext=lambda backend, x, i: backend._lib.X509_get_ext(x, i),
- handlers=_EXTENSION_HANDLERS_NO_SCT
-)
-
-_CERTIFICATE_EXTENSION_PARSER = _X509ExtensionParser(
- ext_count=lambda backend, x: backend._lib.X509_get_ext_count(x),
- get_ext=lambda backend, x, i: backend._lib.X509_get_ext(x, i),
- handlers=_EXTENSION_HANDLERS
-)
-
-_CSR_EXTENSION_PARSER = _X509ExtensionParser(
- ext_count=lambda backend, x: backend._lib.sk_X509_EXTENSION_num(x),
- get_ext=lambda backend, x, i: backend._lib.sk_X509_EXTENSION_value(x, i),
- handlers=_EXTENSION_HANDLERS
-)
+_OCSP_REQ_EXTENSION_HANDLERS = {
+ OCSPExtensionOID.NONCE: _decode_nonce,
+}
-_REVOKED_CERTIFICATE_EXTENSION_PARSER = _X509ExtensionParser(
- ext_count=lambda backend, x: backend._lib.X509_REVOKED_get_ext_count(x),
- get_ext=lambda backend, x, i: backend._lib.X509_REVOKED_get_ext(x, i),
- handlers=_REVOKED_EXTENSION_HANDLERS,
-)
+_OCSP_BASICRESP_EXTENSION_HANDLERS = {
+ OCSPExtensionOID.NONCE: _decode_nonce,
+}
-_CRL_EXTENSION_PARSER = _X509ExtensionParser(
- ext_count=lambda backend, x: backend._lib.X509_CRL_get_ext_count(x),
- get_ext=lambda backend, x, i: backend._lib.X509_CRL_get_ext(x, i),
- handlers=_CRL_EXTENSION_HANDLERS,
-)
+_OCSP_SINGLERESP_EXTENSION_HANDLERS_SCT = {
+ ExtensionOID.SIGNED_CERTIFICATE_TIMESTAMPS: (
+ _decode_signed_certificate_timestamps
+ )
+}
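
The decode_asn1.py rewrite drops the asn1crypto dependency in favour of the library's internal DERReader, moves backend state into _X509ExtensionParser, and adds decoders for PrecertPoison, SubjectInformationAccess, IssuingDistributionPoint, SCT lists and OCSP nonces. All of this is reached through the public x509 extension API; a hedged sketch (cert.pem is a placeholder path):

    from cryptography import x509
    from cryptography.hazmat.backends import default_backend

    with open("cert.pem", "rb") as f:  # placeholder certificate path
        cert = x509.load_pem_x509_certificate(f.read(), default_backend())

    try:
        # Decoded by the TLS_FEATURE branch of _X509ExtensionParser above.
        tls_feature = cert.extensions.get_extension_for_class(x509.TLSFeature)
        print(list(tls_feature.value))
    except x509.ExtensionNotFound:
        print("certificate has no TLSFeature extension")
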
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/dh.py b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/dh.py
index e5f7644..2862676 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/dh.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/dh.py
@@ -17,8 +17,8 @@ def _dh_params_dup(dh_cdata, backend):
param_cdata = lib.DHparams_dup(dh_cdata)
backend.openssl_assert(param_cdata != ffi.NULL)
param_cdata = ffi.gc(param_cdata, lib.DH_free)
- if lib.OPENSSL_VERSION_NUMBER < 0x10002000 or lib.CRYPTOGRAPHY_IS_LIBRESSL:
- # In OpenSSL versions < 1.0.2 or libressl DHparams_dup don't copy q
+ if lib.CRYPTOGRAPHY_IS_LIBRESSL:
+ # In libressl DHparams_dup don't copy q
q = ffi.new("BIGNUM **")
lib.DH_get0_pqg(dh_cdata, ffi.NULL, q, ffi.NULL)
q_dup = lib.BN_dup(q[0])
@@ -53,7 +53,7 @@ def parameter_numbers(self):
return dh.DHParameterNumbers(
p=self._backend._bn_to_int(p[0]),
g=self._backend._bn_to_int(g[0]),
- q=q_val
+ q=q_val,
)
def generate_private_key(self):
@@ -61,44 +61,27 @@ def generate_private_key(self):
def parameter_bytes(self, encoding, format):
if format is not serialization.ParameterFormat.PKCS3:
- raise ValueError(
- "Only PKCS3 serialization is supported"
- )
+ raise ValueError("Only PKCS3 serialization is supported")
if not self._backend._lib.Cryptography_HAS_EVP_PKEY_DHX:
q = self._backend._ffi.new("BIGNUM **")
- self._backend._lib.DH_get0_pqg(self._dh_cdata,
- self._backend._ffi.NULL,
- q,
- self._backend._ffi.NULL)
+ self._backend._lib.DH_get0_pqg(
+ self._dh_cdata,
+ self._backend._ffi.NULL,
+ q,
+ self._backend._ffi.NULL,
+ )
if q[0] != self._backend._ffi.NULL:
raise UnsupportedAlgorithm(
"DH X9.42 serialization is not supported",
- _Reasons.UNSUPPORTED_SERIALIZATION)
+ _Reasons.UNSUPPORTED_SERIALIZATION,
+ )
- return self._backend._parameter_bytes(
- encoding,
- format,
- self._dh_cdata
- )
-
-
-def _handle_dh_compute_key_error(errors, backend):
- lib = backend._lib
-
- backend.openssl_assert(
- errors[0]._lib_reason_match(
- lib.ERR_LIB_DH, lib.DH_R_INVALID_PUBKEY
- )
- )
-
- raise ValueError("Public key value is invalid for this exchange.")
+ return self._backend._parameter_bytes(encoding, format, self._dh_cdata)
def _get_dh_num_bits(backend, dh_cdata):
p = backend._ffi.new("BIGNUM **")
- backend._lib.DH_get0_pqg(dh_cdata, p,
- backend._ffi.NULL,
- backend._ffi.NULL)
+ backend._lib.DH_get0_pqg(dh_cdata, p, backend._ffi.NULL, backend._ffi.NULL)
backend.openssl_assert(p[0] != backend._ffi.NULL)
return backend._lib.BN_num_bits(p[0])
@@ -136,29 +119,32 @@ def private_numbers(self):
parameter_numbers=dh.DHParameterNumbers(
p=self._backend._bn_to_int(p[0]),
g=self._backend._bn_to_int(g[0]),
- q=q_val
+ q=q_val,
),
- y=self._backend._bn_to_int(pub_key[0])
+ y=self._backend._bn_to_int(pub_key[0]),
),
- x=self._backend._bn_to_int(priv_key[0])
+ x=self._backend._bn_to_int(priv_key[0]),
)
def exchange(self, peer_public_key):
buf = self._backend._ffi.new("unsigned char[]", self._key_size_bytes)
pub_key = self._backend._ffi.new("BIGNUM **")
- self._backend._lib.DH_get0_key(peer_public_key._dh_cdata, pub_key,
- self._backend._ffi.NULL)
+ self._backend._lib.DH_get0_key(
+ peer_public_key._dh_cdata, pub_key, self._backend._ffi.NULL
+ )
self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL)
res = self._backend._lib.DH_compute_key(
- buf,
- pub_key[0],
- self._dh_cdata
+ buf, pub_key[0], self._dh_cdata
)
if res == -1:
- errors = self._backend._consume_errors()
- return _handle_dh_compute_key_error(errors, self._backend)
+ errors_with_text = self._backend._consume_errors_with_text()
+ raise ValueError(
+ "Error computing shared key. Public key is likely invalid "
+ "for this exchange.",
+ errors_with_text,
+ )
else:
self._backend.openssl_assert(res >= 1)
@@ -173,15 +159,16 @@ def exchange(self, peer_public_key):
def public_key(self):
dh_cdata = _dh_params_dup(self._dh_cdata, self._backend)
pub_key = self._backend._ffi.new("BIGNUM **")
- self._backend._lib.DH_get0_key(self._dh_cdata,
- pub_key, self._backend._ffi.NULL)
+ self._backend._lib.DH_get0_key(
+ self._dh_cdata, pub_key, self._backend._ffi.NULL
+ )
self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL)
pub_key_dup = self._backend._lib.BN_dup(pub_key[0])
self._backend.openssl_assert(pub_key_dup != self._backend._ffi.NULL)
- res = self._backend._lib.DH_set0_key(dh_cdata,
- pub_key_dup,
- self._backend._ffi.NULL)
+ res = self._backend._lib.DH_set0_key(
+ dh_cdata, pub_key_dup, self._backend._ffi.NULL
+ )
self._backend.openssl_assert(res == 1)
evp_pkey = self._backend._dh_cdata_to_evp_pkey(dh_cdata)
return _DHPublicKey(self._backend, dh_cdata, evp_pkey)
@@ -196,21 +183,25 @@ def private_bytes(self, encoding, format, encryption_algorithm):
)
if not self._backend._lib.Cryptography_HAS_EVP_PKEY_DHX:
q = self._backend._ffi.new("BIGNUM **")
- self._backend._lib.DH_get0_pqg(self._dh_cdata,
- self._backend._ffi.NULL,
- q,
- self._backend._ffi.NULL)
+ self._backend._lib.DH_get0_pqg(
+ self._dh_cdata,
+ self._backend._ffi.NULL,
+ q,
+ self._backend._ffi.NULL,
+ )
if q[0] != self._backend._ffi.NULL:
raise UnsupportedAlgorithm(
"DH X9.42 serialization is not supported",
- _Reasons.UNSUPPORTED_SERIALIZATION)
+ _Reasons.UNSUPPORTED_SERIALIZATION,
+ )
return self._backend._private_key_bytes(
encoding,
format,
encryption_algorithm,
+ self,
self._evp_pkey,
- self._dh_cdata
+ self._dh_cdata,
)
@@ -238,16 +229,17 @@ def public_numbers(self):
else:
q_val = self._backend._bn_to_int(q[0])
pub_key = self._backend._ffi.new("BIGNUM **")
- self._backend._lib.DH_get0_key(self._dh_cdata,
- pub_key, self._backend._ffi.NULL)
+ self._backend._lib.DH_get0_key(
+ self._dh_cdata, pub_key, self._backend._ffi.NULL
+ )
self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL)
return dh.DHPublicNumbers(
parameter_numbers=dh.DHParameterNumbers(
p=self._backend._bn_to_int(p[0]),
g=self._backend._bn_to_int(g[0]),
- q=q_val
+ q=q_val,
),
- y=self._backend._bn_to_int(pub_key[0])
+ y=self._backend._bn_to_int(pub_key[0]),
)
def parameters(self):
@@ -262,19 +254,18 @@ def public_bytes(self, encoding, format):
if not self._backend._lib.Cryptography_HAS_EVP_PKEY_DHX:
q = self._backend._ffi.new("BIGNUM **")
- self._backend._lib.DH_get0_pqg(self._dh_cdata,
- self._backend._ffi.NULL,
- q,
- self._backend._ffi.NULL)
+ self._backend._lib.DH_get0_pqg(
+ self._dh_cdata,
+ self._backend._ffi.NULL,
+ q,
+ self._backend._ffi.NULL,
+ )
if q[0] != self._backend._ffi.NULL:
raise UnsupportedAlgorithm(
"DH X9.42 serialization is not supported",
- _Reasons.UNSUPPORTED_SERIALIZATION)
+ _Reasons.UNSUPPORTED_SERIALIZATION,
+ )
return self._backend._public_key_bytes(
- encoding,
- format,
- self,
- self._evp_pkey,
- None
+ encoding, format, self, self._evp_pkey, None
)
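
The dh.py hunks are mostly call reflows, but _handle_dh_compute_key_error is gone: a failing DH_compute_key now raises ValueError directly, carrying the collected OpenSSL error text. From the caller's side this surfaces through DHPrivateKey.exchange; a minimal sketch:

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives.asymmetric import dh

    # Parameter generation is slow; real deployments usually load fixed parameters.
    parameters = dh.generate_parameters(
        generator=2, key_size=2048, backend=default_backend()
    )
    private_key = parameters.generate_private_key()
    peer_public_key = parameters.generate_private_key().public_key()
    shared_secret = private_key.exchange(peer_public_key)
    # exchange() raises ValueError if the peer key is invalid for these parameters.
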
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/dsa.py b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/dsa.py
index 48886e4..0c5faba 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/dsa.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/dsa.py
@@ -7,12 +7,15 @@
from cryptography import utils
from cryptography.exceptions import InvalidSignature
from cryptography.hazmat.backends.openssl.utils import (
- _calculate_digest_and_algorithm, _check_not_prehashed,
- _warn_sign_verify_deprecated
+ _calculate_digest_and_algorithm,
+ _check_not_prehashed,
+ _warn_sign_verify_deprecated,
)
-from cryptography.hazmat.primitives import hashes, serialization
+from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import (
- AsymmetricSignatureContext, AsymmetricVerificationContext, dsa
+ AsymmetricSignatureContext,
+ AsymmetricVerificationContext,
+ dsa,
)
@@ -29,7 +32,7 @@ def _dsa_sig_sign(backend, private_key, data):
backend.openssl_assert(res == 1)
backend.openssl_assert(buflen[0])
- return backend._ffi.buffer(sig_buf)[:buflen[0]]
+ return backend._ffi.buffer(sig_buf)[: buflen[0]]
def _dsa_sig_verify(backend, public_key, signature, data):
@@ -98,7 +101,7 @@ def parameter_numbers(self):
return dsa.DSAParameterNumbers(
p=self._backend._bn_to_int(p[0]),
q=self._backend._bn_to_int(q[0]),
- g=self._backend._bn_to_int(g[0])
+ g=self._backend._bn_to_int(g[0]),
)
def generate_private_key(self):
@@ -144,11 +147,11 @@ def private_numbers(self):
parameter_numbers=dsa.DSAParameterNumbers(
p=self._backend._bn_to_int(p[0]),
q=self._backend._bn_to_int(q[0]),
- g=self._backend._bn_to_int(g[0])
+ g=self._backend._bn_to_int(g[0]),
),
- y=self._backend._bn_to_int(pub_key[0])
+ y=self._backend._bn_to_int(pub_key[0]),
),
- x=self._backend._bn_to_int(priv_key[0])
+ x=self._backend._bn_to_int(priv_key[0]),
)
def public_key(self):
@@ -183,8 +186,9 @@ def private_bytes(self, encoding, format, encryption_algorithm):
encoding,
format,
encryption_algorithm,
+ self,
self._evp_pkey,
- self._dsa_cdata
+ self._dsa_cdata,
)
def sign(self, data, algorithm):
@@ -211,8 +215,7 @@ def __init__(self, backend, dsa_cdata, evp_pkey):
def verifier(self, signature, signature_algorithm):
_warn_sign_verify_deprecated()
- if not isinstance(signature, bytes):
- raise TypeError("signature must be bytes.")
+ utils._check_bytes("signature", signature)
_check_not_prehashed(signature_algorithm)
return _DSAVerificationContext(
@@ -236,9 +239,9 @@ def public_numbers(self):
parameter_numbers=dsa.DSAParameterNumbers(
p=self._backend._bn_to_int(p[0]),
q=self._backend._bn_to_int(q[0]),
- g=self._backend._bn_to_int(g[0])
+ g=self._backend._bn_to_int(g[0]),
),
- y=self._backend._bn_to_int(pub_key[0])
+ y=self._backend._bn_to_int(pub_key[0]),
)
def parameters(self):
@@ -249,17 +252,8 @@ def parameters(self):
return _DSAParameters(self._backend, dsa_cdata)
def public_bytes(self, encoding, format):
- if format is serialization.PublicFormat.PKCS1:
- raise ValueError(
- "DSA public keys do not support PKCS1 serialization"
- )
-
return self._backend._public_key_bytes(
- encoding,
- format,
- self,
- self._evp_pkey,
- None
+ encoding, format, self, self._evp_pkey, None
)
def verify(self, signature, data, algorithm):
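
The dsa.py hunks are mostly formatting; the visible behaviour changes are that verifier() checks the signature via utils._check_bytes and that public_bytes no longer rejects PKCS1 locally (that validation presumably lives in the shared serialization helpers now). The public sign/verify flow is untouched; a short sketch:

    from cryptography.exceptions import InvalidSignature
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.asymmetric import dsa

    private_key = dsa.generate_private_key(key_size=2048, backend=default_backend())
    signature = private_key.sign(b"payload", hashes.SHA256())
    try:
        private_key.public_key().verify(signature, b"payload", hashes.SHA256())
    except InvalidSignature:
        print("signature did not verify")
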
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/ec.py b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/ec.py
index 69da234..bf61bcf 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/ec.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/ec.py
@@ -6,15 +6,20 @@
from cryptography import utils
from cryptography.exceptions import (
- InvalidSignature, UnsupportedAlgorithm, _Reasons
+ InvalidSignature,
+ UnsupportedAlgorithm,
+ _Reasons,
)
from cryptography.hazmat.backends.openssl.utils import (
- _calculate_digest_and_algorithm, _check_not_prehashed,
- _warn_sign_verify_deprecated
+ _calculate_digest_and_algorithm,
+ _check_not_prehashed,
+ _warn_sign_verify_deprecated,
)
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import (
- AsymmetricSignatureContext, AsymmetricVerificationContext, ec
+ AsymmetricSignatureContext,
+ AsymmetricVerificationContext,
+ ec,
)
@@ -22,7 +27,8 @@ def _check_signature_algorithm(signature_algorithm):
if not isinstance(signature_algorithm, ec.ECDSA):
raise UnsupportedAlgorithm(
"Unsupported elliptic curve signature algorithm.",
- _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
+ _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
+ )
def _ec_key_curve_sn(backend, ec_key):
@@ -34,14 +40,24 @@ def _ec_key_curve_sn(backend, ec_key):
# an error for now.
if nid == backend._lib.NID_undef:
raise NotImplementedError(
- "ECDSA certificates with unnamed curves are unsupported "
- "at this time"
+ "ECDSA keys with unnamed curves are unsupported " "at this time"
+ )
+
+ # This is like the above check, but it also catches the case where you
+ # explicitly encoded a curve with the same parameters as a named curve.
+ # Don't do that.
+ if (
+ backend._lib.CRYPTOGRAPHY_OPENSSL_102U_OR_GREATER
+ and backend._lib.EC_GROUP_get_asn1_flag(group) == 0
+ ):
+ raise NotImplementedError(
+ "ECDSA keys with unnamed curves are unsupported " "at this time"
)
curve_name = backend._lib.OBJ_nid2sn(nid)
backend.openssl_assert(curve_name != backend._ffi.NULL)
- sn = backend._ffi.string(curve_name).decode('ascii')
+ sn = backend._ffi.string(curve_name).decode("ascii")
return sn
@@ -62,8 +78,8 @@ def _sn_to_elliptic_curve(backend, sn):
return ec._CURVE_TYPES[sn]()
except KeyError:
raise UnsupportedAlgorithm(
- "{0} is not a supported elliptic curve".format(sn),
- _Reasons.UNSUPPORTED_ELLIPTIC_CURVE
+ "{} is not a supported elliptic curve".format(sn),
+ _Reasons.UNSUPPORTED_ELLIPTIC_CURVE,
)
@@ -77,7 +93,7 @@ def _ecdsa_sig_sign(backend, private_key, data):
0, data, len(data), sigbuf, siglen_ptr, private_key._ec_key
)
backend.openssl_assert(res == 1)
- return backend._ffi.buffer(sigbuf)[:siglen_ptr[0]]
+ return backend._ffi.buffer(sigbuf)[: siglen_ptr[0]]
def _ecdsa_sig_verify(backend, public_key, signature, data):
@@ -127,12 +143,12 @@ def verify(self):
class _EllipticCurvePrivateKey(object):
def __init__(self, backend, ec_key_cdata, evp_pkey):
self._backend = backend
- _mark_asn1_named_ec_curve(backend, ec_key_cdata)
self._ec_key = ec_key_cdata
self._evp_pkey = evp_pkey
sn = _ec_key_curve_sn(backend, ec_key_cdata)
self._curve = _sn_to_elliptic_curve(backend, sn)
+ _mark_asn1_named_ec_curve(backend, ec_key_cdata)
curve = utils.read_only_property("_curve")
@@ -156,7 +172,7 @@ def exchange(self, algorithm, peer_public_key):
):
raise UnsupportedAlgorithm(
"This backend does not support the ECDH algorithm.",
- _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM
+ _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
)
if peer_public_key.curve.name != self.curve.name:
@@ -183,12 +199,7 @@ def public_key(self):
self._backend.openssl_assert(group != self._backend._ffi.NULL)
curve_nid = self._backend._lib.EC_GROUP_get_curve_name(group)
-
- public_ec_key = self._backend._lib.EC_KEY_new_by_curve_name(curve_nid)
- self._backend.openssl_assert(public_ec_key != self._backend._ffi.NULL)
- public_ec_key = self._backend._ffi.gc(
- public_ec_key, self._backend._lib.EC_KEY_free
- )
+ public_ec_key = self._backend._ec_key_new_by_curve_nid(curve_nid)
point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key)
self._backend.openssl_assert(point != self._backend._ffi.NULL)
@@ -205,7 +216,7 @@ def private_numbers(self):
private_value = self._backend._bn_to_int(bn)
return ec.EllipticCurvePrivateNumbers(
private_value=private_value,
- public_numbers=self.public_key().public_numbers()
+ public_numbers=self.public_key().public_numbers(),
)
def private_bytes(self, encoding, format, encryption_algorithm):
@@ -213,8 +224,9 @@ def private_bytes(self, encoding, format, encryption_algorithm):
encoding,
format,
encryption_algorithm,
+ self,
self._evp_pkey,
- self._ec_key
+ self._ec_key,
)
def sign(self, data, signature_algorithm):
@@ -229,12 +241,12 @@ def sign(self, data, signature_algorithm):
class _EllipticCurvePublicKey(object):
def __init__(self, backend, ec_key_cdata, evp_pkey):
self._backend = backend
- _mark_asn1_named_ec_curve(backend, ec_key_cdata)
self._ec_key = ec_key_cdata
self._evp_pkey = evp_pkey
sn = _ec_key_curve_sn(backend, ec_key_cdata)
self._curve = _sn_to_elliptic_curve(backend, sn)
+ _mark_asn1_named_ec_curve(backend, ec_key_cdata)
curve = utils.read_only_property("_curve")
@@ -244,8 +256,7 @@ def key_size(self):
def verifier(self, signature, signature_algorithm):
_warn_sign_verify_deprecated()
- if not isinstance(signature, bytes):
- raise TypeError("signature must be bytes.")
+ utils._check_bytes("signature", signature)
_check_signature_algorithm(signature_algorithm)
_check_not_prehashed(signature_algorithm.algorithm)
@@ -254,8 +265,8 @@ def verifier(self, signature, signature_algorithm):
)
def public_numbers(self):
- get_func, group = (
- self._backend._ec_key_determine_group_get_func(self._ec_key)
+ get_func, group = self._backend._ec_key_determine_group_get_func(
+ self._ec_key
)
point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key)
self._backend.openssl_assert(point != self._backend._ffi.NULL)
@@ -270,25 +281,53 @@ def public_numbers(self):
x = self._backend._bn_to_int(bn_x)
y = self._backend._bn_to_int(bn_y)
- return ec.EllipticCurvePublicNumbers(
- x=x,
- y=y,
- curve=self._curve
- )
+ return ec.EllipticCurvePublicNumbers(x=x, y=y, curve=self._curve)
- def public_bytes(self, encoding, format):
- if format is serialization.PublicFormat.PKCS1:
- raise ValueError(
- "EC public keys do not support PKCS1 serialization"
+ def _encode_point(self, format):
+ if format is serialization.PublicFormat.CompressedPoint:
+ conversion = self._backend._lib.POINT_CONVERSION_COMPRESSED
+ else:
+ assert format is serialization.PublicFormat.UncompressedPoint
+ conversion = self._backend._lib.POINT_CONVERSION_UNCOMPRESSED
+
+ group = self._backend._lib.EC_KEY_get0_group(self._ec_key)
+ self._backend.openssl_assert(group != self._backend._ffi.NULL)
+ point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key)
+ self._backend.openssl_assert(point != self._backend._ffi.NULL)
+ with self._backend._tmp_bn_ctx() as bn_ctx:
+ buflen = self._backend._lib.EC_POINT_point2oct(
+ group, point, conversion, self._backend._ffi.NULL, 0, bn_ctx
+ )
+ self._backend.openssl_assert(buflen > 0)
+ buf = self._backend._ffi.new("char[]", buflen)
+ res = self._backend._lib.EC_POINT_point2oct(
+ group, point, conversion, buf, buflen, bn_ctx
)
+ self._backend.openssl_assert(buflen == res)
- return self._backend._public_key_bytes(
- encoding,
- format,
- self,
- self._evp_pkey,
- None
- )
+ return self._backend._ffi.buffer(buf)[:]
+
+ def public_bytes(self, encoding, format):
+
+ if (
+ encoding is serialization.Encoding.X962
+ or format is serialization.PublicFormat.CompressedPoint
+ or format is serialization.PublicFormat.UncompressedPoint
+ ):
+ if encoding is not serialization.Encoding.X962 or format not in (
+ serialization.PublicFormat.CompressedPoint,
+ serialization.PublicFormat.UncompressedPoint,
+ ):
+ raise ValueError(
+ "X962 encoding must be used with CompressedPoint or "
+ "UncompressedPoint format"
+ )
+
+ return self._encode_point(format)
+ else:
+ return self._backend._public_key_bytes(
+ encoding, format, self, self._evp_pkey, None
+ )
def verify(self, signature, data, signature_algorithm):
_check_signature_algorithm(signature_algorithm)
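
Besides the stricter unnamed-curve check, ec.py gains _encode_point and a public_bytes branch for X9.62 point serialization. Assuming a cryptography release that exposes Encoding.X962 (2.5 and later), the new branch is exercised like this:

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives.asymmetric import ec
    from cryptography.hazmat.primitives.serialization import Encoding, PublicFormat

    private_key = ec.generate_private_key(ec.SECP256R1(), default_backend())
    compressed = private_key.public_key().public_bytes(
        Encoding.X962, PublicFormat.CompressedPoint
    )
    # For P-256 this is 33 bytes: one prefix byte plus the 32-byte x coordinate.
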
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/ed25519.py b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/ed25519.py
new file mode 100644
index 0000000..7565337
--- /dev/null
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/ed25519.py
@@ -0,0 +1,145 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+from cryptography import exceptions, utils
+from cryptography.hazmat.primitives import serialization
+from cryptography.hazmat.primitives.asymmetric.ed25519 import (
+ Ed25519PrivateKey,
+ Ed25519PublicKey,
+ _ED25519_KEY_SIZE,
+ _ED25519_SIG_SIZE,
+)
+
+
+@utils.register_interface(Ed25519PublicKey)
+class _Ed25519PublicKey(object):
+ def __init__(self, backend, evp_pkey):
+ self._backend = backend
+ self._evp_pkey = evp_pkey
+
+ def public_bytes(self, encoding, format):
+ if (
+ encoding is serialization.Encoding.Raw
+ or format is serialization.PublicFormat.Raw
+ ):
+ if (
+ encoding is not serialization.Encoding.Raw
+ or format is not serialization.PublicFormat.Raw
+ ):
+ raise ValueError(
+ "When using Raw both encoding and format must be Raw"
+ )
+
+ return self._raw_public_bytes()
+
+ return self._backend._public_key_bytes(
+ encoding, format, self, self._evp_pkey, None
+ )
+
+ def _raw_public_bytes(self):
+ buf = self._backend._ffi.new("unsigned char []", _ED25519_KEY_SIZE)
+ buflen = self._backend._ffi.new("size_t *", _ED25519_KEY_SIZE)
+ res = self._backend._lib.EVP_PKEY_get_raw_public_key(
+ self._evp_pkey, buf, buflen
+ )
+ self._backend.openssl_assert(res == 1)
+ self._backend.openssl_assert(buflen[0] == _ED25519_KEY_SIZE)
+ return self._backend._ffi.buffer(buf, _ED25519_KEY_SIZE)[:]
+
+ def verify(self, signature, data):
+ evp_md_ctx = self._backend._lib.Cryptography_EVP_MD_CTX_new()
+ self._backend.openssl_assert(evp_md_ctx != self._backend._ffi.NULL)
+ evp_md_ctx = self._backend._ffi.gc(
+ evp_md_ctx, self._backend._lib.Cryptography_EVP_MD_CTX_free
+ )
+ res = self._backend._lib.EVP_DigestVerifyInit(
+ evp_md_ctx,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ self._evp_pkey,
+ )
+ self._backend.openssl_assert(res == 1)
+ res = self._backend._lib.EVP_DigestVerify(
+ evp_md_ctx, signature, len(signature), data, len(data)
+ )
+ if res != 1:
+ self._backend._consume_errors()
+ raise exceptions.InvalidSignature
+
+
+@utils.register_interface(Ed25519PrivateKey)
+class _Ed25519PrivateKey(object):
+ def __init__(self, backend, evp_pkey):
+ self._backend = backend
+ self._evp_pkey = evp_pkey
+
+ def public_key(self):
+ buf = self._backend._ffi.new("unsigned char []", _ED25519_KEY_SIZE)
+ buflen = self._backend._ffi.new("size_t *", _ED25519_KEY_SIZE)
+ res = self._backend._lib.EVP_PKEY_get_raw_public_key(
+ self._evp_pkey, buf, buflen
+ )
+ self._backend.openssl_assert(res == 1)
+ self._backend.openssl_assert(buflen[0] == _ED25519_KEY_SIZE)
+ public_bytes = self._backend._ffi.buffer(buf)[:]
+ return self._backend.ed25519_load_public_bytes(public_bytes)
+
+ def sign(self, data):
+ evp_md_ctx = self._backend._lib.Cryptography_EVP_MD_CTX_new()
+ self._backend.openssl_assert(evp_md_ctx != self._backend._ffi.NULL)
+ evp_md_ctx = self._backend._ffi.gc(
+ evp_md_ctx, self._backend._lib.Cryptography_EVP_MD_CTX_free
+ )
+ res = self._backend._lib.EVP_DigestSignInit(
+ evp_md_ctx,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ self._evp_pkey,
+ )
+ self._backend.openssl_assert(res == 1)
+ buf = self._backend._ffi.new("unsigned char[]", _ED25519_SIG_SIZE)
+ buflen = self._backend._ffi.new("size_t *", len(buf))
+ res = self._backend._lib.EVP_DigestSign(
+ evp_md_ctx, buf, buflen, data, len(data)
+ )
+ self._backend.openssl_assert(res == 1)
+ self._backend.openssl_assert(buflen[0] == _ED25519_SIG_SIZE)
+ return self._backend._ffi.buffer(buf, buflen[0])[:]
+
+ def private_bytes(self, encoding, format, encryption_algorithm):
+ if (
+ encoding is serialization.Encoding.Raw
+ or format is serialization.PublicFormat.Raw
+ ):
+ if (
+ format is not serialization.PrivateFormat.Raw
+ or encoding is not serialization.Encoding.Raw
+ or not isinstance(
+ encryption_algorithm, serialization.NoEncryption
+ )
+ ):
+ raise ValueError(
+ "When using Raw both encoding and format must be Raw "
+ "and encryption_algorithm must be NoEncryption()"
+ )
+
+ return self._raw_private_bytes()
+
+ return self._backend._private_key_bytes(
+ encoding, format, encryption_algorithm, self, self._evp_pkey, None
+ )
+
+ def _raw_private_bytes(self):
+ buf = self._backend._ffi.new("unsigned char []", _ED25519_KEY_SIZE)
+ buflen = self._backend._ffi.new("size_t *", _ED25519_KEY_SIZE)
+ res = self._backend._lib.EVP_PKEY_get_raw_private_key(
+ self._evp_pkey, buf, buflen
+ )
+ self._backend.openssl_assert(res == 1)
+ self._backend.openssl_assert(buflen[0] == _ED25519_KEY_SIZE)
+ return self._backend._ffi.buffer(buf, _ED25519_KEY_SIZE)[:]
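
ed25519.py is a new backend module; its public counterpart is Ed25519PrivateKey/Ed25519PublicKey in cryptography.hazmat.primitives.asymmetric.ed25519, available when the linked OpenSSL is 1.1.1 or newer. A minimal sketch of the API these classes back:

    from cryptography.exceptions import InvalidSignature
    from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PrivateKey

    private_key = Ed25519PrivateKey.generate()
    signature = private_key.sign(b"payload")  # 64-byte Ed25519 signature
    try:
        private_key.public_key().verify(signature, b"payload")
    except InvalidSignature:
        print("signature did not verify")
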
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/ed448.py b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/ed448.py
new file mode 100644
index 0000000..4a8dab1
--- /dev/null
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/ed448.py
@@ -0,0 +1,146 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+from cryptography import exceptions, utils
+from cryptography.hazmat.primitives import serialization
+from cryptography.hazmat.primitives.asymmetric.ed448 import (
+ Ed448PrivateKey,
+ Ed448PublicKey,
+)
+
+_ED448_KEY_SIZE = 57
+_ED448_SIG_SIZE = 114
+
+
+@utils.register_interface(Ed448PublicKey)
+class _Ed448PublicKey(object):
+ def __init__(self, backend, evp_pkey):
+ self._backend = backend
+ self._evp_pkey = evp_pkey
+
+ def public_bytes(self, encoding, format):
+ if (
+ encoding is serialization.Encoding.Raw
+ or format is serialization.PublicFormat.Raw
+ ):
+ if (
+ encoding is not serialization.Encoding.Raw
+ or format is not serialization.PublicFormat.Raw
+ ):
+ raise ValueError(
+ "When using Raw both encoding and format must be Raw"
+ )
+
+ return self._raw_public_bytes()
+
+ return self._backend._public_key_bytes(
+ encoding, format, self, self._evp_pkey, None
+ )
+
+ def _raw_public_bytes(self):
+ buf = self._backend._ffi.new("unsigned char []", _ED448_KEY_SIZE)
+ buflen = self._backend._ffi.new("size_t *", _ED448_KEY_SIZE)
+ res = self._backend._lib.EVP_PKEY_get_raw_public_key(
+ self._evp_pkey, buf, buflen
+ )
+ self._backend.openssl_assert(res == 1)
+ self._backend.openssl_assert(buflen[0] == _ED448_KEY_SIZE)
+ return self._backend._ffi.buffer(buf, _ED448_KEY_SIZE)[:]
+
+ def verify(self, signature, data):
+ evp_md_ctx = self._backend._lib.Cryptography_EVP_MD_CTX_new()
+ self._backend.openssl_assert(evp_md_ctx != self._backend._ffi.NULL)
+ evp_md_ctx = self._backend._ffi.gc(
+ evp_md_ctx, self._backend._lib.Cryptography_EVP_MD_CTX_free
+ )
+ res = self._backend._lib.EVP_DigestVerifyInit(
+ evp_md_ctx,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ self._evp_pkey,
+ )
+ self._backend.openssl_assert(res == 1)
+ res = self._backend._lib.EVP_DigestVerify(
+ evp_md_ctx, signature, len(signature), data, len(data)
+ )
+ if res != 1:
+ self._backend._consume_errors()
+ raise exceptions.InvalidSignature
+
+
+@utils.register_interface(Ed448PrivateKey)
+class _Ed448PrivateKey(object):
+ def __init__(self, backend, evp_pkey):
+ self._backend = backend
+ self._evp_pkey = evp_pkey
+
+ def public_key(self):
+ buf = self._backend._ffi.new("unsigned char []", _ED448_KEY_SIZE)
+ buflen = self._backend._ffi.new("size_t *", _ED448_KEY_SIZE)
+ res = self._backend._lib.EVP_PKEY_get_raw_public_key(
+ self._evp_pkey, buf, buflen
+ )
+ self._backend.openssl_assert(res == 1)
+ self._backend.openssl_assert(buflen[0] == _ED448_KEY_SIZE)
+ public_bytes = self._backend._ffi.buffer(buf)[:]
+ return self._backend.ed448_load_public_bytes(public_bytes)
+
+ def sign(self, data):
+ evp_md_ctx = self._backend._lib.Cryptography_EVP_MD_CTX_new()
+ self._backend.openssl_assert(evp_md_ctx != self._backend._ffi.NULL)
+ evp_md_ctx = self._backend._ffi.gc(
+ evp_md_ctx, self._backend._lib.Cryptography_EVP_MD_CTX_free
+ )
+ res = self._backend._lib.EVP_DigestSignInit(
+ evp_md_ctx,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ self._evp_pkey,
+ )
+ self._backend.openssl_assert(res == 1)
+ buf = self._backend._ffi.new("unsigned char[]", _ED448_SIG_SIZE)
+ buflen = self._backend._ffi.new("size_t *", len(buf))
+ res = self._backend._lib.EVP_DigestSign(
+ evp_md_ctx, buf, buflen, data, len(data)
+ )
+ self._backend.openssl_assert(res == 1)
+ self._backend.openssl_assert(buflen[0] == _ED448_SIG_SIZE)
+ return self._backend._ffi.buffer(buf, buflen[0])[:]
+
+ def private_bytes(self, encoding, format, encryption_algorithm):
+ if (
+ encoding is serialization.Encoding.Raw
+ or format is serialization.PublicFormat.Raw
+ ):
+ if (
+ format is not serialization.PrivateFormat.Raw
+ or encoding is not serialization.Encoding.Raw
+ or not isinstance(
+ encryption_algorithm, serialization.NoEncryption
+ )
+ ):
+ raise ValueError(
+ "When using Raw both encoding and format must be Raw "
+ "and encryption_algorithm must be NoEncryption()"
+ )
+
+ return self._raw_private_bytes()
+
+ return self._backend._private_key_bytes(
+ encoding, format, encryption_algorithm, self, self._evp_pkey, None
+ )
+
+ def _raw_private_bytes(self):
+ buf = self._backend._ffi.new("unsigned char []", _ED448_KEY_SIZE)
+ buflen = self._backend._ffi.new("size_t *", _ED448_KEY_SIZE)
+ res = self._backend._lib.EVP_PKEY_get_raw_private_key(
+ self._evp_pkey, buf, buflen
+ )
+ self._backend.openssl_assert(res == 1)
+ self._backend.openssl_assert(buflen[0] == _ED448_KEY_SIZE)
+ return self._backend._ffi.buffer(buf, _ED448_KEY_SIZE)[:]
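
ed448.py mirrors the Ed25519 backend, with 57-byte keys and 114-byte signatures per the _ED448_KEY_SIZE/_ED448_SIG_SIZE constants above; the public entry point is Ed448PrivateKey:

    from cryptography.hazmat.primitives.asymmetric.ed448 import Ed448PrivateKey

    private_key = Ed448PrivateKey.generate()
    signature = private_key.sign(b"payload")                 # 114 bytes
    private_key.public_key().verify(signature, b"payload")   # raises InvalidSignature on failure
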
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/encode_asn1.py b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/encode_asn1.py
index 89b2f7f..88d709d 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/encode_asn1.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/encode_asn1.py
@@ -11,10 +11,16 @@
from cryptography import utils, x509
from cryptography.hazmat.backends.openssl.decode_asn1 import (
- _CRL_ENTRY_REASON_ENUM_TO_CODE, _DISTPOINT_TYPE_FULLNAME,
- _DISTPOINT_TYPE_RELATIVENAME
+ _CRL_ENTRY_REASON_ENUM_TO_CODE,
+ _DISTPOINT_TYPE_FULLNAME,
+ _DISTPOINT_TYPE_RELATIVENAME,
+)
+from cryptography.x509.name import _ASN1Type
+from cryptography.x509.oid import (
+ CRLEntryExtensionOID,
+ ExtensionOID,
+ OCSPExtensionOID,
)
-from cryptography.x509.oid import CRLEntryExtensionOID, ExtensionOID
def _encode_asn1_int(backend, x):
@@ -43,12 +49,12 @@ def _encode_asn1_int_gc(backend, x):
return i
-def _encode_asn1_str(backend, data, length):
+def _encode_asn1_str(backend, data):
"""
Create an ASN1_OCTET_STRING from a Python byte string.
"""
s = backend._lib.ASN1_OCTET_STRING_new()
- res = backend._lib.ASN1_OCTET_STRING_set(s, data, length)
+ res = backend._lib.ASN1_OCTET_STRING_set(s, data, len(data))
backend.openssl_assert(res == 1)
return s
@@ -67,8 +73,8 @@ def _encode_asn1_utf8_str(backend, string):
return s
-def _encode_asn1_str_gc(backend, data, length):
- s = _encode_asn1_str(backend, data, length)
+def _encode_asn1_str_gc(backend, data):
+ s = _encode_asn1_str(backend, data)
s = backend._ffi.gc(s, backend._lib.ASN1_OCTET_STRING_free)
return s
@@ -91,7 +97,8 @@ def _encode_name(backend, name):
name_entry, backend._lib.X509_NAME_ENTRY_free
)
res = backend._lib.X509_NAME_add_entry(
- subject, name_entry, -1, set_flag)
+ subject, name_entry, -1, set_flag
+ )
backend.openssl_assert(res == 1)
set_flag = -1
return subject
@@ -105,22 +112,28 @@ def _encode_name_gc(backend, attributes):
def _encode_sk_name_entry(backend, attributes):
"""
- The sk_X50_NAME_ENTRY created will not be gc'd.
+ The sk_X509_NAME_ENTRY created will not be gc'd.
"""
stack = backend._lib.sk_X509_NAME_ENTRY_new_null()
for attribute in attributes:
name_entry = _encode_name_entry(backend, attribute)
res = backend._lib.sk_X509_NAME_ENTRY_push(stack, name_entry)
- backend.openssl_assert(res == 1)
+ backend.openssl_assert(res >= 1)
return stack
def _encode_name_entry(backend, attribute):
- value = attribute.value.encode('utf8')
+ if attribute._type is _ASN1Type.BMPString:
+ value = attribute.value.encode("utf_16_be")
+ elif attribute._type is _ASN1Type.UniversalString:
+ value = attribute.value.encode("utf_32_be")
+ else:
+ value = attribute.value.encode("utf8")
+
obj = _txt2obj_gc(backend, attribute.oid.dotted_string)
name_entry = backend._lib.X509_NAME_ENTRY_create_by_OBJ(
- backend._ffi.NULL, obj, attribute._type.value, value, -1
+ backend._ffi.NULL, obj, attribute._type.value, value, len(value)
)
return name_entry
@@ -129,6 +142,28 @@ def _encode_crl_number_delta_crl_indicator(backend, ext):
return _encode_asn1_int_gc(backend, ext.crl_number)
+def _encode_issuing_dist_point(backend, ext):
+ idp = backend._lib.ISSUING_DIST_POINT_new()
+ backend.openssl_assert(idp != backend._ffi.NULL)
+ idp = backend._ffi.gc(idp, backend._lib.ISSUING_DIST_POINT_free)
+ idp.onlyuser = 255 if ext.only_contains_user_certs else 0
+ idp.onlyCA = 255 if ext.only_contains_ca_certs else 0
+ idp.indirectCRL = 255 if ext.indirect_crl else 0
+ idp.onlyattr = 255 if ext.only_contains_attribute_certs else 0
+ if ext.only_some_reasons:
+ idp.onlysomereasons = _encode_reasonflags(
+ backend, ext.only_some_reasons
+ )
+
+ if ext.full_name:
+ idp.distpoint = _encode_full_name(backend, ext.full_name)
+
+ if ext.relative_name:
+ idp.distpoint = _encode_relative_name(backend, ext.relative_name)
+
+ return idp
+
+
def _encode_crl_reason(backend, crl_reason):
asn1enum = backend._lib.ASN1_ENUMERATED_new()
backend.openssl_assert(asn1enum != backend._ffi.NULL)
@@ -143,9 +178,8 @@ def _encode_crl_reason(backend, crl_reason):
def _encode_invalidity_date(backend, invalidity_date):
time = backend._lib.ASN1_GENERALIZEDTIME_set(
- backend._ffi.NULL, calendar.timegm(
- invalidity_date.invalidity_date.timetuple()
- )
+ backend._ffi.NULL,
+ calendar.timegm(invalidity_date.invalidity_date.timetuple()),
)
backend.openssl_assert(time != backend._ffi.NULL)
time = backend._ffi.gc(time, backend._lib.ASN1_GENERALIZEDTIME_free)
@@ -179,7 +213,6 @@ def _encode_certificate_policies(backend, certificate_policies):
pqi.d.cpsuri = _encode_asn1_str(
backend,
qualifier.encode("ascii"),
- len(qualifier.encode("ascii"))
)
else:
assert isinstance(qualifier, x509.UserNotice)
@@ -227,7 +260,7 @@ def _txt2obj(backend, name):
Converts a Python string with an ASN.1 object ID in dotted form to a
ASN1_OBJECT.
"""
- name = name.encode('ascii')
+ name = name.encode("ascii")
obj = backend._lib.OBJ_txt2obj(name, 1)
backend.openssl_assert(obj != backend._ffi.NULL)
return obj
@@ -240,11 +273,8 @@ def _txt2obj_gc(backend, name):
def _encode_ocsp_nocheck(backend, ext):
- """
- The OCSP No Check extension is defined as a null ASN.1 value embedded in
- an ASN.1 string.
- """
- return _encode_asn1_str_gc(backend, b"\x05\x00", 2)
+ # Doesn't need to be GC'd
+ return backend._lib.ASN1_NULL_new()
def _encode_key_usage(backend, key_usage):
@@ -287,7 +317,6 @@ def _encode_authority_key_identifier(backend, authority_keyid):
akid.keyid = _encode_asn1_str(
backend,
authority_keyid.key_identifier,
- len(authority_keyid.key_identifier)
)
if authority_keyid.authority_cert_issuer is not None:
@@ -317,20 +346,27 @@ def _encode_basic_constraints(backend, basic_constraints):
return constraints
-def _encode_authority_information_access(backend, authority_info_access):
+def _encode_information_access(backend, info_access):
aia = backend._lib.sk_ACCESS_DESCRIPTION_new_null()
backend.openssl_assert(aia != backend._ffi.NULL)
aia = backend._ffi.gc(
- aia, backend._lib.sk_ACCESS_DESCRIPTION_free
+ aia,
+ lambda x: backend._lib.sk_ACCESS_DESCRIPTION_pop_free(
+ x,
+ backend._ffi.addressof(
+ backend._lib._original_lib, "ACCESS_DESCRIPTION_free"
+ ),
+ ),
)
- for access_description in authority_info_access:
+ for access_description in info_access:
ad = backend._lib.ACCESS_DESCRIPTION_new()
method = _txt2obj(
backend, access_description.access_method.dotted_string
)
- gn = _encode_general_name(backend, access_description.access_location)
+ _encode_general_name_preallocated(
+ backend, access_description.access_location, ad.location
+ )
ad.method = method
- ad.location = gn
res = backend._lib.sk_ACCESS_DESCRIPTION_push(aia, ad)
backend.openssl_assert(res >= 1)
@@ -357,12 +393,17 @@ def _encode_alt_name(backend, san):
def _encode_subject_key_identifier(backend, ski):
- return _encode_asn1_str_gc(backend, ski.digest, len(ski.digest))
+ return _encode_asn1_str_gc(backend, ski.digest)
def _encode_general_name(backend, name):
+ gn = backend._lib.GENERAL_NAME_new()
+ _encode_general_name_preallocated(backend, name, gn)
+ return gn
+
+
+def _encode_general_name_preallocated(backend, name, gn):
if isinstance(name, x509.DNSName):
- gn = backend._lib.GENERAL_NAME_new()
backend.openssl_assert(gn != backend._ffi.NULL)
gn.type = backend._lib.GEN_DNS
@@ -376,46 +417,40 @@ def _encode_general_name(backend, name):
backend.openssl_assert(res == 1)
gn.d.dNSName = ia5
elif isinstance(name, x509.RegisteredID):
- gn = backend._lib.GENERAL_NAME_new()
backend.openssl_assert(gn != backend._ffi.NULL)
gn.type = backend._lib.GEN_RID
obj = backend._lib.OBJ_txt2obj(
- name.value.dotted_string.encode('ascii'), 1
+ name.value.dotted_string.encode("ascii"), 1
)
backend.openssl_assert(obj != backend._ffi.NULL)
gn.d.registeredID = obj
elif isinstance(name, x509.DirectoryName):
- gn = backend._lib.GENERAL_NAME_new()
backend.openssl_assert(gn != backend._ffi.NULL)
dir_name = _encode_name(backend, name.value)
gn.type = backend._lib.GEN_DIRNAME
gn.d.directoryName = dir_name
elif isinstance(name, x509.IPAddress):
- gn = backend._lib.GENERAL_NAME_new()
backend.openssl_assert(gn != backend._ffi.NULL)
if isinstance(name.value, ipaddress.IPv4Network):
- packed = (
- name.value.network_address.packed +
- utils.int_to_bytes(((1 << 32) - name.value.num_addresses), 4)
+ packed = name.value.network_address.packed + utils.int_to_bytes(
+ ((1 << 32) - name.value.num_addresses), 4
)
elif isinstance(name.value, ipaddress.IPv6Network):
- packed = (
- name.value.network_address.packed +
- utils.int_to_bytes((1 << 128) - name.value.num_addresses, 16)
+ packed = name.value.network_address.packed + utils.int_to_bytes(
+ (1 << 128) - name.value.num_addresses, 16
)
else:
packed = name.value.packed
- ipaddr = _encode_asn1_str(backend, packed, len(packed))
+ ipaddr = _encode_asn1_str(backend, packed)
gn.type = backend._lib.GEN_IPADD
gn.d.iPAddress = ipaddr
elif isinstance(name, x509.OtherName):
- gn = backend._lib.GENERAL_NAME_new()
backend.openssl_assert(gn != backend._ffi.NULL)
other_name = backend._lib.OTHERNAME_new()
backend.openssl_assert(other_name != backend._ffi.NULL)
type_id = backend._lib.OBJ_txt2obj(
- name.type_id.dotted_string.encode('ascii'), 1
+ name.type_id.dotted_string.encode("ascii"), 1
)
backend.openssl_assert(type_id != backend._ffi.NULL)
data = backend._ffi.new("unsigned char[]", name.value)
@@ -432,29 +467,23 @@ def _encode_general_name(backend, name):
gn.type = backend._lib.GEN_OTHERNAME
gn.d.otherName = other_name
elif isinstance(name, x509.RFC822Name):
- gn = backend._lib.GENERAL_NAME_new()
backend.openssl_assert(gn != backend._ffi.NULL)
# ia5strings are supposed to be ITU T.50 but to allow round-tripping
# of broken certs that encode utf8 we'll encode utf8 here too.
data = name.value.encode("utf8")
- asn1_str = _encode_asn1_str(backend, data, len(data))
+ asn1_str = _encode_asn1_str(backend, data)
gn.type = backend._lib.GEN_EMAIL
gn.d.rfc822Name = asn1_str
elif isinstance(name, x509.UniformResourceIdentifier):
- gn = backend._lib.GENERAL_NAME_new()
backend.openssl_assert(gn != backend._ffi.NULL)
# ia5strings are supposed to be ITU T.50 but to allow round-tripping
# of broken certs that encode utf8 we'll encode utf8 here too.
data = name.value.encode("utf8")
- asn1_str = _encode_asn1_str(backend, data, len(data))
+ asn1_str = _encode_asn1_str(backend, data)
gn.type = backend._lib.GEN_URI
gn.d.uniformResourceIdentifier = asn1_str
else:
- raise ValueError(
- "{0} is an unknown GeneralName type".format(name)
- )
-
- return gn
+ raise ValueError("{} is an unknown GeneralName type".format(name))
def _encode_extended_key_usage(backend, extended_key_usage):
@@ -480,6 +509,34 @@ def _encode_extended_key_usage(backend, extended_key_usage):
}
+def _encode_reasonflags(backend, reasons):
+ bitmask = backend._lib.ASN1_BIT_STRING_new()
+ backend.openssl_assert(bitmask != backend._ffi.NULL)
+ for reason in reasons:
+ res = backend._lib.ASN1_BIT_STRING_set_bit(
+ bitmask, _CRLREASONFLAGS[reason], 1
+ )
+ backend.openssl_assert(res == 1)
+
+ return bitmask
+
+
+def _encode_full_name(backend, full_name):
+ dpn = backend._lib.DIST_POINT_NAME_new()
+ backend.openssl_assert(dpn != backend._ffi.NULL)
+ dpn.type = _DISTPOINT_TYPE_FULLNAME
+ dpn.name.fullname = _encode_general_names(backend, full_name)
+ return dpn
+
+
+def _encode_relative_name(backend, relative_name):
+ dpn = backend._lib.DIST_POINT_NAME_new()
+ backend.openssl_assert(dpn != backend._ffi.NULL)
+ dpn.type = _DISTPOINT_TYPE_RELATIVENAME
+ dpn.name.relativename = _encode_sk_name_entry(backend, relative_name)
+ return dpn
+
+
def _encode_cdps_freshest_crl(backend, cdps):
cdp = backend._lib.sk_DIST_POINT_new_null()
cdp = backend._ffi.gc(cdp, backend._lib.sk_DIST_POINT_free)
@@ -488,30 +545,13 @@ def _encode_cdps_freshest_crl(backend, cdps):
backend.openssl_assert(dp != backend._ffi.NULL)
if point.reasons:
- bitmask = backend._lib.ASN1_BIT_STRING_new()
- backend.openssl_assert(bitmask != backend._ffi.NULL)
- dp.reasons = bitmask
- for reason in point.reasons:
- res = backend._lib.ASN1_BIT_STRING_set_bit(
- bitmask, _CRLREASONFLAGS[reason], 1
- )
- backend.openssl_assert(res == 1)
+ dp.reasons = _encode_reasonflags(backend, point.reasons)
if point.full_name:
- dpn = backend._lib.DIST_POINT_NAME_new()
- backend.openssl_assert(dpn != backend._ffi.NULL)
- dpn.type = _DISTPOINT_TYPE_FULLNAME
- dpn.name.fullname = _encode_general_names(backend, point.full_name)
- dp.distpoint = dpn
+ dp.distpoint = _encode_full_name(backend, point.full_name)
if point.relative_name:
- dpn = backend._lib.DIST_POINT_NAME_new()
- backend.openssl_assert(dpn != backend._ffi.NULL)
- dpn.type = _DISTPOINT_TYPE_RELATIVENAME
- relativename = _encode_sk_name_entry(backend, point.relative_name)
- backend.openssl_assert(relativename != backend._ffi.NULL)
- dpn.name.relativename = relativename
- dp.distpoint = dpn
+ dp.distpoint = _encode_relative_name(backend, point.relative_name)
if point.crl_issuer:
dp.CRLissuer = _encode_general_names(backend, point.crl_issuer)
@@ -569,6 +609,10 @@ def _encode_general_subtree(backend, subtrees):
return general_subtrees
+def _encode_nonce(backend, nonce):
+ return _encode_asn1_str_gc(backend, nonce.nonce)
+
+
_EXTENSION_ENCODE_HANDLERS = {
ExtensionOID.BASIC_CONSTRAINTS: _encode_basic_constraints,
ExtensionOID.SUBJECT_KEY_IDENTIFIER: _encode_subject_key_identifier,
@@ -578,9 +622,8 @@ def _encode_general_subtree(backend, subtrees):
ExtensionOID.EXTENDED_KEY_USAGE: _encode_extended_key_usage,
ExtensionOID.AUTHORITY_KEY_IDENTIFIER: _encode_authority_key_identifier,
ExtensionOID.CERTIFICATE_POLICIES: _encode_certificate_policies,
- ExtensionOID.AUTHORITY_INFORMATION_ACCESS: (
- _encode_authority_information_access
- ),
+ ExtensionOID.AUTHORITY_INFORMATION_ACCESS: _encode_information_access,
+ ExtensionOID.SUBJECT_INFORMATION_ACCESS: _encode_information_access,
ExtensionOID.CRL_DISTRIBUTION_POINTS: _encode_cdps_freshest_crl,
ExtensionOID.FRESHEST_CRL: _encode_cdps_freshest_crl,
ExtensionOID.INHIBIT_ANY_POLICY: _encode_inhibit_any_policy,
@@ -592,11 +635,11 @@ def _encode_general_subtree(backend, subtrees):
_CRL_EXTENSION_ENCODE_HANDLERS = {
ExtensionOID.ISSUER_ALTERNATIVE_NAME: _encode_alt_name,
ExtensionOID.AUTHORITY_KEY_IDENTIFIER: _encode_authority_key_identifier,
- ExtensionOID.AUTHORITY_INFORMATION_ACCESS: (
- _encode_authority_information_access
- ),
+ ExtensionOID.AUTHORITY_INFORMATION_ACCESS: _encode_information_access,
ExtensionOID.CRL_NUMBER: _encode_crl_number_delta_crl_indicator,
ExtensionOID.DELTA_CRL_INDICATOR: _encode_crl_number_delta_crl_indicator,
+ ExtensionOID.ISSUING_DISTRIBUTION_POINT: _encode_issuing_dist_point,
+ ExtensionOID.FRESHEST_CRL: _encode_cdps_freshest_crl,
}
_CRL_ENTRY_EXTENSION_ENCODE_HANDLERS = {
@@ -604,3 +647,11 @@ def _encode_general_subtree(backend, subtrees):
CRLEntryExtensionOID.CRL_REASON: _encode_crl_reason,
CRLEntryExtensionOID.INVALIDITY_DATE: _encode_invalidity_date,
}
+
+_OCSP_REQUEST_EXTENSION_ENCODE_HANDLERS = {
+ OCSPExtensionOID.NONCE: _encode_nonce,
+}
+
+_OCSP_BASICRESP_EXTENSION_ENCODE_HANDLERS = {
+ OCSPExtensionOID.NONCE: _encode_nonce,
+}
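
The encode_asn1 hunks above fold authority and subject information access into a single _encode_information_access handler and register OCSP nonce encoders. A minimal sketch of the public-API path that reaches that handler, assuming the bundled cryptography release (~2.7+); the key, name, and URI below are placeholders:

    import datetime

    from cryptography import x509
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.asymmetric import rsa
    from cryptography.x509.oid import AuthorityInformationAccessOID, NameOID

    key = rsa.generate_private_key(
        public_exponent=65537, key_size=2048, backend=default_backend()
    )
    name = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, u"example.test")])
    aia = x509.AuthorityInformationAccess([
        x509.AccessDescription(
            AuthorityInformationAccessOID.OCSP,
            x509.UniformResourceIdentifier(u"http://ocsp.example.test"),
        )
    ])
    cert = (
        x509.CertificateBuilder()
        .subject_name(name)
        .issuer_name(name)
        .public_key(key.public_key())
        .serial_number(x509.random_serial_number())
        .not_valid_before(datetime.datetime.utcnow())
        .not_valid_after(datetime.datetime.utcnow() + datetime.timedelta(days=1))
        # _encode_information_access serializes this extension; a
        # SubjectInformationAccess extension now goes through the same handler.
        .add_extension(aia, critical=False)
        .sign(key, hashes.SHA256(), default_backend())
    )
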
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/hashes.py b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/hashes.py
index 92ea53b..4403399 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/hashes.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/hashes.py
@@ -22,16 +22,17 @@ def __init__(self, backend, algorithm, ctx=None):
ctx = self._backend._ffi.gc(
ctx, self._backend._lib.Cryptography_EVP_MD_CTX_free
)
- name = self._backend._build_openssl_digest_name(algorithm)
- evp_md = self._backend._lib.EVP_get_digestbyname(name)
+ evp_md = self._backend._evp_md_from_algorithm(algorithm)
if evp_md == self._backend._ffi.NULL:
raise UnsupportedAlgorithm(
- "{0} is not a supported hash on this backend.".format(
- name),
- _Reasons.UNSUPPORTED_HASH
+ "{} is not a supported hash on this backend.".format(
+ algorithm.name
+ ),
+ _Reasons.UNSUPPORTED_HASH,
)
- res = self._backend._lib.EVP_DigestInit_ex(ctx, evp_md,
- self._backend._ffi.NULL)
+ res = self._backend._lib.EVP_DigestInit_ex(
+ ctx, evp_md, self._backend._ffi.NULL
+ )
self._backend.openssl_assert(res != 0)
self._ctx = ctx
@@ -48,14 +49,34 @@ def copy(self):
return _HashContext(self._backend, self.algorithm, ctx=copied_ctx)
def update(self, data):
- res = self._backend._lib.EVP_DigestUpdate(self._ctx, data, len(data))
+ data_ptr = self._backend._ffi.from_buffer(data)
+ res = self._backend._lib.EVP_DigestUpdate(
+ self._ctx, data_ptr, len(data)
+ )
self._backend.openssl_assert(res != 0)
def finalize(self):
- buf = self._backend._ffi.new("unsigned char[]",
- self._backend._lib.EVP_MAX_MD_SIZE)
- outlen = self._backend._ffi.new("unsigned int *")
- res = self._backend._lib.EVP_DigestFinal_ex(self._ctx, buf, outlen)
+ if isinstance(self.algorithm, hashes.ExtendableOutputFunction):
+ # extendable output functions use a different finalize
+ return self._finalize_xof()
+ else:
+ buf = self._backend._ffi.new(
+ "unsigned char[]", self._backend._lib.EVP_MAX_MD_SIZE
+ )
+ outlen = self._backend._ffi.new("unsigned int *")
+ res = self._backend._lib.EVP_DigestFinal_ex(self._ctx, buf, outlen)
+ self._backend.openssl_assert(res != 0)
+ self._backend.openssl_assert(
+ outlen[0] == self.algorithm.digest_size
+ )
+ return self._backend._ffi.buffer(buf)[: outlen[0]]
+
+ def _finalize_xof(self):
+ buf = self._backend._ffi.new(
+ "unsigned char[]", self.algorithm.digest_size
+ )
+ res = self._backend._lib.EVP_DigestFinalXOF(
+ self._ctx, buf, self.algorithm.digest_size
+ )
self._backend.openssl_assert(res != 0)
- self._backend.openssl_assert(outlen[0] == self.algorithm.digest_size)
- return self._backend._ffi.buffer(buf)[:outlen[0]]
+ return self._backend._ffi.buffer(buf)[: self.algorithm.digest_size]
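
The hashes.py change adds an EVP_DigestFinalXOF path for extendable-output functions. A short sketch of what that enables through the public API, assuming an OpenSSL 1.1.1 build and a cryptography release that ships SHAKE (~2.5+):

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes

    # SHAKE256 is an ExtendableOutputFunction, so finalize() takes the new
    # _finalize_xof branch and returns exactly digest_size bytes.
    digest = hashes.Hash(hashes.SHAKE256(digest_size=64), backend=default_backend())
    digest.update(b"data to hash")
    out = digest.finalize()
    assert len(out) == 64
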
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/hmac.py b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/hmac.py
index 3577f47..5024223 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/hmac.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/hmac.py
@@ -7,12 +7,13 @@
from cryptography import utils
from cryptography.exceptions import (
- InvalidSignature, UnsupportedAlgorithm, _Reasons
+ InvalidSignature,
+ UnsupportedAlgorithm,
+ _Reasons,
)
-from cryptography.hazmat.primitives import constant_time, hashes, mac
+from cryptography.hazmat.primitives import constant_time, hashes
-@utils.register_interface(mac.MACContext)
@utils.register_interface(hashes.HashContext)
class _HMACContext(object):
def __init__(self, backend, key, algorithm, ctx=None):
@@ -25,15 +26,17 @@ def __init__(self, backend, key, algorithm, ctx=None):
ctx = self._backend._ffi.gc(
ctx, self._backend._lib.Cryptography_HMAC_CTX_free
)
- name = self._backend._build_openssl_digest_name(algorithm)
- evp_md = self._backend._lib.EVP_get_digestbyname(name)
+ evp_md = self._backend._evp_md_from_algorithm(algorithm)
if evp_md == self._backend._ffi.NULL:
raise UnsupportedAlgorithm(
- "{0} is not a supported hash on this backend".format(name),
- _Reasons.UNSUPPORTED_HASH
+ "{} is not a supported hash on this backend".format(
+ algorithm.name
+ ),
+ _Reasons.UNSUPPORTED_HASH,
)
+ key_ptr = self._backend._ffi.from_buffer(key)
res = self._backend._lib.HMAC_Init_ex(
- ctx, key, len(key), evp_md, self._backend._ffi.NULL
+ ctx, key_ptr, len(key), evp_md, self._backend._ffi.NULL
)
self._backend.openssl_assert(res != 0)
@@ -55,17 +58,19 @@ def copy(self):
)
def update(self, data):
- res = self._backend._lib.HMAC_Update(self._ctx, data, len(data))
+ data_ptr = self._backend._ffi.from_buffer(data)
+ res = self._backend._lib.HMAC_Update(self._ctx, data_ptr, len(data))
self._backend.openssl_assert(res != 0)
def finalize(self):
- buf = self._backend._ffi.new("unsigned char[]",
- self._backend._lib.EVP_MAX_MD_SIZE)
+ buf = self._backend._ffi.new(
+ "unsigned char[]", self._backend._lib.EVP_MAX_MD_SIZE
+ )
outlen = self._backend._ffi.new("unsigned int *")
res = self._backend._lib.HMAC_Final(self._ctx, buf, outlen)
self._backend.openssl_assert(res != 0)
self._backend.openssl_assert(outlen[0] == self.algorithm.digest_size)
- return self._backend._ffi.buffer(buf)[:outlen[0]]
+ return self._backend._ffi.buffer(buf)[: outlen[0]]
def verify(self, signature):
digest = self.finalize()
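
The from_buffer change above lets HMAC_Init_ex and HMAC_Update read from any buffer-exporting object rather than requiring bytes. A sketch, assuming a cryptography release new enough to accept bytes-like input at the primitives layer (~2.5+):

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes, hmac

    h = hmac.HMAC(b"\x00" * 32, hashes.SHA256(), backend=default_backend())
    h.update(bytearray(b"message"))      # bytes-like objects pass through ffi.from_buffer
    h.update(memoryview(b" and more"))
    tag = h.finalize()
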
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/ocsp.py b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/ocsp.py
new file mode 100644
index 0000000..50c02e7
--- /dev/null
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/ocsp.py
@@ -0,0 +1,401 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import functools
+
+from cryptography import utils, x509
+from cryptography.exceptions import UnsupportedAlgorithm
+from cryptography.hazmat.backends.openssl.decode_asn1 import (
+ _CRL_ENTRY_REASON_CODE_TO_ENUM,
+ _asn1_integer_to_int,
+ _asn1_string_to_bytes,
+ _decode_x509_name,
+ _obj2txt,
+ _parse_asn1_generalized_time,
+)
+from cryptography.hazmat.backends.openssl.x509 import _Certificate
+from cryptography.hazmat.primitives import serialization
+from cryptography.x509.ocsp import (
+ OCSPCertStatus,
+ OCSPRequest,
+ OCSPResponse,
+ OCSPResponseStatus,
+ _CERT_STATUS_TO_ENUM,
+ _OIDS_TO_HASH,
+ _RESPONSE_STATUS_TO_ENUM,
+)
+
+
+def _requires_successful_response(func):
+ @functools.wraps(func)
+ def wrapper(self, *args):
+ if self.response_status != OCSPResponseStatus.SUCCESSFUL:
+ raise ValueError(
+ "OCSP response status is not successful so the property "
+ "has no value"
+ )
+ else:
+ return func(self, *args)
+
+ return wrapper
+
+
+def _issuer_key_hash(backend, cert_id):
+ key_hash = backend._ffi.new("ASN1_OCTET_STRING **")
+ res = backend._lib.OCSP_id_get0_info(
+ backend._ffi.NULL,
+ backend._ffi.NULL,
+ key_hash,
+ backend._ffi.NULL,
+ cert_id,
+ )
+ backend.openssl_assert(res == 1)
+ backend.openssl_assert(key_hash[0] != backend._ffi.NULL)
+ return _asn1_string_to_bytes(backend, key_hash[0])
+
+
+def _issuer_name_hash(backend, cert_id):
+ name_hash = backend._ffi.new("ASN1_OCTET_STRING **")
+ res = backend._lib.OCSP_id_get0_info(
+ name_hash,
+ backend._ffi.NULL,
+ backend._ffi.NULL,
+ backend._ffi.NULL,
+ cert_id,
+ )
+ backend.openssl_assert(res == 1)
+ backend.openssl_assert(name_hash[0] != backend._ffi.NULL)
+ return _asn1_string_to_bytes(backend, name_hash[0])
+
+
+def _serial_number(backend, cert_id):
+ num = backend._ffi.new("ASN1_INTEGER **")
+ res = backend._lib.OCSP_id_get0_info(
+ backend._ffi.NULL, backend._ffi.NULL, backend._ffi.NULL, num, cert_id
+ )
+ backend.openssl_assert(res == 1)
+ backend.openssl_assert(num[0] != backend._ffi.NULL)
+ return _asn1_integer_to_int(backend, num[0])
+
+
+def _hash_algorithm(backend, cert_id):
+ asn1obj = backend._ffi.new("ASN1_OBJECT **")
+ res = backend._lib.OCSP_id_get0_info(
+ backend._ffi.NULL,
+ asn1obj,
+ backend._ffi.NULL,
+ backend._ffi.NULL,
+ cert_id,
+ )
+ backend.openssl_assert(res == 1)
+ backend.openssl_assert(asn1obj[0] != backend._ffi.NULL)
+ oid = _obj2txt(backend, asn1obj[0])
+ try:
+ return _OIDS_TO_HASH[oid]
+ except KeyError:
+ raise UnsupportedAlgorithm(
+ "Signature algorithm OID: {} not recognized".format(oid)
+ )
+
+
+@utils.register_interface(OCSPResponse)
+class _OCSPResponse(object):
+ def __init__(self, backend, ocsp_response):
+ self._backend = backend
+ self._ocsp_response = ocsp_response
+ status = self._backend._lib.OCSP_response_status(self._ocsp_response)
+ self._backend.openssl_assert(status in _RESPONSE_STATUS_TO_ENUM)
+ self._status = _RESPONSE_STATUS_TO_ENUM[status]
+ if self._status is OCSPResponseStatus.SUCCESSFUL:
+ basic = self._backend._lib.OCSP_response_get1_basic(
+ self._ocsp_response
+ )
+ self._backend.openssl_assert(basic != self._backend._ffi.NULL)
+ self._basic = self._backend._ffi.gc(
+ basic, self._backend._lib.OCSP_BASICRESP_free
+ )
+ num_resp = self._backend._lib.OCSP_resp_count(self._basic)
+ if num_resp != 1:
+ raise ValueError(
+ "OCSP response contains more than one SINGLERESP structure"
+ ", which this library does not support. "
+ "{} found".format(num_resp)
+ )
+ self._single = self._backend._lib.OCSP_resp_get0(self._basic, 0)
+ self._backend.openssl_assert(
+ self._single != self._backend._ffi.NULL
+ )
+ self._cert_id = self._backend._lib.OCSP_SINGLERESP_get0_id(
+ self._single
+ )
+ self._backend.openssl_assert(
+ self._cert_id != self._backend._ffi.NULL
+ )
+
+ response_status = utils.read_only_property("_status")
+
+ @property
+ @_requires_successful_response
+ def signature_algorithm_oid(self):
+ alg = self._backend._lib.OCSP_resp_get0_tbs_sigalg(self._basic)
+ self._backend.openssl_assert(alg != self._backend._ffi.NULL)
+ oid = _obj2txt(self._backend, alg.algorithm)
+ return x509.ObjectIdentifier(oid)
+
+ @property
+ @_requires_successful_response
+ def signature_hash_algorithm(self):
+ oid = self.signature_algorithm_oid
+ try:
+ return x509._SIG_OIDS_TO_HASH[oid]
+ except KeyError:
+ raise UnsupportedAlgorithm(
+ "Signature algorithm OID:{} not recognized".format(oid)
+ )
+
+ @property
+ @_requires_successful_response
+ def signature(self):
+ sig = self._backend._lib.OCSP_resp_get0_signature(self._basic)
+ self._backend.openssl_assert(sig != self._backend._ffi.NULL)
+ return _asn1_string_to_bytes(self._backend, sig)
+
+ @property
+ @_requires_successful_response
+ def tbs_response_bytes(self):
+ respdata = self._backend._lib.OCSP_resp_get0_respdata(self._basic)
+ self._backend.openssl_assert(respdata != self._backend._ffi.NULL)
+ pp = self._backend._ffi.new("unsigned char **")
+ res = self._backend._lib.i2d_OCSP_RESPDATA(respdata, pp)
+ self._backend.openssl_assert(pp[0] != self._backend._ffi.NULL)
+ pp = self._backend._ffi.gc(
+ pp, lambda pointer: self._backend._lib.OPENSSL_free(pointer[0])
+ )
+ self._backend.openssl_assert(res > 0)
+ return self._backend._ffi.buffer(pp[0], res)[:]
+
+ @property
+ @_requires_successful_response
+ def certificates(self):
+ sk_x509 = self._backend._lib.OCSP_resp_get0_certs(self._basic)
+ num = self._backend._lib.sk_X509_num(sk_x509)
+ certs = []
+ for i in range(num):
+ x509 = self._backend._lib.sk_X509_value(sk_x509, i)
+ self._backend.openssl_assert(x509 != self._backend._ffi.NULL)
+ cert = _Certificate(self._backend, x509)
+ # We need to keep the OCSP response that the certificate came from
+ # alive until the Certificate object itself goes out of scope, so
+ # we give it a private reference.
+ cert._ocsp_resp = self
+ certs.append(cert)
+
+ return certs
+
+ @property
+ @_requires_successful_response
+ def responder_key_hash(self):
+ _, asn1_string = self._responder_key_name()
+ if asn1_string == self._backend._ffi.NULL:
+ return None
+ else:
+ return _asn1_string_to_bytes(self._backend, asn1_string)
+
+ @property
+ @_requires_successful_response
+ def responder_name(self):
+ x509_name, _ = self._responder_key_name()
+ if x509_name == self._backend._ffi.NULL:
+ return None
+ else:
+ return _decode_x509_name(self._backend, x509_name)
+
+ def _responder_key_name(self):
+ asn1_string = self._backend._ffi.new("ASN1_OCTET_STRING **")
+ x509_name = self._backend._ffi.new("X509_NAME **")
+ res = self._backend._lib.OCSP_resp_get0_id(
+ self._basic, asn1_string, x509_name
+ )
+ self._backend.openssl_assert(res == 1)
+ return x509_name[0], asn1_string[0]
+
+ @property
+ @_requires_successful_response
+ def produced_at(self):
+ produced_at = self._backend._lib.OCSP_resp_get0_produced_at(
+ self._basic
+ )
+ return _parse_asn1_generalized_time(self._backend, produced_at)
+
+ @property
+ @_requires_successful_response
+ def certificate_status(self):
+ status = self._backend._lib.OCSP_single_get0_status(
+ self._single,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ )
+ self._backend.openssl_assert(status in _CERT_STATUS_TO_ENUM)
+ return _CERT_STATUS_TO_ENUM[status]
+
+ @property
+ @_requires_successful_response
+ def revocation_time(self):
+ if self.certificate_status is not OCSPCertStatus.REVOKED:
+ return None
+
+ asn1_time = self._backend._ffi.new("ASN1_GENERALIZEDTIME **")
+ self._backend._lib.OCSP_single_get0_status(
+ self._single,
+ self._backend._ffi.NULL,
+ asn1_time,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ )
+ self._backend.openssl_assert(asn1_time[0] != self._backend._ffi.NULL)
+ return _parse_asn1_generalized_time(self._backend, asn1_time[0])
+
+ @property
+ @_requires_successful_response
+ def revocation_reason(self):
+ if self.certificate_status is not OCSPCertStatus.REVOKED:
+ return None
+
+ reason_ptr = self._backend._ffi.new("int *")
+ self._backend._lib.OCSP_single_get0_status(
+ self._single,
+ reason_ptr,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ )
+ # If no reason is encoded OpenSSL returns -1
+ if reason_ptr[0] == -1:
+ return None
+ else:
+ self._backend.openssl_assert(
+ reason_ptr[0] in _CRL_ENTRY_REASON_CODE_TO_ENUM
+ )
+ return _CRL_ENTRY_REASON_CODE_TO_ENUM[reason_ptr[0]]
+
+ @property
+ @_requires_successful_response
+ def this_update(self):
+ asn1_time = self._backend._ffi.new("ASN1_GENERALIZEDTIME **")
+ self._backend._lib.OCSP_single_get0_status(
+ self._single,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ asn1_time,
+ self._backend._ffi.NULL,
+ )
+ self._backend.openssl_assert(asn1_time[0] != self._backend._ffi.NULL)
+ return _parse_asn1_generalized_time(self._backend, asn1_time[0])
+
+ @property
+ @_requires_successful_response
+ def next_update(self):
+ asn1_time = self._backend._ffi.new("ASN1_GENERALIZEDTIME **")
+ self._backend._lib.OCSP_single_get0_status(
+ self._single,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ asn1_time,
+ )
+ if asn1_time[0] != self._backend._ffi.NULL:
+ return _parse_asn1_generalized_time(self._backend, asn1_time[0])
+ else:
+ return None
+
+ @property
+ @_requires_successful_response
+ def issuer_key_hash(self):
+ return _issuer_key_hash(self._backend, self._cert_id)
+
+ @property
+ @_requires_successful_response
+ def issuer_name_hash(self):
+ return _issuer_name_hash(self._backend, self._cert_id)
+
+ @property
+ @_requires_successful_response
+ def hash_algorithm(self):
+ return _hash_algorithm(self._backend, self._cert_id)
+
+ @property
+ @_requires_successful_response
+ def serial_number(self):
+ return _serial_number(self._backend, self._cert_id)
+
+ @utils.cached_property
+ @_requires_successful_response
+ def extensions(self):
+ return self._backend._ocsp_basicresp_ext_parser.parse(self._basic)
+
+ @utils.cached_property
+ @_requires_successful_response
+ def single_extensions(self):
+ return self._backend._ocsp_singleresp_ext_parser.parse(self._single)
+
+ def public_bytes(self, encoding):
+ if encoding is not serialization.Encoding.DER:
+ raise ValueError("The only allowed encoding value is Encoding.DER")
+
+ bio = self._backend._create_mem_bio_gc()
+ res = self._backend._lib.i2d_OCSP_RESPONSE_bio(
+ bio, self._ocsp_response
+ )
+ self._backend.openssl_assert(res > 0)
+ return self._backend._read_mem_bio(bio)
+
+
+@utils.register_interface(OCSPRequest)
+class _OCSPRequest(object):
+ def __init__(self, backend, ocsp_request):
+ if backend._lib.OCSP_request_onereq_count(ocsp_request) > 1:
+ raise NotImplementedError(
+ "OCSP request contains more than one request"
+ )
+ self._backend = backend
+ self._ocsp_request = ocsp_request
+ self._request = self._backend._lib.OCSP_request_onereq_get0(
+ self._ocsp_request, 0
+ )
+ self._backend.openssl_assert(self._request != self._backend._ffi.NULL)
+ self._cert_id = self._backend._lib.OCSP_onereq_get0_id(self._request)
+ self._backend.openssl_assert(self._cert_id != self._backend._ffi.NULL)
+
+ @property
+ def issuer_key_hash(self):
+ return _issuer_key_hash(self._backend, self._cert_id)
+
+ @property
+ def issuer_name_hash(self):
+ return _issuer_name_hash(self._backend, self._cert_id)
+
+ @property
+ def serial_number(self):
+ return _serial_number(self._backend, self._cert_id)
+
+ @property
+ def hash_algorithm(self):
+ return _hash_algorithm(self._backend, self._cert_id)
+
+ @utils.cached_property
+ def extensions(self):
+ return self._backend._ocsp_req_ext_parser.parse(self._ocsp_request)
+
+ def public_bytes(self, encoding):
+ if encoding is not serialization.Encoding.DER:
+ raise ValueError("The only allowed encoding value is Encoding.DER")
+
+ bio = self._backend._create_mem_bio_gc()
+ res = self._backend._lib.i2d_OCSP_REQUEST_bio(bio, self._ocsp_request)
+ self._backend.openssl_assert(res > 0)
+ return self._backend._read_mem_bio(bio)
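
ocsp.py is new and backs the public cryptography.x509.ocsp module. A minimal sketch of consuming a DER-encoded response with it; response.der is a placeholder for bytes obtained from a real responder:

    from cryptography.x509 import ocsp

    with open("response.der", "rb") as f:        # hypothetical file holding the raw response
        resp = ocsp.load_der_ocsp_response(f.read())

    if resp.response_status == ocsp.OCSPResponseStatus.SUCCESSFUL:
        # These properties map onto the _OCSPResponse accessors defined above.
        print(resp.certificate_status, resp.this_update, resp.next_update)
        print(resp.serial_number, resp.hash_algorithm.name)
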
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/poly1305.py b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/poly1305.py
new file mode 100644
index 0000000..17493ca
--- /dev/null
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/poly1305.py
@@ -0,0 +1,65 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+
+from cryptography.exceptions import InvalidSignature
+from cryptography.hazmat.primitives import constant_time
+
+
+_POLY1305_TAG_SIZE = 16
+_POLY1305_KEY_SIZE = 32
+
+
+class _Poly1305Context(object):
+ def __init__(self, backend, key):
+ self._backend = backend
+
+ key_ptr = self._backend._ffi.from_buffer(key)
+ # This function copies the key into OpenSSL-owned memory so we don't
+ # need to retain it ourselves
+ evp_pkey = self._backend._lib.EVP_PKEY_new_raw_private_key(
+ self._backend._lib.NID_poly1305,
+ self._backend._ffi.NULL,
+ key_ptr,
+ len(key),
+ )
+ self._backend.openssl_assert(evp_pkey != self._backend._ffi.NULL)
+ self._evp_pkey = self._backend._ffi.gc(
+ evp_pkey, self._backend._lib.EVP_PKEY_free
+ )
+ ctx = self._backend._lib.Cryptography_EVP_MD_CTX_new()
+ self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
+ self._ctx = self._backend._ffi.gc(
+ ctx, self._backend._lib.Cryptography_EVP_MD_CTX_free
+ )
+ res = self._backend._lib.EVP_DigestSignInit(
+ self._ctx,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ self._evp_pkey,
+ )
+ self._backend.openssl_assert(res == 1)
+
+ def update(self, data):
+ data_ptr = self._backend._ffi.from_buffer(data)
+ res = self._backend._lib.EVP_DigestSignUpdate(
+ self._ctx, data_ptr, len(data)
+ )
+ self._backend.openssl_assert(res != 0)
+
+ def finalize(self):
+ buf = self._backend._ffi.new("unsigned char[]", _POLY1305_TAG_SIZE)
+ outlen = self._backend._ffi.new("size_t *")
+ res = self._backend._lib.EVP_DigestSignFinal(self._ctx, buf, outlen)
+ self._backend.openssl_assert(res != 0)
+ self._backend.openssl_assert(outlen[0] == _POLY1305_TAG_SIZE)
+ return self._backend._ffi.buffer(buf)[: outlen[0]]
+
+ def verify(self, tag):
+ mac = self.finalize()
+ if not constant_time.bytes_eq(mac, tag):
+ raise InvalidSignature("Value did not match computed tag.")
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/rsa.py b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/rsa.py
index 1b6ebfd..423f687 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/rsa.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/rsa.py
@@ -4,25 +4,34 @@
from __future__ import absolute_import, division, print_function
-import math
-
from cryptography import utils
from cryptography.exceptions import (
- InvalidSignature, UnsupportedAlgorithm, _Reasons
+ InvalidSignature,
+ UnsupportedAlgorithm,
+ _Reasons,
)
from cryptography.hazmat.backends.openssl.utils import (
- _calculate_digest_and_algorithm, _check_not_prehashed,
- _warn_sign_verify_deprecated
+ _calculate_digest_and_algorithm,
+ _check_not_prehashed,
+ _warn_sign_verify_deprecated,
)
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import (
- AsymmetricSignatureContext, AsymmetricVerificationContext, rsa
+ AsymmetricSignatureContext,
+ AsymmetricVerificationContext,
+ rsa,
)
from cryptography.hazmat.primitives.asymmetric.padding import (
- AsymmetricPadding, MGF1, OAEP, PKCS1v15, PSS, calculate_max_pss_salt_length
+ AsymmetricPadding,
+ MGF1,
+ OAEP,
+ PKCS1v15,
+ PSS,
+ calculate_max_pss_salt_length,
)
from cryptography.hazmat.primitives.asymmetric.rsa import (
- RSAPrivateKeyWithSerialization, RSAPublicKeyWithSerialization
+ RSAPrivateKeyWithSerialization,
+ RSAPublicKeyWithSerialization,
)
@@ -47,22 +56,20 @@ def _enc_dec_rsa(backend, key, data, padding):
if not isinstance(padding._mgf, MGF1):
raise UnsupportedAlgorithm(
"Only MGF1 is supported by this backend.",
- _Reasons.UNSUPPORTED_MGF
+ _Reasons.UNSUPPORTED_MGF,
)
if not backend.rsa_padding_supported(padding):
raise UnsupportedAlgorithm(
"This combination of padding and hash algorithm is not "
"supported by this backend.",
- _Reasons.UNSUPPORTED_PADDING
+ _Reasons.UNSUPPORTED_PADDING,
)
else:
raise UnsupportedAlgorithm(
- "{0} is not supported by this backend.".format(
- padding.name
- ),
- _Reasons.UNSUPPORTED_PADDING
+ "{} is not supported by this backend.".format(padding.name),
+ _Reasons.UNSUPPORTED_PADDING,
)
return _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum, padding)
@@ -76,37 +83,29 @@ def _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum, padding):
init = backend._lib.EVP_PKEY_decrypt_init
crypt = backend._lib.EVP_PKEY_decrypt
- pkey_ctx = backend._lib.EVP_PKEY_CTX_new(
- key._evp_pkey, backend._ffi.NULL
- )
+ pkey_ctx = backend._lib.EVP_PKEY_CTX_new(key._evp_pkey, backend._ffi.NULL)
backend.openssl_assert(pkey_ctx != backend._ffi.NULL)
pkey_ctx = backend._ffi.gc(pkey_ctx, backend._lib.EVP_PKEY_CTX_free)
res = init(pkey_ctx)
backend.openssl_assert(res == 1)
- res = backend._lib.EVP_PKEY_CTX_set_rsa_padding(
- pkey_ctx, padding_enum)
+ res = backend._lib.EVP_PKEY_CTX_set_rsa_padding(pkey_ctx, padding_enum)
backend.openssl_assert(res > 0)
buf_size = backend._lib.EVP_PKEY_size(key._evp_pkey)
backend.openssl_assert(buf_size > 0)
- if (
- isinstance(padding, OAEP) and
- backend._lib.Cryptography_HAS_RSA_OAEP_MD
- ):
- mgf1_md = backend._lib.EVP_get_digestbyname(
- padding._mgf._algorithm.name.encode("ascii"))
- backend.openssl_assert(mgf1_md != backend._ffi.NULL)
+ if isinstance(padding, OAEP) and backend._lib.Cryptography_HAS_RSA_OAEP_MD:
+ mgf1_md = backend._evp_md_non_null_from_algorithm(
+ padding._mgf._algorithm
+ )
res = backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md(pkey_ctx, mgf1_md)
backend.openssl_assert(res > 0)
- oaep_md = backend._lib.EVP_get_digestbyname(
- padding._algorithm.name.encode("ascii"))
- backend.openssl_assert(oaep_md != backend._ffi.NULL)
+ oaep_md = backend._evp_md_non_null_from_algorithm(padding._algorithm)
res = backend._lib.EVP_PKEY_CTX_set_rsa_oaep_md(pkey_ctx, oaep_md)
backend.openssl_assert(res > 0)
if (
- isinstance(padding, OAEP) and
- padding._label is not None and
- len(padding._label) > 0
+ isinstance(padding, OAEP)
+ and padding._label is not None
+ and len(padding._label) > 0
):
# set0_rsa_oaep_label takes ownership of the char * so we need to
# copy it into some new memory
@@ -124,35 +123,19 @@ def _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum, padding):
if res <= 0:
_handle_rsa_enc_dec_error(backend, key)
- return backend._ffi.buffer(buf)[:outlen[0]]
+ return backend._ffi.buffer(buf)[: outlen[0]]
def _handle_rsa_enc_dec_error(backend, key):
- errors = backend._consume_errors()
- assert errors
- assert errors[0].lib == backend._lib.ERR_LIB_RSA
+ errors = backend._consume_errors_with_text()
if isinstance(key, _RSAPublicKey):
- assert (errors[0].reason ==
- backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE)
raise ValueError(
"Data too long for key size. Encrypt less data or use a "
- "larger key size."
+ "larger key size.",
+ errors,
)
else:
- decoding_errors = [
- backend._lib.RSA_R_BLOCK_TYPE_IS_NOT_01,
- backend._lib.RSA_R_BLOCK_TYPE_IS_NOT_02,
- backend._lib.RSA_R_OAEP_DECODING_ERROR,
- # Though this error looks similar to the
- # RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE, this occurs on decrypts,
- # rather than on encrypts
- backend._lib.RSA_R_DATA_TOO_LARGE_FOR_MODULUS,
- ]
- if backend._lib.Cryptography_HAS_RSA_R_PKCS_DECODING_ERROR:
- decoding_errors.append(backend._lib.RSA_R_PKCS_DECODING_ERROR)
-
- assert errors[0].reason in decoding_errors
- raise ValueError("Decryption failed.")
+ raise ValueError("Decryption failed.", errors)
def _rsa_sig_determine_padding(backend, key, padding, algorithm):
@@ -168,20 +151,22 @@ def _rsa_sig_determine_padding(backend, key, padding, algorithm):
if not isinstance(padding._mgf, MGF1):
raise UnsupportedAlgorithm(
"Only MGF1 is supported by this backend.",
- _Reasons.UNSUPPORTED_MGF
+ _Reasons.UNSUPPORTED_MGF,
)
# Size of key in bytes - 2 is the maximum
# PSS signature length (salt length is checked later)
if pkey_size - algorithm.digest_size - 2 < 0:
- raise ValueError("Digest too large for key size. Use a larger "
- "key or different digest.")
+ raise ValueError(
+ "Digest too large for key size. Use a larger "
+ "key or different digest."
+ )
padding_enum = backend._lib.RSA_PKCS1_PSS_PADDING
else:
raise UnsupportedAlgorithm(
- "{0} is not supported by this backend.".format(padding.name),
- _Reasons.UNSUPPORTED_PADDING
+ "{} is not supported by this backend.".format(padding.name),
+ _Reasons.UNSUPPORTED_PADDING,
)
return padding_enum
@@ -189,15 +174,21 @@ def _rsa_sig_determine_padding(backend, key, padding, algorithm):
def _rsa_sig_setup(backend, padding, algorithm, key, data, init_func):
padding_enum = _rsa_sig_determine_padding(backend, key, padding, algorithm)
- evp_md = backend._lib.EVP_get_digestbyname(algorithm.name.encode("ascii"))
- backend.openssl_assert(evp_md != backend._ffi.NULL)
+ evp_md = backend._evp_md_non_null_from_algorithm(algorithm)
pkey_ctx = backend._lib.EVP_PKEY_CTX_new(key._evp_pkey, backend._ffi.NULL)
backend.openssl_assert(pkey_ctx != backend._ffi.NULL)
pkey_ctx = backend._ffi.gc(pkey_ctx, backend._lib.EVP_PKEY_CTX_free)
res = init_func(pkey_ctx)
backend.openssl_assert(res == 1)
res = backend._lib.EVP_PKEY_CTX_set_signature_md(pkey_ctx, evp_md)
- backend.openssl_assert(res > 0)
+ if res == 0:
+ backend._consume_errors()
+ raise UnsupportedAlgorithm(
+ "{} is not supported by this backend for RSA signing.".format(
+ algorithm.name
+ ),
+ _Reasons.UNSUPPORTED_HASH,
+ )
res = backend._lib.EVP_PKEY_CTX_set_rsa_padding(pkey_ctx, padding_enum)
backend.openssl_assert(res > 0)
if isinstance(padding, PSS):
@@ -206,10 +197,9 @@ def _rsa_sig_setup(backend, padding, algorithm, key, data, init_func):
)
backend.openssl_assert(res > 0)
- mgf1_md = backend._lib.EVP_get_digestbyname(
- padding._mgf._algorithm.name.encode("ascii")
+ mgf1_md = backend._evp_md_non_null_from_algorithm(
+ padding._mgf._algorithm
)
- backend.openssl_assert(mgf1_md != backend._ffi.NULL)
res = backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md(pkey_ctx, mgf1_md)
backend.openssl_assert(res > 0)
@@ -218,43 +208,39 @@ def _rsa_sig_setup(backend, padding, algorithm, key, data, init_func):
def _rsa_sig_sign(backend, padding, algorithm, private_key, data):
pkey_ctx = _rsa_sig_setup(
- backend, padding, algorithm, private_key, data,
- backend._lib.EVP_PKEY_sign_init
+ backend,
+ padding,
+ algorithm,
+ private_key,
+ data,
+ backend._lib.EVP_PKEY_sign_init,
)
buflen = backend._ffi.new("size_t *")
res = backend._lib.EVP_PKEY_sign(
- pkey_ctx,
- backend._ffi.NULL,
- buflen,
- data,
- len(data)
+ pkey_ctx, backend._ffi.NULL, buflen, data, len(data)
)
backend.openssl_assert(res == 1)
buf = backend._ffi.new("unsigned char[]", buflen[0])
- res = backend._lib.EVP_PKEY_sign(
- pkey_ctx, buf, buflen, data, len(data))
+ res = backend._lib.EVP_PKEY_sign(pkey_ctx, buf, buflen, data, len(data))
if res != 1:
- errors = backend._consume_errors()
- assert errors[0].lib == backend._lib.ERR_LIB_RSA
- reason = None
- if (errors[0].reason ==
- backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE):
- reason = ("Salt length too long for key size. Try using "
- "MAX_LENGTH instead.")
- else:
- assert (errors[0].reason ==
- backend._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY)
- reason = "Digest too large for key size. Use a larger key."
- assert reason is not None
- raise ValueError(reason)
+ errors = backend._consume_errors_with_text()
+ raise ValueError(
+ "Digest or salt length too long for key size. Use a larger key "
+ "or shorter salt length if you are specifying a PSS salt",
+ errors,
+ )
return backend._ffi.buffer(buf)[:]
def _rsa_sig_verify(backend, padding, algorithm, public_key, signature, data):
pkey_ctx = _rsa_sig_setup(
- backend, padding, algorithm, public_key, data,
- backend._lib.EVP_PKEY_verify_init
+ backend,
+ padding,
+ algorithm,
+ public_key,
+ data,
+ backend._lib.EVP_PKEY_verify_init,
)
res = backend._lib.EVP_PKEY_verify(
pkey_ctx, signature, len(signature), data, len(data)
@@ -264,8 +250,7 @@ def _rsa_sig_verify(backend, padding, algorithm, public_key, signature, data):
# occurs.
backend.openssl_assert(res >= 0)
if res == 0:
- errors = backend._consume_errors()
- assert errors
+ backend._consume_errors()
raise InvalidSignature
@@ -292,7 +277,7 @@ def finalize(self):
self._padding,
self._algorithm,
self._private_key,
- self._hash_ctx.finalize()
+ self._hash_ctx.finalize(),
)
@@ -322,21 +307,28 @@ def verify(self):
self._algorithm,
self._public_key,
self._signature,
- self._hash_ctx.finalize()
+ self._hash_ctx.finalize(),
)
@utils.register_interface(RSAPrivateKeyWithSerialization)
class _RSAPrivateKey(object):
def __init__(self, backend, rsa_cdata, evp_pkey):
+ res = backend._lib.RSA_check_key(rsa_cdata)
+ if res != 1:
+ errors = backend._consume_errors_with_text()
+ raise ValueError("Invalid private key", errors)
+
self._backend = backend
self._rsa_cdata = rsa_cdata
self._evp_pkey = evp_pkey
n = self._backend._ffi.new("BIGNUM **")
self._backend._lib.RSA_get0_key(
- self._rsa_cdata, n, self._backend._ffi.NULL,
- self._backend._ffi.NULL
+ self._rsa_cdata,
+ n,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
)
self._backend.openssl_assert(n[0] != self._backend._ffi.NULL)
self._key_size = self._backend._lib.BN_num_bits(n[0])
@@ -349,7 +341,7 @@ def signer(self, padding, algorithm):
return _RSASignatureContext(self._backend, self, padding, algorithm)
def decrypt(self, ciphertext, padding):
- key_size_bytes = int(math.ceil(self.key_size / 8.0))
+ key_size_bytes = (self.key_size + 7) // 8
if key_size_bytes != len(ciphertext):
raise ValueError("Ciphertext length must be equal to key size.")
@@ -396,7 +388,7 @@ def private_numbers(self):
public_numbers=rsa.RSAPublicNumbers(
e=self._backend._bn_to_int(e[0]),
n=self._backend._bn_to_int(n[0]),
- )
+ ),
)
def private_bytes(self, encoding, format, encryption_algorithm):
@@ -404,8 +396,9 @@ def private_bytes(self, encoding, format, encryption_algorithm):
encoding,
format,
encryption_algorithm,
+ self,
self._evp_pkey,
- self._rsa_cdata
+ self._rsa_cdata,
)
def sign(self, data, padding, algorithm):
@@ -424,8 +417,10 @@ def __init__(self, backend, rsa_cdata, evp_pkey):
n = self._backend._ffi.new("BIGNUM **")
self._backend._lib.RSA_get0_key(
- self._rsa_cdata, n, self._backend._ffi.NULL,
- self._backend._ffi.NULL
+ self._rsa_cdata,
+ n,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
)
self._backend.openssl_assert(n[0] != self._backend._ffi.NULL)
self._key_size = self._backend._lib.BN_num_bits(n[0])
@@ -434,8 +429,7 @@ def __init__(self, backend, rsa_cdata, evp_pkey):
def verifier(self, signature, padding, algorithm):
_warn_sign_verify_deprecated()
- if not isinstance(signature, bytes):
- raise TypeError("signature must be bytes.")
+ utils._check_bytes("signature", signature)
_check_not_prehashed(algorithm)
return _RSAVerificationContext(
@@ -460,11 +454,7 @@ def public_numbers(self):
def public_bytes(self, encoding, format):
return self._backend._public_key_bytes(
- encoding,
- format,
- self,
- self._evp_pkey,
- self._rsa_cdata
+ encoding, format, self, self._evp_pkey, self._rsa_cdata
)
def verify(self, signature, data, padding, algorithm):
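
The rsa.py changes mostly reroute digest lookups through _evp_md_non_null_from_algorithm and replace hard asserts with ValueError/UnsupportedAlgorithm carrying the collected OpenSSL error text. A quick sketch of the public signing path these hunks sit under:

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.asymmetric import padding, rsa

    private_key = rsa.generate_private_key(
        public_exponent=65537, key_size=2048, backend=default_backend()
    )
    message = b"signed payload"
    pss = padding.PSS(
        mgf=padding.MGF1(hashes.SHA256()), salt_length=padding.PSS.MAX_LENGTH
    )
    signature = private_key.sign(message, pss, hashes.SHA256())
    # verify() raises InvalidSignature on failure; the backend now clears the
    # OpenSSL error queue instead of asserting on specific reason codes.
    private_key.public_key().verify(signature, message, pss, hashes.SHA256())
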
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/utils.py b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/utils.py
index 05d0fe5..ec0b947 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/utils.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/utils.py
@@ -11,6 +11,26 @@
from cryptography.hazmat.primitives.asymmetric.utils import Prehashed
+def _evp_pkey_derive(backend, evp_pkey, peer_public_key):
+ ctx = backend._lib.EVP_PKEY_CTX_new(evp_pkey, backend._ffi.NULL)
+ backend.openssl_assert(ctx != backend._ffi.NULL)
+ ctx = backend._ffi.gc(ctx, backend._lib.EVP_PKEY_CTX_free)
+ res = backend._lib.EVP_PKEY_derive_init(ctx)
+ backend.openssl_assert(res == 1)
+ res = backend._lib.EVP_PKEY_derive_set_peer(ctx, peer_public_key._evp_pkey)
+ backend.openssl_assert(res == 1)
+ keylen = backend._ffi.new("size_t *")
+ res = backend._lib.EVP_PKEY_derive(ctx, backend._ffi.NULL, keylen)
+ backend.openssl_assert(res == 1)
+ backend.openssl_assert(keylen[0] > 0)
+ buf = backend._ffi.new("unsigned char[]", keylen[0])
+ res = backend._lib.EVP_PKEY_derive(ctx, buf, keylen)
+ if res != 1:
+ raise ValueError("Null shared key derived from public/private pair.")
+
+ return backend._ffi.buffer(buf, keylen[0])[:]
+
+
def _calculate_digest_and_algorithm(backend, data, algorithm):
if not isinstance(algorithm, Prehashed):
hash_ctx = hashes.Hash(algorithm, backend)
@@ -40,6 +60,6 @@ def _warn_sign_verify_deprecated():
warnings.warn(
"signer and verifier have been deprecated. Please use sign "
"and verify instead.",
- utils.PersistentlyDeprecated,
- stacklevel=3
+ utils.PersistentlyDeprecated2017,
+ stacklevel=3,
)
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/x25519.py b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/x25519.py
index f92b184..4971c54 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/x25519.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/x25519.py
@@ -5,18 +5,43 @@
from __future__ import absolute_import, division, print_function
from cryptography import utils
+from cryptography.hazmat.backends.openssl.utils import _evp_pkey_derive
+from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric.x25519 import (
- X25519PrivateKey, X25519PublicKey
+ X25519PrivateKey,
+ X25519PublicKey,
)
+_X25519_KEY_SIZE = 32
+
+
@utils.register_interface(X25519PublicKey)
class _X25519PublicKey(object):
def __init__(self, backend, evp_pkey):
self._backend = backend
self._evp_pkey = evp_pkey
- def public_bytes(self):
+ def public_bytes(self, encoding, format):
+ if (
+ encoding is serialization.Encoding.Raw
+ or format is serialization.PublicFormat.Raw
+ ):
+ if (
+ encoding is not serialization.Encoding.Raw
+ or format is not serialization.PublicFormat.Raw
+ ):
+ raise ValueError(
+ "When using Raw both encoding and format must be Raw"
+ )
+
+ return self._raw_public_bytes()
+
+ return self._backend._public_key_bytes(
+ encoding, format, self, self._evp_pkey, None
+ )
+
+ def _raw_public_bytes(self):
ucharpp = self._backend._ffi.new("unsigned char **")
res = self._backend._lib.EVP_PKEY_get1_tls_encodedpoint(
self._evp_pkey, ucharpp
@@ -42,30 +67,57 @@ def public_key(self):
evp_pkey = self._backend._lib.d2i_PUBKEY_bio(
bio, self._backend._ffi.NULL
)
+ self._backend.openssl_assert(evp_pkey != self._backend._ffi.NULL)
+ evp_pkey = self._backend._ffi.gc(
+ evp_pkey, self._backend._lib.EVP_PKEY_free
+ )
return _X25519PublicKey(self._backend, evp_pkey)
def exchange(self, peer_public_key):
if not isinstance(peer_public_key, X25519PublicKey):
raise TypeError("peer_public_key must be X25519PublicKey.")
- ctx = self._backend._lib.EVP_PKEY_CTX_new(
- self._evp_pkey, self._backend._ffi.NULL
- )
- self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
- ctx = self._backend._ffi.gc(ctx, self._backend._lib.EVP_PKEY_CTX_free)
- res = self._backend._lib.EVP_PKEY_derive_init(ctx)
- self._backend.openssl_assert(res == 1)
- res = self._backend._lib.EVP_PKEY_derive_set_peer(
- ctx, peer_public_key._evp_pkey
+ return _evp_pkey_derive(self._backend, self._evp_pkey, peer_public_key)
+
+ def private_bytes(self, encoding, format, encryption_algorithm):
+ if (
+ encoding is serialization.Encoding.Raw
+ or format is serialization.PublicFormat.Raw
+ ):
+ if (
+ format is not serialization.PrivateFormat.Raw
+ or encoding is not serialization.Encoding.Raw
+ or not isinstance(
+ encryption_algorithm, serialization.NoEncryption
+ )
+ ):
+ raise ValueError(
+ "When using Raw both encoding and format must be Raw "
+ "and encryption_algorithm must be NoEncryption()"
+ )
+
+ return self._raw_private_bytes()
+
+ return self._backend._private_key_bytes(
+ encoding, format, encryption_algorithm, self, self._evp_pkey, None
)
- self._backend.openssl_assert(res == 1)
- keylen = self._backend._ffi.new("size_t *")
- res = self._backend._lib.EVP_PKEY_derive(
- ctx, self._backend._ffi.NULL, keylen
+
+ def _raw_private_bytes(self):
+ # When we drop support for CRYPTOGRAPHY_OPENSSL_LESS_THAN_111 we can
+ # switch this to EVP_PKEY_new_raw_private_key
+ # The trick we use here is serializing to a PKCS8 key and just
+ # using the last 32 bytes, which is the key itself.
+ bio = self._backend._create_mem_bio_gc()
+ res = self._backend._lib.i2d_PKCS8PrivateKey_bio(
+ bio,
+ self._evp_pkey,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ 0,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
)
self._backend.openssl_assert(res == 1)
- self._backend.openssl_assert(keylen[0] > 0)
- buf = self._backend._ffi.new("unsigned char[]", keylen[0])
- res = self._backend._lib.EVP_PKEY_derive(ctx, buf, keylen)
- self._backend.openssl_assert(res == 1)
- return self._backend._ffi.buffer(buf, keylen[0])[:]
+ pkcs8 = self._backend._read_mem_bio(bio)
+ self._backend.openssl_assert(len(pkcs8) == 48)
+ return pkcs8[-_X25519_KEY_SIZE:]
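
x25519.py now exposes Raw serialization and delegates key agreement to the shared _evp_pkey_derive helper (the new x448.py below follows the same pattern). A sketch of the corresponding public API, assuming cryptography >= 2.5:

    from cryptography.hazmat.primitives import serialization
    from cryptography.hazmat.primitives.asymmetric.x25519 import X25519PrivateKey

    private_key = X25519PrivateKey.generate()
    peer_key = X25519PrivateKey.generate()           # stand-in for the other party

    shared_secret = private_key.exchange(peer_key.public_key())  # 32 bytes via EVP_PKEY_derive

    raw_public = private_key.public_key().public_bytes(
        serialization.Encoding.Raw, serialization.PublicFormat.Raw
    )
    raw_private = private_key.private_bytes(         # the PKCS8 tail-slice trick shown above
        serialization.Encoding.Raw,
        serialization.PrivateFormat.Raw,
        serialization.NoEncryption(),
    )
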
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/x448.py b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/x448.py
new file mode 100644
index 0000000..7ebcdf8
--- /dev/null
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/x448.py
@@ -0,0 +1,107 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+from cryptography import utils
+from cryptography.hazmat.backends.openssl.utils import _evp_pkey_derive
+from cryptography.hazmat.primitives import serialization
+from cryptography.hazmat.primitives.asymmetric.x448 import (
+ X448PrivateKey,
+ X448PublicKey,
+)
+
+_X448_KEY_SIZE = 56
+
+
+@utils.register_interface(X448PublicKey)
+class _X448PublicKey(object):
+ def __init__(self, backend, evp_pkey):
+ self._backend = backend
+ self._evp_pkey = evp_pkey
+
+ def public_bytes(self, encoding, format):
+ if (
+ encoding is serialization.Encoding.Raw
+ or format is serialization.PublicFormat.Raw
+ ):
+ if (
+ encoding is not serialization.Encoding.Raw
+ or format is not serialization.PublicFormat.Raw
+ ):
+ raise ValueError(
+ "When using Raw both encoding and format must be Raw"
+ )
+
+ return self._raw_public_bytes()
+
+ return self._backend._public_key_bytes(
+ encoding, format, self, self._evp_pkey, None
+ )
+
+ def _raw_public_bytes(self):
+ buf = self._backend._ffi.new("unsigned char []", _X448_KEY_SIZE)
+ buflen = self._backend._ffi.new("size_t *", _X448_KEY_SIZE)
+ res = self._backend._lib.EVP_PKEY_get_raw_public_key(
+ self._evp_pkey, buf, buflen
+ )
+ self._backend.openssl_assert(res == 1)
+ self._backend.openssl_assert(buflen[0] == _X448_KEY_SIZE)
+ return self._backend._ffi.buffer(buf, _X448_KEY_SIZE)[:]
+
+
+@utils.register_interface(X448PrivateKey)
+class _X448PrivateKey(object):
+ def __init__(self, backend, evp_pkey):
+ self._backend = backend
+ self._evp_pkey = evp_pkey
+
+ def public_key(self):
+ buf = self._backend._ffi.new("unsigned char []", _X448_KEY_SIZE)
+ buflen = self._backend._ffi.new("size_t *", _X448_KEY_SIZE)
+ res = self._backend._lib.EVP_PKEY_get_raw_public_key(
+ self._evp_pkey, buf, buflen
+ )
+ self._backend.openssl_assert(res == 1)
+ self._backend.openssl_assert(buflen[0] == _X448_KEY_SIZE)
+ return self._backend.x448_load_public_bytes(buf)
+
+ def exchange(self, peer_public_key):
+ if not isinstance(peer_public_key, X448PublicKey):
+ raise TypeError("peer_public_key must be X448PublicKey.")
+
+ return _evp_pkey_derive(self._backend, self._evp_pkey, peer_public_key)
+
+ def private_bytes(self, encoding, format, encryption_algorithm):
+ if (
+ encoding is serialization.Encoding.Raw
+ or format is serialization.PublicFormat.Raw
+ ):
+ if (
+ format is not serialization.PrivateFormat.Raw
+ or encoding is not serialization.Encoding.Raw
+ or not isinstance(
+ encryption_algorithm, serialization.NoEncryption
+ )
+ ):
+ raise ValueError(
+ "When using Raw both encoding and format must be Raw "
+ "and encryption_algorithm must be NoEncryption()"
+ )
+
+ return self._raw_private_bytes()
+
+ return self._backend._private_key_bytes(
+ encoding, format, encryption_algorithm, self, self._evp_pkey, None
+ )
+
+ def _raw_private_bytes(self):
+ buf = self._backend._ffi.new("unsigned char []", _X448_KEY_SIZE)
+ buflen = self._backend._ffi.new("size_t *", _X448_KEY_SIZE)
+ res = self._backend._lib.EVP_PKEY_get_raw_private_key(
+ self._evp_pkey, buf, buflen
+ )
+ self._backend.openssl_assert(res == 1)
+ self._backend.openssl_assert(buflen[0] == _X448_KEY_SIZE)
+ return self._backend._ffi.buffer(buf, _X448_KEY_SIZE)[:]
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/x509.py b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/x509.py
index 9637fc0..4d0dac7 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/x509.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/backends/openssl/x509.py
@@ -6,28 +6,43 @@
import datetime
import operator
-import warnings
from cryptography import utils, x509
from cryptography.exceptions import UnsupportedAlgorithm
from cryptography.hazmat.backends.openssl.decode_asn1 import (
- _CERTIFICATE_EXTENSION_PARSER, _CERTIFICATE_EXTENSION_PARSER_NO_SCT,
- _CRL_EXTENSION_PARSER, _CSR_EXTENSION_PARSER,
- _REVOKED_CERTIFICATE_EXTENSION_PARSER, _asn1_integer_to_int,
- _asn1_string_to_bytes, _decode_x509_name, _obj2txt, _parse_asn1_time
+ _asn1_integer_to_int,
+ _asn1_string_to_bytes,
+ _decode_x509_name,
+ _obj2txt,
+ _parse_asn1_time,
+)
+from cryptography.hazmat.backends.openssl.encode_asn1 import (
+ _encode_asn1_int_gc,
+ _txt2obj_gc,
)
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import dsa, ec, rsa
+from cryptography.x509.name import _ASN1Type
@utils.register_interface(x509.Certificate)
class _Certificate(object):
- def __init__(self, backend, x509):
+ def __init__(self, backend, x509_cert):
self._backend = backend
- self._x509 = x509
+ self._x509 = x509_cert
+
+ version = self._backend._lib.X509_get_version(self._x509)
+ if version == 0:
+ self._version = x509.Version.v1
+ elif version == 2:
+ self._version = x509.Version.v3
+ else:
+ raise x509.InvalidVersion(
+ "{} is not a valid X509 version".format(version), version
+ )
def __repr__(self):
- return "".format(self.subject)
+ return "".format(self.subject)
def __eq__(self, other):
if not isinstance(other, x509.Certificate):
@@ -42,31 +57,15 @@ def __ne__(self, other):
def __hash__(self):
return hash(self.public_bytes(serialization.Encoding.DER))
+ def __deepcopy__(self, memo):
+ return self
+
def fingerprint(self, algorithm):
h = hashes.Hash(algorithm, self._backend)
h.update(self.public_bytes(serialization.Encoding.DER))
return h.finalize()
- @property
- def version(self):
- version = self._backend._lib.X509_get_version(self._x509)
- if version == 0:
- return x509.Version.v1
- elif version == 2:
- return x509.Version.v3
- else:
- raise x509.InvalidVersion(
- "{0} is not a valid X509 version".format(version), version
- )
-
- @property
- def serial(self):
- warnings.warn(
- "Certificate serial is deprecated, use serial_number instead.",
- utils.PersistentlyDeprecated,
- stacklevel=2
- )
- return self.serial_number
+ version = utils.read_only_property("_version")
@property
def serial_number(self):
@@ -87,12 +86,12 @@ def public_key(self):
@property
def not_valid_before(self):
- asn1_time = self._backend._lib.X509_get_notBefore(self._x509)
+ asn1_time = self._backend._lib.X509_getm_notBefore(self._x509)
return _parse_asn1_time(self._backend, asn1_time)
@property
def not_valid_after(self):
- asn1_time = self._backend._lib.X509_get_notAfter(self._x509)
+ asn1_time = self._backend._lib.X509_getm_notAfter(self._x509)
return _parse_asn1_time(self._backend, asn1_time)
@property
@@ -114,7 +113,7 @@ def signature_hash_algorithm(self):
return x509._SIG_OIDS_TO_HASH[oid]
except KeyError:
raise UnsupportedAlgorithm(
- "Signature algorithm OID:{0} not recognized".format(oid)
+ "Signature algorithm OID:{} not recognized".format(oid)
)
@property
@@ -129,14 +128,7 @@ def signature_algorithm_oid(self):
@utils.cached_property
def extensions(self):
- if self._backend._lib.CRYPTOGRAPHY_OPENSSL_110_OR_GREATER:
- return _CERTIFICATE_EXTENSION_PARSER.parse(
- self._backend, self._x509
- )
- else:
- return _CERTIFICATE_EXTENSION_PARSER_NO_SCT.parse(
- self._backend, self._x509
- )
+ return self._backend._certificate_extension_parser.parse(self._x509)
@property
def signature(self):
@@ -198,13 +190,13 @@ def revocation_date(self):
self._backend,
self._backend._lib.X509_REVOKED_get0_revocationDate(
self._x509_revoked
- )
+ ),
)
@utils.cached_property
def extensions(self):
- return _REVOKED_CERTIFICATE_EXTENSION_PARSER.parse(
- self._backend, self._x509_revoked
+ return self._backend._revoked_cert_extension_parser.parse(
+ self._x509_revoked
)
@@ -227,14 +219,36 @@ def __ne__(self, other):
def fingerprint(self, algorithm):
h = hashes.Hash(algorithm, self._backend)
bio = self._backend._create_mem_bio_gc()
- res = self._backend._lib.i2d_X509_CRL_bio(
- bio, self._x509_crl
- )
+ res = self._backend._lib.i2d_X509_CRL_bio(bio, self._x509_crl)
self._backend.openssl_assert(res == 1)
der = self._backend._read_mem_bio(bio)
h.update(der)
return h.finalize()
+ @utils.cached_property
+ def _sorted_crl(self):
+ # X509_CRL_get0_by_serial sorts in place, which breaks a variety of
+ # things we don't want to break (like iteration and the signature).
+ # Let's dupe it and sort that instead.
+ dup = self._backend._lib.X509_CRL_dup(self._x509_crl)
+ self._backend.openssl_assert(dup != self._backend._ffi.NULL)
+ dup = self._backend._ffi.gc(dup, self._backend._lib.X509_CRL_free)
+ return dup
+
+ def get_revoked_certificate_by_serial_number(self, serial_number):
+ revoked = self._backend._ffi.new("X509_REVOKED **")
+ asn1_int = _encode_asn1_int_gc(self._backend, serial_number)
+ res = self._backend._lib.X509_CRL_get0_by_serial(
+ self._sorted_crl, revoked, asn1_int
+ )
+ if res == 0:
+ return None
+ else:
+ self._backend.openssl_assert(revoked[0] != self._backend._ffi.NULL)
+ return _RevokedCertificate(
+ self._backend, self._sorted_crl, revoked[0]
+ )
+
@property
def signature_hash_algorithm(self):
oid = self.signature_algorithm_oid
@@ -242,7 +256,7 @@ def signature_hash_algorithm(self):
return x509._SIG_OIDS_TO_HASH[oid]
except KeyError:
raise UnsupportedAlgorithm(
- "Signature algorithm OID:{0} not recognized".format(oid)
+ "Signature algorithm OID:{} not recognized".format(oid)
)
@property
@@ -337,13 +351,17 @@ def __len__(self):
@utils.cached_property
def extensions(self):
- return _CRL_EXTENSION_PARSER.parse(self._backend, self._x509_crl)
+ return self._backend._crl_extension_parser.parse(self._x509_crl)
def is_signature_valid(self, public_key):
- if not isinstance(public_key, (dsa.DSAPublicKey, rsa.RSAPublicKey,
- ec.EllipticCurvePublicKey)):
- raise TypeError('Expecting one of DSAPublicKey, RSAPublicKey,'
- ' or EllipticCurvePublicKey.')
+ if not isinstance(
+ public_key,
+ (dsa.DSAPublicKey, rsa.RSAPublicKey, ec.EllipticCurvePublicKey),
+ ):
+ raise TypeError(
+ "Expecting one of DSAPublicKey, RSAPublicKey,"
+ " or EllipticCurvePublicKey."
+ )
res = self._backend._lib.X509_CRL_verify(
self._x509_crl, public_key._evp_pkey
)
@@ -394,7 +412,7 @@ def signature_hash_algorithm(self):
return x509._SIG_OIDS_TO_HASH[oid]
except KeyError:
raise UnsupportedAlgorithm(
- "Signature algorithm OID:{0} not recognized".format(oid)
+ "Signature algorithm OID:{} not recognized".format(oid)
)
@property
@@ -410,7 +428,16 @@ def signature_algorithm_oid(self):
@utils.cached_property
def extensions(self):
x509_exts = self._backend._lib.X509_REQ_get_extensions(self._x509_req)
- return _CSR_EXTENSION_PARSER.parse(self._backend, x509_exts)
+ x509_exts = self._backend._ffi.gc(
+ x509_exts,
+ lambda x: self._backend._lib.sk_X509_EXTENSION_pop_free(
+ x,
+ self._backend._ffi.addressof(
+ self._backend._lib._original_lib, "X509_EXTENSION_free"
+ ),
+ ),
+ )
+ return self._backend._csr_extension_parser.parse(x509_exts)
def public_bytes(self, encoding):
bio = self._backend._create_mem_bio_gc()
@@ -458,6 +485,47 @@ def is_signature_valid(self):
return True
+ def get_attribute_for_oid(self, oid):
+ obj = _txt2obj_gc(self._backend, oid.dotted_string)
+ pos = self._backend._lib.X509_REQ_get_attr_by_OBJ(
+ self._x509_req, obj, -1
+ )
+ if pos == -1:
+ raise x509.AttributeNotFound(
+ "No {} attribute was found".format(oid), oid
+ )
+
+ attr = self._backend._lib.X509_REQ_get_attr(self._x509_req, pos)
+ self._backend.openssl_assert(attr != self._backend._ffi.NULL)
+ # We don't support multiple valued attributes for now.
+ self._backend.openssl_assert(
+ self._backend._lib.X509_ATTRIBUTE_count(attr) == 1
+ )
+ asn1_type = self._backend._lib.X509_ATTRIBUTE_get0_type(attr, 0)
+ self._backend.openssl_assert(asn1_type != self._backend._ffi.NULL)
+ # We need this to ensure that our C type cast is safe.
+ # Also this should always be a sane string type, but we'll see if
+ # that is true in the real world...
+ if asn1_type.type not in (
+ _ASN1Type.UTF8String.value,
+ _ASN1Type.PrintableString.value,
+ _ASN1Type.IA5String.value,
+ ):
+ raise ValueError(
+ "OID {} has a disallowed ASN.1 type: {}".format(
+ oid, asn1_type.type
+ )
+ )
+
+ data = self._backend._lib.X509_ATTRIBUTE_get0_data(
+ attr, 0, asn1_type.type, self._backend._ffi.NULL
+ )
+ self._backend.openssl_assert(data != self._backend._ffi.NULL)
+ # This cast is safe iff we assert on the type above to ensure
+ # that it is always a type of ASN1_STRING
+ data = self._backend._ffi.cast("ASN1_STRING *", data)
+ return _asn1_string_to_bytes(self._backend, data)
+
@utils.register_interface(
x509.certificate_transparency.SignedCertificateTimestamp
@@ -486,9 +554,9 @@ def log_id(self):
def timestamp(self):
timestamp = self._backend._lib.SCT_get_timestamp(self._sct)
milliseconds = timestamp % 1000
- return datetime.datetime.utcfromtimestamp(
- timestamp // 1000
- ).replace(microsecond=milliseconds * 1000)
+ return datetime.datetime.utcfromtimestamp(timestamp // 1000).replace(
+ microsecond=milliseconds * 1000
+ )
@property
def entry_type(self):
@@ -497,3 +565,23 @@ def entry_type(self):
# we only have precerts.
assert entry_type == self._backend._lib.CT_LOG_ENTRY_TYPE_PRECERT
return x509.certificate_transparency.LogEntryType.PRE_CERTIFICATE
+
+ @property
+ def _signature(self):
+ ptrptr = self._backend._ffi.new("unsigned char **")
+ res = self._backend._lib.SCT_get0_signature(self._sct, ptrptr)
+ self._backend.openssl_assert(res > 0)
+ self._backend.openssl_assert(ptrptr[0] != self._backend._ffi.NULL)
+ return self._backend._ffi.buffer(ptrptr[0], res)[:]
+
+ def __hash__(self):
+ return hash(self._signature)
+
+ def __eq__(self, other):
+ if not isinstance(other, _SignedCertificateTimestamp):
+ return NotImplemented
+
+ return self._signature == other._signature
+
+ def __ne__(self, other):
+ return not self == other
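
The x509 backend changes above surface three caller-visible behaviours: CRLs gain a duplicate-and-sort lookup by serial number, CSRs gain get_attribute_for_oid, and SCTs become hashable and comparable by signature. A minimal sketch of the first two from the caller's side, assuming this vendored copy tracks a cryptography 3.x release and that crl_pem and csr_pem are placeholder PEM blobs supplied by the reader:

    from cryptography import x509
    from cryptography.hazmat.backends import default_backend
    from cryptography.x509.oid import AttributeOID

    # Look up one revoked entry without iterating (or re-sorting) the CRL.
    crl = x509.load_pem_x509_crl(crl_pem, default_backend())
    entry = crl.get_revoked_certificate_by_serial_number(0x1234)
    if entry is not None:
        print(entry.serial_number, entry.revocation_date)

    # Fetch a single-valued CSR attribute such as the challenge password (bytes).
    csr = x509.load_pem_x509_csr(csr_pem, default_backend())
    challenge = csr.get_attribute_for_oid(AttributeOID.CHALLENGE_PASSWORD)
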
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/bindings/__pycache__/__init__.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/bindings/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000..8e72dd3
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/bindings/__pycache__/__init__.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/bindings/_constant_time.so b/functions/source/CreateSSHKey/cryptography/hazmat/bindings/_constant_time.so
deleted file mode 100755
index 9d2f219..0000000
Binary files a/functions/source/CreateSSHKey/cryptography/hazmat/bindings/_constant_time.so and /dev/null differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/bindings/_openssl.abi3.so b/functions/source/CreateSSHKey/cryptography/hazmat/bindings/_openssl.abi3.so
new file mode 100755
index 0000000..0909db1
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/bindings/_openssl.abi3.so differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/bindings/_openssl.so b/functions/source/CreateSSHKey/cryptography/hazmat/bindings/_openssl.so
deleted file mode 100755
index d367255..0000000
Binary files a/functions/source/CreateSSHKey/cryptography/hazmat/bindings/_openssl.so and /dev/null differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/bindings/_padding.abi3.so b/functions/source/CreateSSHKey/cryptography/hazmat/bindings/_padding.abi3.so
new file mode 100755
index 0000000..3bcebc5
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/bindings/_padding.abi3.so differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/bindings/_padding.so b/functions/source/CreateSSHKey/cryptography/hazmat/bindings/_padding.so
deleted file mode 100755
index 9cfc66f..0000000
Binary files a/functions/source/CreateSSHKey/cryptography/hazmat/bindings/_padding.so and /dev/null differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000..d4545c3
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-38.pyc
new file mode 100644
index 0000000..3656e35
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-38.pyc
new file mode 100644
index 0000000..ba8e920
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/bindings/openssl/_conditional.py b/functions/source/CreateSSHKey/cryptography/hazmat/bindings/openssl/_conditional.py
index 866cf4a..9cf489a 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/bindings/openssl/_conditional.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/bindings/openssl/_conditional.py
@@ -5,67 +5,11 @@
from __future__ import absolute_import, division, print_function
-def cryptography_has_cms():
- return [
- "BIO_new_CMS",
- "i2d_CMS_bio_stream",
- "PEM_write_bio_CMS_stream",
- "CMS_final",
- "CMS_sign",
- "CMS_verify",
- "CMS_encrypt",
- "CMS_decrypt",
- "CMS_add1_signer",
- "CMS_TEXT",
- "CMS_NOCERTS",
- "CMS_NO_CONTENT_VERIFY",
- "CMS_NO_ATTR_VERIFY",
- "CMS_NOSIGS",
- "CMS_NOINTERN",
- "CMS_NO_SIGNER_CERT_VERIFY",
- "CMS_NOVERIFY",
- "CMS_DETACHED",
- "CMS_BINARY",
- "CMS_NOATTR",
- "CMS_NOSMIMECAP",
- "CMS_NOOLDMIMETYPE",
- "CMS_CRLFEOL",
- "CMS_STREAM",
- "CMS_NOCRL",
- "CMS_PARTIAL",
- "CMS_REUSE_DIGEST",
- "CMS_USE_KEYID",
- "CMS_DEBUG_DECRYPT",
- ]
-
-
def cryptography_has_ec2m():
return [
- "EC_GF2m_simple_method",
"EC_POINT_set_affine_coordinates_GF2m",
"EC_POINT_get_affine_coordinates_GF2m",
"EC_POINT_set_compressed_coordinates_GF2m",
- "EC_GROUP_set_curve_GF2m",
- "EC_GROUP_get_curve_GF2m",
- "EC_GROUP_new_curve_GF2m",
- ]
-
-
-def cryptography_has_ec_1_0_2():
- return [
- "EC_curve_nid2nist",
- ]
-
-
-def cryptography_has_set_ecdh_auto():
- return [
- "SSL_CTX_set_ecdh_auto",
- ]
-
-
-def cryptography_has_rsa_r_pkcs_decoding_error():
- return [
- "RSA_R_PKCS_DECODING_ERROR"
]
@@ -89,66 +33,22 @@ def cryptography_has_ssl3_method():
]
-def cryptography_has_alpn():
- return [
- "SSL_CTX_set_alpn_protos",
- "SSL_set_alpn_protos",
- "SSL_CTX_set_alpn_select_cb",
- "SSL_get0_alpn_selected",
- ]
-
-
-def cryptography_has_compression():
- return [
- "SSL_get_current_compression",
- "SSL_get_current_expansion",
- "SSL_COMP_get_name",
- ]
-
-
-def cryptography_has_get_server_tmp_key():
- return [
- "SSL_get_server_tmp_key",
- ]
-
-
-def cryptography_has_102_verification_error_codes():
- return [
- 'X509_V_ERR_SUITE_B_INVALID_VERSION',
- 'X509_V_ERR_SUITE_B_INVALID_ALGORITHM',
- 'X509_V_ERR_SUITE_B_INVALID_CURVE',
- 'X509_V_ERR_SUITE_B_INVALID_SIGNATURE_ALGORITHM',
- 'X509_V_ERR_SUITE_B_LOS_NOT_ALLOWED',
- 'X509_V_ERR_SUITE_B_CANNOT_SIGN_P_384_WITH_P_256',
- 'X509_V_ERR_HOSTNAME_MISMATCH',
- 'X509_V_ERR_EMAIL_MISMATCH',
- 'X509_V_ERR_IP_ADDRESS_MISMATCH'
- ]
-
-
-def cryptography_has_102_verification_params():
+def cryptography_has_102_verification():
return [
+ "X509_V_ERR_SUITE_B_INVALID_VERSION",
+ "X509_V_ERR_SUITE_B_INVALID_ALGORITHM",
+ "X509_V_ERR_SUITE_B_INVALID_CURVE",
+ "X509_V_ERR_SUITE_B_INVALID_SIGNATURE_ALGORITHM",
+ "X509_V_ERR_SUITE_B_LOS_NOT_ALLOWED",
+ "X509_V_ERR_SUITE_B_CANNOT_SIGN_P_384_WITH_P_256",
"X509_V_FLAG_SUITEB_128_LOS_ONLY",
"X509_V_FLAG_SUITEB_192_LOS",
"X509_V_FLAG_SUITEB_128_LOS",
- "X509_VERIFY_PARAM_set1_host",
- "X509_VERIFY_PARAM_set1_email",
- "X509_VERIFY_PARAM_set1_ip",
- "X509_VERIFY_PARAM_set1_ip_asc",
- "X509_VERIFY_PARAM_set_hostflags",
- ]
-
-
-def cryptography_has_x509_v_flag_trusted_first():
- return [
- "X509_V_FLAG_TRUSTED_FIRST",
]
-def cryptography_has_x509_v_flag_partial_chain():
- return [
- "X509_V_FLAG_PARTIAL_CHAIN",
- ]
+def cryptography_has_110_verification_params():
+ return ["X509_CHECK_FLAG_NEVER_CHECK_SUBJECT"]
def cryptography_has_set_cert_cb():
@@ -176,11 +76,7 @@ def cryptography_has_tls_st():
def cryptography_has_locking_callbacks():
return [
- "CRYPTO_LOCK",
- "CRYPTO_UNLOCK",
- "CRYPTO_READ",
- "CRYPTO_LOCK_SSL",
- "CRYPTO_lock",
+ "Cryptography_setup_ssl_threads",
]
@@ -190,14 +86,6 @@ def cryptography_has_scrypt():
]
-def cryptography_has_generic_dtls_method():
- return [
- "DTLS_method",
- "DTLS_server_method",
- "DTLS_client_method",
- ]
-
-
def cryptography_has_evp_pkey_dhx():
return [
"EVP_PKEY_DHX",
@@ -215,11 +103,19 @@ def cryptography_has_sct():
"SCT_get_version",
"SCT_get_log_entry_type",
"SCT_get0_log_id",
+ "SCT_get0_signature",
"SCT_get_timestamp",
"SCT_set_source",
"sk_SCT_num",
"sk_SCT_value",
"SCT_LIST_free",
+ "sk_SCT_push",
+ "sk_SCT_new_null",
+ "SCT_new",
+ "SCT_set1_log_id",
+ "SCT_set_timestamp",
+ "SCT_set_version",
+ "SCT_set_log_entry_type",
]
@@ -232,10 +128,52 @@ def cryptography_has_x509_store_ctx_get_issuer():
def cryptography_has_x25519():
return [
+ "EVP_PKEY_X25519",
"NID_X25519",
]
+def cryptography_has_x448():
+ return [
+ "EVP_PKEY_X448",
+ "NID_X448",
+ ]
+
+
+def cryptography_has_ed448():
+ return [
+ "EVP_PKEY_ED448",
+ "NID_ED448",
+ ]
+
+
+def cryptography_has_ed25519():
+ return [
+ "NID_ED25519",
+ "EVP_PKEY_ED25519",
+ ]
+
+
+def cryptography_has_poly1305():
+ return [
+ "NID_poly1305",
+ "EVP_PKEY_POLY1305",
+ ]
+
+
+def cryptography_has_oneshot_evp_digest_sign_verify():
+ return [
+ "EVP_DigestSign",
+ "EVP_DigestVerify",
+ ]
+
+
+def cryptography_has_evp_digestfinal_xof():
+ return [
+ "EVP_DigestFinalXOF",
+ ]
+
+
def cryptography_has_evp_pkey_get_set_tls_encodedpoint():
return [
"EVP_PKEY_get1_tls_encodedpoint",
@@ -245,50 +183,129 @@ def cryptography_has_evp_pkey_get_set_tls_encodedpoint():
def cryptography_has_fips():
return [
- "FIPS_set_mode",
+ "FIPS_mode_set",
"FIPS_mode",
]
+def cryptography_has_ssl_sigalgs():
+ return [
+ "SSL_CTX_set1_sigalgs_list",
+ "SSL_get_sigalgs",
+ ]
+
+
+def cryptography_has_psk():
+ return [
+ "SSL_CTX_use_psk_identity_hint",
+ "SSL_CTX_set_psk_server_callback",
+ "SSL_CTX_set_psk_client_callback",
+ ]
+
+
+def cryptography_has_custom_ext():
+ return [
+ "SSL_CTX_add_client_custom_ext",
+ "SSL_CTX_add_server_custom_ext",
+ "SSL_extension_supported",
+ ]
+
+
+def cryptography_has_openssl_cleanup():
+ return [
+ "OPENSSL_cleanup",
+ ]
+
+
+def cryptography_has_cipher_details():
+ return [
+ "SSL_CIPHER_is_aead",
+ "SSL_CIPHER_get_cipher_nid",
+ "SSL_CIPHER_get_digest_nid",
+ "SSL_CIPHER_get_kx_nid",
+ "SSL_CIPHER_get_auth_nid",
+ ]
+
+
+def cryptography_has_tlsv13():
+ return [
+ "SSL_OP_NO_TLSv1_3",
+ "SSL_VERIFY_POST_HANDSHAKE",
+ "SSL_CTX_set_ciphersuites",
+ "SSL_verify_client_post_handshake",
+ "SSL_CTX_set_post_handshake_auth",
+ "SSL_set_post_handshake_auth",
+ "SSL_SESSION_get_max_early_data",
+ "SSL_write_early_data",
+ "SSL_read_early_data",
+ "SSL_CTX_set_max_early_data",
+ ]
+
+
+def cryptography_has_keylog():
+ return [
+ "SSL_CTX_set_keylog_callback",
+ "SSL_CTX_get_keylog_callback",
+ ]
+
+
+def cryptography_has_raw_key():
+ return [
+ "EVP_PKEY_new_raw_private_key",
+ "EVP_PKEY_new_raw_public_key",
+ "EVP_PKEY_get_raw_private_key",
+ "EVP_PKEY_get_raw_public_key",
+ ]
+
+
+def cryptography_has_engine():
+ return [
+ "ENGINE_by_id",
+ "ENGINE_init",
+ "ENGINE_finish",
+ "ENGINE_get_default_RAND",
+ "ENGINE_set_default_RAND",
+ "ENGINE_unregister_RAND",
+ "ENGINE_ctrl_cmd",
+ "ENGINE_free",
+ "ENGINE_get_name",
+ "Cryptography_add_osrandom_engine",
+ ]
+
+
+def cryptography_has_verified_chain():
+ return [
+ "SSL_get0_verified_chain",
+ ]
+
+
+def cryptography_has_srtp():
+ return [
+ "SSL_CTX_set_tlsext_use_srtp",
+ "SSL_set_tlsext_use_srtp",
+ "SSL_get_selected_srtp_profile",
+ ]
+
+
# This is a mapping of
# {condition: function-returning-names-dependent-on-that-condition} so we can
# loop over them and delete unsupported names at runtime. It will be removed
# when cffi supports #if in cdef. We use functions instead of just a dict of
# lists so we can use coverage to measure which are used.
CONDITIONAL_NAMES = {
- "Cryptography_HAS_CMS": cryptography_has_cms,
"Cryptography_HAS_EC2M": cryptography_has_ec2m,
- "Cryptography_HAS_EC_1_0_2": cryptography_has_ec_1_0_2,
- "Cryptography_HAS_SET_ECDH_AUTO": cryptography_has_set_ecdh_auto,
- "Cryptography_HAS_RSA_R_PKCS_DECODING_ERROR": (
- cryptography_has_rsa_r_pkcs_decoding_error
- ),
"Cryptography_HAS_RSA_OAEP_MD": cryptography_has_rsa_oaep_md,
"Cryptography_HAS_RSA_OAEP_LABEL": cryptography_has_rsa_oaep_label,
"Cryptography_HAS_SSL3_METHOD": cryptography_has_ssl3_method,
- "Cryptography_HAS_ALPN": cryptography_has_alpn,
- "Cryptography_HAS_COMPRESSION": cryptography_has_compression,
- "Cryptography_HAS_GET_SERVER_TMP_KEY": cryptography_has_get_server_tmp_key,
- "Cryptography_HAS_102_VERIFICATION_ERROR_CODES": (
- cryptography_has_102_verification_error_codes
- ),
- "Cryptography_HAS_102_VERIFICATION_PARAMS": (
- cryptography_has_102_verification_params
- ),
- "Cryptography_HAS_X509_V_FLAG_TRUSTED_FIRST": (
- cryptography_has_x509_v_flag_trusted_first
- ),
- "Cryptography_HAS_X509_V_FLAG_PARTIAL_CHAIN": (
- cryptography_has_x509_v_flag_partial_chain
+ "Cryptography_HAS_102_VERIFICATION": cryptography_has_102_verification,
+ "Cryptography_HAS_110_VERIFICATION_PARAMS": (
+ cryptography_has_110_verification_params
),
"Cryptography_HAS_SET_CERT_CB": cryptography_has_set_cert_cb,
"Cryptography_HAS_SSL_ST": cryptography_has_ssl_st,
"Cryptography_HAS_TLS_ST": cryptography_has_tls_st,
"Cryptography_HAS_LOCKING_CALLBACKS": cryptography_has_locking_callbacks,
"Cryptography_HAS_SCRYPT": cryptography_has_scrypt,
- "Cryptography_HAS_GENERIC_DTLS_METHOD": (
- cryptography_has_generic_dtls_method
- ),
"Cryptography_HAS_EVP_PKEY_DHX": cryptography_has_evp_pkey_dhx,
"Cryptography_HAS_MEM_FUNCTIONS": cryptography_has_mem_functions,
"Cryptography_HAS_SCT": cryptography_has_sct,
@@ -296,8 +313,29 @@ def cryptography_has_fips():
cryptography_has_x509_store_ctx_get_issuer
),
"Cryptography_HAS_X25519": cryptography_has_x25519,
+ "Cryptography_HAS_X448": cryptography_has_x448,
+ "Cryptography_HAS_ED448": cryptography_has_ed448,
+ "Cryptography_HAS_ED25519": cryptography_has_ed25519,
+ "Cryptography_HAS_POLY1305": cryptography_has_poly1305,
+ "Cryptography_HAS_ONESHOT_EVP_DIGEST_SIGN_VERIFY": (
+ cryptography_has_oneshot_evp_digest_sign_verify
+ ),
"Cryptography_HAS_EVP_PKEY_get_set_tls_encodedpoint": (
cryptography_has_evp_pkey_get_set_tls_encodedpoint
),
"Cryptography_HAS_FIPS": cryptography_has_fips,
+ "Cryptography_HAS_SIGALGS": cryptography_has_ssl_sigalgs,
+ "Cryptography_HAS_PSK": cryptography_has_psk,
+ "Cryptography_HAS_CUSTOM_EXT": cryptography_has_custom_ext,
+ "Cryptography_HAS_OPENSSL_CLEANUP": cryptography_has_openssl_cleanup,
+ "Cryptography_HAS_CIPHER_DETAILS": cryptography_has_cipher_details,
+ "Cryptography_HAS_TLSv1_3": cryptography_has_tlsv13,
+ "Cryptography_HAS_KEYLOG": cryptography_has_keylog,
+ "Cryptography_HAS_RAW_KEY": cryptography_has_raw_key,
+ "Cryptography_HAS_EVP_DIGESTFINAL_XOF": (
+ cryptography_has_evp_digestfinal_xof
+ ),
+ "Cryptography_HAS_ENGINE": cryptography_has_engine,
+ "Cryptography_HAS_VERIFIED_CHAIN": cryptography_has_verified_chain,
+ "Cryptography_HAS_SRTP": cryptography_has_srtp,
}
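
The trailing comment explains that CONDITIONAL_NAMES maps each Cryptography_HAS_* flag to a function returning the cffi names that flag guards. Roughly, binding.py walks this mapping once at import time and rebuilds the lib object without the unsupported names; the sketch below approximates that consumer (the helper name and exact mechanics in the vendored binding.py may differ):

    import types

    def build_conditional_library(lib, conditional_names):
        # Collect every name whose guarding flag is false on this OpenSSL build,
        # then copy the remaining attributes onto a fresh module-like object.
        conditional_lib = types.ModuleType("lib")
        conditional_lib._original_lib = lib
        excluded_names = set()
        for condition, names_cb in conditional_names.items():
            if not getattr(lib, condition):
                excluded_names.update(names_cb())
        for attr in dir(lib):
            if attr not in excluded_names:
                setattr(conditional_lib, attr, getattr(lib, attr))
        return conditional_lib
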
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/bindings/openssl/binding.py b/functions/source/CreateSSHKey/cryptography/hazmat/bindings/openssl/binding.py
index 7790213..178a81e 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/bindings/openssl/binding.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/bindings/openssl/binding.py
@@ -7,7 +7,9 @@
import collections
import threading
import types
+import warnings
+import cryptography
from cryptography import utils
from cryptography.exceptions import InternalError
from cryptography.hazmat.bindings._openssl import ffi, lib
@@ -50,19 +52,31 @@ def _consume_errors(lib):
return errors
-def _openssl_assert(lib, ok):
- if not ok:
- errors = _consume_errors(lib)
- errors_with_text = []
- for err in errors:
- err_text_reason = ffi.string(
- lib.ERR_error_string(err.code, ffi.NULL)
- )
- errors_with_text.append(
- _OpenSSLErrorWithText(
- err.code, err.lib, err.func, err.reason, err_text_reason
- )
+def _errors_with_text(errors):
+ errors_with_text = []
+ for err in errors:
+ buf = ffi.new("char[]", 256)
+ lib.ERR_error_string_n(err.code, buf, len(buf))
+ err_text_reason = ffi.string(buf)
+
+ errors_with_text.append(
+ _OpenSSLErrorWithText(
+ err.code, err.lib, err.func, err.reason, err_text_reason
)
+ )
+
+ return errors_with_text
+
+
+def _consume_errors_with_text(lib):
+ return _errors_with_text(_consume_errors(lib))
+
+
+def _openssl_assert(lib, ok, errors=None):
+ if not ok:
+ if errors is None:
+ errors = _consume_errors(lib)
+ errors_with_text = _errors_with_text(errors)
raise InternalError(
"Unknown OpenSSL error. This error is commonly encountered when "
@@ -72,7 +86,7 @@ def _openssl_assert(lib, ok):
"please file an issue at https://github.com/pyca/cryptography/"
"issues with information on how to reproduce "
"this. ({0!r})".format(errors_with_text),
- errors_with_text
+ errors_with_text,
)
@@ -95,6 +109,7 @@ class Binding(object):
"""
OpenSSL API wrapper.
"""
+
lib = None
ffi = ffi
_lib_loaded = False
@@ -112,10 +127,9 @@ def _register_osrandom_engine(cls):
# reliably clear the error queue. Once we clear it here we will
# error on any subsequent unexpected item in the stack.
cls.lib.ERR_clear_error()
- cls._osrandom_engine_id = cls.lib.Cryptography_osrandom_engine_id
- cls._osrandom_engine_name = cls.lib.Cryptography_osrandom_engine_name
- result = cls.lib.Cryptography_add_osrandom_engine()
- _openssl_assert(cls.lib, result in (1, 2))
+ if cls.lib.CRYPTOGRAPHY_NEEDS_OSRANDOM_ENGINE:
+ result = cls.lib.Cryptography_add_osrandom_engine()
+ _openssl_assert(cls.lib, result in (1, 2))
@classmethod
def _ensure_ffi_initialized(cls):
@@ -139,18 +153,59 @@ def init_static_locks(cls):
# the setup for this.
__import__("_ssl")
- if cls.lib.CRYPTO_get_locking_callback() != cls.ffi.NULL:
+ if (
+ not cls.lib.Cryptography_HAS_LOCKING_CALLBACKS
+ or cls.lib.CRYPTO_get_locking_callback() != cls.ffi.NULL
+ ):
return
# If nothing else has setup a locking callback already, we set up
# our own
- res = lib._setup_ssl_threads()
+ res = lib.Cryptography_setup_ssl_threads()
_openssl_assert(cls.lib, res == 1)
+def _verify_openssl_version(lib):
+ if (
+ lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_110
+ and not lib.CRYPTOGRAPHY_IS_LIBRESSL
+ ):
+ warnings.warn(
+ "OpenSSL version 1.0.2 is no longer supported by the OpenSSL "
+ "project, please upgrade. The next version of cryptography will "
+ "drop support for it.",
+ utils.CryptographyDeprecationWarning,
+ )
+
+
+def _verify_package_version(version):
+ # Occasionally we run into situations where the version of the Python
+ # package does not match the version of the shared object that is loaded.
+ # This may occur in environments where multiple versions of cryptography
+ # are installed and available in the python path. To avoid errors cropping
+ # up later this code checks that the currently imported package and the
+ # shared object that were loaded have the same version and raise an
+ # ImportError if they do not
+ so_package_version = ffi.string(lib.CRYPTOGRAPHY_PACKAGE_VERSION)
+ if version.encode("ascii") != so_package_version:
+ raise ImportError(
+ "The version of cryptography does not match the loaded "
+ "shared object. This can happen if you have multiple copies of "
+ "cryptography installed in your Python path. Please try creating "
+ "a new virtual environment to resolve this issue. "
+ "Loaded python version: {}, shared object version: {}".format(
+ version, so_package_version
+ )
+ )
+
+
+_verify_package_version(cryptography.__version__)
+
# OpenSSL is not thread safe until the locks are initialized. We call this
# method in module scope so that it executes with the import lock. On
# Pythons < 3.4 this import lock is a global lock, which can prevent a race
# condition registering the OpenSSL locks. On Python 3.4+ the import lock
# is per module so this approach will not work.
Binding.init_static_locks()
+
+_verify_openssl_version(Binding.lib)
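
Two module-level guards are added above: _verify_package_version raises ImportError when the Python package and the compiled _openssl extension disagree, and _verify_openssl_version emits a deprecation warning on pre-1.1.0 OpenSSL. A standalone sketch of the first comparison, with arbitrary version strings standing in for real ones:

    def versions_match(package_version, so_package_version):
        # The shared object reports its version as bytes, the package as str.
        return package_version.encode("ascii") == so_package_version

    assert versions_match("2.9.2", b"2.9.2")
    assert not versions_match("2.9.2", b"3.4.8")  # the mixed-install case that now fails fast
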
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/__pycache__/__init__.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000..39bbba5
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/__pycache__/__init__.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/__pycache__/cmac.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/__pycache__/cmac.cpython-38.pyc
new file mode 100644
index 0000000..992015b
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/__pycache__/cmac.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/__pycache__/constant_time.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/__pycache__/constant_time.cpython-38.pyc
new file mode 100644
index 0000000..f86ae7b
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/__pycache__/constant_time.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/__pycache__/hashes.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/__pycache__/hashes.cpython-38.pyc
new file mode 100644
index 0000000..b4939e7
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/__pycache__/hashes.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/__pycache__/hmac.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/__pycache__/hmac.cpython-38.pyc
new file mode 100644
index 0000000..479713d
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/__pycache__/hmac.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/__pycache__/keywrap.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/__pycache__/keywrap.cpython-38.pyc
new file mode 100644
index 0000000..69ea1db
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/__pycache__/keywrap.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/__pycache__/padding.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/__pycache__/padding.cpython-38.pyc
new file mode 100644
index 0000000..ae59543
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/__pycache__/padding.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/__pycache__/poly1305.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/__pycache__/poly1305.cpython-38.pyc
new file mode 100644
index 0000000..1ff8c71
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/__pycache__/poly1305.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000..fb08204
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-38.pyc
new file mode 100644
index 0000000..879060c
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-38.pyc
new file mode 100644
index 0000000..d14dd3d
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-38.pyc
new file mode 100644
index 0000000..c08520a
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-38.pyc
new file mode 100644
index 0000000..283b48c
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-38.pyc
new file mode 100644
index 0000000..f1bbdb0
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-38.pyc
new file mode 100644
index 0000000..0e68359
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-38.pyc
new file mode 100644
index 0000000..60620ec
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-38.pyc
new file mode 100644
index 0000000..d75f51b
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-38.pyc
new file mode 100644
index 0000000..ab77341
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-38.pyc
new file mode 100644
index 0000000..54b34cd
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/dh.py b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/dh.py
index 4fc9952..cd9fbfa 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/dh.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/dh.py
@@ -9,9 +9,11 @@
import six
from cryptography import utils
+from cryptography.hazmat.backends import _get_backend
-def generate_parameters(generator, key_size, backend):
+def generate_parameters(generator, key_size, backend=None):
+ backend = _get_backend(backend)
return backend.generate_dh_parameters(generator, key_size)
@@ -21,8 +23,9 @@ def __init__(self, x, public_numbers):
raise TypeError("x must be an integer.")
if not isinstance(public_numbers, DHPublicNumbers):
- raise TypeError("public_numbers must be an instance of "
- "DHPublicNumbers.")
+ raise TypeError(
+ "public_numbers must be an instance of " "DHPublicNumbers."
+ )
self._x = x
self._public_numbers = public_numbers
@@ -32,14 +35,15 @@ def __eq__(self, other):
return NotImplemented
return (
- self._x == other._x and
- self._public_numbers == other._public_numbers
+ self._x == other._x
+ and self._public_numbers == other._public_numbers
)
def __ne__(self, other):
return not self == other
- def private_key(self, backend):
+ def private_key(self, backend=None):
+ backend = _get_backend(backend)
return backend.load_dh_private_numbers(self)
public_numbers = utils.read_only_property("_public_numbers")
@@ -53,7 +57,8 @@ def __init__(self, y, parameter_numbers):
if not isinstance(parameter_numbers, DHParameterNumbers):
raise TypeError(
- "parameters must be an instance of DHParameterNumbers.")
+ "parameters must be an instance of DHParameterNumbers."
+ )
self._y = y
self._parameter_numbers = parameter_numbers
@@ -63,14 +68,15 @@ def __eq__(self, other):
return NotImplemented
return (
- self._y == other._y and
- self._parameter_numbers == other._parameter_numbers
+ self._y == other._y
+ and self._parameter_numbers == other._parameter_numbers
)
def __ne__(self, other):
return not self == other
- def public_key(self, backend):
+ def public_key(self, backend=None):
+ backend = _get_backend(backend)
return backend.load_dh_public_numbers(self)
y = utils.read_only_property("_y")
@@ -79,9 +85,8 @@ def public_key(self, backend):
class DHParameterNumbers(object):
def __init__(self, p, g, q=None):
- if (
- not isinstance(p, six.integer_types) or
- not isinstance(g, six.integer_types)
+ if not isinstance(p, six.integer_types) or not isinstance(
+ g, six.integer_types
):
raise TypeError("p and g must be integers")
if q is not None and not isinstance(q, six.integer_types):
@@ -99,15 +104,14 @@ def __eq__(self, other):
return NotImplemented
return (
- self._p == other._p and
- self._g == other._g and
- self._q == other._q
+ self._p == other._p and self._g == other._g and self._q == other._q
)
def __ne__(self, other):
return not self == other
- def parameters(self, backend):
+ def parameters(self, backend=None):
+ backend = _get_backend(backend)
return backend.load_dh_parameter_numbers(self)
p = utils.read_only_property("_p")
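
The dh.py hunks make the backend argument optional everywhere by resolving it through _get_backend, so callers no longer need to import and pass default_backend(). A short usage sketch (2048-bit parameter generation is legitimately slow):

    from cryptography.hazmat.primitives.asymmetric import dh

    parameters = dh.generate_parameters(generator=2, key_size=2048)  # no backend argument
    private_key = parameters.generate_private_key()
    peer_public = parameters.generate_private_key().public_key()
    shared_secret = private_key.exchange(peer_public)
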
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/dsa.py b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/dsa.py
index 03e6a53..8ccc666 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/dsa.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/dsa.py
@@ -9,6 +9,7 @@
import six
from cryptography import utils
+from cryptography.hazmat.backends import _get_backend
@six.add_metaclass(abc.ABCMeta)
@@ -119,19 +120,23 @@ def verify(self, signature, data, algorithm):
DSAPublicKeyWithSerialization = DSAPublicKey
-def generate_parameters(key_size, backend):
+def generate_parameters(key_size, backend=None):
+ backend = _get_backend(backend)
return backend.generate_dsa_parameters(key_size)
-def generate_private_key(key_size, backend):
+def generate_private_key(key_size, backend=None):
+ backend = _get_backend(backend)
return backend.generate_dsa_private_key_and_parameters(key_size)
def _check_dsa_parameters(parameters):
- if utils.bit_length(parameters.p) not in [1024, 2048, 3072]:
- raise ValueError("p must be exactly 1024, 2048, or 3072 bits long")
- if utils.bit_length(parameters.q) not in [160, 256]:
- raise ValueError("q must be exactly 160 or 256 bits long")
+ if parameters.p.bit_length() not in [1024, 2048, 3072, 4096]:
+ raise ValueError(
+ "p must be exactly 1024, 2048, 3072, or 4096 bits long"
+ )
+ if parameters.q.bit_length() not in [160, 224, 256]:
+ raise ValueError("q must be exactly 160, 224, or 256 bits long")
if not (1 < parameters.g < parameters.p):
raise ValueError("g, p don't satisfy 1 < g < p.")
@@ -150,9 +155,9 @@ def _check_dsa_private_numbers(numbers):
class DSAParameterNumbers(object):
def __init__(self, p, q, g):
if (
- not isinstance(p, six.integer_types) or
- not isinstance(q, six.integer_types) or
- not isinstance(g, six.integer_types)
+ not isinstance(p, six.integer_types)
+ or not isinstance(q, six.integer_types)
+ or not isinstance(g, six.integer_types)
):
raise TypeError(
"DSAParameterNumbers p, q, and g arguments must be integers."
@@ -166,7 +171,8 @@ def __init__(self, p, q, g):
q = utils.read_only_property("_q")
g = utils.read_only_property("_g")
- def parameters(self, backend):
+ def parameters(self, backend=None):
+ backend = _get_backend(backend)
return backend.load_dsa_parameter_numbers(self)
def __eq__(self, other):
@@ -180,9 +186,8 @@ def __ne__(self, other):
def __repr__(self):
return (
- "".format(
- self=self
- )
+ "".format(self=self)
)
@@ -202,7 +207,8 @@ def __init__(self, y, parameter_numbers):
y = utils.read_only_property("_y")
parameter_numbers = utils.read_only_property("_parameter_numbers")
- def public_key(self, backend):
+ def public_key(self, backend=None):
+ backend = _get_backend(backend)
return backend.load_dsa_public_numbers(self)
def __eq__(self, other):
@@ -210,8 +216,8 @@ def __eq__(self, other):
return NotImplemented
return (
- self.y == other.y and
- self.parameter_numbers == other.parameter_numbers
+ self.y == other.y
+ and self.parameter_numbers == other.parameter_numbers
)
def __ne__(self, other):
@@ -239,7 +245,8 @@ def __init__(self, x, public_numbers):
x = utils.read_only_property("_x")
public_numbers = utils.read_only_property("_public_numbers")
- def private_key(self, backend):
+ def private_key(self, backend=None):
+ backend = _get_backend(backend)
return backend.load_dsa_private_numbers(self)
def __eq__(self, other):
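
Besides the optional backend, _check_dsa_parameters above now also accepts 4096-bit p and 224-bit q. From the caller's side the usual flow is unchanged; a minimal sketch:

    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.asymmetric import dsa

    private_key = dsa.generate_private_key(key_size=2048)  # backend now defaults
    signature = private_key.sign(b"payload", hashes.SHA256())
    # verify() raises InvalidSignature if the payload or signature was tampered with.
    private_key.public_key().verify(signature, b"payload", hashes.SHA256())
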
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/ec.py b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/ec.py
index 7931b08..c7e694f 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/ec.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/ec.py
@@ -5,10 +5,35 @@
from __future__ import absolute_import, division, print_function
import abc
+import warnings
import six
from cryptography import utils
+from cryptography.hazmat._oid import ObjectIdentifier
+from cryptography.hazmat.backends import _get_backend
+
+
+class EllipticCurveOID(object):
+ SECP192R1 = ObjectIdentifier("1.2.840.10045.3.1.1")
+ SECP224R1 = ObjectIdentifier("1.3.132.0.33")
+ SECP256K1 = ObjectIdentifier("1.3.132.0.10")
+ SECP256R1 = ObjectIdentifier("1.2.840.10045.3.1.7")
+ SECP384R1 = ObjectIdentifier("1.3.132.0.34")
+ SECP521R1 = ObjectIdentifier("1.3.132.0.35")
+ BRAINPOOLP256R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.7")
+ BRAINPOOLP384R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.11")
+ BRAINPOOLP512R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.13")
+ SECT163K1 = ObjectIdentifier("1.3.132.0.1")
+ SECT163R2 = ObjectIdentifier("1.3.132.0.15")
+ SECT233K1 = ObjectIdentifier("1.3.132.0.26")
+ SECT233R1 = ObjectIdentifier("1.3.132.0.27")
+ SECT283K1 = ObjectIdentifier("1.3.132.0.16")
+ SECT283R1 = ObjectIdentifier("1.3.132.0.17")
+ SECT409K1 = ObjectIdentifier("1.3.132.0.36")
+ SECT409R1 = ObjectIdentifier("1.3.132.0.37")
+ SECT571K1 = ObjectIdentifier("1.3.132.0.38")
+ SECT571R1 = ObjectIdentifier("1.3.132.0.39")
@six.add_metaclass(abc.ABCMeta)
@@ -68,7 +93,7 @@ def key_size(self):
Bit size of a secret scalar for the curve.
"""
- @abc.abstractproperty
+ @abc.abstractmethod
def sign(self, data, signature_algorithm):
"""
Signs the data
@@ -128,6 +153,23 @@ def verify(self, signature, data, signature_algorithm):
Verifies the signature of the data.
"""
+ @classmethod
+ def from_encoded_point(cls, curve, data):
+ utils._check_bytes("data", data)
+
+ if not isinstance(curve, EllipticCurve):
+ raise TypeError("curve must be an EllipticCurve instance")
+
+ if len(data) == 0:
+ raise ValueError("data must not be an empty byte string")
+
+ if six.indexbytes(data, 0) not in [0x02, 0x03, 0x04]:
+ raise ValueError("Unsupported elliptic curve point type")
+
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ return backend.load_elliptic_curve_public_bytes(curve, data)
+
EllipticCurvePublicKeyWithSerialization = EllipticCurvePublicKey
@@ -135,7 +177,7 @@ def verify(self, signature, data, signature_algorithm):
@utils.register_interface(EllipticCurve)
class SECT571R1(object):
name = "sect571r1"
- key_size = 571
+ key_size = 570
@utils.register_interface(EllipticCurve)
@@ -228,28 +270,46 @@ class SECP192R1(object):
key_size = 192
+@utils.register_interface(EllipticCurve)
+class BrainpoolP256R1(object):
+ name = "brainpoolP256r1"
+ key_size = 256
+
+
+@utils.register_interface(EllipticCurve)
+class BrainpoolP384R1(object):
+ name = "brainpoolP384r1"
+ key_size = 384
+
+
+@utils.register_interface(EllipticCurve)
+class BrainpoolP512R1(object):
+ name = "brainpoolP512r1"
+ key_size = 512
+
+
_CURVE_TYPES = {
"prime192v1": SECP192R1,
"prime256v1": SECP256R1,
-
"secp192r1": SECP192R1,
"secp224r1": SECP224R1,
"secp256r1": SECP256R1,
"secp384r1": SECP384R1,
"secp521r1": SECP521R1,
"secp256k1": SECP256K1,
-
"sect163k1": SECT163K1,
"sect233k1": SECT233K1,
"sect283k1": SECT283K1,
"sect409k1": SECT409K1,
"sect571k1": SECT571K1,
-
"sect163r2": SECT163R2,
"sect233r1": SECT233R1,
"sect283r1": SECT283R1,
"sect409r1": SECT409R1,
"sect571r1": SECT571R1,
+ "brainpoolP256r1": BrainpoolP256R1,
+ "brainpoolP384r1": BrainpoolP384R1,
+ "brainpoolP512r1": BrainpoolP512R1,
}
@@ -261,11 +321,13 @@ def __init__(self, algorithm):
algorithm = utils.read_only_property("_algorithm")
-def generate_private_key(curve, backend):
+def generate_private_key(curve, backend=None):
+ backend = _get_backend(backend)
return backend.generate_elliptic_curve_private_key(curve)
-def derive_private_key(private_value, curve, backend):
+def derive_private_key(private_value, curve, backend=None):
+ backend = _get_backend(backend)
if not isinstance(private_value, six.integer_types):
raise TypeError("private_value must be an integer type.")
@@ -280,9 +342,8 @@ def derive_private_key(private_value, curve, backend):
class EllipticCurvePublicNumbers(object):
def __init__(self, x, y, curve):
- if (
- not isinstance(x, six.integer_types) or
- not isinstance(y, six.integer_types)
+ if not isinstance(x, six.integer_types) or not isinstance(
+ y, six.integer_types
):
raise TypeError("x and y must be integers.")
@@ -293,15 +354,25 @@ def __init__(self, x, y, curve):
self._x = x
self._curve = curve
- def public_key(self, backend):
+ def public_key(self, backend=None):
+ backend = _get_backend(backend)
return backend.load_elliptic_curve_public_numbers(self)
def encode_point(self):
+ warnings.warn(
+ "encode_point has been deprecated on EllipticCurvePublicNumbers"
+ " and will be removed in a future version. Please use "
+ "EllipticCurvePublicKey.public_bytes to obtain both "
+ "compressed and uncompressed point encoding.",
+ utils.PersistentlyDeprecated2019,
+ stacklevel=2,
+ )
# key_size is in bits. Convert to bytes and round up
byte_length = (self.curve.key_size + 7) // 8
return (
- b'\x04' + utils.int_to_bytes(self.x, byte_length) +
- utils.int_to_bytes(self.y, byte_length)
+ b"\x04"
+ + utils.int_to_bytes(self.x, byte_length)
+ + utils.int_to_bytes(self.y, byte_length)
)
@classmethod
@@ -309,17 +380,25 @@ def from_encoded_point(cls, curve, data):
if not isinstance(curve, EllipticCurve):
raise TypeError("curve must be an EllipticCurve instance")
- if data.startswith(b'\x04'):
+ warnings.warn(
+ "Support for unsafe construction of public numbers from "
+ "encoded data will be removed in a future version. "
+ "Please use EllipticCurvePublicKey.from_encoded_point",
+ utils.PersistentlyDeprecated2019,
+ stacklevel=2,
+ )
+
+ if data.startswith(b"\x04"):
# key_size is in bits. Convert to bytes and round up
byte_length = (curve.key_size + 7) // 8
if len(data) == 2 * byte_length + 1:
- x = utils.int_from_bytes(data[1:byte_length + 1], 'big')
- y = utils.int_from_bytes(data[byte_length + 1:], 'big')
+ x = utils.int_from_bytes(data[1 : byte_length + 1], "big")
+ y = utils.int_from_bytes(data[byte_length + 1 :], "big")
return cls(x, y, curve)
else:
- raise ValueError('Invalid elliptic curve point data length')
+ raise ValueError("Invalid elliptic curve point data length")
else:
- raise ValueError('Unsupported elliptic curve point type')
+ raise ValueError("Unsupported elliptic curve point type")
curve = utils.read_only_property("_curve")
x = utils.read_only_property("_x")
@@ -330,10 +409,10 @@ def __eq__(self, other):
return NotImplemented
return (
- self.x == other.x and
- self.y == other.y and
- self.curve.name == other.curve.name and
- self.curve.key_size == other.curve.key_size
+ self.x == other.x
+ and self.y == other.y
+ and self.curve.name == other.curve.name
+ and self.curve.key_size == other.curve.key_size
)
def __ne__(self, other):
@@ -363,7 +442,8 @@ def __init__(self, private_value, public_numbers):
self._private_value = private_value
self._public_numbers = public_numbers
- def private_key(self, backend):
+ def private_key(self, backend=None):
+ backend = _get_backend(backend)
return backend.load_elliptic_curve_private_numbers(self)
private_value = utils.read_only_property("_private_value")
@@ -374,8 +454,8 @@ def __eq__(self, other):
return NotImplemented
return (
- self.private_value == other.private_value and
- self.public_numbers == other.public_numbers
+ self.private_value == other.private_value
+ and self.public_numbers == other.public_numbers
)
def __ne__(self, other):
@@ -387,3 +467,36 @@ def __hash__(self):
class ECDH(object):
pass
+
+
+_OID_TO_CURVE = {
+ EllipticCurveOID.SECP192R1: SECP192R1,
+ EllipticCurveOID.SECP224R1: SECP224R1,
+ EllipticCurveOID.SECP256K1: SECP256K1,
+ EllipticCurveOID.SECP256R1: SECP256R1,
+ EllipticCurveOID.SECP384R1: SECP384R1,
+ EllipticCurveOID.SECP521R1: SECP521R1,
+ EllipticCurveOID.BRAINPOOLP256R1: BrainpoolP256R1,
+ EllipticCurveOID.BRAINPOOLP384R1: BrainpoolP384R1,
+ EllipticCurveOID.BRAINPOOLP512R1: BrainpoolP512R1,
+ EllipticCurveOID.SECT163K1: SECT163K1,
+ EllipticCurveOID.SECT163R2: SECT163R2,
+ EllipticCurveOID.SECT233K1: SECT233K1,
+ EllipticCurveOID.SECT233R1: SECT233R1,
+ EllipticCurveOID.SECT283K1: SECT283K1,
+ EllipticCurveOID.SECT283R1: SECT283R1,
+ EllipticCurveOID.SECT409K1: SECT409K1,
+ EllipticCurveOID.SECT409R1: SECT409R1,
+ EllipticCurveOID.SECT571K1: SECT571K1,
+ EllipticCurveOID.SECT571R1: SECT571R1,
+}
+
+
+def get_curve_for_oid(oid):
+ try:
+ return _OID_TO_CURVE[oid]
+ except KeyError:
+ raise LookupError(
+ "The provided object identifier has no matching elliptic "
+ "curve class"
+ )
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/ed25519.py b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/ed25519.py
new file mode 100644
index 0000000..2d07a02
--- /dev/null
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/ed25519.py
@@ -0,0 +1,87 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import abc
+
+import six
+
+from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
+
+
+_ED25519_KEY_SIZE = 32
+_ED25519_SIG_SIZE = 64
+
+
+@six.add_metaclass(abc.ABCMeta)
+class Ed25519PublicKey(object):
+ @classmethod
+ def from_public_bytes(cls, data):
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.ed25519_supported():
+ raise UnsupportedAlgorithm(
+ "ed25519 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
+ )
+
+ return backend.ed25519_load_public_bytes(data)
+
+ @abc.abstractmethod
+ def public_bytes(self, encoding, format):
+ """
+ The serialized bytes of the public key.
+ """
+
+ @abc.abstractmethod
+ def verify(self, signature, data):
+ """
+ Verify the signature.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class Ed25519PrivateKey(object):
+ @classmethod
+ def generate(cls):
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.ed25519_supported():
+ raise UnsupportedAlgorithm(
+ "ed25519 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
+ )
+
+ return backend.ed25519_generate_key()
+
+ @classmethod
+ def from_private_bytes(cls, data):
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.ed25519_supported():
+ raise UnsupportedAlgorithm(
+ "ed25519 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
+ )
+
+ return backend.ed25519_load_private_bytes(data)
+
+ @abc.abstractmethod
+ def public_key(self):
+ """
+ The Ed25519PublicKey derived from the private key.
+ """
+
+ @abc.abstractmethod
+ def private_bytes(self, encoding, format, encryption_algorithm):
+ """
+ The serialized bytes of the private key.
+ """
+
+ @abc.abstractmethod
+ def sign(self, data):
+ """
+ Signs the data.
+ """
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/ed448.py b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/ed448.py
new file mode 100644
index 0000000..520ffcb
--- /dev/null
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/ed448.py
@@ -0,0 +1,82 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import abc
+
+import six
+
+from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
+
+
+@six.add_metaclass(abc.ABCMeta)
+class Ed448PublicKey(object):
+ @classmethod
+ def from_public_bytes(cls, data):
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.ed448_supported():
+ raise UnsupportedAlgorithm(
+ "ed448 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
+ )
+
+ return backend.ed448_load_public_bytes(data)
+
+ @abc.abstractmethod
+ def public_bytes(self, encoding, format):
+ """
+ The serialized bytes of the public key.
+ """
+
+ @abc.abstractmethod
+ def verify(self, signature, data):
+ """
+ Verify the signature.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class Ed448PrivateKey(object):
+ @classmethod
+ def generate(cls):
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.ed448_supported():
+ raise UnsupportedAlgorithm(
+ "ed448 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
+ )
+ return backend.ed448_generate_key()
+
+ @classmethod
+ def from_private_bytes(cls, data):
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.ed448_supported():
+ raise UnsupportedAlgorithm(
+ "ed448 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
+ )
+
+ return backend.ed448_load_private_bytes(data)
+
+ @abc.abstractmethod
+ def public_key(self):
+ """
+ The Ed448PublicKey derived from the private key.
+ """
+
+ @abc.abstractmethod
+ def sign(self, data):
+ """
+ Signs the data.
+ """
+
+ @abc.abstractmethod
+ def private_bytes(self, encoding, format, encryption_algorithm):
+ """
+ The serialized bytes of the private key.
+ """
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/padding.py b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/padding.py
index a37c3f9..fc8f6e2 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/padding.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/padding.py
@@ -5,7 +5,6 @@
from __future__ import absolute_import, division, print_function
import abc
-import math
import six
@@ -36,8 +35,10 @@ class PSS(object):
def __init__(self, mgf, salt_length):
self._mgf = mgf
- if (not isinstance(salt_length, six.integer_types) and
- salt_length is not self.MAX_LENGTH):
+ if (
+ not isinstance(salt_length, six.integer_types)
+ and salt_length is not self.MAX_LENGTH
+ ):
raise TypeError("salt_length must be an integer.")
if salt_length is not self.MAX_LENGTH and salt_length < 0:
@@ -73,7 +74,7 @@ def calculate_max_pss_salt_length(key, hash_algorithm):
if not isinstance(key, (rsa.RSAPrivateKey, rsa.RSAPublicKey)):
raise TypeError("key must be an RSA public or private key")
# bit length - 1 per RFC 3447
- emlen = int(math.ceil((key.key_size - 1) / 8.0))
+ emlen = (key.key_size + 6) // 8
salt_length = emlen - hash_algorithm.digest_size - 2
assert salt_length >= 0
return salt_length
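
The replacement of int(math.ceil((key.key_size - 1) / 8.0)) with (key.key_size + 6) // 8 is a pure integer-arithmetic rewrite, not a behaviour change: ceil((k - 1) / 8) equals floor((k + 6) / 8) for every integer k. A quick check plus one worked value:

    import math

    def emlen_old(key_size):
        return int(math.ceil((key_size - 1) / 8.0))

    def emlen_new(key_size):
        return (key_size + 6) // 8

    assert all(emlen_old(k) == emlen_new(k) for k in range(512, 8193))
    # e.g. a 2048-bit key with SHA-256: emlen = 256, max salt = 256 - 32 - 2 = 222 bytes
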
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/rsa.py b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/rsa.py
index 27db671..d8b8ddd 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/rsa.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/rsa.py
@@ -5,6 +5,7 @@
from __future__ import absolute_import, division, print_function
import abc
+
try:
# Only available in math in 3.5+
from math import gcd
@@ -15,6 +16,7 @@
from cryptography import utils
from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
+from cryptography.hazmat.backends import _get_backend
from cryptography.hazmat.backends.interfaces import RSABackend
@@ -108,11 +110,12 @@ def verify(self, signature, data, padding, algorithm):
RSAPublicKeyWithSerialization = RSAPublicKey
-def generate_private_key(public_exponent, key_size, backend):
+def generate_private_key(public_exponent, key_size, backend=None):
+ backend = _get_backend(backend)
if not isinstance(backend, RSABackend):
raise UnsupportedAlgorithm(
"Backend object does not implement RSABackend.",
- _Reasons.BACKEND_MISSING_INTERFACE
+ _Reasons.BACKEND_MISSING_INTERFACE,
)
_verify_rsa_parameters(public_exponent, key_size)
@@ -120,18 +123,19 @@ def generate_private_key(public_exponent, key_size, backend):
def _verify_rsa_parameters(public_exponent, key_size):
- if public_exponent < 3:
- raise ValueError("public_exponent must be >= 3.")
-
- if public_exponent & 1 == 0:
- raise ValueError("public_exponent must be odd.")
+ if public_exponent not in (3, 65537):
+ raise ValueError(
+ "public_exponent must be either 3 (for legacy compatibility) or "
+ "65537. Almost everyone should choose 65537 here!"
+ )
if key_size < 512:
raise ValueError("key_size must be at least 512-bits.")
-def _check_private_key_components(p, q, private_exponent, dmp1, dmq1, iqmp,
- public_exponent, modulus):
+def _check_private_key_components(
+ p, q, private_exponent, dmp1, dmq1, iqmp, public_exponent, modulus
+):
if modulus < 3:
raise ValueError("modulus must be >= 3.")
@@ -184,12 +188,12 @@ def _modinv(e, m):
"""
Modular Multiplicative Inverse. Returns x such that: (x*e) mod m == 1
"""
- x1, y1, x2, y2 = 1, 0, 0, 1
+ x1, x2 = 1, 0
a, b = e, m
while b > 0:
q, r = divmod(a, b)
- xn, yn = x1 - q * x2, y1 - q * y2
- a, b, x1, y1, x2, y2 = b, r, x2, y2, xn, yn
+ xn = x1 - q * x2
+ a, b, x1, x2 = b, r, x2, xn
return x1 % m
@@ -266,15 +270,14 @@ def rsa_recover_prime_factors(n, e, d):
class RSAPrivateNumbers(object):
- def __init__(self, p, q, d, dmp1, dmq1, iqmp,
- public_numbers):
+ def __init__(self, p, q, d, dmp1, dmq1, iqmp, public_numbers):
if (
- not isinstance(p, six.integer_types) or
- not isinstance(q, six.integer_types) or
- not isinstance(d, six.integer_types) or
- not isinstance(dmp1, six.integer_types) or
- not isinstance(dmq1, six.integer_types) or
- not isinstance(iqmp, six.integer_types)
+ not isinstance(p, six.integer_types)
+ or not isinstance(q, six.integer_types)
+ or not isinstance(d, six.integer_types)
+ or not isinstance(dmp1, six.integer_types)
+ or not isinstance(dmq1, six.integer_types)
+ or not isinstance(iqmp, six.integer_types)
):
raise TypeError(
"RSAPrivateNumbers p, q, d, dmp1, dmq1, iqmp arguments must"
@@ -303,7 +306,8 @@ def __init__(self, p, q, d, dmp1, dmq1, iqmp,
iqmp = utils.read_only_property("_iqmp")
public_numbers = utils.read_only_property("_public_numbers")
- def private_key(self, backend):
+ def private_key(self, backend=None):
+ backend = _get_backend(backend)
return backend.load_rsa_private_numbers(self)
def __eq__(self, other):
@@ -311,35 +315,36 @@ def __eq__(self, other):
return NotImplemented
return (
- self.p == other.p and
- self.q == other.q and
- self.d == other.d and
- self.dmp1 == other.dmp1 and
- self.dmq1 == other.dmq1 and
- self.iqmp == other.iqmp and
- self.public_numbers == other.public_numbers
+ self.p == other.p
+ and self.q == other.q
+ and self.d == other.d
+ and self.dmp1 == other.dmp1
+ and self.dmq1 == other.dmq1
+ and self.iqmp == other.iqmp
+ and self.public_numbers == other.public_numbers
)
def __ne__(self, other):
return not self == other
def __hash__(self):
- return hash((
- self.p,
- self.q,
- self.d,
- self.dmp1,
- self.dmq1,
- self.iqmp,
- self.public_numbers,
- ))
+ return hash(
+ (
+ self.p,
+ self.q,
+ self.d,
+ self.dmp1,
+ self.dmq1,
+ self.iqmp,
+ self.public_numbers,
+ )
+ )
class RSAPublicNumbers(object):
def __init__(self, e, n):
- if (
- not isinstance(e, six.integer_types) or
- not isinstance(n, six.integer_types)
+ if not isinstance(e, six.integer_types) or not isinstance(
+ n, six.integer_types
):
raise TypeError("RSAPublicNumbers arguments must be integers.")
@@ -349,7 +354,8 @@ def __init__(self, e, n):
e = utils.read_only_property("_e")
n = utils.read_only_property("_n")
- def public_key(self, backend):
+ def public_key(self, backend=None):
+ backend = _get_backend(backend)
return backend.load_rsa_public_numbers(self)
def __repr__(self):
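[editor's note, not part of the patch] The rsa.py hunks above make the backend argument optional (resolved through _get_backend), tighten public_exponent validation to 3 or 65537, and let RSAPrivateNumbers.private_key()/RSAPublicNumbers.public_key() default their backend too. A minimal usage sketch against this vendored version; the key size and exponent are illustrative values:

    from cryptography.hazmat.primitives.asymmetric import rsa

    # 65537 is the exponent new code should use; no backend argument needed.
    private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048)

    # Round-trip through the *Numbers helpers, which now also default the backend.
    numbers = private_key.private_numbers()
    restored = numbers.private_key()
    assert restored.key_size == 2048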
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/utils.py b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/utils.py
index ef1e7eb..5f9b677 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/utils.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/utils.py
@@ -4,49 +4,30 @@
from __future__ import absolute_import, division, print_function
-import warnings
-
-from asn1crypto.algos import DSASignature
-
-import six
-
from cryptography import utils
+from cryptography.hazmat._der import (
+ DERReader,
+ INTEGER,
+ SEQUENCE,
+ encode_der,
+ encode_der_integer,
+)
from cryptography.hazmat.primitives import hashes
-def decode_rfc6979_signature(signature):
- warnings.warn(
- "decode_rfc6979_signature is deprecated and will "
- "be removed in a future version, use decode_dss_signature instead.",
- utils.PersistentlyDeprecated,
- stacklevel=2
- )
- return decode_dss_signature(signature)
-
-
def decode_dss_signature(signature):
- data = DSASignature.load(signature, strict=True).native
- return data['r'], data['s']
-
-
-def encode_rfc6979_signature(r, s):
- warnings.warn(
- "encode_rfc6979_signature is deprecated and will "
- "be removed in a future version, use encode_dss_signature instead.",
- utils.PersistentlyDeprecated,
- stacklevel=2
- )
- return encode_dss_signature(r, s)
+ with DERReader(signature).read_single_element(SEQUENCE) as seq:
+ r = seq.read_element(INTEGER).as_integer()
+ s = seq.read_element(INTEGER).as_integer()
+ return r, s
def encode_dss_signature(r, s):
- if (
- not isinstance(r, six.integer_types) or
- not isinstance(s, six.integer_types)
- ):
- raise ValueError("Both r and s must be integers")
-
- return DSASignature({'r': r, 's': s}).dump()
+ return encode_der(
+ SEQUENCE,
+ encode_der(INTEGER, encode_der_integer(r)),
+ encode_der(INTEGER, encode_der_integer(s)),
+ )
class Prehashed(object):
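[editor's note, not part of the patch] asymmetric/utils.py drops asn1crypto and the deprecated *_rfc6979_* aliases and reimplements the DSS signature helpers on the internal cryptography.hazmat._der reader/writer. A small round-trip sketch of the public pair; the r and s values are arbitrary test integers:

    from cryptography.hazmat.primitives.asymmetric.utils import (
        decode_dss_signature,
        encode_dss_signature,
    )

    r, s = 0x1234, 0x5678
    sig = encode_dss_signature(r, s)        # DER SEQUENCE of two INTEGERs
    assert decode_dss_signature(sig) == (r, s)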
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/x25519.py b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/x25519.py
index 5c4652a..fc63691 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/x25519.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/x25519.py
@@ -16,16 +16,20 @@ class X25519PublicKey(object):
@classmethod
def from_public_bytes(cls, data):
from cryptography.hazmat.backends.openssl.backend import backend
+
if not backend.x25519_supported():
raise UnsupportedAlgorithm(
"X25519 is not supported by this version of OpenSSL.",
- _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM
+ _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
)
+
return backend.x25519_load_public_bytes(data)
@abc.abstractmethod
- def public_bytes(self):
- pass
+ def public_bytes(self, encoding, format):
+ """
+ The serialized bytes of the public key.
+ """
@six.add_metaclass(abc.ABCMeta)
@@ -33,22 +37,40 @@ class X25519PrivateKey(object):
@classmethod
def generate(cls):
from cryptography.hazmat.backends.openssl.backend import backend
+
if not backend.x25519_supported():
raise UnsupportedAlgorithm(
"X25519 is not supported by this version of OpenSSL.",
- _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM
+ _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
)
return backend.x25519_generate_key()
@classmethod
- def _from_private_bytes(cls, data):
+ def from_private_bytes(cls, data):
from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.x25519_supported():
+ raise UnsupportedAlgorithm(
+ "X25519 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
+ )
+
return backend.x25519_load_private_bytes(data)
@abc.abstractmethod
def public_key(self):
- pass
+ """
+ The serialized bytes of the public key.
+ """
+
+ @abc.abstractmethod
+ def private_bytes(self, encoding, format, encryption_algorithm):
+ """
+ The serialized bytes of the private key.
+ """
@abc.abstractmethod
def exchange(self, peer_public_key):
- pass
+ """
+ Performs a key exchange operation using the provided peer's public key.
+ """
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/x448.py b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/x448.py
new file mode 100644
index 0000000..3ac067b
--- /dev/null
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/asymmetric/x448.py
@@ -0,0 +1,76 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import abc
+
+import six
+
+from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
+
+
+@six.add_metaclass(abc.ABCMeta)
+class X448PublicKey(object):
+ @classmethod
+ def from_public_bytes(cls, data):
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.x448_supported():
+ raise UnsupportedAlgorithm(
+ "X448 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
+ )
+
+ return backend.x448_load_public_bytes(data)
+
+ @abc.abstractmethod
+ def public_bytes(self, encoding, format):
+ """
+ The serialized bytes of the public key.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class X448PrivateKey(object):
+ @classmethod
+ def generate(cls):
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.x448_supported():
+ raise UnsupportedAlgorithm(
+ "X448 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
+ )
+ return backend.x448_generate_key()
+
+ @classmethod
+ def from_private_bytes(cls, data):
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.x448_supported():
+ raise UnsupportedAlgorithm(
+ "X448 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
+ )
+
+ return backend.x448_load_private_bytes(data)
+
+ @abc.abstractmethod
+ def public_key(self):
+ """
+ The serialized bytes of the public key.
+ """
+
+ @abc.abstractmethod
+ def private_bytes(self, encoding, format, encryption_algorithm):
+ """
+ The serialized bytes of the private key.
+ """
+
+ @abc.abstractmethod
+ def exchange(self, peer_public_key):
+ """
+ Performs a key exchange operation using the provided peer's public key.
+ """
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/ciphers/__init__.py b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/ciphers/__init__.py
index 171b1c6..4380f72 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/ciphers/__init__.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/ciphers/__init__.py
@@ -5,8 +5,13 @@
from __future__ import absolute_import, division, print_function
from cryptography.hazmat.primitives.ciphers.base import (
- AEADCipherContext, AEADDecryptionContext, AEADEncryptionContext,
- BlockCipherAlgorithm, Cipher, CipherAlgorithm, CipherContext
+ AEADCipherContext,
+ AEADDecryptionContext,
+ AEADEncryptionContext,
+ BlockCipherAlgorithm,
+ Cipher,
+ CipherAlgorithm,
+ CipherContext,
)
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000..65f1b86
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-38.pyc
new file mode 100644
index 0000000..0eb7c4c
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-38.pyc
new file mode 100644
index 0000000..965870f
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-38.pyc
new file mode 100644
index 0000000..3fef035
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-38.pyc
new file mode 100644
index 0000000..4784075
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/ciphers/aead.py b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/ciphers/aead.py
index 415a45a..4eddc1e 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/ciphers/aead.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/ciphers/aead.py
@@ -12,13 +12,15 @@
class ChaCha20Poly1305(object):
+ _MAX_SIZE = 2 ** 32
+
def __init__(self, key):
if not backend.aead_cipher_supported(self):
raise exceptions.UnsupportedAlgorithm(
"ChaCha20Poly1305 is not supported by this version of OpenSSL",
- exceptions._Reasons.UNSUPPORTED_CIPHER
+ exceptions._Reasons.UNSUPPORTED_CIPHER,
)
- utils._check_bytes("key", key)
+ utils._check_byteslike("key", key)
if len(key) != 32:
raise ValueError("ChaCha20Poly1305 key must be 32 bytes.")
@@ -33,22 +35,24 @@ def encrypt(self, nonce, data, associated_data):
if associated_data is None:
associated_data = b""
+ if len(data) > self._MAX_SIZE or len(associated_data) > self._MAX_SIZE:
+ # This is OverflowError to match what cffi would raise
+ raise OverflowError(
+ "Data or associated data too long. Max 2**32 bytes"
+ )
+
self._check_params(nonce, data, associated_data)
- return aead._encrypt(
- backend, self, nonce, data, associated_data, 16
- )
+ return aead._encrypt(backend, self, nonce, data, associated_data, 16)
def decrypt(self, nonce, data, associated_data):
if associated_data is None:
associated_data = b""
self._check_params(nonce, data, associated_data)
- return aead._decrypt(
- backend, self, nonce, data, associated_data, 16
- )
+ return aead._decrypt(backend, self, nonce, data, associated_data, 16)
def _check_params(self, nonce, data, associated_data):
- utils._check_bytes("nonce", nonce)
+ utils._check_byteslike("nonce", nonce)
utils._check_bytes("data", data)
utils._check_bytes("associated_data", associated_data)
if len(nonce) != 12:
@@ -56,8 +60,10 @@ def _check_params(self, nonce, data, associated_data):
class AESCCM(object):
+ _MAX_SIZE = 2 ** 32
+
def __init__(self, key, tag_length=16):
- utils._check_bytes("key", key)
+ utils._check_byteslike("key", key)
if len(key) not in (16, 24, 32):
raise ValueError("AESCCM key must be 128, 192, or 256 bits.")
@@ -65,17 +71,11 @@ def __init__(self, key, tag_length=16):
if not isinstance(tag_length, int):
raise TypeError("tag_length must be an integer")
- if tag_length not in (4, 6, 8, 12, 14, 16):
+ if tag_length not in (4, 6, 8, 10, 12, 14, 16):
raise ValueError("Invalid tag_length")
self._tag_length = tag_length
- if not backend.aead_cipher_supported(self):
- raise exceptions.UnsupportedAlgorithm(
- "AESCCM is not supported by this version of OpenSSL",
- exceptions._Reasons.UNSUPPORTED_CIPHER
- )
-
@classmethod
def generate_key(cls, bit_length):
if not isinstance(bit_length, int):
@@ -90,6 +90,12 @@ def encrypt(self, nonce, data, associated_data):
if associated_data is None:
associated_data = b""
+ if len(data) > self._MAX_SIZE or len(associated_data) > self._MAX_SIZE:
+ # This is OverflowError to match what cffi would raise
+ raise OverflowError(
+ "Data or associated data too long. Max 2**32 bytes"
+ )
+
self._check_params(nonce, data, associated_data)
self._validate_lengths(nonce, len(data))
return aead._encrypt(
@@ -110,10 +116,10 @@ def _validate_lengths(self, nonce, data_len):
# https://tools.ietf.org/html/rfc3610#section-2.1
l_val = 15 - len(nonce)
if 2 ** (8 * l_val) < data_len:
- raise ValueError("Nonce too long for data")
+ raise ValueError("Data too long for nonce")
def _check_params(self, nonce, data, associated_data):
- utils._check_bytes("nonce", nonce)
+ utils._check_byteslike("nonce", nonce)
utils._check_bytes("data", data)
utils._check_bytes("associated_data", associated_data)
if not 7 <= len(nonce) <= 13:
@@ -121,8 +127,10 @@ def _check_params(self, nonce, data, associated_data):
class AESGCM(object):
+ _MAX_SIZE = 2 ** 32
+
def __init__(self, key):
- utils._check_bytes("key", key)
+ utils._check_byteslike("key", key)
if len(key) not in (16, 24, 32):
raise ValueError("AESGCM key must be 128, 192, or 256 bits.")
@@ -142,21 +150,25 @@ def encrypt(self, nonce, data, associated_data):
if associated_data is None:
associated_data = b""
+ if len(data) > self._MAX_SIZE or len(associated_data) > self._MAX_SIZE:
+ # This is OverflowError to match what cffi would raise
+ raise OverflowError(
+ "Data or associated data too long. Max 2**32 bytes"
+ )
+
self._check_params(nonce, data, associated_data)
- return aead._encrypt(
- backend, self, nonce, data, associated_data, 16
- )
+ return aead._encrypt(backend, self, nonce, data, associated_data, 16)
def decrypt(self, nonce, data, associated_data):
if associated_data is None:
associated_data = b""
self._check_params(nonce, data, associated_data)
- return aead._decrypt(
- backend, self, nonce, data, associated_data, 16
- )
+ return aead._decrypt(backend, self, nonce, data, associated_data, 16)
def _check_params(self, nonce, data, associated_data):
- utils._check_bytes("nonce", nonce)
+ utils._check_byteslike("nonce", nonce)
utils._check_bytes("data", data)
utils._check_bytes("associated_data", associated_data)
+ if len(nonce) == 0:
+ raise ValueError("Nonce must be at least 1 byte")
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/ciphers/algorithms.py b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/ciphers/algorithms.py
index 99a837e..8072ced 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/ciphers/algorithms.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/ciphers/algorithms.py
@@ -6,17 +6,23 @@
from cryptography import utils
from cryptography.hazmat.primitives.ciphers import (
- BlockCipherAlgorithm, CipherAlgorithm
+ BlockCipherAlgorithm,
+ CipherAlgorithm,
)
from cryptography.hazmat.primitives.ciphers.modes import ModeWithNonce
def _verify_key_size(algorithm, key):
+ # Verify that the key is instance of bytes
+ utils._check_byteslike("key", key)
+
# Verify that the key size matches the expected key size
if len(key) * 8 not in algorithm.key_sizes:
- raise ValueError("Invalid key size ({0}) for {1}.".format(
- len(key) * 8, algorithm.name
- ))
+ raise ValueError(
+ "Invalid key size ({}) for {}.".format(
+ len(key) * 8, algorithm.name
+ )
+ )
return key
@@ -150,8 +156,7 @@ class ChaCha20(object):
def __init__(self, key, nonce):
self.key = _verify_key_size(self, key)
- if not isinstance(nonce, bytes):
- raise TypeError("nonce must be bytes")
+ utils._check_byteslike("nonce", nonce)
if len(nonce) != 16:
raise ValueError("nonce must be 128-bits (16 bytes)")
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/ciphers/base.py b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/ciphers/base.py
index f857041..dae425a 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/ciphers/base.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/ciphers/base.py
@@ -10,9 +10,13 @@
from cryptography import utils
from cryptography.exceptions import (
- AlreadyFinalized, AlreadyUpdated, NotYetFinalized, UnsupportedAlgorithm,
- _Reasons
+ AlreadyFinalized,
+ AlreadyUpdated,
+ NotYetFinalized,
+ UnsupportedAlgorithm,
+ _Reasons,
)
+from cryptography.hazmat.backends import _get_backend
from cryptography.hazmat.backends.interfaces import CipherBackend
from cryptography.hazmat.primitives.ciphers import modes
@@ -94,11 +98,12 @@ def tag(self):
class Cipher(object):
- def __init__(self, algorithm, mode, backend):
+ def __init__(self, algorithm, mode, backend=None):
+ backend = _get_backend(backend)
if not isinstance(backend, CipherBackend):
raise UnsupportedAlgorithm(
"Backend object does not implement CipherBackend.",
- _Reasons.BACKEND_MISSING_INTERFACE
+ _Reasons.BACKEND_MISSING_INTERFACE,
)
if not isinstance(algorithm, CipherAlgorithm):
@@ -179,7 +184,7 @@ def _check_limit(self, data_size):
self._bytes_processed += data_size
if self._bytes_processed > self._ctx._mode._MAX_ENCRYPTED_BYTES:
raise ValueError(
- "{0} has a maximum encrypted byte limit of {1}".format(
+ "{} has a maximum encrypted byte limit of {}".format(
self._ctx._mode.name, self._ctx._mode._MAX_ENCRYPTED_BYTES
)
)
@@ -217,7 +222,7 @@ def authenticate_additional_data(self, data):
self._aad_bytes_processed += len(data)
if self._aad_bytes_processed > self._ctx._mode._MAX_AAD_BYTES:
raise ValueError(
- "{0} has a maximum AAD byte limit of {1}".format(
+ "{} has a maximum AAD byte limit of {}".format(
self._ctx._mode.name, self._ctx._mode._MAX_AAD_BYTES
)
)
@@ -230,6 +235,7 @@ class _AEADEncryptionContext(_AEADCipherContext):
@property
def tag(self):
if self._ctx is not None:
- raise NotYetFinalized("You must finalize encryption before "
- "getting the tag.")
+ raise NotYetFinalized(
+ "You must finalize encryption before " "getting the tag."
+ )
return self._tag
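[editor's note, not part of the patch] ciphers/base.py lets Cipher be built without an explicit backend (again via _get_backend). A CBC sketch; the plaintext is hand-sized to the 16-byte block for brevity instead of using a padder:

    import os
    from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

    key, iv = os.urandom(32), os.urandom(16)
    cipher = Cipher(algorithms.AES(key), modes.CBC(iv))    # no backend argument
    encryptor = cipher.encryptor()
    ct = encryptor.update(b"16-byte aligned.") + encryptor.finalize()
    decryptor = cipher.decryptor()
    assert decryptor.update(ct) + decryptor.finalize() == b"16-byte aligned."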
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/ciphers/modes.py b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/ciphers/modes.py
index 598dfaa..dcb2444 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/ciphers/modes.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/ciphers/modes.py
@@ -72,9 +72,11 @@ def _check_aes_key_length(self, algorithm):
def _check_iv_length(self, algorithm):
if len(self.initialization_vector) * 8 != algorithm.block_size:
- raise ValueError("Invalid IV size ({0}) for {1}.".format(
- len(self.initialization_vector), self.name
- ))
+ raise ValueError(
+ "Invalid IV size ({}) for {}.".format(
+ len(self.initialization_vector), self.name
+ )
+ )
def _check_iv_and_key_length(self, algorithm):
@@ -88,9 +90,7 @@ class CBC(object):
name = "CBC"
def __init__(self, initialization_vector):
- if not isinstance(initialization_vector, bytes):
- raise TypeError("initialization_vector must be bytes")
-
+ utils._check_byteslike("initialization_vector", initialization_vector)
self._initialization_vector = initialization_vector
initialization_vector = utils.read_only_property("_initialization_vector")
@@ -103,8 +103,7 @@ class XTS(object):
name = "XTS"
def __init__(self, tweak):
- if not isinstance(tweak, bytes):
- raise TypeError("tweak must be bytes")
+ utils._check_byteslike("tweak", tweak)
if len(tweak) != 16:
raise ValueError("tweak must be 128-bits (16 bytes)")
@@ -134,9 +133,7 @@ class OFB(object):
name = "OFB"
def __init__(self, initialization_vector):
- if not isinstance(initialization_vector, bytes):
- raise TypeError("initialization_vector must be bytes")
-
+ utils._check_byteslike("initialization_vector", initialization_vector)
self._initialization_vector = initialization_vector
initialization_vector = utils.read_only_property("_initialization_vector")
@@ -149,9 +146,7 @@ class CFB(object):
name = "CFB"
def __init__(self, initialization_vector):
- if not isinstance(initialization_vector, bytes):
- raise TypeError("initialization_vector must be bytes")
-
+ utils._check_byteslike("initialization_vector", initialization_vector)
self._initialization_vector = initialization_vector
initialization_vector = utils.read_only_property("_initialization_vector")
@@ -164,9 +159,7 @@ class CFB8(object):
name = "CFB8"
def __init__(self, initialization_vector):
- if not isinstance(initialization_vector, bytes):
- raise TypeError("initialization_vector must be bytes")
-
+ utils._check_byteslike("initialization_vector", initialization_vector)
self._initialization_vector = initialization_vector
initialization_vector = utils.read_only_property("_initialization_vector")
@@ -179,9 +172,7 @@ class CTR(object):
name = "CTR"
def __init__(self, nonce):
- if not isinstance(nonce, bytes):
- raise TypeError("nonce must be bytes")
-
+ utils._check_byteslike("nonce", nonce)
self._nonce = nonce
nonce = utils.read_only_property("_nonce")
@@ -189,9 +180,11 @@ def __init__(self, nonce):
def validate_for_algorithm(self, algorithm):
_check_aes_key_length(self, algorithm)
if len(self.nonce) * 8 != algorithm.block_size:
- raise ValueError("Invalid nonce size ({0}) for {1}.".format(
- len(self.nonce), self.name
- ))
+ raise ValueError(
+ "Invalid nonce size ({}) for {}.".format(
+ len(self.nonce), self.name
+ )
+ )
@utils.register_interface(Mode)
@@ -206,20 +199,22 @@ def __init__(self, initialization_vector, tag=None, min_tag_length=16):
# len(initialization_vector) must in [1, 2 ** 64), but it's impossible
# to actually construct a bytes object that large, so we don't check
# for it
- if not isinstance(initialization_vector, bytes):
- raise TypeError("initialization_vector must be bytes")
+ utils._check_byteslike("initialization_vector", initialization_vector)
+ if len(initialization_vector) == 0:
+ raise ValueError("initialization_vector must be at least 1 byte")
self._initialization_vector = initialization_vector
if tag is not None:
- if not isinstance(tag, bytes):
- raise TypeError("tag must be bytes or None")
+ utils._check_bytes("tag", tag)
if min_tag_length < 4:
raise ValueError("min_tag_length must be >= 4")
if len(tag) < min_tag_length:
raise ValueError(
- "Authentication tag must be {0} bytes or longer.".format(
- min_tag_length)
+ "Authentication tag must be {} bytes or longer.".format(
+ min_tag_length
+ )
)
self._tag = tag
+ self._min_tag_length = min_tag_length
tag = utils.read_only_property("_tag")
initialization_vector = utils.read_only_property("_initialization_vector")
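[editor's note, not part of the patch] The modes.py hunks accept bytes-like IVs/nonces/tweaks, reject zero-length GCM IVs, and store min_tag_length so the backend can validate truncated tags. A GCM-via-Cipher sketch showing explicit tag handling (the AESGCM class above is the simpler high-level option):

    import os
    from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

    key, iv = os.urandom(32), os.urandom(12)
    encryptor = Cipher(algorithms.AES(key), modes.GCM(iv)).encryptor()
    encryptor.authenticate_additional_data(b"header")
    ct = encryptor.update(b"payload") + encryptor.finalize()
    tag = encryptor.tag

    decryptor = Cipher(algorithms.AES(key), modes.GCM(iv, tag)).decryptor()
    decryptor.authenticate_additional_data(b"header")
    pt = decryptor.update(ct) + decryptor.finalize()   # raises InvalidTag on mismatch
    assert pt == b"payload"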
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/cmac.py b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/cmac.py
index 77537f0..bf962c9 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/cmac.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/cmac.py
@@ -6,25 +6,26 @@
from cryptography import utils
from cryptography.exceptions import (
- AlreadyFinalized, UnsupportedAlgorithm, _Reasons
+ AlreadyFinalized,
+ UnsupportedAlgorithm,
+ _Reasons,
)
+from cryptography.hazmat.backends import _get_backend
from cryptography.hazmat.backends.interfaces import CMACBackend
-from cryptography.hazmat.primitives import ciphers, mac
+from cryptography.hazmat.primitives import ciphers
-@utils.register_interface(mac.MACContext)
class CMAC(object):
- def __init__(self, algorithm, backend, ctx=None):
+ def __init__(self, algorithm, backend=None, ctx=None):
+ backend = _get_backend(backend)
if not isinstance(backend, CMACBackend):
raise UnsupportedAlgorithm(
"Backend object does not implement CMACBackend.",
- _Reasons.BACKEND_MISSING_INTERFACE
+ _Reasons.BACKEND_MISSING_INTERFACE,
)
if not isinstance(algorithm, ciphers.BlockCipherAlgorithm):
- raise TypeError(
- "Expected instance of BlockCipherAlgorithm."
- )
+ raise TypeError("Expected instance of BlockCipherAlgorithm.")
self._algorithm = algorithm
self._backend = backend
@@ -36,8 +37,8 @@ def __init__(self, algorithm, backend, ctx=None):
def update(self, data):
if self._ctx is None:
raise AlreadyFinalized("Context was already finalized.")
- if not isinstance(data, bytes):
- raise TypeError("data must be bytes.")
+
+ utils._check_bytes("data", data)
self._ctx.update(data)
def finalize(self):
@@ -48,8 +49,7 @@ def finalize(self):
return digest
def verify(self, signature):
- if not isinstance(signature, bytes):
- raise TypeError("signature must be bytes.")
+ utils._check_bytes("signature", signature)
if self._ctx is None:
raise AlreadyFinalized("Context was already finalized.")
@@ -60,7 +60,5 @@ def copy(self):
if self._ctx is None:
raise AlreadyFinalized("Context was already finalized.")
return CMAC(
- self._algorithm,
- backend=self._backend,
- ctx=self._ctx.copy()
+ self._algorithm, backend=self._backend, ctx=self._ctx.copy()
)
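[editor's note, not part of the patch] cmac.py drops the removed MACContext registration, defaults the backend, and routes update/verify arguments through the shared _check_bytes helper. Sketch:

    import os
    from cryptography.hazmat.primitives import cmac
    from cryptography.hazmat.primitives.ciphers import algorithms

    key = os.urandom(16)
    c = cmac.CMAC(algorithms.AES(key))
    c.update(b"message to authenticate")
    tag = c.finalize()

    c = cmac.CMAC(algorithms.AES(key))
    c.update(b"message to authenticate")
    c.verify(tag)                           # raises InvalidSignature on mismatch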
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/constant_time.py b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/constant_time.py
index 5a682ca..7f41b9e 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/constant_time.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/constant_time.py
@@ -6,21 +6,9 @@
import hmac
-from cryptography.hazmat.bindings._constant_time import lib
+def bytes_eq(a, b):
+ if not isinstance(a, bytes) or not isinstance(b, bytes):
+ raise TypeError("a and b must be bytes.")
-if hasattr(hmac, "compare_digest"):
- def bytes_eq(a, b):
- if not isinstance(a, bytes) or not isinstance(b, bytes):
- raise TypeError("a and b must be bytes.")
-
- return hmac.compare_digest(a, b)
-
-else:
- def bytes_eq(a, b):
- if not isinstance(a, bytes) or not isinstance(b, bytes):
- raise TypeError("a and b must be bytes.")
-
- return lib.Cryptography_constant_time_bytes_eq(
- a, len(a), b, len(b)
- ) == 1
+ return hmac.compare_digest(a, b)
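[editor's note, not part of the patch] constant_time.bytes_eq now always delegates to hmac.compare_digest, which exists on every Python this version supports, so the C fallback is gone. Tiny sketch:

    from cryptography.hazmat.primitives import constant_time

    assert constant_time.bytes_eq(b"abc", b"abc")
    assert not constant_time.bytes_eq(b"abc", b"abd")   # still timing-safe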
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/hashes.py b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/hashes.py
index 1764e9c..18e2bab 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/hashes.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/hashes.py
@@ -10,8 +10,11 @@
from cryptography import utils
from cryptography.exceptions import (
- AlreadyFinalized, UnsupportedAlgorithm, _Reasons
+ AlreadyFinalized,
+ UnsupportedAlgorithm,
+ _Reasons,
)
+from cryptography.hazmat.backends import _get_backend
from cryptography.hazmat.backends.interfaces import HashBackend
@@ -29,12 +32,6 @@ def digest_size(self):
The size of the resulting digest in bytes.
"""
- @abc.abstractproperty
- def block_size(self):
- """
- The internal block size of the hash algorithm in bytes.
- """
-
@six.add_metaclass(abc.ABCMeta)
class HashContext(object):
@@ -63,13 +60,21 @@ def copy(self):
"""
+@six.add_metaclass(abc.ABCMeta)
+class ExtendableOutputFunction(object):
+ """
+ An interface for extendable output functions.
+ """
+
+
@utils.register_interface(HashContext)
class Hash(object):
- def __init__(self, algorithm, backend, ctx=None):
+ def __init__(self, algorithm, backend=None, ctx=None):
+ backend = _get_backend(backend)
if not isinstance(backend, HashBackend):
raise UnsupportedAlgorithm(
"Backend object does not implement HashBackend.",
- _Reasons.BACKEND_MISSING_INTERFACE
+ _Reasons.BACKEND_MISSING_INTERFACE,
)
if not isinstance(algorithm, HashAlgorithm):
@@ -88,8 +93,7 @@ def __init__(self, algorithm, backend, ctx=None):
def update(self, data):
if self._ctx is None:
raise AlreadyFinalized("Context was already finalized.")
- if not isinstance(data, bytes):
- raise TypeError("data must be bytes.")
+ utils._check_byteslike("data", data)
self._ctx.update(data)
def copy(self):
@@ -114,6 +118,20 @@ class SHA1(object):
block_size = 64
+@utils.register_interface(HashAlgorithm)
+class SHA512_224(object): # noqa: N801
+ name = "sha512-224"
+ digest_size = 28
+ block_size = 128
+
+
+@utils.register_interface(HashAlgorithm)
+class SHA512_256(object): # noqa: N801
+ name = "sha512-256"
+ digest_size = 32
+ block_size = 128
+
+
@utils.register_interface(HashAlgorithm)
class SHA224(object):
name = "sha224"
@@ -142,6 +160,64 @@ class SHA512(object):
block_size = 128
+@utils.register_interface(HashAlgorithm)
+class SHA3_224(object): # noqa: N801
+ name = "sha3-224"
+ digest_size = 28
+
+
+@utils.register_interface(HashAlgorithm)
+class SHA3_256(object): # noqa: N801
+ name = "sha3-256"
+ digest_size = 32
+
+
+@utils.register_interface(HashAlgorithm)
+class SHA3_384(object): # noqa: N801
+ name = "sha3-384"
+ digest_size = 48
+
+
+@utils.register_interface(HashAlgorithm)
+class SHA3_512(object): # noqa: N801
+ name = "sha3-512"
+ digest_size = 64
+
+
+@utils.register_interface(HashAlgorithm)
+@utils.register_interface(ExtendableOutputFunction)
+class SHAKE128(object):
+ name = "shake128"
+
+ def __init__(self, digest_size):
+ if not isinstance(digest_size, six.integer_types):
+ raise TypeError("digest_size must be an integer")
+
+ if digest_size < 1:
+ raise ValueError("digest_size must be a positive integer")
+
+ self._digest_size = digest_size
+
+ digest_size = utils.read_only_property("_digest_size")
+
+
+@utils.register_interface(HashAlgorithm)
+@utils.register_interface(ExtendableOutputFunction)
+class SHAKE256(object):
+ name = "shake256"
+
+ def __init__(self, digest_size):
+ if not isinstance(digest_size, six.integer_types):
+ raise TypeError("digest_size must be an integer")
+
+ if digest_size < 1:
+ raise ValueError("digest_size must be a positive integer")
+
+ self._digest_size = digest_size
+
+ digest_size = utils.read_only_property("_digest_size")
+
+
@utils.register_interface(HashAlgorithm)
class MD5(object):
name = "md5"
@@ -157,13 +233,9 @@ class BLAKE2b(object):
block_size = 128
def __init__(self, digest_size):
- if (
- digest_size > self._max_digest_size or
- digest_size < self._min_digest_size
- ):
- raise ValueError("Digest size must be {0}-{1}".format(
- self._min_digest_size, self._max_digest_size)
- )
+
+ if digest_size != 64:
+ raise ValueError("Digest size must be 64")
self._digest_size = digest_size
@@ -178,13 +250,9 @@ class BLAKE2s(object):
_min_digest_size = 1
def __init__(self, digest_size):
- if (
- digest_size > self._max_digest_size or
- digest_size < self._min_digest_size
- ):
- raise ValueError("Digest size must be {0}-{1}".format(
- self._min_digest_size, self._max_digest_size)
- )
+
+ if digest_size != 32:
+ raise ValueError("Digest size must be 32")
self._digest_size = digest_size
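[editor's note, not part of the patch] hashes.py removes block_size from the abstract interface, adds SHA-512/224, SHA-512/256, the SHA-3 family and the SHAKE extendable-output functions, and pins BLAKE2b/BLAKE2s to their full 64/32-byte digests. Sketch (SHA-3 and SHAKE need an OpenSSL that exposes them, typically 1.1.1+):

    from cryptography.hazmat.primitives import hashes

    h = hashes.Hash(hashes.SHA3_256())          # backend is now optional
    h.update(b"some data")
    digest = h.finalize()                       # 32 bytes

    xof = hashes.Hash(hashes.SHAKE128(digest_size=16))
    xof.update(b"some data")
    assert len(xof.finalize()) == 16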
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/hmac.py b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/hmac.py
index 2e9a4e2..8c421dc 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/hmac.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/hmac.py
@@ -6,20 +6,23 @@
from cryptography import utils
from cryptography.exceptions import (
- AlreadyFinalized, UnsupportedAlgorithm, _Reasons
+ AlreadyFinalized,
+ UnsupportedAlgorithm,
+ _Reasons,
)
+from cryptography.hazmat.backends import _get_backend
from cryptography.hazmat.backends.interfaces import HMACBackend
-from cryptography.hazmat.primitives import hashes, mac
+from cryptography.hazmat.primitives import hashes
-@utils.register_interface(mac.MACContext)
@utils.register_interface(hashes.HashContext)
class HMAC(object):
- def __init__(self, key, algorithm, backend, ctx=None):
+ def __init__(self, key, algorithm, backend=None, ctx=None):
+ backend = _get_backend(backend)
if not isinstance(backend, HMACBackend):
raise UnsupportedAlgorithm(
"Backend object does not implement HMACBackend.",
- _Reasons.BACKEND_MISSING_INTERFACE
+ _Reasons.BACKEND_MISSING_INTERFACE,
)
if not isinstance(algorithm, hashes.HashAlgorithm):
@@ -38,8 +41,7 @@ def __init__(self, key, algorithm, backend, ctx=None):
def update(self, data):
if self._ctx is None:
raise AlreadyFinalized("Context was already finalized.")
- if not isinstance(data, bytes):
- raise TypeError("data must be bytes.")
+ utils._check_byteslike("data", data)
self._ctx.update(data)
def copy(self):
@@ -49,7 +51,7 @@ def copy(self):
self._key,
self.algorithm,
backend=self._backend,
- ctx=self._ctx.copy()
+ ctx=self._ctx.copy(),
)
def finalize(self):
@@ -60,8 +62,7 @@ def finalize(self):
return digest
def verify(self, signature):
- if not isinstance(signature, bytes):
- raise TypeError("signature must be bytes.")
+ utils._check_bytes("signature", signature)
if self._ctx is None:
raise AlreadyFinalized("Context was already finalized.")
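[editor's note, not part of the patch] hmac.py follows the same pattern: optional backend, bytes-like data in update(), and _check_bytes for verify(). Sketch:

    import os
    from cryptography.hazmat.primitives import hashes, hmac

    key = os.urandom(32)
    h = hmac.HMAC(key, hashes.SHA256())
    h.update(b"message")
    sig = h.finalize()

    h = hmac.HMAC(key, hashes.SHA256())
    h.update(b"message")
    h.verify(sig)                            # raises InvalidSignature on mismatch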
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000..7b6310d
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-38.pyc
new file mode 100644
index 0000000..950f64c
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-38.pyc
new file mode 100644
index 0000000..72c4c7a
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-38.pyc
new file mode 100644
index 0000000..9f3e280
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-38.pyc
new file mode 100644
index 0000000..1af31c6
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-38.pyc
new file mode 100644
index 0000000..0d619f3
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-38.pyc
new file mode 100644
index 0000000..2d00a1d
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/concatkdf.py b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/concatkdf.py
index c6399e4..7cc0324 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/concatkdf.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/concatkdf.py
@@ -8,8 +8,12 @@
from cryptography import utils
from cryptography.exceptions import (
- AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons
+ AlreadyFinalized,
+ InvalidKey,
+ UnsupportedAlgorithm,
+ _Reasons,
)
+from cryptography.hazmat.backends import _get_backend
from cryptography.hazmat.backends.interfaces import HMACBackend
from cryptography.hazmat.backends.interfaces import HashBackend
from cryptography.hazmat.primitives import constant_time, hashes, hmac
@@ -17,29 +21,26 @@
def _int_to_u32be(n):
- return struct.pack('>I', n)
+ return struct.pack(">I", n)
def _common_args_checks(algorithm, length, otherinfo):
max_length = algorithm.digest_size * (2 ** 32 - 1)
if length > max_length:
raise ValueError(
- "Can not derive keys larger than {0} bits.".format(
- max_length
- ))
- if not (otherinfo is None or isinstance(otherinfo, bytes)):
- raise TypeError("otherinfo must be bytes.")
+ "Can not derive keys larger than {} bits.".format(max_length)
+ )
+ if otherinfo is not None:
+ utils._check_bytes("otherinfo", otherinfo)
def _concatkdf_derive(key_material, length, auxfn, otherinfo):
- if not isinstance(key_material, bytes):
- raise TypeError("key_material must be bytes.")
-
+ utils._check_byteslike("key_material", key_material)
output = [b""]
outlen = 0
counter = 1
- while (length > outlen):
+ while length > outlen:
h = auxfn()
h.update(_int_to_u32be(counter))
h.update(key_material)
@@ -53,7 +54,8 @@ def _concatkdf_derive(key_material, length, auxfn, otherinfo):
@utils.register_interface(KeyDerivationFunction)
class ConcatKDFHash(object):
- def __init__(self, algorithm, length, otherinfo, backend):
+ def __init__(self, algorithm, length, otherinfo, backend=None):
+ backend = _get_backend(backend)
_common_args_checks(algorithm, length, otherinfo)
self._algorithm = algorithm
@@ -65,7 +67,7 @@ def __init__(self, algorithm, length, otherinfo, backend):
if not isinstance(backend, HashBackend):
raise UnsupportedAlgorithm(
"Backend object does not implement HashBackend.",
- _Reasons.BACKEND_MISSING_INTERFACE
+ _Reasons.BACKEND_MISSING_INTERFACE,
)
self._backend = backend
self._used = False
@@ -77,8 +79,9 @@ def derive(self, key_material):
if self._used:
raise AlreadyFinalized
self._used = True
- return _concatkdf_derive(key_material, self._length,
- self._hash, self._otherinfo)
+ return _concatkdf_derive(
+ key_material, self._length, self._hash, self._otherinfo
+ )
def verify(self, key_material, expected_key):
if not constant_time.bytes_eq(self.derive(key_material), expected_key):
@@ -87,7 +90,8 @@ def verify(self, key_material, expected_key):
@utils.register_interface(KeyDerivationFunction)
class ConcatKDFHMAC(object):
- def __init__(self, algorithm, length, salt, otherinfo, backend):
+ def __init__(self, algorithm, length, salt, otherinfo, backend=None):
+ backend = _get_backend(backend)
_common_args_checks(algorithm, length, otherinfo)
self._algorithm = algorithm
@@ -96,16 +100,17 @@ def __init__(self, algorithm, length, salt, otherinfo, backend):
if self._otherinfo is None:
self._otherinfo = b""
- if not (salt is None or isinstance(salt, bytes)):
- raise TypeError("salt must be bytes.")
if salt is None:
salt = b"\x00" * algorithm.block_size
+ else:
+ utils._check_bytes("salt", salt)
+
self._salt = salt
if not isinstance(backend, HMACBackend):
raise UnsupportedAlgorithm(
"Backend object does not implement HMACBackend.",
- _Reasons.BACKEND_MISSING_INTERFACE
+ _Reasons.BACKEND_MISSING_INTERFACE,
)
self._backend = backend
self._used = False
@@ -117,8 +122,9 @@ def derive(self, key_material):
if self._used:
raise AlreadyFinalized
self._used = True
- return _concatkdf_derive(key_material, self._length,
- self._hmac, self._otherinfo)
+ return _concatkdf_derive(
+ key_material, self._length, self._hmac, self._otherinfo
+ )
def verify(self, key_material, expected_key):
if not constant_time.bytes_eq(self.derive(key_material), expected_key):
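[editor's note, not part of the patch] concatkdf.py gives ConcatKDFHash and ConcatKDFHMAC the optional backend plus the shared byte checks. Sketch of the hash variant; otherinfo and the input key material are illustrative:

    import os
    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.kdf.concatkdf import ConcatKDFHash

    ckdf = ConcatKDFHash(
        algorithm=hashes.SHA256(), length=32, otherinfo=b"concatkdf-example"
    )
    key = ckdf.derive(os.urandom(32))        # instances are single-use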
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/hkdf.py b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/hkdf.py
index 82ed9b1..9bb6bc2 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/hkdf.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/hkdf.py
@@ -8,8 +8,12 @@
from cryptography import utils
from cryptography.exceptions import (
- AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons
+ AlreadyFinalized,
+ InvalidKey,
+ UnsupportedAlgorithm,
+ _Reasons,
)
+from cryptography.hazmat.backends import _get_backend
from cryptography.hazmat.backends.interfaces import HMACBackend
from cryptography.hazmat.primitives import constant_time, hmac
from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
@@ -17,20 +21,20 @@
@utils.register_interface(KeyDerivationFunction)
class HKDF(object):
- def __init__(self, algorithm, length, salt, info, backend):
+ def __init__(self, algorithm, length, salt, info, backend=None):
+ backend = _get_backend(backend)
if not isinstance(backend, HMACBackend):
raise UnsupportedAlgorithm(
"Backend object does not implement HMACBackend.",
- _Reasons.BACKEND_MISSING_INTERFACE
+ _Reasons.BACKEND_MISSING_INTERFACE,
)
self._algorithm = algorithm
- if not (salt is None or isinstance(salt, bytes)):
- raise TypeError("salt must be bytes.")
-
if salt is None:
- salt = b"\x00" * (self._algorithm.digest_size // 8)
+ salt = b"\x00" * self._algorithm.digest_size
+ else:
+ utils._check_bytes("salt", salt)
self._salt = salt
@@ -44,9 +48,7 @@ def _extract(self, key_material):
return h.finalize()
def derive(self, key_material):
- if not isinstance(key_material, bytes):
- raise TypeError("key_material must be bytes.")
-
+ utils._check_byteslike("key_material", key_material)
return self._hkdf_expand.derive(self._extract(key_material))
def verify(self, key_material, expected_key):
@@ -56,32 +58,31 @@ def verify(self, key_material, expected_key):
@utils.register_interface(KeyDerivationFunction)
class HKDFExpand(object):
- def __init__(self, algorithm, length, info, backend):
+ def __init__(self, algorithm, length, info, backend=None):
+ backend = _get_backend(backend)
if not isinstance(backend, HMACBackend):
raise UnsupportedAlgorithm(
"Backend object does not implement HMACBackend.",
- _Reasons.BACKEND_MISSING_INTERFACE
+ _Reasons.BACKEND_MISSING_INTERFACE,
)
self._algorithm = algorithm
self._backend = backend
- max_length = 255 * (algorithm.digest_size // 8)
+ max_length = 255 * algorithm.digest_size
if length > max_length:
raise ValueError(
- "Can not derive keys larger than {0} octets.".format(
- max_length
- ))
+ "Can not derive keys larger than {} octets.".format(max_length)
+ )
self._length = length
- if not (info is None or isinstance(info, bytes)):
- raise TypeError("info must be bytes.")
-
if info is None:
info = b""
+ else:
+ utils._check_bytes("info", info)
self._info = info
@@ -99,12 +100,10 @@ def _expand(self, key_material):
output.append(h.finalize())
counter += 1
- return b"".join(output)[:self._length]
+ return b"".join(output)[: self._length]
def derive(self, key_material):
- if not isinstance(key_material, bytes):
- raise TypeError("key_material must be bytes.")
-
+ utils._check_byteslike("key_material", key_material)
if self._used:
raise AlreadyFinalized
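[editor's note, not part of the patch] Beyond formatting, the hkdf.py hunks fix the default salt from digest_size // 8 zero bytes to digest_size zero bytes and compute HKDFExpand's maximum output as 255 * digest_size, matching RFC 5869 (digest_size is already in bytes). Sketch:

    import os
    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.kdf.hkdf import HKDF

    hkdf = HKDF(algorithm=hashes.SHA256(), length=32, salt=None, info=b"handshake")
    key = hkdf.derive(os.urandom(32))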
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/kbkdf.py b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/kbkdf.py
index 14de56e..8643370 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/kbkdf.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/kbkdf.py
@@ -10,8 +10,12 @@
from cryptography import utils
from cryptography.exceptions import (
- AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons
+ AlreadyFinalized,
+ InvalidKey,
+ UnsupportedAlgorithm,
+ _Reasons,
)
+from cryptography.hazmat.backends import _get_backend
from cryptography.hazmat.backends.interfaces import HMACBackend
from cryptography.hazmat.primitives import constant_time, hashes, hmac
from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
@@ -28,24 +32,36 @@ class CounterLocation(Enum):
@utils.register_interface(KeyDerivationFunction)
class KBKDFHMAC(object):
- def __init__(self, algorithm, mode, length, rlen, llen,
- location, label, context, fixed, backend):
+ def __init__(
+ self,
+ algorithm,
+ mode,
+ length,
+ rlen,
+ llen,
+ location,
+ label,
+ context,
+ fixed,
+ backend=None,
+ ):
+ backend = _get_backend(backend)
if not isinstance(backend, HMACBackend):
raise UnsupportedAlgorithm(
"Backend object does not implement HMACBackend.",
- _Reasons.BACKEND_MISSING_INTERFACE
+ _Reasons.BACKEND_MISSING_INTERFACE,
)
if not isinstance(algorithm, hashes.HashAlgorithm):
raise UnsupportedAlgorithm(
"Algorithm supplied is not a supported hash algorithm.",
- _Reasons.UNSUPPORTED_HASH
+ _Reasons.UNSUPPORTED_HASH,
)
if not backend.hmac_supported(algorithm):
raise UnsupportedAlgorithm(
"Algorithm supplied is not a supported hmac algorithm.",
- _Reasons.UNSUPPORTED_HASH
+ _Reasons.UNSUPPORTED_HASH,
)
if not isinstance(mode, Mode):
@@ -55,8 +71,9 @@ def __init__(self, algorithm, mode, length, rlen, llen,
raise TypeError("location must be of type CounterLocation")
if (label or context) and fixed:
- raise ValueError("When supplying fixed data, "
- "label and context are ignored.")
+ raise ValueError(
+ "When supplying fixed data, " "label and context are ignored."
+ )
if rlen is None or not self._valid_byte_length(rlen):
raise ValueError("rlen must be between 1 and 4")
@@ -68,15 +85,13 @@ def __init__(self, algorithm, mode, length, rlen, llen,
raise TypeError("llen must be an integer")
if label is None:
- label = b''
+ label = b""
if context is None:
- context = b''
-
- if (not isinstance(label, bytes) or
- not isinstance(context, bytes)):
- raise TypeError('label and context must be of type bytes')
+ context = b""
+ utils._check_bytes("label", label)
+ utils._check_bytes("context", context)
self._algorithm = algorithm
self._mode = mode
self._length = length
@@ -91,7 +106,7 @@ def __init__(self, algorithm, mode, length, rlen, llen,
def _valid_byte_length(self, value):
if not isinstance(value, int):
- raise TypeError('value must be of type int')
+ raise TypeError("value must be of type int")
value_bin = utils.int_to_bytes(1, value)
if not 1 <= len(value_bin) <= 4:
@@ -102,14 +117,13 @@ def derive(self, key_material):
if self._used:
raise AlreadyFinalized
- if not isinstance(key_material, bytes):
- raise TypeError('key_material must be bytes')
+ utils._check_byteslike("key_material", key_material)
self._used = True
# inverse floor division (equivalent to ceiling)
rounds = -(-self._length // self._algorithm.digest_size)
- output = [b'']
+ output = [b""]
# For counter mode, the number of iterations shall not be
# larger than 2^r-1, where r <= 32 is the binary length of the counter
@@ -117,7 +131,7 @@ def derive(self, key_material):
# PRF will not repeat during a particular call to the KDF function.
r_bin = utils.int_to_bytes(1, self._rlen)
if rounds > pow(2, len(r_bin) * 8) - 1:
- raise ValueError('There are too many iterations.')
+ raise ValueError("There are too many iterations.")
for i in range(1, rounds + 1):
h = hmac.HMAC(key_material, self._algorithm, backend=self._backend)
@@ -133,7 +147,7 @@ def derive(self, key_material):
output.append(h.finalize())
- return b''.join(output)[:self._length]
+ return b"".join(output)[: self._length]
def _generate_fixed_input(self):
if self._fixed_data and isinstance(self._fixed_data, bytes):
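[editor's note, not part of the patch] The kbkdf.py changes are mostly a reflow of the long constructor plus _check_bytes for label/context, but the parameter list is large enough that a call sketch may help; the values below follow a typical NIST SP 800-108 counter-mode setup and are illustrative:

    import os
    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.kdf.kbkdf import (
        KBKDFHMAC,
        CounterLocation,
        Mode,
    )

    kdf = KBKDFHMAC(
        algorithm=hashes.SHA256(),
        mode=Mode.CounterMode,
        length=32,
        rlen=4,
        llen=4,
        location=CounterLocation.BeforeFixed,
        label=b"KBKDF HMAC Label",
        context=b"KBKDF HMAC Context",
        fixed=None,
    )
    key = kdf.derive(os.urandom(32))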
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/pbkdf2.py b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/pbkdf2.py
index f8ce7a3..5b67d48 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/pbkdf2.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/pbkdf2.py
@@ -6,8 +6,12 @@
from cryptography import utils
from cryptography.exceptions import (
- AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons
+ AlreadyFinalized,
+ InvalidKey,
+ UnsupportedAlgorithm,
+ _Reasons,
)
+from cryptography.hazmat.backends import _get_backend
from cryptography.hazmat.backends.interfaces import PBKDF2HMACBackend
from cryptography.hazmat.primitives import constant_time
from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
@@ -15,24 +19,25 @@
@utils.register_interface(KeyDerivationFunction)
class PBKDF2HMAC(object):
- def __init__(self, algorithm, length, salt, iterations, backend):
+ def __init__(self, algorithm, length, salt, iterations, backend=None):
+ backend = _get_backend(backend)
if not isinstance(backend, PBKDF2HMACBackend):
raise UnsupportedAlgorithm(
"Backend object does not implement PBKDF2HMACBackend.",
- _Reasons.BACKEND_MISSING_INTERFACE
+ _Reasons.BACKEND_MISSING_INTERFACE,
)
if not backend.pbkdf2_hmac_supported(algorithm):
raise UnsupportedAlgorithm(
- "{0} is not supported for PBKDF2 by this backend.".format(
- algorithm.name),
- _Reasons.UNSUPPORTED_HASH
+ "{} is not supported for PBKDF2 by this backend.".format(
+ algorithm.name
+ ),
+ _Reasons.UNSUPPORTED_HASH,
)
self._used = False
self._algorithm = algorithm
self._length = length
- if not isinstance(salt, bytes):
- raise TypeError("salt must be bytes.")
+ utils._check_bytes("salt", salt)
self._salt = salt
self._iterations = iterations
self._backend = backend
@@ -42,14 +47,13 @@ def derive(self, key_material):
raise AlreadyFinalized("PBKDF2 instances can only be used once.")
self._used = True
- if not isinstance(key_material, bytes):
- raise TypeError("key_material must be bytes.")
+ utils._check_byteslike("key_material", key_material)
return self._backend.derive_pbkdf2_hmac(
self._algorithm,
self._length,
self._salt,
self._iterations,
- key_material
+ key_material,
)
def verify(self, key_material, expected_key):
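[editor's note, not part of the patch] pbkdf2.py makes the backend optional and routes the salt and key material through the utils checks. Sketch; the iteration count is an illustrative floor to tune for your threat model:

    import os
    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC

    salt = os.urandom(16)
    kdf = PBKDF2HMAC(
        algorithm=hashes.SHA256(), length=32, salt=salt, iterations=100000
    )
    key = kdf.derive(b"correct horse battery staple")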
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/scrypt.py b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/scrypt.py
index 77dcf9a..f028646 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/scrypt.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/scrypt.py
@@ -8,8 +8,12 @@
from cryptography import utils
from cryptography.exceptions import (
- AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons
+ AlreadyFinalized,
+ InvalidKey,
+ UnsupportedAlgorithm,
+ _Reasons,
)
+from cryptography.hazmat.backends import _get_backend
from cryptography.hazmat.backends.interfaces import ScryptBackend
from cryptography.hazmat.primitives import constant_time
from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
@@ -22,17 +26,16 @@
@utils.register_interface(KeyDerivationFunction)
class Scrypt(object):
- def __init__(self, salt, length, n, r, p, backend):
+ def __init__(self, salt, length, n, r, p, backend=None):
+ backend = _get_backend(backend)
if not isinstance(backend, ScryptBackend):
raise UnsupportedAlgorithm(
"Backend object does not implement ScryptBackend.",
- _Reasons.BACKEND_MISSING_INTERFACE
+ _Reasons.BACKEND_MISSING_INTERFACE,
)
self._length = length
- if not isinstance(salt, bytes):
- raise TypeError("salt must be bytes.")
-
+ utils._check_bytes("salt", salt)
if n < 2 or (n & (n - 1)) != 0:
raise ValueError("n must be greater than 1 and be a power of 2.")
@@ -54,8 +57,7 @@ def derive(self, key_material):
raise AlreadyFinalized("Scrypt instances can only be used once.")
self._used = True
- if not isinstance(key_material, bytes):
- raise TypeError("key_material must be bytes.")
+ utils._check_byteslike("key_material", key_material)
return self._backend.derive_scrypt(
key_material, self._salt, self._length, self._n, self._r, self._p
)
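[editor's note, not part of the patch] scrypt.py likewise defaults its backend and accepts bytes-like key material; n must still be a power of two greater than one. Sketch with commonly cited interactive-login parameters:

    import os
    from cryptography.hazmat.primitives.kdf.scrypt import Scrypt

    salt = os.urandom(16)
    kdf = Scrypt(salt=salt, length=32, n=2 ** 14, r=8, p=1)
    key = kdf.derive(b"my password")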
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/x963kdf.py b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/x963kdf.py
index 83789b3..1898d52 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/x963kdf.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/kdf/x963kdf.py
@@ -8,27 +8,34 @@
from cryptography import utils
from cryptography.exceptions import (
- AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons
+ AlreadyFinalized,
+ InvalidKey,
+ UnsupportedAlgorithm,
+ _Reasons,
)
+from cryptography.hazmat.backends import _get_backend
from cryptography.hazmat.backends.interfaces import HashBackend
from cryptography.hazmat.primitives import constant_time, hashes
from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
def _int_to_u32be(n):
- return struct.pack('>I', n)
+ return struct.pack(">I", n)
@utils.register_interface(KeyDerivationFunction)
class X963KDF(object):
- def __init__(self, algorithm, length, sharedinfo, backend):
+ def __init__(self, algorithm, length, sharedinfo, backend=None):
+ backend = _get_backend(backend)
max_len = algorithm.digest_size * (2 ** 32 - 1)
if length > max_len:
raise ValueError(
- "Can not derive keys larger than {0} bits.".format(max_len))
- if not (sharedinfo is None or isinstance(sharedinfo, bytes)):
- raise TypeError("sharedinfo must be bytes.")
+ "Can not derive keys larger than {} bits.".format(max_len)
+ )
+ if sharedinfo is not None:
+ utils._check_bytes("sharedinfo", sharedinfo)
+
self._algorithm = algorithm
self._length = length
self._sharedinfo = sharedinfo
@@ -36,7 +43,7 @@ def __init__(self, algorithm, length, sharedinfo, backend):
if not isinstance(backend, HashBackend):
raise UnsupportedAlgorithm(
"Backend object does not implement HashBackend.",
- _Reasons.BACKEND_MISSING_INTERFACE
+ _Reasons.BACKEND_MISSING_INTERFACE,
)
self._backend = backend
self._used = False
@@ -45,10 +52,7 @@ def derive(self, key_material):
if self._used:
raise AlreadyFinalized
self._used = True
-
- if not isinstance(key_material, bytes):
- raise TypeError("key_material must be bytes.")
-
+ utils._check_byteslike("key_material", key_material)
output = [b""]
outlen = 0
counter = 1
@@ -63,7 +67,7 @@ def derive(self, key_material):
outlen += len(output[-1])
counter += 1
- return b"".join(output)[:self._length]
+ return b"".join(output)[: self._length]
def verify(self, key_material, expected_key):
if not constant_time.bytes_eq(self.derive(key_material), expected_key):
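[editor's note, not part of the patch] x963kdf.py follows the same pattern (optional backend, byteslike key material, {}-style formatting). Sketch; sharedinfo is an illustrative value:

    import os
    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.kdf.x963kdf import X963KDF

    xkdf = X963KDF(algorithm=hashes.SHA256(), length=32, sharedinfo=b"ANSI X9.63 demo")
    key = xkdf.derive(os.urandom(32))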
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/keywrap.py b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/keywrap.py
index 702a693..2439caf 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/keywrap.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/keywrap.py
@@ -6,6 +6,7 @@
import struct
+from cryptography.hazmat.backends import _get_backend
from cryptography.hazmat.primitives.ciphers import Cipher
from cryptography.hazmat.primitives.ciphers.algorithms import AES
from cryptography.hazmat.primitives.ciphers.modes import ECB
@@ -33,7 +34,8 @@ def _wrap_core(wrapping_key, a, r, backend):
return a + b"".join(r)
-def aes_key_wrap(wrapping_key, key_to_wrap, backend):
+def aes_key_wrap(wrapping_key, key_to_wrap, backend=None):
+ backend = _get_backend(backend)
if len(wrapping_key) not in [16, 24, 32]:
raise ValueError("The wrapping key must be a valid AES key length")
@@ -44,7 +46,7 @@ def aes_key_wrap(wrapping_key, key_to_wrap, backend):
raise ValueError("The key to wrap must be a multiple of 8 bytes")
a = b"\xa6\xa6\xa6\xa6\xa6\xa6\xa6\xa6"
- r = [key_to_wrap[i:i + 8] for i in range(0, len(key_to_wrap), 8)]
+ r = [key_to_wrap[i : i + 8] for i in range(0, len(key_to_wrap), 8)]
return _wrap_core(wrapping_key, a, r, backend)
@@ -55,9 +57,12 @@ def _unwrap_core(wrapping_key, a, r, backend):
for j in reversed(range(6)):
for i in reversed(range(n)):
# pack/unpack are safe as these are always 64-bit chunks
- atr = struct.pack(
- ">Q", struct.unpack(">Q", a)[0] ^ ((n * j) + i + 1)
- ) + r[i]
+ atr = (
+ struct.pack(
+ ">Q", struct.unpack(">Q", a)[0] ^ ((n * j) + i + 1)
+ )
+ + r[i]
+ )
# every decryption operation is a discrete 16 byte chunk so
# it is safe to reuse the decryptor for the entire operation
b = decryptor.update(atr)
@@ -68,18 +73,82 @@ def _unwrap_core(wrapping_key, a, r, backend):
return a, r
-def aes_key_unwrap(wrapping_key, wrapped_key, backend):
+def aes_key_wrap_with_padding(wrapping_key, key_to_wrap, backend=None):
+ backend = _get_backend(backend)
+ if len(wrapping_key) not in [16, 24, 32]:
+ raise ValueError("The wrapping key must be a valid AES key length")
+
+ aiv = b"\xA6\x59\x59\xA6" + struct.pack(">i", len(key_to_wrap))
+ # pad the key to wrap if necessary
+ pad = (8 - (len(key_to_wrap) % 8)) % 8
+ key_to_wrap = key_to_wrap + b"\x00" * pad
+ if len(key_to_wrap) == 8:
+ # RFC 5649 - 4.1 - exactly 8 octets after padding
+ encryptor = Cipher(AES(wrapping_key), ECB(), backend).encryptor()
+ b = encryptor.update(aiv + key_to_wrap)
+ assert encryptor.finalize() == b""
+ return b
+ else:
+ r = [key_to_wrap[i : i + 8] for i in range(0, len(key_to_wrap), 8)]
+ return _wrap_core(wrapping_key, aiv, r, backend)
+
+
+def aes_key_unwrap_with_padding(wrapping_key, wrapped_key, backend=None):
+ backend = _get_backend(backend)
+ if len(wrapped_key) < 16:
+ raise InvalidUnwrap("Must be at least 16 bytes")
+
+ if len(wrapping_key) not in [16, 24, 32]:
+ raise ValueError("The wrapping key must be a valid AES key length")
+
+ if len(wrapped_key) == 16:
+ # RFC 5649 - 4.2 - exactly two 64-bit blocks
+ decryptor = Cipher(AES(wrapping_key), ECB(), backend).decryptor()
+ b = decryptor.update(wrapped_key)
+ assert decryptor.finalize() == b""
+ a = b[:8]
+ data = b[8:]
+ n = 1
+ else:
+ r = [wrapped_key[i : i + 8] for i in range(0, len(wrapped_key), 8)]
+ encrypted_aiv = r.pop(0)
+ n = len(r)
+ a, r = _unwrap_core(wrapping_key, encrypted_aiv, r, backend)
+ data = b"".join(r)
+
+ # 1) Check that MSB(32,A) = A65959A6.
+ # 2) Check that 8*(n-1) < LSB(32,A) <= 8*n. If so, let
+ # MLI = LSB(32,A).
+ # 3) Let b = (8*n)-MLI, and then check that the rightmost b octets of
+ # the output data are zero.
+ (mli,) = struct.unpack(">I", a[4:])
+ b = (8 * n) - mli
+ if (
+ not bytes_eq(a[:4], b"\xa6\x59\x59\xa6")
+ or not 8 * (n - 1) < mli <= 8 * n
+ or (b != 0 and not bytes_eq(data[-b:], b"\x00" * b))
+ ):
+ raise InvalidUnwrap()
+
+ if b == 0:
+ return data
+ else:
+ return data[:-b]
+
+
+def aes_key_unwrap(wrapping_key, wrapped_key, backend=None):
+ backend = _get_backend(backend)
if len(wrapped_key) < 24:
- raise ValueError("Must be at least 24 bytes")
+ raise InvalidUnwrap("Must be at least 24 bytes")
if len(wrapped_key) % 8 != 0:
- raise ValueError("The wrapped key must be a multiple of 8 bytes")
+ raise InvalidUnwrap("The wrapped key must be a multiple of 8 bytes")
if len(wrapping_key) not in [16, 24, 32]:
raise ValueError("The wrapping key must be a valid AES key length")
aiv = b"\xa6\xa6\xa6\xa6\xa6\xa6\xa6\xa6"
- r = [wrapped_key[i:i + 8] for i in range(0, len(wrapped_key), 8)]
+ r = [wrapped_key[i : i + 8] for i in range(0, len(wrapped_key), 8)]
a = r.pop(0)
a, r = _unwrap_core(wrapping_key, a, r, backend)
if not bytes_eq(a, aiv):
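
The keywrap changes above make backend optional and add the RFC 5649 padded variants alongside the plain RFC 3394 wrap/unwrap. A minimal round-trip sketch using only functions visible in the hunk:

    import os
    from cryptography.hazmat.primitives import keywrap

    kek = os.urandom(32)                         # AES-256 key-encryption key
    key_to_wrap = os.urandom(24)                 # plain wrap needs a multiple of 8 bytes

    wrapped = keywrap.aes_key_wrap(kek, key_to_wrap)             # backend now optional
    assert keywrap.aes_key_unwrap(kek, wrapped) == key_to_wrap

    # the RFC 5649 variant accepts arbitrary key lengths by padding internally
    wrapped_p = keywrap.aes_key_wrap_with_padding(kek, b"\x01\x02\x03")
    assert keywrap.aes_key_unwrap_with_padding(kek, wrapped_p) == b"\x01\x02\x03"
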
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/mac.py b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/mac.py
deleted file mode 100644
index 4c95190..0000000
--- a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/mac.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# This file is dual licensed under the terms of the Apache License, Version
-# 2.0, and the BSD License. See the LICENSE file in the root of this repository
-# for complete details.
-
-from __future__ import absolute_import, division, print_function
-
-import abc
-
-import six
-
-
-@six.add_metaclass(abc.ABCMeta)
-class MACContext(object):
- @abc.abstractmethod
- def update(self, data):
- """
- Processes the provided bytes.
- """
-
- @abc.abstractmethod
- def finalize(self):
- """
- Returns the message authentication code as bytes.
- """
-
- @abc.abstractmethod
- def copy(self):
- """
- Return a MACContext that is a copy of the current context.
- """
-
- @abc.abstractmethod
- def verify(self, signature):
- """
- Checks if the generated message authentication code matches the
- signature.
- """
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/padding.py b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/padding.py
index a081976..9591361 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/padding.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/padding.py
@@ -40,15 +40,14 @@ def _byte_padding_update(buffer_, data, block_size):
if buffer_ is None:
raise AlreadyFinalized("Context was already finalized.")
- if not isinstance(data, bytes):
- raise TypeError("data must be bytes.")
+ utils._check_bytes("data", data)
buffer_ += data
finished_blocks = len(buffer_) // (block_size // 8)
- result = buffer_[:finished_blocks * (block_size // 8)]
- buffer_ = buffer_[finished_blocks * (block_size // 8):]
+ result = buffer_[: finished_blocks * (block_size // 8)]
+ buffer_ = buffer_[finished_blocks * (block_size // 8) :]
return buffer_, result
@@ -65,15 +64,14 @@ def _byte_unpadding_update(buffer_, data, block_size):
if buffer_ is None:
raise AlreadyFinalized("Context was already finalized.")
- if not isinstance(data, bytes):
- raise TypeError("data must be bytes.")
+ utils._check_bytes("data", data)
buffer_ += data
finished_blocks = max(len(buffer_) // (block_size // 8) - 1, 0)
- result = buffer_[:finished_blocks * (block_size // 8)]
- buffer_ = buffer_[finished_blocks * (block_size // 8):]
+ result = buffer_[: finished_blocks * (block_size // 8)]
+ buffer_ = buffer_[finished_blocks * (block_size // 8) :]
return buffer_, result
@@ -115,7 +113,8 @@ def __init__(self, block_size):
def update(self, data):
self._buffer, result = _byte_padding_update(
- self._buffer, data, self.block_size)
+ self._buffer, data, self.block_size
+ )
return result
def _padding(self, size):
@@ -123,7 +122,8 @@ def _padding(self, size):
def finalize(self):
result = _byte_padding_pad(
- self._buffer, self.block_size, self._padding)
+ self._buffer, self.block_size, self._padding
+ )
self._buffer = None
return result
@@ -137,13 +137,14 @@ def __init__(self, block_size):
def update(self, data):
self._buffer, result = _byte_unpadding_update(
- self._buffer, data, self.block_size)
+ self._buffer, data, self.block_size
+ )
return result
def finalize(self):
result = _byte_unpadding_check(
- self._buffer, self.block_size,
- lib.Cryptography_check_pkcs7_padding)
+ self._buffer, self.block_size, lib.Cryptography_check_pkcs7_padding
+ )
self._buffer = None
return result
@@ -169,7 +170,8 @@ def __init__(self, block_size):
def update(self, data):
self._buffer, result = _byte_padding_update(
- self._buffer, data, self.block_size)
+ self._buffer, data, self.block_size
+ )
return result
def _padding(self, size):
@@ -177,7 +179,8 @@ def _padding(self, size):
def finalize(self):
result = _byte_padding_pad(
- self._buffer, self.block_size, self._padding)
+ self._buffer, self.block_size, self._padding
+ )
self._buffer = None
return result
@@ -191,12 +194,15 @@ def __init__(self, block_size):
def update(self, data):
self._buffer, result = _byte_unpadding_update(
- self._buffer, data, self.block_size)
+ self._buffer, data, self.block_size
+ )
return result
def finalize(self):
result = _byte_unpadding_check(
- self._buffer, self.block_size,
- lib.Cryptography_check_ansix923_padding)
+ self._buffer,
+ self.block_size,
+ lib.Cryptography_check_ansix923_padding,
+ )
self._buffer = None
return result
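
The padding hunks only swap the isinstance checks for utils._check_bytes and reflow call sites, so the public PKCS7/ANSIX923 API is unchanged. A short round-trip sketch:

    from cryptography.hazmat.primitives import padding

    padder = padding.PKCS7(128).padder()         # block size is given in bits
    padded = padder.update(b"secret message") + padder.finalize()

    unpadder = padding.PKCS7(128).unpadder()
    assert unpadder.update(padded) + unpadder.finalize() == b"secret message"
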
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/poly1305.py b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/poly1305.py
new file mode 100644
index 0000000..6439686
--- /dev/null
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/poly1305.py
@@ -0,0 +1,58 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+
+from cryptography import utils
+from cryptography.exceptions import (
+ AlreadyFinalized,
+ UnsupportedAlgorithm,
+ _Reasons,
+)
+
+
+class Poly1305(object):
+ def __init__(self, key):
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.poly1305_supported():
+ raise UnsupportedAlgorithm(
+ "poly1305 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_MAC,
+ )
+ self._ctx = backend.create_poly1305_ctx(key)
+
+ def update(self, data):
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+ utils._check_byteslike("data", data)
+ self._ctx.update(data)
+
+ def finalize(self):
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+ mac = self._ctx.finalize()
+ self._ctx = None
+ return mac
+
+ def verify(self, tag):
+ utils._check_bytes("tag", tag)
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+
+ ctx, self._ctx = self._ctx, None
+ ctx.verify(tag)
+
+ @classmethod
+ def generate_tag(cls, key, data):
+ p = Poly1305(key)
+ p.update(data)
+ return p.finalize()
+
+ @classmethod
+ def verify_tag(cls, key, data, tag):
+ p = Poly1305(key)
+ p.update(data)
+ p.verify(tag)
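
The new poly1305.py exposes one-shot class methods in addition to the incremental update()/finalize() flow. A usage sketch; note that Poly1305 needs OpenSSL support (per the UnsupportedAlgorithm branch above) and that a key must never be reused for two different messages:

    import os
    from cryptography.exceptions import InvalidSignature
    from cryptography.hazmat.primitives.poly1305 import Poly1305

    key = os.urandom(32)                         # Poly1305 keys are exactly 32 bytes
    tag = Poly1305.generate_tag(key, b"message to authenticate")

    try:
        Poly1305.verify_tag(key, b"message to authenticate", tag)
    except InvalidSignature:
        print("tag mismatch")
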
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/serialization.py b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/serialization.py
deleted file mode 100644
index bd09e6e..0000000
--- a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/serialization.py
+++ /dev/null
@@ -1,209 +0,0 @@
-# This file is dual licensed under the terms of the Apache License, Version
-# 2.0, and the BSD License. See the LICENSE file in the root of this repository
-# for complete details.
-
-from __future__ import absolute_import, division, print_function
-
-import abc
-import base64
-import struct
-from enum import Enum
-
-import six
-
-from cryptography import utils
-from cryptography.exceptions import UnsupportedAlgorithm
-from cryptography.hazmat.primitives.asymmetric import dsa, ec, rsa
-
-
-def load_pem_private_key(data, password, backend):
- return backend.load_pem_private_key(data, password)
-
-
-def load_pem_public_key(data, backend):
- return backend.load_pem_public_key(data)
-
-
-def load_pem_parameters(data, backend):
- return backend.load_pem_parameters(data)
-
-
-def load_der_private_key(data, password, backend):
- return backend.load_der_private_key(data, password)
-
-
-def load_der_public_key(data, backend):
- return backend.load_der_public_key(data)
-
-
-def load_der_parameters(data, backend):
- return backend.load_der_parameters(data)
-
-
-def load_ssh_public_key(data, backend):
- key_parts = data.split(b' ', 2)
-
- if len(key_parts) < 2:
- raise ValueError(
- 'Key is not in the proper format or contains extra data.')
-
- key_type = key_parts[0]
-
- if key_type == b'ssh-rsa':
- loader = _load_ssh_rsa_public_key
- elif key_type == b'ssh-dss':
- loader = _load_ssh_dss_public_key
- elif key_type in [
- b'ecdsa-sha2-nistp256', b'ecdsa-sha2-nistp384', b'ecdsa-sha2-nistp521',
- ]:
- loader = _load_ssh_ecdsa_public_key
- else:
- raise UnsupportedAlgorithm('Key type is not supported.')
-
- key_body = key_parts[1]
-
- try:
- decoded_data = base64.b64decode(key_body)
- except TypeError:
- raise ValueError('Key is not in the proper format.')
-
- inner_key_type, rest = _ssh_read_next_string(decoded_data)
-
- if inner_key_type != key_type:
- raise ValueError(
- 'Key header and key body contain different key type values.'
- )
-
- return loader(key_type, rest, backend)
-
-
-def _load_ssh_rsa_public_key(key_type, decoded_data, backend):
- e, rest = _ssh_read_next_mpint(decoded_data)
- n, rest = _ssh_read_next_mpint(rest)
-
- if rest:
- raise ValueError('Key body contains extra bytes.')
-
- return rsa.RSAPublicNumbers(e, n).public_key(backend)
-
-
-def _load_ssh_dss_public_key(key_type, decoded_data, backend):
- p, rest = _ssh_read_next_mpint(decoded_data)
- q, rest = _ssh_read_next_mpint(rest)
- g, rest = _ssh_read_next_mpint(rest)
- y, rest = _ssh_read_next_mpint(rest)
-
- if rest:
- raise ValueError('Key body contains extra bytes.')
-
- parameter_numbers = dsa.DSAParameterNumbers(p, q, g)
- public_numbers = dsa.DSAPublicNumbers(y, parameter_numbers)
-
- return public_numbers.public_key(backend)
-
-
-def _load_ssh_ecdsa_public_key(expected_key_type, decoded_data, backend):
- curve_name, rest = _ssh_read_next_string(decoded_data)
- data, rest = _ssh_read_next_string(rest)
-
- if expected_key_type != b"ecdsa-sha2-" + curve_name:
- raise ValueError(
- 'Key header and key body contain different key type values.'
- )
-
- if rest:
- raise ValueError('Key body contains extra bytes.')
-
- curve = {
- b"nistp256": ec.SECP256R1,
- b"nistp384": ec.SECP384R1,
- b"nistp521": ec.SECP521R1,
- }[curve_name]()
-
- if six.indexbytes(data, 0) != 4:
- raise NotImplementedError(
- "Compressed elliptic curve points are not supported"
- )
-
- numbers = ec.EllipticCurvePublicNumbers.from_encoded_point(curve, data)
- return numbers.public_key(backend)
-
-
-def _ssh_read_next_string(data):
- """
- Retrieves the next RFC 4251 string value from the data.
-
- While the RFC calls these strings, in Python they are bytes objects.
- """
- if len(data) < 4:
- raise ValueError("Key is not in the proper format")
-
- str_len, = struct.unpack('>I', data[:4])
- if len(data) < str_len + 4:
- raise ValueError("Key is not in the proper format")
-
- return data[4:4 + str_len], data[4 + str_len:]
-
-
-def _ssh_read_next_mpint(data):
- """
- Reads the next mpint from the data.
-
- Currently, all mpints are interpreted as unsigned.
- """
- mpint_data, rest = _ssh_read_next_string(data)
-
- return (
- utils.int_from_bytes(mpint_data, byteorder='big', signed=False), rest
- )
-
-
-def _ssh_write_string(data):
- return struct.pack(">I", len(data)) + data
-
-
-def _ssh_write_mpint(value):
- data = utils.int_to_bytes(value)
- if six.indexbytes(data, 0) & 0x80:
- data = b"\x00" + data
- return _ssh_write_string(data)
-
-
-class Encoding(Enum):
- PEM = "PEM"
- DER = "DER"
- OpenSSH = "OpenSSH"
-
-
-class PrivateFormat(Enum):
- PKCS8 = "PKCS8"
- TraditionalOpenSSL = "TraditionalOpenSSL"
-
-
-class PublicFormat(Enum):
- SubjectPublicKeyInfo = "X.509 subjectPublicKeyInfo with PKCS#1"
- PKCS1 = "Raw PKCS#1"
- OpenSSH = "OpenSSH"
-
-
-class ParameterFormat(Enum):
- PKCS3 = "PKCS3"
-
-
-@six.add_metaclass(abc.ABCMeta)
-class KeySerializationEncryption(object):
- pass
-
-
-@utils.register_interface(KeySerializationEncryption)
-class BestAvailableEncryption(object):
- def __init__(self, password):
- if not isinstance(password, bytes) or len(password) == 0:
- raise ValueError("Password must be 1 or more bytes.")
-
- self.password = password
-
-
-@utils.register_interface(KeySerializationEncryption)
-class NoEncryption(object):
- pass
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/serialization/__init__.py b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/serialization/__init__.py
new file mode 100644
index 0000000..c2f9b01
--- /dev/null
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/serialization/__init__.py
@@ -0,0 +1,44 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+from cryptography.hazmat.primitives.serialization.base import (
+ BestAvailableEncryption,
+ Encoding,
+ KeySerializationEncryption,
+ NoEncryption,
+ ParameterFormat,
+ PrivateFormat,
+ PublicFormat,
+ load_der_parameters,
+ load_der_private_key,
+ load_der_public_key,
+ load_pem_parameters,
+ load_pem_private_key,
+ load_pem_public_key,
+)
+from cryptography.hazmat.primitives.serialization.ssh import (
+ load_ssh_private_key,
+ load_ssh_public_key,
+)
+
+
+__all__ = [
+ "load_der_parameters",
+ "load_der_private_key",
+ "load_der_public_key",
+ "load_pem_parameters",
+ "load_pem_private_key",
+ "load_pem_public_key",
+ "load_ssh_private_key",
+ "load_ssh_public_key",
+ "Encoding",
+ "PrivateFormat",
+ "PublicFormat",
+ "ParameterFormat",
+ "KeySerializationEncryption",
+ "BestAvailableEncryption",
+ "NoEncryption",
+]
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000..3e64f7a
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-38.pyc
new file mode 100644
index 0000000..337b10f
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-38.pyc
new file mode 100644
index 0000000..74cd668
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-38.pyc
new file mode 100644
index 0000000..bcba0df
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-38.pyc
new file mode 100644
index 0000000..996dbad
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/serialization/base.py b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/serialization/base.py
new file mode 100644
index 0000000..b2b4034
--- /dev/null
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/serialization/base.py
@@ -0,0 +1,90 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import abc
+from enum import Enum
+
+import six
+
+from cryptography import utils
+from cryptography.hazmat.backends import _get_backend
+
+
+def load_pem_private_key(data, password, backend=None):
+ backend = _get_backend(backend)
+ return backend.load_pem_private_key(data, password)
+
+
+def load_pem_public_key(data, backend=None):
+ backend = _get_backend(backend)
+ return backend.load_pem_public_key(data)
+
+
+def load_pem_parameters(data, backend=None):
+ backend = _get_backend(backend)
+ return backend.load_pem_parameters(data)
+
+
+def load_der_private_key(data, password, backend=None):
+ backend = _get_backend(backend)
+ return backend.load_der_private_key(data, password)
+
+
+def load_der_public_key(data, backend=None):
+ backend = _get_backend(backend)
+ return backend.load_der_public_key(data)
+
+
+def load_der_parameters(data, backend=None):
+ backend = _get_backend(backend)
+ return backend.load_der_parameters(data)
+
+
+class Encoding(Enum):
+ PEM = "PEM"
+ DER = "DER"
+ OpenSSH = "OpenSSH"
+ Raw = "Raw"
+ X962 = "ANSI X9.62"
+
+
+class PrivateFormat(Enum):
+ PKCS8 = "PKCS8"
+ TraditionalOpenSSL = "TraditionalOpenSSL"
+ Raw = "Raw"
+ OpenSSH = "OpenSSH"
+
+
+class PublicFormat(Enum):
+ SubjectPublicKeyInfo = "X.509 subjectPublicKeyInfo with PKCS#1"
+ PKCS1 = "Raw PKCS#1"
+ OpenSSH = "OpenSSH"
+ Raw = "Raw"
+ CompressedPoint = "X9.62 Compressed Point"
+ UncompressedPoint = "X9.62 Uncompressed Point"
+
+
+class ParameterFormat(Enum):
+ PKCS3 = "PKCS3"
+
+
+@six.add_metaclass(abc.ABCMeta)
+class KeySerializationEncryption(object):
+ pass
+
+
+@utils.register_interface(KeySerializationEncryption)
+class BestAvailableEncryption(object):
+ def __init__(self, password):
+ if not isinstance(password, bytes) or len(password) == 0:
+ raise ValueError("Password must be 1 or more bytes.")
+
+ self.password = password
+
+
+@utils.register_interface(KeySerializationEncryption)
+class NoEncryption(object):
+ pass
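
With the loaders in base.py now defaulting backend=None, key serialization round-trips no longer need an explicit backend. A minimal sketch, assuming the vendored release also accepts the backend-less form of rsa.generate_private_key:

    from cryptography.hazmat.primitives import serialization
    from cryptography.hazmat.primitives.asymmetric import rsa

    key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
    pem = key.private_bytes(
        serialization.Encoding.PEM,
        serialization.PrivateFormat.PKCS8,
        serialization.NoEncryption(),
    )
    loaded = serialization.load_pem_private_key(pem, password=None)   # no backend argument
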
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/serialization/pkcs12.py b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/serialization/pkcs12.py
new file mode 100644
index 0000000..201f329
--- /dev/null
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/serialization/pkcs12.py
@@ -0,0 +1,50 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+from cryptography import x509
+from cryptography.hazmat.backends import _get_backend
+from cryptography.hazmat.primitives import serialization
+from cryptography.hazmat.primitives.asymmetric import dsa, ec, rsa
+
+
+def load_key_and_certificates(data, password, backend=None):
+ backend = _get_backend(backend)
+ return backend.load_key_and_certificates_from_pkcs12(data, password)
+
+
+def serialize_key_and_certificates(name, key, cert, cas, encryption_algorithm):
+ if key is not None and not isinstance(
+ key,
+ (
+ rsa.RSAPrivateKeyWithSerialization,
+ dsa.DSAPrivateKeyWithSerialization,
+ ec.EllipticCurvePrivateKeyWithSerialization,
+ ),
+ ):
+ raise TypeError("Key must be RSA, DSA, or EllipticCurve private key.")
+ if cert is not None and not isinstance(cert, x509.Certificate):
+ raise TypeError("cert must be a certificate")
+
+ if cas is not None:
+ cas = list(cas)
+ if not all(isinstance(val, x509.Certificate) for val in cas):
+ raise TypeError("all values in cas must be certificates")
+
+ if not isinstance(
+ encryption_algorithm, serialization.KeySerializationEncryption
+ ):
+ raise TypeError(
+ "Key encryption algorithm must be a "
+ "KeySerializationEncryption instance"
+ )
+
+ if key is None and cert is None and not cas:
+ raise ValueError("You must supply at least one of key, cert, or cas")
+
+ backend = _get_backend(None)
+ return backend.serialize_key_and_certificates_to_pkcs12(
+ name, key, cert, cas, encryption_algorithm
+ )
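
The new pkcs12.py pairs the loader with serialize_key_and_certificates. A load/serialize round-trip sketch; the bundle.p12 path and both passwords are placeholders, not part of this repository:

    from cryptography.hazmat.primitives import serialization
    from cryptography.hazmat.primitives.serialization import pkcs12

    with open("bundle.p12", "rb") as f:          # hypothetical input file
        key, cert, extra_certs = pkcs12.load_key_and_certificates(f.read(), b"old-pass")

    blob = pkcs12.serialize_key_and_certificates(
        name=b"friendly-name",
        key=key,
        cert=cert,
        cas=extra_certs,
        encryption_algorithm=serialization.BestAvailableEncryption(b"new-pass"),
    )
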
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/serialization/pkcs7.py b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/serialization/pkcs7.py
new file mode 100644
index 0000000..fcdd1c9
--- /dev/null
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/serialization/pkcs7.py
@@ -0,0 +1,17 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+from cryptography.hazmat.backends import _get_backend
+
+
+def load_pem_pkcs7_certificates(data):
+ backend = _get_backend(None)
+ return backend.load_pem_pkcs7_certificates(data)
+
+
+def load_der_pkcs7_certificates(data):
+ backend = _get_backend(None)
+ return backend.load_der_pkcs7_certificates(data)
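
The pkcs7.py helpers load the certificates out of a PKCS#7 bundle. A short sketch; certs.p7b is a placeholder path:

    from cryptography.hazmat.primitives.serialization import pkcs7

    with open("certs.p7b", "rb") as f:           # hypothetical PEM-encoded PKCS#7 bundle
        certs = pkcs7.load_pem_pkcs7_certificates(f.read())

    for cert in certs:
        print(cert.subject.rfc4514_string())
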
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/serialization/ssh.py b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/serialization/ssh.py
new file mode 100644
index 0000000..5ecae59
--- /dev/null
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/serialization/ssh.py
@@ -0,0 +1,683 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import binascii
+import os
+import re
+import struct
+
+import six
+
+from cryptography import utils
+from cryptography.exceptions import UnsupportedAlgorithm
+from cryptography.hazmat.backends import _get_backend
+from cryptography.hazmat.primitives.asymmetric import dsa, ec, ed25519, rsa
+from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
+from cryptography.hazmat.primitives.serialization import (
+ Encoding,
+ NoEncryption,
+ PrivateFormat,
+ PublicFormat,
+)
+
+try:
+ from bcrypt import kdf as _bcrypt_kdf
+
+ _bcrypt_supported = True
+except ImportError:
+ _bcrypt_supported = False
+
+ def _bcrypt_kdf(*args, **kwargs):
+ raise UnsupportedAlgorithm("Need bcrypt module")
+
+
+try:
+ from base64 import encodebytes as _base64_encode
+except ImportError:
+ from base64 import encodestring as _base64_encode
+
+_SSH_ED25519 = b"ssh-ed25519"
+_SSH_RSA = b"ssh-rsa"
+_SSH_DSA = b"ssh-dss"
+_ECDSA_NISTP256 = b"ecdsa-sha2-nistp256"
+_ECDSA_NISTP384 = b"ecdsa-sha2-nistp384"
+_ECDSA_NISTP521 = b"ecdsa-sha2-nistp521"
+_CERT_SUFFIX = b"-cert-v01@openssh.com"
+
+_SSH_PUBKEY_RC = re.compile(br"\A(\S+)[ \t]+(\S+)")
+_SK_MAGIC = b"openssh-key-v1\0"
+_SK_START = b"-----BEGIN OPENSSH PRIVATE KEY-----"
+_SK_END = b"-----END OPENSSH PRIVATE KEY-----"
+_BCRYPT = b"bcrypt"
+_NONE = b"none"
+_DEFAULT_CIPHER = b"aes256-ctr"
+_DEFAULT_ROUNDS = 16
+_MAX_PASSWORD = 72
+
+# re is the only way to work on bytes-like data
+_PEM_RC = re.compile(_SK_START + b"(.*?)" + _SK_END, re.DOTALL)
+
+# padding for max blocksize
+_PADDING = memoryview(bytearray(range(1, 1 + 16)))
+
+# ciphers that are actually used in key wrapping
+_SSH_CIPHERS = {
+ b"aes256-ctr": (algorithms.AES, 32, modes.CTR, 16),
+ b"aes256-cbc": (algorithms.AES, 32, modes.CBC, 16),
+}
+
+# map local curve name to key type
+_ECDSA_KEY_TYPE = {
+ "secp256r1": _ECDSA_NISTP256,
+ "secp384r1": _ECDSA_NISTP384,
+ "secp521r1": _ECDSA_NISTP521,
+}
+
+_U32 = struct.Struct(b">I")
+_U64 = struct.Struct(b">Q")
+
+
+def _ecdsa_key_type(public_key):
+ """Return SSH key_type and curve_name for private key."""
+ curve = public_key.curve
+ if curve.name not in _ECDSA_KEY_TYPE:
+ raise ValueError(
+ "Unsupported curve for ssh private key: %r" % curve.name
+ )
+ return _ECDSA_KEY_TYPE[curve.name]
+
+
+def _ssh_pem_encode(data, prefix=_SK_START + b"\n", suffix=_SK_END + b"\n"):
+ return b"".join([prefix, _base64_encode(data), suffix])
+
+
+def _check_block_size(data, block_len):
+ """Require data to be full blocks"""
+ if not data or len(data) % block_len != 0:
+ raise ValueError("Corrupt data: missing padding")
+
+
+def _check_empty(data):
+ """All data should have been parsed."""
+ if data:
+ raise ValueError("Corrupt data: unparsed data")
+
+
+def _init_cipher(ciphername, password, salt, rounds, backend):
+ """Generate key + iv and return cipher."""
+ if not password:
+ raise ValueError("Key is password-protected.")
+
+ algo, key_len, mode, iv_len = _SSH_CIPHERS[ciphername]
+ seed = _bcrypt_kdf(password, salt, key_len + iv_len, rounds, True)
+ return Cipher(algo(seed[:key_len]), mode(seed[key_len:]), backend)
+
+
+def _get_u32(data):
+ """Uint32"""
+ if len(data) < 4:
+ raise ValueError("Invalid data")
+ return _U32.unpack(data[:4])[0], data[4:]
+
+
+def _get_u64(data):
+ """Uint64"""
+ if len(data) < 8:
+ raise ValueError("Invalid data")
+ return _U64.unpack(data[:8])[0], data[8:]
+
+
+def _get_sshstr(data):
+ """Bytes with u32 length prefix"""
+ n, data = _get_u32(data)
+ if n > len(data):
+ raise ValueError("Invalid data")
+ return data[:n], data[n:]
+
+
+def _get_mpint(data):
+ """Big integer."""
+ val, data = _get_sshstr(data)
+ if val and six.indexbytes(val, 0) > 0x7F:
+ raise ValueError("Invalid data")
+ return utils.int_from_bytes(val, "big"), data
+
+
+def _to_mpint(val):
+ """Storage format for signed bigint."""
+ if val < 0:
+ raise ValueError("negative mpint not allowed")
+ if not val:
+ return b""
+ nbytes = (val.bit_length() + 8) // 8
+ return utils.int_to_bytes(val, nbytes)
+
+
+class _FragList(object):
+ """Build recursive structure without data copy."""
+
+ def __init__(self, init=None):
+ self.flist = []
+ if init:
+ self.flist.extend(init)
+
+ def put_raw(self, val):
+ """Add plain bytes"""
+ self.flist.append(val)
+
+ def put_u32(self, val):
+ """Big-endian uint32"""
+ self.flist.append(_U32.pack(val))
+
+ def put_sshstr(self, val):
+ """Bytes prefixed with u32 length"""
+ if isinstance(val, (bytes, memoryview, bytearray)):
+ self.put_u32(len(val))
+ self.flist.append(val)
+ else:
+ self.put_u32(val.size())
+ self.flist.extend(val.flist)
+
+ def put_mpint(self, val):
+ """Big-endian bigint prefixed with u32 length"""
+ self.put_sshstr(_to_mpint(val))
+
+ def size(self):
+ """Current number of bytes"""
+ return sum(map(len, self.flist))
+
+ def render(self, dstbuf, pos=0):
+ """Write into bytearray"""
+ for frag in self.flist:
+ flen = len(frag)
+ start, pos = pos, pos + flen
+ dstbuf[start:pos] = frag
+ return pos
+
+ def tobytes(self):
+ """Return as bytes"""
+ buf = memoryview(bytearray(self.size()))
+ self.render(buf)
+ return buf.tobytes()
+
+
+class _SSHFormatRSA(object):
+ """Format for RSA keys.
+
+ Public:
+ mpint e, n
+ Private:
+ mpint n, e, d, iqmp, p, q
+ """
+
+ def get_public(self, data):
+ """RSA public fields"""
+ e, data = _get_mpint(data)
+ n, data = _get_mpint(data)
+ return (e, n), data
+
+ def load_public(self, key_type, data, backend):
+ """Make RSA public key from data."""
+ (e, n), data = self.get_public(data)
+ public_numbers = rsa.RSAPublicNumbers(e, n)
+ public_key = public_numbers.public_key(backend)
+ return public_key, data
+
+ def load_private(self, data, pubfields, backend):
+ """Make RSA private key from data."""
+ n, data = _get_mpint(data)
+ e, data = _get_mpint(data)
+ d, data = _get_mpint(data)
+ iqmp, data = _get_mpint(data)
+ p, data = _get_mpint(data)
+ q, data = _get_mpint(data)
+
+ if (e, n) != pubfields:
+ raise ValueError("Corrupt data: rsa field mismatch")
+ dmp1 = rsa.rsa_crt_dmp1(d, p)
+ dmq1 = rsa.rsa_crt_dmq1(d, q)
+ public_numbers = rsa.RSAPublicNumbers(e, n)
+ private_numbers = rsa.RSAPrivateNumbers(
+ p, q, d, dmp1, dmq1, iqmp, public_numbers
+ )
+ private_key = private_numbers.private_key(backend)
+ return private_key, data
+
+ def encode_public(self, public_key, f_pub):
+ """Write RSA public key"""
+ pubn = public_key.public_numbers()
+ f_pub.put_mpint(pubn.e)
+ f_pub.put_mpint(pubn.n)
+
+ def encode_private(self, private_key, f_priv):
+ """Write RSA private key"""
+ private_numbers = private_key.private_numbers()
+ public_numbers = private_numbers.public_numbers
+
+ f_priv.put_mpint(public_numbers.n)
+ f_priv.put_mpint(public_numbers.e)
+
+ f_priv.put_mpint(private_numbers.d)
+ f_priv.put_mpint(private_numbers.iqmp)
+ f_priv.put_mpint(private_numbers.p)
+ f_priv.put_mpint(private_numbers.q)
+
+
+class _SSHFormatDSA(object):
+ """Format for DSA keys.
+
+ Public:
+ mpint p, q, g, y
+ Private:
+ mpint p, q, g, y, x
+ """
+
+ def get_public(self, data):
+ """DSA public fields"""
+ p, data = _get_mpint(data)
+ q, data = _get_mpint(data)
+ g, data = _get_mpint(data)
+ y, data = _get_mpint(data)
+ return (p, q, g, y), data
+
+ def load_public(self, key_type, data, backend):
+ """Make DSA public key from data."""
+ (p, q, g, y), data = self.get_public(data)
+ parameter_numbers = dsa.DSAParameterNumbers(p, q, g)
+ public_numbers = dsa.DSAPublicNumbers(y, parameter_numbers)
+ self._validate(public_numbers)
+ public_key = public_numbers.public_key(backend)
+ return public_key, data
+
+ def load_private(self, data, pubfields, backend):
+ """Make DSA private key from data."""
+ (p, q, g, y), data = self.get_public(data)
+ x, data = _get_mpint(data)
+
+ if (p, q, g, y) != pubfields:
+ raise ValueError("Corrupt data: dsa field mismatch")
+ parameter_numbers = dsa.DSAParameterNumbers(p, q, g)
+ public_numbers = dsa.DSAPublicNumbers(y, parameter_numbers)
+ self._validate(public_numbers)
+ private_numbers = dsa.DSAPrivateNumbers(x, public_numbers)
+ private_key = private_numbers.private_key(backend)
+ return private_key, data
+
+ def encode_public(self, public_key, f_pub):
+ """Write DSA public key"""
+ public_numbers = public_key.public_numbers()
+ parameter_numbers = public_numbers.parameter_numbers
+ self._validate(public_numbers)
+
+ f_pub.put_mpint(parameter_numbers.p)
+ f_pub.put_mpint(parameter_numbers.q)
+ f_pub.put_mpint(parameter_numbers.g)
+ f_pub.put_mpint(public_numbers.y)
+
+ def encode_private(self, private_key, f_priv):
+ """Write DSA private key"""
+ self.encode_public(private_key.public_key(), f_priv)
+ f_priv.put_mpint(private_key.private_numbers().x)
+
+ def _validate(self, public_numbers):
+ parameter_numbers = public_numbers.parameter_numbers
+ if parameter_numbers.p.bit_length() != 1024:
+ raise ValueError("SSH supports only 1024 bit DSA keys")
+
+
+class _SSHFormatECDSA(object):
+ """Format for ECDSA keys.
+
+ Public:
+ str curve
+ bytes point
+ Private:
+ str curve
+ bytes point
+ mpint secret
+ """
+
+ def __init__(self, ssh_curve_name, curve):
+ self.ssh_curve_name = ssh_curve_name
+ self.curve = curve
+
+ def get_public(self, data):
+ """ECDSA public fields"""
+ curve, data = _get_sshstr(data)
+ point, data = _get_sshstr(data)
+ if curve != self.ssh_curve_name:
+ raise ValueError("Curve name mismatch")
+ if six.indexbytes(point, 0) != 4:
+ raise NotImplementedError("Need uncompressed point")
+ return (curve, point), data
+
+ def load_public(self, key_type, data, backend):
+ """Make ECDSA public key from data."""
+ (curve_name, point), data = self.get_public(data)
+ public_key = ec.EllipticCurvePublicKey.from_encoded_point(
+ self.curve, point.tobytes()
+ )
+ return public_key, data
+
+ def load_private(self, data, pubfields, backend):
+ """Make ECDSA private key from data."""
+ (curve_name, point), data = self.get_public(data)
+ secret, data = _get_mpint(data)
+
+ if (curve_name, point) != pubfields:
+ raise ValueError("Corrupt data: ecdsa field mismatch")
+ private_key = ec.derive_private_key(secret, self.curve, backend)
+ return private_key, data
+
+ def encode_public(self, public_key, f_pub):
+ """Write ECDSA public key"""
+ point = public_key.public_bytes(
+ Encoding.X962, PublicFormat.UncompressedPoint
+ )
+ f_pub.put_sshstr(self.ssh_curve_name)
+ f_pub.put_sshstr(point)
+
+ def encode_private(self, private_key, f_priv):
+ """Write ECDSA private key"""
+ public_key = private_key.public_key()
+ private_numbers = private_key.private_numbers()
+
+ self.encode_public(public_key, f_priv)
+ f_priv.put_mpint(private_numbers.private_value)
+
+
+class _SSHFormatEd25519(object):
+ """Format for Ed25519 keys.
+
+ Public:
+ bytes point
+ Private:
+ bytes point
+ bytes secret_and_point
+ """
+
+ def get_public(self, data):
+ """Ed25519 public fields"""
+ point, data = _get_sshstr(data)
+ return (point,), data
+
+ def load_public(self, key_type, data, backend):
+ """Make Ed25519 public key from data."""
+ (point,), data = self.get_public(data)
+ public_key = ed25519.Ed25519PublicKey.from_public_bytes(
+ point.tobytes()
+ )
+ return public_key, data
+
+ def load_private(self, data, pubfields, backend):
+ """Make Ed25519 private key from data."""
+ (point,), data = self.get_public(data)
+ keypair, data = _get_sshstr(data)
+
+ secret = keypair[:32]
+ point2 = keypair[32:]
+ if point != point2 or (point,) != pubfields:
+ raise ValueError("Corrupt data: ed25519 field mismatch")
+ private_key = ed25519.Ed25519PrivateKey.from_private_bytes(secret)
+ return private_key, data
+
+ def encode_public(self, public_key, f_pub):
+ """Write Ed25519 public key"""
+ raw_public_key = public_key.public_bytes(
+ Encoding.Raw, PublicFormat.Raw
+ )
+ f_pub.put_sshstr(raw_public_key)
+
+ def encode_private(self, private_key, f_priv):
+ """Write Ed25519 private key"""
+ public_key = private_key.public_key()
+ raw_private_key = private_key.private_bytes(
+ Encoding.Raw, PrivateFormat.Raw, NoEncryption()
+ )
+ raw_public_key = public_key.public_bytes(
+ Encoding.Raw, PublicFormat.Raw
+ )
+ f_keypair = _FragList([raw_private_key, raw_public_key])
+
+ self.encode_public(public_key, f_priv)
+ f_priv.put_sshstr(f_keypair)
+
+
+_KEY_FORMATS = {
+ _SSH_RSA: _SSHFormatRSA(),
+ _SSH_DSA: _SSHFormatDSA(),
+ _SSH_ED25519: _SSHFormatEd25519(),
+ _ECDSA_NISTP256: _SSHFormatECDSA(b"nistp256", ec.SECP256R1()),
+ _ECDSA_NISTP384: _SSHFormatECDSA(b"nistp384", ec.SECP384R1()),
+ _ECDSA_NISTP521: _SSHFormatECDSA(b"nistp521", ec.SECP521R1()),
+}
+
+
+def _lookup_kformat(key_type):
+ """Return valid format or throw error"""
+ if not isinstance(key_type, bytes):
+ key_type = memoryview(key_type).tobytes()
+ if key_type in _KEY_FORMATS:
+ return _KEY_FORMATS[key_type]
+ raise UnsupportedAlgorithm("Unsupported key type: %r" % key_type)
+
+
+def load_ssh_private_key(data, password, backend=None):
+ """Load private key from OpenSSH custom encoding."""
+ utils._check_byteslike("data", data)
+ backend = _get_backend(backend)
+ if password is not None:
+ utils._check_bytes("password", password)
+
+ m = _PEM_RC.search(data)
+ if not m:
+ raise ValueError("Not OpenSSH private key format")
+ p1 = m.start(1)
+ p2 = m.end(1)
+ data = binascii.a2b_base64(memoryview(data)[p1:p2])
+ if not data.startswith(_SK_MAGIC):
+ raise ValueError("Not OpenSSH private key format")
+ data = memoryview(data)[len(_SK_MAGIC) :]
+
+ # parse header
+ ciphername, data = _get_sshstr(data)
+ kdfname, data = _get_sshstr(data)
+ kdfoptions, data = _get_sshstr(data)
+ nkeys, data = _get_u32(data)
+ if nkeys != 1:
+ raise ValueError("Only one key supported")
+
+ # load public key data
+ pubdata, data = _get_sshstr(data)
+ pub_key_type, pubdata = _get_sshstr(pubdata)
+ kformat = _lookup_kformat(pub_key_type)
+ pubfields, pubdata = kformat.get_public(pubdata)
+ _check_empty(pubdata)
+
+ # load secret data
+ edata, data = _get_sshstr(data)
+ _check_empty(data)
+
+ if (ciphername, kdfname) != (_NONE, _NONE):
+ ciphername = ciphername.tobytes()
+ if ciphername not in _SSH_CIPHERS:
+ raise UnsupportedAlgorithm("Unsupported cipher: %r" % ciphername)
+ if kdfname != _BCRYPT:
+ raise UnsupportedAlgorithm("Unsupported KDF: %r" % kdfname)
+ blklen = _SSH_CIPHERS[ciphername][3]
+ _check_block_size(edata, blklen)
+ salt, kbuf = _get_sshstr(kdfoptions)
+ rounds, kbuf = _get_u32(kbuf)
+ _check_empty(kbuf)
+ ciph = _init_cipher(
+ ciphername, password, salt.tobytes(), rounds, backend
+ )
+ edata = memoryview(ciph.decryptor().update(edata))
+ else:
+ blklen = 8
+ _check_block_size(edata, blklen)
+ ck1, edata = _get_u32(edata)
+ ck2, edata = _get_u32(edata)
+ if ck1 != ck2:
+ raise ValueError("Corrupt data: broken checksum")
+
+ # load per-key struct
+ key_type, edata = _get_sshstr(edata)
+ if key_type != pub_key_type:
+ raise ValueError("Corrupt data: key type mismatch")
+ private_key, edata = kformat.load_private(edata, pubfields, backend)
+ comment, edata = _get_sshstr(edata)
+
+ # yes, SSH does padding check *after* all other parsing is done.
+ # need to follow as it writes zero-byte padding too.
+ if edata != _PADDING[: len(edata)]:
+ raise ValueError("Corrupt data: invalid padding")
+
+ return private_key
+
+
+def serialize_ssh_private_key(private_key, password=None):
+ """Serialize private key with OpenSSH custom encoding."""
+ if password is not None:
+ utils._check_bytes("password", password)
+ if password and len(password) > _MAX_PASSWORD:
+ raise ValueError(
+ "Passwords longer than 72 bytes are not supported by "
+ "OpenSSH private key format"
+ )
+
+ if isinstance(private_key, ec.EllipticCurvePrivateKey):
+ key_type = _ecdsa_key_type(private_key.public_key())
+ elif isinstance(private_key, rsa.RSAPrivateKey):
+ key_type = _SSH_RSA
+ elif isinstance(private_key, dsa.DSAPrivateKey):
+ key_type = _SSH_DSA
+ elif isinstance(private_key, ed25519.Ed25519PrivateKey):
+ key_type = _SSH_ED25519
+ else:
+ raise ValueError("Unsupported key type")
+ kformat = _lookup_kformat(key_type)
+
+ # setup parameters
+ f_kdfoptions = _FragList()
+ if password:
+ ciphername = _DEFAULT_CIPHER
+ blklen = _SSH_CIPHERS[ciphername][3]
+ kdfname = _BCRYPT
+ rounds = _DEFAULT_ROUNDS
+ salt = os.urandom(16)
+ f_kdfoptions.put_sshstr(salt)
+ f_kdfoptions.put_u32(rounds)
+ backend = _get_backend(None)
+ ciph = _init_cipher(ciphername, password, salt, rounds, backend)
+ else:
+ ciphername = kdfname = _NONE
+ blklen = 8
+ ciph = None
+ nkeys = 1
+ checkval = os.urandom(4)
+ comment = b""
+
+ # encode public and private parts together
+ f_public_key = _FragList()
+ f_public_key.put_sshstr(key_type)
+ kformat.encode_public(private_key.public_key(), f_public_key)
+
+ f_secrets = _FragList([checkval, checkval])
+ f_secrets.put_sshstr(key_type)
+ kformat.encode_private(private_key, f_secrets)
+ f_secrets.put_sshstr(comment)
+ f_secrets.put_raw(_PADDING[: blklen - (f_secrets.size() % blklen)])
+
+ # top-level structure
+ f_main = _FragList()
+ f_main.put_raw(_SK_MAGIC)
+ f_main.put_sshstr(ciphername)
+ f_main.put_sshstr(kdfname)
+ f_main.put_sshstr(f_kdfoptions)
+ f_main.put_u32(nkeys)
+ f_main.put_sshstr(f_public_key)
+ f_main.put_sshstr(f_secrets)
+
+ # copy result into bytearray
+ slen = f_secrets.size()
+ mlen = f_main.size()
+ buf = memoryview(bytearray(mlen + blklen))
+ f_main.render(buf)
+ ofs = mlen - slen
+
+ # encrypt in-place
+ if ciph is not None:
+ ciph.encryptor().update_into(buf[ofs:mlen], buf[ofs:])
+
+ txt = _ssh_pem_encode(buf[:mlen])
+ buf[ofs:mlen] = bytearray(slen)
+ return txt
+
+
+def load_ssh_public_key(data, backend=None):
+ """Load public key from OpenSSH one-line format."""
+ backend = _get_backend(backend)
+ utils._check_byteslike("data", data)
+
+ m = _SSH_PUBKEY_RC.match(data)
+ if not m:
+ raise ValueError("Invalid line format")
+ key_type = orig_key_type = m.group(1)
+ key_body = m.group(2)
+ with_cert = False
+ if _CERT_SUFFIX == key_type[-len(_CERT_SUFFIX) :]:
+ with_cert = True
+ key_type = key_type[: -len(_CERT_SUFFIX)]
+ kformat = _lookup_kformat(key_type)
+
+ try:
+ data = memoryview(binascii.a2b_base64(key_body))
+ except (TypeError, binascii.Error):
+ raise ValueError("Invalid key format")
+
+ inner_key_type, data = _get_sshstr(data)
+ if inner_key_type != orig_key_type:
+ raise ValueError("Invalid key format")
+ if with_cert:
+ nonce, data = _get_sshstr(data)
+ public_key, data = kformat.load_public(key_type, data, backend)
+ if with_cert:
+ serial, data = _get_u64(data)
+ cctype, data = _get_u32(data)
+ key_id, data = _get_sshstr(data)
+ principals, data = _get_sshstr(data)
+ valid_after, data = _get_u64(data)
+ valid_before, data = _get_u64(data)
+ crit_options, data = _get_sshstr(data)
+ extensions, data = _get_sshstr(data)
+ reserved, data = _get_sshstr(data)
+ sig_key, data = _get_sshstr(data)
+ signature, data = _get_sshstr(data)
+ _check_empty(data)
+ return public_key
+
+
+def serialize_ssh_public_key(public_key):
+ """One-line public key format for OpenSSH"""
+ if isinstance(public_key, ec.EllipticCurvePublicKey):
+ key_type = _ecdsa_key_type(public_key)
+ elif isinstance(public_key, rsa.RSAPublicKey):
+ key_type = _SSH_RSA
+ elif isinstance(public_key, dsa.DSAPublicKey):
+ key_type = _SSH_DSA
+ elif isinstance(public_key, ed25519.Ed25519PublicKey):
+ key_type = _SSH_ED25519
+ else:
+ raise ValueError("Unsupported key type")
+ kformat = _lookup_kformat(key_type)
+
+ f_pub = _FragList()
+ f_pub.put_sshstr(key_type)
+ kformat.encode_public(public_key, f_pub)
+
+ pub = binascii.b2a_base64(f_pub.tobytes()).strip()
+ return b"".join([key_type, b" ", pub])
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000..f2b452b
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-38.pyc
new file mode 100644
index 0000000..637aaf7
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-38.pyc
new file mode 100644
index 0000000..a24aa9a
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/twofactor/__pycache__/utils.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/twofactor/__pycache__/utils.cpython-38.pyc
new file mode 100644
index 0000000..8180c23
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/twofactor/__pycache__/utils.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/twofactor/hotp.py b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/twofactor/hotp.py
index 4ad1bdc..c00eec0 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/twofactor/hotp.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/twofactor/hotp.py
@@ -8,9 +8,8 @@
import six
-from cryptography.exceptions import (
- UnsupportedAlgorithm, _Reasons
-)
+from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
+from cryptography.hazmat.backends import _get_backend
from cryptography.hazmat.backends.interfaces import HMACBackend
from cryptography.hazmat.primitives import constant_time, hmac
from cryptography.hazmat.primitives.hashes import SHA1, SHA256, SHA512
@@ -19,12 +18,14 @@
class HOTP(object):
- def __init__(self, key, length, algorithm, backend,
- enforce_key_length=True):
+ def __init__(
+ self, key, length, algorithm, backend=None, enforce_key_length=True
+ ):
+ backend = _get_backend(backend)
if not isinstance(backend, HMACBackend):
raise UnsupportedAlgorithm(
"Backend object does not implement HMACBackend.",
- _Reasons.BACKEND_MISSING_INTERFACE
+ _Reasons.BACKEND_MISSING_INTERFACE,
)
if len(key) < 16 and enforce_key_length is True:
@@ -59,10 +60,10 @@ def _dynamic_truncate(self, counter):
hmac_value = ctx.finalize()
offset = six.indexbytes(hmac_value, len(hmac_value) - 1) & 0b1111
- p = hmac_value[offset:offset + 4]
- return struct.unpack(">I", p)[0] & 0x7fffffff
+ p = hmac_value[offset : offset + 4]
+ return struct.unpack(">I", p)[0] & 0x7FFFFFFF
def get_provisioning_uri(self, account_name, counter, issuer):
- return _generate_uri(self, "hotp", account_name, issuer, [
- ("counter", int(counter)),
- ])
+ return _generate_uri(
+ self, "hotp", account_name, issuer, [("counter", int(counter))]
+ )
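
With backend=None now the default, HOTP objects can be built without touching the backends module. A minimal sketch:

    import os
    from cryptography.hazmat.primitives.hashes import SHA1
    from cryptography.hazmat.primitives.twofactor.hotp import HOTP

    key = os.urandom(20)                 # at least 16 bytes unless enforce_key_length=False
    hotp = HOTP(key, 6, SHA1())          # backend argument omitted
    token = hotp.generate(0)             # counter value 0
    hotp.verify(token, 0)                # raises InvalidToken on mismatch
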
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/twofactor/totp.py b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/twofactor/totp.py
index 499f282..d59539b 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/twofactor/totp.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/twofactor/totp.py
@@ -4,9 +4,8 @@
from __future__ import absolute_import, division, print_function
-from cryptography.exceptions import (
- UnsupportedAlgorithm, _Reasons
-)
+from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
+from cryptography.hazmat.backends import _get_backend
from cryptography.hazmat.backends.interfaces import HMACBackend
from cryptography.hazmat.primitives import constant_time
from cryptography.hazmat.primitives.twofactor import InvalidToken
@@ -15,12 +14,20 @@
class TOTP(object):
- def __init__(self, key, length, algorithm, time_step, backend,
- enforce_key_length=True):
+ def __init__(
+ self,
+ key,
+ length,
+ algorithm,
+ time_step,
+ backend=None,
+ enforce_key_length=True,
+ ):
+ backend = _get_backend(backend)
if not isinstance(backend, HMACBackend):
raise UnsupportedAlgorithm(
"Backend object does not implement HMACBackend.",
- _Reasons.BACKEND_MISSING_INTERFACE
+ _Reasons.BACKEND_MISSING_INTERFACE,
)
self._time_step = time_step
@@ -35,6 +42,10 @@ def verify(self, totp, time):
raise InvalidToken("Supplied TOTP value does not match.")
def get_provisioning_uri(self, account_name, issuer):
- return _generate_uri(self._hotp, "totp", account_name, issuer, [
- ("period", int(self._time_step)),
- ])
+ return _generate_uri(
+ self._hotp,
+ "totp",
+ account_name,
+ issuer,
+ [("period", int(self._time_step))],
+ )
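
TOTP follows the same pattern, adding a time step. A sketch; the account name and issuer in the provisioning URI are placeholders:

    import os
    import time
    from cryptography.hazmat.primitives.hashes import SHA1
    from cryptography.hazmat.primitives.twofactor.totp import TOTP

    key = os.urandom(20)
    totp = TOTP(key, 6, SHA1(), 30)      # 6 digits, 30-second time step, no backend needed
    now = time.time()
    token = totp.generate(now)
    totp.verify(token, now)
    print(totp.get_provisioning_uri("alice@example.com", "ExampleIssuer"))
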
diff --git a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/twofactor/utils.py b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/twofactor/utils.py
index 0ed8c4c..0afa1cc 100644
--- a/functions/source/CreateSSHKey/cryptography/hazmat/primitives/twofactor/utils.py
+++ b/functions/source/CreateSSHKey/cryptography/hazmat/primitives/twofactor/utils.py
@@ -23,8 +23,11 @@ def _generate_uri(hotp, type_name, account_name, issuer, extra_parameters):
uriparts = {
"type": type_name,
- "label": ("%s:%s" % (quote(issuer), quote(account_name)) if issuer
- else quote(account_name)),
+ "label": (
+ "%s:%s" % (quote(issuer), quote(account_name))
+ if issuer
+ else quote(account_name)
+ ),
"parameters": urlencode(parameters),
}
return "otpauth://{type}/{label}?{parameters}".format(**uriparts)
diff --git a/functions/source/CreateSSHKey/cryptography/utils.py b/functions/source/CreateSSHKey/cryptography/utils.py
index 02857c0..bdb3dbf 100644
--- a/functions/source/CreateSSHKey/cryptography/utils.py
+++ b/functions/source/CreateSSHKey/cryptography/utils.py
@@ -11,17 +11,29 @@
import warnings
+# We use a UserWarning subclass, instead of DeprecationWarning, because CPython
+# decided deprecation warnings should be invisible by default.
+class CryptographyDeprecationWarning(UserWarning):
+ pass
+
+
# Several APIs were deprecated with no specific end-of-life date because of the
# ubiquity of their use. They should not be removed until we agree on when that
# cycle ends.
-PersistentlyDeprecated = DeprecationWarning
-DeprecatedIn19 = DeprecationWarning
-DeprecatedIn21 = PendingDeprecationWarning
+PersistentlyDeprecated2017 = CryptographyDeprecationWarning
+PersistentlyDeprecated2019 = CryptographyDeprecationWarning
def _check_bytes(name, value):
if not isinstance(value, bytes):
- raise TypeError("{0} must be bytes".format(name))
+ raise TypeError("{} must be bytes".format(name))
+
+
+def _check_byteslike(name, value):
+ try:
+ memoryview(value)
+ except TypeError:
+ raise TypeError("{} must be bytes-like".format(name))
def read_only_property(name):
@@ -33,6 +45,7 @@ def register_decorator(klass):
verify_interface(iface, klass)
iface.register(klass)
return klass
+
return register_decorator
@@ -42,28 +55,33 @@ def register_decorator(klass):
verify_interface(iface, klass)
iface.register(klass)
return klass
+
return register_decorator
if hasattr(int, "from_bytes"):
int_from_bytes = int.from_bytes
else:
+
def int_from_bytes(data, byteorder, signed=False):
- assert byteorder == 'big'
+ assert byteorder == "big"
assert not signed
- # call bytes() on data to allow the use of bytearrays
- return int(bytes(data).encode('hex'), 16)
+ return int(binascii.hexlify(data), 16)
if hasattr(int, "to_bytes"):
+
def int_to_bytes(integer, length=None):
return integer.to_bytes(
- length or (integer.bit_length() + 7) // 8 or 1, 'big'
+ length or (integer.bit_length() + 7) // 8 or 1, "big"
)
+
+
else:
+
def int_to_bytes(integer, length=None):
- hex_string = '%x' % integer
+ hex_string = "%x" % integer
if length is None:
n = len(hex_string)
else:
@@ -85,7 +103,7 @@ def verify_interface(iface, klass):
for method in iface.__abstractmethods__:
if not hasattr(klass, method):
raise InterfaceNotImplemented(
- "{0} is missing a {1!r} method".format(klass, method)
+ "{} is missing a {!r} method".format(klass, method)
)
if isinstance(getattr(iface, method), abc.abstractproperty):
# Can't properly verify these yet.
@@ -94,21 +112,11 @@ def verify_interface(iface, klass):
actual = signature(getattr(klass, method))
if sig != actual:
raise InterfaceNotImplemented(
- "{0}.{1}'s signature differs from the expected. Expected: "
- "{2!r}. Received: {3!r}".format(
- klass, method, sig, actual
- )
+ "{}.{}'s signature differs from the expected. Expected: "
+ "{!r}. Received: {!r}".format(klass, method, sig, actual)
)
-if sys.version_info >= (2, 7):
- def bit_length(x):
- return x.bit_length()
-else:
- def bit_length(x):
- return len(bin(x)) - (2 + (x <= 0))
-
-
class _DeprecatedValue(object):
def __init__(self, value, message, warning_class):
self.value = value
@@ -149,7 +157,7 @@ def deprecated(value, module_name, message, warning_class):
def cached_property(func):
- cached_name = "_cached_{0}".format(func)
+ cached_name = "_cached_{}".format(func)
sentinel = object()
def inner(instance):
@@ -159,4 +167,5 @@ def inner(instance):
result = func(instance)
setattr(instance, cached_name, result)
return result
+
return property(inner)
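
The utils changes replace the old hex-string fallback with binascii and add _check_byteslike, which accepts anything memoryview() can wrap. A small sketch of the helpers touched above; the underscore-prefixed functions are internal and shown only to illustrate the new checks:

    from cryptography import utils

    data = utils.int_to_bytes(65537)                 # b"\x01\x00\x01"
    assert utils.int_from_bytes(data, "big") == 65537

    utils._check_bytes("tag", b"ok")                 # passes; non-bytes raises TypeError
    utils._check_byteslike("data", bytearray(b"ok")) # bytearray/memoryview now accepted
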
diff --git a/functions/source/CreateSSHKey/cryptography/x509/__init__.py b/functions/source/CreateSSHKey/cryptography/x509/__init__.py
index 224c9af..69630e4 100644
--- a/functions/source/CreateSSHKey/cryptography/x509/__init__.py
+++ b/functions/source/CreateSSHKey/cryptography/x509/__init__.py
@@ -6,38 +6,95 @@
from cryptography.x509 import certificate_transparency
from cryptography.x509.base import (
- Certificate, CertificateBuilder, CertificateRevocationList,
+ AttributeNotFound,
+ Certificate,
+ CertificateBuilder,
+ CertificateRevocationList,
CertificateRevocationListBuilder,
- CertificateSigningRequest, CertificateSigningRequestBuilder,
- InvalidVersion, RevokedCertificate, RevokedCertificateBuilder,
- Version, load_der_x509_certificate, load_der_x509_crl, load_der_x509_csr,
- load_pem_x509_certificate, load_pem_x509_crl, load_pem_x509_csr,
+ CertificateSigningRequest,
+ CertificateSigningRequestBuilder,
+ InvalidVersion,
+ RevokedCertificate,
+ RevokedCertificateBuilder,
+ Version,
+ load_der_x509_certificate,
+ load_der_x509_crl,
+ load_der_x509_csr,
+ load_pem_x509_certificate,
+ load_pem_x509_crl,
+ load_pem_x509_csr,
random_serial_number,
)
from cryptography.x509.extensions import (
- AccessDescription, AuthorityInformationAccess,
- AuthorityKeyIdentifier, BasicConstraints, CRLDistributionPoints,
- CRLNumber, CRLReason, CertificateIssuer, CertificatePolicies,
- DeltaCRLIndicator, DistributionPoint, DuplicateExtension, ExtendedKeyUsage,
- Extension, ExtensionNotFound, ExtensionType, Extensions, FreshestCRL,
- GeneralNames, InhibitAnyPolicy, InvalidityDate, IssuerAlternativeName,
- KeyUsage, NameConstraints, NoticeReference, OCSPNoCheck, PolicyConstraints,
- PolicyInformation, PrecertificateSignedCertificateTimestamps, ReasonFlags,
- SubjectAlternativeName, SubjectKeyIdentifier, TLSFeature, TLSFeatureType,
- UnrecognizedExtension, UserNotice
+ AccessDescription,
+ AuthorityInformationAccess,
+ AuthorityKeyIdentifier,
+ BasicConstraints,
+ CRLDistributionPoints,
+ CRLNumber,
+ CRLReason,
+ CertificateIssuer,
+ CertificatePolicies,
+ DeltaCRLIndicator,
+ DistributionPoint,
+ DuplicateExtension,
+ ExtendedKeyUsage,
+ Extension,
+ ExtensionNotFound,
+ ExtensionType,
+ Extensions,
+ FreshestCRL,
+ GeneralNames,
+ InhibitAnyPolicy,
+ InvalidityDate,
+ IssuerAlternativeName,
+ IssuingDistributionPoint,
+ KeyUsage,
+ NameConstraints,
+ NoticeReference,
+ OCSPNoCheck,
+ OCSPNonce,
+ PolicyConstraints,
+ PolicyInformation,
+ PrecertPoison,
+ PrecertificateSignedCertificateTimestamps,
+ ReasonFlags,
+ SignedCertificateTimestamps,
+ SubjectAlternativeName,
+ SubjectInformationAccess,
+ SubjectKeyIdentifier,
+ TLSFeature,
+ TLSFeatureType,
+ UnrecognizedExtension,
+ UserNotice,
)
from cryptography.x509.general_name import (
- DNSName, DirectoryName, GeneralName, IPAddress, OtherName, RFC822Name,
- RegisteredID, UniformResourceIdentifier, UnsupportedGeneralNameType,
- _GENERAL_NAMES
+ DNSName,
+ DirectoryName,
+ GeneralName,
+ IPAddress,
+ OtherName,
+ RFC822Name,
+ RegisteredID,
+ UniformResourceIdentifier,
+ UnsupportedGeneralNameType,
+ _GENERAL_NAMES,
)
from cryptography.x509.name import (
- Name, NameAttribute, RelativeDistinguishedName
+ Name,
+ NameAttribute,
+ RelativeDistinguishedName,
)
from cryptography.x509.oid import (
- AuthorityInformationAccessOID, CRLEntryExtensionOID,
- CertificatePoliciesOID, ExtendedKeyUsageOID, ExtensionOID, NameOID,
- ObjectIdentifier, SignatureAlgorithmOID, _SIG_OIDS_TO_HASH
+ AuthorityInformationAccessOID,
+ CRLEntryExtensionOID,
+ CertificatePoliciesOID,
+ ExtendedKeyUsageOID,
+ ExtensionOID,
+ NameOID,
+ ObjectIdentifier,
+ SignatureAlgorithmOID,
+ _SIG_OIDS_TO_HASH,
)
@@ -74,6 +131,7 @@
OID_RSA_WITH_SHA256 = SignatureAlgorithmOID.RSA_WITH_SHA256
OID_RSA_WITH_SHA384 = SignatureAlgorithmOID.RSA_WITH_SHA384
OID_RSA_WITH_SHA512 = SignatureAlgorithmOID.RSA_WITH_SHA512
+OID_RSASSA_PSS = SignatureAlgorithmOID.RSASSA_PSS
OID_COMMON_NAME = NameOID.COMMON_NAME
OID_COUNTRY_NAME = NameOID.COUNTRY_NAME
@@ -118,6 +176,7 @@
"load_pem_x509_crl",
"load_der_x509_crl",
"random_serial_number",
+ "AttributeNotFound",
"InvalidVersion",
"DeltaCRLIndicator",
"DuplicateExtension",
@@ -132,6 +191,7 @@
"Extension",
"ExtendedKeyUsage",
"FreshestCRL",
+ "IssuingDistributionPoint",
"TLSFeature",
"TLSFeatureType",
"OCSPNoCheck",
@@ -139,6 +199,7 @@
"CRLNumber",
"KeyUsage",
"AuthorityInformationAccess",
+ "SubjectInformationAccess",
"AccessDescription",
"CertificatePolicies",
"PolicyInformation",
@@ -181,4 +242,7 @@
"UnrecognizedExtension",
"PolicyConstraints",
"PrecertificateSignedCertificateTimestamps",
+ "PrecertPoison",
+ "OCSPNonce",
+ "SignedCertificateTimestamps",
]
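
Note: the rewritten x509/__init__.py tracks the newer vendored cryptography release: imports are reformatted one name per line and several names are newly re-exported (AttributeNotFound, IssuingDistributionPoint, SubjectInformationAccess, PrecertPoison, OCSPNonce, SignedCertificateTimestamps, plus OID_RSASSA_PSS). A minimal sketch of what the new re-exports look like to callers, assuming the vendored package is importable as cryptography:

    from cryptography import x509
    from cryptography.x509.oid import ExtensionOID

    # Extension types added in this upgrade are reachable straight from x509.
    assert x509.SubjectInformationAccess.oid == ExtensionOID.SUBJECT_INFORMATION_ACCESS
    assert x509.IssuingDistributionPoint.oid == ExtensionOID.ISSUING_DISTRIBUTION_POINT
    assert x509.PrecertPoison().oid == ExtensionOID.PRECERT_POISON
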
diff --git a/functions/source/CreateSSHKey/cryptography/x509/__pycache__/__init__.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/x509/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000..8620c43
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/x509/__pycache__/__init__.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/x509/__pycache__/base.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/x509/__pycache__/base.cpython-38.pyc
new file mode 100644
index 0000000..b3f3c64
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/x509/__pycache__/base.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/x509/__pycache__/certificate_transparency.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/x509/__pycache__/certificate_transparency.cpython-38.pyc
new file mode 100644
index 0000000..ea54cc4
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/x509/__pycache__/certificate_transparency.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/x509/__pycache__/extensions.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/x509/__pycache__/extensions.cpython-38.pyc
new file mode 100644
index 0000000..04a819d
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/x509/__pycache__/extensions.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/x509/__pycache__/general_name.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/x509/__pycache__/general_name.cpython-38.pyc
new file mode 100644
index 0000000..7c473c5
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/x509/__pycache__/general_name.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/x509/__pycache__/name.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/x509/__pycache__/name.cpython-38.pyc
new file mode 100644
index 0000000..2e1cc81
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/x509/__pycache__/name.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/x509/__pycache__/ocsp.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/x509/__pycache__/ocsp.cpython-38.pyc
new file mode 100644
index 0000000..fb6a4e8
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/x509/__pycache__/ocsp.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/x509/__pycache__/oid.cpython-38.pyc b/functions/source/CreateSSHKey/cryptography/x509/__pycache__/oid.cpython-38.pyc
new file mode 100644
index 0000000..e0bfbed
Binary files /dev/null and b/functions/source/CreateSSHKey/cryptography/x509/__pycache__/oid.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/cryptography/x509/base.py b/functions/source/CreateSSHKey/cryptography/x509/base.py
index 2c96c5b..f3bc872 100644
--- a/functions/source/CreateSSHKey/cryptography/x509/base.py
+++ b/functions/source/CreateSSHKey/cryptography/x509/base.py
@@ -12,12 +12,40 @@
import six
from cryptography import utils
-from cryptography.hazmat.primitives.asymmetric import dsa, ec, rsa
+from cryptography.hazmat.backends import _get_backend
+from cryptography.hazmat.primitives.asymmetric import (
+ dsa,
+ ec,
+ ed25519,
+ ed448,
+ rsa,
+)
from cryptography.x509.extensions import Extension, ExtensionType
from cryptography.x509.name import Name
+from cryptography.x509.oid import ObjectIdentifier
-_UNIX_EPOCH = datetime.datetime(1970, 1, 1)
+_EARLIEST_UTC_TIME = datetime.datetime(1950, 1, 1)
+
+
+class AttributeNotFound(Exception):
+ def __init__(self, msg, oid):
+ super(AttributeNotFound, self).__init__(msg)
+ self.oid = oid
+
+
+def _reject_duplicate_extension(extension, extensions):
+ # This is quadratic in the number of extensions
+ for e in extensions:
+ if e.oid == extension.oid:
+ raise ValueError("This extension has already been set.")
+
+
+def _reject_duplicate_attribute(oid, attributes):
+ # This is quadratic in the number of attributes
+ for attr_oid, _ in attributes:
+ if attr_oid == oid:
+ raise ValueError("This attribute has already been set.")
def _convert_to_naive_utc_time(time):
@@ -39,27 +67,33 @@ class Version(Enum):
v3 = 2
-def load_pem_x509_certificate(data, backend):
+def load_pem_x509_certificate(data, backend=None):
+ backend = _get_backend(backend)
return backend.load_pem_x509_certificate(data)
-def load_der_x509_certificate(data, backend):
+def load_der_x509_certificate(data, backend=None):
+ backend = _get_backend(backend)
return backend.load_der_x509_certificate(data)
-def load_pem_x509_csr(data, backend):
+def load_pem_x509_csr(data, backend=None):
+ backend = _get_backend(backend)
return backend.load_pem_x509_csr(data)
-def load_der_x509_csr(data, backend):
+def load_der_x509_csr(data, backend=None):
+ backend = _get_backend(backend)
return backend.load_der_x509_csr(data)
-def load_pem_x509_crl(data, backend):
+def load_pem_x509_crl(data, backend=None):
+ backend = _get_backend(backend)
return backend.load_pem_x509_crl(data)
-def load_der_x509_crl(data, backend):
+def load_der_x509_crl(data, backend=None):
+ backend = _get_backend(backend)
return backend.load_der_x509_crl(data)
@@ -189,6 +223,13 @@ def fingerprint(self, algorithm):
Returns bytes using digest passed.
"""
+ @abc.abstractmethod
+ def get_revoked_certificate_by_serial_number(self, serial_number):
+ """
+ Returns an instance of RevokedCertificate or None if the serial_number
+ is not in the CRL.
+ """
+
@abc.abstractproperty
def signature_hash_algorithm(self):
"""
@@ -250,6 +291,24 @@ def __ne__(self, other):
Checks not equal.
"""
+ @abc.abstractmethod
+ def __len__(self):
+ """
+ Number of revoked certificates in the CRL.
+ """
+
+ @abc.abstractmethod
+ def __getitem__(self, idx):
+ """
+ Returns a revoked certificate (or slice of revoked certificates).
+ """
+
+ @abc.abstractmethod
+ def __iter__(self):
+ """
+ Iterator over the revoked certificates
+ """
+
@abc.abstractmethod
def is_signature_valid(self, public_key):
"""
@@ -333,6 +392,12 @@ def is_signature_valid(self):
Verifies signature of signing request.
"""
+ @abc.abstractproperty
+ def get_attribute_for_oid(self):
+ """
+ Get the attribute value for a given OID.
+ """
+
@six.add_metaclass(abc.ABCMeta)
class RevokedCertificate(object):
@@ -356,22 +421,25 @@ def extensions(self):
class CertificateSigningRequestBuilder(object):
- def __init__(self, subject_name=None, extensions=[]):
+ def __init__(self, subject_name=None, extensions=[], attributes=[]):
"""
Creates an empty X.509 certificate request (v1).
"""
self._subject_name = subject_name
self._extensions = extensions
+ self._attributes = attributes
def subject_name(self, name):
"""
Sets the certificate requestor's distinguished name.
"""
if not isinstance(name, Name):
- raise TypeError('Expecting x509.Name object.')
+ raise TypeError("Expecting x509.Name object.")
if self._subject_name is not None:
- raise ValueError('The subject name may only be set once.')
- return CertificateSigningRequestBuilder(name, self._extensions)
+ raise ValueError("The subject name may only be set once.")
+ return CertificateSigningRequestBuilder(
+ name, self._extensions, self._attributes
+ )
def add_extension(self, extension, critical):
"""
@@ -381,28 +449,53 @@ def add_extension(self, extension, critical):
raise TypeError("extension must be an ExtensionType")
extension = Extension(extension.oid, critical, extension)
+ _reject_duplicate_extension(extension, self._extensions)
+
+ return CertificateSigningRequestBuilder(
+ self._subject_name,
+ self._extensions + [extension],
+ self._attributes,
+ )
+
+ def add_attribute(self, oid, value):
+ """
+ Adds an X.509 attribute with an OID and associated value.
+ """
+ if not isinstance(oid, ObjectIdentifier):
+ raise TypeError("oid must be an ObjectIdentifier")
+
+ if not isinstance(value, bytes):
+ raise TypeError("value must be bytes")
+
+ _reject_duplicate_attribute(oid, self._attributes)
- # TODO: This is quadratic in the number of extensions
- for e in self._extensions:
- if e.oid == extension.oid:
- raise ValueError('This extension has already been set.')
return CertificateSigningRequestBuilder(
- self._subject_name, self._extensions + [extension]
+ self._subject_name,
+ self._extensions,
+ self._attributes + [(oid, value)],
)
- def sign(self, private_key, algorithm, backend):
+ def sign(self, private_key, algorithm, backend=None):
"""
Signs the request using the requestor's private key.
"""
+ backend = _get_backend(backend)
if self._subject_name is None:
raise ValueError("A CertificateSigningRequest must have a subject")
return backend.create_x509_csr(self, private_key, algorithm)
class CertificateBuilder(object):
- def __init__(self, issuer_name=None, subject_name=None,
- public_key=None, serial_number=None, not_valid_before=None,
- not_valid_after=None, extensions=[]):
+ def __init__(
+ self,
+ issuer_name=None,
+ subject_name=None,
+ public_key=None,
+ serial_number=None,
+ not_valid_before=None,
+ not_valid_after=None,
+ extensions=[],
+ ):
self._version = Version.v3
self._issuer_name = issuer_name
self._subject_name = subject_name
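
Note: CertificateSigningRequestBuilder gains add_attribute(oid, value), which carries (ObjectIdentifier, bytes) pairs through to create_x509_csr with the same duplicate rejection as extensions, and sign() now defaults its backend. A minimal sketch, using the PKCS#9 challengePassword OID (1.2.840.113549.1.9.7) as the requested attribute; key generation still receives default_backend() explicitly, since only the x509 entry points shown in this diff are known to default it:

    from cryptography import x509
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.asymmetric import rsa
    from cryptography.x509.oid import NameOID, ObjectIdentifier

    CHALLENGE_PASSWORD = ObjectIdentifier("1.2.840.113549.1.9.7")  # PKCS#9 challengePassword

    key = rsa.generate_private_key(65537, 2048, default_backend())
    csr = (
        x509.CertificateSigningRequestBuilder()
        .subject_name(x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, u"example.com")]))
        .add_attribute(CHALLENGE_PASSWORD, b"shared-secret")   # new in this version
        .sign(key, hashes.SHA256())                            # backend resolved internally
    )
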
@@ -417,13 +510,17 @@ def issuer_name(self, name):
Sets the CA's distinguished name.
"""
if not isinstance(name, Name):
- raise TypeError('Expecting x509.Name object.')
+ raise TypeError("Expecting x509.Name object.")
if self._issuer_name is not None:
- raise ValueError('The issuer name may only be set once.')
+ raise ValueError("The issuer name may only be set once.")
return CertificateBuilder(
- name, self._subject_name, self._public_key,
- self._serial_number, self._not_valid_before,
- self._not_valid_after, self._extensions
+ name,
+ self._subject_name,
+ self._public_key,
+ self._serial_number,
+ self._not_valid_before,
+ self._not_valid_after,
+ self._extensions,
)
def subject_name(self, name):
@@ -431,29 +528,48 @@ def subject_name(self, name):
Sets the requestor's distinguished name.
"""
if not isinstance(name, Name):
- raise TypeError('Expecting x509.Name object.')
+ raise TypeError("Expecting x509.Name object.")
if self._subject_name is not None:
- raise ValueError('The subject name may only be set once.')
+ raise ValueError("The subject name may only be set once.")
return CertificateBuilder(
- self._issuer_name, name, self._public_key,
- self._serial_number, self._not_valid_before,
- self._not_valid_after, self._extensions
+ self._issuer_name,
+ name,
+ self._public_key,
+ self._serial_number,
+ self._not_valid_before,
+ self._not_valid_after,
+ self._extensions,
)
def public_key(self, key):
"""
Sets the requestor's public key (as found in the signing request).
"""
- if not isinstance(key, (dsa.DSAPublicKey, rsa.RSAPublicKey,
- ec.EllipticCurvePublicKey)):
- raise TypeError('Expecting one of DSAPublicKey, RSAPublicKey,'
- ' or EllipticCurvePublicKey.')
+ if not isinstance(
+ key,
+ (
+ dsa.DSAPublicKey,
+ rsa.RSAPublicKey,
+ ec.EllipticCurvePublicKey,
+ ed25519.Ed25519PublicKey,
+ ed448.Ed448PublicKey,
+ ),
+ ):
+ raise TypeError(
+ "Expecting one of DSAPublicKey, RSAPublicKey,"
+ " EllipticCurvePublicKey, Ed25519PublicKey or"
+ " Ed448PublicKey."
+ )
if self._public_key is not None:
- raise ValueError('The public key may only be set once.')
+ raise ValueError("The public key may only be set once.")
return CertificateBuilder(
- self._issuer_name, self._subject_name, key,
- self._serial_number, self._not_valid_before,
- self._not_valid_after, self._extensions
+ self._issuer_name,
+ self._subject_name,
+ key,
+ self._serial_number,
+ self._not_valid_before,
+ self._not_valid_after,
+ self._extensions,
)
def serial_number(self, number):
@@ -461,21 +577,26 @@ def serial_number(self, number):
Sets the certificate serial number.
"""
if not isinstance(number, six.integer_types):
- raise TypeError('Serial number must be of integral type.')
+ raise TypeError("Serial number must be of integral type.")
if self._serial_number is not None:
- raise ValueError('The serial number may only be set once.')
+ raise ValueError("The serial number may only be set once.")
if number <= 0:
- raise ValueError('The serial number should be positive.')
+ raise ValueError("The serial number should be positive.")
# ASN.1 integers are always signed, so most significant bit must be
# zero.
- if utils.bit_length(number) >= 160: # As defined in RFC 5280
- raise ValueError('The serial number should not be more than 159 '
- 'bits.')
+ if number.bit_length() >= 160: # As defined in RFC 5280
+ raise ValueError(
+ "The serial number should not be more than 159 " "bits."
+ )
return CertificateBuilder(
- self._issuer_name, self._subject_name,
- self._public_key, number, self._not_valid_before,
- self._not_valid_after, self._extensions
+ self._issuer_name,
+ self._subject_name,
+ self._public_key,
+ number,
+ self._not_valid_before,
+ self._not_valid_after,
+ self._extensions,
)
def not_valid_before(self, time):
@@ -483,22 +604,28 @@ def not_valid_before(self, time):
Sets the certificate activation time.
"""
if not isinstance(time, datetime.datetime):
- raise TypeError('Expecting datetime object.')
+ raise TypeError("Expecting datetime object.")
if self._not_valid_before is not None:
- raise ValueError('The not valid before may only be set once.')
+ raise ValueError("The not valid before may only be set once.")
time = _convert_to_naive_utc_time(time)
- if time <= _UNIX_EPOCH:
- raise ValueError('The not valid before date must be after the unix'
- ' epoch (1970 January 1).')
+ if time < _EARLIEST_UTC_TIME:
+ raise ValueError(
+ "The not valid before date must be on or after"
+ " 1950 January 1)."
+ )
if self._not_valid_after is not None and time > self._not_valid_after:
raise ValueError(
- 'The not valid before date must be before the not valid after '
- 'date.'
+ "The not valid before date must be before the not valid after "
+ "date."
)
return CertificateBuilder(
- self._issuer_name, self._subject_name,
- self._public_key, self._serial_number, time,
- self._not_valid_after, self._extensions
+ self._issuer_name,
+ self._subject_name,
+ self._public_key,
+ self._serial_number,
+ time,
+ self._not_valid_after,
+ self._extensions,
)
def not_valid_after(self, time):
@@ -506,23 +633,31 @@ def not_valid_after(self, time):
Sets the certificate expiration time.
"""
if not isinstance(time, datetime.datetime):
- raise TypeError('Expecting datetime object.')
+ raise TypeError("Expecting datetime object.")
if self._not_valid_after is not None:
- raise ValueError('The not valid after may only be set once.')
+ raise ValueError("The not valid after may only be set once.")
time = _convert_to_naive_utc_time(time)
- if time <= _UNIX_EPOCH:
- raise ValueError('The not valid after date must be after the unix'
- ' epoch (1970 January 1).')
- if (self._not_valid_before is not None and
- time < self._not_valid_before):
+ if time < _EARLIEST_UTC_TIME:
raise ValueError(
- 'The not valid after date must be after the not valid before '
- 'date.'
+ "The not valid after date must be on or after"
+ " 1950 January 1."
+ )
+ if (
+ self._not_valid_before is not None
+ and time < self._not_valid_before
+ ):
+ raise ValueError(
+ "The not valid after date must be after the not valid before "
+ "date."
)
return CertificateBuilder(
- self._issuer_name, self._subject_name,
- self._public_key, self._serial_number, self._not_valid_before,
- time, self._extensions
+ self._issuer_name,
+ self._subject_name,
+ self._public_key,
+ self._serial_number,
+ self._not_valid_before,
+ time,
+ self._extensions,
)
def add_extension(self, extension, critical):
@@ -533,22 +668,23 @@ def add_extension(self, extension, critical):
raise TypeError("extension must be an ExtensionType")
extension = Extension(extension.oid, critical, extension)
-
- # TODO: This is quadratic in the number of extensions
- for e in self._extensions:
- if e.oid == extension.oid:
- raise ValueError('This extension has already been set.')
+ _reject_duplicate_extension(extension, self._extensions)
return CertificateBuilder(
- self._issuer_name, self._subject_name,
- self._public_key, self._serial_number, self._not_valid_before,
- self._not_valid_after, self._extensions + [extension]
+ self._issuer_name,
+ self._subject_name,
+ self._public_key,
+ self._serial_number,
+ self._not_valid_before,
+ self._not_valid_after,
+ self._extensions + [extension],
)
- def sign(self, private_key, algorithm, backend):
+ def sign(self, private_key, algorithm, backend=None):
"""
Signs the certificate using the CA's private key.
"""
+ backend = _get_backend(backend)
if self._subject_name is None:
raise ValueError("A certificate must have a subject name")
@@ -571,8 +707,14 @@ def sign(self, private_key, algorithm, backend):
class CertificateRevocationListBuilder(object):
- def __init__(self, issuer_name=None, last_update=None, next_update=None,
- extensions=[], revoked_certificates=[]):
+ def __init__(
+ self,
+ issuer_name=None,
+ last_update=None,
+ next_update=None,
+ extensions=[],
+ revoked_certificates=[],
+ ):
self._issuer_name = issuer_name
self._last_update = last_update
self._next_update = next_update
@@ -581,48 +723,59 @@ def __init__(self, issuer_name=None, last_update=None, next_update=None,
def issuer_name(self, issuer_name):
if not isinstance(issuer_name, Name):
- raise TypeError('Expecting x509.Name object.')
+ raise TypeError("Expecting x509.Name object.")
if self._issuer_name is not None:
- raise ValueError('The issuer name may only be set once.')
+ raise ValueError("The issuer name may only be set once.")
return CertificateRevocationListBuilder(
- issuer_name, self._last_update, self._next_update,
- self._extensions, self._revoked_certificates
+ issuer_name,
+ self._last_update,
+ self._next_update,
+ self._extensions,
+ self._revoked_certificates,
)
def last_update(self, last_update):
if not isinstance(last_update, datetime.datetime):
- raise TypeError('Expecting datetime object.')
+ raise TypeError("Expecting datetime object.")
if self._last_update is not None:
- raise ValueError('Last update may only be set once.')
+ raise ValueError("Last update may only be set once.")
last_update = _convert_to_naive_utc_time(last_update)
- if last_update <= _UNIX_EPOCH:
- raise ValueError('The last update date must be after the unix'
- ' epoch (1970 January 1).')
+ if last_update < _EARLIEST_UTC_TIME:
+ raise ValueError(
+ "The last update date must be on or after" " 1950 January 1."
+ )
if self._next_update is not None and last_update > self._next_update:
raise ValueError(
- 'The last update date must be before the next update date.'
+ "The last update date must be before the next update date."
)
return CertificateRevocationListBuilder(
- self._issuer_name, last_update, self._next_update,
- self._extensions, self._revoked_certificates
+ self._issuer_name,
+ last_update,
+ self._next_update,
+ self._extensions,
+ self._revoked_certificates,
)
def next_update(self, next_update):
if not isinstance(next_update, datetime.datetime):
- raise TypeError('Expecting datetime object.')
+ raise TypeError("Expecting datetime object.")
if self._next_update is not None:
- raise ValueError('Last update may only be set once.')
+ raise ValueError("Last update may only be set once.")
next_update = _convert_to_naive_utc_time(next_update)
- if next_update <= _UNIX_EPOCH:
- raise ValueError('The last update date must be after the unix'
- ' epoch (1970 January 1).')
+ if next_update < _EARLIEST_UTC_TIME:
+ raise ValueError(
+ "The last update date must be on or after" " 1950 January 1."
+ )
if self._last_update is not None and next_update < self._last_update:
raise ValueError(
- 'The next update date must be after the last update date.'
+ "The next update date must be after the last update date."
)
return CertificateRevocationListBuilder(
- self._issuer_name, self._last_update, next_update,
- self._extensions, self._revoked_certificates
+ self._issuer_name,
+ self._last_update,
+ next_update,
+ self._extensions,
+ self._revoked_certificates,
)
def add_extension(self, extension, critical):
@@ -633,14 +786,13 @@ def add_extension(self, extension, critical):
raise TypeError("extension must be an ExtensionType")
extension = Extension(extension.oid, critical, extension)
-
- # TODO: This is quadratic in the number of extensions
- for e in self._extensions:
- if e.oid == extension.oid:
- raise ValueError('This extension has already been set.')
+ _reject_duplicate_extension(extension, self._extensions)
return CertificateRevocationListBuilder(
- self._issuer_name, self._last_update, self._next_update,
- self._extensions + [extension], self._revoked_certificates
+ self._issuer_name,
+ self._last_update,
+ self._next_update,
+ self._extensions + [extension],
+ self._revoked_certificates,
)
def add_revoked_certificate(self, revoked_certificate):
@@ -651,12 +803,15 @@ def add_revoked_certificate(self, revoked_certificate):
raise TypeError("Must be an instance of RevokedCertificate")
return CertificateRevocationListBuilder(
- self._issuer_name, self._last_update,
- self._next_update, self._extensions,
- self._revoked_certificates + [revoked_certificate]
+ self._issuer_name,
+ self._last_update,
+ self._next_update,
+ self._extensions,
+ self._revoked_certificates + [revoked_certificate],
)
- def sign(self, private_key, algorithm, backend):
+ def sign(self, private_key, algorithm, backend=None):
+ backend = _get_backend(backend)
if self._issuer_name is None:
raise ValueError("A CRL must have an issuer name")
@@ -670,38 +825,41 @@ def sign(self, private_key, algorithm, backend):
class RevokedCertificateBuilder(object):
- def __init__(self, serial_number=None, revocation_date=None,
- extensions=[]):
+ def __init__(
+ self, serial_number=None, revocation_date=None, extensions=[]
+ ):
self._serial_number = serial_number
self._revocation_date = revocation_date
self._extensions = extensions
def serial_number(self, number):
if not isinstance(number, six.integer_types):
- raise TypeError('Serial number must be of integral type.')
+ raise TypeError("Serial number must be of integral type.")
if self._serial_number is not None:
- raise ValueError('The serial number may only be set once.')
+ raise ValueError("The serial number may only be set once.")
if number <= 0:
- raise ValueError('The serial number should be positive')
+ raise ValueError("The serial number should be positive")
# ASN.1 integers are always signed, so most significant bit must be
# zero.
- if utils.bit_length(number) >= 160: # As defined in RFC 5280
- raise ValueError('The serial number should not be more than 159 '
- 'bits.')
+ if number.bit_length() >= 160: # As defined in RFC 5280
+ raise ValueError(
+ "The serial number should not be more than 159 " "bits."
+ )
return RevokedCertificateBuilder(
number, self._revocation_date, self._extensions
)
def revocation_date(self, time):
if not isinstance(time, datetime.datetime):
- raise TypeError('Expecting datetime object.')
+ raise TypeError("Expecting datetime object.")
if self._revocation_date is not None:
- raise ValueError('The revocation date may only be set once.')
+ raise ValueError("The revocation date may only be set once.")
time = _convert_to_naive_utc_time(time)
- if time <= _UNIX_EPOCH:
- raise ValueError('The revocation date must be after the unix'
- ' epoch (1970 January 1).')
+ if time < _EARLIEST_UTC_TIME:
+ raise ValueError(
+ "The revocation date must be on or after" " 1950 January 1."
+ )
return RevokedCertificateBuilder(
self._serial_number, time, self._extensions
)
@@ -711,17 +869,15 @@ def add_extension(self, extension, critical):
raise TypeError("extension must be an ExtensionType")
extension = Extension(extension.oid, critical, extension)
-
- # TODO: This is quadratic in the number of extensions
- for e in self._extensions:
- if e.oid == extension.oid:
- raise ValueError('This extension has already been set.')
+ _reject_duplicate_extension(extension, self._extensions)
return RevokedCertificateBuilder(
- self._serial_number, self._revocation_date,
- self._extensions + [extension]
+ self._serial_number,
+ self._revocation_date,
+ self._extensions + [extension],
)
- def build(self, backend):
+ def build(self, backend=None):
+ backend = _get_backend(backend)
if self._serial_number is None:
raise ValueError("A revoked certificate must have a serial number")
if self._revocation_date is None:
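
Note: taken together, the base.py changes make backend optional on every builder's sign()/build(), swap utils.bit_length(number) for number.bit_length(), and relax the earliest allowed validity/revocation date from the Unix epoch to 1950-01-01 (the start of the UTCTime range in RFC 5280). A minimal self-signed certificate sketch exercising those paths; quickstart.local is a placeholder name:

    import datetime

    from cryptography import x509
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.asymmetric import rsa
    from cryptography.x509.oid import NameOID

    key = rsa.generate_private_key(65537, 2048, default_backend())
    name = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, u"quickstart.local")])
    now = datetime.datetime.utcnow()
    cert = (
        x509.CertificateBuilder()
        .subject_name(name)
        .issuer_name(name)                               # self-signed
        .public_key(key.public_key())
        .serial_number(x509.random_serial_number())      # stays under the 159-bit cap
        .not_valid_before(now)                           # must be on or after 1950-01-01
        .not_valid_after(now + datetime.timedelta(days=30))
        .sign(key, hashes.SHA256())                      # backend defaulted
    )
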
diff --git a/functions/source/CreateSSHKey/cryptography/x509/extensions.py b/functions/source/CreateSSHKey/cryptography/x509/extensions.py
index eb4b927..130ba69 100644
--- a/functions/source/CreateSSHKey/cryptography/x509/extensions.py
+++ b/functions/source/CreateSSHKey/cryptography/x509/extensions.py
@@ -10,21 +10,28 @@
import ipaddress
from enum import Enum
-from asn1crypto.keys import PublicKeyInfo
-
import six
from cryptography import utils
+from cryptography.hazmat._der import (
+ BIT_STRING,
+ DERReader,
+ OBJECT_IDENTIFIER,
+ SEQUENCE,
+)
from cryptography.hazmat.primitives import constant_time, serialization
from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePublicKey
from cryptography.hazmat.primitives.asymmetric.rsa import RSAPublicKey
from cryptography.x509.certificate_transparency import (
- SignedCertificateTimestamp
+ SignedCertificateTimestamp,
)
from cryptography.x509.general_name import GeneralName, IPAddress, OtherName
from cryptography.x509.name import RelativeDistinguishedName
from cryptography.x509.oid import (
- CRLEntryExtensionOID, ExtensionOID, ObjectIdentifier
+ CRLEntryExtensionOID,
+ ExtensionOID,
+ OCSPExtensionOID,
+ ObjectIdentifier,
)
@@ -35,19 +42,52 @@ def _key_identifier_from_public_key(public_key):
serialization.PublicFormat.PKCS1,
)
elif isinstance(public_key, EllipticCurvePublicKey):
- data = public_key.public_numbers().encode_point()
+ data = public_key.public_bytes(
+ serialization.Encoding.X962,
+ serialization.PublicFormat.UncompressedPoint,
+ )
else:
# This is a very slow way to do this.
serialized = public_key.public_bytes(
serialization.Encoding.DER,
- serialization.PublicFormat.SubjectPublicKeyInfo
+ serialization.PublicFormat.SubjectPublicKeyInfo,
)
- data = six.binary_type(PublicKeyInfo.load(serialized)['public_key'])
+ reader = DERReader(serialized)
+ with reader.read_single_element(SEQUENCE) as public_key_info:
+ algorithm = public_key_info.read_element(SEQUENCE)
+ public_key = public_key_info.read_element(BIT_STRING)
+
+ # Double-check the algorithm structure.
+ with algorithm:
+ algorithm.read_element(OBJECT_IDENTIFIER)
+ if not algorithm.is_empty():
+ # Skip the optional parameters field.
+ algorithm.read_any_element()
+
+ # BIT STRING contents begin with the number of padding bytes added. It
+ # must be zero for SubjectPublicKeyInfo structures.
+ if public_key.read_byte() != 0:
+ raise ValueError("Invalid public key encoding")
+
+ data = public_key.data
return hashlib.sha1(data).digest()
+def _make_sequence_methods(field_name):
+ def len_method(self):
+ return len(getattr(self, field_name))
+
+ def iter_method(self):
+ return iter(getattr(self, field_name))
+
+ def getitem_method(self, idx):
+ return getattr(self, field_name)[idx]
+
+ return len_method, iter_method, getitem_method
+
+
class DuplicateExtension(Exception):
def __init__(self, msg, oid):
super(DuplicateExtension, self).__init__(msg)
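
Note: two helpers appear above: the SubjectPublicKeyInfo hash for key identifiers is now computed with the library's internal DERReader instead of asn1crypto, and _make_sequence_methods builds the __len__/__iter__/__getitem__ trio from a single attribute name, replacing the hand-written copies on each sequence-like extension class later in this file. A minimal sketch of the helper's behaviour; it is a private name, imported here only for illustration:

    from cryptography.x509.extensions import _make_sequence_methods

    class Bag(object):
        def __init__(self, items):
            self._items = list(items)

        # Same idiom the extension classes in this file use for their lists.
        __len__, __iter__, __getitem__ = _make_sequence_methods("_items")

    bag = Bag(["a", "b"])
    assert len(bag) == 2 and bag[0] == "a" and list(bag) == ["a", "b"]
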
@@ -78,7 +118,7 @@ def get_extension_for_oid(self, oid):
if ext.oid == oid:
return ext
- raise ExtensionNotFound("No {0} extension was found".format(oid), oid)
+ raise ExtensionNotFound("No {} extension was found".format(oid), oid)
def get_extension_for_class(self, extclass):
if extclass is UnrecognizedExtension:
@@ -93,22 +133,13 @@ def get_extension_for_class(self, extclass):
return ext
raise ExtensionNotFound(
- "No {0} extension was found".format(extclass), extclass.oid
+ "No {} extension was found".format(extclass), extclass.oid
)
- def __iter__(self):
- return iter(self._extensions)
-
- def __len__(self):
- return len(self._extensions)
-
- def __getitem__(self, idx):
- return self._extensions[idx]
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_extensions")
def __repr__(self):
- return (
-            "<Extensions({0})>".format(self._extensions)
- )
+        return "<Extensions({})>".format(self._extensions)
@utils.register_interface(ExtensionType)
@@ -134,7 +165,7 @@ def __hash__(self):
return hash(self.crl_number)
def __repr__(self):
-        return "<CRLNumber({0})>".format(self.crl_number)
+        return "<CRLNumber({})>".format(self.crl_number)
crl_number = utils.read_only_property("_crl_number")
@@ -143,8 +174,12 @@ def __repr__(self):
class AuthorityKeyIdentifier(object):
oid = ExtensionOID.AUTHORITY_KEY_IDENTIFIER
- def __init__(self, key_identifier, authority_cert_issuer,
- authority_cert_serial_number):
+ def __init__(
+ self,
+ key_identifier,
+ authority_cert_issuer,
+ authority_cert_serial_number,
+ ):
if (authority_cert_issuer is None) != (
authority_cert_serial_number is None
):
@@ -166,9 +201,7 @@ def __init__(self, key_identifier, authority_cert_issuer,
if authority_cert_serial_number is not None and not isinstance(
authority_cert_serial_number, six.integer_types
):
- raise TypeError(
- "authority_cert_serial_number must be an integer"
- )
+ raise TypeError("authority_cert_serial_number must be an integer")
self._key_identifier = key_identifier
self._authority_cert_issuer = authority_cert_issuer
@@ -180,15 +213,15 @@ def from_issuer_public_key(cls, public_key):
return cls(
key_identifier=digest,
authority_cert_issuer=None,
- authority_cert_serial_number=None
+ authority_cert_serial_number=None,
)
@classmethod
def from_issuer_subject_key_identifier(cls, ski):
return cls(
- key_identifier=ski.value.digest,
+ key_identifier=ski.digest,
authority_cert_issuer=None,
- authority_cert_serial_number=None
+ authority_cert_serial_number=None,
)
def __repr__(self):
@@ -204,10 +237,10 @@ def __eq__(self, other):
return NotImplemented
return (
- self.key_identifier == other.key_identifier and
- self.authority_cert_issuer == other.authority_cert_issuer and
- self.authority_cert_serial_number ==
- other.authority_cert_serial_number
+ self.key_identifier == other.key_identifier
+ and self.authority_cert_issuer == other.authority_cert_issuer
+ and self.authority_cert_serial_number
+ == other.authority_cert_serial_number
)
def __ne__(self, other):
@@ -218,9 +251,9 @@ def __hash__(self):
aci = None
else:
aci = tuple(self.authority_cert_issuer)
- return hash((
- self.key_identifier, aci, self.authority_cert_serial_number
- ))
+ return hash(
+ (self.key_identifier, aci, self.authority_cert_serial_number)
+ )
key_identifier = utils.read_only_property("_key_identifier")
authority_cert_issuer = utils.read_only_property("_authority_cert_issuer")
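
Note: AuthorityKeyIdentifier.from_issuer_subject_key_identifier now reads ski.digest directly, i.e. it expects the SubjectKeyIdentifier value rather than the Extension wrapper older callers passed. A minimal sketch, assuming issuer_cert is an already-loaded x509.Certificate:

    from cryptography import x509

    ski_ext = issuer_cert.extensions.get_extension_for_class(x509.SubjectKeyIdentifier)
    aki = x509.AuthorityKeyIdentifier.from_issuer_subject_key_identifier(ski_ext.value)
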
@@ -272,14 +305,10 @@ def __init__(self, descriptions):
self._descriptions = descriptions
- def __iter__(self):
- return iter(self._descriptions)
-
- def __len__(self):
- return len(self._descriptions)
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_descriptions")
def __repr__(self):
-        return "<AuthorityInformationAccess({0})>".format(self._descriptions)
+        return "<AuthorityInformationAccess({})>".format(self._descriptions)
def __eq__(self, other):
if not isinstance(other, AuthorityInformationAccess):
@@ -290,8 +319,37 @@ def __eq__(self, other):
def __ne__(self, other):
return not self == other
- def __getitem__(self, idx):
- return self._descriptions[idx]
+ def __hash__(self):
+ return hash(tuple(self._descriptions))
+
+
+@utils.register_interface(ExtensionType)
+class SubjectInformationAccess(object):
+ oid = ExtensionOID.SUBJECT_INFORMATION_ACCESS
+
+ def __init__(self, descriptions):
+ descriptions = list(descriptions)
+ if not all(isinstance(x, AccessDescription) for x in descriptions):
+ raise TypeError(
+ "Every item in the descriptions list must be an "
+ "AccessDescription"
+ )
+
+ self._descriptions = descriptions
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_descriptions")
+
+ def __repr__(self):
+        return "<SubjectInformationAccess({})>".format(self._descriptions)
+
+ def __eq__(self, other):
+ if not isinstance(other, SubjectInformationAccess):
+ return NotImplemented
+
+ return self._descriptions == other._descriptions
+
+ def __ne__(self, other):
+ return not self == other
def __hash__(self):
return hash(tuple(self._descriptions))
@@ -319,8 +377,8 @@ def __eq__(self, other):
return NotImplemented
return (
- self.access_method == other.access_method and
- self.access_location == other.access_location
+ self.access_method == other.access_method
+ and self.access_location == other.access_location
)
def __ne__(self, other):
@@ -344,9 +402,8 @@ def __init__(self, ca, path_length):
if path_length is not None and not ca:
raise ValueError("path_length must be None when ca is False")
- if (
- path_length is not None and
- (not isinstance(path_length, six.integer_types) or path_length < 0)
+ if path_length is not None and (
+ not isinstance(path_length, six.integer_types) or path_length < 0
):
raise TypeError(
"path_length must be a non-negative integer or None"
@@ -359,8 +416,9 @@ def __init__(self, ca, path_length):
path_length = utils.read_only_property("_path_length")
def __repr__(self):
-        return ("<BasicConstraints(ca={0.ca}, "
-                "path_length={0.path_length})>").format(self)
+ return (
+            "<BasicConstraints(ca={0.ca}, path_length={0.path_length})>"
+ ).format(self)
def __eq__(self, other):
if not isinstance(other, BasicConstraints):
@@ -419,14 +477,12 @@ def __init__(self, distribution_points):
self._distribution_points = distribution_points
- def __iter__(self):
- return iter(self._distribution_points)
-
- def __len__(self):
- return len(self._distribution_points)
+ __len__, __iter__, __getitem__ = _make_sequence_methods(
+ "_distribution_points"
+ )
def __repr__(self):
-        return "<CRLDistributionPoints({0})>".format(self._distribution_points)
+        return "<CRLDistributionPoints({})>".format(self._distribution_points)
def __eq__(self, other):
if not isinstance(other, CRLDistributionPoints):
@@ -437,9 +493,6 @@ def __eq__(self, other):
def __ne__(self, other):
return not self == other
- def __getitem__(self, idx):
- return self._distribution_points[idx]
-
def __hash__(self):
return hash(tuple(self._distribution_points))
@@ -460,14 +513,12 @@ def __init__(self, distribution_points):
self._distribution_points = distribution_points
- def __iter__(self):
- return iter(self._distribution_points)
-
- def __len__(self):
- return len(self._distribution_points)
+ __len__, __iter__, __getitem__ = _make_sequence_methods(
+ "_distribution_points"
+ )
def __repr__(self):
-        return "<FreshestCRL({0})>".format(self._distribution_points)
+        return "<FreshestCRL({})>".format(self._distribution_points)
def __eq__(self, other):
if not isinstance(other, FreshestCRL):
@@ -478,9 +529,6 @@ def __eq__(self, other):
def __ne__(self, other):
return not self == other
- def __getitem__(self, idx):
- return self._distribution_points[idx]
-
def __hash__(self):
return hash(tuple(self._distribution_points))
@@ -513,14 +561,15 @@ def __init__(self, full_name, relative_name, reasons, crl_issuer):
"crl_issuer must be None or a list of general names"
)
- if reasons and (not isinstance(reasons, frozenset) or not all(
- isinstance(x, ReasonFlags) for x in reasons
- )):
+ if reasons and (
+ not isinstance(reasons, frozenset)
+ or not all(isinstance(x, ReasonFlags) for x in reasons)
+ ):
raise TypeError("reasons must be None or frozenset of ReasonFlags")
if reasons and (
- ReasonFlags.unspecified in reasons or
- ReasonFlags.remove_from_crl in reasons
+ ReasonFlags.unspecified in reasons
+ or ReasonFlags.remove_from_crl in reasons
):
raise ValueError(
"unspecified and remove_from_crl are not valid reasons in a "
@@ -541,8 +590,8 @@ def __init__(self, full_name, relative_name, reasons, crl_issuer):
def __repr__(self):
         return (
             "<DistributionPoint(full_name={0.full_name}, relative_name={0.rela"
-            "tive_name}, reasons={0.reasons}, crl_issuer={0.crl_issu"
-            "er})>".format(self)
+ "tive_name}, reasons={0.reasons}, "
+ "crl_issuer={0.crl_issuer})>".format(self)
)
def __eq__(self, other):
@@ -550,10 +599,10 @@ def __eq__(self, other):
return NotImplemented
return (
- self.full_name == other.full_name and
- self.relative_name == other.relative_name and
- self.reasons == other.reasons and
- self.crl_issuer == other.crl_issuer
+ self.full_name == other.full_name
+ and self.relative_name == other.relative_name
+ and self.reasons == other.reasons
+ and self.crl_issuer == other.crl_issuer
)
def __ne__(self, other):
@@ -632,8 +681,8 @@ def __eq__(self, other):
return NotImplemented
return (
- self.require_explicit_policy == other.require_explicit_policy and
- self.inhibit_policy_mapping == other.inhibit_policy_mapping
+ self.require_explicit_policy == other.require_explicit_policy
+ and self.inhibit_policy_mapping == other.inhibit_policy_mapping
)
def __ne__(self, other):
@@ -666,14 +715,10 @@ def __init__(self, policies):
self._policies = policies
- def __iter__(self):
- return iter(self._policies)
-
- def __len__(self):
- return len(self._policies)
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_policies")
def __repr__(self):
-        return "<CertificatePolicies({0})>".format(self._policies)
+        return "<CertificatePolicies({})>".format(self._policies)
def __eq__(self, other):
if not isinstance(other, CertificatePolicies):
@@ -684,9 +729,6 @@ def __eq__(self, other):
def __ne__(self, other):
return not self == other
- def __getitem__(self, idx):
- return self._policies[idx]
-
def __hash__(self):
return hash(tuple(self._policies))
@@ -701,8 +743,8 @@ def __init__(self, policy_identifier, policy_qualifiers):
if policy_qualifiers:
policy_qualifiers = list(policy_qualifiers)
if not all(
- isinstance(x, (six.text_type, UserNotice))
- for x in policy_qualifiers
+ isinstance(x, (six.text_type, UserNotice))
+ for x in policy_qualifiers
):
raise TypeError(
"policy_qualifiers must be a list of strings and/or "
@@ -722,8 +764,8 @@ def __eq__(self, other):
return NotImplemented
return (
- self.policy_identifier == other.policy_identifier and
- self.policy_qualifiers == other.policy_qualifiers
+ self.policy_identifier == other.policy_identifier
+ and self.policy_qualifiers == other.policy_qualifiers
)
def __ne__(self, other):
@@ -764,8 +806,8 @@ def __eq__(self, other):
return NotImplemented
return (
- self.notice_reference == other.notice_reference and
- self.explicit_text == other.explicit_text
+ self.notice_reference == other.notice_reference
+ and self.explicit_text == other.explicit_text
)
def __ne__(self, other):
@@ -783,9 +825,7 @@ def __init__(self, organization, notice_numbers):
self._organization = organization
notice_numbers = list(notice_numbers)
if not all(isinstance(x, int) for x in notice_numbers):
- raise TypeError(
- "notice_numbers must be a list of integers"
- )
+ raise TypeError("notice_numbers must be a list of integers")
self._notice_numbers = notice_numbers
@@ -800,8 +840,8 @@ def __eq__(self, other):
return NotImplemented
return (
- self.organization == other.organization and
- self.notice_numbers == other.notice_numbers
+ self.organization == other.organization
+ and self.notice_numbers == other.notice_numbers
)
def __ne__(self, other):
@@ -827,14 +867,10 @@ def __init__(self, usages):
self._usages = usages
- def __iter__(self):
- return iter(self._usages)
-
- def __len__(self):
- return len(self._usages)
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_usages")
def __repr__(self):
-        return "<ExtendedKeyUsage({0})>".format(self._usages)
+        return "<ExtendedKeyUsage({})>".format(self._usages)
def __eq__(self, other):
if not isinstance(other, ExtendedKeyUsage):
@@ -853,6 +889,41 @@ def __hash__(self):
class OCSPNoCheck(object):
oid = ExtensionOID.OCSP_NO_CHECK
+ def __eq__(self, other):
+ if not isinstance(other, OCSPNoCheck):
+ return NotImplemented
+
+ return True
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash(OCSPNoCheck)
+
+ def __repr__(self):
+        return "<OCSPNoCheck()>"
+
+
+@utils.register_interface(ExtensionType)
+class PrecertPoison(object):
+ oid = ExtensionOID.PRECERT_POISON
+
+ def __eq__(self, other):
+ if not isinstance(other, PrecertPoison):
+ return NotImplemented
+
+ return True
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash(PrecertPoison)
+
+ def __repr__(self):
+        return "<PrecertPoison()>"
+
@utils.register_interface(ExtensionType)
class TLSFeature(object):
@@ -861,8 +932,8 @@ class TLSFeature(object):
def __init__(self, features):
features = list(features)
if (
- not all(isinstance(x, TLSFeatureType) for x in features) or
- len(features) == 0
+ not all(isinstance(x, TLSFeatureType) for x in features)
+ or len(features) == 0
):
raise TypeError(
"features must be a list of elements from the TLSFeatureType "
@@ -871,11 +942,7 @@ def __init__(self, features):
self._features = features
- def __iter__(self):
- return iter(self._features)
-
- def __len__(self):
- return len(self._features)
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_features")
def __repr__(self):
         return "<TLSFeature(features={0._features})>".format(self)
@@ -886,9 +953,6 @@ def __eq__(self, other):
return self._features == other._features
- def __getitem__(self, idx):
- return self._features[idx]
-
def __ne__(self, other):
return not self == other
@@ -907,7 +971,7 @@ class TLSFeatureType(Enum):
status_request_v2 = 17
-_TLS_FEATURE_TYPE_TO_ENUM = dict((x.value, x) for x in TLSFeatureType)
+_TLS_FEATURE_TYPE_TO_ENUM = {x.value: x for x in TLSFeatureType}
@utils.register_interface(ExtensionType)
@@ -945,9 +1009,18 @@ def __hash__(self):
class KeyUsage(object):
oid = ExtensionOID.KEY_USAGE
- def __init__(self, digital_signature, content_commitment, key_encipherment,
- data_encipherment, key_agreement, key_cert_sign, crl_sign,
- encipher_only, decipher_only):
+ def __init__(
+ self,
+ digital_signature,
+ content_commitment,
+ key_encipherment,
+ data_encipherment,
+ key_agreement,
+ key_cert_sign,
+ crl_sign,
+ encipher_only,
+ decipher_only,
+ ):
if not key_agreement and (encipher_only or decipher_only):
raise ValueError(
"encipher_only and decipher_only can only be true when "
@@ -995,45 +1068,55 @@ def __repr__(self):
encipher_only = self.encipher_only
decipher_only = self.decipher_only
except ValueError:
- encipher_only = None
- decipher_only = None
-
-        return ("<KeyUsage(digital_signature={0.digital_signature}, "
-                "content_commitment={0.content_commitment}, "
-                "key_encipherment={0.key_encipherment}, "
-                "data_encipherment={0.data_encipherment}, "
-                "key_agreement={0.key_agreement}, "
-                "key_cert_sign={0.key_cert_sign}, crl_sign={0.crl_sign}, "
-                "encipher_only={1}, decipher_only={2})>").format(
- self, encipher_only, decipher_only)
+ # Users found None confusing because even though encipher/decipher
+ # have no meaning unless key_agreement is true, to construct an
+ # instance of the class you still need to pass False.
+ encipher_only = False
+ decipher_only = False
+
+ return (
+            "<KeyUsage(digital_signature={0.digital_signature}, "
+            "content_commitment={0.content_commitment}, "
+            "key_encipherment={0.key_encipherment}, "
+            "data_encipherment={0.data_encipherment}, "
+            "key_agreement={0.key_agreement}, "
+            "key_cert_sign={0.key_cert_sign}, crl_sign={0.crl_sign}, "
+            "encipher_only={1}, decipher_only={2})>"
+ ).format(self, encipher_only, decipher_only)
def __eq__(self, other):
if not isinstance(other, KeyUsage):
return NotImplemented
return (
- self.digital_signature == other.digital_signature and
- self.content_commitment == other.content_commitment and
- self.key_encipherment == other.key_encipherment and
- self.data_encipherment == other.data_encipherment and
- self.key_agreement == other.key_agreement and
- self.key_cert_sign == other.key_cert_sign and
- self.crl_sign == other.crl_sign and
- self._encipher_only == other._encipher_only and
- self._decipher_only == other._decipher_only
+ self.digital_signature == other.digital_signature
+ and self.content_commitment == other.content_commitment
+ and self.key_encipherment == other.key_encipherment
+ and self.data_encipherment == other.data_encipherment
+ and self.key_agreement == other.key_agreement
+ and self.key_cert_sign == other.key_cert_sign
+ and self.crl_sign == other.crl_sign
+ and self._encipher_only == other._encipher_only
+ and self._decipher_only == other._decipher_only
)
def __ne__(self, other):
return not self == other
def __hash__(self):
- return hash((
- self.digital_signature, self.content_commitment,
- self.key_encipherment, self.data_encipherment,
- self.key_agreement, self.key_cert_sign,
- self.crl_sign, self._encipher_only,
- self._decipher_only
- ))
+ return hash(
+ (
+ self.digital_signature,
+ self.content_commitment,
+ self.key_encipherment,
+ self.data_encipherment,
+ self.key_agreement,
+ self.key_cert_sign,
+ self.crl_sign,
+ self._encipher_only,
+ self._decipher_only,
+ )
+ )
@utils.register_interface(ExtensionType)
@@ -1043,9 +1126,7 @@ class NameConstraints(object):
def __init__(self, permitted_subtrees, excluded_subtrees):
if permitted_subtrees is not None:
permitted_subtrees = list(permitted_subtrees)
- if not all(
- isinstance(x, GeneralName) for x in permitted_subtrees
- ):
+ if not all(isinstance(x, GeneralName) for x in permitted_subtrees):
raise TypeError(
"permitted_subtrees must be a list of GeneralName objects "
"or None"
@@ -1055,9 +1136,7 @@ def __init__(self, permitted_subtrees, excluded_subtrees):
if excluded_subtrees is not None:
excluded_subtrees = list(excluded_subtrees)
- if not all(
- isinstance(x, GeneralName) for x in excluded_subtrees
- ):
+ if not all(isinstance(x, GeneralName) for x in excluded_subtrees):
raise TypeError(
"excluded_subtrees must be a list of GeneralName objects "
"or None"
@@ -1079,17 +1158,21 @@ def __eq__(self, other):
return NotImplemented
return (
- self.excluded_subtrees == other.excluded_subtrees and
- self.permitted_subtrees == other.permitted_subtrees
+ self.excluded_subtrees == other.excluded_subtrees
+ and self.permitted_subtrees == other.permitted_subtrees
)
def __ne__(self, other):
return not self == other
def _validate_ip_name(self, tree):
- if any(isinstance(name, IPAddress) and not isinstance(
- name.value, (ipaddress.IPv4Network, ipaddress.IPv6Network)
- ) for name in tree):
+ if any(
+ isinstance(name, IPAddress)
+ and not isinstance(
+ name.value, (ipaddress.IPv4Network, ipaddress.IPv6Network)
+ )
+ for name in tree
+ ):
raise TypeError(
"IPAddress name constraints must be an IPv4Network or"
" IPv6Network object"
@@ -1137,17 +1220,19 @@ def __init__(self, oid, critical, value):
value = utils.read_only_property("_value")
def __repr__(self):
-        return ("<Extension(oid={0.oid}, critical={0.critical}, "
-                "value={0.value})>").format(self)
+ return (
+            "<Extension(oid={0.oid}, critical={0.critical}, "
+            "value={0.value})>"
+ ).format(self)
def __eq__(self, other):
if not isinstance(other, Extension):
return NotImplemented
return (
- self.oid == other.oid and
- self.critical == other.critical and
- self.value == other.value
+ self.oid == other.oid
+ and self.critical == other.critical
+ and self.value == other.value
)
def __ne__(self, other):
@@ -1168,11 +1253,7 @@ def __init__(self, general_names):
self._general_names = general_names
- def __iter__(self):
- return iter(self._general_names)
-
- def __len__(self):
- return len(self._general_names)
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names")
def get_values_for_type(self, type):
# Return the value of each GeneralName, except for OtherName instances
@@ -1184,7 +1265,7 @@ def get_values_for_type(self, type):
return list(objs)
def __repr__(self):
-        return "<GeneralNames({0})>".format(self._general_names)
+        return "<GeneralNames({})>".format(self._general_names)
def __eq__(self, other):
if not isinstance(other, GeneralNames):
@@ -1195,9 +1276,6 @@ def __eq__(self, other):
def __ne__(self, other):
return not self == other
- def __getitem__(self, idx):
- return self._general_names[idx]
-
def __hash__(self):
return hash(tuple(self._general_names))
@@ -1209,17 +1287,13 @@ class SubjectAlternativeName(object):
def __init__(self, general_names):
self._general_names = GeneralNames(general_names)
- def __iter__(self):
- return iter(self._general_names)
-
- def __len__(self):
- return len(self._general_names)
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names")
def get_values_for_type(self, type):
return self._general_names.get_values_for_type(type)
def __repr__(self):
-        return "<SubjectAlternativeName({0})>".format(self._general_names)
+        return "<SubjectAlternativeName({})>".format(self._general_names)
def __eq__(self, other):
if not isinstance(other, SubjectAlternativeName):
@@ -1227,9 +1301,6 @@ def __eq__(self, other):
return self._general_names == other._general_names
- def __getitem__(self, idx):
- return self._general_names[idx]
-
def __ne__(self, other):
return not self == other
@@ -1244,17 +1315,13 @@ class IssuerAlternativeName(object):
def __init__(self, general_names):
self._general_names = GeneralNames(general_names)
- def __iter__(self):
- return iter(self._general_names)
-
- def __len__(self):
- return len(self._general_names)
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names")
def get_values_for_type(self, type):
return self._general_names.get_values_for_type(type)
def __repr__(self):
-        return "<IssuerAlternativeName({0})>".format(self._general_names)
+        return "<IssuerAlternativeName({})>".format(self._general_names)
def __eq__(self, other):
if not isinstance(other, IssuerAlternativeName):
@@ -1265,9 +1332,6 @@ def __eq__(self, other):
def __ne__(self, other):
return not self == other
- def __getitem__(self, idx):
- return self._general_names[idx]
-
def __hash__(self):
return hash(self._general_names)
@@ -1279,17 +1343,13 @@ class CertificateIssuer(object):
def __init__(self, general_names):
self._general_names = GeneralNames(general_names)
- def __iter__(self):
- return iter(self._general_names)
-
- def __len__(self):
- return len(self._general_names)
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names")
def get_values_for_type(self, type):
return self._general_names.get_values_for_type(type)
def __repr__(self):
-        return "<CertificateIssuer({0})>".format(self._general_names)
+        return "<CertificateIssuer({})>".format(self._general_names)
def __eq__(self, other):
if not isinstance(other, CertificateIssuer):
@@ -1300,9 +1360,6 @@ def __eq__(self, other):
def __ne__(self, other):
return not self == other
- def __getitem__(self, idx):
- return self._general_names[idx]
-
def __hash__(self):
return hash(self._general_names)
@@ -1318,7 +1375,7 @@ def __init__(self, reason):
self._reason = reason
def __repr__(self):
-        return "<CRLReason(reason={0})>".format(self._reason)
+        return "<CRLReason(reason={})>".format(self._reason)
def __eq__(self, other):
if not isinstance(other, CRLReason):
@@ -1346,7 +1403,7 @@ def __init__(self, invalidity_date):
self._invalidity_date = invalidity_date
def __repr__(self):
-        return "<InvalidityDate(invalidity_date={0})>".format(
+        return "<InvalidityDate(invalidity_date={})>".format(
self._invalidity_date
)
@@ -1381,22 +1438,239 @@ def __init__(self, signed_certificate_timestamps):
)
self._signed_certificate_timestamps = signed_certificate_timestamps
- def __iter__(self):
- return iter(self._signed_certificate_timestamps)
+ __len__, __iter__, __getitem__ = _make_sequence_methods(
+ "_signed_certificate_timestamps"
+ )
+
+ def __repr__(self):
+        return "<PrecertificateSignedCertificateTimestamps({})>".format(
+ list(self)
+ )
+
+ def __hash__(self):
+ return hash(tuple(self._signed_certificate_timestamps))
+
+ def __eq__(self, other):
+ if not isinstance(other, PrecertificateSignedCertificateTimestamps):
+ return NotImplemented
+
+ return (
+ self._signed_certificate_timestamps
+ == other._signed_certificate_timestamps
+ )
- def __len__(self):
- return len(self._signed_certificate_timestamps)
+ def __ne__(self, other):
+ return not self == other
+
+
+@utils.register_interface(ExtensionType)
+class SignedCertificateTimestamps(object):
+ oid = ExtensionOID.SIGNED_CERTIFICATE_TIMESTAMPS
+
+ def __init__(self, signed_certificate_timestamps):
+ signed_certificate_timestamps = list(signed_certificate_timestamps)
+ if not all(
+ isinstance(sct, SignedCertificateTimestamp)
+ for sct in signed_certificate_timestamps
+ ):
+ raise TypeError(
+ "Every item in the signed_certificate_timestamps list must be "
+ "a SignedCertificateTimestamp"
+ )
+ self._signed_certificate_timestamps = signed_certificate_timestamps
- def __getitem__(self, idx):
- return self._signed_certificate_timestamps[idx]
+ __len__, __iter__, __getitem__ = _make_sequence_methods(
+ "_signed_certificate_timestamps"
+ )
def __repr__(self):
+        return "<SignedCertificateTimestamps({})>".format(list(self))
+
+ def __hash__(self):
+ return hash(tuple(self._signed_certificate_timestamps))
+
+ def __eq__(self, other):
+ if not isinstance(other, SignedCertificateTimestamps):
+ return NotImplemented
+
return (
-            "<PrecertificateSignedCertificateTimestamps({0})>".format(
- list(self)
+ self._signed_certificate_timestamps
+ == other._signed_certificate_timestamps
+ )
+
+ def __ne__(self, other):
+ return not self == other
+
+
+@utils.register_interface(ExtensionType)
+class OCSPNonce(object):
+ oid = OCSPExtensionOID.NONCE
+
+ def __init__(self, nonce):
+ if not isinstance(nonce, bytes):
+ raise TypeError("nonce must be bytes")
+
+ self._nonce = nonce
+
+ def __eq__(self, other):
+ if not isinstance(other, OCSPNonce):
+ return NotImplemented
+
+ return self.nonce == other.nonce
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash(self.nonce)
+
+ def __repr__(self):
+        return "<OCSPNonce(nonce={0.nonce!r})>".format(self)
+
+ nonce = utils.read_only_property("_nonce")
+
+
+@utils.register_interface(ExtensionType)
+class IssuingDistributionPoint(object):
+ oid = ExtensionOID.ISSUING_DISTRIBUTION_POINT
+
+ def __init__(
+ self,
+ full_name,
+ relative_name,
+ only_contains_user_certs,
+ only_contains_ca_certs,
+ only_some_reasons,
+ indirect_crl,
+ only_contains_attribute_certs,
+ ):
+ if only_some_reasons and (
+ not isinstance(only_some_reasons, frozenset)
+ or not all(isinstance(x, ReasonFlags) for x in only_some_reasons)
+ ):
+ raise TypeError(
+ "only_some_reasons must be None or frozenset of ReasonFlags"
+ )
+
+ if only_some_reasons and (
+ ReasonFlags.unspecified in only_some_reasons
+ or ReasonFlags.remove_from_crl in only_some_reasons
+ ):
+ raise ValueError(
+ "unspecified and remove_from_crl are not valid reasons in an "
+ "IssuingDistributionPoint"
+ )
+
+ if not (
+ isinstance(only_contains_user_certs, bool)
+ and isinstance(only_contains_ca_certs, bool)
+ and isinstance(indirect_crl, bool)
+ and isinstance(only_contains_attribute_certs, bool)
+ ):
+ raise TypeError(
+ "only_contains_user_certs, only_contains_ca_certs, "
+ "indirect_crl and only_contains_attribute_certs "
+ "must all be boolean."
+ )
+
+ crl_constraints = [
+ only_contains_user_certs,
+ only_contains_ca_certs,
+ indirect_crl,
+ only_contains_attribute_certs,
+ ]
+
+ if len([x for x in crl_constraints if x]) > 1:
+ raise ValueError(
+ "Only one of the following can be set to True: "
+ "only_contains_user_certs, only_contains_ca_certs, "
+ "indirect_crl, only_contains_attribute_certs"
+ )
+
+ if not any(
+ [
+ only_contains_user_certs,
+ only_contains_ca_certs,
+ indirect_crl,
+ only_contains_attribute_certs,
+ full_name,
+ relative_name,
+ only_some_reasons,
+ ]
+ ):
+ raise ValueError(
+ "Cannot create empty extension: "
+ "if only_contains_user_certs, only_contains_ca_certs, "
+ "indirect_crl, and only_contains_attribute_certs are all False"
+ ", then either full_name, relative_name, or only_some_reasons "
+ "must have a value."
+ )
+
+ self._only_contains_user_certs = only_contains_user_certs
+ self._only_contains_ca_certs = only_contains_ca_certs
+ self._indirect_crl = indirect_crl
+ self._only_contains_attribute_certs = only_contains_attribute_certs
+ self._only_some_reasons = only_some_reasons
+ self._full_name = full_name
+ self._relative_name = relative_name
+
+ def __repr__(self):
+ return (
+            "<IssuingDistributionPoint(full_name={0.full_name}, "
+            "relative_name={0.relative_name}, "
+            "only_contains_user_certs={0.only_contains_user_certs}, "
+            "only_contains_ca_certs={0.only_contains_ca_certs}, "
+            "only_some_reasons={0.only_some_reasons}, "
+            "indirect_crl={0.indirect_crl}, "
+            "only_contains_attribute_certs="
+            "{0.only_contains_attribute_certs})>".format(self)
+ )
+
+ def __eq__(self, other):
+ if not isinstance(other, IssuingDistributionPoint):
+ return NotImplemented
+
+ return (
+ self.full_name == other.full_name
+ and self.relative_name == other.relative_name
+ and self.only_contains_user_certs == other.only_contains_user_certs
+ and self.only_contains_ca_certs == other.only_contains_ca_certs
+ and self.only_some_reasons == other.only_some_reasons
+ and self.indirect_crl == other.indirect_crl
+ and self.only_contains_attribute_certs
+ == other.only_contains_attribute_certs
+ )
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash(
+ (
+ self.full_name,
+ self.relative_name,
+ self.only_contains_user_certs,
+ self.only_contains_ca_certs,
+ self.only_some_reasons,
+ self.indirect_crl,
+ self.only_contains_attribute_certs,
)
)
+ full_name = utils.read_only_property("_full_name")
+ relative_name = utils.read_only_property("_relative_name")
+ only_contains_user_certs = utils.read_only_property(
+ "_only_contains_user_certs"
+ )
+ only_contains_ca_certs = utils.read_only_property(
+ "_only_contains_ca_certs"
+ )
+ only_some_reasons = utils.read_only_property("_only_some_reasons")
+ indirect_crl = utils.read_only_property("_indirect_crl")
+ only_contains_attribute_certs = utils.read_only_property(
+ "_only_contains_attribute_certs"
+ )
+
@utils.register_interface(ExtensionType)
class UnrecognizedExtension(object):
@@ -1411,9 +1685,8 @@ def __init__(self, oid, value):
def __repr__(self):
return (
-            "<UnrecognizedExtension(oid={0.oid}, value={0.value!r})>".format(
- self
- )
+            "<UnrecognizedExtension(oid={0.oid}, "
+            "value={0.value!r})>".format(self)
)
def __eq__(self, other):
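
Note: besides the mechanical reformatting, extensions.py gains SubjectInformationAccess, PrecertPoison, OCSPNonce, SignedCertificateTimestamps and IssuingDistributionPoint. A minimal sketch of attaching the new IssuingDistributionPoint to a CRL; the issuer name and URL are placeholders, at most one of the four boolean flags may be True, and at least one field must be set:

    import datetime

    from cryptography import x509
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.asymmetric import rsa
    from cryptography.x509.oid import NameOID

    key = rsa.generate_private_key(65537, 2048, default_backend())
    now = datetime.datetime.utcnow()
    idp = x509.IssuingDistributionPoint(
        full_name=[x509.UniformResourceIdentifier(u"http://crl.example.com/ca.crl")],
        relative_name=None,
        only_contains_user_certs=True,
        only_contains_ca_certs=False,
        only_some_reasons=None,
        indirect_crl=False,
        only_contains_attribute_certs=False,
    )
    crl = (
        x509.CertificateRevocationListBuilder()
        .issuer_name(x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, u"Example CA")]))
        .last_update(now)
        .next_update(now + datetime.timedelta(days=7))
        .add_extension(idp, critical=True)               # IDP is defined as critical in RFC 5280
        .sign(key, hashes.SHA256())
    )
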
diff --git a/functions/source/CreateSSHKey/cryptography/x509/general_name.py b/functions/source/CreateSSHKey/cryptography/x509/general_name.py
index 776219e..9be9d8c 100644
--- a/functions/source/CreateSSHKey/cryptography/x509/general_name.py
+++ b/functions/source/CreateSSHKey/cryptography/x509/general_name.py
@@ -6,15 +6,10 @@
import abc
import ipaddress
-import warnings
from email.utils import parseaddr
-import idna
-
import six
-from six.moves import urllib_parse
-
from cryptography import utils
from cryptography.x509.name import Name
from cryptography.x509.oid import ObjectIdentifier
@@ -55,14 +50,10 @@ def __init__(self, value):
try:
value.encode("ascii")
except UnicodeEncodeError:
- value = self._idna_encode(value)
- warnings.warn(
+ raise ValueError(
"RFC822Name values should be passed as an A-label string. "
"This means unicode characters should be encoded via "
- "idna. Support for passing unicode strings (aka U-label) "
- " will be removed in a future version.",
- utils.DeprecatedIn21,
- stacklevel=2,
+ "a library like idna."
)
else:
raise TypeError("value must be string")
@@ -83,11 +74,6 @@ def _init_without_validation(cls, value):
instance._value = value
return instance
- def _idna_encode(self, value):
- _, address = parseaddr(value)
- parts = address.split(u"@")
- return parts[0] + "@" + idna.encode(parts[1]).decode("ascii")
-
def __repr__(self):
return "".format(self.value)
@@ -104,15 +90,6 @@ def __hash__(self):
return hash(self.value)
-def _idna_encode(value):
- # Retain prefixes '*.' for common/alt names and '.' for name constraints
- for prefix in ['*.', '.']:
- if value.startswith(prefix):
- value = value[len(prefix):]
- return prefix + idna.encode(value).decode("ascii")
- return idna.encode(value).decode("ascii")
-
-
@utils.register_interface(GeneralName)
class DNSName(object):
def __init__(self, value):
@@ -120,14 +97,10 @@ def __init__(self, value):
try:
value.encode("ascii")
except UnicodeEncodeError:
- value = _idna_encode(value)
- warnings.warn(
+ raise ValueError(
"DNSName values should be passed as an A-label string. "
"This means unicode characters should be encoded via "
- "idna. Support for passing unicode strings (aka U-label) "
- " will be removed in a future version.",
- utils.DeprecatedIn21,
- stacklevel=2,
+ "a library like idna."
)
else:
raise TypeError("value must be string")
@@ -165,14 +138,10 @@ def __init__(self, value):
try:
value.encode("ascii")
except UnicodeEncodeError:
- value = self._idna_encode(value)
- warnings.warn(
+ raise ValueError(
"URI values should be passed as an A-label string. "
"This means unicode characters should be encoded via "
- "idna. Support for passing unicode strings (aka U-label) "
- " will be removed in a future version.",
- utils.DeprecatedIn21,
- stacklevel=2,
+ "a library like idna."
)
else:
raise TypeError("value must be string")
@@ -187,28 +156,6 @@ def _init_without_validation(cls, value):
instance._value = value
return instance
- def _idna_encode(self, value):
- parsed = urllib_parse.urlparse(value)
- if parsed.port:
- netloc = (
- idna.encode(parsed.hostname) +
- ":{0}".format(parsed.port).encode("ascii")
- ).decode("ascii")
- else:
- netloc = idna.encode(parsed.hostname).decode("ascii")
-
- # Note that building a URL in this fashion means it should be
- # semantically indistinguishable from the original but is not
- # guaranteed to be exactly the same.
- return urllib_parse.urlunparse((
- parsed.scheme,
- netloc,
- parsed.path,
- parsed.params,
- parsed.query,
- parsed.fragment
- ))
-
def __repr__(self):
return "".format(self.value)
@@ -236,7 +183,7 @@ def __init__(self, value):
value = utils.read_only_property("_value")
def __repr__(self):
- return "<DirectoryName(value={0})>".format(self.value)
+ return "<DirectoryName(value={})>".format(self.value)
def __eq__(self, other):
if not isinstance(other, DirectoryName):
@@ -262,7 +209,7 @@ def __init__(self, value):
value = utils.read_only_property("_value")
def __repr__(self):
- return "<RegisteredID(value={0})>".format(self.value)
+ return "<RegisteredID(value={})>".format(self.value)
def __eq__(self, other):
if not isinstance(other, RegisteredID):
@@ -286,8 +233,8 @@ def __init__(self, value):
ipaddress.IPv4Address,
ipaddress.IPv6Address,
ipaddress.IPv4Network,
- ipaddress.IPv6Network
- )
+ ipaddress.IPv6Network,
+ ),
):
raise TypeError(
"value must be an instance of ipaddress.IPv4Address, "
@@ -300,7 +247,7 @@ def __init__(self, value):
value = utils.read_only_property("_value")
def __repr__(self):
- return "<IPAddress(value={0})>".format(self.value)
+ return "<IPAddress(value={})>".format(self.value)
def __eq__(self, other):
if not isinstance(other, IPAddress):
@@ -330,8 +277,9 @@ def __init__(self, type_id, value):
value = utils.read_only_property("_value")
def __repr__(self):
- "<OtherName(type_id={0}, value={1!r})>".format(
- self.type_id, self.value)
+ "<OtherName(type_id={}, value={!r})>".format(
+ self.type_id, self.value
+ )
def __eq__(self, other):
if not isinstance(other, OtherName):
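With the _idna_encode helpers removed above, RFC822Name, DNSName, and UniformResourceIdentifier no longer encode U-labels on the caller's behalf; a non-ASCII value now raises ValueError instead of emitting a deprecation warning. A brief sketch of the new contract (illustrative, not part of this diff):

    from cryptography import x509

    x509.DNSName(u"xn--bcher-kva.example")    # A-label (pure ASCII): accepted
    try:
        x509.DNSName(u"b\u00fccher.example")  # U-label: now rejected
    except ValueError:
        pass  # callers must pre-encode with a library such as idna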
diff --git a/functions/source/CreateSSHKey/cryptography/x509/name.py b/functions/source/CreateSSHKey/cryptography/x509/name.py
index 2fbaee9..0be876a 100644
--- a/functions/source/CreateSSHKey/cryptography/x509/name.py
+++ b/functions/source/CreateSSHKey/cryptography/x509/name.py
@@ -9,6 +9,7 @@
import six
from cryptography import utils
+from cryptography.hazmat.backends import _get_backend
from cryptography.x509.oid import NameOID, ObjectIdentifier
@@ -25,8 +26,54 @@ class _ASN1Type(Enum):
BMPString = 30
-_ASN1_TYPE_TO_ENUM = dict((i.value, i) for i in _ASN1Type)
+_ASN1_TYPE_TO_ENUM = {i.value: i for i in _ASN1Type}
_SENTINEL = object()
+_NAMEOID_DEFAULT_TYPE = {
+ NameOID.COUNTRY_NAME: _ASN1Type.PrintableString,
+ NameOID.JURISDICTION_COUNTRY_NAME: _ASN1Type.PrintableString,
+ NameOID.SERIAL_NUMBER: _ASN1Type.PrintableString,
+ NameOID.DN_QUALIFIER: _ASN1Type.PrintableString,
+ NameOID.EMAIL_ADDRESS: _ASN1Type.IA5String,
+ NameOID.DOMAIN_COMPONENT: _ASN1Type.IA5String,
+}
+
+#: Short attribute names from RFC 4514:
+#: https://tools.ietf.org/html/rfc4514#page-7
+_NAMEOID_TO_NAME = {
+ NameOID.COMMON_NAME: "CN",
+ NameOID.LOCALITY_NAME: "L",
+ NameOID.STATE_OR_PROVINCE_NAME: "ST",
+ NameOID.ORGANIZATION_NAME: "O",
+ NameOID.ORGANIZATIONAL_UNIT_NAME: "OU",
+ NameOID.COUNTRY_NAME: "C",
+ NameOID.STREET_ADDRESS: "STREET",
+ NameOID.DOMAIN_COMPONENT: "DC",
+ NameOID.USER_ID: "UID",
+}
+
+
+def _escape_dn_value(val):
+ """Escape special characters in RFC4514 Distinguished Name value."""
+
+ if not val:
+ return ""
+
+ # See https://tools.ietf.org/html/rfc4514#section-2.4
+ val = val.replace("\\", "\\\\")
+ val = val.replace('"', '\\"')
+ val = val.replace("+", "\\+")
+ val = val.replace(",", "\\,")
+ val = val.replace(";", "\\;")
+ val = val.replace("<", "\\<")
+ val = val.replace(">", "\\>")
+ val = val.replace("\0", "\\00")
+
+ if val[0] in ("#", " "):
+ val = "\\" + val
+ if val[-1] == " ":
+ val = val[:-1] + "\\ "
+
+ return val
class NameAttribute(object):
@@ -37,30 +84,25 @@ def __init__(self, oid, value, _type=_SENTINEL):
)
if not isinstance(value, six.text_type):
- raise TypeError(
- "value argument must be a text type."
- )
+ raise TypeError("value argument must be a text type.")
if (
- oid == NameOID.COUNTRY_NAME or
- oid == NameOID.JURISDICTION_COUNTRY_NAME
+ oid == NameOID.COUNTRY_NAME
+ or oid == NameOID.JURISDICTION_COUNTRY_NAME
):
if len(value.encode("utf8")) != 2:
raise ValueError(
"Country name must be a 2 character country code"
)
- if _type == _SENTINEL:
- _type = _ASN1Type.PrintableString
-
- if len(value) == 0:
- raise ValueError("Value cannot be an empty string")
-
- # Set the default string type for encoding ASN1 strings to UTF8. This
- # is the default for newer OpenSSLs for several years (1.0.1h+) and is
- # recommended in RFC 2459.
+ # The appropriate ASN1 string type varies by OID and is defined across
+ # multiple RFCs including 2459, 3280, and 5280. In general UTF8String
+ # is preferred (2459), but 3280 and 5280 specify several OIDs with
+ # alternate types. This means when we see the sentinel value we need
+ # to look up whether the OID has a non-UTF8 type. If it does, set it
+ # to that. Otherwise, UTF8!
if _type == _SENTINEL:
- _type = _ASN1Type.UTF8String
+ _type = _NAMEOID_DEFAULT_TYPE.get(oid, _ASN1Type.UTF8String)
if not isinstance(_type, _ASN1Type):
raise TypeError("_type must be from the _ASN1Type enum")
@@ -72,14 +114,21 @@ def __init__(self, oid, value, _type=_SENTINEL):
oid = utils.read_only_property("_oid")
value = utils.read_only_property("_value")
+ def rfc4514_string(self):
+ """
+ Format as RFC4514 Distinguished Name string.
+
+ Use short attribute name if available, otherwise fall back to OID
+ dotted string.
+ """
+ key = _NAMEOID_TO_NAME.get(self.oid, self.oid.dotted_string)
+ return "%s=%s" % (key, _escape_dn_value(self.value))
+
def __eq__(self, other):
if not isinstance(other, NameAttribute):
return NotImplemented
- return (
- self.oid == other.oid and
- self.value == other.value
- )
+ return self.oid == other.oid and self.value == other.value
def __ne__(self, other):
return not self == other
@@ -93,28 +142,42 @@ def __repr__(self):
class RelativeDistinguishedName(object):
def __init__(self, attributes):
- attributes = frozenset(attributes)
+ attributes = list(attributes)
if not attributes:
raise ValueError("a relative distinguished name cannot be empty")
if not all(isinstance(x, NameAttribute) for x in attributes):
raise TypeError("attributes must be an iterable of NameAttribute")
+ # Keep list and frozenset to preserve attribute order where it matters
self._attributes = attributes
+ self._attribute_set = frozenset(attributes)
+
+ if len(self._attribute_set) != len(attributes):
+ raise ValueError("duplicate attributes are not allowed")
def get_attributes_for_oid(self, oid):
return [i for i in self if i.oid == oid]
+ def rfc4514_string(self):
+ """
+ Format as RFC4514 Distinguished Name string.
+
+ Within each RDN, attributes are joined by '+', although that is rarely
+ used in certificates.
+ """
+ return "+".join(attr.rfc4514_string() for attr in self._attributes)
+
def __eq__(self, other):
if not isinstance(other, RelativeDistinguishedName):
return NotImplemented
- return self._attributes == other._attributes
+ return self._attribute_set == other._attribute_set
def __ne__(self, other):
return not self == other
def __hash__(self):
- return hash(self._attributes)
+ return hash(self._attribute_set)
def __iter__(self):
return iter(self._attributes)
@@ -123,7 +186,7 @@ def __len__(self):
return len(self._attributes)
def __repr__(self):
- return "<RelativeDistinguishedName({0!r})>".format(list(self))
+ return "<RelativeDistinguishedName({})>".format(self.rfc4514_string())
class Name(object):
@@ -141,6 +204,21 @@ def __init__(self, attributes):
" or a list RelativeDistinguishedName"
)
+ def rfc4514_string(self):
+ """
+ Format as RFC4514 Distinguished Name string.
+ For example 'CN=foobar.com,O=Foo Corp,C=US'
+
+ An X.509 name is a two-level structure: a list of sets of attributes.
+ Each list element is separated by ',' and within each list element, set
+ elements are separated by '+'. The latter is almost never used in
+ real world certificates. According to RFC4514 section 2.1 the
+ RDNSequence must be reversed when converting to string representation.
+ """
+ return ",".join(
+ attr.rfc4514_string() for attr in reversed(self._attributes)
+ )
+
def get_attributes_for_oid(self, oid):
return [i for i in self if i.oid == oid]
@@ -148,7 +226,8 @@ def get_attributes_for_oid(self, oid):
def rdns(self):
return self._attributes
- def public_bytes(self, backend):
+ def public_bytes(self, backend=None):
+ backend = _get_backend(backend)
return backend.x509_name_bytes(self)
def __eq__(self, other):
@@ -174,4 +253,9 @@ def __len__(self):
return sum(len(rdn) for rdn in self._attributes)
def __repr__(self):
- return "<Name({0!r})>".format(list(self))
+ rdns = ",".join(attr.rfc4514_string() for attr in self._attributes)
+
+ if six.PY2:
+ return "<Name({})>".format(rdns.encode("utf8"))
+ else:
+ return "<Name({})>".format(rdns)
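The name.py changes above add rfc4514_string() to NameAttribute, RelativeDistinguishedName, and Name, and reuse it for Name.__repr__. A short sketch of the new API (illustrative, not part of this diff):

    from cryptography import x509
    from cryptography.x509.oid import NameOID

    name = x509.Name([
        x509.NameAttribute(NameOID.COUNTRY_NAME, u"US"),
        x509.NameAttribute(NameOID.ORGANIZATION_NAME, u"Foo Corp"),
        x509.NameAttribute(NameOID.COMMON_NAME, u"foobar.com"),
    ])
    # RDNs are emitted in reverse order per RFC 4514 section 2.1
    print(name.rfc4514_string())  # CN=foobar.com,O=Foo Corp,C=US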
diff --git a/functions/source/CreateSSHKey/cryptography/x509/ocsp.py b/functions/source/CreateSSHKey/cryptography/x509/ocsp.py
new file mode 100644
index 0000000..f8e2722
--- /dev/null
+++ b/functions/source/CreateSSHKey/cryptography/x509/ocsp.py
@@ -0,0 +1,467 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import abc
+import datetime
+from enum import Enum
+
+import six
+
+from cryptography import x509
+from cryptography.hazmat.primitives import hashes
+from cryptography.x509.base import (
+ _EARLIEST_UTC_TIME,
+ _convert_to_naive_utc_time,
+ _reject_duplicate_extension,
+)
+
+
+_OIDS_TO_HASH = {
+ "1.3.14.3.2.26": hashes.SHA1(),
+ "2.16.840.1.101.3.4.2.4": hashes.SHA224(),
+ "2.16.840.1.101.3.4.2.1": hashes.SHA256(),
+ "2.16.840.1.101.3.4.2.2": hashes.SHA384(),
+ "2.16.840.1.101.3.4.2.3": hashes.SHA512(),
+}
+
+
+class OCSPResponderEncoding(Enum):
+ HASH = "By Hash"
+ NAME = "By Name"
+
+
+class OCSPResponseStatus(Enum):
+ SUCCESSFUL = 0
+ MALFORMED_REQUEST = 1
+ INTERNAL_ERROR = 2
+ TRY_LATER = 3
+ SIG_REQUIRED = 5
+ UNAUTHORIZED = 6
+
+
+_RESPONSE_STATUS_TO_ENUM = {x.value: x for x in OCSPResponseStatus}
+_ALLOWED_HASHES = (
+ hashes.SHA1,
+ hashes.SHA224,
+ hashes.SHA256,
+ hashes.SHA384,
+ hashes.SHA512,
+)
+
+
+def _verify_algorithm(algorithm):
+ if not isinstance(algorithm, _ALLOWED_HASHES):
+ raise ValueError(
+ "Algorithm must be SHA1, SHA224, SHA256, SHA384, or SHA512"
+ )
+
+
+class OCSPCertStatus(Enum):
+ GOOD = 0
+ REVOKED = 1
+ UNKNOWN = 2
+
+
+_CERT_STATUS_TO_ENUM = {x.value: x for x in OCSPCertStatus}
+
+
+def load_der_ocsp_request(data):
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ return backend.load_der_ocsp_request(data)
+
+
+def load_der_ocsp_response(data):
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ return backend.load_der_ocsp_response(data)
+
+
+class OCSPRequestBuilder(object):
+ def __init__(self, request=None, extensions=[]):
+ self._request = request
+ self._extensions = extensions
+
+ def add_certificate(self, cert, issuer, algorithm):
+ if self._request is not None:
+ raise ValueError("Only one certificate can be added to a request")
+
+ _verify_algorithm(algorithm)
+ if not isinstance(cert, x509.Certificate) or not isinstance(
+ issuer, x509.Certificate
+ ):
+ raise TypeError("cert and issuer must be a Certificate")
+
+ return OCSPRequestBuilder((cert, issuer, algorithm), self._extensions)
+
+ def add_extension(self, extension, critical):
+ if not isinstance(extension, x509.ExtensionType):
+ raise TypeError("extension must be an ExtensionType")
+
+ extension = x509.Extension(extension.oid, critical, extension)
+ _reject_duplicate_extension(extension, self._extensions)
+
+ return OCSPRequestBuilder(
+ self._request, self._extensions + [extension]
+ )
+
+ def build(self):
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if self._request is None:
+ raise ValueError("You must add a certificate before building")
+
+ return backend.create_ocsp_request(self)
+
+
+class _SingleResponse(object):
+ def __init__(
+ self,
+ cert,
+ issuer,
+ algorithm,
+ cert_status,
+ this_update,
+ next_update,
+ revocation_time,
+ revocation_reason,
+ ):
+ if not isinstance(cert, x509.Certificate) or not isinstance(
+ issuer, x509.Certificate
+ ):
+ raise TypeError("cert and issuer must be a Certificate")
+
+ _verify_algorithm(algorithm)
+ if not isinstance(this_update, datetime.datetime):
+ raise TypeError("this_update must be a datetime object")
+ if next_update is not None and not isinstance(
+ next_update, datetime.datetime
+ ):
+ raise TypeError("next_update must be a datetime object or None")
+
+ self._cert = cert
+ self._issuer = issuer
+ self._algorithm = algorithm
+ self._this_update = this_update
+ self._next_update = next_update
+
+ if not isinstance(cert_status, OCSPCertStatus):
+ raise TypeError(
+ "cert_status must be an item from the OCSPCertStatus enum"
+ )
+ if cert_status is not OCSPCertStatus.REVOKED:
+ if revocation_time is not None:
+ raise ValueError(
+ "revocation_time can only be provided if the certificate "
+ "is revoked"
+ )
+ if revocation_reason is not None:
+ raise ValueError(
+ "revocation_reason can only be provided if the certificate"
+ " is revoked"
+ )
+ else:
+ if not isinstance(revocation_time, datetime.datetime):
+ raise TypeError("revocation_time must be a datetime object")
+
+ revocation_time = _convert_to_naive_utc_time(revocation_time)
+ if revocation_time < _EARLIEST_UTC_TIME:
+ raise ValueError(
+ "The revocation_time must be on or after"
+ " 1950 January 1."
+ )
+
+ if revocation_reason is not None and not isinstance(
+ revocation_reason, x509.ReasonFlags
+ ):
+ raise TypeError(
+ "revocation_reason must be an item from the ReasonFlags "
+ "enum or None"
+ )
+
+ self._cert_status = cert_status
+ self._revocation_time = revocation_time
+ self._revocation_reason = revocation_reason
+
+
+class OCSPResponseBuilder(object):
+ def __init__(
+ self, response=None, responder_id=None, certs=None, extensions=[]
+ ):
+ self._response = response
+ self._responder_id = responder_id
+ self._certs = certs
+ self._extensions = extensions
+
+ def add_response(
+ self,
+ cert,
+ issuer,
+ algorithm,
+ cert_status,
+ this_update,
+ next_update,
+ revocation_time,
+ revocation_reason,
+ ):
+ if self._response is not None:
+ raise ValueError("Only one response per OCSPResponse.")
+
+ singleresp = _SingleResponse(
+ cert,
+ issuer,
+ algorithm,
+ cert_status,
+ this_update,
+ next_update,
+ revocation_time,
+ revocation_reason,
+ )
+ return OCSPResponseBuilder(
+ singleresp,
+ self._responder_id,
+ self._certs,
+ self._extensions,
+ )
+
+ def responder_id(self, encoding, responder_cert):
+ if self._responder_id is not None:
+ raise ValueError("responder_id can only be set once")
+ if not isinstance(responder_cert, x509.Certificate):
+ raise TypeError("responder_cert must be a Certificate")
+ if not isinstance(encoding, OCSPResponderEncoding):
+ raise TypeError(
+ "encoding must be an element from OCSPResponderEncoding"
+ )
+
+ return OCSPResponseBuilder(
+ self._response,
+ (responder_cert, encoding),
+ self._certs,
+ self._extensions,
+ )
+
+ def certificates(self, certs):
+ if self._certs is not None:
+ raise ValueError("certificates may only be set once")
+ certs = list(certs)
+ if len(certs) == 0:
+ raise ValueError("certs must not be an empty list")
+ if not all(isinstance(x, x509.Certificate) for x in certs):
+ raise TypeError("certs must be a list of Certificates")
+ return OCSPResponseBuilder(
+ self._response,
+ self._responder_id,
+ certs,
+ self._extensions,
+ )
+
+ def add_extension(self, extension, critical):
+ if not isinstance(extension, x509.ExtensionType):
+ raise TypeError("extension must be an ExtensionType")
+
+ extension = x509.Extension(extension.oid, critical, extension)
+ _reject_duplicate_extension(extension, self._extensions)
+
+ return OCSPResponseBuilder(
+ self._response,
+ self._responder_id,
+ self._certs,
+ self._extensions + [extension],
+ )
+
+ def sign(self, private_key, algorithm):
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if self._response is None:
+ raise ValueError("You must add a response before signing")
+ if self._responder_id is None:
+ raise ValueError("You must add a responder_id before signing")
+
+ return backend.create_ocsp_response(
+ OCSPResponseStatus.SUCCESSFUL, self, private_key, algorithm
+ )
+
+ @classmethod
+ def build_unsuccessful(cls, response_status):
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not isinstance(response_status, OCSPResponseStatus):
+ raise TypeError(
+ "response_status must be an item from OCSPResponseStatus"
+ )
+ if response_status is OCSPResponseStatus.SUCCESSFUL:
+ raise ValueError("response_status cannot be SUCCESSFUL")
+
+ return backend.create_ocsp_response(response_status, None, None, None)
+
+
+@six.add_metaclass(abc.ABCMeta)
+class OCSPRequest(object):
+ @abc.abstractproperty
+ def issuer_key_hash(self):
+ """
+ The hash of the issuer public key
+ """
+
+ @abc.abstractproperty
+ def issuer_name_hash(self):
+ """
+ The hash of the issuer name
+ """
+
+ @abc.abstractproperty
+ def hash_algorithm(self):
+ """
+ The hash algorithm used in the issuer name and key hashes
+ """
+
+ @abc.abstractproperty
+ def serial_number(self):
+ """
+ The serial number of the cert whose status is being checked
+ """
+
+ @abc.abstractmethod
+ def public_bytes(self, encoding):
+ """
+ Serializes the request to DER
+ """
+
+ @abc.abstractproperty
+ def extensions(self):
+ """
+ The list of request extensions. Not single request extensions.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class OCSPResponse(object):
+ @abc.abstractproperty
+ def response_status(self):
+ """
+ The status of the response. This is a value from the OCSPResponseStatus
+ enumeration
+ """
+
+ @abc.abstractproperty
+ def signature_algorithm_oid(self):
+ """
+ The ObjectIdentifier of the signature algorithm
+ """
+
+ @abc.abstractproperty
+ def signature_hash_algorithm(self):
+ """
+ Returns a HashAlgorithm corresponding to the type of the digest signed
+ """
+
+ @abc.abstractproperty
+ def signature(self):
+ """
+ The signature bytes
+ """
+
+ @abc.abstractproperty
+ def tbs_response_bytes(self):
+ """
+ The tbsResponseData bytes
+ """
+
+ @abc.abstractproperty
+ def certificates(self):
+ """
+ A list of certificates used to help build a chain to verify the OCSP
+ response. This situation occurs when the OCSP responder uses a delegate
+ certificate.
+ """
+
+ @abc.abstractproperty
+ def responder_key_hash(self):
+ """
+ The responder's key hash or None
+ """
+
+ @abc.abstractproperty
+ def responder_name(self):
+ """
+ The responder's Name or None
+ """
+
+ @abc.abstractproperty
+ def produced_at(self):
+ """
+ The time the response was produced
+ """
+
+ @abc.abstractproperty
+ def certificate_status(self):
+ """
+ The status of the certificate (an element from the OCSPCertStatus enum)
+ """
+
+ @abc.abstractproperty
+ def revocation_time(self):
+ """
+ The date of when the certificate was revoked or None if not
+ revoked.
+ """
+
+ @abc.abstractproperty
+ def revocation_reason(self):
+ """
+ The reason the certificate was revoked or None if not specified or
+ not revoked.
+ """
+
+ @abc.abstractproperty
+ def this_update(self):
+ """
+ The most recent time at which the status being indicated is known by
+ the responder to have been correct
+ """
+
+ @abc.abstractproperty
+ def next_update(self):
+ """
+ The time when newer information will be available
+ """
+
+ @abc.abstractproperty
+ def issuer_key_hash(self):
+ """
+ The hash of the issuer public key
+ """
+
+ @abc.abstractproperty
+ def issuer_name_hash(self):
+ """
+ The hash of the issuer name
+ """
+
+ @abc.abstractproperty
+ def hash_algorithm(self):
+ """
+ The hash algorithm used in the issuer name and key hashes
+ """
+
+ @abc.abstractproperty
+ def serial_number(self):
+ """
+ The serial number of the cert whose status is being checked
+ """
+
+ @abc.abstractproperty
+ def extensions(self):
+ """
+ The list of response extensions. Not single response extensions.
+ """
+
+ @abc.abstractproperty
+ def single_extensions(self):
+ """
+ The list of single response extensions. Not response extensions.
+ """
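The new ocsp.py module above exposes builder APIs for OCSP requests and responses. A minimal request-building sketch (illustrative, not part of this diff; cert and issuer are assumed to be x509.Certificate objects loaded elsewhere):

    from cryptography.hazmat.primitives import hashes, serialization
    from cryptography.x509 import ocsp

    builder = ocsp.OCSPRequestBuilder()
    builder = builder.add_certificate(cert, issuer, hashes.SHA1())
    request = builder.build()
    der_bytes = request.public_bytes(serialization.Encoding.DER)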
diff --git a/functions/source/CreateSSHKey/cryptography/x509/oid.py b/functions/source/CreateSSHKey/cryptography/x509/oid.py
index fedea31..2bf606e 100644
--- a/functions/source/CreateSSHKey/cryptography/x509/oid.py
+++ b/functions/source/CreateSSHKey/cryptography/x509/oid.py
@@ -4,68 +4,10 @@
from __future__ import absolute_import, division, print_function
-from cryptography import utils
+from cryptography.hazmat._oid import ObjectIdentifier
from cryptography.hazmat.primitives import hashes
-class ObjectIdentifier(object):
- def __init__(self, dotted_string):
- self._dotted_string = dotted_string
-
- nodes = self._dotted_string.split(".")
- intnodes = []
-
- # There must be at least 2 nodes, the first node must be 0..2, and
- # if less than 2, the second node cannot have a value outside the
- # range 0..39. All nodes must be integers.
- for node in nodes:
- try:
- intnodes.append(int(node, 0))
- except ValueError:
- raise ValueError(
- "Malformed OID: %s (non-integer nodes)" % (
- self._dotted_string))
-
- if len(nodes) < 2:
- raise ValueError(
- "Malformed OID: %s (insufficient number of nodes)" % (
- self._dotted_string))
-
- if intnodes[0] > 2:
- raise ValueError(
- "Malformed OID: %s (first node outside valid range)" % (
- self._dotted_string))
-
- if intnodes[0] < 2 and intnodes[1] >= 40:
- raise ValueError(
- "Malformed OID: %s (second node outside valid range)" % (
- self._dotted_string))
-
- def __eq__(self, other):
- if not isinstance(other, ObjectIdentifier):
- return NotImplemented
-
- return self.dotted_string == other.dotted_string
-
- def __ne__(self, other):
- return not self == other
-
- def __repr__(self):
- return "<ObjectIdentifier(oid={0}, name={1})>".format(
- self.dotted_string,
- self._name
- )
-
- def __hash__(self):
- return hash(self.dotted_string)
-
- @property
- def _name(self):
- return _OID_NAMES.get(self, "Unknown OID")
-
- dotted_string = utils.read_only_property("_dotted_string")
-
-
class ExtensionOID(object):
SUBJECT_DIRECTORY_ATTRIBUTES = ObjectIdentifier("2.5.29.9")
SUBJECT_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.14")
@@ -82,15 +24,22 @@ class ExtensionOID(object):
EXTENDED_KEY_USAGE = ObjectIdentifier("2.5.29.37")
FRESHEST_CRL = ObjectIdentifier("2.5.29.46")
INHIBIT_ANY_POLICY = ObjectIdentifier("2.5.29.54")
+ ISSUING_DISTRIBUTION_POINT = ObjectIdentifier("2.5.29.28")
AUTHORITY_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.1")
SUBJECT_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.11")
OCSP_NO_CHECK = ObjectIdentifier("1.3.6.1.5.5.7.48.1.5")
TLS_FEATURE = ObjectIdentifier("1.3.6.1.5.5.7.1.24")
CRL_NUMBER = ObjectIdentifier("2.5.29.20")
DELTA_CRL_INDICATOR = ObjectIdentifier("2.5.29.27")
- PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS = (
- ObjectIdentifier("1.3.6.1.4.1.11129.2.4.2")
+ PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS = ObjectIdentifier(
+ "1.3.6.1.4.1.11129.2.4.2"
)
+ PRECERT_POISON = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.3")
+ SIGNED_CERTIFICATE_TIMESTAMPS = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.5")
+
+
+class OCSPExtensionOID(object):
+ NONCE = ObjectIdentifier("1.3.6.1.5.5.7.48.1.2")
class CRLEntryExtensionOID(object):
@@ -126,6 +75,10 @@ class NameOID(object):
BUSINESS_CATEGORY = ObjectIdentifier("2.5.4.15")
POSTAL_ADDRESS = ObjectIdentifier("2.5.4.16")
POSTAL_CODE = ObjectIdentifier("2.5.4.17")
+ INN = ObjectIdentifier("1.2.643.3.131.1.1")
+ OGRN = ObjectIdentifier("1.2.643.100.1")
+ SNILS = ObjectIdentifier("1.2.643.100.3")
+ UNSTRUCTURED_NAME = ObjectIdentifier("1.2.840.113549.1.9.2")
class SignatureAlgorithmOID(object):
@@ -137,6 +90,7 @@ class SignatureAlgorithmOID(object):
RSA_WITH_SHA256 = ObjectIdentifier("1.2.840.113549.1.1.11")
RSA_WITH_SHA384 = ObjectIdentifier("1.2.840.113549.1.1.12")
RSA_WITH_SHA512 = ObjectIdentifier("1.2.840.113549.1.1.13")
+ RSASSA_PSS = ObjectIdentifier("1.2.840.113549.1.1.10")
ECDSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10045.4.1")
ECDSA_WITH_SHA224 = ObjectIdentifier("1.2.840.10045.4.3.1")
ECDSA_WITH_SHA256 = ObjectIdentifier("1.2.840.10045.4.3.2")
@@ -145,6 +99,11 @@ class SignatureAlgorithmOID(object):
DSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10040.4.3")
DSA_WITH_SHA224 = ObjectIdentifier("2.16.840.1.101.3.4.3.1")
DSA_WITH_SHA256 = ObjectIdentifier("2.16.840.1.101.3.4.3.2")
+ ED25519 = ObjectIdentifier("1.3.101.112")
+ ED448 = ObjectIdentifier("1.3.101.113")
+ GOSTR3411_94_WITH_3410_2001 = ObjectIdentifier("1.2.643.2.2.3")
+ GOSTR3410_2012_WITH_3411_2012_256 = ObjectIdentifier("1.2.643.7.1.1.3.2")
+ GOSTR3410_2012_WITH_3411_2012_512 = ObjectIdentifier("1.2.643.7.1.1.3.3")
_SIG_OIDS_TO_HASH = {
@@ -162,7 +121,12 @@ class SignatureAlgorithmOID(object):
SignatureAlgorithmOID.ECDSA_WITH_SHA512: hashes.SHA512(),
SignatureAlgorithmOID.DSA_WITH_SHA1: hashes.SHA1(),
SignatureAlgorithmOID.DSA_WITH_SHA224: hashes.SHA224(),
- SignatureAlgorithmOID.DSA_WITH_SHA256: hashes.SHA256()
+ SignatureAlgorithmOID.DSA_WITH_SHA256: hashes.SHA256(),
+ SignatureAlgorithmOID.ED25519: None,
+ SignatureAlgorithmOID.ED448: None,
+ SignatureAlgorithmOID.GOSTR3411_94_WITH_3410_2001: None,
+ SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_256: None,
+ SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_512: None,
}
@@ -181,12 +145,21 @@ class AuthorityInformationAccessOID(object):
OCSP = ObjectIdentifier("1.3.6.1.5.5.7.48.1")
+class SubjectInformationAccessOID(object):
+ CA_REPOSITORY = ObjectIdentifier("1.3.6.1.5.5.7.48.5")
+
+
class CertificatePoliciesOID(object):
CPS_QUALIFIER = ObjectIdentifier("1.3.6.1.5.5.7.2.1")
CPS_USER_NOTICE = ObjectIdentifier("1.3.6.1.5.5.7.2.2")
ANY_POLICY = ObjectIdentifier("2.5.29.32.0")
+class AttributeOID(object):
+ CHALLENGE_PASSWORD = ObjectIdentifier("1.2.840.113549.1.9.7")
+ UNSTRUCTURED_NAME = ObjectIdentifier("1.2.840.113549.1.9.2")
+
+
_OID_NAMES = {
NameOID.COMMON_NAME: "commonName",
NameOID.COUNTRY_NAME: "countryName",
@@ -214,13 +187,17 @@ class CertificatePoliciesOID(object):
NameOID.BUSINESS_CATEGORY: "businessCategory",
NameOID.POSTAL_ADDRESS: "postalAddress",
NameOID.POSTAL_CODE: "postalCode",
-
+ NameOID.INN: "INN",
+ NameOID.OGRN: "OGRN",
+ NameOID.SNILS: "SNILS",
+ NameOID.UNSTRUCTURED_NAME: "unstructuredName",
SignatureAlgorithmOID.RSA_WITH_MD5: "md5WithRSAEncryption",
SignatureAlgorithmOID.RSA_WITH_SHA1: "sha1WithRSAEncryption",
SignatureAlgorithmOID.RSA_WITH_SHA224: "sha224WithRSAEncryption",
SignatureAlgorithmOID.RSA_WITH_SHA256: "sha256WithRSAEncryption",
SignatureAlgorithmOID.RSA_WITH_SHA384: "sha384WithRSAEncryption",
SignatureAlgorithmOID.RSA_WITH_SHA512: "sha512WithRSAEncryption",
+ SignatureAlgorithmOID.RSASSA_PSS: "RSASSA-PSS",
SignatureAlgorithmOID.ECDSA_WITH_SHA1: "ecdsa-with-SHA1",
SignatureAlgorithmOID.ECDSA_WITH_SHA224: "ecdsa-with-SHA224",
SignatureAlgorithmOID.ECDSA_WITH_SHA256: "ecdsa-with-SHA256",
@@ -229,6 +206,17 @@ class CertificatePoliciesOID(object):
SignatureAlgorithmOID.DSA_WITH_SHA1: "dsa-with-sha1",
SignatureAlgorithmOID.DSA_WITH_SHA224: "dsa-with-sha224",
SignatureAlgorithmOID.DSA_WITH_SHA256: "dsa-with-sha256",
+ SignatureAlgorithmOID.ED25519: "ed25519",
+ SignatureAlgorithmOID.ED448: "ed448",
+ SignatureAlgorithmOID.GOSTR3411_94_WITH_3410_2001: (
+ "GOST R 34.11-94 with GOST R 34.10-2001"
+ ),
+ SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_256: (
+ "GOST R 34.10-2012 with GOST R 34.11-2012 (256 bit)"
+ ),
+ SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_512: (
+ "GOST R 34.10-2012 with GOST R 34.11-2012 (512 bit)"
+ ),
ExtendedKeyUsageOID.SERVER_AUTH: "serverAuth",
ExtendedKeyUsageOID.CLIENT_AUTH: "clientAuth",
ExtendedKeyUsageOID.CODE_SIGNING: "codeSigning",
@@ -241,6 +229,13 @@ class CertificatePoliciesOID(object):
ExtensionOID.SUBJECT_ALTERNATIVE_NAME: "subjectAltName",
ExtensionOID.ISSUER_ALTERNATIVE_NAME: "issuerAltName",
ExtensionOID.BASIC_CONSTRAINTS: "basicConstraints",
+ ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS: (
+ "signedCertificateTimestampList"
+ ),
+ ExtensionOID.SIGNED_CERTIFICATE_TIMESTAMPS: (
+ "signedCertificateTimestampList"
+ ),
+ ExtensionOID.PRECERT_POISON: "ctPoison",
CRLEntryExtensionOID.CRL_REASON: "cRLReason",
CRLEntryExtensionOID.INVALIDITY_DATE: "invalidityDate",
CRLEntryExtensionOID.CERTIFICATE_ISSUER: "certificateIssuer",
@@ -253,6 +248,7 @@ class CertificatePoliciesOID(object):
ExtensionOID.EXTENDED_KEY_USAGE: "extendedKeyUsage",
ExtensionOID.FRESHEST_CRL: "freshestCRL",
ExtensionOID.INHIBIT_ANY_POLICY: "inhibitAnyPolicy",
+ ExtensionOID.ISSUING_DISTRIBUTION_POINT: ("issuingDistributionPoint"),
ExtensionOID.AUTHORITY_INFORMATION_ACCESS: "authorityInfoAccess",
ExtensionOID.SUBJECT_INFORMATION_ACCESS: "subjectInfoAccess",
ExtensionOID.OCSP_NO_CHECK: "OCSPNoCheck",
@@ -261,6 +257,9 @@ class CertificatePoliciesOID(object):
ExtensionOID.TLS_FEATURE: "TLSFeature",
AuthorityInformationAccessOID.OCSP: "OCSP",
AuthorityInformationAccessOID.CA_ISSUERS: "caIssuers",
+ SubjectInformationAccessOID.CA_REPOSITORY: "caRepository",
CertificatePoliciesOID.CPS_QUALIFIER: "id-qt-cps",
CertificatePoliciesOID.CPS_USER_NOTICE: "id-qt-unotice",
+ OCSPExtensionOID.NONCE: "OCSPNonce",
+ AttributeOID.CHALLENGE_PASSWORD: "challengePassword",
}
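The oid.py changes above move ObjectIdentifier into cryptography.hazmat._oid and add new constants (Ed25519/Ed448 and GOST signature algorithms, PRECERT_POISON, the OCSP nonce extension, and more). The constants remain ordinary ObjectIdentifier instances, as in this short sketch (illustrative, not part of this diff):

    from cryptography.x509.oid import ExtensionOID, SignatureAlgorithmOID

    print(ExtensionOID.ISSUING_DISTRIBUTION_POINT.dotted_string)  # 2.5.29.28
    print(SignatureAlgorithmOID.ED25519.dotted_string)           # 1.3.101.112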
diff --git a/functions/source/CreateSSHKey/enum/LICENSE b/functions/source/CreateSSHKey/enum/LICENSE
deleted file mode 100644
index 9003b88..0000000
--- a/functions/source/CreateSSHKey/enum/LICENSE
+++ /dev/null
@@ -1,32 +0,0 @@
-Copyright (c) 2013, Ethan Furman.
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions
-are met:
-
- Redistributions of source code must retain the above
- copyright notice, this list of conditions and the
- following disclaimer.
-
- Redistributions in binary form must reproduce the above
- copyright notice, this list of conditions and the following
- disclaimer in the documentation and/or other materials
- provided with the distribution.
-
- Neither the name Ethan Furman nor the names of any
- contributors may be used to endorse or promote products
- derived from this software without specific prior written
- permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
-LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
-CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
-SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
-INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
-CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
-ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-POSSIBILITY OF SUCH DAMAGE.
diff --git a/functions/source/CreateSSHKey/enum/README b/functions/source/CreateSSHKey/enum/README
deleted file mode 100644
index aa2333d..0000000
--- a/functions/source/CreateSSHKey/enum/README
+++ /dev/null
@@ -1,3 +0,0 @@
-enum34 is the new Python stdlib enum module available in Python 3.4
-backported for previous versions of Python from 2.4 to 3.3.
-tested on 2.6, 2.7, and 3.3+
diff --git a/functions/source/CreateSSHKey/enum/__init__.py b/functions/source/CreateSSHKey/enum/__init__.py
deleted file mode 100644
index d6ffb3a..0000000
--- a/functions/source/CreateSSHKey/enum/__init__.py
+++ /dev/null
@@ -1,837 +0,0 @@
-"""Python Enumerations"""
-
-import sys as _sys
-
-__all__ = ['Enum', 'IntEnum', 'unique']
-
-version = 1, 1, 6
-
-pyver = float('%s.%s' % _sys.version_info[:2])
-
-try:
- any
-except NameError:
- def any(iterable):
- for element in iterable:
- if element:
- return True
- return False
-
-try:
- from collections import OrderedDict
-except ImportError:
- OrderedDict = None
-
-try:
- basestring
-except NameError:
- # In Python 2 basestring is the ancestor of both str and unicode
- # in Python 3 it's just str, but was missing in 3.1
- basestring = str
-
-try:
- unicode
-except NameError:
- # In Python 3 unicode no longer exists (it's just str)
- unicode = str
-
-class _RouteClassAttributeToGetattr(object):
- """Route attribute access on a class to __getattr__.
-
- This is a descriptor, used to define attributes that act differently when
- accessed through an instance and through a class. Instance access remains
- normal, but access to an attribute through a class will be routed to the
- class's __getattr__ method; this is done by raising AttributeError.
-
- """
- def __init__(self, fget=None):
- self.fget = fget
-
- def __get__(self, instance, ownerclass=None):
- if instance is None:
- raise AttributeError()
- return self.fget(instance)
-
- def __set__(self, instance, value):
- raise AttributeError("can't set attribute")
-
- def __delete__(self, instance):
- raise AttributeError("can't delete attribute")
-
-
-def _is_descriptor(obj):
- """Returns True if obj is a descriptor, False otherwise."""
- return (
- hasattr(obj, '__get__') or
- hasattr(obj, '__set__') or
- hasattr(obj, '__delete__'))
-
-
-def _is_dunder(name):
- """Returns True if a __dunder__ name, False otherwise."""
- return (name[:2] == name[-2:] == '__' and
- name[2:3] != '_' and
- name[-3:-2] != '_' and
- len(name) > 4)
-
-
-def _is_sunder(name):
- """Returns True if a _sunder_ name, False otherwise."""
- return (name[0] == name[-1] == '_' and
- name[1:2] != '_' and
- name[-2:-1] != '_' and
- len(name) > 2)
-
-
-def _make_class_unpicklable(cls):
- """Make the given class un-picklable."""
- def _break_on_call_reduce(self, protocol=None):
- raise TypeError('%r cannot be pickled' % self)
- cls.__reduce_ex__ = _break_on_call_reduce
- cls.__module__ = ''
-
-
-class _EnumDict(dict):
- """Track enum member order and ensure member names are not reused.
-
- EnumMeta will use the names found in self._member_names as the
- enumeration member names.
-
- """
- def __init__(self):
- super(_EnumDict, self).__init__()
- self._member_names = []
-
- def __setitem__(self, key, value):
- """Changes anything not dundered or not a descriptor.
-
- If a descriptor is added with the same name as an enum member, the name
- is removed from _member_names (this may leave a hole in the numerical
- sequence of values).
-
- If an enum member name is used twice, an error is raised; duplicate
- values are not checked for.
-
- Single underscore (sunder) names are reserved.
-
- Note: in 3.x __order__ is simply discarded as a not necessary piece
- leftover from 2.x
-
- """
- if pyver >= 3.0 and key in ('_order_', '__order__'):
- return
- elif key == '__order__':
- key = '_order_'
- if _is_sunder(key):
- if key != '_order_':
- raise ValueError('_names_ are reserved for future Enum use')
- elif _is_dunder(key):
- pass
- elif key in self._member_names:
- # descriptor overwriting an enum?
- raise TypeError('Attempted to reuse key: %r' % key)
- elif not _is_descriptor(value):
- if key in self:
- # enum overwriting a descriptor?
- raise TypeError('Key already defined as: %r' % self[key])
- self._member_names.append(key)
- super(_EnumDict, self).__setitem__(key, value)
-
-
-# Dummy value for Enum as EnumMeta explicity checks for it, but of course until
-# EnumMeta finishes running the first time the Enum class doesn't exist. This
-# is also why there are checks in EnumMeta like `if Enum is not None`
-Enum = None
-
-
-class EnumMeta(type):
- """Metaclass for Enum"""
- @classmethod
- def __prepare__(metacls, cls, bases):
- return _EnumDict()
-
- def __new__(metacls, cls, bases, classdict):
- # an Enum class is final once enumeration items have been defined; it
- # cannot be mixed with other types (int, float, etc.) if it has an
- # inherited __new__ unless a new __new__ is defined (or the resulting
- # class will fail).
- if type(classdict) is dict:
- original_dict = classdict
- classdict = _EnumDict()
- for k, v in original_dict.items():
- classdict[k] = v
-
- member_type, first_enum = metacls._get_mixins_(bases)
- __new__, save_new, use_args = metacls._find_new_(classdict, member_type,
- first_enum)
- # save enum items into separate mapping so they don't get baked into
- # the new class
- members = dict((k, classdict[k]) for k in classdict._member_names)
- for name in classdict._member_names:
- del classdict[name]
-
- # py2 support for definition order
- _order_ = classdict.get('_order_')
- if _order_ is None:
- if pyver < 3.0:
- try:
- _order_ = [name for (name, value) in sorted(members.items(), key=lambda item: item[1])]
- except TypeError:
- _order_ = [name for name in sorted(members.keys())]
- else:
- _order_ = classdict._member_names
- else:
- del classdict['_order_']
- if pyver < 3.0:
- _order_ = _order_.replace(',', ' ').split()
- aliases = [name for name in members if name not in _order_]
- _order_ += aliases
-
- # check for illegal enum names (any others?)
- invalid_names = set(members) & set(['mro'])
- if invalid_names:
- raise ValueError('Invalid enum member name(s): %s' % (
- ', '.join(invalid_names), ))
-
- # save attributes from super classes so we know if we can take
- # the shortcut of storing members in the class dict
- base_attributes = set([a for b in bases for a in b.__dict__])
- # create our new Enum type
- enum_class = super(EnumMeta, metacls).__new__(metacls, cls, bases, classdict)
- enum_class._member_names_ = [] # names in random order
- if OrderedDict is not None:
- enum_class._member_map_ = OrderedDict()
- else:
- enum_class._member_map_ = {} # name->value map
- enum_class._member_type_ = member_type
-
- # Reverse value->name map for hashable values.
- enum_class._value2member_map_ = {}
-
- # instantiate them, checking for duplicates as we go
- # we instantiate first instead of checking for duplicates first in case
- # a custom __new__ is doing something funky with the values -- such as
- # auto-numbering ;)
- if __new__ is None:
- __new__ = enum_class.__new__
- for member_name in _order_:
- value = members[member_name]
- if not isinstance(value, tuple):
- args = (value, )
- else:
- args = value
- if member_type is tuple: # special case for tuple enums
- args = (args, ) # wrap it one more time
- if not use_args or not args:
- enum_member = __new__(enum_class)
- if not hasattr(enum_member, '_value_'):
- enum_member._value_ = value
- else:
- enum_member = __new__(enum_class, *args)
- if not hasattr(enum_member, '_value_'):
- enum_member._value_ = member_type(*args)
- value = enum_member._value_
- enum_member._name_ = member_name
- enum_member.__objclass__ = enum_class
- enum_member.__init__(*args)
- # If another member with the same value was already defined, the
- # new member becomes an alias to the existing one.
- for name, canonical_member in enum_class._member_map_.items():
- if canonical_member.value == enum_member._value_:
- enum_member = canonical_member
- break
- else:
- # Aliases don't appear in member names (only in __members__).
- enum_class._member_names_.append(member_name)
- # performance boost for any member that would not shadow
- # a DynamicClassAttribute (aka _RouteClassAttributeToGetattr)
- if member_name not in base_attributes:
- setattr(enum_class, member_name, enum_member)
- # now add to _member_map_
- enum_class._member_map_[member_name] = enum_member
- try:
- # This may fail if value is not hashable. We can't add the value
- # to the map, and by-value lookups for this value will be
- # linear.
- enum_class._value2member_map_[value] = enum_member
- except TypeError:
- pass
-
-
- # If a custom type is mixed into the Enum, and it does not know how
- # to pickle itself, pickle.dumps will succeed but pickle.loads will
- # fail. Rather than have the error show up later and possibly far
- # from the source, sabotage the pickle protocol for this class so
- # that pickle.dumps also fails.
- #
- # However, if the new class implements its own __reduce_ex__, do not
- # sabotage -- it's on them to make sure it works correctly. We use
- # __reduce_ex__ instead of any of the others as it is preferred by
- # pickle over __reduce__, and it handles all pickle protocols.
- unpicklable = False
- if '__reduce_ex__' not in classdict:
- if member_type is not object:
- methods = ('__getnewargs_ex__', '__getnewargs__',
- '__reduce_ex__', '__reduce__')
- if not any(m in member_type.__dict__ for m in methods):
- _make_class_unpicklable(enum_class)
- unpicklable = True
-
-
- # double check that repr and friends are not the mixin's or various
- # things break (such as pickle)
- for name in ('__repr__', '__str__', '__format__', '__reduce_ex__'):
- class_method = getattr(enum_class, name)
- obj_method = getattr(member_type, name, None)
- enum_method = getattr(first_enum, name, None)
- if name not in classdict and class_method is not enum_method:
- if name == '__reduce_ex__' and unpicklable:
- continue
- setattr(enum_class, name, enum_method)
-
- # method resolution and int's are not playing nice
- # Python's less than 2.6 use __cmp__
-
- if pyver < 2.6:
-
- if issubclass(enum_class, int):
- setattr(enum_class, '__cmp__', getattr(int, '__cmp__'))
-
- elif pyver < 3.0:
-
- if issubclass(enum_class, int):
- for method in (
- '__le__',
- '__lt__',
- '__gt__',
- '__ge__',
- '__eq__',
- '__ne__',
- '__hash__',
- ):
- setattr(enum_class, method, getattr(int, method))
-
- # replace any other __new__ with our own (as long as Enum is not None,
- # anyway) -- again, this is to support pickle
- if Enum is not None:
- # if the user defined their own __new__, save it before it gets
- # clobbered in case they subclass later
- if save_new:
- setattr(enum_class, '__member_new__', enum_class.__dict__['__new__'])
- setattr(enum_class, '__new__', Enum.__dict__['__new__'])
- return enum_class
-
- def __bool__(cls):
- """
- classes/types should always be True.
- """
- return True
-
- def __call__(cls, value, names=None, module=None, type=None, start=1):
- """Either returns an existing member, or creates a new enum class.
-
- This method is used both when an enum class is given a value to match
- to an enumeration member (i.e. Color(3)) and for the functional API
- (i.e. Color = Enum('Color', names='red green blue')).
-
- When used for the functional API: `module`, if set, will be stored in
- the new class' __module__ attribute; `type`, if set, will be mixed in
- as the first base class.
-
- Note: if `module` is not set this routine will attempt to discover the
- calling module by walking the frame stack; if this is unsuccessful
- the resulting class will not be pickleable.
-
- """
- if names is None: # simple value lookup
- return cls.__new__(cls, value)
- # otherwise, functional API: we're creating a new Enum type
- return cls._create_(value, names, module=module, type=type, start=start)
-
- def __contains__(cls, member):
- return isinstance(member, cls) and member.name in cls._member_map_
-
- def __delattr__(cls, attr):
- # nicer error message when someone tries to delete an attribute
- # (see issue19025).
- if attr in cls._member_map_:
- raise AttributeError(
- "%s: cannot delete Enum member." % cls.__name__)
- super(EnumMeta, cls).__delattr__(attr)
-
- def __dir__(self):
- return (['__class__', '__doc__', '__members__', '__module__'] +
- self._member_names_)
-
- @property
- def __members__(cls):
- """Returns a mapping of member name->value.
-
- This mapping lists all enum members, including aliases. Note that this
- is a copy of the internal mapping.
-
- """
- return cls._member_map_.copy()
-
- def __getattr__(cls, name):
- """Return the enum member matching `name`
-
- We use __getattr__ instead of descriptors or inserting into the enum
- class' __dict__ in order to support `name` and `value` being both
- properties for enum members (which live in the class' __dict__) and
- enum members themselves.
-
- """
- if _is_dunder(name):
- raise AttributeError(name)
- try:
- return cls._member_map_[name]
- except KeyError:
- raise AttributeError(name)
-
- def __getitem__(cls, name):
- return cls._member_map_[name]
-
- def __iter__(cls):
- return (cls._member_map_[name] for name in cls._member_names_)
-
- def __reversed__(cls):
- return (cls._member_map_[name] for name in reversed(cls._member_names_))
-
- def __len__(cls):
- return len(cls._member_names_)
-
- __nonzero__ = __bool__
-
- def __repr__(cls):
- return "" % cls.__name__
-
- def __setattr__(cls, name, value):
- """Block attempts to reassign Enum members.
-
- A simple assignment to the class namespace only changes one of the
- several possible ways to get an Enum member from the Enum class,
- resulting in an inconsistent Enumeration.
-
- """
- member_map = cls.__dict__.get('_member_map_', {})
- if name in member_map:
- raise AttributeError('Cannot reassign members.')
- super(EnumMeta, cls).__setattr__(name, value)
-
- def _create_(cls, class_name, names=None, module=None, type=None, start=1):
- """Convenience method to create a new Enum class.
-
- `names` can be:
-
- * A string containing member names, separated either with spaces or
- commas. Values are auto-numbered from 1.
- * An iterable of member names. Values are auto-numbered from 1.
- * An iterable of (member name, value) pairs.
- * A mapping of member name -> value.
-
- """
- if pyver < 3.0:
- # if class_name is unicode, attempt a conversion to ASCII
- if isinstance(class_name, unicode):
- try:
- class_name = class_name.encode('ascii')
- except UnicodeEncodeError:
- raise TypeError('%r is not representable in ASCII' % class_name)
- metacls = cls.__class__
- if type is None:
- bases = (cls, )
- else:
- bases = (type, cls)
- classdict = metacls.__prepare__(class_name, bases)
- _order_ = []
-
- # special processing needed for names?
- if isinstance(names, basestring):
- names = names.replace(',', ' ').split()
- if isinstance(names, (tuple, list)) and isinstance(names[0], basestring):
- names = [(e, i+start) for (i, e) in enumerate(names)]
-
- # Here, names is either an iterable of (name, value) or a mapping.
- item = None # in case names is empty
- for item in names:
- if isinstance(item, basestring):
- member_name, member_value = item, names[item]
- else:
- member_name, member_value = item
- classdict[member_name] = member_value
- _order_.append(member_name)
- # only set _order_ in classdict if name/value was not from a mapping
- if not isinstance(item, basestring):
- classdict['_order_'] = ' '.join(_order_)
- enum_class = metacls.__new__(metacls, class_name, bases, classdict)
-
- # TODO: replace the frame hack if a blessed way to know the calling
- # module is ever developed
- if module is None:
- try:
- module = _sys._getframe(2).f_globals['__name__']
- except (AttributeError, ValueError):
- pass
- if module is None:
- _make_class_unpicklable(enum_class)
- else:
- enum_class.__module__ = module
-
- return enum_class
-
- @staticmethod
- def _get_mixins_(bases):
- """Returns the type for creating enum members, and the first inherited
- enum class.
-
- bases: the tuple of bases that was given to __new__
-
- """
- if not bases or Enum is None:
- return object, Enum
-
-
- # double check that we are not subclassing a class with existing
- # enumeration members; while we're at it, see if any other data
- # type has been mixed in so we can use the correct __new__
- member_type = first_enum = None
- for base in bases:
- if (base is not Enum and
- issubclass(base, Enum) and
- base._member_names_):
- raise TypeError("Cannot extend enumerations")
- # base is now the last base in bases
- if not issubclass(base, Enum):
- raise TypeError("new enumerations must be created as "
- "`ClassName([mixin_type,] enum_type)`")
-
- # get correct mix-in type (either mix-in type of Enum subclass, or
- # first base if last base is Enum)
- if not issubclass(bases[0], Enum):
- member_type = bases[0] # first data type
- first_enum = bases[-1] # enum type
- else:
- for base in bases[0].__mro__:
- # most common: (IntEnum, int, Enum, object)
- # possible: (, ,
- # , ,
- # )
- if issubclass(base, Enum):
- if first_enum is None:
- first_enum = base
- else:
- if member_type is None:
- member_type = base
-
- return member_type, first_enum
-
- if pyver < 3.0:
- @staticmethod
- def _find_new_(classdict, member_type, first_enum):
- """Returns the __new__ to be used for creating the enum members.
-
- classdict: the class dictionary given to __new__
- member_type: the data type whose __new__ will be used by default
- first_enum: enumeration to check for an overriding __new__
-
- """
- # now find the correct __new__, checking to see of one was defined
- # by the user; also check earlier enum classes in case a __new__ was
- # saved as __member_new__
- __new__ = classdict.get('__new__', None)
- if __new__:
- return None, True, True # __new__, save_new, use_args
-
- N__new__ = getattr(None, '__new__')
- O__new__ = getattr(object, '__new__')
- if Enum is None:
- E__new__ = N__new__
- else:
- E__new__ = Enum.__dict__['__new__']
- # check all possibles for __member_new__ before falling back to
- # __new__
- for method in ('__member_new__', '__new__'):
- for possible in (member_type, first_enum):
- try:
- target = possible.__dict__[method]
- except (AttributeError, KeyError):
- target = getattr(possible, method, None)
- if target not in [
- None,
- N__new__,
- O__new__,
- E__new__,
- ]:
- if method == '__member_new__':
- classdict['__new__'] = target
- return None, False, True
- if isinstance(target, staticmethod):
- target = target.__get__(member_type)
- __new__ = target
- break
- if __new__ is not None:
- break
- else:
- __new__ = object.__new__
-
- # if a non-object.__new__ is used then whatever value/tuple was
- # assigned to the enum member name will be passed to __new__ and to the
- # new enum member's __init__
- if __new__ is object.__new__:
- use_args = False
- else:
- use_args = True
-
- return __new__, False, use_args
- else:
- @staticmethod
- def _find_new_(classdict, member_type, first_enum):
- """Returns the __new__ to be used for creating the enum members.
-
- classdict: the class dictionary given to __new__
- member_type: the data type whose __new__ will be used by default
- first_enum: enumeration to check for an overriding __new__
-
- """
- # now find the correct __new__, checking to see of one was defined
- # by the user; also check earlier enum classes in case a __new__ was
- # saved as __member_new__
- __new__ = classdict.get('__new__', None)
-
- # should __new__ be saved as __member_new__ later?
- save_new = __new__ is not None
-
- if __new__ is None:
- # check all possibles for __member_new__ before falling back to
- # __new__
- for method in ('__member_new__', '__new__'):
- for possible in (member_type, first_enum):
- target = getattr(possible, method, None)
- if target not in (
- None,
- None.__new__,
- object.__new__,
- Enum.__new__,
- ):
- __new__ = target
- break
- if __new__ is not None:
- break
- else:
- __new__ = object.__new__
-
- # if a non-object.__new__ is used then whatever value/tuple was
- # assigned to the enum member name will be passed to __new__ and to the
- # new enum member's __init__
- if __new__ is object.__new__:
- use_args = False
- else:
- use_args = True
-
- return __new__, save_new, use_args
-
-
-########################################################
-# In order to support Python 2 and 3 with a single
-# codebase we have to create the Enum methods separately
-# and then use the `type(name, bases, dict)` method to
-# create the class.
-########################################################
-temp_enum_dict = {}
-temp_enum_dict['__doc__'] = "Generic enumeration.\n\n Derive from this class to define new enumerations.\n\n"
-
-def __new__(cls, value):
- # all enum instances are actually created during class construction
- # without calling this method; this method is called by the metaclass'
- # __call__ (i.e. Color(3) ), and by pickle
- if type(value) is cls:
- # For lookups like Color(Color.red)
- value = value.value
- #return value
- # by-value search for a matching enum member
- # see if it's in the reverse mapping (for hashable values)
- try:
- if value in cls._value2member_map_:
- return cls._value2member_map_[value]
- except TypeError:
- # not there, now do long search -- O(n) behavior
- for member in cls._member_map_.values():
- if member.value == value:
- return member
- raise ValueError("%s is not a valid %s" % (value, cls.__name__))
-temp_enum_dict['__new__'] = __new__
-del __new__
-
-def __repr__(self):
- return "<%s.%s: %r>" % (
- self.__class__.__name__, self._name_, self._value_)
-temp_enum_dict['__repr__'] = __repr__
-del __repr__
-
-def __str__(self):
- return "%s.%s" % (self.__class__.__name__, self._name_)
-temp_enum_dict['__str__'] = __str__
-del __str__
-
-if pyver >= 3.0:
- def __dir__(self):
- added_behavior = [
- m
- for cls in self.__class__.mro()
- for m in cls.__dict__
- if m[0] != '_' and m not in self._member_map_
- ]
- return (['__class__', '__doc__', '__module__', ] + added_behavior)
- temp_enum_dict['__dir__'] = __dir__
- del __dir__
-
-def __format__(self, format_spec):
- # mixed-in Enums should use the mixed-in type's __format__, otherwise
- # we can get strange results with the Enum name showing up instead of
- # the value
-
- # pure Enum branch
- if self._member_type_ is object:
- cls = str
- val = str(self)
- # mix-in branch
- else:
- cls = self._member_type_
- val = self.value
- return cls.__format__(val, format_spec)
-temp_enum_dict['__format__'] = __format__
-del __format__
-
-
-####################################
-# Python's less than 2.6 use __cmp__
-
-if pyver < 2.6:
-
- def __cmp__(self, other):
- if type(other) is self.__class__:
- if self is other:
- return 0
- return -1
- return NotImplemented
- raise TypeError("unorderable types: %s() and %s()" % (self.__class__.__name__, other.__class__.__name__))
- temp_enum_dict['__cmp__'] = __cmp__
- del __cmp__
-
-else:
-
- def __le__(self, other):
- raise TypeError("unorderable types: %s() <= %s()" % (self.__class__.__name__, other.__class__.__name__))
- temp_enum_dict['__le__'] = __le__
- del __le__
-
- def __lt__(self, other):
- raise TypeError("unorderable types: %s() < %s()" % (self.__class__.__name__, other.__class__.__name__))
- temp_enum_dict['__lt__'] = __lt__
- del __lt__
-
- def __ge__(self, other):
- raise TypeError("unorderable types: %s() >= %s()" % (self.__class__.__name__, other.__class__.__name__))
- temp_enum_dict['__ge__'] = __ge__
- del __ge__
-
- def __gt__(self, other):
- raise TypeError("unorderable types: %s() > %s()" % (self.__class__.__name__, other.__class__.__name__))
- temp_enum_dict['__gt__'] = __gt__
- del __gt__
-
-
-def __eq__(self, other):
- if type(other) is self.__class__:
- return self is other
- return NotImplemented
-temp_enum_dict['__eq__'] = __eq__
-del __eq__
-
-def __ne__(self, other):
- if type(other) is self.__class__:
- return self is not other
- return NotImplemented
-temp_enum_dict['__ne__'] = __ne__
-del __ne__
-
-def __hash__(self):
- return hash(self._name_)
-temp_enum_dict['__hash__'] = __hash__
-del __hash__
-
-def __reduce_ex__(self, proto):
- return self.__class__, (self._value_, )
-temp_enum_dict['__reduce_ex__'] = __reduce_ex__
-del __reduce_ex__
-
-# _RouteClassAttributeToGetattr is used to provide access to the `name`
-# and `value` properties of enum members while keeping some measure of
-# protection from modification, while still allowing for an enumeration
-# to have members named `name` and `value`. This works because enumeration
-# members are not set directly on the enum class -- __getattr__ is
-# used to look them up.
-
-@_RouteClassAttributeToGetattr
-def name(self):
- return self._name_
-temp_enum_dict['name'] = name
-del name
-
-@_RouteClassAttributeToGetattr
-def value(self):
- return self._value_
-temp_enum_dict['value'] = value
-del value
-
-@classmethod
-def _convert(cls, name, module, filter, source=None):
- """
- Create a new Enum subclass that replaces a collection of global constants
- """
- # convert all constants from source (or module) that pass filter() to
- # a new Enum called name, and export the enum and its members back to
- # module;
- # also, replace the __reduce_ex__ method so unpickling works in
- # previous Python versions
- module_globals = vars(_sys.modules[module])
- if source:
- source = vars(source)
- else:
- source = module_globals
- members = dict((name, value) for name, value in source.items() if filter(name))
- cls = cls(name, members, module=module)
- cls.__reduce_ex__ = _reduce_ex_by_name
- module_globals.update(cls.__members__)
- module_globals[name] = cls
- return cls
-temp_enum_dict['_convert'] = _convert
-del _convert
-
-Enum = EnumMeta('Enum', (object, ), temp_enum_dict)
-del temp_enum_dict
-
-# Enum has now been created
-###########################
-
-class IntEnum(int, Enum):
- """Enum where members are also (and must be) ints"""
-
-def _reduce_ex_by_name(self, proto):
- return self.name
-
-def unique(enumeration):
- """Class decorator that ensures only unique members exist in an enumeration."""
- duplicates = []
- for name, member in enumeration.__members__.items():
- if name != member.name:
- duplicates.append((name, member.name))
- if duplicates:
- duplicate_names = ', '.join(
- ["%s -> %s" % (alias, name) for (alias, name) in duplicates]
- )
- raise ValueError('duplicate names found in %r: %s' %
- (enumeration, duplicate_names)
- )
- return enumeration
diff --git a/functions/source/CreateSSHKey/idna/__init__.py b/functions/source/CreateSSHKey/idna/__init__.py
deleted file mode 100644
index 847bf93..0000000
--- a/functions/source/CreateSSHKey/idna/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-from .package_data import __version__
-from .core import *
diff --git a/functions/source/CreateSSHKey/idna/codec.py b/functions/source/CreateSSHKey/idna/codec.py
deleted file mode 100644
index 98c65ea..0000000
--- a/functions/source/CreateSSHKey/idna/codec.py
+++ /dev/null
@@ -1,118 +0,0 @@
-from .core import encode, decode, alabel, ulabel, IDNAError
-import codecs
-import re
-
-_unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]')
-
-class Codec(codecs.Codec):
-
- def encode(self, data, errors='strict'):
-
- if errors != 'strict':
- raise IDNAError("Unsupported error handling \"{0}\"".format(errors))
-
- if not data:
- return "", 0
-
- return encode(data), len(data)
-
- def decode(self, data, errors='strict'):
-
- if errors != 'strict':
- raise IDNAError("Unsupported error handling \"{0}\"".format(errors))
-
- if not data:
- return u"", 0
-
- return decode(data), len(data)
-
-class IncrementalEncoder(codecs.BufferedIncrementalEncoder):
- def _buffer_encode(self, data, errors, final):
- if errors != 'strict':
- raise IDNAError("Unsupported error handling \"{0}\"".format(errors))
-
- if not data:
- return ("", 0)
-
- labels = _unicode_dots_re.split(data)
- trailing_dot = u''
- if labels:
- if not labels[-1]:
- trailing_dot = '.'
- del labels[-1]
- elif not final:
- # Keep potentially unfinished label until the next call
- del labels[-1]
- if labels:
- trailing_dot = '.'
-
- result = []
- size = 0
- for label in labels:
- result.append(alabel(label))
- if size:
- size += 1
- size += len(label)
-
- # Join with U+002E
- result = ".".join(result) + trailing_dot
- size += len(trailing_dot)
- return (result, size)
-
-class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
- def _buffer_decode(self, data, errors, final):
- if errors != 'strict':
- raise IDNAError("Unsupported error handling \"{0}\"".format(errors))
-
- if not data:
- return (u"", 0)
-
- # IDNA allows decoding to operate on Unicode strings, too.
- if isinstance(data, unicode):
- labels = _unicode_dots_re.split(data)
- else:
- # Must be ASCII string
- data = str(data)
- unicode(data, "ascii")
- labels = data.split(".")
-
- trailing_dot = u''
- if labels:
- if not labels[-1]:
- trailing_dot = u'.'
- del labels[-1]
- elif not final:
- # Keep potentially unfinished label until the next call
- del labels[-1]
- if labels:
- trailing_dot = u'.'
-
- result = []
- size = 0
- for label in labels:
- result.append(ulabel(label))
- if size:
- size += 1
- size += len(label)
-
- result = u".".join(result) + trailing_dot
- size += len(trailing_dot)
- return (result, size)
-
-
-class StreamWriter(Codec, codecs.StreamWriter):
- pass
-
-class StreamReader(Codec, codecs.StreamReader):
- pass
-
-def getregentry():
- return codecs.CodecInfo(
- name='idna',
- encode=Codec().encode,
- decode=Codec().decode,
- incrementalencoder=IncrementalEncoder,
- incrementaldecoder=IncrementalDecoder,
- streamwriter=StreamWriter,
- streamreader=StreamReader,
- )
diff --git a/functions/source/CreateSSHKey/idna/compat.py b/functions/source/CreateSSHKey/idna/compat.py
deleted file mode 100644
index 4d47f33..0000000
--- a/functions/source/CreateSSHKey/idna/compat.py
+++ /dev/null
@@ -1,12 +0,0 @@
-from .core import *
-from .codec import *
-
-def ToASCII(label):
- return encode(label)
-
-def ToUnicode(label):
- return decode(label)
-
-def nameprep(s):
- raise NotImplementedError("IDNA 2008 does not utilise nameprep protocol")
-
diff --git a/functions/source/CreateSSHKey/idna/core.py b/functions/source/CreateSSHKey/idna/core.py
deleted file mode 100644
index b55b664..0000000
--- a/functions/source/CreateSSHKey/idna/core.py
+++ /dev/null
@@ -1,387 +0,0 @@
-from . import idnadata
-import bisect
-import unicodedata
-import re
-import sys
-from .intranges import intranges_contain
-
-_virama_combining_class = 9
-_alabel_prefix = b'xn--'
-_unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]')
-
-if sys.version_info[0] == 3:
- unicode = str
- unichr = chr
-
-class IDNAError(UnicodeError):
- """ Base exception for all IDNA-encoding related problems """
- pass
-
-
-class IDNABidiError(IDNAError):
- """ Exception when bidirectional requirements are not satisfied """
- pass
-
-
-class InvalidCodepoint(IDNAError):
- """ Exception when a disallowed or unallocated codepoint is used """
- pass
-
-
-class InvalidCodepointContext(IDNAError):
- """ Exception when the codepoint is not valid in the context it is used """
- pass
-
-
-def _combining_class(cp):
- return unicodedata.combining(unichr(cp))
-
-def _is_script(cp, script):
- return intranges_contain(ord(cp), idnadata.scripts[script])
-
-def _punycode(s):
- return s.encode('punycode')
-
-def _unot(s):
- return 'U+{0:04X}'.format(s)
-
-
-def valid_label_length(label):
-
- if len(label) > 63:
- return False
- return True
-
-
-def valid_string_length(label, trailing_dot):
-
- if len(label) > (254 if trailing_dot else 253):
- return False
- return True
-
-
-def check_bidi(label, check_ltr=False):
-
- # Bidi rules should only be applied if string contains RTL characters
- bidi_label = False
- for (idx, cp) in enumerate(label, 1):
- direction = unicodedata.bidirectional(cp)
- if direction == '':
- # String likely comes from a newer version of Unicode
- raise IDNABidiError('Unknown directionality in label {0} at position {1}'.format(repr(label), idx))
- if direction in ['R', 'AL', 'AN']:
- bidi_label = True
- break
- if not bidi_label and not check_ltr:
- return True
-
- # Bidi rule 1
- direction = unicodedata.bidirectional(label[0])
- if direction in ['R', 'AL']:
- rtl = True
- elif direction == 'L':
- rtl = False
- else:
- raise IDNABidiError('First codepoint in label {0} must be directionality L, R or AL'.format(repr(label)))
-
- valid_ending = False
- number_type = False
- for (idx, cp) in enumerate(label, 1):
- direction = unicodedata.bidirectional(cp)
-
- if rtl:
- # Bidi rule 2
- if not direction in ['R', 'AL', 'AN', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']:
- raise IDNABidiError('Invalid direction for codepoint at position {0} in a right-to-left label'.format(idx))
- # Bidi rule 3
- if direction in ['R', 'AL', 'EN', 'AN']:
- valid_ending = True
- elif direction != 'NSM':
- valid_ending = False
- # Bidi rule 4
- if direction in ['AN', 'EN']:
- if not number_type:
- number_type = direction
- else:
- if number_type != direction:
- raise IDNABidiError('Can not mix numeral types in a right-to-left label')
- else:
- # Bidi rule 5
- if not direction in ['L', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']:
- raise IDNABidiError('Invalid direction for codepoint at position {0} in a left-to-right label'.format(idx))
- # Bidi rule 6
- if direction in ['L', 'EN']:
- valid_ending = True
- elif direction != 'NSM':
- valid_ending = False
-
- if not valid_ending:
- raise IDNABidiError('Label ends with illegal codepoint directionality')
-
- return True
-
-
-def check_initial_combiner(label):
-
- if unicodedata.category(label[0])[0] == 'M':
- raise IDNAError('Label begins with an illegal combining character')
- return True
-
-
-def check_hyphen_ok(label):
-
- if label[2:4] == '--':
- raise IDNAError('Label has disallowed hyphens in 3rd and 4th position')
- if label[0] == '-' or label[-1] == '-':
- raise IDNAError('Label must not start or end with a hyphen')
- return True
-
-
-def check_nfc(label):
-
- if unicodedata.normalize('NFC', label) != label:
- raise IDNAError('Label must be in Normalization Form C')
-
-
-def valid_contextj(label, pos):
-
- cp_value = ord(label[pos])
-
- if cp_value == 0x200c:
-
- if pos > 0:
- if _combining_class(ord(label[pos - 1])) == _virama_combining_class:
- return True
-
- ok = False
- for i in range(pos-1, -1, -1):
- joining_type = idnadata.joining_types.get(ord(label[i]))
- if joining_type == ord('T'):
- continue
- if joining_type in [ord('L'), ord('D')]:
- ok = True
- break
-
- if not ok:
- return False
-
- ok = False
- for i in range(pos+1, len(label)):
- joining_type = idnadata.joining_types.get(ord(label[i]))
- if joining_type == ord('T'):
- continue
- if joining_type in [ord('R'), ord('D')]:
- ok = True
- break
- return ok
-
- if cp_value == 0x200d:
-
- if pos > 0:
- if _combining_class(ord(label[pos - 1])) == _virama_combining_class:
- return True
- return False
-
- else:
-
- return False
-
-
-def valid_contexto(label, pos, exception=False):
-
- cp_value = ord(label[pos])
-
- if cp_value == 0x00b7:
- if 0 < pos < len(label)-1:
- if ord(label[pos - 1]) == 0x006c and ord(label[pos + 1]) == 0x006c:
- return True
- return False
-
- elif cp_value == 0x0375:
- if pos < len(label)-1 and len(label) > 1:
- return _is_script(label[pos + 1], 'Greek')
- return False
-
- elif cp_value == 0x05f3 or cp_value == 0x05f4:
- if pos > 0:
- return _is_script(label[pos - 1], 'Hebrew')
- return False
-
- elif cp_value == 0x30fb:
- for cp in label:
- if cp == u'\u30fb':
- continue
- if _is_script(cp, 'Hiragana') or _is_script(cp, 'Katakana') or _is_script(cp, 'Han'):
- return True
- return False
-
- elif 0x660 <= cp_value <= 0x669:
- for cp in label:
- if 0x6f0 <= ord(cp) <= 0x06f9:
- return False
- return True
-
- elif 0x6f0 <= cp_value <= 0x6f9:
- for cp in label:
- if 0x660 <= ord(cp) <= 0x0669:
- return False
- return True
-
-
-def check_label(label):
-
- if isinstance(label, (bytes, bytearray)):
- label = label.decode('utf-8')
- if len(label) == 0:
- raise IDNAError('Empty Label')
-
- check_nfc(label)
- check_hyphen_ok(label)
- check_initial_combiner(label)
-
- for (pos, cp) in enumerate(label):
- cp_value = ord(cp)
- if intranges_contain(cp_value, idnadata.codepoint_classes['PVALID']):
- continue
- elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTJ']):
- if not valid_contextj(label, pos):
- raise InvalidCodepointContext('Joiner {0} not allowed at position {1} in {2}'.format(_unot(cp_value), pos+1, repr(label)))
- elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTO']):
- if not valid_contexto(label, pos):
- raise InvalidCodepointContext('Codepoint {0} not allowed at position {1} in {2}'.format(_unot(cp_value), pos+1, repr(label)))
- else:
- raise InvalidCodepoint('Codepoint {0} at position {1} of {2} not allowed'.format(_unot(cp_value), pos+1, repr(label)))
-
- check_bidi(label)
-
-
-def alabel(label):
-
- try:
- label = label.encode('ascii')
- try:
- ulabel(label)
- except IDNAError:
- raise IDNAError('The label {0} is not a valid A-label'.format(label))
- if not valid_label_length(label):
- raise IDNAError('Label too long')
- return label
- except UnicodeEncodeError:
- pass
-
- if not label:
- raise IDNAError('No Input')
-
- label = unicode(label)
- check_label(label)
- label = _punycode(label)
- label = _alabel_prefix + label
-
- if not valid_label_length(label):
- raise IDNAError('Label too long')
-
- return label
-
-
-def ulabel(label):
-
- if not isinstance(label, (bytes, bytearray)):
- try:
- label = label.encode('ascii')
- except UnicodeEncodeError:
- check_label(label)
- return label
-
- label = label.lower()
- if label.startswith(_alabel_prefix):
- label = label[len(_alabel_prefix):]
- else:
- check_label(label)
- return label.decode('ascii')
-
- label = label.decode('punycode')
- check_label(label)
- return label
-
-
-def uts46_remap(domain, std3_rules=True, transitional=False):
- """Re-map the characters in the string according to UTS46 processing."""
- from .uts46data import uts46data
- output = u""
- try:
- for pos, char in enumerate(domain):
- code_point = ord(char)
- uts46row = uts46data[code_point if code_point < 256 else
- bisect.bisect_left(uts46data, (code_point, "Z")) - 1]
- status = uts46row[1]
- replacement = uts46row[2] if len(uts46row) == 3 else None
- if (status == "V" or
- (status == "D" and not transitional) or
- (status == "3" and std3_rules and replacement is None)):
- output += char
- elif replacement is not None and (status == "M" or
- (status == "3" and std3_rules) or
- (status == "D" and transitional)):
- output += replacement
- elif status != "I":
- raise IndexError()
- return unicodedata.normalize("NFC", output)
- except IndexError:
- raise InvalidCodepoint(
- "Codepoint {0} not allowed at position {1} in {2}".format(
- _unot(code_point), pos + 1, repr(domain)))
-
-
-def encode(s, strict=False, uts46=False, std3_rules=False, transitional=False):
-
- if isinstance(s, (bytes, bytearray)):
- s = s.decode("ascii")
- if uts46:
- s = uts46_remap(s, std3_rules, transitional)
- trailing_dot = False
- result = []
- if strict:
- labels = s.split('.')
- else:
- labels = _unicode_dots_re.split(s)
- while labels and not labels[0]:
- del labels[0]
- if not labels:
- raise IDNAError('Empty domain')
- if labels[-1] == '':
- del labels[-1]
- trailing_dot = True
- for label in labels:
- result.append(alabel(label))
- if trailing_dot:
- result.append(b'')
- s = b'.'.join(result)
- if not valid_string_length(s, trailing_dot):
- raise IDNAError('Domain too long')
- return s
-
-
-def decode(s, strict=False, uts46=False, std3_rules=False):
-
- if isinstance(s, (bytes, bytearray)):
- s = s.decode("ascii")
- if uts46:
- s = uts46_remap(s, std3_rules, False)
- trailing_dot = False
- result = []
- if not strict:
- labels = _unicode_dots_re.split(s)
- else:
- labels = s.split(u'.')
- while labels and not labels[0]:
- del labels[0]
- if not labels:
- raise IDNAError('Empty domain')
- if not labels[-1]:
- del labels[-1]
- trailing_dot = True
- for label in labels:
- result.append(ulabel(label))
- if trailing_dot:
- result.append(u'')
- return u'.'.join(result)
diff --git a/functions/source/CreateSSHKey/idna/idnadata.py b/functions/source/CreateSSHKey/idna/idnadata.py
deleted file mode 100644
index c48f1b5..0000000
--- a/functions/source/CreateSSHKey/idna/idnadata.py
+++ /dev/null
@@ -1,1585 +0,0 @@
-# This file is automatically generated by tools/idna-data
-
-__version__ = "6.3.0"
-scripts = {
- 'Greek': (
- 0x37000000374,
- 0x37500000378,
- 0x37a0000037e,
- 0x38400000385,
- 0x38600000387,
- 0x3880000038b,
- 0x38c0000038d,
- 0x38e000003a2,
- 0x3a3000003e2,
- 0x3f000000400,
- 0x1d2600001d2b,
- 0x1d5d00001d62,
- 0x1d6600001d6b,
- 0x1dbf00001dc0,
- 0x1f0000001f16,
- 0x1f1800001f1e,
- 0x1f2000001f46,
- 0x1f4800001f4e,
- 0x1f5000001f58,
- 0x1f5900001f5a,
- 0x1f5b00001f5c,
- 0x1f5d00001f5e,
- 0x1f5f00001f7e,
- 0x1f8000001fb5,
- 0x1fb600001fc5,
- 0x1fc600001fd4,
- 0x1fd600001fdc,
- 0x1fdd00001ff0,
- 0x1ff200001ff5,
- 0x1ff600001fff,
- 0x212600002127,
- 0x101400001018b,
- 0x1d2000001d246,
- ),
- 'Han': (
- 0x2e8000002e9a,
- 0x2e9b00002ef4,
- 0x2f0000002fd6,
- 0x300500003006,
- 0x300700003008,
- 0x30210000302a,
- 0x30380000303c,
- 0x340000004db6,
- 0x4e0000009fcd,
- 0xf9000000fa6e,
- 0xfa700000fada,
- 0x200000002a6d7,
- 0x2a7000002b735,
- 0x2b7400002b81e,
- 0x2f8000002fa1e,
- ),
- 'Hebrew': (
- 0x591000005c8,
- 0x5d0000005eb,
- 0x5f0000005f5,
- 0xfb1d0000fb37,
- 0xfb380000fb3d,
- 0xfb3e0000fb3f,
- 0xfb400000fb42,
- 0xfb430000fb45,
- 0xfb460000fb50,
- ),
- 'Hiragana': (
- 0x304100003097,
- 0x309d000030a0,
- 0x1b0010001b002,
- 0x1f2000001f201,
- ),
- 'Katakana': (
- 0x30a1000030fb,
- 0x30fd00003100,
- 0x31f000003200,
- 0x32d0000032ff,
- 0x330000003358,
- 0xff660000ff70,
- 0xff710000ff9e,
- 0x1b0000001b001,
- ),
-}
-joining_types = {
- 0x600: 85,
- 0x601: 85,
- 0x602: 85,
- 0x603: 85,
- 0x604: 85,
- 0x608: 85,
- 0x60b: 85,
- 0x620: 68,
- 0x621: 85,
- 0x622: 82,
- 0x623: 82,
- 0x624: 82,
- 0x625: 82,
- 0x626: 68,
- 0x627: 82,
- 0x628: 68,
- 0x629: 82,
- 0x62a: 68,
- 0x62b: 68,
- 0x62c: 68,
- 0x62d: 68,
- 0x62e: 68,
- 0x62f: 82,
- 0x630: 82,
- 0x631: 82,
- 0x632: 82,
- 0x633: 68,
- 0x634: 68,
- 0x635: 68,
- 0x636: 68,
- 0x637: 68,
- 0x638: 68,
- 0x639: 68,
- 0x63a: 68,
- 0x63b: 68,
- 0x63c: 68,
- 0x63d: 68,
- 0x63e: 68,
- 0x63f: 68,
- 0x640: 67,
- 0x641: 68,
- 0x642: 68,
- 0x643: 68,
- 0x644: 68,
- 0x645: 68,
- 0x646: 68,
- 0x647: 68,
- 0x648: 82,
- 0x649: 68,
- 0x64a: 68,
- 0x66e: 68,
- 0x66f: 68,
- 0x671: 82,
- 0x672: 82,
- 0x673: 82,
- 0x674: 85,
- 0x675: 82,
- 0x676: 82,
- 0x677: 82,
- 0x678: 68,
- 0x679: 68,
- 0x67a: 68,
- 0x67b: 68,
- 0x67c: 68,
- 0x67d: 68,
- 0x67e: 68,
- 0x67f: 68,
- 0x680: 68,
- 0x681: 68,
- 0x682: 68,
- 0x683: 68,
- 0x684: 68,
- 0x685: 68,
- 0x686: 68,
- 0x687: 68,
- 0x688: 82,
- 0x689: 82,
- 0x68a: 82,
- 0x68b: 82,
- 0x68c: 82,
- 0x68d: 82,
- 0x68e: 82,
- 0x68f: 82,
- 0x690: 82,
- 0x691: 82,
- 0x692: 82,
- 0x693: 82,
- 0x694: 82,
- 0x695: 82,
- 0x696: 82,
- 0x697: 82,
- 0x698: 82,
- 0x699: 82,
- 0x69a: 68,
- 0x69b: 68,
- 0x69c: 68,
- 0x69d: 68,
- 0x69e: 68,
- 0x69f: 68,
- 0x6a0: 68,
- 0x6a1: 68,
- 0x6a2: 68,
- 0x6a3: 68,
- 0x6a4: 68,
- 0x6a5: 68,
- 0x6a6: 68,
- 0x6a7: 68,
- 0x6a8: 68,
- 0x6a9: 68,
- 0x6aa: 68,
- 0x6ab: 68,
- 0x6ac: 68,
- 0x6ad: 68,
- 0x6ae: 68,
- 0x6af: 68,
- 0x6b0: 68,
- 0x6b1: 68,
- 0x6b2: 68,
- 0x6b3: 68,
- 0x6b4: 68,
- 0x6b5: 68,
- 0x6b6: 68,
- 0x6b7: 68,
- 0x6b8: 68,
- 0x6b9: 68,
- 0x6ba: 68,
- 0x6bb: 68,
- 0x6bc: 68,
- 0x6bd: 68,
- 0x6be: 68,
- 0x6bf: 68,
- 0x6c0: 82,
- 0x6c1: 68,
- 0x6c2: 68,
- 0x6c3: 82,
- 0x6c4: 82,
- 0x6c5: 82,
- 0x6c6: 82,
- 0x6c7: 82,
- 0x6c8: 82,
- 0x6c9: 82,
- 0x6ca: 82,
- 0x6cb: 82,
- 0x6cc: 68,
- 0x6cd: 82,
- 0x6ce: 68,
- 0x6cf: 82,
- 0x6d0: 68,
- 0x6d1: 68,
- 0x6d2: 82,
- 0x6d3: 82,
- 0x6d5: 82,
- 0x6dd: 85,
- 0x6ee: 82,
- 0x6ef: 82,
- 0x6fa: 68,
- 0x6fb: 68,
- 0x6fc: 68,
- 0x6ff: 68,
- 0x710: 82,
- 0x712: 68,
- 0x713: 68,
- 0x714: 68,
- 0x715: 82,
- 0x716: 82,
- 0x717: 82,
- 0x718: 82,
- 0x719: 82,
- 0x71a: 68,
- 0x71b: 68,
- 0x71c: 68,
- 0x71d: 68,
- 0x71e: 82,
- 0x71f: 68,
- 0x720: 68,
- 0x721: 68,
- 0x722: 68,
- 0x723: 68,
- 0x724: 68,
- 0x725: 68,
- 0x726: 68,
- 0x727: 68,
- 0x728: 82,
- 0x729: 68,
- 0x72a: 82,
- 0x72b: 68,
- 0x72c: 82,
- 0x72d: 68,
- 0x72e: 68,
- 0x72f: 82,
- 0x74d: 82,
- 0x74e: 68,
- 0x74f: 68,
- 0x750: 68,
- 0x751: 68,
- 0x752: 68,
- 0x753: 68,
- 0x754: 68,
- 0x755: 68,
- 0x756: 68,
- 0x757: 68,
- 0x758: 68,
- 0x759: 82,
- 0x75a: 82,
- 0x75b: 82,
- 0x75c: 68,
- 0x75d: 68,
- 0x75e: 68,
- 0x75f: 68,
- 0x760: 68,
- 0x761: 68,
- 0x762: 68,
- 0x763: 68,
- 0x764: 68,
- 0x765: 68,
- 0x766: 68,
- 0x767: 68,
- 0x768: 68,
- 0x769: 68,
- 0x76a: 68,
- 0x76b: 82,
- 0x76c: 82,
- 0x76d: 68,
- 0x76e: 68,
- 0x76f: 68,
- 0x770: 68,
- 0x771: 82,
- 0x772: 68,
- 0x773: 82,
- 0x774: 82,
- 0x775: 68,
- 0x776: 68,
- 0x777: 68,
- 0x778: 82,
- 0x779: 82,
- 0x77a: 68,
- 0x77b: 68,
- 0x77c: 68,
- 0x77d: 68,
- 0x77e: 68,
- 0x77f: 68,
- 0x7ca: 68,
- 0x7cb: 68,
- 0x7cc: 68,
- 0x7cd: 68,
- 0x7ce: 68,
- 0x7cf: 68,
- 0x7d0: 68,
- 0x7d1: 68,
- 0x7d2: 68,
- 0x7d3: 68,
- 0x7d4: 68,
- 0x7d5: 68,
- 0x7d6: 68,
- 0x7d7: 68,
- 0x7d8: 68,
- 0x7d9: 68,
- 0x7da: 68,
- 0x7db: 68,
- 0x7dc: 68,
- 0x7dd: 68,
- 0x7de: 68,
- 0x7df: 68,
- 0x7e0: 68,
- 0x7e1: 68,
- 0x7e2: 68,
- 0x7e3: 68,
- 0x7e4: 68,
- 0x7e5: 68,
- 0x7e6: 68,
- 0x7e7: 68,
- 0x7e8: 68,
- 0x7e9: 68,
- 0x7ea: 68,
- 0x7fa: 67,
- 0x840: 82,
- 0x841: 68,
- 0x842: 68,
- 0x843: 68,
- 0x844: 68,
- 0x845: 68,
- 0x846: 82,
- 0x847: 68,
- 0x848: 68,
- 0x849: 82,
- 0x84a: 68,
- 0x84b: 68,
- 0x84c: 68,
- 0x84d: 68,
- 0x84e: 68,
- 0x84f: 82,
- 0x850: 68,
- 0x851: 68,
- 0x852: 68,
- 0x853: 68,
- 0x854: 82,
- 0x855: 68,
- 0x856: 85,
- 0x857: 85,
- 0x858: 85,
- 0x8a0: 68,
- 0x8a2: 68,
- 0x8a3: 68,
- 0x8a4: 68,
- 0x8a5: 68,
- 0x8a6: 68,
- 0x8a7: 68,
- 0x8a8: 68,
- 0x8a9: 68,
- 0x8aa: 82,
- 0x8ab: 82,
- 0x8ac: 82,
- 0x1806: 85,
- 0x1807: 68,
- 0x180a: 67,
- 0x180e: 85,
- 0x1820: 68,
- 0x1821: 68,
- 0x1822: 68,
- 0x1823: 68,
- 0x1824: 68,
- 0x1825: 68,
- 0x1826: 68,
- 0x1827: 68,
- 0x1828: 68,
- 0x1829: 68,
- 0x182a: 68,
- 0x182b: 68,
- 0x182c: 68,
- 0x182d: 68,
- 0x182e: 68,
- 0x182f: 68,
- 0x1830: 68,
- 0x1831: 68,
- 0x1832: 68,
- 0x1833: 68,
- 0x1834: 68,
- 0x1835: 68,
- 0x1836: 68,
- 0x1837: 68,
- 0x1838: 68,
- 0x1839: 68,
- 0x183a: 68,
- 0x183b: 68,
- 0x183c: 68,
- 0x183d: 68,
- 0x183e: 68,
- 0x183f: 68,
- 0x1840: 68,
- 0x1841: 68,
- 0x1842: 68,
- 0x1843: 68,
- 0x1844: 68,
- 0x1845: 68,
- 0x1846: 68,
- 0x1847: 68,
- 0x1848: 68,
- 0x1849: 68,
- 0x184a: 68,
- 0x184b: 68,
- 0x184c: 68,
- 0x184d: 68,
- 0x184e: 68,
- 0x184f: 68,
- 0x1850: 68,
- 0x1851: 68,
- 0x1852: 68,
- 0x1853: 68,
- 0x1854: 68,
- 0x1855: 68,
- 0x1856: 68,
- 0x1857: 68,
- 0x1858: 68,
- 0x1859: 68,
- 0x185a: 68,
- 0x185b: 68,
- 0x185c: 68,
- 0x185d: 68,
- 0x185e: 68,
- 0x185f: 68,
- 0x1860: 68,
- 0x1861: 68,
- 0x1862: 68,
- 0x1863: 68,
- 0x1864: 68,
- 0x1865: 68,
- 0x1866: 68,
- 0x1867: 68,
- 0x1868: 68,
- 0x1869: 68,
- 0x186a: 68,
- 0x186b: 68,
- 0x186c: 68,
- 0x186d: 68,
- 0x186e: 68,
- 0x186f: 68,
- 0x1870: 68,
- 0x1871: 68,
- 0x1872: 68,
- 0x1873: 68,
- 0x1874: 68,
- 0x1875: 68,
- 0x1876: 68,
- 0x1877: 68,
- 0x1880: 85,
- 0x1881: 85,
- 0x1882: 85,
- 0x1883: 85,
- 0x1884: 85,
- 0x1885: 85,
- 0x1886: 85,
- 0x1887: 68,
- 0x1888: 68,
- 0x1889: 68,
- 0x188a: 68,
- 0x188b: 68,
- 0x188c: 68,
- 0x188d: 68,
- 0x188e: 68,
- 0x188f: 68,
- 0x1890: 68,
- 0x1891: 68,
- 0x1892: 68,
- 0x1893: 68,
- 0x1894: 68,
- 0x1895: 68,
- 0x1896: 68,
- 0x1897: 68,
- 0x1898: 68,
- 0x1899: 68,
- 0x189a: 68,
- 0x189b: 68,
- 0x189c: 68,
- 0x189d: 68,
- 0x189e: 68,
- 0x189f: 68,
- 0x18a0: 68,
- 0x18a1: 68,
- 0x18a2: 68,
- 0x18a3: 68,
- 0x18a4: 68,
- 0x18a5: 68,
- 0x18a6: 68,
- 0x18a7: 68,
- 0x18a8: 68,
- 0x18aa: 68,
- 0x200c: 85,
- 0x200d: 67,
- 0x2066: 85,
- 0x2067: 85,
- 0x2068: 85,
- 0x2069: 85,
- 0xa840: 68,
- 0xa841: 68,
- 0xa842: 68,
- 0xa843: 68,
- 0xa844: 68,
- 0xa845: 68,
- 0xa846: 68,
- 0xa847: 68,
- 0xa848: 68,
- 0xa849: 68,
- 0xa84a: 68,
- 0xa84b: 68,
- 0xa84c: 68,
- 0xa84d: 68,
- 0xa84e: 68,
- 0xa84f: 68,
- 0xa850: 68,
- 0xa851: 68,
- 0xa852: 68,
- 0xa853: 68,
- 0xa854: 68,
- 0xa855: 68,
- 0xa856: 68,
- 0xa857: 68,
- 0xa858: 68,
- 0xa859: 68,
- 0xa85a: 68,
- 0xa85b: 68,
- 0xa85c: 68,
- 0xa85d: 68,
- 0xa85e: 68,
- 0xa85f: 68,
- 0xa860: 68,
- 0xa861: 68,
- 0xa862: 68,
- 0xa863: 68,
- 0xa864: 68,
- 0xa865: 68,
- 0xa866: 68,
- 0xa867: 68,
- 0xa868: 68,
- 0xa869: 68,
- 0xa86a: 68,
- 0xa86b: 68,
- 0xa86c: 68,
- 0xa86d: 68,
- 0xa86e: 68,
- 0xa86f: 68,
- 0xa870: 68,
- 0xa871: 68,
- 0xa872: 76,
- 0xa873: 85,
-}
-codepoint_classes = {
- 'PVALID': (
- 0x2d0000002e,
- 0x300000003a,
- 0x610000007b,
- 0xdf000000f7,
- 0xf800000100,
- 0x10100000102,
- 0x10300000104,
- 0x10500000106,
- 0x10700000108,
- 0x1090000010a,
- 0x10b0000010c,
- 0x10d0000010e,
- 0x10f00000110,
- 0x11100000112,
- 0x11300000114,
- 0x11500000116,
- 0x11700000118,
- 0x1190000011a,
- 0x11b0000011c,
- 0x11d0000011e,
- 0x11f00000120,
- 0x12100000122,
- 0x12300000124,
- 0x12500000126,
- 0x12700000128,
- 0x1290000012a,
- 0x12b0000012c,
- 0x12d0000012e,
- 0x12f00000130,
- 0x13100000132,
- 0x13500000136,
- 0x13700000139,
- 0x13a0000013b,
- 0x13c0000013d,
- 0x13e0000013f,
- 0x14200000143,
- 0x14400000145,
- 0x14600000147,
- 0x14800000149,
- 0x14b0000014c,
- 0x14d0000014e,
- 0x14f00000150,
- 0x15100000152,
- 0x15300000154,
- 0x15500000156,
- 0x15700000158,
- 0x1590000015a,
- 0x15b0000015c,
- 0x15d0000015e,
- 0x15f00000160,
- 0x16100000162,
- 0x16300000164,
- 0x16500000166,
- 0x16700000168,
- 0x1690000016a,
- 0x16b0000016c,
- 0x16d0000016e,
- 0x16f00000170,
- 0x17100000172,
- 0x17300000174,
- 0x17500000176,
- 0x17700000178,
- 0x17a0000017b,
- 0x17c0000017d,
- 0x17e0000017f,
- 0x18000000181,
- 0x18300000184,
- 0x18500000186,
- 0x18800000189,
- 0x18c0000018e,
- 0x19200000193,
- 0x19500000196,
- 0x1990000019c,
- 0x19e0000019f,
- 0x1a1000001a2,
- 0x1a3000001a4,
- 0x1a5000001a6,
- 0x1a8000001a9,
- 0x1aa000001ac,
- 0x1ad000001ae,
- 0x1b0000001b1,
- 0x1b4000001b5,
- 0x1b6000001b7,
- 0x1b9000001bc,
- 0x1bd000001c4,
- 0x1ce000001cf,
- 0x1d0000001d1,
- 0x1d2000001d3,
- 0x1d4000001d5,
- 0x1d6000001d7,
- 0x1d8000001d9,
- 0x1da000001db,
- 0x1dc000001de,
- 0x1df000001e0,
- 0x1e1000001e2,
- 0x1e3000001e4,
- 0x1e5000001e6,
- 0x1e7000001e8,
- 0x1e9000001ea,
- 0x1eb000001ec,
- 0x1ed000001ee,
- 0x1ef000001f1,
- 0x1f5000001f6,
- 0x1f9000001fa,
- 0x1fb000001fc,
- 0x1fd000001fe,
- 0x1ff00000200,
- 0x20100000202,
- 0x20300000204,
- 0x20500000206,
- 0x20700000208,
- 0x2090000020a,
- 0x20b0000020c,
- 0x20d0000020e,
- 0x20f00000210,
- 0x21100000212,
- 0x21300000214,
- 0x21500000216,
- 0x21700000218,
- 0x2190000021a,
- 0x21b0000021c,
- 0x21d0000021e,
- 0x21f00000220,
- 0x22100000222,
- 0x22300000224,
- 0x22500000226,
- 0x22700000228,
- 0x2290000022a,
- 0x22b0000022c,
- 0x22d0000022e,
- 0x22f00000230,
- 0x23100000232,
- 0x2330000023a,
- 0x23c0000023d,
- 0x23f00000241,
- 0x24200000243,
- 0x24700000248,
- 0x2490000024a,
- 0x24b0000024c,
- 0x24d0000024e,
- 0x24f000002b0,
- 0x2b9000002c2,
- 0x2c6000002d2,
- 0x2ec000002ed,
- 0x2ee000002ef,
- 0x30000000340,
- 0x34200000343,
- 0x3460000034f,
- 0x35000000370,
- 0x37100000372,
- 0x37300000374,
- 0x37700000378,
- 0x37b0000037e,
- 0x39000000391,
- 0x3ac000003cf,
- 0x3d7000003d8,
- 0x3d9000003da,
- 0x3db000003dc,
- 0x3dd000003de,
- 0x3df000003e0,
- 0x3e1000003e2,
- 0x3e3000003e4,
- 0x3e5000003e6,
- 0x3e7000003e8,
- 0x3e9000003ea,
- 0x3eb000003ec,
- 0x3ed000003ee,
- 0x3ef000003f0,
- 0x3f3000003f4,
- 0x3f8000003f9,
- 0x3fb000003fd,
- 0x43000000460,
- 0x46100000462,
- 0x46300000464,
- 0x46500000466,
- 0x46700000468,
- 0x4690000046a,
- 0x46b0000046c,
- 0x46d0000046e,
- 0x46f00000470,
- 0x47100000472,
- 0x47300000474,
- 0x47500000476,
- 0x47700000478,
- 0x4790000047a,
- 0x47b0000047c,
- 0x47d0000047e,
- 0x47f00000480,
- 0x48100000482,
- 0x48300000488,
- 0x48b0000048c,
- 0x48d0000048e,
- 0x48f00000490,
- 0x49100000492,
- 0x49300000494,
- 0x49500000496,
- 0x49700000498,
- 0x4990000049a,
- 0x49b0000049c,
- 0x49d0000049e,
- 0x49f000004a0,
- 0x4a1000004a2,
- 0x4a3000004a4,
- 0x4a5000004a6,
- 0x4a7000004a8,
- 0x4a9000004aa,
- 0x4ab000004ac,
- 0x4ad000004ae,
- 0x4af000004b0,
- 0x4b1000004b2,
- 0x4b3000004b4,
- 0x4b5000004b6,
- 0x4b7000004b8,
- 0x4b9000004ba,
- 0x4bb000004bc,
- 0x4bd000004be,
- 0x4bf000004c0,
- 0x4c2000004c3,
- 0x4c4000004c5,
- 0x4c6000004c7,
- 0x4c8000004c9,
- 0x4ca000004cb,
- 0x4cc000004cd,
- 0x4ce000004d0,
- 0x4d1000004d2,
- 0x4d3000004d4,
- 0x4d5000004d6,
- 0x4d7000004d8,
- 0x4d9000004da,
- 0x4db000004dc,
- 0x4dd000004de,
- 0x4df000004e0,
- 0x4e1000004e2,
- 0x4e3000004e4,
- 0x4e5000004e6,
- 0x4e7000004e8,
- 0x4e9000004ea,
- 0x4eb000004ec,
- 0x4ed000004ee,
- 0x4ef000004f0,
- 0x4f1000004f2,
- 0x4f3000004f4,
- 0x4f5000004f6,
- 0x4f7000004f8,
- 0x4f9000004fa,
- 0x4fb000004fc,
- 0x4fd000004fe,
- 0x4ff00000500,
- 0x50100000502,
- 0x50300000504,
- 0x50500000506,
- 0x50700000508,
- 0x5090000050a,
- 0x50b0000050c,
- 0x50d0000050e,
- 0x50f00000510,
- 0x51100000512,
- 0x51300000514,
- 0x51500000516,
- 0x51700000518,
- 0x5190000051a,
- 0x51b0000051c,
- 0x51d0000051e,
- 0x51f00000520,
- 0x52100000522,
- 0x52300000524,
- 0x52500000526,
- 0x52700000528,
- 0x5590000055a,
- 0x56100000587,
- 0x591000005be,
- 0x5bf000005c0,
- 0x5c1000005c3,
- 0x5c4000005c6,
- 0x5c7000005c8,
- 0x5d0000005eb,
- 0x5f0000005f3,
- 0x6100000061b,
- 0x62000000640,
- 0x64100000660,
- 0x66e00000675,
- 0x679000006d4,
- 0x6d5000006dd,
- 0x6df000006e9,
- 0x6ea000006f0,
- 0x6fa00000700,
- 0x7100000074b,
- 0x74d000007b2,
- 0x7c0000007f6,
- 0x8000000082e,
- 0x8400000085c,
- 0x8a0000008a1,
- 0x8a2000008ad,
- 0x8e4000008ff,
- 0x90000000958,
- 0x96000000964,
- 0x96600000970,
- 0x97100000978,
- 0x97900000980,
- 0x98100000984,
- 0x9850000098d,
- 0x98f00000991,
- 0x993000009a9,
- 0x9aa000009b1,
- 0x9b2000009b3,
- 0x9b6000009ba,
- 0x9bc000009c5,
- 0x9c7000009c9,
- 0x9cb000009cf,
- 0x9d7000009d8,
- 0x9e0000009e4,
- 0x9e6000009f2,
- 0xa0100000a04,
- 0xa0500000a0b,
- 0xa0f00000a11,
- 0xa1300000a29,
- 0xa2a00000a31,
- 0xa3200000a33,
- 0xa3500000a36,
- 0xa3800000a3a,
- 0xa3c00000a3d,
- 0xa3e00000a43,
- 0xa4700000a49,
- 0xa4b00000a4e,
- 0xa5100000a52,
- 0xa5c00000a5d,
- 0xa6600000a76,
- 0xa8100000a84,
- 0xa8500000a8e,
- 0xa8f00000a92,
- 0xa9300000aa9,
- 0xaaa00000ab1,
- 0xab200000ab4,
- 0xab500000aba,
- 0xabc00000ac6,
- 0xac700000aca,
- 0xacb00000ace,
- 0xad000000ad1,
- 0xae000000ae4,
- 0xae600000af0,
- 0xb0100000b04,
- 0xb0500000b0d,
- 0xb0f00000b11,
- 0xb1300000b29,
- 0xb2a00000b31,
- 0xb3200000b34,
- 0xb3500000b3a,
- 0xb3c00000b45,
- 0xb4700000b49,
- 0xb4b00000b4e,
- 0xb5600000b58,
- 0xb5f00000b64,
- 0xb6600000b70,
- 0xb7100000b72,
- 0xb8200000b84,
- 0xb8500000b8b,
- 0xb8e00000b91,
- 0xb9200000b96,
- 0xb9900000b9b,
- 0xb9c00000b9d,
- 0xb9e00000ba0,
- 0xba300000ba5,
- 0xba800000bab,
- 0xbae00000bba,
- 0xbbe00000bc3,
- 0xbc600000bc9,
- 0xbca00000bce,
- 0xbd000000bd1,
- 0xbd700000bd8,
- 0xbe600000bf0,
- 0xc0100000c04,
- 0xc0500000c0d,
- 0xc0e00000c11,
- 0xc1200000c29,
- 0xc2a00000c34,
- 0xc3500000c3a,
- 0xc3d00000c45,
- 0xc4600000c49,
- 0xc4a00000c4e,
- 0xc5500000c57,
- 0xc5800000c5a,
- 0xc6000000c64,
- 0xc6600000c70,
- 0xc8200000c84,
- 0xc8500000c8d,
- 0xc8e00000c91,
- 0xc9200000ca9,
- 0xcaa00000cb4,
- 0xcb500000cba,
- 0xcbc00000cc5,
- 0xcc600000cc9,
- 0xcca00000cce,
- 0xcd500000cd7,
- 0xcde00000cdf,
- 0xce000000ce4,
- 0xce600000cf0,
- 0xcf100000cf3,
- 0xd0200000d04,
- 0xd0500000d0d,
- 0xd0e00000d11,
- 0xd1200000d3b,
- 0xd3d00000d45,
- 0xd4600000d49,
- 0xd4a00000d4f,
- 0xd5700000d58,
- 0xd6000000d64,
- 0xd6600000d70,
- 0xd7a00000d80,
- 0xd8200000d84,
- 0xd8500000d97,
- 0xd9a00000db2,
- 0xdb300000dbc,
- 0xdbd00000dbe,
- 0xdc000000dc7,
- 0xdca00000dcb,
- 0xdcf00000dd5,
- 0xdd600000dd7,
- 0xdd800000de0,
- 0xdf200000df4,
- 0xe0100000e33,
- 0xe3400000e3b,
- 0xe4000000e4f,
- 0xe5000000e5a,
- 0xe8100000e83,
- 0xe8400000e85,
- 0xe8700000e89,
- 0xe8a00000e8b,
- 0xe8d00000e8e,
- 0xe9400000e98,
- 0xe9900000ea0,
- 0xea100000ea4,
- 0xea500000ea6,
- 0xea700000ea8,
- 0xeaa00000eac,
- 0xead00000eb3,
- 0xeb400000eba,
- 0xebb00000ebe,
- 0xec000000ec5,
- 0xec600000ec7,
- 0xec800000ece,
- 0xed000000eda,
- 0xede00000ee0,
- 0xf0000000f01,
- 0xf0b00000f0c,
- 0xf1800000f1a,
- 0xf2000000f2a,
- 0xf3500000f36,
- 0xf3700000f38,
- 0xf3900000f3a,
- 0xf3e00000f43,
- 0xf4400000f48,
- 0xf4900000f4d,
- 0xf4e00000f52,
- 0xf5300000f57,
- 0xf5800000f5c,
- 0xf5d00000f69,
- 0xf6a00000f6d,
- 0xf7100000f73,
- 0xf7400000f75,
- 0xf7a00000f81,
- 0xf8200000f85,
- 0xf8600000f93,
- 0xf9400000f98,
- 0xf9900000f9d,
- 0xf9e00000fa2,
- 0xfa300000fa7,
- 0xfa800000fac,
- 0xfad00000fb9,
- 0xfba00000fbd,
- 0xfc600000fc7,
- 0x10000000104a,
- 0x10500000109e,
- 0x10d0000010fb,
- 0x10fd00001100,
- 0x120000001249,
- 0x124a0000124e,
- 0x125000001257,
- 0x125800001259,
- 0x125a0000125e,
- 0x126000001289,
- 0x128a0000128e,
- 0x1290000012b1,
- 0x12b2000012b6,
- 0x12b8000012bf,
- 0x12c0000012c1,
- 0x12c2000012c6,
- 0x12c8000012d7,
- 0x12d800001311,
- 0x131200001316,
- 0x13180000135b,
- 0x135d00001360,
- 0x138000001390,
- 0x13a0000013f5,
- 0x14010000166d,
- 0x166f00001680,
- 0x16810000169b,
- 0x16a0000016eb,
- 0x17000000170d,
- 0x170e00001715,
- 0x172000001735,
- 0x174000001754,
- 0x17600000176d,
- 0x176e00001771,
- 0x177200001774,
- 0x1780000017b4,
- 0x17b6000017d4,
- 0x17d7000017d8,
- 0x17dc000017de,
- 0x17e0000017ea,
- 0x18100000181a,
- 0x182000001878,
- 0x1880000018ab,
- 0x18b0000018f6,
- 0x19000000191d,
- 0x19200000192c,
- 0x19300000193c,
- 0x19460000196e,
- 0x197000001975,
- 0x1980000019ac,
- 0x19b0000019ca,
- 0x19d0000019da,
- 0x1a0000001a1c,
- 0x1a2000001a5f,
- 0x1a6000001a7d,
- 0x1a7f00001a8a,
- 0x1a9000001a9a,
- 0x1aa700001aa8,
- 0x1b0000001b4c,
- 0x1b5000001b5a,
- 0x1b6b00001b74,
- 0x1b8000001bf4,
- 0x1c0000001c38,
- 0x1c4000001c4a,
- 0x1c4d00001c7e,
- 0x1cd000001cd3,
- 0x1cd400001cf7,
- 0x1d0000001d2c,
- 0x1d2f00001d30,
- 0x1d3b00001d3c,
- 0x1d4e00001d4f,
- 0x1d6b00001d78,
- 0x1d7900001d9b,
- 0x1dc000001de7,
- 0x1dfc00001e00,
- 0x1e0100001e02,
- 0x1e0300001e04,
- 0x1e0500001e06,
- 0x1e0700001e08,
- 0x1e0900001e0a,
- 0x1e0b00001e0c,
- 0x1e0d00001e0e,
- 0x1e0f00001e10,
- 0x1e1100001e12,
- 0x1e1300001e14,
- 0x1e1500001e16,
- 0x1e1700001e18,
- 0x1e1900001e1a,
- 0x1e1b00001e1c,
- 0x1e1d00001e1e,
- 0x1e1f00001e20,
- 0x1e2100001e22,
- 0x1e2300001e24,
- 0x1e2500001e26,
- 0x1e2700001e28,
- 0x1e2900001e2a,
- 0x1e2b00001e2c,
- 0x1e2d00001e2e,
- 0x1e2f00001e30,
- 0x1e3100001e32,
- 0x1e3300001e34,
- 0x1e3500001e36,
- 0x1e3700001e38,
- 0x1e3900001e3a,
- 0x1e3b00001e3c,
- 0x1e3d00001e3e,
- 0x1e3f00001e40,
- 0x1e4100001e42,
- 0x1e4300001e44,
- 0x1e4500001e46,
- 0x1e4700001e48,
- 0x1e4900001e4a,
- 0x1e4b00001e4c,
- 0x1e4d00001e4e,
- 0x1e4f00001e50,
- 0x1e5100001e52,
- 0x1e5300001e54,
- 0x1e5500001e56,
- 0x1e5700001e58,
- 0x1e5900001e5a,
- 0x1e5b00001e5c,
- 0x1e5d00001e5e,
- 0x1e5f00001e60,
- 0x1e6100001e62,
- 0x1e6300001e64,
- 0x1e6500001e66,
- 0x1e6700001e68,
- 0x1e6900001e6a,
- 0x1e6b00001e6c,
- 0x1e6d00001e6e,
- 0x1e6f00001e70,
- 0x1e7100001e72,
- 0x1e7300001e74,
- 0x1e7500001e76,
- 0x1e7700001e78,
- 0x1e7900001e7a,
- 0x1e7b00001e7c,
- 0x1e7d00001e7e,
- 0x1e7f00001e80,
- 0x1e8100001e82,
- 0x1e8300001e84,
- 0x1e8500001e86,
- 0x1e8700001e88,
- 0x1e8900001e8a,
- 0x1e8b00001e8c,
- 0x1e8d00001e8e,
- 0x1e8f00001e90,
- 0x1e9100001e92,
- 0x1e9300001e94,
- 0x1e9500001e9a,
- 0x1e9c00001e9e,
- 0x1e9f00001ea0,
- 0x1ea100001ea2,
- 0x1ea300001ea4,
- 0x1ea500001ea6,
- 0x1ea700001ea8,
- 0x1ea900001eaa,
- 0x1eab00001eac,
- 0x1ead00001eae,
- 0x1eaf00001eb0,
- 0x1eb100001eb2,
- 0x1eb300001eb4,
- 0x1eb500001eb6,
- 0x1eb700001eb8,
- 0x1eb900001eba,
- 0x1ebb00001ebc,
- 0x1ebd00001ebe,
- 0x1ebf00001ec0,
- 0x1ec100001ec2,
- 0x1ec300001ec4,
- 0x1ec500001ec6,
- 0x1ec700001ec8,
- 0x1ec900001eca,
- 0x1ecb00001ecc,
- 0x1ecd00001ece,
- 0x1ecf00001ed0,
- 0x1ed100001ed2,
- 0x1ed300001ed4,
- 0x1ed500001ed6,
- 0x1ed700001ed8,
- 0x1ed900001eda,
- 0x1edb00001edc,
- 0x1edd00001ede,
- 0x1edf00001ee0,
- 0x1ee100001ee2,
- 0x1ee300001ee4,
- 0x1ee500001ee6,
- 0x1ee700001ee8,
- 0x1ee900001eea,
- 0x1eeb00001eec,
- 0x1eed00001eee,
- 0x1eef00001ef0,
- 0x1ef100001ef2,
- 0x1ef300001ef4,
- 0x1ef500001ef6,
- 0x1ef700001ef8,
- 0x1ef900001efa,
- 0x1efb00001efc,
- 0x1efd00001efe,
- 0x1eff00001f08,
- 0x1f1000001f16,
- 0x1f2000001f28,
- 0x1f3000001f38,
- 0x1f4000001f46,
- 0x1f5000001f58,
- 0x1f6000001f68,
- 0x1f7000001f71,
- 0x1f7200001f73,
- 0x1f7400001f75,
- 0x1f7600001f77,
- 0x1f7800001f79,
- 0x1f7a00001f7b,
- 0x1f7c00001f7d,
- 0x1fb000001fb2,
- 0x1fb600001fb7,
- 0x1fc600001fc7,
- 0x1fd000001fd3,
- 0x1fd600001fd8,
- 0x1fe000001fe3,
- 0x1fe400001fe8,
- 0x1ff600001ff7,
- 0x214e0000214f,
- 0x218400002185,
- 0x2c3000002c5f,
- 0x2c6100002c62,
- 0x2c6500002c67,
- 0x2c6800002c69,
- 0x2c6a00002c6b,
- 0x2c6c00002c6d,
- 0x2c7100002c72,
- 0x2c7300002c75,
- 0x2c7600002c7c,
- 0x2c8100002c82,
- 0x2c8300002c84,
- 0x2c8500002c86,
- 0x2c8700002c88,
- 0x2c8900002c8a,
- 0x2c8b00002c8c,
- 0x2c8d00002c8e,
- 0x2c8f00002c90,
- 0x2c9100002c92,
- 0x2c9300002c94,
- 0x2c9500002c96,
- 0x2c9700002c98,
- 0x2c9900002c9a,
- 0x2c9b00002c9c,
- 0x2c9d00002c9e,
- 0x2c9f00002ca0,
- 0x2ca100002ca2,
- 0x2ca300002ca4,
- 0x2ca500002ca6,
- 0x2ca700002ca8,
- 0x2ca900002caa,
- 0x2cab00002cac,
- 0x2cad00002cae,
- 0x2caf00002cb0,
- 0x2cb100002cb2,
- 0x2cb300002cb4,
- 0x2cb500002cb6,
- 0x2cb700002cb8,
- 0x2cb900002cba,
- 0x2cbb00002cbc,
- 0x2cbd00002cbe,
- 0x2cbf00002cc0,
- 0x2cc100002cc2,
- 0x2cc300002cc4,
- 0x2cc500002cc6,
- 0x2cc700002cc8,
- 0x2cc900002cca,
- 0x2ccb00002ccc,
- 0x2ccd00002cce,
- 0x2ccf00002cd0,
- 0x2cd100002cd2,
- 0x2cd300002cd4,
- 0x2cd500002cd6,
- 0x2cd700002cd8,
- 0x2cd900002cda,
- 0x2cdb00002cdc,
- 0x2cdd00002cde,
- 0x2cdf00002ce0,
- 0x2ce100002ce2,
- 0x2ce300002ce5,
- 0x2cec00002ced,
- 0x2cee00002cf2,
- 0x2cf300002cf4,
- 0x2d0000002d26,
- 0x2d2700002d28,
- 0x2d2d00002d2e,
- 0x2d3000002d68,
- 0x2d7f00002d97,
- 0x2da000002da7,
- 0x2da800002daf,
- 0x2db000002db7,
- 0x2db800002dbf,
- 0x2dc000002dc7,
- 0x2dc800002dcf,
- 0x2dd000002dd7,
- 0x2dd800002ddf,
- 0x2de000002e00,
- 0x2e2f00002e30,
- 0x300500003008,
- 0x302a0000302e,
- 0x303c0000303d,
- 0x304100003097,
- 0x30990000309b,
- 0x309d0000309f,
- 0x30a1000030fb,
- 0x30fc000030ff,
- 0x31050000312e,
- 0x31a0000031bb,
- 0x31f000003200,
- 0x340000004db6,
- 0x4e0000009fcd,
- 0xa0000000a48d,
- 0xa4d00000a4fe,
- 0xa5000000a60d,
- 0xa6100000a62c,
- 0xa6410000a642,
- 0xa6430000a644,
- 0xa6450000a646,
- 0xa6470000a648,
- 0xa6490000a64a,
- 0xa64b0000a64c,
- 0xa64d0000a64e,
- 0xa64f0000a650,
- 0xa6510000a652,
- 0xa6530000a654,
- 0xa6550000a656,
- 0xa6570000a658,
- 0xa6590000a65a,
- 0xa65b0000a65c,
- 0xa65d0000a65e,
- 0xa65f0000a660,
- 0xa6610000a662,
- 0xa6630000a664,
- 0xa6650000a666,
- 0xa6670000a668,
- 0xa6690000a66a,
- 0xa66b0000a66c,
- 0xa66d0000a670,
- 0xa6740000a67e,
- 0xa67f0000a680,
- 0xa6810000a682,
- 0xa6830000a684,
- 0xa6850000a686,
- 0xa6870000a688,
- 0xa6890000a68a,
- 0xa68b0000a68c,
- 0xa68d0000a68e,
- 0xa68f0000a690,
- 0xa6910000a692,
- 0xa6930000a694,
- 0xa6950000a696,
- 0xa6970000a698,
- 0xa69f0000a6e6,
- 0xa6f00000a6f2,
- 0xa7170000a720,
- 0xa7230000a724,
- 0xa7250000a726,
- 0xa7270000a728,
- 0xa7290000a72a,
- 0xa72b0000a72c,
- 0xa72d0000a72e,
- 0xa72f0000a732,
- 0xa7330000a734,
- 0xa7350000a736,
- 0xa7370000a738,
- 0xa7390000a73a,
- 0xa73b0000a73c,
- 0xa73d0000a73e,
- 0xa73f0000a740,
- 0xa7410000a742,
- 0xa7430000a744,
- 0xa7450000a746,
- 0xa7470000a748,
- 0xa7490000a74a,
- 0xa74b0000a74c,
- 0xa74d0000a74e,
- 0xa74f0000a750,
- 0xa7510000a752,
- 0xa7530000a754,
- 0xa7550000a756,
- 0xa7570000a758,
- 0xa7590000a75a,
- 0xa75b0000a75c,
- 0xa75d0000a75e,
- 0xa75f0000a760,
- 0xa7610000a762,
- 0xa7630000a764,
- 0xa7650000a766,
- 0xa7670000a768,
- 0xa7690000a76a,
- 0xa76b0000a76c,
- 0xa76d0000a76e,
- 0xa76f0000a770,
- 0xa7710000a779,
- 0xa77a0000a77b,
- 0xa77c0000a77d,
- 0xa77f0000a780,
- 0xa7810000a782,
- 0xa7830000a784,
- 0xa7850000a786,
- 0xa7870000a789,
- 0xa78c0000a78d,
- 0xa78e0000a78f,
- 0xa7910000a792,
- 0xa7930000a794,
- 0xa7a10000a7a2,
- 0xa7a30000a7a4,
- 0xa7a50000a7a6,
- 0xa7a70000a7a8,
- 0xa7a90000a7aa,
- 0xa7fa0000a828,
- 0xa8400000a874,
- 0xa8800000a8c5,
- 0xa8d00000a8da,
- 0xa8e00000a8f8,
- 0xa8fb0000a8fc,
- 0xa9000000a92e,
- 0xa9300000a954,
- 0xa9800000a9c1,
- 0xa9cf0000a9da,
- 0xaa000000aa37,
- 0xaa400000aa4e,
- 0xaa500000aa5a,
- 0xaa600000aa77,
- 0xaa7a0000aa7c,
- 0xaa800000aac3,
- 0xaadb0000aade,
- 0xaae00000aaf0,
- 0xaaf20000aaf7,
- 0xab010000ab07,
- 0xab090000ab0f,
- 0xab110000ab17,
- 0xab200000ab27,
- 0xab280000ab2f,
- 0xabc00000abeb,
- 0xabec0000abee,
- 0xabf00000abfa,
- 0xac000000d7a4,
- 0xfa0e0000fa10,
- 0xfa110000fa12,
- 0xfa130000fa15,
- 0xfa1f0000fa20,
- 0xfa210000fa22,
- 0xfa230000fa25,
- 0xfa270000fa2a,
- 0xfb1e0000fb1f,
- 0xfe200000fe27,
- 0xfe730000fe74,
- 0x100000001000c,
- 0x1000d00010027,
- 0x100280001003b,
- 0x1003c0001003e,
- 0x1003f0001004e,
- 0x100500001005e,
- 0x10080000100fb,
- 0x101fd000101fe,
- 0x102800001029d,
- 0x102a0000102d1,
- 0x103000001031f,
- 0x1033000010341,
- 0x103420001034a,
- 0x103800001039e,
- 0x103a0000103c4,
- 0x103c8000103d0,
- 0x104280001049e,
- 0x104a0000104aa,
- 0x1080000010806,
- 0x1080800010809,
- 0x1080a00010836,
- 0x1083700010839,
- 0x1083c0001083d,
- 0x1083f00010856,
- 0x1090000010916,
- 0x109200001093a,
- 0x10980000109b8,
- 0x109be000109c0,
- 0x10a0000010a04,
- 0x10a0500010a07,
- 0x10a0c00010a14,
- 0x10a1500010a18,
- 0x10a1900010a34,
- 0x10a3800010a3b,
- 0x10a3f00010a40,
- 0x10a6000010a7d,
- 0x10b0000010b36,
- 0x10b4000010b56,
- 0x10b6000010b73,
- 0x10c0000010c49,
- 0x1100000011047,
- 0x1106600011070,
- 0x11080000110bb,
- 0x110d0000110e9,
- 0x110f0000110fa,
- 0x1110000011135,
- 0x1113600011140,
- 0x11180000111c5,
- 0x111d0000111da,
- 0x11680000116b8,
- 0x116c0000116ca,
- 0x120000001236f,
- 0x130000001342f,
- 0x1680000016a39,
- 0x16f0000016f45,
- 0x16f5000016f7f,
- 0x16f8f00016fa0,
- 0x1b0000001b002,
- 0x200000002a6d7,
- 0x2a7000002b735,
- 0x2b7400002b81e,
- ),
- 'CONTEXTJ': (
- 0x200c0000200e,
- ),
- 'CONTEXTO': (
- 0xb7000000b8,
- 0x37500000376,
- 0x5f3000005f5,
- 0x6600000066a,
- 0x6f0000006fa,
- 0x30fb000030fc,
- ),
-}
diff --git a/functions/source/CreateSSHKey/idna/intranges.py b/functions/source/CreateSSHKey/idna/intranges.py
deleted file mode 100644
index fa8a735..0000000
--- a/functions/source/CreateSSHKey/idna/intranges.py
+++ /dev/null
@@ -1,53 +0,0 @@
-"""
-Given a list of integers, made up of (hopefully) a small number of long runs
-of consecutive integers, compute a representation of the form
-((start1, end1), (start2, end2) ...). Then answer the question "was x present
-in the original list?" in time O(log(# runs)).
-"""
-
-import bisect
-
-def intranges_from_list(list_):
- """Represent a list of integers as a sequence of ranges:
- ((start_0, end_0), (start_1, end_1), ...), such that the original
- integers are exactly those x such that start_i <= x < end_i for some i.
-
- Ranges are encoded as single integers (start << 32 | end), not as tuples.
- """
-
- sorted_list = sorted(list_)
- ranges = []
- last_write = -1
- for i in range(len(sorted_list)):
- if i+1 < len(sorted_list):
- if sorted_list[i] == sorted_list[i+1]-1:
- continue
- current_range = sorted_list[last_write+1:i+1]
- ranges.append(_encode_range(current_range[0], current_range[-1] + 1))
- last_write = i
-
- return tuple(ranges)
-
-def _encode_range(start, end):
- return (start << 32) | end
-
-def _decode_range(r):
- return (r >> 32), (r & ((1 << 32) - 1))
-
-
-def intranges_contain(int_, ranges):
- """Determine if `int_` falls into one of the ranges in `ranges`."""
- tuple_ = _encode_range(int_, 0)
- pos = bisect.bisect_left(ranges, tuple_)
- # we could be immediately ahead of a tuple (start, end)
- # with start < int_ <= end
- if pos > 0:
- left, right = _decode_range(ranges[pos-1])
- if left <= int_ < right:
- return True
- # or we could be immediately behind a tuple (int_, end)
- if pos < len(ranges):
- left, _ = _decode_range(ranges[pos])
- if left == int_:
- return True
- return False
diff --git a/functions/source/CreateSSHKey/idna/package_data.py b/functions/source/CreateSSHKey/idna/package_data.py
deleted file mode 100644
index fc33139..0000000
--- a/functions/source/CreateSSHKey/idna/package_data.py
+++ /dev/null
@@ -1,2 +0,0 @@
-__version__ = '2.6'
-
diff --git a/functions/source/CreateSSHKey/idna/uts46data.py b/functions/source/CreateSSHKey/idna/uts46data.py
deleted file mode 100644
index f9b3236..0000000
--- a/functions/source/CreateSSHKey/idna/uts46data.py
+++ /dev/null
@@ -1,7634 +0,0 @@
-# This file is automatically generated by tools/idna-data
-# vim: set fileencoding=utf-8 :
-
-"""IDNA Mapping Table from UTS46."""
-
-
-__version__ = "6.3.0"
-def _seg_0():
- return [
- (0x0, '3'),
- (0x1, '3'),
- (0x2, '3'),
- (0x3, '3'),
- (0x4, '3'),
- (0x5, '3'),
- (0x6, '3'),
- (0x7, '3'),
- (0x8, '3'),
- (0x9, '3'),
- (0xA, '3'),
- (0xB, '3'),
- (0xC, '3'),
- (0xD, '3'),
- (0xE, '3'),
- (0xF, '3'),
- (0x10, '3'),
- (0x11, '3'),
- (0x12, '3'),
- (0x13, '3'),
- (0x14, '3'),
- (0x15, '3'),
- (0x16, '3'),
- (0x17, '3'),
- (0x18, '3'),
- (0x19, '3'),
- (0x1A, '3'),
- (0x1B, '3'),
- (0x1C, '3'),
- (0x1D, '3'),
- (0x1E, '3'),
- (0x1F, '3'),
- (0x20, '3'),
- (0x21, '3'),
- (0x22, '3'),
- (0x23, '3'),
- (0x24, '3'),
- (0x25, '3'),
- (0x26, '3'),
- (0x27, '3'),
- (0x28, '3'),
- (0x29, '3'),
- (0x2A, '3'),
- (0x2B, '3'),
- (0x2C, '3'),
- (0x2D, 'V'),
- (0x2E, 'V'),
- (0x2F, '3'),
- (0x30, 'V'),
- (0x31, 'V'),
- (0x32, 'V'),
- (0x33, 'V'),
- (0x34, 'V'),
- (0x35, 'V'),
- (0x36, 'V'),
- (0x37, 'V'),
- (0x38, 'V'),
- (0x39, 'V'),
- (0x3A, '3'),
- (0x3B, '3'),
- (0x3C, '3'),
- (0x3D, '3'),
- (0x3E, '3'),
- (0x3F, '3'),
- (0x40, '3'),
- (0x41, 'M', u'a'),
- (0x42, 'M', u'b'),
- (0x43, 'M', u'c'),
- (0x44, 'M', u'd'),
- (0x45, 'M', u'e'),
- (0x46, 'M', u'f'),
- (0x47, 'M', u'g'),
- (0x48, 'M', u'h'),
- (0x49, 'M', u'i'),
- (0x4A, 'M', u'j'),
- (0x4B, 'M', u'k'),
- (0x4C, 'M', u'l'),
- (0x4D, 'M', u'm'),
- (0x4E, 'M', u'n'),
- (0x4F, 'M', u'o'),
- (0x50, 'M', u'p'),
- (0x51, 'M', u'q'),
- (0x52, 'M', u'r'),
- (0x53, 'M', u's'),
- (0x54, 'M', u't'),
- (0x55, 'M', u'u'),
- (0x56, 'M', u'v'),
- (0x57, 'M', u'w'),
- (0x58, 'M', u'x'),
- (0x59, 'M', u'y'),
- (0x5A, 'M', u'z'),
- (0x5B, '3'),
- (0x5C, '3'),
- (0x5D, '3'),
- (0x5E, '3'),
- (0x5F, '3'),
- (0x60, '3'),
- (0x61, 'V'),
- (0x62, 'V'),
- (0x63, 'V'),
- ]
-
-def _seg_1():
- return [
- (0x64, 'V'),
- (0x65, 'V'),
- (0x66, 'V'),
- (0x67, 'V'),
- (0x68, 'V'),
- (0x69, 'V'),
- (0x6A, 'V'),
- (0x6B, 'V'),
- (0x6C, 'V'),
- (0x6D, 'V'),
- (0x6E, 'V'),
- (0x6F, 'V'),
- (0x70, 'V'),
- (0x71, 'V'),
- (0x72, 'V'),
- (0x73, 'V'),
- (0x74, 'V'),
- (0x75, 'V'),
- (0x76, 'V'),
- (0x77, 'V'),
- (0x78, 'V'),
- (0x79, 'V'),
- (0x7A, 'V'),
- (0x7B, '3'),
- (0x7C, '3'),
- (0x7D, '3'),
- (0x7E, '3'),
- (0x7F, '3'),
- (0x80, 'X'),
- (0x81, 'X'),
- (0x82, 'X'),
- (0x83, 'X'),
- (0x84, 'X'),
- (0x85, 'X'),
- (0x86, 'X'),
- (0x87, 'X'),
- (0x88, 'X'),
- (0x89, 'X'),
- (0x8A, 'X'),
- (0x8B, 'X'),
- (0x8C, 'X'),
- (0x8D, 'X'),
- (0x8E, 'X'),
- (0x8F, 'X'),
- (0x90, 'X'),
- (0x91, 'X'),
- (0x92, 'X'),
- (0x93, 'X'),
- (0x94, 'X'),
- (0x95, 'X'),
- (0x96, 'X'),
- (0x97, 'X'),
- (0x98, 'X'),
- (0x99, 'X'),
- (0x9A, 'X'),
- (0x9B, 'X'),
- (0x9C, 'X'),
- (0x9D, 'X'),
- (0x9E, 'X'),
- (0x9F, 'X'),
- (0xA0, '3', u' '),
- (0xA1, 'V'),
- (0xA2, 'V'),
- (0xA3, 'V'),
- (0xA4, 'V'),
- (0xA5, 'V'),
- (0xA6, 'V'),
- (0xA7, 'V'),
- (0xA8, '3', u' ̈'),
- (0xA9, 'V'),
- (0xAA, 'M', u'a'),
- (0xAB, 'V'),
- (0xAC, 'V'),
- (0xAD, 'I'),
- (0xAE, 'V'),
- (0xAF, '3', u' ̄'),
- (0xB0, 'V'),
- (0xB1, 'V'),
- (0xB2, 'M', u'2'),
- (0xB3, 'M', u'3'),
- (0xB4, '3', u' ́'),
- (0xB5, 'M', u'μ'),
- (0xB6, 'V'),
- (0xB7, 'V'),
- (0xB8, '3', u' ̧'),
- (0xB9, 'M', u'1'),
- (0xBA, 'M', u'o'),
- (0xBB, 'V'),
- (0xBC, 'M', u'1⁄4'),
- (0xBD, 'M', u'1⁄2'),
- (0xBE, 'M', u'3⁄4'),
- (0xBF, 'V'),
- (0xC0, 'M', u'à'),
- (0xC1, 'M', u'á'),
- (0xC2, 'M', u'â'),
- (0xC3, 'M', u'ã'),
- (0xC4, 'M', u'ä'),
- (0xC5, 'M', u'å'),
- (0xC6, 'M', u'æ'),
- (0xC7, 'M', u'ç'),
- ]
-
-def _seg_2():
- return [
- (0xC8, 'M', u'è'),
- (0xC9, 'M', u'é'),
- (0xCA, 'M', u'ê'),
- (0xCB, 'M', u'ë'),
- (0xCC, 'M', u'ì'),
- (0xCD, 'M', u'í'),
- (0xCE, 'M', u'î'),
- (0xCF, 'M', u'ï'),
- (0xD0, 'M', u'ð'),
- (0xD1, 'M', u'ñ'),
- (0xD2, 'M', u'ò'),
- (0xD3, 'M', u'ó'),
- (0xD4, 'M', u'ô'),
- (0xD5, 'M', u'õ'),
- (0xD6, 'M', u'ö'),
- (0xD7, 'V'),
- (0xD8, 'M', u'ø'),
- (0xD9, 'M', u'ù'),
- (0xDA, 'M', u'ú'),
- (0xDB, 'M', u'û'),
- (0xDC, 'M', u'ü'),
- (0xDD, 'M', u'ý'),
- (0xDE, 'M', u'þ'),
- (0xDF, 'D', u'ss'),
- (0xE0, 'V'),
- (0xE1, 'V'),
- (0xE2, 'V'),
- (0xE3, 'V'),
- (0xE4, 'V'),
- (0xE5, 'V'),
- (0xE6, 'V'),
- (0xE7, 'V'),
- (0xE8, 'V'),
- (0xE9, 'V'),
- (0xEA, 'V'),
- (0xEB, 'V'),
- (0xEC, 'V'),
- (0xED, 'V'),
- (0xEE, 'V'),
- (0xEF, 'V'),
- (0xF0, 'V'),
- (0xF1, 'V'),
- (0xF2, 'V'),
- (0xF3, 'V'),
- (0xF4, 'V'),
- (0xF5, 'V'),
- (0xF6, 'V'),
- (0xF7, 'V'),
- (0xF8, 'V'),
- (0xF9, 'V'),
- (0xFA, 'V'),
- (0xFB, 'V'),
- (0xFC, 'V'),
- (0xFD, 'V'),
- (0xFE, 'V'),
- (0xFF, 'V'),
- (0x100, 'M', u'ā'),
- (0x101, 'V'),
- (0x102, 'M', u'ă'),
- (0x103, 'V'),
- (0x104, 'M', u'ą'),
- (0x105, 'V'),
- (0x106, 'M', u'ć'),
- (0x107, 'V'),
- (0x108, 'M', u'ĉ'),
- (0x109, 'V'),
- (0x10A, 'M', u'ċ'),
- (0x10B, 'V'),
- (0x10C, 'M', u'č'),
- (0x10D, 'V'),
- (0x10E, 'M', u'ď'),
- (0x10F, 'V'),
- (0x110, 'M', u'đ'),
- (0x111, 'V'),
- (0x112, 'M', u'ē'),
- (0x113, 'V'),
- (0x114, 'M', u'ĕ'),
- (0x115, 'V'),
- (0x116, 'M', u'ė'),
- (0x117, 'V'),
- (0x118, 'M', u'ę'),
- (0x119, 'V'),
- (0x11A, 'M', u'ě'),
- (0x11B, 'V'),
- (0x11C, 'M', u'ĝ'),
- (0x11D, 'V'),
- (0x11E, 'M', u'ğ'),
- (0x11F, 'V'),
- (0x120, 'M', u'ġ'),
- (0x121, 'V'),
- (0x122, 'M', u'ģ'),
- (0x123, 'V'),
- (0x124, 'M', u'ĥ'),
- (0x125, 'V'),
- (0x126, 'M', u'ħ'),
- (0x127, 'V'),
- (0x128, 'M', u'ĩ'),
- (0x129, 'V'),
- (0x12A, 'M', u'ī'),
- (0x12B, 'V'),
- ]
-
-def _seg_3():
- return [
- (0x12C, 'M', u'ĭ'),
- (0x12D, 'V'),
- (0x12E, 'M', u'į'),
- (0x12F, 'V'),
- (0x130, 'M', u'i̇'),
- (0x131, 'V'),
- (0x132, 'M', u'ij'),
- (0x134, 'M', u'ĵ'),
- (0x135, 'V'),
- (0x136, 'M', u'ķ'),
- (0x137, 'V'),
- (0x139, 'M', u'ĺ'),
- (0x13A, 'V'),
- (0x13B, 'M', u'ļ'),
- (0x13C, 'V'),
- (0x13D, 'M', u'ľ'),
- (0x13E, 'V'),
- (0x13F, 'M', u'l·'),
- (0x141, 'M', u'ł'),
- (0x142, 'V'),
- (0x143, 'M', u'ń'),
- (0x144, 'V'),
- (0x145, 'M', u'ņ'),
- (0x146, 'V'),
- (0x147, 'M', u'ň'),
- (0x148, 'V'),
- (0x149, 'M', u'ʼn'),
- (0x14A, 'M', u'ŋ'),
- (0x14B, 'V'),
- (0x14C, 'M', u'ō'),
- (0x14D, 'V'),
- (0x14E, 'M', u'ŏ'),
- (0x14F, 'V'),
- (0x150, 'M', u'ő'),
- (0x151, 'V'),
- (0x152, 'M', u'œ'),
- (0x153, 'V'),
- (0x154, 'M', u'ŕ'),
- (0x155, 'V'),
- (0x156, 'M', u'ŗ'),
- (0x157, 'V'),
- (0x158, 'M', u'ř'),
- (0x159, 'V'),
- (0x15A, 'M', u'ś'),
- (0x15B, 'V'),
- (0x15C, 'M', u'ŝ'),
- (0x15D, 'V'),
- (0x15E, 'M', u'ş'),
- (0x15F, 'V'),
- (0x160, 'M', u'š'),
- (0x161, 'V'),
- (0x162, 'M', u'ţ'),
- (0x163, 'V'),
- (0x164, 'M', u'ť'),
- (0x165, 'V'),
- (0x166, 'M', u'ŧ'),
- (0x167, 'V'),
- (0x168, 'M', u'ũ'),
- (0x169, 'V'),
- (0x16A, 'M', u'ū'),
- (0x16B, 'V'),
- (0x16C, 'M', u'ŭ'),
- (0x16D, 'V'),
- (0x16E, 'M', u'ů'),
- (0x16F, 'V'),
- (0x170, 'M', u'ű'),
- (0x171, 'V'),
- (0x172, 'M', u'ų'),
- (0x173, 'V'),
- (0x174, 'M', u'ŵ'),
- (0x175, 'V'),
- (0x176, 'M', u'ŷ'),
- (0x177, 'V'),
- (0x178, 'M', u'ÿ'),
- (0x179, 'M', u'ź'),
- (0x17A, 'V'),
- (0x17B, 'M', u'ż'),
- (0x17C, 'V'),
- (0x17D, 'M', u'ž'),
- (0x17E, 'V'),
- (0x17F, 'M', u's'),
- (0x180, 'V'),
- (0x181, 'M', u'ɓ'),
- (0x182, 'M', u'ƃ'),
- (0x183, 'V'),
- (0x184, 'M', u'ƅ'),
- (0x185, 'V'),
- (0x186, 'M', u'ɔ'),
- (0x187, 'M', u'ƈ'),
- (0x188, 'V'),
- (0x189, 'M', u'ɖ'),
- (0x18A, 'M', u'ɗ'),
- (0x18B, 'M', u'ƌ'),
- (0x18C, 'V'),
- (0x18E, 'M', u'ǝ'),
- (0x18F, 'M', u'ə'),
- (0x190, 'M', u'ɛ'),
- (0x191, 'M', u'ƒ'),
- (0x192, 'V'),
- (0x193, 'M', u'ɠ'),
- ]
-
-def _seg_4():
- return [
- (0x194, 'M', u'ɣ'),
- (0x195, 'V'),
- (0x196, 'M', u'ɩ'),
- (0x197, 'M', u'ɨ'),
- (0x198, 'M', u'ƙ'),
- (0x199, 'V'),
- (0x19C, 'M', u'ɯ'),
- (0x19D, 'M', u'ɲ'),
- (0x19E, 'V'),
- (0x19F, 'M', u'ɵ'),
- (0x1A0, 'M', u'ơ'),
- (0x1A1, 'V'),
- (0x1A2, 'M', u'ƣ'),
- (0x1A3, 'V'),
- (0x1A4, 'M', u'ƥ'),
- (0x1A5, 'V'),
- (0x1A6, 'M', u'ʀ'),
- (0x1A7, 'M', u'ƨ'),
- (0x1A8, 'V'),
- (0x1A9, 'M', u'ʃ'),
- (0x1AA, 'V'),
- (0x1AC, 'M', u'ƭ'),
- (0x1AD, 'V'),
- (0x1AE, 'M', u'ʈ'),
- (0x1AF, 'M', u'ư'),
- (0x1B0, 'V'),
- (0x1B1, 'M', u'ʊ'),
- (0x1B2, 'M', u'ʋ'),
- (0x1B3, 'M', u'ƴ'),
- (0x1B4, 'V'),
- (0x1B5, 'M', u'ƶ'),
- (0x1B6, 'V'),
- (0x1B7, 'M', u'ʒ'),
- (0x1B8, 'M', u'ƹ'),
- (0x1B9, 'V'),
- (0x1BC, 'M', u'ƽ'),
- (0x1BD, 'V'),
- (0x1C4, 'M', u'dž'),
- (0x1C7, 'M', u'lj'),
- (0x1CA, 'M', u'nj'),
- (0x1CD, 'M', u'ǎ'),
- (0x1CE, 'V'),
- (0x1CF, 'M', u'ǐ'),
- (0x1D0, 'V'),
- (0x1D1, 'M', u'ǒ'),
- (0x1D2, 'V'),
- (0x1D3, 'M', u'ǔ'),
- (0x1D4, 'V'),
- (0x1D5, 'M', u'ǖ'),
- (0x1D6, 'V'),
- (0x1D7, 'M', u'ǘ'),
- (0x1D8, 'V'),
- (0x1D9, 'M', u'ǚ'),
- (0x1DA, 'V'),
- (0x1DB, 'M', u'ǜ'),
- (0x1DC, 'V'),
- (0x1DE, 'M', u'ǟ'),
- (0x1DF, 'V'),
- (0x1E0, 'M', u'ǡ'),
- (0x1E1, 'V'),
- (0x1E2, 'M', u'ǣ'),
- (0x1E3, 'V'),
- (0x1E4, 'M', u'ǥ'),
- (0x1E5, 'V'),
- (0x1E6, 'M', u'ǧ'),
- (0x1E7, 'V'),
- (0x1E8, 'M', u'ǩ'),
- (0x1E9, 'V'),
- (0x1EA, 'M', u'ǫ'),
- (0x1EB, 'V'),
- (0x1EC, 'M', u'ǭ'),
- (0x1ED, 'V'),
- (0x1EE, 'M', u'ǯ'),
- (0x1EF, 'V'),
- (0x1F1, 'M', u'dz'),
- (0x1F4, 'M', u'ǵ'),
- (0x1F5, 'V'),
- (0x1F6, 'M', u'ƕ'),
- (0x1F7, 'M', u'ƿ'),
- (0x1F8, 'M', u'ǹ'),
- (0x1F9, 'V'),
- (0x1FA, 'M', u'ǻ'),
- (0x1FB, 'V'),
- (0x1FC, 'M', u'ǽ'),
- (0x1FD, 'V'),
- (0x1FE, 'M', u'ǿ'),
- (0x1FF, 'V'),
- (0x200, 'M', u'ȁ'),
- (0x201, 'V'),
- (0x202, 'M', u'ȃ'),
- (0x203, 'V'),
- (0x204, 'M', u'ȅ'),
- (0x205, 'V'),
- (0x206, 'M', u'ȇ'),
- (0x207, 'V'),
- (0x208, 'M', u'ȉ'),
- (0x209, 'V'),
- (0x20A, 'M', u'ȋ'),
- (0x20B, 'V'),
- (0x20C, 'M', u'ȍ'),
- ]
-
-def _seg_5():
- return [
- (0x20D, 'V'),
- (0x20E, 'M', u'ȏ'),
- (0x20F, 'V'),
- (0x210, 'M', u'ȑ'),
- (0x211, 'V'),
- (0x212, 'M', u'ȓ'),
- (0x213, 'V'),
- (0x214, 'M', u'ȕ'),
- (0x215, 'V'),
- (0x216, 'M', u'ȗ'),
- (0x217, 'V'),
- (0x218, 'M', u'ș'),
- (0x219, 'V'),
- (0x21A, 'M', u'ț'),
- (0x21B, 'V'),
- (0x21C, 'M', u'ȝ'),
- (0x21D, 'V'),
- (0x21E, 'M', u'ȟ'),
- (0x21F, 'V'),
- (0x220, 'M', u'ƞ'),
- (0x221, 'V'),
- (0x222, 'M', u'ȣ'),
- (0x223, 'V'),
- (0x224, 'M', u'ȥ'),
- (0x225, 'V'),
- (0x226, 'M', u'ȧ'),
- (0x227, 'V'),
- (0x228, 'M', u'ȩ'),
- (0x229, 'V'),
- (0x22A, 'M', u'ȫ'),
- (0x22B, 'V'),
- (0x22C, 'M', u'ȭ'),
- (0x22D, 'V'),
- (0x22E, 'M', u'ȯ'),
- (0x22F, 'V'),
- (0x230, 'M', u'ȱ'),
- (0x231, 'V'),
- (0x232, 'M', u'ȳ'),
- (0x233, 'V'),
- (0x23A, 'M', u'ⱥ'),
- (0x23B, 'M', u'ȼ'),
- (0x23C, 'V'),
- (0x23D, 'M', u'ƚ'),
- (0x23E, 'M', u'ⱦ'),
- (0x23F, 'V'),
- (0x241, 'M', u'ɂ'),
- (0x242, 'V'),
- (0x243, 'M', u'ƀ'),
- (0x244, 'M', u'ʉ'),
- (0x245, 'M', u'ʌ'),
- (0x246, 'M', u'ɇ'),
- (0x247, 'V'),
- (0x248, 'M', u'ɉ'),
- (0x249, 'V'),
- (0x24A, 'M', u'ɋ'),
- (0x24B, 'V'),
- (0x24C, 'M', u'ɍ'),
- (0x24D, 'V'),
- (0x24E, 'M', u'ɏ'),
- (0x24F, 'V'),
- (0x2B0, 'M', u'h'),
- (0x2B1, 'M', u'ɦ'),
- (0x2B2, 'M', u'j'),
- (0x2B3, 'M', u'r'),
- (0x2B4, 'M', u'ɹ'),
- (0x2B5, 'M', u'ɻ'),
- (0x2B6, 'M', u'ʁ'),
- (0x2B7, 'M', u'w'),
- (0x2B8, 'M', u'y'),
- (0x2B9, 'V'),
- (0x2D8, '3', u' ̆'),
- (0x2D9, '3', u' ̇'),
- (0x2DA, '3', u' ̊'),
- (0x2DB, '3', u' ̨'),
- (0x2DC, '3', u' ̃'),
- (0x2DD, '3', u' ̋'),
- (0x2DE, 'V'),
- (0x2E0, 'M', u'ɣ'),
- (0x2E1, 'M', u'l'),
- (0x2E2, 'M', u's'),
- (0x2E3, 'M', u'x'),
- (0x2E4, 'M', u'ʕ'),
- (0x2E5, 'V'),
- (0x340, 'M', u'̀'),
- (0x341, 'M', u'́'),
- (0x342, 'V'),
- (0x343, 'M', u'̓'),
- (0x344, 'M', u'̈́'),
- (0x345, 'M', u'ι'),
- (0x346, 'V'),
- (0x34F, 'I'),
- (0x350, 'V'),
- (0x370, 'M', u'ͱ'),
- (0x371, 'V'),
- (0x372, 'M', u'ͳ'),
- (0x373, 'V'),
- (0x374, 'M', u'ʹ'),
- (0x375, 'V'),
- (0x376, 'M', u'ͷ'),
- (0x377, 'V'),
- ]
-
-def _seg_6():
- return [
- (0x378, 'X'),
- (0x37A, '3', u' ι'),
- (0x37B, 'V'),
- (0x37E, '3', u';'),
- (0x37F, 'X'),
- (0x384, '3', u' ́'),
- (0x385, '3', u' ̈́'),
- (0x386, 'M', u'ά'),
- (0x387, 'M', u'·'),
- (0x388, 'M', u'έ'),
- (0x389, 'M', u'ή'),
- (0x38A, 'M', u'ί'),
- (0x38B, 'X'),
- (0x38C, 'M', u'ό'),
- (0x38D, 'X'),
- (0x38E, 'M', u'ύ'),
- (0x38F, 'M', u'ώ'),
- (0x390, 'V'),
- (0x391, 'M', u'α'),
- (0x392, 'M', u'β'),
- (0x393, 'M', u'γ'),
- (0x394, 'M', u'δ'),
- (0x395, 'M', u'ε'),
- (0x396, 'M', u'ζ'),
- (0x397, 'M', u'η'),
- (0x398, 'M', u'θ'),
- (0x399, 'M', u'ι'),
- (0x39A, 'M', u'κ'),
- (0x39B, 'M', u'λ'),
- (0x39C, 'M', u'μ'),
- (0x39D, 'M', u'ν'),
- (0x39E, 'M', u'ξ'),
- (0x39F, 'M', u'ο'),
- (0x3A0, 'M', u'π'),
- (0x3A1, 'M', u'ρ'),
- (0x3A2, 'X'),
- (0x3A3, 'M', u'σ'),
- (0x3A4, 'M', u'τ'),
- (0x3A5, 'M', u'υ'),
- (0x3A6, 'M', u'φ'),
- (0x3A7, 'M', u'χ'),
- (0x3A8, 'M', u'ψ'),
- (0x3A9, 'M', u'ω'),
- (0x3AA, 'M', u'ϊ'),
- (0x3AB, 'M', u'ϋ'),
- (0x3AC, 'V'),
- (0x3C2, 'D', u'σ'),
- (0x3C3, 'V'),
- (0x3CF, 'M', u'ϗ'),
- (0x3D0, 'M', u'β'),
- (0x3D1, 'M', u'θ'),
- (0x3D2, 'M', u'υ'),
- (0x3D3, 'M', u'ύ'),
- (0x3D4, 'M', u'ϋ'),
- (0x3D5, 'M', u'φ'),
- (0x3D6, 'M', u'π'),
- (0x3D7, 'V'),
- (0x3D8, 'M', u'ϙ'),
- (0x3D9, 'V'),
- (0x3DA, 'M', u'ϛ'),
- (0x3DB, 'V'),
- (0x3DC, 'M', u'ϝ'),
- (0x3DD, 'V'),
- (0x3DE, 'M', u'ϟ'),
- (0x3DF, 'V'),
- (0x3E0, 'M', u'ϡ'),
- (0x3E1, 'V'),
- (0x3E2, 'M', u'ϣ'),
- (0x3E3, 'V'),
- (0x3E4, 'M', u'ϥ'),
- (0x3E5, 'V'),
- (0x3E6, 'M', u'ϧ'),
- (0x3E7, 'V'),
- (0x3E8, 'M', u'ϩ'),
- (0x3E9, 'V'),
- (0x3EA, 'M', u'ϫ'),
- (0x3EB, 'V'),
- (0x3EC, 'M', u'ϭ'),
- (0x3ED, 'V'),
- (0x3EE, 'M', u'ϯ'),
- (0x3EF, 'V'),
- (0x3F0, 'M', u'κ'),
- (0x3F1, 'M', u'ρ'),
- (0x3F2, 'M', u'σ'),
- (0x3F3, 'V'),
- (0x3F4, 'M', u'θ'),
- (0x3F5, 'M', u'ε'),
- (0x3F6, 'V'),
- (0x3F7, 'M', u'ϸ'),
- (0x3F8, 'V'),
- (0x3F9, 'M', u'σ'),
- (0x3FA, 'M', u'ϻ'),
- (0x3FB, 'V'),
- (0x3FD, 'M', u'ͻ'),
- (0x3FE, 'M', u'ͼ'),
- (0x3FF, 'M', u'ͽ'),
- (0x400, 'M', u'ѐ'),
- (0x401, 'M', u'ё'),
- (0x402, 'M', u'ђ'),
- (0x403, 'M', u'ѓ'),
- ]
-
-def _seg_7():
- return [
- (0x404, 'M', u'є'),
- (0x405, 'M', u'ѕ'),
- (0x406, 'M', u'і'),
- (0x407, 'M', u'ї'),
- (0x408, 'M', u'ј'),
- (0x409, 'M', u'љ'),
- (0x40A, 'M', u'њ'),
- (0x40B, 'M', u'ћ'),
- (0x40C, 'M', u'ќ'),
- (0x40D, 'M', u'ѝ'),
- (0x40E, 'M', u'ў'),
- (0x40F, 'M', u'џ'),
- (0x410, 'M', u'а'),
- (0x411, 'M', u'б'),
- (0x412, 'M', u'в'),
- (0x413, 'M', u'г'),
- (0x414, 'M', u'д'),
- (0x415, 'M', u'е'),
- (0x416, 'M', u'ж'),
- (0x417, 'M', u'з'),
- (0x418, 'M', u'и'),
- (0x419, 'M', u'й'),
- (0x41A, 'M', u'к'),
- (0x41B, 'M', u'л'),
- (0x41C, 'M', u'м'),
- (0x41D, 'M', u'н'),
- (0x41E, 'M', u'о'),
- (0x41F, 'M', u'п'),
- (0x420, 'M', u'р'),
- (0x421, 'M', u'с'),
- (0x422, 'M', u'т'),
- (0x423, 'M', u'у'),
- (0x424, 'M', u'ф'),
- (0x425, 'M', u'х'),
- (0x426, 'M', u'ц'),
- (0x427, 'M', u'ч'),
- (0x428, 'M', u'ш'),
- (0x429, 'M', u'щ'),
- (0x42A, 'M', u'ъ'),
- (0x42B, 'M', u'ы'),
- (0x42C, 'M', u'ь'),
- (0x42D, 'M', u'э'),
- (0x42E, 'M', u'ю'),
- (0x42F, 'M', u'я'),
- (0x430, 'V'),
- (0x460, 'M', u'ѡ'),
- (0x461, 'V'),
- (0x462, 'M', u'ѣ'),
- (0x463, 'V'),
- (0x464, 'M', u'ѥ'),
- (0x465, 'V'),
- (0x466, 'M', u'ѧ'),
- (0x467, 'V'),
- (0x468, 'M', u'ѩ'),
- (0x469, 'V'),
- (0x46A, 'M', u'ѫ'),
- (0x46B, 'V'),
- (0x46C, 'M', u'ѭ'),
- (0x46D, 'V'),
- (0x46E, 'M', u'ѯ'),
- (0x46F, 'V'),
- (0x470, 'M', u'ѱ'),
- (0x471, 'V'),
- (0x472, 'M', u'ѳ'),
- (0x473, 'V'),
- (0x474, 'M', u'ѵ'),
- (0x475, 'V'),
- (0x476, 'M', u'ѷ'),
- (0x477, 'V'),
- (0x478, 'M', u'ѹ'),
- (0x479, 'V'),
- (0x47A, 'M', u'ѻ'),
- (0x47B, 'V'),
- (0x47C, 'M', u'ѽ'),
- (0x47D, 'V'),
- (0x47E, 'M', u'ѿ'),
- (0x47F, 'V'),
- (0x480, 'M', u'ҁ'),
- (0x481, 'V'),
- (0x48A, 'M', u'ҋ'),
- (0x48B, 'V'),
- (0x48C, 'M', u'ҍ'),
- (0x48D, 'V'),
- (0x48E, 'M', u'ҏ'),
- (0x48F, 'V'),
- (0x490, 'M', u'ґ'),
- (0x491, 'V'),
- (0x492, 'M', u'ғ'),
- (0x493, 'V'),
- (0x494, 'M', u'ҕ'),
- (0x495, 'V'),
- (0x496, 'M', u'җ'),
- (0x497, 'V'),
- (0x498, 'M', u'ҙ'),
- (0x499, 'V'),
- (0x49A, 'M', u'қ'),
- (0x49B, 'V'),
- (0x49C, 'M', u'ҝ'),
- (0x49D, 'V'),
- (0x49E, 'M', u'ҟ'),
- ]
-
-def _seg_8():
- return [
- (0x49F, 'V'),
- (0x4A0, 'M', u'ҡ'),
- (0x4A1, 'V'),
- (0x4A2, 'M', u'ң'),
- (0x4A3, 'V'),
- (0x4A4, 'M', u'ҥ'),
- (0x4A5, 'V'),
- (0x4A6, 'M', u'ҧ'),
- (0x4A7, 'V'),
- (0x4A8, 'M', u'ҩ'),
- (0x4A9, 'V'),
- (0x4AA, 'M', u'ҫ'),
- (0x4AB, 'V'),
- (0x4AC, 'M', u'ҭ'),
- (0x4AD, 'V'),
- (0x4AE, 'M', u'ү'),
- (0x4AF, 'V'),
- (0x4B0, 'M', u'ұ'),
- (0x4B1, 'V'),
- (0x4B2, 'M', u'ҳ'),
- (0x4B3, 'V'),
- (0x4B4, 'M', u'ҵ'),
- (0x4B5, 'V'),
- (0x4B6, 'M', u'ҷ'),
- (0x4B7, 'V'),
- (0x4B8, 'M', u'ҹ'),
- (0x4B9, 'V'),
- (0x4BA, 'M', u'һ'),
- (0x4BB, 'V'),
- (0x4BC, 'M', u'ҽ'),
- (0x4BD, 'V'),
- (0x4BE, 'M', u'ҿ'),
- (0x4BF, 'V'),
- (0x4C0, 'X'),
- (0x4C1, 'M', u'ӂ'),
- (0x4C2, 'V'),
- (0x4C3, 'M', u'ӄ'),
- (0x4C4, 'V'),
- (0x4C5, 'M', u'ӆ'),
- (0x4C6, 'V'),
- (0x4C7, 'M', u'ӈ'),
- (0x4C8, 'V'),
- (0x4C9, 'M', u'ӊ'),
- (0x4CA, 'V'),
- (0x4CB, 'M', u'ӌ'),
- (0x4CC, 'V'),
- (0x4CD, 'M', u'ӎ'),
- (0x4CE, 'V'),
- (0x4D0, 'M', u'ӑ'),
- (0x4D1, 'V'),
- (0x4D2, 'M', u'ӓ'),
- (0x4D3, 'V'),
- (0x4D4, 'M', u'ӕ'),
- (0x4D5, 'V'),
- (0x4D6, 'M', u'ӗ'),
- (0x4D7, 'V'),
- (0x4D8, 'M', u'ә'),
- (0x4D9, 'V'),
- (0x4DA, 'M', u'ӛ'),
- (0x4DB, 'V'),
- (0x4DC, 'M', u'ӝ'),
- (0x4DD, 'V'),
- (0x4DE, 'M', u'ӟ'),
- (0x4DF, 'V'),
- (0x4E0, 'M', u'ӡ'),
- (0x4E1, 'V'),
- (0x4E2, 'M', u'ӣ'),
- (0x4E3, 'V'),
- (0x4E4, 'M', u'ӥ'),
- (0x4E5, 'V'),
- (0x4E6, 'M', u'ӧ'),
- (0x4E7, 'V'),
- (0x4E8, 'M', u'ө'),
- (0x4E9, 'V'),
- (0x4EA, 'M', u'ӫ'),
- (0x4EB, 'V'),
- (0x4EC, 'M', u'ӭ'),
- (0x4ED, 'V'),
- (0x4EE, 'M', u'ӯ'),
- (0x4EF, 'V'),
- (0x4F0, 'M', u'ӱ'),
- (0x4F1, 'V'),
- (0x4F2, 'M', u'ӳ'),
- (0x4F3, 'V'),
- (0x4F4, 'M', u'ӵ'),
- (0x4F5, 'V'),
- (0x4F6, 'M', u'ӷ'),
- (0x4F7, 'V'),
- (0x4F8, 'M', u'ӹ'),
- (0x4F9, 'V'),
- (0x4FA, 'M', u'ӻ'),
- (0x4FB, 'V'),
- (0x4FC, 'M', u'ӽ'),
- (0x4FD, 'V'),
- (0x4FE, 'M', u'ӿ'),
- (0x4FF, 'V'),
- (0x500, 'M', u'ԁ'),
- (0x501, 'V'),
- (0x502, 'M', u'ԃ'),
- (0x503, 'V'),
- ]
-
-def _seg_9():
- return [
- (0x504, 'M', u'ԅ'),
- (0x505, 'V'),
- (0x506, 'M', u'ԇ'),
- (0x507, 'V'),
- (0x508, 'M', u'ԉ'),
- (0x509, 'V'),
- (0x50A, 'M', u'ԋ'),
- (0x50B, 'V'),
- (0x50C, 'M', u'ԍ'),
- (0x50D, 'V'),
- (0x50E, 'M', u'ԏ'),
- (0x50F, 'V'),
- (0x510, 'M', u'ԑ'),
- (0x511, 'V'),
- (0x512, 'M', u'ԓ'),
- (0x513, 'V'),
- (0x514, 'M', u'ԕ'),
- (0x515, 'V'),
- (0x516, 'M', u'ԗ'),
- (0x517, 'V'),
- (0x518, 'M', u'ԙ'),
- (0x519, 'V'),
- (0x51A, 'M', u'ԛ'),
- (0x51B, 'V'),
- (0x51C, 'M', u'ԝ'),
- (0x51D, 'V'),
- (0x51E, 'M', u'ԟ'),
- (0x51F, 'V'),
- (0x520, 'M', u'ԡ'),
- (0x521, 'V'),
- (0x522, 'M', u'ԣ'),
- (0x523, 'V'),
- (0x524, 'M', u'ԥ'),
- (0x525, 'V'),
- (0x526, 'M', u'ԧ'),
- (0x527, 'V'),
- (0x528, 'X'),
- (0x531, 'M', u'ա'),
- (0x532, 'M', u'բ'),
- (0x533, 'M', u'գ'),
- (0x534, 'M', u'դ'),
- (0x535, 'M', u'ե'),
- (0x536, 'M', u'զ'),
- (0x537, 'M', u'է'),
- (0x538, 'M', u'ը'),
- (0x539, 'M', u'թ'),
- (0x53A, 'M', u'ժ'),
- (0x53B, 'M', u'ի'),
- (0x53C, 'M', u'լ'),
- (0x53D, 'M', u'խ'),
- (0x53E, 'M', u'ծ'),
- (0x53F, 'M', u'կ'),
- (0x540, 'M', u'հ'),
- (0x541, 'M', u'ձ'),
- (0x542, 'M', u'ղ'),
- (0x543, 'M', u'ճ'),
- (0x544, 'M', u'մ'),
- (0x545, 'M', u'յ'),
- (0x546, 'M', u'ն'),
- (0x547, 'M', u'շ'),
- (0x548, 'M', u'ո'),
- (0x549, 'M', u'չ'),
- (0x54A, 'M', u'պ'),
- (0x54B, 'M', u'ջ'),
- (0x54C, 'M', u'ռ'),
- (0x54D, 'M', u'ս'),
- (0x54E, 'M', u'վ'),
- (0x54F, 'M', u'տ'),
- (0x550, 'M', u'ր'),
- (0x551, 'M', u'ց'),
- (0x552, 'M', u'ւ'),
- (0x553, 'M', u'փ'),
- (0x554, 'M', u'ք'),
- (0x555, 'M', u'օ'),
- (0x556, 'M', u'ֆ'),
- (0x557, 'X'),
- (0x559, 'V'),
- (0x560, 'X'),
- (0x561, 'V'),
- (0x587, 'M', u'եւ'),
- (0x588, 'X'),
- (0x589, 'V'),
- (0x58B, 'X'),
- (0x58F, 'V'),
- (0x590, 'X'),
- (0x591, 'V'),
- (0x5C8, 'X'),
- (0x5D0, 'V'),
- (0x5EB, 'X'),
- (0x5F0, 'V'),
- (0x5F5, 'X'),
- (0x606, 'V'),
- (0x61C, 'X'),
- (0x61E, 'V'),
- (0x675, 'M', u'اٴ'),
- (0x676, 'M', u'وٴ'),
- (0x677, 'M', u'ۇٴ'),
- (0x678, 'M', u'يٴ'),
- (0x679, 'V'),
- (0x6DD, 'X'),
- ]
-
-def _seg_10():
- return [
- (0x6DE, 'V'),
- (0x70E, 'X'),
- (0x710, 'V'),
- (0x74B, 'X'),
- (0x74D, 'V'),
- (0x7B2, 'X'),
- (0x7C0, 'V'),
- (0x7FB, 'X'),
- (0x800, 'V'),
- (0x82E, 'X'),
- (0x830, 'V'),
- (0x83F, 'X'),
- (0x840, 'V'),
- (0x85C, 'X'),
- (0x85E, 'V'),
- (0x85F, 'X'),
- (0x8A0, 'V'),
- (0x8A1, 'X'),
- (0x8A2, 'V'),
- (0x8AD, 'X'),
- (0x8E4, 'V'),
- (0x8FF, 'X'),
- (0x900, 'V'),
- (0x958, 'M', u'क़'),
- (0x959, 'M', u'ख़'),
- (0x95A, 'M', u'ग़'),
- (0x95B, 'M', u'ज़'),
- (0x95C, 'M', u'ड़'),
- (0x95D, 'M', u'ढ़'),
- (0x95E, 'M', u'फ़'),
- (0x95F, 'M', u'य़'),
- (0x960, 'V'),
- (0x978, 'X'),
- (0x979, 'V'),
- (0x980, 'X'),
- (0x981, 'V'),
- (0x984, 'X'),
- (0x985, 'V'),
- (0x98D, 'X'),
- (0x98F, 'V'),
- (0x991, 'X'),
- (0x993, 'V'),
- (0x9A9, 'X'),
- (0x9AA, 'V'),
- (0x9B1, 'X'),
- (0x9B2, 'V'),
- (0x9B3, 'X'),
- (0x9B6, 'V'),
- (0x9BA, 'X'),
- (0x9BC, 'V'),
- (0x9C5, 'X'),
- (0x9C7, 'V'),
- (0x9C9, 'X'),
- (0x9CB, 'V'),
- (0x9CF, 'X'),
- (0x9D7, 'V'),
- (0x9D8, 'X'),
- (0x9DC, 'M', u'ড়'),
- (0x9DD, 'M', u'ঢ়'),
- (0x9DE, 'X'),
- (0x9DF, 'M', u'য়'),
- (0x9E0, 'V'),
- (0x9E4, 'X'),
- (0x9E6, 'V'),
- (0x9FC, 'X'),
- (0xA01, 'V'),
- (0xA04, 'X'),
- (0xA05, 'V'),
- (0xA0B, 'X'),
- (0xA0F, 'V'),
- (0xA11, 'X'),
- (0xA13, 'V'),
- (0xA29, 'X'),
- (0xA2A, 'V'),
- (0xA31, 'X'),
- (0xA32, 'V'),
- (0xA33, 'M', u'ਲ਼'),
- (0xA34, 'X'),
- (0xA35, 'V'),
- (0xA36, 'M', u'ਸ਼'),
- (0xA37, 'X'),
- (0xA38, 'V'),
- (0xA3A, 'X'),
- (0xA3C, 'V'),
- (0xA3D, 'X'),
- (0xA3E, 'V'),
- (0xA43, 'X'),
- (0xA47, 'V'),
- (0xA49, 'X'),
- (0xA4B, 'V'),
- (0xA4E, 'X'),
- (0xA51, 'V'),
- (0xA52, 'X'),
- (0xA59, 'M', u'ਖ਼'),
- (0xA5A, 'M', u'ਗ਼'),
- (0xA5B, 'M', u'ਜ਼'),
- (0xA5C, 'V'),
- (0xA5D, 'X'),
- (0xA5E, 'M', u'ਫ਼'),
- (0xA5F, 'X'),
- ]
-
-def _seg_11():
- return [
- (0xA66, 'V'),
- (0xA76, 'X'),
- (0xA81, 'V'),
- (0xA84, 'X'),
- (0xA85, 'V'),
- (0xA8E, 'X'),
- (0xA8F, 'V'),
- (0xA92, 'X'),
- (0xA93, 'V'),
- (0xAA9, 'X'),
- (0xAAA, 'V'),
- (0xAB1, 'X'),
- (0xAB2, 'V'),
- (0xAB4, 'X'),
- (0xAB5, 'V'),
- (0xABA, 'X'),
- (0xABC, 'V'),
- (0xAC6, 'X'),
- (0xAC7, 'V'),
- (0xACA, 'X'),
- (0xACB, 'V'),
- (0xACE, 'X'),
- (0xAD0, 'V'),
- (0xAD1, 'X'),
- (0xAE0, 'V'),
- (0xAE4, 'X'),
- (0xAE6, 'V'),
- (0xAF2, 'X'),
- (0xB01, 'V'),
- (0xB04, 'X'),
- (0xB05, 'V'),
- (0xB0D, 'X'),
- (0xB0F, 'V'),
- (0xB11, 'X'),
- (0xB13, 'V'),
- (0xB29, 'X'),
- (0xB2A, 'V'),
- (0xB31, 'X'),
- (0xB32, 'V'),
- (0xB34, 'X'),
- (0xB35, 'V'),
- (0xB3A, 'X'),
- (0xB3C, 'V'),
- (0xB45, 'X'),
- (0xB47, 'V'),
- (0xB49, 'X'),
- (0xB4B, 'V'),
- (0xB4E, 'X'),
- (0xB56, 'V'),
- (0xB58, 'X'),
- (0xB5C, 'M', u'ଡ଼'),
- (0xB5D, 'M', u'ଢ଼'),
- (0xB5E, 'X'),
- (0xB5F, 'V'),
- (0xB64, 'X'),
- (0xB66, 'V'),
- (0xB78, 'X'),
- (0xB82, 'V'),
- (0xB84, 'X'),
- (0xB85, 'V'),
- (0xB8B, 'X'),
- (0xB8E, 'V'),
- (0xB91, 'X'),
- (0xB92, 'V'),
- (0xB96, 'X'),
- (0xB99, 'V'),
- (0xB9B, 'X'),
- (0xB9C, 'V'),
- (0xB9D, 'X'),
- (0xB9E, 'V'),
- (0xBA0, 'X'),
- (0xBA3, 'V'),
- (0xBA5, 'X'),
- (0xBA8, 'V'),
- (0xBAB, 'X'),
- (0xBAE, 'V'),
- (0xBBA, 'X'),
- (0xBBE, 'V'),
- (0xBC3, 'X'),
- (0xBC6, 'V'),
- (0xBC9, 'X'),
- (0xBCA, 'V'),
- (0xBCE, 'X'),
- (0xBD0, 'V'),
- (0xBD1, 'X'),
- (0xBD7, 'V'),
- (0xBD8, 'X'),
- (0xBE6, 'V'),
- (0xBFB, 'X'),
- (0xC01, 'V'),
- (0xC04, 'X'),
- (0xC05, 'V'),
- (0xC0D, 'X'),
- (0xC0E, 'V'),
- (0xC11, 'X'),
- (0xC12, 'V'),
- (0xC29, 'X'),
- (0xC2A, 'V'),
- (0xC34, 'X'),
- (0xC35, 'V'),
- ]
-
-def _seg_12():
- return [
- (0xC3A, 'X'),
- (0xC3D, 'V'),
- (0xC45, 'X'),
- (0xC46, 'V'),
- (0xC49, 'X'),
- (0xC4A, 'V'),
- (0xC4E, 'X'),
- (0xC55, 'V'),
- (0xC57, 'X'),
- (0xC58, 'V'),
- (0xC5A, 'X'),
- (0xC60, 'V'),
- (0xC64, 'X'),
- (0xC66, 'V'),
- (0xC70, 'X'),
- (0xC78, 'V'),
- (0xC80, 'X'),
- (0xC82, 'V'),
- (0xC84, 'X'),
- (0xC85, 'V'),
- (0xC8D, 'X'),
- (0xC8E, 'V'),
- (0xC91, 'X'),
- (0xC92, 'V'),
- (0xCA9, 'X'),
- (0xCAA, 'V'),
- (0xCB4, 'X'),
- (0xCB5, 'V'),
- (0xCBA, 'X'),
- (0xCBC, 'V'),
- (0xCC5, 'X'),
- (0xCC6, 'V'),
- (0xCC9, 'X'),
- (0xCCA, 'V'),
- (0xCCE, 'X'),
- (0xCD5, 'V'),
- (0xCD7, 'X'),
- (0xCDE, 'V'),
- (0xCDF, 'X'),
- (0xCE0, 'V'),
- (0xCE4, 'X'),
- (0xCE6, 'V'),
- (0xCF0, 'X'),
- (0xCF1, 'V'),
- (0xCF3, 'X'),
- (0xD02, 'V'),
- (0xD04, 'X'),
- (0xD05, 'V'),
- (0xD0D, 'X'),
- (0xD0E, 'V'),
- (0xD11, 'X'),
- (0xD12, 'V'),
- (0xD3B, 'X'),
- (0xD3D, 'V'),
- (0xD45, 'X'),
- (0xD46, 'V'),
- (0xD49, 'X'),
- (0xD4A, 'V'),
- (0xD4F, 'X'),
- (0xD57, 'V'),
- (0xD58, 'X'),
- (0xD60, 'V'),
- (0xD64, 'X'),
- (0xD66, 'V'),
- (0xD76, 'X'),
- (0xD79, 'V'),
- (0xD80, 'X'),
- (0xD82, 'V'),
- (0xD84, 'X'),
- (0xD85, 'V'),
- (0xD97, 'X'),
- (0xD9A, 'V'),
- (0xDB2, 'X'),
- (0xDB3, 'V'),
- (0xDBC, 'X'),
- (0xDBD, 'V'),
- (0xDBE, 'X'),
- (0xDC0, 'V'),
- (0xDC7, 'X'),
- (0xDCA, 'V'),
- (0xDCB, 'X'),
- (0xDCF, 'V'),
- (0xDD5, 'X'),
- (0xDD6, 'V'),
- (0xDD7, 'X'),
- (0xDD8, 'V'),
- (0xDE0, 'X'),
- (0xDF2, 'V'),
- (0xDF5, 'X'),
- (0xE01, 'V'),
- (0xE33, 'M', u'ํา'),
- (0xE34, 'V'),
- (0xE3B, 'X'),
- (0xE3F, 'V'),
- (0xE5C, 'X'),
- (0xE81, 'V'),
- (0xE83, 'X'),
- (0xE84, 'V'),
- (0xE85, 'X'),
- (0xE87, 'V'),
- ]
-
-def _seg_13():
- return [
- (0xE89, 'X'),
- (0xE8A, 'V'),
- (0xE8B, 'X'),
- (0xE8D, 'V'),
- (0xE8E, 'X'),
- (0xE94, 'V'),
- (0xE98, 'X'),
- (0xE99, 'V'),
- (0xEA0, 'X'),
- (0xEA1, 'V'),
- (0xEA4, 'X'),
- (0xEA5, 'V'),
- (0xEA6, 'X'),
- (0xEA7, 'V'),
- (0xEA8, 'X'),
- (0xEAA, 'V'),
- (0xEAC, 'X'),
- (0xEAD, 'V'),
- (0xEB3, 'M', u'ໍາ'),
- (0xEB4, 'V'),
- (0xEBA, 'X'),
- (0xEBB, 'V'),
- (0xEBE, 'X'),
- (0xEC0, 'V'),
- (0xEC5, 'X'),
- (0xEC6, 'V'),
- (0xEC7, 'X'),
- (0xEC8, 'V'),
- (0xECE, 'X'),
- (0xED0, 'V'),
- (0xEDA, 'X'),
- (0xEDC, 'M', u'ຫນ'),
- (0xEDD, 'M', u'ຫມ'),
- (0xEDE, 'V'),
- (0xEE0, 'X'),
- (0xF00, 'V'),
- (0xF0C, 'M', u'་'),
- (0xF0D, 'V'),
- (0xF43, 'M', u'གྷ'),
- (0xF44, 'V'),
- (0xF48, 'X'),
- (0xF49, 'V'),
- (0xF4D, 'M', u'ཌྷ'),
- (0xF4E, 'V'),
- (0xF52, 'M', u'དྷ'),
- (0xF53, 'V'),
- (0xF57, 'M', u'བྷ'),
- (0xF58, 'V'),
- (0xF5C, 'M', u'ཛྷ'),
- (0xF5D, 'V'),
- (0xF69, 'M', u'ཀྵ'),
- (0xF6A, 'V'),
- (0xF6D, 'X'),
- (0xF71, 'V'),
- (0xF73, 'M', u'ཱི'),
- (0xF74, 'V'),
- (0xF75, 'M', u'ཱུ'),
- (0xF76, 'M', u'ྲྀ'),
- (0xF77, 'M', u'ྲཱྀ'),
- (0xF78, 'M', u'ླྀ'),
- (0xF79, 'M', u'ླཱྀ'),
- (0xF7A, 'V'),
- (0xF81, 'M', u'ཱྀ'),
- (0xF82, 'V'),
- (0xF93, 'M', u'ྒྷ'),
- (0xF94, 'V'),
- (0xF98, 'X'),
- (0xF99, 'V'),
- (0xF9D, 'M', u'ྜྷ'),
- (0xF9E, 'V'),
- (0xFA2, 'M', u'ྡྷ'),
- (0xFA3, 'V'),
- (0xFA7, 'M', u'ྦྷ'),
- (0xFA8, 'V'),
- (0xFAC, 'M', u'ྫྷ'),
- (0xFAD, 'V'),
- (0xFB9, 'M', u'ྐྵ'),
- (0xFBA, 'V'),
- (0xFBD, 'X'),
- (0xFBE, 'V'),
- (0xFCD, 'X'),
- (0xFCE, 'V'),
- (0xFDB, 'X'),
- (0x1000, 'V'),
- (0x10A0, 'X'),
- (0x10C7, 'M', u'ⴧ'),
- (0x10C8, 'X'),
- (0x10CD, 'M', u'ⴭ'),
- (0x10CE, 'X'),
- (0x10D0, 'V'),
- (0x10FC, 'M', u'ნ'),
- (0x10FD, 'V'),
- (0x115F, 'X'),
- (0x1161, 'V'),
- (0x1249, 'X'),
- (0x124A, 'V'),
- (0x124E, 'X'),
- (0x1250, 'V'),
- (0x1257, 'X'),
- (0x1258, 'V'),
- ]
-
-def _seg_14():
- return [
- (0x1259, 'X'),
- (0x125A, 'V'),
- (0x125E, 'X'),
- (0x1260, 'V'),
- (0x1289, 'X'),
- (0x128A, 'V'),
- (0x128E, 'X'),
- (0x1290, 'V'),
- (0x12B1, 'X'),
- (0x12B2, 'V'),
- (0x12B6, 'X'),
- (0x12B8, 'V'),
- (0x12BF, 'X'),
- (0x12C0, 'V'),
- (0x12C1, 'X'),
- (0x12C2, 'V'),
- (0x12C6, 'X'),
- (0x12C8, 'V'),
- (0x12D7, 'X'),
- (0x12D8, 'V'),
- (0x1311, 'X'),
- (0x1312, 'V'),
- (0x1316, 'X'),
- (0x1318, 'V'),
- (0x135B, 'X'),
- (0x135D, 'V'),
- (0x137D, 'X'),
- (0x1380, 'V'),
- (0x139A, 'X'),
- (0x13A0, 'V'),
- (0x13F5, 'X'),
- (0x1400, 'V'),
- (0x1680, 'X'),
- (0x1681, 'V'),
- (0x169D, 'X'),
- (0x16A0, 'V'),
- (0x16F1, 'X'),
- (0x1700, 'V'),
- (0x170D, 'X'),
- (0x170E, 'V'),
- (0x1715, 'X'),
- (0x1720, 'V'),
- (0x1737, 'X'),
- (0x1740, 'V'),
- (0x1754, 'X'),
- (0x1760, 'V'),
- (0x176D, 'X'),
- (0x176E, 'V'),
- (0x1771, 'X'),
- (0x1772, 'V'),
- (0x1774, 'X'),
- (0x1780, 'V'),
- (0x17B4, 'X'),
- (0x17B6, 'V'),
- (0x17DE, 'X'),
- (0x17E0, 'V'),
- (0x17EA, 'X'),
- (0x17F0, 'V'),
- (0x17FA, 'X'),
- (0x1800, 'V'),
- (0x1806, 'X'),
- (0x1807, 'V'),
- (0x180B, 'I'),
- (0x180E, 'X'),
- (0x1810, 'V'),
- (0x181A, 'X'),
- (0x1820, 'V'),
- (0x1878, 'X'),
- (0x1880, 'V'),
- (0x18AB, 'X'),
- (0x18B0, 'V'),
- (0x18F6, 'X'),
- (0x1900, 'V'),
- (0x191D, 'X'),
- (0x1920, 'V'),
- (0x192C, 'X'),
- (0x1930, 'V'),
- (0x193C, 'X'),
- (0x1940, 'V'),
- (0x1941, 'X'),
- (0x1944, 'V'),
- (0x196E, 'X'),
- (0x1970, 'V'),
- (0x1975, 'X'),
- (0x1980, 'V'),
- (0x19AC, 'X'),
- (0x19B0, 'V'),
- (0x19CA, 'X'),
- (0x19D0, 'V'),
- (0x19DB, 'X'),
- (0x19DE, 'V'),
- (0x1A1C, 'X'),
- (0x1A1E, 'V'),
- (0x1A5F, 'X'),
- (0x1A60, 'V'),
- (0x1A7D, 'X'),
- (0x1A7F, 'V'),
- (0x1A8A, 'X'),
- (0x1A90, 'V'),
- (0x1A9A, 'X'),
- ]
-
-def _seg_15():
- return [
- (0x1AA0, 'V'),
- (0x1AAE, 'X'),
- (0x1B00, 'V'),
- (0x1B4C, 'X'),
- (0x1B50, 'V'),
- (0x1B7D, 'X'),
- (0x1B80, 'V'),
- (0x1BF4, 'X'),
- (0x1BFC, 'V'),
- (0x1C38, 'X'),
- (0x1C3B, 'V'),
- (0x1C4A, 'X'),
- (0x1C4D, 'V'),
- (0x1C80, 'X'),
- (0x1CC0, 'V'),
- (0x1CC8, 'X'),
- (0x1CD0, 'V'),
- (0x1CF7, 'X'),
- (0x1D00, 'V'),
- (0x1D2C, 'M', u'a'),
- (0x1D2D, 'M', u'æ'),
- (0x1D2E, 'M', u'b'),
- (0x1D2F, 'V'),
- (0x1D30, 'M', u'd'),
- (0x1D31, 'M', u'e'),
- (0x1D32, 'M', u'ǝ'),
- (0x1D33, 'M', u'g'),
- (0x1D34, 'M', u'h'),
- (0x1D35, 'M', u'i'),
- (0x1D36, 'M', u'j'),
- (0x1D37, 'M', u'k'),
- (0x1D38, 'M', u'l'),
- (0x1D39, 'M', u'm'),
- (0x1D3A, 'M', u'n'),
- (0x1D3B, 'V'),
- (0x1D3C, 'M', u'o'),
- (0x1D3D, 'M', u'ȣ'),
- (0x1D3E, 'M', u'p'),
- (0x1D3F, 'M', u'r'),
- (0x1D40, 'M', u't'),
- (0x1D41, 'M', u'u'),
- (0x1D42, 'M', u'w'),
- (0x1D43, 'M', u'a'),
- (0x1D44, 'M', u'ɐ'),
- (0x1D45, 'M', u'ɑ'),
- (0x1D46, 'M', u'ᴂ'),
- (0x1D47, 'M', u'b'),
- (0x1D48, 'M', u'd'),
- (0x1D49, 'M', u'e'),
- (0x1D4A, 'M', u'ə'),
- (0x1D4B, 'M', u'ɛ'),
- (0x1D4C, 'M', u'ɜ'),
- (0x1D4D, 'M', u'g'),
- (0x1D4E, 'V'),
- (0x1D4F, 'M', u'k'),
- (0x1D50, 'M', u'm'),
- (0x1D51, 'M', u'ŋ'),
- (0x1D52, 'M', u'o'),
- (0x1D53, 'M', u'ɔ'),
- (0x1D54, 'M', u'ᴖ'),
- (0x1D55, 'M', u'ᴗ'),
- (0x1D56, 'M', u'p'),
- (0x1D57, 'M', u't'),
- (0x1D58, 'M', u'u'),
- (0x1D59, 'M', u'ᴝ'),
- (0x1D5A, 'M', u'ɯ'),
- (0x1D5B, 'M', u'v'),
- (0x1D5C, 'M', u'ᴥ'),
- (0x1D5D, 'M', u'β'),
- (0x1D5E, 'M', u'γ'),
- (0x1D5F, 'M', u'δ'),
- (0x1D60, 'M', u'φ'),
- (0x1D61, 'M', u'χ'),
- (0x1D62, 'M', u'i'),
- (0x1D63, 'M', u'r'),
- (0x1D64, 'M', u'u'),
- (0x1D65, 'M', u'v'),
- (0x1D66, 'M', u'β'),
- (0x1D67, 'M', u'γ'),
- (0x1D68, 'M', u'ρ'),
- (0x1D69, 'M', u'φ'),
- (0x1D6A, 'M', u'χ'),
- (0x1D6B, 'V'),
- (0x1D78, 'M', u'н'),
- (0x1D79, 'V'),
- (0x1D9B, 'M', u'ɒ'),
- (0x1D9C, 'M', u'c'),
- (0x1D9D, 'M', u'ɕ'),
- (0x1D9E, 'M', u'ð'),
- (0x1D9F, 'M', u'ɜ'),
- (0x1DA0, 'M', u'f'),
- (0x1DA1, 'M', u'ɟ'),
- (0x1DA2, 'M', u'ɡ'),
- (0x1DA3, 'M', u'ɥ'),
- (0x1DA4, 'M', u'ɨ'),
- (0x1DA5, 'M', u'ɩ'),
- (0x1DA6, 'M', u'ɪ'),
- (0x1DA7, 'M', u'ᵻ'),
- (0x1DA8, 'M', u'ʝ'),
- (0x1DA9, 'M', u'ɭ'),
- ]
-
-def _seg_16():
- return [
- (0x1DAA, 'M', u'ᶅ'),
- (0x1DAB, 'M', u'ʟ'),
- (0x1DAC, 'M', u'ɱ'),
- (0x1DAD, 'M', u'ɰ'),
- (0x1DAE, 'M', u'ɲ'),
- (0x1DAF, 'M', u'ɳ'),
- (0x1DB0, 'M', u'ɴ'),
- (0x1DB1, 'M', u'ɵ'),
- (0x1DB2, 'M', u'ɸ'),
- (0x1DB3, 'M', u'ʂ'),
- (0x1DB4, 'M', u'ʃ'),
- (0x1DB5, 'M', u'ƫ'),
- (0x1DB6, 'M', u'ʉ'),
- (0x1DB7, 'M', u'ʊ'),
- (0x1DB8, 'M', u'ᴜ'),
- (0x1DB9, 'M', u'ʋ'),
- (0x1DBA, 'M', u'ʌ'),
- (0x1DBB, 'M', u'z'),
- (0x1DBC, 'M', u'ʐ'),
- (0x1DBD, 'M', u'ʑ'),
- (0x1DBE, 'M', u'ʒ'),
- (0x1DBF, 'M', u'θ'),
- (0x1DC0, 'V'),
- (0x1DE7, 'X'),
- (0x1DFC, 'V'),
- (0x1E00, 'M', u'ḁ'),
- (0x1E01, 'V'),
- (0x1E02, 'M', u'ḃ'),
- (0x1E03, 'V'),
- (0x1E04, 'M', u'ḅ'),
- (0x1E05, 'V'),
- (0x1E06, 'M', u'ḇ'),
- (0x1E07, 'V'),
- (0x1E08, 'M', u'ḉ'),
- (0x1E09, 'V'),
- (0x1E0A, 'M', u'ḋ'),
- (0x1E0B, 'V'),
- (0x1E0C, 'M', u'ḍ'),
- (0x1E0D, 'V'),
- (0x1E0E, 'M', u'ḏ'),
- (0x1E0F, 'V'),
- (0x1E10, 'M', u'ḑ'),
- (0x1E11, 'V'),
- (0x1E12, 'M', u'ḓ'),
- (0x1E13, 'V'),
- (0x1E14, 'M', u'ḕ'),
- (0x1E15, 'V'),
- (0x1E16, 'M', u'ḗ'),
- (0x1E17, 'V'),
- (0x1E18, 'M', u'ḙ'),
- (0x1E19, 'V'),
- (0x1E1A, 'M', u'ḛ'),
- (0x1E1B, 'V'),
- (0x1E1C, 'M', u'ḝ'),
- (0x1E1D, 'V'),
- (0x1E1E, 'M', u'ḟ'),
- (0x1E1F, 'V'),
- (0x1E20, 'M', u'ḡ'),
- (0x1E21, 'V'),
- (0x1E22, 'M', u'ḣ'),
- (0x1E23, 'V'),
- (0x1E24, 'M', u'ḥ'),
- (0x1E25, 'V'),
- (0x1E26, 'M', u'ḧ'),
- (0x1E27, 'V'),
- (0x1E28, 'M', u'ḩ'),
- (0x1E29, 'V'),
- (0x1E2A, 'M', u'ḫ'),
- (0x1E2B, 'V'),
- (0x1E2C, 'M', u'ḭ'),
- (0x1E2D, 'V'),
- (0x1E2E, 'M', u'ḯ'),
- (0x1E2F, 'V'),
- (0x1E30, 'M', u'ḱ'),
- (0x1E31, 'V'),
- (0x1E32, 'M', u'ḳ'),
- (0x1E33, 'V'),
- (0x1E34, 'M', u'ḵ'),
- (0x1E35, 'V'),
- (0x1E36, 'M', u'ḷ'),
- (0x1E37, 'V'),
- (0x1E38, 'M', u'ḹ'),
- (0x1E39, 'V'),
- (0x1E3A, 'M', u'ḻ'),
- (0x1E3B, 'V'),
- (0x1E3C, 'M', u'ḽ'),
- (0x1E3D, 'V'),
- (0x1E3E, 'M', u'ḿ'),
- (0x1E3F, 'V'),
- (0x1E40, 'M', u'ṁ'),
- (0x1E41, 'V'),
- (0x1E42, 'M', u'ṃ'),
- (0x1E43, 'V'),
- (0x1E44, 'M', u'ṅ'),
- (0x1E45, 'V'),
- (0x1E46, 'M', u'ṇ'),
- (0x1E47, 'V'),
- (0x1E48, 'M', u'ṉ'),
- (0x1E49, 'V'),
- (0x1E4A, 'M', u'ṋ'),
- ]
-
-def _seg_17():
- return [
- (0x1E4B, 'V'),
- (0x1E4C, 'M', u'ṍ'),
- (0x1E4D, 'V'),
- (0x1E4E, 'M', u'ṏ'),
- (0x1E4F, 'V'),
- (0x1E50, 'M', u'ṑ'),
- (0x1E51, 'V'),
- (0x1E52, 'M', u'ṓ'),
- (0x1E53, 'V'),
- (0x1E54, 'M', u'ṕ'),
- (0x1E55, 'V'),
- (0x1E56, 'M', u'ṗ'),
- (0x1E57, 'V'),
- (0x1E58, 'M', u'ṙ'),
- (0x1E59, 'V'),
- (0x1E5A, 'M', u'ṛ'),
- (0x1E5B, 'V'),
- (0x1E5C, 'M', u'ṝ'),
- (0x1E5D, 'V'),
- (0x1E5E, 'M', u'ṟ'),
- (0x1E5F, 'V'),
- (0x1E60, 'M', u'ṡ'),
- (0x1E61, 'V'),
- (0x1E62, 'M', u'ṣ'),
- (0x1E63, 'V'),
- (0x1E64, 'M', u'ṥ'),
- (0x1E65, 'V'),
- (0x1E66, 'M', u'ṧ'),
- (0x1E67, 'V'),
- (0x1E68, 'M', u'ṩ'),
- (0x1E69, 'V'),
- (0x1E6A, 'M', u'ṫ'),
- (0x1E6B, 'V'),
- (0x1E6C, 'M', u'ṭ'),
- (0x1E6D, 'V'),
- (0x1E6E, 'M', u'ṯ'),
- (0x1E6F, 'V'),
- (0x1E70, 'M', u'ṱ'),
- (0x1E71, 'V'),
- (0x1E72, 'M', u'ṳ'),
- (0x1E73, 'V'),
- (0x1E74, 'M', u'ṵ'),
- (0x1E75, 'V'),
- (0x1E76, 'M', u'ṷ'),
- (0x1E77, 'V'),
- (0x1E78, 'M', u'ṹ'),
- (0x1E79, 'V'),
- (0x1E7A, 'M', u'ṻ'),
- (0x1E7B, 'V'),
- (0x1E7C, 'M', u'ṽ'),
- (0x1E7D, 'V'),
- (0x1E7E, 'M', u'ṿ'),
- (0x1E7F, 'V'),
- (0x1E80, 'M', u'ẁ'),
- (0x1E81, 'V'),
- (0x1E82, 'M', u'ẃ'),
- (0x1E83, 'V'),
- (0x1E84, 'M', u'ẅ'),
- (0x1E85, 'V'),
- (0x1E86, 'M', u'ẇ'),
- (0x1E87, 'V'),
- (0x1E88, 'M', u'ẉ'),
- (0x1E89, 'V'),
- (0x1E8A, 'M', u'ẋ'),
- (0x1E8B, 'V'),
- (0x1E8C, 'M', u'ẍ'),
- (0x1E8D, 'V'),
- (0x1E8E, 'M', u'ẏ'),
- (0x1E8F, 'V'),
- (0x1E90, 'M', u'ẑ'),
- (0x1E91, 'V'),
- (0x1E92, 'M', u'ẓ'),
- (0x1E93, 'V'),
- (0x1E94, 'M', u'ẕ'),
- (0x1E95, 'V'),
- (0x1E9A, 'M', u'aʾ'),
- (0x1E9B, 'M', u'ṡ'),
- (0x1E9C, 'V'),
- (0x1E9E, 'M', u'ss'),
- (0x1E9F, 'V'),
- (0x1EA0, 'M', u'ạ'),
- (0x1EA1, 'V'),
- (0x1EA2, 'M', u'ả'),
- (0x1EA3, 'V'),
- (0x1EA4, 'M', u'ấ'),
- (0x1EA5, 'V'),
- (0x1EA6, 'M', u'ầ'),
- (0x1EA7, 'V'),
- (0x1EA8, 'M', u'ẩ'),
- (0x1EA9, 'V'),
- (0x1EAA, 'M', u'ẫ'),
- (0x1EAB, 'V'),
- (0x1EAC, 'M', u'ậ'),
- (0x1EAD, 'V'),
- (0x1EAE, 'M', u'ắ'),
- (0x1EAF, 'V'),
- (0x1EB0, 'M', u'ằ'),
- (0x1EB1, 'V'),
- (0x1EB2, 'M', u'ẳ'),
- (0x1EB3, 'V'),
- ]
-
-def _seg_18():
- return [
- (0x1EB4, 'M', u'ẵ'),
- (0x1EB5, 'V'),
- (0x1EB6, 'M', u'ặ'),
- (0x1EB7, 'V'),
- (0x1EB8, 'M', u'ẹ'),
- (0x1EB9, 'V'),
- (0x1EBA, 'M', u'ẻ'),
- (0x1EBB, 'V'),
- (0x1EBC, 'M', u'ẽ'),
- (0x1EBD, 'V'),
- (0x1EBE, 'M', u'ế'),
- (0x1EBF, 'V'),
- (0x1EC0, 'M', u'ề'),
- (0x1EC1, 'V'),
- (0x1EC2, 'M', u'ể'),
- (0x1EC3, 'V'),
- (0x1EC4, 'M', u'ễ'),
- (0x1EC5, 'V'),
- (0x1EC6, 'M', u'ệ'),
- (0x1EC7, 'V'),
- (0x1EC8, 'M', u'ỉ'),
- (0x1EC9, 'V'),
- (0x1ECA, 'M', u'ị'),
- (0x1ECB, 'V'),
- (0x1ECC, 'M', u'ọ'),
- (0x1ECD, 'V'),
- (0x1ECE, 'M', u'ỏ'),
- (0x1ECF, 'V'),
- (0x1ED0, 'M', u'ố'),
- (0x1ED1, 'V'),
- (0x1ED2, 'M', u'ồ'),
- (0x1ED3, 'V'),
- (0x1ED4, 'M', u'ổ'),
- (0x1ED5, 'V'),
- (0x1ED6, 'M', u'ỗ'),
- (0x1ED7, 'V'),
- (0x1ED8, 'M', u'ộ'),
- (0x1ED9, 'V'),
- (0x1EDA, 'M', u'ớ'),
- (0x1EDB, 'V'),
- (0x1EDC, 'M', u'ờ'),
- (0x1EDD, 'V'),
- (0x1EDE, 'M', u'ở'),
- (0x1EDF, 'V'),
- (0x1EE0, 'M', u'ỡ'),
- (0x1EE1, 'V'),
- (0x1EE2, 'M', u'ợ'),
- (0x1EE3, 'V'),
- (0x1EE4, 'M', u'ụ'),
- (0x1EE5, 'V'),
- (0x1EE6, 'M', u'ủ'),
- (0x1EE7, 'V'),
- (0x1EE8, 'M', u'ứ'),
- (0x1EE9, 'V'),
- (0x1EEA, 'M', u'ừ'),
- (0x1EEB, 'V'),
- (0x1EEC, 'M', u'ử'),
- (0x1EED, 'V'),
- (0x1EEE, 'M', u'ữ'),
- (0x1EEF, 'V'),
- (0x1EF0, 'M', u'ự'),
- (0x1EF1, 'V'),
- (0x1EF2, 'M', u'ỳ'),
- (0x1EF3, 'V'),
- (0x1EF4, 'M', u'ỵ'),
- (0x1EF5, 'V'),
- (0x1EF6, 'M', u'ỷ'),
- (0x1EF7, 'V'),
- (0x1EF8, 'M', u'ỹ'),
- (0x1EF9, 'V'),
- (0x1EFA, 'M', u'ỻ'),
- (0x1EFB, 'V'),
- (0x1EFC, 'M', u'ỽ'),
- (0x1EFD, 'V'),
- (0x1EFE, 'M', u'ỿ'),
- (0x1EFF, 'V'),
- (0x1F08, 'M', u'ἀ'),
- (0x1F09, 'M', u'ἁ'),
- (0x1F0A, 'M', u'ἂ'),
- (0x1F0B, 'M', u'ἃ'),
- (0x1F0C, 'M', u'ἄ'),
- (0x1F0D, 'M', u'ἅ'),
- (0x1F0E, 'M', u'ἆ'),
- (0x1F0F, 'M', u'ἇ'),
- (0x1F10, 'V'),
- (0x1F16, 'X'),
- (0x1F18, 'M', u'ἐ'),
- (0x1F19, 'M', u'ἑ'),
- (0x1F1A, 'M', u'ἒ'),
- (0x1F1B, 'M', u'ἓ'),
- (0x1F1C, 'M', u'ἔ'),
- (0x1F1D, 'M', u'ἕ'),
- (0x1F1E, 'X'),
- (0x1F20, 'V'),
- (0x1F28, 'M', u'ἠ'),
- (0x1F29, 'M', u'ἡ'),
- (0x1F2A, 'M', u'ἢ'),
- (0x1F2B, 'M', u'ἣ'),
- (0x1F2C, 'M', u'ἤ'),
- (0x1F2D, 'M', u'ἥ'),
- ]
-
-def _seg_19():
- return [
- (0x1F2E, 'M', u'ἦ'),
- (0x1F2F, 'M', u'ἧ'),
- (0x1F30, 'V'),
- (0x1F38, 'M', u'ἰ'),
- (0x1F39, 'M', u'ἱ'),
- (0x1F3A, 'M', u'ἲ'),
- (0x1F3B, 'M', u'ἳ'),
- (0x1F3C, 'M', u'ἴ'),
- (0x1F3D, 'M', u'ἵ'),
- (0x1F3E, 'M', u'ἶ'),
- (0x1F3F, 'M', u'ἷ'),
- (0x1F40, 'V'),
- (0x1F46, 'X'),
- (0x1F48, 'M', u'ὀ'),
- (0x1F49, 'M', u'ὁ'),
- (0x1F4A, 'M', u'ὂ'),
- (0x1F4B, 'M', u'ὃ'),
- (0x1F4C, 'M', u'ὄ'),
- (0x1F4D, 'M', u'ὅ'),
- (0x1F4E, 'X'),
- (0x1F50, 'V'),
- (0x1F58, 'X'),
- (0x1F59, 'M', u'ὑ'),
- (0x1F5A, 'X'),
- (0x1F5B, 'M', u'ὓ'),
- (0x1F5C, 'X'),
- (0x1F5D, 'M', u'ὕ'),
- (0x1F5E, 'X'),
- (0x1F5F, 'M', u'ὗ'),
- (0x1F60, 'V'),
- (0x1F68, 'M', u'ὠ'),
- (0x1F69, 'M', u'ὡ'),
- (0x1F6A, 'M', u'ὢ'),
- (0x1F6B, 'M', u'ὣ'),
- (0x1F6C, 'M', u'ὤ'),
- (0x1F6D, 'M', u'ὥ'),
- (0x1F6E, 'M', u'ὦ'),
- (0x1F6F, 'M', u'ὧ'),
- (0x1F70, 'V'),
- (0x1F71, 'M', u'ά'),
- (0x1F72, 'V'),
- (0x1F73, 'M', u'έ'),
- (0x1F74, 'V'),
- (0x1F75, 'M', u'ή'),
- (0x1F76, 'V'),
- (0x1F77, 'M', u'ί'),
- (0x1F78, 'V'),
- (0x1F79, 'M', u'ό'),
- (0x1F7A, 'V'),
- (0x1F7B, 'M', u'ύ'),
- (0x1F7C, 'V'),
- (0x1F7D, 'M', u'ώ'),
- (0x1F7E, 'X'),
- (0x1F80, 'M', u'ἀι'),
- (0x1F81, 'M', u'ἁι'),
- (0x1F82, 'M', u'ἂι'),
- (0x1F83, 'M', u'ἃι'),
- (0x1F84, 'M', u'ἄι'),
- (0x1F85, 'M', u'ἅι'),
- (0x1F86, 'M', u'ἆι'),
- (0x1F87, 'M', u'ἇι'),
- (0x1F88, 'M', u'ἀι'),
- (0x1F89, 'M', u'ἁι'),
- (0x1F8A, 'M', u'ἂι'),
- (0x1F8B, 'M', u'ἃι'),
- (0x1F8C, 'M', u'ἄι'),
- (0x1F8D, 'M', u'ἅι'),
- (0x1F8E, 'M', u'ἆι'),
- (0x1F8F, 'M', u'ἇι'),
- (0x1F90, 'M', u'ἠι'),
- (0x1F91, 'M', u'ἡι'),
- (0x1F92, 'M', u'ἢι'),
- (0x1F93, 'M', u'ἣι'),
- (0x1F94, 'M', u'ἤι'),
- (0x1F95, 'M', u'ἥι'),
- (0x1F96, 'M', u'ἦι'),
- (0x1F97, 'M', u'ἧι'),
- (0x1F98, 'M', u'ἠι'),
- (0x1F99, 'M', u'ἡι'),
- (0x1F9A, 'M', u'ἢι'),
- (0x1F9B, 'M', u'ἣι'),
- (0x1F9C, 'M', u'ἤι'),
- (0x1F9D, 'M', u'ἥι'),
- (0x1F9E, 'M', u'ἦι'),
- (0x1F9F, 'M', u'ἧι'),
- (0x1FA0, 'M', u'ὠι'),
- (0x1FA1, 'M', u'ὡι'),
- (0x1FA2, 'M', u'ὢι'),
- (0x1FA3, 'M', u'ὣι'),
- (0x1FA4, 'M', u'ὤι'),
- (0x1FA5, 'M', u'ὥι'),
- (0x1FA6, 'M', u'ὦι'),
- (0x1FA7, 'M', u'ὧι'),
- (0x1FA8, 'M', u'ὠι'),
- (0x1FA9, 'M', u'ὡι'),
- (0x1FAA, 'M', u'ὢι'),
- (0x1FAB, 'M', u'ὣι'),
- (0x1FAC, 'M', u'ὤι'),
- (0x1FAD, 'M', u'ὥι'),
- (0x1FAE, 'M', u'ὦι'),
- ]
-
-def _seg_20():
- return [
- (0x1FAF, 'M', u'ὧι'),
- (0x1FB0, 'V'),
- (0x1FB2, 'M', u'ὰι'),
- (0x1FB3, 'M', u'αι'),
- (0x1FB4, 'M', u'άι'),
- (0x1FB5, 'X'),
- (0x1FB6, 'V'),
- (0x1FB7, 'M', u'ᾶι'),
- (0x1FB8, 'M', u'ᾰ'),
- (0x1FB9, 'M', u'ᾱ'),
- (0x1FBA, 'M', u'ὰ'),
- (0x1FBB, 'M', u'ά'),
- (0x1FBC, 'M', u'αι'),
- (0x1FBD, '3', u' ̓'),
- (0x1FBE, 'M', u'ι'),
- (0x1FBF, '3', u' ̓'),
- (0x1FC0, '3', u' ͂'),
- (0x1FC1, '3', u' ̈͂'),
- (0x1FC2, 'M', u'ὴι'),
- (0x1FC3, 'M', u'ηι'),
- (0x1FC4, 'M', u'ήι'),
- (0x1FC5, 'X'),
- (0x1FC6, 'V'),
- (0x1FC7, 'M', u'ῆι'),
- (0x1FC8, 'M', u'ὲ'),
- (0x1FC9, 'M', u'έ'),
- (0x1FCA, 'M', u'ὴ'),
- (0x1FCB, 'M', u'ή'),
- (0x1FCC, 'M', u'ηι'),
- (0x1FCD, '3', u' ̓̀'),
- (0x1FCE, '3', u' ̓́'),
- (0x1FCF, '3', u' ̓͂'),
- (0x1FD0, 'V'),
- (0x1FD3, 'M', u'ΐ'),
- (0x1FD4, 'X'),
- (0x1FD6, 'V'),
- (0x1FD8, 'M', u'ῐ'),
- (0x1FD9, 'M', u'ῑ'),
- (0x1FDA, 'M', u'ὶ'),
- (0x1FDB, 'M', u'ί'),
- (0x1FDC, 'X'),
- (0x1FDD, '3', u' ̔̀'),
- (0x1FDE, '3', u' ̔́'),
- (0x1FDF, '3', u' ̔͂'),
- (0x1FE0, 'V'),
- (0x1FE3, 'M', u'ΰ'),
- (0x1FE4, 'V'),
- (0x1FE8, 'M', u'ῠ'),
- (0x1FE9, 'M', u'ῡ'),
- (0x1FEA, 'M', u'ὺ'),
- (0x1FEB, 'M', u'ύ'),
- (0x1FEC, 'M', u'ῥ'),
- (0x1FED, '3', u' ̈̀'),
- (0x1FEE, '3', u' ̈́'),
- (0x1FEF, '3', u'`'),
- (0x1FF0, 'X'),
- (0x1FF2, 'M', u'ὼι'),
- (0x1FF3, 'M', u'ωι'),
- (0x1FF4, 'M', u'ώι'),
- (0x1FF5, 'X'),
- (0x1FF6, 'V'),
- (0x1FF7, 'M', u'ῶι'),
- (0x1FF8, 'M', u'ὸ'),
- (0x1FF9, 'M', u'ό'),
- (0x1FFA, 'M', u'ὼ'),
- (0x1FFB, 'M', u'ώ'),
- (0x1FFC, 'M', u'ωι'),
- (0x1FFD, '3', u' ́'),
- (0x1FFE, '3', u' ̔'),
- (0x1FFF, 'X'),
- (0x2000, '3', u' '),
- (0x200B, 'I'),
- (0x200C, 'D', u''),
- (0x200E, 'X'),
- (0x2010, 'V'),
- (0x2011, 'M', u'‐'),
- (0x2012, 'V'),
- (0x2017, '3', u' ̳'),
- (0x2018, 'V'),
- (0x2024, 'X'),
- (0x2027, 'V'),
- (0x2028, 'X'),
- (0x202F, '3', u' '),
- (0x2030, 'V'),
- (0x2033, 'M', u'′′'),
- (0x2034, 'M', u'′′′'),
- (0x2035, 'V'),
- (0x2036, 'M', u'‵‵'),
- (0x2037, 'M', u'‵‵‵'),
- (0x2038, 'V'),
- (0x203C, '3', u'!!'),
- (0x203D, 'V'),
- (0x203E, '3', u' ̅'),
- (0x203F, 'V'),
- (0x2047, '3', u'??'),
- (0x2048, '3', u'?!'),
- (0x2049, '3', u'!?'),
- (0x204A, 'V'),
- (0x2057, 'M', u'′′′′'),
- (0x2058, 'V'),
- ]
-
-def _seg_21():
- return [
- (0x205F, '3', u' '),
- (0x2060, 'I'),
- (0x2061, 'X'),
- (0x2064, 'I'),
- (0x2065, 'X'),
- (0x2070, 'M', u'0'),
- (0x2071, 'M', u'i'),
- (0x2072, 'X'),
- (0x2074, 'M', u'4'),
- (0x2075, 'M', u'5'),
- (0x2076, 'M', u'6'),
- (0x2077, 'M', u'7'),
- (0x2078, 'M', u'8'),
- (0x2079, 'M', u'9'),
- (0x207A, '3', u'+'),
- (0x207B, 'M', u'−'),
- (0x207C, '3', u'='),
- (0x207D, '3', u'('),
- (0x207E, '3', u')'),
- (0x207F, 'M', u'n'),
- (0x2080, 'M', u'0'),
- (0x2081, 'M', u'1'),
- (0x2082, 'M', u'2'),
- (0x2083, 'M', u'3'),
- (0x2084, 'M', u'4'),
- (0x2085, 'M', u'5'),
- (0x2086, 'M', u'6'),
- (0x2087, 'M', u'7'),
- (0x2088, 'M', u'8'),
- (0x2089, 'M', u'9'),
- (0x208A, '3', u'+'),
- (0x208B, 'M', u'−'),
- (0x208C, '3', u'='),
- (0x208D, '3', u'('),
- (0x208E, '3', u')'),
- (0x208F, 'X'),
- (0x2090, 'M', u'a'),
- (0x2091, 'M', u'e'),
- (0x2092, 'M', u'o'),
- (0x2093, 'M', u'x'),
- (0x2094, 'M', u'ə'),
- (0x2095, 'M', u'h'),
- (0x2096, 'M', u'k'),
- (0x2097, 'M', u'l'),
- (0x2098, 'M', u'm'),
- (0x2099, 'M', u'n'),
- (0x209A, 'M', u'p'),
- (0x209B, 'M', u's'),
- (0x209C, 'M', u't'),
- (0x209D, 'X'),
- (0x20A0, 'V'),
- (0x20A8, 'M', u'rs'),
- (0x20A9, 'V'),
- (0x20BB, 'X'),
- (0x20D0, 'V'),
- (0x20F1, 'X'),
- (0x2100, '3', u'a/c'),
- (0x2101, '3', u'a/s'),
- (0x2102, 'M', u'c'),
- (0x2103, 'M', u'°c'),
- (0x2104, 'V'),
- (0x2105, '3', u'c/o'),
- (0x2106, '3', u'c/u'),
- (0x2107, 'M', u'ɛ'),
- (0x2108, 'V'),
- (0x2109, 'M', u'°f'),
- (0x210A, 'M', u'g'),
- (0x210B, 'M', u'h'),
- (0x210F, 'M', u'ħ'),
- (0x2110, 'M', u'i'),
- (0x2112, 'M', u'l'),
- (0x2114, 'V'),
- (0x2115, 'M', u'n'),
- (0x2116, 'M', u'no'),
- (0x2117, 'V'),
- (0x2119, 'M', u'p'),
- (0x211A, 'M', u'q'),
- (0x211B, 'M', u'r'),
- (0x211E, 'V'),
- (0x2120, 'M', u'sm'),
- (0x2121, 'M', u'tel'),
- (0x2122, 'M', u'tm'),
- (0x2123, 'V'),
- (0x2124, 'M', u'z'),
- (0x2125, 'V'),
- (0x2126, 'M', u'ω'),
- (0x2127, 'V'),
- (0x2128, 'M', u'z'),
- (0x2129, 'V'),
- (0x212A, 'M', u'k'),
- (0x212B, 'M', u'å'),
- (0x212C, 'M', u'b'),
- (0x212D, 'M', u'c'),
- (0x212E, 'V'),
- (0x212F, 'M', u'e'),
- (0x2131, 'M', u'f'),
- (0x2132, 'X'),
- (0x2133, 'M', u'm'),
- (0x2134, 'M', u'o'),
- (0x2135, 'M', u'א'),
- ]
-
-def _seg_22():
- return [
- (0x2136, 'M', u'ב'),
- (0x2137, 'M', u'ג'),
- (0x2138, 'M', u'ד'),
- (0x2139, 'M', u'i'),
- (0x213A, 'V'),
- (0x213B, 'M', u'fax'),
- (0x213C, 'M', u'π'),
- (0x213D, 'M', u'γ'),
- (0x213F, 'M', u'π'),
- (0x2140, 'M', u'∑'),
- (0x2141, 'V'),
- (0x2145, 'M', u'd'),
- (0x2147, 'M', u'e'),
- (0x2148, 'M', u'i'),
- (0x2149, 'M', u'j'),
- (0x214A, 'V'),
- (0x2150, 'M', u'1⁄7'),
- (0x2151, 'M', u'1⁄9'),
- (0x2152, 'M', u'1⁄10'),
- (0x2153, 'M', u'1⁄3'),
- (0x2154, 'M', u'2⁄3'),
- (0x2155, 'M', u'1⁄5'),
- (0x2156, 'M', u'2⁄5'),
- (0x2157, 'M', u'3⁄5'),
- (0x2158, 'M', u'4⁄5'),
- (0x2159, 'M', u'1⁄6'),
- (0x215A, 'M', u'5⁄6'),
- (0x215B, 'M', u'1⁄8'),
- (0x215C, 'M', u'3⁄8'),
- (0x215D, 'M', u'5⁄8'),
- (0x215E, 'M', u'7⁄8'),
- (0x215F, 'M', u'1⁄'),
- (0x2160, 'M', u'i'),
- (0x2161, 'M', u'ii'),
- (0x2162, 'M', u'iii'),
- (0x2163, 'M', u'iv'),
- (0x2164, 'M', u'v'),
- (0x2165, 'M', u'vi'),
- (0x2166, 'M', u'vii'),
- (0x2167, 'M', u'viii'),
- (0x2168, 'M', u'ix'),
- (0x2169, 'M', u'x'),
- (0x216A, 'M', u'xi'),
- (0x216B, 'M', u'xii'),
- (0x216C, 'M', u'l'),
- (0x216D, 'M', u'c'),
- (0x216E, 'M', u'd'),
- (0x216F, 'M', u'm'),
- (0x2170, 'M', u'i'),
- (0x2171, 'M', u'ii'),
- (0x2172, 'M', u'iii'),
- (0x2173, 'M', u'iv'),
- (0x2174, 'M', u'v'),
- (0x2175, 'M', u'vi'),
- (0x2176, 'M', u'vii'),
- (0x2177, 'M', u'viii'),
- (0x2178, 'M', u'ix'),
- (0x2179, 'M', u'x'),
- (0x217A, 'M', u'xi'),
- (0x217B, 'M', u'xii'),
- (0x217C, 'M', u'l'),
- (0x217D, 'M', u'c'),
- (0x217E, 'M', u'd'),
- (0x217F, 'M', u'm'),
- (0x2180, 'V'),
- (0x2183, 'X'),
- (0x2184, 'V'),
- (0x2189, 'M', u'0⁄3'),
- (0x218A, 'X'),
- (0x2190, 'V'),
- (0x222C, 'M', u'∫∫'),
- (0x222D, 'M', u'∫∫∫'),
- (0x222E, 'V'),
- (0x222F, 'M', u'∮∮'),
- (0x2230, 'M', u'∮∮∮'),
- (0x2231, 'V'),
- (0x2260, '3'),
- (0x2261, 'V'),
- (0x226E, '3'),
- (0x2270, 'V'),
- (0x2329, 'M', u'〈'),
- (0x232A, 'M', u'〉'),
- (0x232B, 'V'),
- (0x23F4, 'X'),
- (0x2400, 'V'),
- (0x2427, 'X'),
- (0x2440, 'V'),
- (0x244B, 'X'),
- (0x2460, 'M', u'1'),
- (0x2461, 'M', u'2'),
- (0x2462, 'M', u'3'),
- (0x2463, 'M', u'4'),
- (0x2464, 'M', u'5'),
- (0x2465, 'M', u'6'),
- (0x2466, 'M', u'7'),
- (0x2467, 'M', u'8'),
- (0x2468, 'M', u'9'),
- (0x2469, 'M', u'10'),
- (0x246A, 'M', u'11'),
- (0x246B, 'M', u'12'),
- ]
-
-def _seg_23():
- return [
- (0x246C, 'M', u'13'),
- (0x246D, 'M', u'14'),
- (0x246E, 'M', u'15'),
- (0x246F, 'M', u'16'),
- (0x2470, 'M', u'17'),
- (0x2471, 'M', u'18'),
- (0x2472, 'M', u'19'),
- (0x2473, 'M', u'20'),
- (0x2474, '3', u'(1)'),
- (0x2475, '3', u'(2)'),
- (0x2476, '3', u'(3)'),
- (0x2477, '3', u'(4)'),
- (0x2478, '3', u'(5)'),
- (0x2479, '3', u'(6)'),
- (0x247A, '3', u'(7)'),
- (0x247B, '3', u'(8)'),
- (0x247C, '3', u'(9)'),
- (0x247D, '3', u'(10)'),
- (0x247E, '3', u'(11)'),
- (0x247F, '3', u'(12)'),
- (0x2480, '3', u'(13)'),
- (0x2481, '3', u'(14)'),
- (0x2482, '3', u'(15)'),
- (0x2483, '3', u'(16)'),
- (0x2484, '3', u'(17)'),
- (0x2485, '3', u'(18)'),
- (0x2486, '3', u'(19)'),
- (0x2487, '3', u'(20)'),
- (0x2488, 'X'),
- (0x249C, '3', u'(a)'),
- (0x249D, '3', u'(b)'),
- (0x249E, '3', u'(c)'),
- (0x249F, '3', u'(d)'),
- (0x24A0, '3', u'(e)'),
- (0x24A1, '3', u'(f)'),
- (0x24A2, '3', u'(g)'),
- (0x24A3, '3', u'(h)'),
- (0x24A4, '3', u'(i)'),
- (0x24A5, '3', u'(j)'),
- (0x24A6, '3', u'(k)'),
- (0x24A7, '3', u'(l)'),
- (0x24A8, '3', u'(m)'),
- (0x24A9, '3', u'(n)'),
- (0x24AA, '3', u'(o)'),
- (0x24AB, '3', u'(p)'),
- (0x24AC, '3', u'(q)'),
- (0x24AD, '3', u'(r)'),
- (0x24AE, '3', u'(s)'),
- (0x24AF, '3', u'(t)'),
- (0x24B0, '3', u'(u)'),
- (0x24B1, '3', u'(v)'),
- (0x24B2, '3', u'(w)'),
- (0x24B3, '3', u'(x)'),
- (0x24B4, '3', u'(y)'),
- (0x24B5, '3', u'(z)'),
- (0x24B6, 'M', u'a'),
- (0x24B7, 'M', u'b'),
- (0x24B8, 'M', u'c'),
- (0x24B9, 'M', u'd'),
- (0x24BA, 'M', u'e'),
- (0x24BB, 'M', u'f'),
- (0x24BC, 'M', u'g'),
- (0x24BD, 'M', u'h'),
- (0x24BE, 'M', u'i'),
- (0x24BF, 'M', u'j'),
- (0x24C0, 'M', u'k'),
- (0x24C1, 'M', u'l'),
- (0x24C2, 'M', u'm'),
- (0x24C3, 'M', u'n'),
- (0x24C4, 'M', u'o'),
- (0x24C5, 'M', u'p'),
- (0x24C6, 'M', u'q'),
- (0x24C7, 'M', u'r'),
- (0x24C8, 'M', u's'),
- (0x24C9, 'M', u't'),
- (0x24CA, 'M', u'u'),
- (0x24CB, 'M', u'v'),
- (0x24CC, 'M', u'w'),
- (0x24CD, 'M', u'x'),
- (0x24CE, 'M', u'y'),
- (0x24CF, 'M', u'z'),
- (0x24D0, 'M', u'a'),
- (0x24D1, 'M', u'b'),
- (0x24D2, 'M', u'c'),
- (0x24D3, 'M', u'd'),
- (0x24D4, 'M', u'e'),
- (0x24D5, 'M', u'f'),
- (0x24D6, 'M', u'g'),
- (0x24D7, 'M', u'h'),
- (0x24D8, 'M', u'i'),
- (0x24D9, 'M', u'j'),
- (0x24DA, 'M', u'k'),
- (0x24DB, 'M', u'l'),
- (0x24DC, 'M', u'm'),
- (0x24DD, 'M', u'n'),
- (0x24DE, 'M', u'o'),
- (0x24DF, 'M', u'p'),
- (0x24E0, 'M', u'q'),
- (0x24E1, 'M', u'r'),
- (0x24E2, 'M', u's'),
- ]
-
-def _seg_24():
- return [
- (0x24E3, 'M', u't'),
- (0x24E4, 'M', u'u'),
- (0x24E5, 'M', u'v'),
- (0x24E6, 'M', u'w'),
- (0x24E7, 'M', u'x'),
- (0x24E8, 'M', u'y'),
- (0x24E9, 'M', u'z'),
- (0x24EA, 'M', u'0'),
- (0x24EB, 'V'),
- (0x2700, 'X'),
- (0x2701, 'V'),
- (0x2A0C, 'M', u'∫∫∫∫'),
- (0x2A0D, 'V'),
- (0x2A74, '3', u'::='),
- (0x2A75, '3', u'=='),
- (0x2A76, '3', u'==='),
- (0x2A77, 'V'),
- (0x2ADC, 'M', u'⫝̸'),
- (0x2ADD, 'V'),
- (0x2B4D, 'X'),
- (0x2B50, 'V'),
- (0x2B5A, 'X'),
- (0x2C00, 'M', u'ⰰ'),
- (0x2C01, 'M', u'ⰱ'),
- (0x2C02, 'M', u'ⰲ'),
- (0x2C03, 'M', u'ⰳ'),
- (0x2C04, 'M', u'ⰴ'),
- (0x2C05, 'M', u'ⰵ'),
- (0x2C06, 'M', u'ⰶ'),
- (0x2C07, 'M', u'ⰷ'),
- (0x2C08, 'M', u'ⰸ'),
- (0x2C09, 'M', u'ⰹ'),
- (0x2C0A, 'M', u'ⰺ'),
- (0x2C0B, 'M', u'ⰻ'),
- (0x2C0C, 'M', u'ⰼ'),
- (0x2C0D, 'M', u'ⰽ'),
- (0x2C0E, 'M', u'ⰾ'),
- (0x2C0F, 'M', u'ⰿ'),
- (0x2C10, 'M', u'ⱀ'),
- (0x2C11, 'M', u'ⱁ'),
- (0x2C12, 'M', u'ⱂ'),
- (0x2C13, 'M', u'ⱃ'),
- (0x2C14, 'M', u'ⱄ'),
- (0x2C15, 'M', u'ⱅ'),
- (0x2C16, 'M', u'ⱆ'),
- (0x2C17, 'M', u'ⱇ'),
- (0x2C18, 'M', u'ⱈ'),
- (0x2C19, 'M', u'ⱉ'),
- (0x2C1A, 'M', u'ⱊ'),
- (0x2C1B, 'M', u'ⱋ'),
- (0x2C1C, 'M', u'ⱌ'),
- (0x2C1D, 'M', u'ⱍ'),
- (0x2C1E, 'M', u'ⱎ'),
- (0x2C1F, 'M', u'ⱏ'),
- (0x2C20, 'M', u'ⱐ'),
- (0x2C21, 'M', u'ⱑ'),
- (0x2C22, 'M', u'ⱒ'),
- (0x2C23, 'M', u'ⱓ'),
- (0x2C24, 'M', u'ⱔ'),
- (0x2C25, 'M', u'ⱕ'),
- (0x2C26, 'M', u'ⱖ'),
- (0x2C27, 'M', u'ⱗ'),
- (0x2C28, 'M', u'ⱘ'),
- (0x2C29, 'M', u'ⱙ'),
- (0x2C2A, 'M', u'ⱚ'),
- (0x2C2B, 'M', u'ⱛ'),
- (0x2C2C, 'M', u'ⱜ'),
- (0x2C2D, 'M', u'ⱝ'),
- (0x2C2E, 'M', u'ⱞ'),
- (0x2C2F, 'X'),
- (0x2C30, 'V'),
- (0x2C5F, 'X'),
- (0x2C60, 'M', u'ⱡ'),
- (0x2C61, 'V'),
- (0x2C62, 'M', u'ɫ'),
- (0x2C63, 'M', u'ᵽ'),
- (0x2C64, 'M', u'ɽ'),
- (0x2C65, 'V'),
- (0x2C67, 'M', u'ⱨ'),
- (0x2C68, 'V'),
- (0x2C69, 'M', u'ⱪ'),
- (0x2C6A, 'V'),
- (0x2C6B, 'M', u'ⱬ'),
- (0x2C6C, 'V'),
- (0x2C6D, 'M', u'ɑ'),
- (0x2C6E, 'M', u'ɱ'),
- (0x2C6F, 'M', u'ɐ'),
- (0x2C70, 'M', u'ɒ'),
- (0x2C71, 'V'),
- (0x2C72, 'M', u'ⱳ'),
- (0x2C73, 'V'),
- (0x2C75, 'M', u'ⱶ'),
- (0x2C76, 'V'),
- (0x2C7C, 'M', u'j'),
- (0x2C7D, 'M', u'v'),
- (0x2C7E, 'M', u'ȿ'),
- (0x2C7F, 'M', u'ɀ'),
- (0x2C80, 'M', u'ⲁ'),
- (0x2C81, 'V'),
- (0x2C82, 'M', u'ⲃ'),
- ]
-
-def _seg_25():
- return [
- (0x2C83, 'V'),
- (0x2C84, 'M', u'ⲅ'),
- (0x2C85, 'V'),
- (0x2C86, 'M', u'ⲇ'),
- (0x2C87, 'V'),
- (0x2C88, 'M', u'ⲉ'),
- (0x2C89, 'V'),
- (0x2C8A, 'M', u'ⲋ'),
- (0x2C8B, 'V'),
- (0x2C8C, 'M', u'ⲍ'),
- (0x2C8D, 'V'),
- (0x2C8E, 'M', u'ⲏ'),
- (0x2C8F, 'V'),
- (0x2C90, 'M', u'ⲑ'),
- (0x2C91, 'V'),
- (0x2C92, 'M', u'ⲓ'),
- (0x2C93, 'V'),
- (0x2C94, 'M', u'ⲕ'),
- (0x2C95, 'V'),
- (0x2C96, 'M', u'ⲗ'),
- (0x2C97, 'V'),
- (0x2C98, 'M', u'ⲙ'),
- (0x2C99, 'V'),
- (0x2C9A, 'M', u'ⲛ'),
- (0x2C9B, 'V'),
- (0x2C9C, 'M', u'ⲝ'),
- (0x2C9D, 'V'),
- (0x2C9E, 'M', u'ⲟ'),
- (0x2C9F, 'V'),
- (0x2CA0, 'M', u'ⲡ'),
- (0x2CA1, 'V'),
- (0x2CA2, 'M', u'ⲣ'),
- (0x2CA3, 'V'),
- (0x2CA4, 'M', u'ⲥ'),
- (0x2CA5, 'V'),
- (0x2CA6, 'M', u'ⲧ'),
- (0x2CA7, 'V'),
- (0x2CA8, 'M', u'ⲩ'),
- (0x2CA9, 'V'),
- (0x2CAA, 'M', u'ⲫ'),
- (0x2CAB, 'V'),
- (0x2CAC, 'M', u'ⲭ'),
- (0x2CAD, 'V'),
- (0x2CAE, 'M', u'ⲯ'),
- (0x2CAF, 'V'),
- (0x2CB0, 'M', u'ⲱ'),
- (0x2CB1, 'V'),
- (0x2CB2, 'M', u'ⲳ'),
- (0x2CB3, 'V'),
- (0x2CB4, 'M', u'ⲵ'),
- (0x2CB5, 'V'),
- (0x2CB6, 'M', u'ⲷ'),
- (0x2CB7, 'V'),
- (0x2CB8, 'M', u'ⲹ'),
- (0x2CB9, 'V'),
- (0x2CBA, 'M', u'ⲻ'),
- (0x2CBB, 'V'),
- (0x2CBC, 'M', u'ⲽ'),
- (0x2CBD, 'V'),
- (0x2CBE, 'M', u'ⲿ'),
- (0x2CBF, 'V'),
- (0x2CC0, 'M', u'ⳁ'),
- (0x2CC1, 'V'),
- (0x2CC2, 'M', u'ⳃ'),
- (0x2CC3, 'V'),
- (0x2CC4, 'M', u'ⳅ'),
- (0x2CC5, 'V'),
- (0x2CC6, 'M', u'ⳇ'),
- (0x2CC7, 'V'),
- (0x2CC8, 'M', u'ⳉ'),
- (0x2CC9, 'V'),
- (0x2CCA, 'M', u'ⳋ'),
- (0x2CCB, 'V'),
- (0x2CCC, 'M', u'ⳍ'),
- (0x2CCD, 'V'),
- (0x2CCE, 'M', u'ⳏ'),
- (0x2CCF, 'V'),
- (0x2CD0, 'M', u'ⳑ'),
- (0x2CD1, 'V'),
- (0x2CD2, 'M', u'ⳓ'),
- (0x2CD3, 'V'),
- (0x2CD4, 'M', u'ⳕ'),
- (0x2CD5, 'V'),
- (0x2CD6, 'M', u'ⳗ'),
- (0x2CD7, 'V'),
- (0x2CD8, 'M', u'ⳙ'),
- (0x2CD9, 'V'),
- (0x2CDA, 'M', u'ⳛ'),
- (0x2CDB, 'V'),
- (0x2CDC, 'M', u'ⳝ'),
- (0x2CDD, 'V'),
- (0x2CDE, 'M', u'ⳟ'),
- (0x2CDF, 'V'),
- (0x2CE0, 'M', u'ⳡ'),
- (0x2CE1, 'V'),
- (0x2CE2, 'M', u'ⳣ'),
- (0x2CE3, 'V'),
- (0x2CEB, 'M', u'ⳬ'),
- (0x2CEC, 'V'),
- (0x2CED, 'M', u'ⳮ'),
- ]
-
-def _seg_26():
- return [
- (0x2CEE, 'V'),
- (0x2CF2, 'M', u'ⳳ'),
- (0x2CF3, 'V'),
- (0x2CF4, 'X'),
- (0x2CF9, 'V'),
- (0x2D26, 'X'),
- (0x2D27, 'V'),
- (0x2D28, 'X'),
- (0x2D2D, 'V'),
- (0x2D2E, 'X'),
- (0x2D30, 'V'),
- (0x2D68, 'X'),
- (0x2D6F, 'M', u'ⵡ'),
- (0x2D70, 'V'),
- (0x2D71, 'X'),
- (0x2D7F, 'V'),
- (0x2D97, 'X'),
- (0x2DA0, 'V'),
- (0x2DA7, 'X'),
- (0x2DA8, 'V'),
- (0x2DAF, 'X'),
- (0x2DB0, 'V'),
- (0x2DB7, 'X'),
- (0x2DB8, 'V'),
- (0x2DBF, 'X'),
- (0x2DC0, 'V'),
- (0x2DC7, 'X'),
- (0x2DC8, 'V'),
- (0x2DCF, 'X'),
- (0x2DD0, 'V'),
- (0x2DD7, 'X'),
- (0x2DD8, 'V'),
- (0x2DDF, 'X'),
- (0x2DE0, 'V'),
- (0x2E3C, 'X'),
- (0x2E80, 'V'),
- (0x2E9A, 'X'),
- (0x2E9B, 'V'),
- (0x2E9F, 'M', u'母'),
- (0x2EA0, 'V'),
- (0x2EF3, 'M', u'龟'),
- (0x2EF4, 'X'),
- (0x2F00, 'M', u'一'),
- (0x2F01, 'M', u'丨'),
- (0x2F02, 'M', u'丶'),
- (0x2F03, 'M', u'丿'),
- (0x2F04, 'M', u'乙'),
- (0x2F05, 'M', u'亅'),
- (0x2F06, 'M', u'二'),
- (0x2F07, 'M', u'亠'),
- (0x2F08, 'M', u'人'),
- (0x2F09, 'M', u'儿'),
- (0x2F0A, 'M', u'入'),
- (0x2F0B, 'M', u'八'),
- (0x2F0C, 'M', u'冂'),
- (0x2F0D, 'M', u'冖'),
- (0x2F0E, 'M', u'冫'),
- (0x2F0F, 'M', u'几'),
- (0x2F10, 'M', u'凵'),
- (0x2F11, 'M', u'刀'),
- (0x2F12, 'M', u'力'),
- (0x2F13, 'M', u'勹'),
- (0x2F14, 'M', u'匕'),
- (0x2F15, 'M', u'匚'),
- (0x2F16, 'M', u'匸'),
- (0x2F17, 'M', u'十'),
- (0x2F18, 'M', u'卜'),
- (0x2F19, 'M', u'卩'),
- (0x2F1A, 'M', u'厂'),
- (0x2F1B, 'M', u'厶'),
- (0x2F1C, 'M', u'又'),
- (0x2F1D, 'M', u'口'),
- (0x2F1E, 'M', u'囗'),
- (0x2F1F, 'M', u'土'),
- (0x2F20, 'M', u'士'),
- (0x2F21, 'M', u'夂'),
- (0x2F22, 'M', u'夊'),
- (0x2F23, 'M', u'夕'),
- (0x2F24, 'M', u'大'),
- (0x2F25, 'M', u'女'),
- (0x2F26, 'M', u'子'),
- (0x2F27, 'M', u'宀'),
- (0x2F28, 'M', u'寸'),
- (0x2F29, 'M', u'小'),
- (0x2F2A, 'M', u'尢'),
- (0x2F2B, 'M', u'尸'),
- (0x2F2C, 'M', u'屮'),
- (0x2F2D, 'M', u'山'),
- (0x2F2E, 'M', u'巛'),
- (0x2F2F, 'M', u'工'),
- (0x2F30, 'M', u'己'),
- (0x2F31, 'M', u'巾'),
- (0x2F32, 'M', u'干'),
- (0x2F33, 'M', u'幺'),
- (0x2F34, 'M', u'广'),
- (0x2F35, 'M', u'廴'),
- (0x2F36, 'M', u'廾'),
- (0x2F37, 'M', u'弋'),
- (0x2F38, 'M', u'弓'),
- (0x2F39, 'M', u'彐'),
- ]
-
-def _seg_27():
- return [
- (0x2F3A, 'M', u'彡'),
- (0x2F3B, 'M', u'彳'),
- (0x2F3C, 'M', u'心'),
- (0x2F3D, 'M', u'戈'),
- (0x2F3E, 'M', u'戶'),
- (0x2F3F, 'M', u'手'),
- (0x2F40, 'M', u'支'),
- (0x2F41, 'M', u'攴'),
- (0x2F42, 'M', u'文'),
- (0x2F43, 'M', u'斗'),
- (0x2F44, 'M', u'斤'),
- (0x2F45, 'M', u'方'),
- (0x2F46, 'M', u'无'),
- (0x2F47, 'M', u'日'),
- (0x2F48, 'M', u'曰'),
- (0x2F49, 'M', u'月'),
- (0x2F4A, 'M', u'木'),
- (0x2F4B, 'M', u'欠'),
- (0x2F4C, 'M', u'止'),
- (0x2F4D, 'M', u'歹'),
- (0x2F4E, 'M', u'殳'),
- (0x2F4F, 'M', u'毋'),
- (0x2F50, 'M', u'比'),
- (0x2F51, 'M', u'毛'),
- (0x2F52, 'M', u'氏'),
- (0x2F53, 'M', u'气'),
- (0x2F54, 'M', u'水'),
- (0x2F55, 'M', u'火'),
- (0x2F56, 'M', u'爪'),
- (0x2F57, 'M', u'父'),
- (0x2F58, 'M', u'爻'),
- (0x2F59, 'M', u'爿'),
- (0x2F5A, 'M', u'片'),
- (0x2F5B, 'M', u'牙'),
- (0x2F5C, 'M', u'牛'),
- (0x2F5D, 'M', u'犬'),
- (0x2F5E, 'M', u'玄'),
- (0x2F5F, 'M', u'玉'),
- (0x2F60, 'M', u'瓜'),
- (0x2F61, 'M', u'瓦'),
- (0x2F62, 'M', u'甘'),
- (0x2F63, 'M', u'生'),
- (0x2F64, 'M', u'用'),
- (0x2F65, 'M', u'田'),
- (0x2F66, 'M', u'疋'),
- (0x2F67, 'M', u'疒'),
- (0x2F68, 'M', u'癶'),
- (0x2F69, 'M', u'白'),
- (0x2F6A, 'M', u'皮'),
- (0x2F6B, 'M', u'皿'),
- (0x2F6C, 'M', u'目'),
- (0x2F6D, 'M', u'矛'),
- (0x2F6E, 'M', u'矢'),
- (0x2F6F, 'M', u'石'),
- (0x2F70, 'M', u'示'),
- (0x2F71, 'M', u'禸'),
- (0x2F72, 'M', u'禾'),
- (0x2F73, 'M', u'穴'),
- (0x2F74, 'M', u'立'),
- (0x2F75, 'M', u'竹'),
- (0x2F76, 'M', u'米'),
- (0x2F77, 'M', u'糸'),
- (0x2F78, 'M', u'缶'),
- (0x2F79, 'M', u'网'),
- (0x2F7A, 'M', u'羊'),
- (0x2F7B, 'M', u'羽'),
- (0x2F7C, 'M', u'老'),
- (0x2F7D, 'M', u'而'),
- (0x2F7E, 'M', u'耒'),
- (0x2F7F, 'M', u'耳'),
- (0x2F80, 'M', u'聿'),
- (0x2F81, 'M', u'肉'),
- (0x2F82, 'M', u'臣'),
- (0x2F83, 'M', u'自'),
- (0x2F84, 'M', u'至'),
- (0x2F85, 'M', u'臼'),
- (0x2F86, 'M', u'舌'),
- (0x2F87, 'M', u'舛'),
- (0x2F88, 'M', u'舟'),
- (0x2F89, 'M', u'艮'),
- (0x2F8A, 'M', u'色'),
- (0x2F8B, 'M', u'艸'),
- (0x2F8C, 'M', u'虍'),
- (0x2F8D, 'M', u'虫'),
- (0x2F8E, 'M', u'血'),
- (0x2F8F, 'M', u'行'),
- (0x2F90, 'M', u'衣'),
- (0x2F91, 'M', u'襾'),
- (0x2F92, 'M', u'見'),
- (0x2F93, 'M', u'角'),
- (0x2F94, 'M', u'言'),
- (0x2F95, 'M', u'谷'),
- (0x2F96, 'M', u'豆'),
- (0x2F97, 'M', u'豕'),
- (0x2F98, 'M', u'豸'),
- (0x2F99, 'M', u'貝'),
- (0x2F9A, 'M', u'赤'),
- (0x2F9B, 'M', u'走'),
- (0x2F9C, 'M', u'足'),
- (0x2F9D, 'M', u'身'),
- ]
-
-def _seg_28():
- return [
- (0x2F9E, 'M', u'車'),
- (0x2F9F, 'M', u'辛'),
- (0x2FA0, 'M', u'辰'),
- (0x2FA1, 'M', u'辵'),
- (0x2FA2, 'M', u'邑'),
- (0x2FA3, 'M', u'酉'),
- (0x2FA4, 'M', u'釆'),
- (0x2FA5, 'M', u'里'),
- (0x2FA6, 'M', u'金'),
- (0x2FA7, 'M', u'長'),
- (0x2FA8, 'M', u'門'),
- (0x2FA9, 'M', u'阜'),
- (0x2FAA, 'M', u'隶'),
- (0x2FAB, 'M', u'隹'),
- (0x2FAC, 'M', u'雨'),
- (0x2FAD, 'M', u'靑'),
- (0x2FAE, 'M', u'非'),
- (0x2FAF, 'M', u'面'),
- (0x2FB0, 'M', u'革'),
- (0x2FB1, 'M', u'韋'),
- (0x2FB2, 'M', u'韭'),
- (0x2FB3, 'M', u'音'),
- (0x2FB4, 'M', u'頁'),
- (0x2FB5, 'M', u'風'),
- (0x2FB6, 'M', u'飛'),
- (0x2FB7, 'M', u'食'),
- (0x2FB8, 'M', u'首'),
- (0x2FB9, 'M', u'香'),
- (0x2FBA, 'M', u'馬'),
- (0x2FBB, 'M', u'骨'),
- (0x2FBC, 'M', u'高'),
- (0x2FBD, 'M', u'髟'),
- (0x2FBE, 'M', u'鬥'),
- (0x2FBF, 'M', u'鬯'),
- (0x2FC0, 'M', u'鬲'),
- (0x2FC1, 'M', u'鬼'),
- (0x2FC2, 'M', u'魚'),
- (0x2FC3, 'M', u'鳥'),
- (0x2FC4, 'M', u'鹵'),
- (0x2FC5, 'M', u'鹿'),
- (0x2FC6, 'M', u'麥'),
- (0x2FC7, 'M', u'麻'),
- (0x2FC8, 'M', u'黃'),
- (0x2FC9, 'M', u'黍'),
- (0x2FCA, 'M', u'黑'),
- (0x2FCB, 'M', u'黹'),
- (0x2FCC, 'M', u'黽'),
- (0x2FCD, 'M', u'鼎'),
- (0x2FCE, 'M', u'鼓'),
- (0x2FCF, 'M', u'鼠'),
- (0x2FD0, 'M', u'鼻'),
- (0x2FD1, 'M', u'齊'),
- (0x2FD2, 'M', u'齒'),
- (0x2FD3, 'M', u'龍'),
- (0x2FD4, 'M', u'龜'),
- (0x2FD5, 'M', u'龠'),
- (0x2FD6, 'X'),
- (0x3000, '3', u' '),
- (0x3001, 'V'),
- (0x3002, 'M', u'.'),
- (0x3003, 'V'),
- (0x3036, 'M', u'〒'),
- (0x3037, 'V'),
- (0x3038, 'M', u'十'),
- (0x3039, 'M', u'卄'),
- (0x303A, 'M', u'卅'),
- (0x303B, 'V'),
- (0x3040, 'X'),
- (0x3041, 'V'),
- (0x3097, 'X'),
- (0x3099, 'V'),
- (0x309B, '3', u' ゙'),
- (0x309C, '3', u' ゚'),
- (0x309D, 'V'),
- (0x309F, 'M', u'より'),
- (0x30A0, 'V'),
- (0x30FF, 'M', u'コト'),
- (0x3100, 'X'),
- (0x3105, 'V'),
- (0x312E, 'X'),
- (0x3131, 'M', u'ᄀ'),
- (0x3132, 'M', u'ᄁ'),
- (0x3133, 'M', u'ᆪ'),
- (0x3134, 'M', u'ᄂ'),
- (0x3135, 'M', u'ᆬ'),
- (0x3136, 'M', u'ᆭ'),
- (0x3137, 'M', u'ᄃ'),
- (0x3138, 'M', u'ᄄ'),
- (0x3139, 'M', u'ᄅ'),
- (0x313A, 'M', u'ᆰ'),
- (0x313B, 'M', u'ᆱ'),
- (0x313C, 'M', u'ᆲ'),
- (0x313D, 'M', u'ᆳ'),
- (0x313E, 'M', u'ᆴ'),
- (0x313F, 'M', u'ᆵ'),
- (0x3140, 'M', u'ᄚ'),
- (0x3141, 'M', u'ᄆ'),
- (0x3142, 'M', u'ᄇ'),
- (0x3143, 'M', u'ᄈ'),
- (0x3144, 'M', u'ᄡ'),
- ]
-
-def _seg_29():
- return [
- (0x3145, 'M', u'ᄉ'),
- (0x3146, 'M', u'ᄊ'),
- (0x3147, 'M', u'ᄋ'),
- (0x3148, 'M', u'ᄌ'),
- (0x3149, 'M', u'ᄍ'),
- (0x314A, 'M', u'ᄎ'),
- (0x314B, 'M', u'ᄏ'),
- (0x314C, 'M', u'ᄐ'),
- (0x314D, 'M', u'ᄑ'),
- (0x314E, 'M', u'ᄒ'),
- (0x314F, 'M', u'ᅡ'),
- (0x3150, 'M', u'ᅢ'),
- (0x3151, 'M', u'ᅣ'),
- (0x3152, 'M', u'ᅤ'),
- (0x3153, 'M', u'ᅥ'),
- (0x3154, 'M', u'ᅦ'),
- (0x3155, 'M', u'ᅧ'),
- (0x3156, 'M', u'ᅨ'),
- (0x3157, 'M', u'ᅩ'),
- (0x3158, 'M', u'ᅪ'),
- (0x3159, 'M', u'ᅫ'),
- (0x315A, 'M', u'ᅬ'),
- (0x315B, 'M', u'ᅭ'),
- (0x315C, 'M', u'ᅮ'),
- (0x315D, 'M', u'ᅯ'),
- (0x315E, 'M', u'ᅰ'),
- (0x315F, 'M', u'ᅱ'),
- (0x3160, 'M', u'ᅲ'),
- (0x3161, 'M', u'ᅳ'),
- (0x3162, 'M', u'ᅴ'),
- (0x3163, 'M', u'ᅵ'),
- (0x3164, 'X'),
- (0x3165, 'M', u'ᄔ'),
- (0x3166, 'M', u'ᄕ'),
- (0x3167, 'M', u'ᇇ'),
- (0x3168, 'M', u'ᇈ'),
- (0x3169, 'M', u'ᇌ'),
- (0x316A, 'M', u'ᇎ'),
- (0x316B, 'M', u'ᇓ'),
- (0x316C, 'M', u'ᇗ'),
- (0x316D, 'M', u'ᇙ'),
- (0x316E, 'M', u'ᄜ'),
- (0x316F, 'M', u'ᇝ'),
- (0x3170, 'M', u'ᇟ'),
- (0x3171, 'M', u'ᄝ'),
- (0x3172, 'M', u'ᄞ'),
- (0x3173, 'M', u'ᄠ'),
- (0x3174, 'M', u'ᄢ'),
- (0x3175, 'M', u'ᄣ'),
- (0x3176, 'M', u'ᄧ'),
- (0x3177, 'M', u'ᄩ'),
- (0x3178, 'M', u'ᄫ'),
- (0x3179, 'M', u'ᄬ'),
- (0x317A, 'M', u'ᄭ'),
- (0x317B, 'M', u'ᄮ'),
- (0x317C, 'M', u'ᄯ'),
- (0x317D, 'M', u'ᄲ'),
- (0x317E, 'M', u'ᄶ'),
- (0x317F, 'M', u'ᅀ'),
- (0x3180, 'M', u'ᅇ'),
- (0x3181, 'M', u'ᅌ'),
- (0x3182, 'M', u'ᇱ'),
- (0x3183, 'M', u'ᇲ'),
- (0x3184, 'M', u'ᅗ'),
- (0x3185, 'M', u'ᅘ'),
- (0x3186, 'M', u'ᅙ'),
- (0x3187, 'M', u'ᆄ'),
- (0x3188, 'M', u'ᆅ'),
- (0x3189, 'M', u'ᆈ'),
- (0x318A, 'M', u'ᆑ'),
- (0x318B, 'M', u'ᆒ'),
- (0x318C, 'M', u'ᆔ'),
- (0x318D, 'M', u'ᆞ'),
- (0x318E, 'M', u'ᆡ'),
- (0x318F, 'X'),
- (0x3190, 'V'),
- (0x3192, 'M', u'一'),
- (0x3193, 'M', u'二'),
- (0x3194, 'M', u'三'),
- (0x3195, 'M', u'四'),
- (0x3196, 'M', u'上'),
- (0x3197, 'M', u'中'),
- (0x3198, 'M', u'下'),
- (0x3199, 'M', u'甲'),
- (0x319A, 'M', u'乙'),
- (0x319B, 'M', u'丙'),
- (0x319C, 'M', u'丁'),
- (0x319D, 'M', u'天'),
- (0x319E, 'M', u'地'),
- (0x319F, 'M', u'人'),
- (0x31A0, 'V'),
- (0x31BB, 'X'),
- (0x31C0, 'V'),
- (0x31E4, 'X'),
- (0x31F0, 'V'),
- (0x3200, '3', u'(ᄀ)'),
- (0x3201, '3', u'(ᄂ)'),
- (0x3202, '3', u'(ᄃ)'),
- (0x3203, '3', u'(ᄅ)'),
- (0x3204, '3', u'(ᄆ)'),
- ]
-
-def _seg_30():
- return [
- (0x3205, '3', u'(ᄇ)'),
- (0x3206, '3', u'(ᄉ)'),
- (0x3207, '3', u'(ᄋ)'),
- (0x3208, '3', u'(ᄌ)'),
- (0x3209, '3', u'(ᄎ)'),
- (0x320A, '3', u'(ᄏ)'),
- (0x320B, '3', u'(ᄐ)'),
- (0x320C, '3', u'(ᄑ)'),
- (0x320D, '3', u'(ᄒ)'),
- (0x320E, '3', u'(가)'),
- (0x320F, '3', u'(나)'),
- (0x3210, '3', u'(다)'),
- (0x3211, '3', u'(라)'),
- (0x3212, '3', u'(마)'),
- (0x3213, '3', u'(바)'),
- (0x3214, '3', u'(사)'),
- (0x3215, '3', u'(아)'),
- (0x3216, '3', u'(자)'),
- (0x3217, '3', u'(차)'),
- (0x3218, '3', u'(카)'),
- (0x3219, '3', u'(타)'),
- (0x321A, '3', u'(파)'),
- (0x321B, '3', u'(하)'),
- (0x321C, '3', u'(주)'),
- (0x321D, '3', u'(오전)'),
- (0x321E, '3', u'(오후)'),
- (0x321F, 'X'),
- (0x3220, '3', u'(一)'),
- (0x3221, '3', u'(二)'),
- (0x3222, '3', u'(三)'),
- (0x3223, '3', u'(四)'),
- (0x3224, '3', u'(五)'),
- (0x3225, '3', u'(六)'),
- (0x3226, '3', u'(七)'),
- (0x3227, '3', u'(八)'),
- (0x3228, '3', u'(九)'),
- (0x3229, '3', u'(十)'),
- (0x322A, '3', u'(月)'),
- (0x322B, '3', u'(火)'),
- (0x322C, '3', u'(水)'),
- (0x322D, '3', u'(木)'),
- (0x322E, '3', u'(金)'),
- (0x322F, '3', u'(土)'),
- (0x3230, '3', u'(日)'),
- (0x3231, '3', u'(株)'),
- (0x3232, '3', u'(有)'),
- (0x3233, '3', u'(社)'),
- (0x3234, '3', u'(名)'),
- (0x3235, '3', u'(特)'),
- (0x3236, '3', u'(財)'),
- (0x3237, '3', u'(祝)'),
- (0x3238, '3', u'(労)'),
- (0x3239, '3', u'(代)'),
- (0x323A, '3', u'(呼)'),
- (0x323B, '3', u'(学)'),
- (0x323C, '3', u'(監)'),
- (0x323D, '3', u'(企)'),
- (0x323E, '3', u'(資)'),
- (0x323F, '3', u'(協)'),
- (0x3240, '3', u'(祭)'),
- (0x3241, '3', u'(休)'),
- (0x3242, '3', u'(自)'),
- (0x3243, '3', u'(至)'),
- (0x3244, 'M', u'問'),
- (0x3245, 'M', u'幼'),
- (0x3246, 'M', u'文'),
- (0x3247, 'M', u'箏'),
- (0x3248, 'V'),
- (0x3250, 'M', u'pte'),
- (0x3251, 'M', u'21'),
- (0x3252, 'M', u'22'),
- (0x3253, 'M', u'23'),
- (0x3254, 'M', u'24'),
- (0x3255, 'M', u'25'),
- (0x3256, 'M', u'26'),
- (0x3257, 'M', u'27'),
- (0x3258, 'M', u'28'),
- (0x3259, 'M', u'29'),
- (0x325A, 'M', u'30'),
- (0x325B, 'M', u'31'),
- (0x325C, 'M', u'32'),
- (0x325D, 'M', u'33'),
- (0x325E, 'M', u'34'),
- (0x325F, 'M', u'35'),
- (0x3260, 'M', u'ᄀ'),
- (0x3261, 'M', u'ᄂ'),
- (0x3262, 'M', u'ᄃ'),
- (0x3263, 'M', u'ᄅ'),
- (0x3264, 'M', u'ᄆ'),
- (0x3265, 'M', u'ᄇ'),
- (0x3266, 'M', u'ᄉ'),
- (0x3267, 'M', u'ᄋ'),
- (0x3268, 'M', u'ᄌ'),
- (0x3269, 'M', u'ᄎ'),
- (0x326A, 'M', u'ᄏ'),
- (0x326B, 'M', u'ᄐ'),
- (0x326C, 'M', u'ᄑ'),
- (0x326D, 'M', u'ᄒ'),
- (0x326E, 'M', u'가'),
- (0x326F, 'M', u'나'),
- ]
-
-def _seg_31():
- return [
- (0x3270, 'M', u'다'),
- (0x3271, 'M', u'라'),
- (0x3272, 'M', u'마'),
- (0x3273, 'M', u'바'),
- (0x3274, 'M', u'사'),
- (0x3275, 'M', u'아'),
- (0x3276, 'M', u'자'),
- (0x3277, 'M', u'차'),
- (0x3278, 'M', u'카'),
- (0x3279, 'M', u'타'),
- (0x327A, 'M', u'파'),
- (0x327B, 'M', u'하'),
- (0x327C, 'M', u'참고'),
- (0x327D, 'M', u'주의'),
- (0x327E, 'M', u'우'),
- (0x327F, 'V'),
- (0x3280, 'M', u'一'),
- (0x3281, 'M', u'二'),
- (0x3282, 'M', u'三'),
- (0x3283, 'M', u'四'),
- (0x3284, 'M', u'五'),
- (0x3285, 'M', u'六'),
- (0x3286, 'M', u'七'),
- (0x3287, 'M', u'八'),
- (0x3288, 'M', u'九'),
- (0x3289, 'M', u'十'),
- (0x328A, 'M', u'月'),
- (0x328B, 'M', u'火'),
- (0x328C, 'M', u'水'),
- (0x328D, 'M', u'木'),
- (0x328E, 'M', u'金'),
- (0x328F, 'M', u'土'),
- (0x3290, 'M', u'日'),
- (0x3291, 'M', u'株'),
- (0x3292, 'M', u'有'),
- (0x3293, 'M', u'社'),
- (0x3294, 'M', u'名'),
- (0x3295, 'M', u'特'),
- (0x3296, 'M', u'財'),
- (0x3297, 'M', u'祝'),
- (0x3298, 'M', u'労'),
- (0x3299, 'M', u'秘'),
- (0x329A, 'M', u'男'),
- (0x329B, 'M', u'女'),
- (0x329C, 'M', u'適'),
- (0x329D, 'M', u'優'),
- (0x329E, 'M', u'印'),
- (0x329F, 'M', u'注'),
- (0x32A0, 'M', u'項'),
- (0x32A1, 'M', u'休'),
- (0x32A2, 'M', u'写'),
- (0x32A3, 'M', u'正'),
- (0x32A4, 'M', u'上'),
- (0x32A5, 'M', u'中'),
- (0x32A6, 'M', u'下'),
- (0x32A7, 'M', u'左'),
- (0x32A8, 'M', u'右'),
- (0x32A9, 'M', u'医'),
- (0x32AA, 'M', u'宗'),
- (0x32AB, 'M', u'学'),
- (0x32AC, 'M', u'監'),
- (0x32AD, 'M', u'企'),
- (0x32AE, 'M', u'資'),
- (0x32AF, 'M', u'協'),
- (0x32B0, 'M', u'夜'),
- (0x32B1, 'M', u'36'),
- (0x32B2, 'M', u'37'),
- (0x32B3, 'M', u'38'),
- (0x32B4, 'M', u'39'),
- (0x32B5, 'M', u'40'),
- (0x32B6, 'M', u'41'),
- (0x32B7, 'M', u'42'),
- (0x32B8, 'M', u'43'),
- (0x32B9, 'M', u'44'),
- (0x32BA, 'M', u'45'),
- (0x32BB, 'M', u'46'),
- (0x32BC, 'M', u'47'),
- (0x32BD, 'M', u'48'),
- (0x32BE, 'M', u'49'),
- (0x32BF, 'M', u'50'),
- (0x32C0, 'M', u'1月'),
- (0x32C1, 'M', u'2月'),
- (0x32C2, 'M', u'3月'),
- (0x32C3, 'M', u'4月'),
- (0x32C4, 'M', u'5月'),
- (0x32C5, 'M', u'6月'),
- (0x32C6, 'M', u'7月'),
- (0x32C7, 'M', u'8月'),
- (0x32C8, 'M', u'9月'),
- (0x32C9, 'M', u'10月'),
- (0x32CA, 'M', u'11月'),
- (0x32CB, 'M', u'12月'),
- (0x32CC, 'M', u'hg'),
- (0x32CD, 'M', u'erg'),
- (0x32CE, 'M', u'ev'),
- (0x32CF, 'M', u'ltd'),
- (0x32D0, 'M', u'ア'),
- (0x32D1, 'M', u'イ'),
- (0x32D2, 'M', u'ウ'),
- (0x32D3, 'M', u'エ'),
- ]
-
-def _seg_32():
- return [
- (0x32D4, 'M', u'オ'),
- (0x32D5, 'M', u'カ'),
- (0x32D6, 'M', u'キ'),
- (0x32D7, 'M', u'ク'),
- (0x32D8, 'M', u'ケ'),
- (0x32D9, 'M', u'コ'),
- (0x32DA, 'M', u'サ'),
- (0x32DB, 'M', u'シ'),
- (0x32DC, 'M', u'ス'),
- (0x32DD, 'M', u'セ'),
- (0x32DE, 'M', u'ソ'),
- (0x32DF, 'M', u'タ'),
- (0x32E0, 'M', u'チ'),
- (0x32E1, 'M', u'ツ'),
- (0x32E2, 'M', u'テ'),
- (0x32E3, 'M', u'ト'),
- (0x32E4, 'M', u'ナ'),
- (0x32E5, 'M', u'ニ'),
- (0x32E6, 'M', u'ヌ'),
- (0x32E7, 'M', u'ネ'),
- (0x32E8, 'M', u'ノ'),
- (0x32E9, 'M', u'ハ'),
- (0x32EA, 'M', u'ヒ'),
- (0x32EB, 'M', u'フ'),
- (0x32EC, 'M', u'ヘ'),
- (0x32ED, 'M', u'ホ'),
- (0x32EE, 'M', u'マ'),
- (0x32EF, 'M', u'ミ'),
- (0x32F0, 'M', u'ム'),
- (0x32F1, 'M', u'メ'),
- (0x32F2, 'M', u'モ'),
- (0x32F3, 'M', u'ヤ'),
- (0x32F4, 'M', u'ユ'),
- (0x32F5, 'M', u'ヨ'),
- (0x32F6, 'M', u'ラ'),
- (0x32F7, 'M', u'リ'),
- (0x32F8, 'M', u'ル'),
- (0x32F9, 'M', u'レ'),
- (0x32FA, 'M', u'ロ'),
- (0x32FB, 'M', u'ワ'),
- (0x32FC, 'M', u'ヰ'),
- (0x32FD, 'M', u'ヱ'),
- (0x32FE, 'M', u'ヲ'),
- (0x32FF, 'X'),
- (0x3300, 'M', u'アパート'),
- (0x3301, 'M', u'アルファ'),
- (0x3302, 'M', u'アンペア'),
- (0x3303, 'M', u'アール'),
- (0x3304, 'M', u'イニング'),
- (0x3305, 'M', u'インチ'),
- (0x3306, 'M', u'ウォン'),
- (0x3307, 'M', u'エスクード'),
- (0x3308, 'M', u'エーカー'),
- (0x3309, 'M', u'オンス'),
- (0x330A, 'M', u'オーム'),
- (0x330B, 'M', u'カイリ'),
- (0x330C, 'M', u'カラット'),
- (0x330D, 'M', u'カロリー'),
- (0x330E, 'M', u'ガロン'),
- (0x330F, 'M', u'ガンマ'),
- (0x3310, 'M', u'ギガ'),
- (0x3311, 'M', u'ギニー'),
- (0x3312, 'M', u'キュリー'),
- (0x3313, 'M', u'ギルダー'),
- (0x3314, 'M', u'キロ'),
- (0x3315, 'M', u'キログラム'),
- (0x3316, 'M', u'キロメートル'),
- (0x3317, 'M', u'キロワット'),
- (0x3318, 'M', u'グラム'),
- (0x3319, 'M', u'グラムトン'),
- (0x331A, 'M', u'クルゼイロ'),
- (0x331B, 'M', u'クローネ'),
- (0x331C, 'M', u'ケース'),
- (0x331D, 'M', u'コルナ'),
- (0x331E, 'M', u'コーポ'),
- (0x331F, 'M', u'サイクル'),
- (0x3320, 'M', u'サンチーム'),
- (0x3321, 'M', u'シリング'),
- (0x3322, 'M', u'センチ'),
- (0x3323, 'M', u'セント'),
- (0x3324, 'M', u'ダース'),
- (0x3325, 'M', u'デシ'),
- (0x3326, 'M', u'ドル'),
- (0x3327, 'M', u'トン'),
- (0x3328, 'M', u'ナノ'),
- (0x3329, 'M', u'ノット'),
- (0x332A, 'M', u'ハイツ'),
- (0x332B, 'M', u'パーセント'),
- (0x332C, 'M', u'パーツ'),
- (0x332D, 'M', u'バーレル'),
- (0x332E, 'M', u'ピアストル'),
- (0x332F, 'M', u'ピクル'),
- (0x3330, 'M', u'ピコ'),
- (0x3331, 'M', u'ビル'),
- (0x3332, 'M', u'ファラッド'),
- (0x3333, 'M', u'フィート'),
- (0x3334, 'M', u'ブッシェル'),
- (0x3335, 'M', u'フラン'),
- (0x3336, 'M', u'ヘクタール'),
- (0x3337, 'M', u'ペソ'),
- ]
-
-def _seg_33():
- return [
- (0x3338, 'M', u'ペニヒ'),
- (0x3339, 'M', u'ヘルツ'),
- (0x333A, 'M', u'ペンス'),
- (0x333B, 'M', u'ページ'),
- (0x333C, 'M', u'ベータ'),
- (0x333D, 'M', u'ポイント'),
- (0x333E, 'M', u'ボルト'),
- (0x333F, 'M', u'ホン'),
- (0x3340, 'M', u'ポンド'),
- (0x3341, 'M', u'ホール'),
- (0x3342, 'M', u'ホーン'),
- (0x3343, 'M', u'マイクロ'),
- (0x3344, 'M', u'マイル'),
- (0x3345, 'M', u'マッハ'),
- (0x3346, 'M', u'マルク'),
- (0x3347, 'M', u'マンション'),
- (0x3348, 'M', u'ミクロン'),
- (0x3349, 'M', u'ミリ'),
- (0x334A, 'M', u'ミリバール'),
- (0x334B, 'M', u'メガ'),
- (0x334C, 'M', u'メガトン'),
- (0x334D, 'M', u'メートル'),
- (0x334E, 'M', u'ヤード'),
- (0x334F, 'M', u'ヤール'),
- (0x3350, 'M', u'ユアン'),
- (0x3351, 'M', u'リットル'),
- (0x3352, 'M', u'リラ'),
- (0x3353, 'M', u'ルピー'),
- (0x3354, 'M', u'ルーブル'),
- (0x3355, 'M', u'レム'),
- (0x3356, 'M', u'レントゲン'),
- (0x3357, 'M', u'ワット'),
- (0x3358, 'M', u'0点'),
- (0x3359, 'M', u'1点'),
- (0x335A, 'M', u'2点'),
- (0x335B, 'M', u'3点'),
- (0x335C, 'M', u'4点'),
- (0x335D, 'M', u'5点'),
- (0x335E, 'M', u'6点'),
- (0x335F, 'M', u'7点'),
- (0x3360, 'M', u'8点'),
- (0x3361, 'M', u'9点'),
- (0x3362, 'M', u'10点'),
- (0x3363, 'M', u'11点'),
- (0x3364, 'M', u'12点'),
- (0x3365, 'M', u'13点'),
- (0x3366, 'M', u'14点'),
- (0x3367, 'M', u'15点'),
- (0x3368, 'M', u'16点'),
- (0x3369, 'M', u'17点'),
- (0x336A, 'M', u'18点'),
- (0x336B, 'M', u'19点'),
- (0x336C, 'M', u'20点'),
- (0x336D, 'M', u'21点'),
- (0x336E, 'M', u'22点'),
- (0x336F, 'M', u'23点'),
- (0x3370, 'M', u'24点'),
- (0x3371, 'M', u'hpa'),
- (0x3372, 'M', u'da'),
- (0x3373, 'M', u'au'),
- (0x3374, 'M', u'bar'),
- (0x3375, 'M', u'ov'),
- (0x3376, 'M', u'pc'),
- (0x3377, 'M', u'dm'),
- (0x3378, 'M', u'dm2'),
- (0x3379, 'M', u'dm3'),
- (0x337A, 'M', u'iu'),
- (0x337B, 'M', u'平成'),
- (0x337C, 'M', u'昭和'),
- (0x337D, 'M', u'大正'),
- (0x337E, 'M', u'明治'),
- (0x337F, 'M', u'株式会社'),
- (0x3380, 'M', u'pa'),
- (0x3381, 'M', u'na'),
- (0x3382, 'M', u'μa'),
- (0x3383, 'M', u'ma'),
- (0x3384, 'M', u'ka'),
- (0x3385, 'M', u'kb'),
- (0x3386, 'M', u'mb'),
- (0x3387, 'M', u'gb'),
- (0x3388, 'M', u'cal'),
- (0x3389, 'M', u'kcal'),
- (0x338A, 'M', u'pf'),
- (0x338B, 'M', u'nf'),
- (0x338C, 'M', u'μf'),
- (0x338D, 'M', u'μg'),
- (0x338E, 'M', u'mg'),
- (0x338F, 'M', u'kg'),
- (0x3390, 'M', u'hz'),
- (0x3391, 'M', u'khz'),
- (0x3392, 'M', u'mhz'),
- (0x3393, 'M', u'ghz'),
- (0x3394, 'M', u'thz'),
- (0x3395, 'M', u'μl'),
- (0x3396, 'M', u'ml'),
- (0x3397, 'M', u'dl'),
- (0x3398, 'M', u'kl'),
- (0x3399, 'M', u'fm'),
- (0x339A, 'M', u'nm'),
- (0x339B, 'M', u'μm'),
- ]
-
-def _seg_34():
- return [
- (0x339C, 'M', u'mm'),
- (0x339D, 'M', u'cm'),
- (0x339E, 'M', u'km'),
- (0x339F, 'M', u'mm2'),
- (0x33A0, 'M', u'cm2'),
- (0x33A1, 'M', u'm2'),
- (0x33A2, 'M', u'km2'),
- (0x33A3, 'M', u'mm3'),
- (0x33A4, 'M', u'cm3'),
- (0x33A5, 'M', u'm3'),
- (0x33A6, 'M', u'km3'),
- (0x33A7, 'M', u'm∕s'),
- (0x33A8, 'M', u'm∕s2'),
- (0x33A9, 'M', u'pa'),
- (0x33AA, 'M', u'kpa'),
- (0x33AB, 'M', u'mpa'),
- (0x33AC, 'M', u'gpa'),
- (0x33AD, 'M', u'rad'),
- (0x33AE, 'M', u'rad∕s'),
- (0x33AF, 'M', u'rad∕s2'),
- (0x33B0, 'M', u'ps'),
- (0x33B1, 'M', u'ns'),
- (0x33B2, 'M', u'μs'),
- (0x33B3, 'M', u'ms'),
- (0x33B4, 'M', u'pv'),
- (0x33B5, 'M', u'nv'),
- (0x33B6, 'M', u'μv'),
- (0x33B7, 'M', u'mv'),
- (0x33B8, 'M', u'kv'),
- (0x33B9, 'M', u'mv'),
- (0x33BA, 'M', u'pw'),
- (0x33BB, 'M', u'nw'),
- (0x33BC, 'M', u'μw'),
- (0x33BD, 'M', u'mw'),
- (0x33BE, 'M', u'kw'),
- (0x33BF, 'M', u'mw'),
- (0x33C0, 'M', u'kω'),
- (0x33C1, 'M', u'mω'),
- (0x33C2, 'X'),
- (0x33C3, 'M', u'bq'),
- (0x33C4, 'M', u'cc'),
- (0x33C5, 'M', u'cd'),
- (0x33C6, 'M', u'c∕kg'),
- (0x33C7, 'X'),
- (0x33C8, 'M', u'db'),
- (0x33C9, 'M', u'gy'),
- (0x33CA, 'M', u'ha'),
- (0x33CB, 'M', u'hp'),
- (0x33CC, 'M', u'in'),
- (0x33CD, 'M', u'kk'),
- (0x33CE, 'M', u'km'),
- (0x33CF, 'M', u'kt'),
- (0x33D0, 'M', u'lm'),
- (0x33D1, 'M', u'ln'),
- (0x33D2, 'M', u'log'),
- (0x33D3, 'M', u'lx'),
- (0x33D4, 'M', u'mb'),
- (0x33D5, 'M', u'mil'),
- (0x33D6, 'M', u'mol'),
- (0x33D7, 'M', u'ph'),
- (0x33D8, 'X'),
- (0x33D9, 'M', u'ppm'),
- (0x33DA, 'M', u'pr'),
- (0x33DB, 'M', u'sr'),
- (0x33DC, 'M', u'sv'),
- (0x33DD, 'M', u'wb'),
- (0x33DE, 'M', u'v∕m'),
- (0x33DF, 'M', u'a∕m'),
- (0x33E0, 'M', u'1日'),
- (0x33E1, 'M', u'2日'),
- (0x33E2, 'M', u'3日'),
- (0x33E3, 'M', u'4日'),
- (0x33E4, 'M', u'5日'),
- (0x33E5, 'M', u'6日'),
- (0x33E6, 'M', u'7日'),
- (0x33E7, 'M', u'8日'),
- (0x33E8, 'M', u'9日'),
- (0x33E9, 'M', u'10日'),
- (0x33EA, 'M', u'11日'),
- (0x33EB, 'M', u'12日'),
- (0x33EC, 'M', u'13日'),
- (0x33ED, 'M', u'14日'),
- (0x33EE, 'M', u'15日'),
- (0x33EF, 'M', u'16日'),
- (0x33F0, 'M', u'17日'),
- (0x33F1, 'M', u'18日'),
- (0x33F2, 'M', u'19日'),
- (0x33F3, 'M', u'20日'),
- (0x33F4, 'M', u'21日'),
- (0x33F5, 'M', u'22日'),
- (0x33F6, 'M', u'23日'),
- (0x33F7, 'M', u'24日'),
- (0x33F8, 'M', u'25日'),
- (0x33F9, 'M', u'26日'),
- (0x33FA, 'M', u'27日'),
- (0x33FB, 'M', u'28日'),
- (0x33FC, 'M', u'29日'),
- (0x33FD, 'M', u'30日'),
- (0x33FE, 'M', u'31日'),
- (0x33FF, 'M', u'gal'),
- ]
-
-def _seg_35():
- return [
- (0x3400, 'V'),
- (0x4DB6, 'X'),
- (0x4DC0, 'V'),
- (0x9FCD, 'X'),
- (0xA000, 'V'),
- (0xA48D, 'X'),
- (0xA490, 'V'),
- (0xA4C7, 'X'),
- (0xA4D0, 'V'),
- (0xA62C, 'X'),
- (0xA640, 'M', u'ꙁ'),
- (0xA641, 'V'),
- (0xA642, 'M', u'ꙃ'),
- (0xA643, 'V'),
- (0xA644, 'M', u'ꙅ'),
- (0xA645, 'V'),
- (0xA646, 'M', u'ꙇ'),
- (0xA647, 'V'),
- (0xA648, 'M', u'ꙉ'),
- (0xA649, 'V'),
- (0xA64A, 'M', u'ꙋ'),
- (0xA64B, 'V'),
- (0xA64C, 'M', u'ꙍ'),
- (0xA64D, 'V'),
- (0xA64E, 'M', u'ꙏ'),
- (0xA64F, 'V'),
- (0xA650, 'M', u'ꙑ'),
- (0xA651, 'V'),
- (0xA652, 'M', u'ꙓ'),
- (0xA653, 'V'),
- (0xA654, 'M', u'ꙕ'),
- (0xA655, 'V'),
- (0xA656, 'M', u'ꙗ'),
- (0xA657, 'V'),
- (0xA658, 'M', u'ꙙ'),
- (0xA659, 'V'),
- (0xA65A, 'M', u'ꙛ'),
- (0xA65B, 'V'),
- (0xA65C, 'M', u'ꙝ'),
- (0xA65D, 'V'),
- (0xA65E, 'M', u'ꙟ'),
- (0xA65F, 'V'),
- (0xA660, 'M', u'ꙡ'),
- (0xA661, 'V'),
- (0xA662, 'M', u'ꙣ'),
- (0xA663, 'V'),
- (0xA664, 'M', u'ꙥ'),
- (0xA665, 'V'),
- (0xA666, 'M', u'ꙧ'),
- (0xA667, 'V'),
- (0xA668, 'M', u'ꙩ'),
- (0xA669, 'V'),
- (0xA66A, 'M', u'ꙫ'),
- (0xA66B, 'V'),
- (0xA66C, 'M', u'ꙭ'),
- (0xA66D, 'V'),
- (0xA680, 'M', u'ꚁ'),
- (0xA681, 'V'),
- (0xA682, 'M', u'ꚃ'),
- (0xA683, 'V'),
- (0xA684, 'M', u'ꚅ'),
- (0xA685, 'V'),
- (0xA686, 'M', u'ꚇ'),
- (0xA687, 'V'),
- (0xA688, 'M', u'ꚉ'),
- (0xA689, 'V'),
- (0xA68A, 'M', u'ꚋ'),
- (0xA68B, 'V'),
- (0xA68C, 'M', u'ꚍ'),
- (0xA68D, 'V'),
- (0xA68E, 'M', u'ꚏ'),
- (0xA68F, 'V'),
- (0xA690, 'M', u'ꚑ'),
- (0xA691, 'V'),
- (0xA692, 'M', u'ꚓ'),
- (0xA693, 'V'),
- (0xA694, 'M', u'ꚕ'),
- (0xA695, 'V'),
- (0xA696, 'M', u'ꚗ'),
- (0xA697, 'V'),
- (0xA698, 'X'),
- (0xA69F, 'V'),
- (0xA6F8, 'X'),
- (0xA700, 'V'),
- (0xA722, 'M', u'ꜣ'),
- (0xA723, 'V'),
- (0xA724, 'M', u'ꜥ'),
- (0xA725, 'V'),
- (0xA726, 'M', u'ꜧ'),
- (0xA727, 'V'),
- (0xA728, 'M', u'ꜩ'),
- (0xA729, 'V'),
- (0xA72A, 'M', u'ꜫ'),
- (0xA72B, 'V'),
- (0xA72C, 'M', u'ꜭ'),
- (0xA72D, 'V'),
- (0xA72E, 'M', u'ꜯ'),
- (0xA72F, 'V'),
- (0xA732, 'M', u'ꜳ'),
- (0xA733, 'V'),
- ]
-
-def _seg_36():
- return [
- (0xA734, 'M', u'ꜵ'),
- (0xA735, 'V'),
- (0xA736, 'M', u'ꜷ'),
- (0xA737, 'V'),
- (0xA738, 'M', u'ꜹ'),
- (0xA739, 'V'),
- (0xA73A, 'M', u'ꜻ'),
- (0xA73B, 'V'),
- (0xA73C, 'M', u'ꜽ'),
- (0xA73D, 'V'),
- (0xA73E, 'M', u'ꜿ'),
- (0xA73F, 'V'),
- (0xA740, 'M', u'ꝁ'),
- (0xA741, 'V'),
- (0xA742, 'M', u'ꝃ'),
- (0xA743, 'V'),
- (0xA744, 'M', u'ꝅ'),
- (0xA745, 'V'),
- (0xA746, 'M', u'ꝇ'),
- (0xA747, 'V'),
- (0xA748, 'M', u'ꝉ'),
- (0xA749, 'V'),
- (0xA74A, 'M', u'ꝋ'),
- (0xA74B, 'V'),
- (0xA74C, 'M', u'ꝍ'),
- (0xA74D, 'V'),
- (0xA74E, 'M', u'ꝏ'),
- (0xA74F, 'V'),
- (0xA750, 'M', u'ꝑ'),
- (0xA751, 'V'),
- (0xA752, 'M', u'ꝓ'),
- (0xA753, 'V'),
- (0xA754, 'M', u'ꝕ'),
- (0xA755, 'V'),
- (0xA756, 'M', u'ꝗ'),
- (0xA757, 'V'),
- (0xA758, 'M', u'ꝙ'),
- (0xA759, 'V'),
- (0xA75A, 'M', u'ꝛ'),
- (0xA75B, 'V'),
- (0xA75C, 'M', u'ꝝ'),
- (0xA75D, 'V'),
- (0xA75E, 'M', u'ꝟ'),
- (0xA75F, 'V'),
- (0xA760, 'M', u'ꝡ'),
- (0xA761, 'V'),
- (0xA762, 'M', u'ꝣ'),
- (0xA763, 'V'),
- (0xA764, 'M', u'ꝥ'),
- (0xA765, 'V'),
- (0xA766, 'M', u'ꝧ'),
- (0xA767, 'V'),
- (0xA768, 'M', u'ꝩ'),
- (0xA769, 'V'),
- (0xA76A, 'M', u'ꝫ'),
- (0xA76B, 'V'),
- (0xA76C, 'M', u'ꝭ'),
- (0xA76D, 'V'),
- (0xA76E, 'M', u'ꝯ'),
- (0xA76F, 'V'),
- (0xA770, 'M', u'ꝯ'),
- (0xA771, 'V'),
- (0xA779, 'M', u'ꝺ'),
- (0xA77A, 'V'),
- (0xA77B, 'M', u'ꝼ'),
- (0xA77C, 'V'),
- (0xA77D, 'M', u'ᵹ'),
- (0xA77E, 'M', u'ꝿ'),
- (0xA77F, 'V'),
- (0xA780, 'M', u'ꞁ'),
- (0xA781, 'V'),
- (0xA782, 'M', u'ꞃ'),
- (0xA783, 'V'),
- (0xA784, 'M', u'ꞅ'),
- (0xA785, 'V'),
- (0xA786, 'M', u'ꞇ'),
- (0xA787, 'V'),
- (0xA78B, 'M', u'ꞌ'),
- (0xA78C, 'V'),
- (0xA78D, 'M', u'ɥ'),
- (0xA78E, 'V'),
- (0xA78F, 'X'),
- (0xA790, 'M', u'ꞑ'),
- (0xA791, 'V'),
- (0xA792, 'M', u'ꞓ'),
- (0xA793, 'V'),
- (0xA794, 'X'),
- (0xA7A0, 'M', u'ꞡ'),
- (0xA7A1, 'V'),
- (0xA7A2, 'M', u'ꞣ'),
- (0xA7A3, 'V'),
- (0xA7A4, 'M', u'ꞥ'),
- (0xA7A5, 'V'),
- (0xA7A6, 'M', u'ꞧ'),
- (0xA7A7, 'V'),
- (0xA7A8, 'M', u'ꞩ'),
- (0xA7A9, 'V'),
- (0xA7AA, 'M', u'ɦ'),
- (0xA7AB, 'X'),
- (0xA7F8, 'M', u'ħ'),
- ]
-
-def _seg_37():
- return [
- (0xA7F9, 'M', u'œ'),
- (0xA7FA, 'V'),
- (0xA82C, 'X'),
- (0xA830, 'V'),
- (0xA83A, 'X'),
- (0xA840, 'V'),
- (0xA878, 'X'),
- (0xA880, 'V'),
- (0xA8C5, 'X'),
- (0xA8CE, 'V'),
- (0xA8DA, 'X'),
- (0xA8E0, 'V'),
- (0xA8FC, 'X'),
- (0xA900, 'V'),
- (0xA954, 'X'),
- (0xA95F, 'V'),
- (0xA97D, 'X'),
- (0xA980, 'V'),
- (0xA9CE, 'X'),
- (0xA9CF, 'V'),
- (0xA9DA, 'X'),
- (0xA9DE, 'V'),
- (0xA9E0, 'X'),
- (0xAA00, 'V'),
- (0xAA37, 'X'),
- (0xAA40, 'V'),
- (0xAA4E, 'X'),
- (0xAA50, 'V'),
- (0xAA5A, 'X'),
- (0xAA5C, 'V'),
- (0xAA7C, 'X'),
- (0xAA80, 'V'),
- (0xAAC3, 'X'),
- (0xAADB, 'V'),
- (0xAAF7, 'X'),
- (0xAB01, 'V'),
- (0xAB07, 'X'),
- (0xAB09, 'V'),
- (0xAB0F, 'X'),
- (0xAB11, 'V'),
- (0xAB17, 'X'),
- (0xAB20, 'V'),
- (0xAB27, 'X'),
- (0xAB28, 'V'),
- (0xAB2F, 'X'),
- (0xABC0, 'V'),
- (0xABEE, 'X'),
- (0xABF0, 'V'),
- (0xABFA, 'X'),
- (0xAC00, 'V'),
- (0xD7A4, 'X'),
- (0xD7B0, 'V'),
- (0xD7C7, 'X'),
- (0xD7CB, 'V'),
- (0xD7FC, 'X'),
- (0xF900, 'M', u'豈'),
- (0xF901, 'M', u'更'),
- (0xF902, 'M', u'車'),
- (0xF903, 'M', u'賈'),
- (0xF904, 'M', u'滑'),
- (0xF905, 'M', u'串'),
- (0xF906, 'M', u'句'),
- (0xF907, 'M', u'龜'),
- (0xF909, 'M', u'契'),
- (0xF90A, 'M', u'金'),
- (0xF90B, 'M', u'喇'),
- (0xF90C, 'M', u'奈'),
- (0xF90D, 'M', u'懶'),
- (0xF90E, 'M', u'癩'),
- (0xF90F, 'M', u'羅'),
- (0xF910, 'M', u'蘿'),
- (0xF911, 'M', u'螺'),
- (0xF912, 'M', u'裸'),
- (0xF913, 'M', u'邏'),
- (0xF914, 'M', u'樂'),
- (0xF915, 'M', u'洛'),
- (0xF916, 'M', u'烙'),
- (0xF917, 'M', u'珞'),
- (0xF918, 'M', u'落'),
- (0xF919, 'M', u'酪'),
- (0xF91A, 'M', u'駱'),
- (0xF91B, 'M', u'亂'),
- (0xF91C, 'M', u'卵'),
- (0xF91D, 'M', u'欄'),
- (0xF91E, 'M', u'爛'),
- (0xF91F, 'M', u'蘭'),
- (0xF920, 'M', u'鸞'),
- (0xF921, 'M', u'嵐'),
- (0xF922, 'M', u'濫'),
- (0xF923, 'M', u'藍'),
- (0xF924, 'M', u'襤'),
- (0xF925, 'M', u'拉'),
- (0xF926, 'M', u'臘'),
- (0xF927, 'M', u'蠟'),
- (0xF928, 'M', u'廊'),
- (0xF929, 'M', u'朗'),
- (0xF92A, 'M', u'浪'),
- (0xF92B, 'M', u'狼'),
- (0xF92C, 'M', u'郎'),
- (0xF92D, 'M', u'來'),
- ]
-
-def _seg_38():
- return [
- (0xF92E, 'M', u'冷'),
- (0xF92F, 'M', u'勞'),
- (0xF930, 'M', u'擄'),
- (0xF931, 'M', u'櫓'),
- (0xF932, 'M', u'爐'),
- (0xF933, 'M', u'盧'),
- (0xF934, 'M', u'老'),
- (0xF935, 'M', u'蘆'),
- (0xF936, 'M', u'虜'),
- (0xF937, 'M', u'路'),
- (0xF938, 'M', u'露'),
- (0xF939, 'M', u'魯'),
- (0xF93A, 'M', u'鷺'),
- (0xF93B, 'M', u'碌'),
- (0xF93C, 'M', u'祿'),
- (0xF93D, 'M', u'綠'),
- (0xF93E, 'M', u'菉'),
- (0xF93F, 'M', u'錄'),
- (0xF940, 'M', u'鹿'),
- (0xF941, 'M', u'論'),
- (0xF942, 'M', u'壟'),
- (0xF943, 'M', u'弄'),
- (0xF944, 'M', u'籠'),
- (0xF945, 'M', u'聾'),
- (0xF946, 'M', u'牢'),
- (0xF947, 'M', u'磊'),
- (0xF948, 'M', u'賂'),
- (0xF949, 'M', u'雷'),
- (0xF94A, 'M', u'壘'),
- (0xF94B, 'M', u'屢'),
- (0xF94C, 'M', u'樓'),
- (0xF94D, 'M', u'淚'),
- (0xF94E, 'M', u'漏'),
- (0xF94F, 'M', u'累'),
- (0xF950, 'M', u'縷'),
- (0xF951, 'M', u'陋'),
- (0xF952, 'M', u'勒'),
- (0xF953, 'M', u'肋'),
- (0xF954, 'M', u'凜'),
- (0xF955, 'M', u'凌'),
- (0xF956, 'M', u'稜'),
- (0xF957, 'M', u'綾'),
- (0xF958, 'M', u'菱'),
- (0xF959, 'M', u'陵'),
- (0xF95A, 'M', u'讀'),
- (0xF95B, 'M', u'拏'),
- (0xF95C, 'M', u'樂'),
- (0xF95D, 'M', u'諾'),
- (0xF95E, 'M', u'丹'),
- (0xF95F, 'M', u'寧'),
- (0xF960, 'M', u'怒'),
- (0xF961, 'M', u'率'),
- (0xF962, 'M', u'異'),
- (0xF963, 'M', u'北'),
- (0xF964, 'M', u'磻'),
- (0xF965, 'M', u'便'),
- (0xF966, 'M', u'復'),
- (0xF967, 'M', u'不'),
- (0xF968, 'M', u'泌'),
- (0xF969, 'M', u'數'),
- (0xF96A, 'M', u'索'),
- (0xF96B, 'M', u'參'),
- (0xF96C, 'M', u'塞'),
- (0xF96D, 'M', u'省'),
- (0xF96E, 'M', u'葉'),
- (0xF96F, 'M', u'說'),
- (0xF970, 'M', u'殺'),
- (0xF971, 'M', u'辰'),
- (0xF972, 'M', u'沈'),
- (0xF973, 'M', u'拾'),
- (0xF974, 'M', u'若'),
- (0xF975, 'M', u'掠'),
- (0xF976, 'M', u'略'),
- (0xF977, 'M', u'亮'),
- (0xF978, 'M', u'兩'),
- (0xF979, 'M', u'凉'),
- (0xF97A, 'M', u'梁'),
- (0xF97B, 'M', u'糧'),
- (0xF97C, 'M', u'良'),
- (0xF97D, 'M', u'諒'),
- (0xF97E, 'M', u'量'),
- (0xF97F, 'M', u'勵'),
- (0xF980, 'M', u'呂'),
- (0xF981, 'M', u'女'),
- (0xF982, 'M', u'廬'),
- (0xF983, 'M', u'旅'),
- (0xF984, 'M', u'濾'),
- (0xF985, 'M', u'礪'),
- (0xF986, 'M', u'閭'),
- (0xF987, 'M', u'驪'),
- (0xF988, 'M', u'麗'),
- (0xF989, 'M', u'黎'),
- (0xF98A, 'M', u'力'),
- (0xF98B, 'M', u'曆'),
- (0xF98C, 'M', u'歷'),
- (0xF98D, 'M', u'轢'),
- (0xF98E, 'M', u'年'),
- (0xF98F, 'M', u'憐'),
- (0xF990, 'M', u'戀'),
- (0xF991, 'M', u'撚'),
- ]
-
-def _seg_39():
- return [
- (0xF992, 'M', u'漣'),
- (0xF993, 'M', u'煉'),
- (0xF994, 'M', u'璉'),
- (0xF995, 'M', u'秊'),
- (0xF996, 'M', u'練'),
- (0xF997, 'M', u'聯'),
- (0xF998, 'M', u'輦'),
- (0xF999, 'M', u'蓮'),
- (0xF99A, 'M', u'連'),
- (0xF99B, 'M', u'鍊'),
- (0xF99C, 'M', u'列'),
- (0xF99D, 'M', u'劣'),
- (0xF99E, 'M', u'咽'),
- (0xF99F, 'M', u'烈'),
- (0xF9A0, 'M', u'裂'),
- (0xF9A1, 'M', u'說'),
- (0xF9A2, 'M', u'廉'),
- (0xF9A3, 'M', u'念'),
- (0xF9A4, 'M', u'捻'),
- (0xF9A5, 'M', u'殮'),
- (0xF9A6, 'M', u'簾'),
- (0xF9A7, 'M', u'獵'),
- (0xF9A8, 'M', u'令'),
- (0xF9A9, 'M', u'囹'),
- (0xF9AA, 'M', u'寧'),
- (0xF9AB, 'M', u'嶺'),
- (0xF9AC, 'M', u'怜'),
- (0xF9AD, 'M', u'玲'),
- (0xF9AE, 'M', u'瑩'),
- (0xF9AF, 'M', u'羚'),
- (0xF9B0, 'M', u'聆'),
- (0xF9B1, 'M', u'鈴'),
- (0xF9B2, 'M', u'零'),
- (0xF9B3, 'M', u'靈'),
- (0xF9B4, 'M', u'領'),
- (0xF9B5, 'M', u'例'),
- (0xF9B6, 'M', u'禮'),
- (0xF9B7, 'M', u'醴'),
- (0xF9B8, 'M', u'隸'),
- (0xF9B9, 'M', u'惡'),
- (0xF9BA, 'M', u'了'),
- (0xF9BB, 'M', u'僚'),
- (0xF9BC, 'M', u'寮'),
- (0xF9BD, 'M', u'尿'),
- (0xF9BE, 'M', u'料'),
- (0xF9BF, 'M', u'樂'),
- (0xF9C0, 'M', u'燎'),
- (0xF9C1, 'M', u'療'),
- (0xF9C2, 'M', u'蓼'),
- (0xF9C3, 'M', u'遼'),
- (0xF9C4, 'M', u'龍'),
- (0xF9C5, 'M', u'暈'),
- (0xF9C6, 'M', u'阮'),
- (0xF9C7, 'M', u'劉'),
- (0xF9C8, 'M', u'杻'),
- (0xF9C9, 'M', u'柳'),
- (0xF9CA, 'M', u'流'),
- (0xF9CB, 'M', u'溜'),
- (0xF9CC, 'M', u'琉'),
- (0xF9CD, 'M', u'留'),
- (0xF9CE, 'M', u'硫'),
- (0xF9CF, 'M', u'紐'),
- (0xF9D0, 'M', u'類'),
- (0xF9D1, 'M', u'六'),
- (0xF9D2, 'M', u'戮'),
- (0xF9D3, 'M', u'陸'),
- (0xF9D4, 'M', u'倫'),
- (0xF9D5, 'M', u'崙'),
- (0xF9D6, 'M', u'淪'),
- (0xF9D7, 'M', u'輪'),
- (0xF9D8, 'M', u'律'),
- (0xF9D9, 'M', u'慄'),
- (0xF9DA, 'M', u'栗'),
- (0xF9DB, 'M', u'率'),
- (0xF9DC, 'M', u'隆'),
- (0xF9DD, 'M', u'利'),
- (0xF9DE, 'M', u'吏'),
- (0xF9DF, 'M', u'履'),
- (0xF9E0, 'M', u'易'),
- (0xF9E1, 'M', u'李'),
- (0xF9E2, 'M', u'梨'),
- (0xF9E3, 'M', u'泥'),
- (0xF9E4, 'M', u'理'),
- (0xF9E5, 'M', u'痢'),
- (0xF9E6, 'M', u'罹'),
- (0xF9E7, 'M', u'裏'),
- (0xF9E8, 'M', u'裡'),
- (0xF9E9, 'M', u'里'),
- (0xF9EA, 'M', u'離'),
- (0xF9EB, 'M', u'匿'),
- (0xF9EC, 'M', u'溺'),
- (0xF9ED, 'M', u'吝'),
- (0xF9EE, 'M', u'燐'),
- (0xF9EF, 'M', u'璘'),
- (0xF9F0, 'M', u'藺'),
- (0xF9F1, 'M', u'隣'),
- (0xF9F2, 'M', u'鱗'),
- (0xF9F3, 'M', u'麟'),
- (0xF9F4, 'M', u'林'),
- (0xF9F5, 'M', u'淋'),
- ]
-
-def _seg_40():
- return [
- (0xF9F6, 'M', u'臨'),
- (0xF9F7, 'M', u'立'),
- (0xF9F8, 'M', u'笠'),
- (0xF9F9, 'M', u'粒'),
- (0xF9FA, 'M', u'狀'),
- (0xF9FB, 'M', u'炙'),
- (0xF9FC, 'M', u'識'),
- (0xF9FD, 'M', u'什'),
- (0xF9FE, 'M', u'茶'),
- (0xF9FF, 'M', u'刺'),
- (0xFA00, 'M', u'切'),
- (0xFA01, 'M', u'度'),
- (0xFA02, 'M', u'拓'),
- (0xFA03, 'M', u'糖'),
- (0xFA04, 'M', u'宅'),
- (0xFA05, 'M', u'洞'),
- (0xFA06, 'M', u'暴'),
- (0xFA07, 'M', u'輻'),
- (0xFA08, 'M', u'行'),
- (0xFA09, 'M', u'降'),
- (0xFA0A, 'M', u'見'),
- (0xFA0B, 'M', u'廓'),
- (0xFA0C, 'M', u'兀'),
- (0xFA0D, 'M', u'嗀'),
- (0xFA0E, 'V'),
- (0xFA10, 'M', u'塚'),
- (0xFA11, 'V'),
- (0xFA12, 'M', u'晴'),
- (0xFA13, 'V'),
- (0xFA15, 'M', u'凞'),
- (0xFA16, 'M', u'猪'),
- (0xFA17, 'M', u'益'),
- (0xFA18, 'M', u'礼'),
- (0xFA19, 'M', u'神'),
- (0xFA1A, 'M', u'祥'),
- (0xFA1B, 'M', u'福'),
- (0xFA1C, 'M', u'靖'),
- (0xFA1D, 'M', u'精'),
- (0xFA1E, 'M', u'羽'),
- (0xFA1F, 'V'),
- (0xFA20, 'M', u'蘒'),
- (0xFA21, 'V'),
- (0xFA22, 'M', u'諸'),
- (0xFA23, 'V'),
- (0xFA25, 'M', u'逸'),
- (0xFA26, 'M', u'都'),
- (0xFA27, 'V'),
- (0xFA2A, 'M', u'飯'),
- (0xFA2B, 'M', u'飼'),
- (0xFA2C, 'M', u'館'),
- (0xFA2D, 'M', u'鶴'),
- (0xFA2E, 'M', u'郞'),
- (0xFA2F, 'M', u'隷'),
- (0xFA30, 'M', u'侮'),
- (0xFA31, 'M', u'僧'),
- (0xFA32, 'M', u'免'),
- (0xFA33, 'M', u'勉'),
- (0xFA34, 'M', u'勤'),
- (0xFA35, 'M', u'卑'),
- (0xFA36, 'M', u'喝'),
- (0xFA37, 'M', u'嘆'),
- (0xFA38, 'M', u'器'),
- (0xFA39, 'M', u'塀'),
- (0xFA3A, 'M', u'墨'),
- (0xFA3B, 'M', u'層'),
- (0xFA3C, 'M', u'屮'),
- (0xFA3D, 'M', u'悔'),
- (0xFA3E, 'M', u'慨'),
- (0xFA3F, 'M', u'憎'),
- (0xFA40, 'M', u'懲'),
- (0xFA41, 'M', u'敏'),
- (0xFA42, 'M', u'既'),
- (0xFA43, 'M', u'暑'),
- (0xFA44, 'M', u'梅'),
- (0xFA45, 'M', u'海'),
- (0xFA46, 'M', u'渚'),
- (0xFA47, 'M', u'漢'),
- (0xFA48, 'M', u'煮'),
- (0xFA49, 'M', u'爫'),
- (0xFA4A, 'M', u'琢'),
- (0xFA4B, 'M', u'碑'),
- (0xFA4C, 'M', u'社'),
- (0xFA4D, 'M', u'祉'),
- (0xFA4E, 'M', u'祈'),
- (0xFA4F, 'M', u'祐'),
- (0xFA50, 'M', u'祖'),
- (0xFA51, 'M', u'祝'),
- (0xFA52, 'M', u'禍'),
- (0xFA53, 'M', u'禎'),
- (0xFA54, 'M', u'穀'),
- (0xFA55, 'M', u'突'),
- (0xFA56, 'M', u'節'),
- (0xFA57, 'M', u'練'),
- (0xFA58, 'M', u'縉'),
- (0xFA59, 'M', u'繁'),
- (0xFA5A, 'M', u'署'),
- (0xFA5B, 'M', u'者'),
- (0xFA5C, 'M', u'臭'),
- (0xFA5D, 'M', u'艹'),
- (0xFA5F, 'M', u'著'),
- ]
-
-def _seg_41():
- return [
- (0xFA60, 'M', u'褐'),
- (0xFA61, 'M', u'視'),
- (0xFA62, 'M', u'謁'),
- (0xFA63, 'M', u'謹'),
- (0xFA64, 'M', u'賓'),
- (0xFA65, 'M', u'贈'),
- (0xFA66, 'M', u'辶'),
- (0xFA67, 'M', u'逸'),
- (0xFA68, 'M', u'難'),
- (0xFA69, 'M', u'響'),
- (0xFA6A, 'M', u'頻'),
- (0xFA6B, 'M', u'恵'),
- (0xFA6C, 'M', u'𤋮'),
- (0xFA6D, 'M', u'舘'),
- (0xFA6E, 'X'),
- (0xFA70, 'M', u'並'),
- (0xFA71, 'M', u'况'),
- (0xFA72, 'M', u'全'),
- (0xFA73, 'M', u'侀'),
- (0xFA74, 'M', u'充'),
- (0xFA75, 'M', u'冀'),
- (0xFA76, 'M', u'勇'),
- (0xFA77, 'M', u'勺'),
- (0xFA78, 'M', u'喝'),
- (0xFA79, 'M', u'啕'),
- (0xFA7A, 'M', u'喙'),
- (0xFA7B, 'M', u'嗢'),
- (0xFA7C, 'M', u'塚'),
- (0xFA7D, 'M', u'墳'),
- (0xFA7E, 'M', u'奄'),
- (0xFA7F, 'M', u'奔'),
- (0xFA80, 'M', u'婢'),
- (0xFA81, 'M', u'嬨'),
- (0xFA82, 'M', u'廒'),
- (0xFA83, 'M', u'廙'),
- (0xFA84, 'M', u'彩'),
- (0xFA85, 'M', u'徭'),
- (0xFA86, 'M', u'惘'),
- (0xFA87, 'M', u'慎'),
- (0xFA88, 'M', u'愈'),
- (0xFA89, 'M', u'憎'),
- (0xFA8A, 'M', u'慠'),
- (0xFA8B, 'M', u'懲'),
- (0xFA8C, 'M', u'戴'),
- (0xFA8D, 'M', u'揄'),
- (0xFA8E, 'M', u'搜'),
- (0xFA8F, 'M', u'摒'),
- (0xFA90, 'M', u'敖'),
- (0xFA91, 'M', u'晴'),
- (0xFA92, 'M', u'朗'),
- (0xFA93, 'M', u'望'),
- (0xFA94, 'M', u'杖'),
- (0xFA95, 'M', u'歹'),
- (0xFA96, 'M', u'殺'),
- (0xFA97, 'M', u'流'),
- (0xFA98, 'M', u'滛'),
- (0xFA99, 'M', u'滋'),
- (0xFA9A, 'M', u'漢'),
- (0xFA9B, 'M', u'瀞'),
- (0xFA9C, 'M', u'煮'),
- (0xFA9D, 'M', u'瞧'),
- (0xFA9E, 'M', u'爵'),
- (0xFA9F, 'M', u'犯'),
- (0xFAA0, 'M', u'猪'),
- (0xFAA1, 'M', u'瑱'),
- (0xFAA2, 'M', u'甆'),
- (0xFAA3, 'M', u'画'),
- (0xFAA4, 'M', u'瘝'),
- (0xFAA5, 'M', u'瘟'),
- (0xFAA6, 'M', u'益'),
- (0xFAA7, 'M', u'盛'),
- (0xFAA8, 'M', u'直'),
- (0xFAA9, 'M', u'睊'),
- (0xFAAA, 'M', u'着'),
- (0xFAAB, 'M', u'磌'),
- (0xFAAC, 'M', u'窱'),
- (0xFAAD, 'M', u'節'),
- (0xFAAE, 'M', u'类'),
- (0xFAAF, 'M', u'絛'),
- (0xFAB0, 'M', u'練'),
- (0xFAB1, 'M', u'缾'),
- (0xFAB2, 'M', u'者'),
- (0xFAB3, 'M', u'荒'),
- (0xFAB4, 'M', u'華'),
- (0xFAB5, 'M', u'蝹'),
- (0xFAB6, 'M', u'襁'),
- (0xFAB7, 'M', u'覆'),
- (0xFAB8, 'M', u'視'),
- (0xFAB9, 'M', u'調'),
- (0xFABA, 'M', u'諸'),
- (0xFABB, 'M', u'請'),
- (0xFABC, 'M', u'謁'),
- (0xFABD, 'M', u'諾'),
- (0xFABE, 'M', u'諭'),
- (0xFABF, 'M', u'謹'),
- (0xFAC0, 'M', u'變'),
- (0xFAC1, 'M', u'贈'),
- (0xFAC2, 'M', u'輸'),
- (0xFAC3, 'M', u'遲'),
- (0xFAC4, 'M', u'醙'),
- ]
-
-def _seg_42():
- return [
- (0xFAC5, 'M', u'鉶'),
- (0xFAC6, 'M', u'陼'),
- (0xFAC7, 'M', u'難'),
- (0xFAC8, 'M', u'靖'),
- (0xFAC9, 'M', u'韛'),
- (0xFACA, 'M', u'響'),
- (0xFACB, 'M', u'頋'),
- (0xFACC, 'M', u'頻'),
- (0xFACD, 'M', u'鬒'),
- (0xFACE, 'M', u'龜'),
- (0xFACF, 'M', u'𢡊'),
- (0xFAD0, 'M', u'𢡄'),
- (0xFAD1, 'M', u'𣏕'),
- (0xFAD2, 'M', u'㮝'),
- (0xFAD3, 'M', u'䀘'),
- (0xFAD4, 'M', u'䀹'),
- (0xFAD5, 'M', u'𥉉'),
- (0xFAD6, 'M', u'𥳐'),
- (0xFAD7, 'M', u'𧻓'),
- (0xFAD8, 'M', u'齃'),
- (0xFAD9, 'M', u'龎'),
- (0xFADA, 'X'),
- (0xFB00, 'M', u'ff'),
- (0xFB01, 'M', u'fi'),
- (0xFB02, 'M', u'fl'),
- (0xFB03, 'M', u'ffi'),
- (0xFB04, 'M', u'ffl'),
- (0xFB05, 'M', u'st'),
- (0xFB07, 'X'),
- (0xFB13, 'M', u'մն'),
- (0xFB14, 'M', u'մե'),
- (0xFB15, 'M', u'մի'),
- (0xFB16, 'M', u'վն'),
- (0xFB17, 'M', u'մխ'),
- (0xFB18, 'X'),
- (0xFB1D, 'M', u'יִ'),
- (0xFB1E, 'V'),
- (0xFB1F, 'M', u'ײַ'),
- (0xFB20, 'M', u'ע'),
- (0xFB21, 'M', u'א'),
- (0xFB22, 'M', u'ד'),
- (0xFB23, 'M', u'ה'),
- (0xFB24, 'M', u'כ'),
- (0xFB25, 'M', u'ל'),
- (0xFB26, 'M', u'ם'),
- (0xFB27, 'M', u'ר'),
- (0xFB28, 'M', u'ת'),
- (0xFB29, '3', u'+'),
- (0xFB2A, 'M', u'שׁ'),
- (0xFB2B, 'M', u'שׂ'),
- (0xFB2C, 'M', u'שּׁ'),
- (0xFB2D, 'M', u'שּׂ'),
- (0xFB2E, 'M', u'אַ'),
- (0xFB2F, 'M', u'אָ'),
- (0xFB30, 'M', u'אּ'),
- (0xFB31, 'M', u'בּ'),
- (0xFB32, 'M', u'גּ'),
- (0xFB33, 'M', u'דּ'),
- (0xFB34, 'M', u'הּ'),
- (0xFB35, 'M', u'וּ'),
- (0xFB36, 'M', u'זּ'),
- (0xFB37, 'X'),
- (0xFB38, 'M', u'טּ'),
- (0xFB39, 'M', u'יּ'),
- (0xFB3A, 'M', u'ךּ'),
- (0xFB3B, 'M', u'כּ'),
- (0xFB3C, 'M', u'לּ'),
- (0xFB3D, 'X'),
- (0xFB3E, 'M', u'מּ'),
- (0xFB3F, 'X'),
- (0xFB40, 'M', u'נּ'),
- (0xFB41, 'M', u'סּ'),
- (0xFB42, 'X'),
- (0xFB43, 'M', u'ףּ'),
- (0xFB44, 'M', u'פּ'),
- (0xFB45, 'X'),
- (0xFB46, 'M', u'צּ'),
- (0xFB47, 'M', u'קּ'),
- (0xFB48, 'M', u'רּ'),
- (0xFB49, 'M', u'שּ'),
- (0xFB4A, 'M', u'תּ'),
- (0xFB4B, 'M', u'וֹ'),
- (0xFB4C, 'M', u'בֿ'),
- (0xFB4D, 'M', u'כֿ'),
- (0xFB4E, 'M', u'פֿ'),
- (0xFB4F, 'M', u'אל'),
- (0xFB50, 'M', u'ٱ'),
- (0xFB52, 'M', u'ٻ'),
- (0xFB56, 'M', u'پ'),
- (0xFB5A, 'M', u'ڀ'),
- (0xFB5E, 'M', u'ٺ'),
- (0xFB62, 'M', u'ٿ'),
- (0xFB66, 'M', u'ٹ'),
- (0xFB6A, 'M', u'ڤ'),
- (0xFB6E, 'M', u'ڦ'),
- (0xFB72, 'M', u'ڄ'),
- (0xFB76, 'M', u'ڃ'),
- (0xFB7A, 'M', u'چ'),
- (0xFB7E, 'M', u'ڇ'),
- (0xFB82, 'M', u'ڍ'),
- ]
-
-def _seg_43():
- return [
- (0xFB84, 'M', u'ڌ'),
- (0xFB86, 'M', u'ڎ'),
- (0xFB88, 'M', u'ڈ'),
- (0xFB8A, 'M', u'ژ'),
- (0xFB8C, 'M', u'ڑ'),
- (0xFB8E, 'M', u'ک'),
- (0xFB92, 'M', u'گ'),
- (0xFB96, 'M', u'ڳ'),
- (0xFB9A, 'M', u'ڱ'),
- (0xFB9E, 'M', u'ں'),
- (0xFBA0, 'M', u'ڻ'),
- (0xFBA4, 'M', u'ۀ'),
- (0xFBA6, 'M', u'ہ'),
- (0xFBAA, 'M', u'ھ'),
- (0xFBAE, 'M', u'ے'),
- (0xFBB0, 'M', u'ۓ'),
- (0xFBB2, 'V'),
- (0xFBC2, 'X'),
- (0xFBD3, 'M', u'ڭ'),
- (0xFBD7, 'M', u'ۇ'),
- (0xFBD9, 'M', u'ۆ'),
- (0xFBDB, 'M', u'ۈ'),
- (0xFBDD, 'M', u'ۇٴ'),
- (0xFBDE, 'M', u'ۋ'),
- (0xFBE0, 'M', u'ۅ'),
- (0xFBE2, 'M', u'ۉ'),
- (0xFBE4, 'M', u'ې'),
- (0xFBE8, 'M', u'ى'),
- (0xFBEA, 'M', u'ئا'),
- (0xFBEC, 'M', u'ئە'),
- (0xFBEE, 'M', u'ئو'),
- (0xFBF0, 'M', u'ئۇ'),
- (0xFBF2, 'M', u'ئۆ'),
- (0xFBF4, 'M', u'ئۈ'),
- (0xFBF6, 'M', u'ئې'),
- (0xFBF9, 'M', u'ئى'),
- (0xFBFC, 'M', u'ی'),
- (0xFC00, 'M', u'ئج'),
- (0xFC01, 'M', u'ئح'),
- (0xFC02, 'M', u'ئم'),
- (0xFC03, 'M', u'ئى'),
- (0xFC04, 'M', u'ئي'),
- (0xFC05, 'M', u'بج'),
- (0xFC06, 'M', u'بح'),
- (0xFC07, 'M', u'بخ'),
- (0xFC08, 'M', u'بم'),
- (0xFC09, 'M', u'بى'),
- (0xFC0A, 'M', u'بي'),
- (0xFC0B, 'M', u'تج'),
- (0xFC0C, 'M', u'تح'),
- (0xFC0D, 'M', u'تخ'),
- (0xFC0E, 'M', u'تم'),
- (0xFC0F, 'M', u'تى'),
- (0xFC10, 'M', u'تي'),
- (0xFC11, 'M', u'ثج'),
- (0xFC12, 'M', u'ثم'),
- (0xFC13, 'M', u'ثى'),
- (0xFC14, 'M', u'ثي'),
- (0xFC15, 'M', u'جح'),
- (0xFC16, 'M', u'جم'),
- (0xFC17, 'M', u'حج'),
- (0xFC18, 'M', u'حم'),
- (0xFC19, 'M', u'خج'),
- (0xFC1A, 'M', u'خح'),
- (0xFC1B, 'M', u'خم'),
- (0xFC1C, 'M', u'سج'),
- (0xFC1D, 'M', u'سح'),
- (0xFC1E, 'M', u'سخ'),
- (0xFC1F, 'M', u'سم'),
- (0xFC20, 'M', u'صح'),
- (0xFC21, 'M', u'صم'),
- (0xFC22, 'M', u'ضج'),
- (0xFC23, 'M', u'ضح'),
- (0xFC24, 'M', u'ضخ'),
- (0xFC25, 'M', u'ضم'),
- (0xFC26, 'M', u'طح'),
- (0xFC27, 'M', u'طم'),
- (0xFC28, 'M', u'ظم'),
- (0xFC29, 'M', u'عج'),
- (0xFC2A, 'M', u'عم'),
- (0xFC2B, 'M', u'غج'),
- (0xFC2C, 'M', u'غم'),
- (0xFC2D, 'M', u'فج'),
- (0xFC2E, 'M', u'فح'),
- (0xFC2F, 'M', u'فخ'),
- (0xFC30, 'M', u'فم'),
- (0xFC31, 'M', u'فى'),
- (0xFC32, 'M', u'في'),
- (0xFC33, 'M', u'قح'),
- (0xFC34, 'M', u'قم'),
- (0xFC35, 'M', u'قى'),
- (0xFC36, 'M', u'قي'),
- (0xFC37, 'M', u'كا'),
- (0xFC38, 'M', u'كج'),
- (0xFC39, 'M', u'كح'),
- (0xFC3A, 'M', u'كخ'),
- (0xFC3B, 'M', u'كل'),
- (0xFC3C, 'M', u'كم'),
- (0xFC3D, 'M', u'كى'),
- (0xFC3E, 'M', u'كي'),
- ]
-
-def _seg_44():
- return [
- (0xFC3F, 'M', u'لج'),
- (0xFC40, 'M', u'لح'),
- (0xFC41, 'M', u'لخ'),
- (0xFC42, 'M', u'لم'),
- (0xFC43, 'M', u'لى'),
- (0xFC44, 'M', u'لي'),
- (0xFC45, 'M', u'مج'),
- (0xFC46, 'M', u'مح'),
- (0xFC47, 'M', u'مخ'),
- (0xFC48, 'M', u'مم'),
- (0xFC49, 'M', u'مى'),
- (0xFC4A, 'M', u'مي'),
- (0xFC4B, 'M', u'نج'),
- (0xFC4C, 'M', u'نح'),
- (0xFC4D, 'M', u'نخ'),
- (0xFC4E, 'M', u'نم'),
- (0xFC4F, 'M', u'نى'),
- (0xFC50, 'M', u'ني'),
- (0xFC51, 'M', u'هج'),
- (0xFC52, 'M', u'هم'),
- (0xFC53, 'M', u'هى'),
- (0xFC54, 'M', u'هي'),
- (0xFC55, 'M', u'يج'),
- (0xFC56, 'M', u'يح'),
- (0xFC57, 'M', u'يخ'),
- (0xFC58, 'M', u'يم'),
- (0xFC59, 'M', u'يى'),
- (0xFC5A, 'M', u'يي'),
- (0xFC5B, 'M', u'ذٰ'),
- (0xFC5C, 'M', u'رٰ'),
- (0xFC5D, 'M', u'ىٰ'),
- (0xFC5E, '3', u' ٌّ'),
- (0xFC5F, '3', u' ٍّ'),
- (0xFC60, '3', u' َّ'),
- (0xFC61, '3', u' ُّ'),
- (0xFC62, '3', u' ِّ'),
- (0xFC63, '3', u' ّٰ'),
- (0xFC64, 'M', u'ئر'),
- (0xFC65, 'M', u'ئز'),
- (0xFC66, 'M', u'ئم'),
- (0xFC67, 'M', u'ئن'),
- (0xFC68, 'M', u'ئى'),
- (0xFC69, 'M', u'ئي'),
- (0xFC6A, 'M', u'بر'),
- (0xFC6B, 'M', u'بز'),
- (0xFC6C, 'M', u'بم'),
- (0xFC6D, 'M', u'بن'),
- (0xFC6E, 'M', u'بى'),
- (0xFC6F, 'M', u'بي'),
- (0xFC70, 'M', u'تر'),
- (0xFC71, 'M', u'تز'),
- (0xFC72, 'M', u'تم'),
- (0xFC73, 'M', u'تن'),
- (0xFC74, 'M', u'تى'),
- (0xFC75, 'M', u'تي'),
- (0xFC76, 'M', u'ثر'),
- (0xFC77, 'M', u'ثز'),
- (0xFC78, 'M', u'ثم'),
- (0xFC79, 'M', u'ثن'),
- (0xFC7A, 'M', u'ثى'),
- (0xFC7B, 'M', u'ثي'),
- (0xFC7C, 'M', u'فى'),
- (0xFC7D, 'M', u'في'),
- (0xFC7E, 'M', u'قى'),
- (0xFC7F, 'M', u'قي'),
- (0xFC80, 'M', u'كا'),
- (0xFC81, 'M', u'كل'),
- (0xFC82, 'M', u'كم'),
- (0xFC83, 'M', u'كى'),
- (0xFC84, 'M', u'كي'),
- (0xFC85, 'M', u'لم'),
- (0xFC86, 'M', u'لى'),
- (0xFC87, 'M', u'لي'),
- (0xFC88, 'M', u'ما'),
- (0xFC89, 'M', u'مم'),
- (0xFC8A, 'M', u'نر'),
- (0xFC8B, 'M', u'نز'),
- (0xFC8C, 'M', u'نم'),
- (0xFC8D, 'M', u'نن'),
- (0xFC8E, 'M', u'نى'),
- (0xFC8F, 'M', u'ني'),
- (0xFC90, 'M', u'ىٰ'),
- (0xFC91, 'M', u'ير'),
- (0xFC92, 'M', u'يز'),
- (0xFC93, 'M', u'يم'),
- (0xFC94, 'M', u'ين'),
- (0xFC95, 'M', u'يى'),
- (0xFC96, 'M', u'يي'),
- (0xFC97, 'M', u'ئج'),
- (0xFC98, 'M', u'ئح'),
- (0xFC99, 'M', u'ئخ'),
- (0xFC9A, 'M', u'ئم'),
- (0xFC9B, 'M', u'ئه'),
- (0xFC9C, 'M', u'بج'),
- (0xFC9D, 'M', u'بح'),
- (0xFC9E, 'M', u'بخ'),
- (0xFC9F, 'M', u'بم'),
- (0xFCA0, 'M', u'به'),
- (0xFCA1, 'M', u'تج'),
- (0xFCA2, 'M', u'تح'),
- ]
-
-def _seg_45():
- return [
- (0xFCA3, 'M', u'تخ'),
- (0xFCA4, 'M', u'تم'),
- (0xFCA5, 'M', u'ته'),
- (0xFCA6, 'M', u'ثم'),
- (0xFCA7, 'M', u'جح'),
- (0xFCA8, 'M', u'جم'),
- (0xFCA9, 'M', u'حج'),
- (0xFCAA, 'M', u'حم'),
- (0xFCAB, 'M', u'خج'),
- (0xFCAC, 'M', u'خم'),
- (0xFCAD, 'M', u'سج'),
- (0xFCAE, 'M', u'سح'),
- (0xFCAF, 'M', u'سخ'),
- (0xFCB0, 'M', u'سم'),
- (0xFCB1, 'M', u'صح'),
- (0xFCB2, 'M', u'صخ'),
- (0xFCB3, 'M', u'صم'),
- (0xFCB4, 'M', u'ضج'),
- (0xFCB5, 'M', u'ضح'),
- (0xFCB6, 'M', u'ضخ'),
- (0xFCB7, 'M', u'ضم'),
- (0xFCB8, 'M', u'طح'),
- (0xFCB9, 'M', u'ظم'),
- (0xFCBA, 'M', u'عج'),
- (0xFCBB, 'M', u'عم'),
- (0xFCBC, 'M', u'غج'),
- (0xFCBD, 'M', u'غم'),
- (0xFCBE, 'M', u'فج'),
- (0xFCBF, 'M', u'فح'),
- (0xFCC0, 'M', u'فخ'),
- (0xFCC1, 'M', u'فم'),
- (0xFCC2, 'M', u'قح'),
- (0xFCC3, 'M', u'قم'),
- (0xFCC4, 'M', u'كج'),
- (0xFCC5, 'M', u'كح'),
- (0xFCC6, 'M', u'كخ'),
- (0xFCC7, 'M', u'كل'),
- (0xFCC8, 'M', u'كم'),
- (0xFCC9, 'M', u'لج'),
- (0xFCCA, 'M', u'لح'),
- (0xFCCB, 'M', u'لخ'),
- (0xFCCC, 'M', u'لم'),
- (0xFCCD, 'M', u'له'),
- (0xFCCE, 'M', u'مج'),
- (0xFCCF, 'M', u'مح'),
- (0xFCD0, 'M', u'مخ'),
- (0xFCD1, 'M', u'مم'),
- (0xFCD2, 'M', u'نج'),
- (0xFCD3, 'M', u'نح'),
- (0xFCD4, 'M', u'نخ'),
- (0xFCD5, 'M', u'نم'),
- (0xFCD6, 'M', u'نه'),
- (0xFCD7, 'M', u'هج'),
- (0xFCD8, 'M', u'هم'),
- (0xFCD9, 'M', u'هٰ'),
- (0xFCDA, 'M', u'يج'),
- (0xFCDB, 'M', u'يح'),
- (0xFCDC, 'M', u'يخ'),
- (0xFCDD, 'M', u'يم'),
- (0xFCDE, 'M', u'يه'),
- (0xFCDF, 'M', u'ئم'),
- (0xFCE0, 'M', u'ئه'),
- (0xFCE1, 'M', u'بم'),
- (0xFCE2, 'M', u'به'),
- (0xFCE3, 'M', u'تم'),
- (0xFCE4, 'M', u'ته'),
- (0xFCE5, 'M', u'ثم'),
- (0xFCE6, 'M', u'ثه'),
- (0xFCE7, 'M', u'سم'),
- (0xFCE8, 'M', u'سه'),
- (0xFCE9, 'M', u'شم'),
- (0xFCEA, 'M', u'شه'),
- (0xFCEB, 'M', u'كل'),
- (0xFCEC, 'M', u'كم'),
- (0xFCED, 'M', u'لم'),
- (0xFCEE, 'M', u'نم'),
- (0xFCEF, 'M', u'نه'),
- (0xFCF0, 'M', u'يم'),
- (0xFCF1, 'M', u'يه'),
- (0xFCF2, 'M', u'ـَّ'),
- (0xFCF3, 'M', u'ـُّ'),
- (0xFCF4, 'M', u'ـِّ'),
- (0xFCF5, 'M', u'طى'),
- (0xFCF6, 'M', u'طي'),
- (0xFCF7, 'M', u'عى'),
- (0xFCF8, 'M', u'عي'),
- (0xFCF9, 'M', u'غى'),
- (0xFCFA, 'M', u'غي'),
- (0xFCFB, 'M', u'سى'),
- (0xFCFC, 'M', u'سي'),
- (0xFCFD, 'M', u'شى'),
- (0xFCFE, 'M', u'شي'),
- (0xFCFF, 'M', u'حى'),
- (0xFD00, 'M', u'حي'),
- (0xFD01, 'M', u'جى'),
- (0xFD02, 'M', u'جي'),
- (0xFD03, 'M', u'خى'),
- (0xFD04, 'M', u'خي'),
- (0xFD05, 'M', u'صى'),
- (0xFD06, 'M', u'صي'),
- ]
-
-def _seg_46():
- return [
- (0xFD07, 'M', u'ضى'),
- (0xFD08, 'M', u'ضي'),
- (0xFD09, 'M', u'شج'),
- (0xFD0A, 'M', u'شح'),
- (0xFD0B, 'M', u'شخ'),
- (0xFD0C, 'M', u'شم'),
- (0xFD0D, 'M', u'شر'),
- (0xFD0E, 'M', u'سر'),
- (0xFD0F, 'M', u'صر'),
- (0xFD10, 'M', u'ضر'),
- (0xFD11, 'M', u'طى'),
- (0xFD12, 'M', u'طي'),
- (0xFD13, 'M', u'عى'),
- (0xFD14, 'M', u'عي'),
- (0xFD15, 'M', u'غى'),
- (0xFD16, 'M', u'غي'),
- (0xFD17, 'M', u'سى'),
- (0xFD18, 'M', u'سي'),
- (0xFD19, 'M', u'شى'),
- (0xFD1A, 'M', u'شي'),
- (0xFD1B, 'M', u'حى'),
- (0xFD1C, 'M', u'حي'),
- (0xFD1D, 'M', u'جى'),
- (0xFD1E, 'M', u'جي'),
- (0xFD1F, 'M', u'خى'),
- (0xFD20, 'M', u'خي'),
- (0xFD21, 'M', u'صى'),
- (0xFD22, 'M', u'صي'),
- (0xFD23, 'M', u'ضى'),
- (0xFD24, 'M', u'ضي'),
- (0xFD25, 'M', u'شج'),
- (0xFD26, 'M', u'شح'),
- (0xFD27, 'M', u'شخ'),
- (0xFD28, 'M', u'شم'),
- (0xFD29, 'M', u'شر'),
- (0xFD2A, 'M', u'سر'),
- (0xFD2B, 'M', u'صر'),
- (0xFD2C, 'M', u'ضر'),
- (0xFD2D, 'M', u'شج'),
- (0xFD2E, 'M', u'شح'),
- (0xFD2F, 'M', u'شخ'),
- (0xFD30, 'M', u'شم'),
- (0xFD31, 'M', u'سه'),
- (0xFD32, 'M', u'شه'),
- (0xFD33, 'M', u'طم'),
- (0xFD34, 'M', u'سج'),
- (0xFD35, 'M', u'سح'),
- (0xFD36, 'M', u'سخ'),
- (0xFD37, 'M', u'شج'),
- (0xFD38, 'M', u'شح'),
- (0xFD39, 'M', u'شخ'),
- (0xFD3A, 'M', u'طم'),
- (0xFD3B, 'M', u'ظم'),
- (0xFD3C, 'M', u'اً'),
- (0xFD3E, 'V'),
- (0xFD40, 'X'),
- (0xFD50, 'M', u'تجم'),
- (0xFD51, 'M', u'تحج'),
- (0xFD53, 'M', u'تحم'),
- (0xFD54, 'M', u'تخم'),
- (0xFD55, 'M', u'تمج'),
- (0xFD56, 'M', u'تمح'),
- (0xFD57, 'M', u'تمخ'),
- (0xFD58, 'M', u'جمح'),
- (0xFD5A, 'M', u'حمي'),
- (0xFD5B, 'M', u'حمى'),
- (0xFD5C, 'M', u'سحج'),
- (0xFD5D, 'M', u'سجح'),
- (0xFD5E, 'M', u'سجى'),
- (0xFD5F, 'M', u'سمح'),
- (0xFD61, 'M', u'سمج'),
- (0xFD62, 'M', u'سمم'),
- (0xFD64, 'M', u'صحح'),
- (0xFD66, 'M', u'صمم'),
- (0xFD67, 'M', u'شحم'),
- (0xFD69, 'M', u'شجي'),
- (0xFD6A, 'M', u'شمخ'),
- (0xFD6C, 'M', u'شمم'),
- (0xFD6E, 'M', u'ضحى'),
- (0xFD6F, 'M', u'ضخم'),
- (0xFD71, 'M', u'طمح'),
- (0xFD73, 'M', u'طمم'),
- (0xFD74, 'M', u'طمي'),
- (0xFD75, 'M', u'عجم'),
- (0xFD76, 'M', u'عمم'),
- (0xFD78, 'M', u'عمى'),
- (0xFD79, 'M', u'غمم'),
- (0xFD7A, 'M', u'غمي'),
- (0xFD7B, 'M', u'غمى'),
- (0xFD7C, 'M', u'فخم'),
- (0xFD7E, 'M', u'قمح'),
- (0xFD7F, 'M', u'قمم'),
- (0xFD80, 'M', u'لحم'),
- (0xFD81, 'M', u'لحي'),
- (0xFD82, 'M', u'لحى'),
- (0xFD83, 'M', u'لجج'),
- (0xFD85, 'M', u'لخم'),
- (0xFD87, 'M', u'لمح'),
- (0xFD89, 'M', u'محج'),
- (0xFD8A, 'M', u'محم'),
- ]
-
-def _seg_47():
- return [
- (0xFD8B, 'M', u'محي'),
- (0xFD8C, 'M', u'مجح'),
- (0xFD8D, 'M', u'مجم'),
- (0xFD8E, 'M', u'مخج'),
- (0xFD8F, 'M', u'مخم'),
- (0xFD90, 'X'),
- (0xFD92, 'M', u'مجخ'),
- (0xFD93, 'M', u'همج'),
- (0xFD94, 'M', u'همم'),
- (0xFD95, 'M', u'نحم'),
- (0xFD96, 'M', u'نحى'),
- (0xFD97, 'M', u'نجم'),
- (0xFD99, 'M', u'نجى'),
- (0xFD9A, 'M', u'نمي'),
- (0xFD9B, 'M', u'نمى'),
- (0xFD9C, 'M', u'يمم'),
- (0xFD9E, 'M', u'بخي'),
- (0xFD9F, 'M', u'تجي'),
- (0xFDA0, 'M', u'تجى'),
- (0xFDA1, 'M', u'تخي'),
- (0xFDA2, 'M', u'تخى'),
- (0xFDA3, 'M', u'تمي'),
- (0xFDA4, 'M', u'تمى'),
- (0xFDA5, 'M', u'جمي'),
- (0xFDA6, 'M', u'جحى'),
- (0xFDA7, 'M', u'جمى'),
- (0xFDA8, 'M', u'سخى'),
- (0xFDA9, 'M', u'صحي'),
- (0xFDAA, 'M', u'شحي'),
- (0xFDAB, 'M', u'ضحي'),
- (0xFDAC, 'M', u'لجي'),
- (0xFDAD, 'M', u'لمي'),
- (0xFDAE, 'M', u'يحي'),
- (0xFDAF, 'M', u'يجي'),
- (0xFDB0, 'M', u'يمي'),
- (0xFDB1, 'M', u'ممي'),
- (0xFDB2, 'M', u'قمي'),
- (0xFDB3, 'M', u'نحي'),
- (0xFDB4, 'M', u'قمح'),
- (0xFDB5, 'M', u'لحم'),
- (0xFDB6, 'M', u'عمي'),
- (0xFDB7, 'M', u'كمي'),
- (0xFDB8, 'M', u'نجح'),
- (0xFDB9, 'M', u'مخي'),
- (0xFDBA, 'M', u'لجم'),
- (0xFDBB, 'M', u'كمم'),
- (0xFDBC, 'M', u'لجم'),
- (0xFDBD, 'M', u'نجح'),
- (0xFDBE, 'M', u'جحي'),
- (0xFDBF, 'M', u'حجي'),
- (0xFDC0, 'M', u'مجي'),
- (0xFDC1, 'M', u'فمي'),
- (0xFDC2, 'M', u'بحي'),
- (0xFDC3, 'M', u'كمم'),
- (0xFDC4, 'M', u'عجم'),
- (0xFDC5, 'M', u'صمم'),
- (0xFDC6, 'M', u'سخي'),
- (0xFDC7, 'M', u'نجي'),
- (0xFDC8, 'X'),
- (0xFDF0, 'M', u'صلے'),
- (0xFDF1, 'M', u'قلے'),
- (0xFDF2, 'M', u'الله'),
- (0xFDF3, 'M', u'اكبر'),
- (0xFDF4, 'M', u'محمد'),
- (0xFDF5, 'M', u'صلعم'),
- (0xFDF6, 'M', u'رسول'),
- (0xFDF7, 'M', u'عليه'),
- (0xFDF8, 'M', u'وسلم'),
- (0xFDF9, 'M', u'صلى'),
- (0xFDFA, '3', u'صلى الله عليه وسلم'),
- (0xFDFB, '3', u'جل جلاله'),
- (0xFDFC, 'M', u'ریال'),
- (0xFDFD, 'V'),
- (0xFDFE, 'X'),
- (0xFE00, 'I'),
- (0xFE10, '3', u','),
- (0xFE11, 'M', u'、'),
- (0xFE12, 'X'),
- (0xFE13, '3', u':'),
- (0xFE14, '3', u';'),
- (0xFE15, '3', u'!'),
- (0xFE16, '3', u'?'),
- (0xFE17, 'M', u'〖'),
- (0xFE18, 'M', u'〗'),
- (0xFE19, 'X'),
- (0xFE20, 'V'),
- (0xFE27, 'X'),
- (0xFE31, 'M', u'—'),
- (0xFE32, 'M', u'–'),
- (0xFE33, '3', u'_'),
- (0xFE35, '3', u'('),
- (0xFE36, '3', u')'),
- (0xFE37, '3', u'{'),
- (0xFE38, '3', u'}'),
- (0xFE39, 'M', u'〔'),
- (0xFE3A, 'M', u'〕'),
- (0xFE3B, 'M', u'【'),
- (0xFE3C, 'M', u'】'),
- (0xFE3D, 'M', u'《'),
- (0xFE3E, 'M', u'》'),
- ]
-
-def _seg_48():
- return [
- (0xFE3F, 'M', u'〈'),
- (0xFE40, 'M', u'〉'),
- (0xFE41, 'M', u'「'),
- (0xFE42, 'M', u'」'),
- (0xFE43, 'M', u'『'),
- (0xFE44, 'M', u'』'),
- (0xFE45, 'V'),
- (0xFE47, '3', u'['),
- (0xFE48, '3', u']'),
- (0xFE49, '3', u' ̅'),
- (0xFE4D, '3', u'_'),
- (0xFE50, '3', u','),
- (0xFE51, 'M', u'、'),
- (0xFE52, 'X'),
- (0xFE54, '3', u';'),
- (0xFE55, '3', u':'),
- (0xFE56, '3', u'?'),
- (0xFE57, '3', u'!'),
- (0xFE58, 'M', u'—'),
- (0xFE59, '3', u'('),
- (0xFE5A, '3', u')'),
- (0xFE5B, '3', u'{'),
- (0xFE5C, '3', u'}'),
- (0xFE5D, 'M', u'〔'),
- (0xFE5E, 'M', u'〕'),
- (0xFE5F, '3', u'#'),
- (0xFE60, '3', u'&'),
- (0xFE61, '3', u'*'),
- (0xFE62, '3', u'+'),
- (0xFE63, 'M', u'-'),
- (0xFE64, '3', u'<'),
- (0xFE65, '3', u'>'),
- (0xFE66, '3', u'='),
- (0xFE67, 'X'),
- (0xFE68, '3', u'\\'),
- (0xFE69, '3', u'$'),
- (0xFE6A, '3', u'%'),
- (0xFE6B, '3', u'@'),
- (0xFE6C, 'X'),
- (0xFE70, '3', u' ً'),
- (0xFE71, 'M', u'ـً'),
- (0xFE72, '3', u' ٌ'),
- (0xFE73, 'V'),
- (0xFE74, '3', u' ٍ'),
- (0xFE75, 'X'),
- (0xFE76, '3', u' َ'),
- (0xFE77, 'M', u'ـَ'),
- (0xFE78, '3', u' ُ'),
- (0xFE79, 'M', u'ـُ'),
- (0xFE7A, '3', u' ِ'),
- (0xFE7B, 'M', u'ـِ'),
- (0xFE7C, '3', u' ّ'),
- (0xFE7D, 'M', u'ـّ'),
- (0xFE7E, '3', u' ْ'),
- (0xFE7F, 'M', u'ـْ'),
- (0xFE80, 'M', u'ء'),
- (0xFE81, 'M', u'آ'),
- (0xFE83, 'M', u'أ'),
- (0xFE85, 'M', u'ؤ'),
- (0xFE87, 'M', u'إ'),
- (0xFE89, 'M', u'ئ'),
- (0xFE8D, 'M', u'ا'),
- (0xFE8F, 'M', u'ب'),
- (0xFE93, 'M', u'ة'),
- (0xFE95, 'M', u'ت'),
- (0xFE99, 'M', u'ث'),
- (0xFE9D, 'M', u'ج'),
- (0xFEA1, 'M', u'ح'),
- (0xFEA5, 'M', u'خ'),
- (0xFEA9, 'M', u'د'),
- (0xFEAB, 'M', u'ذ'),
- (0xFEAD, 'M', u'ر'),
- (0xFEAF, 'M', u'ز'),
- (0xFEB1, 'M', u'س'),
- (0xFEB5, 'M', u'ش'),
- (0xFEB9, 'M', u'ص'),
- (0xFEBD, 'M', u'ض'),
- (0xFEC1, 'M', u'ط'),
- (0xFEC5, 'M', u'ظ'),
- (0xFEC9, 'M', u'ع'),
- (0xFECD, 'M', u'غ'),
- (0xFED1, 'M', u'ف'),
- (0xFED5, 'M', u'ق'),
- (0xFED9, 'M', u'ك'),
- (0xFEDD, 'M', u'ل'),
- (0xFEE1, 'M', u'م'),
- (0xFEE5, 'M', u'ن'),
- (0xFEE9, 'M', u'ه'),
- (0xFEED, 'M', u'و'),
- (0xFEEF, 'M', u'ى'),
- (0xFEF1, 'M', u'ي'),
- (0xFEF5, 'M', u'لآ'),
- (0xFEF7, 'M', u'لأ'),
- (0xFEF9, 'M', u'لإ'),
- (0xFEFB, 'M', u'لا'),
- (0xFEFD, 'X'),
- (0xFEFF, 'I'),
- (0xFF00, 'X'),
- (0xFF01, '3', u'!'),
- (0xFF02, '3', u'"'),
- ]
-
-def _seg_49():
- return [
- (0xFF03, '3', u'#'),
- (0xFF04, '3', u'$'),
- (0xFF05, '3', u'%'),
- (0xFF06, '3', u'&'),
- (0xFF07, '3', u'\''),
- (0xFF08, '3', u'('),
- (0xFF09, '3', u')'),
- (0xFF0A, '3', u'*'),
- (0xFF0B, '3', u'+'),
- (0xFF0C, '3', u','),
- (0xFF0D, 'M', u'-'),
- (0xFF0E, 'M', u'.'),
- (0xFF0F, '3', u'/'),
- (0xFF10, 'M', u'0'),
- (0xFF11, 'M', u'1'),
- (0xFF12, 'M', u'2'),
- (0xFF13, 'M', u'3'),
- (0xFF14, 'M', u'4'),
- (0xFF15, 'M', u'5'),
- (0xFF16, 'M', u'6'),
- (0xFF17, 'M', u'7'),
- (0xFF18, 'M', u'8'),
- (0xFF19, 'M', u'9'),
- (0xFF1A, '3', u':'),
- (0xFF1B, '3', u';'),
- (0xFF1C, '3', u'<'),
- (0xFF1D, '3', u'='),
- (0xFF1E, '3', u'>'),
- (0xFF1F, '3', u'?'),
- (0xFF20, '3', u'@'),
- (0xFF21, 'M', u'a'),
- (0xFF22, 'M', u'b'),
- (0xFF23, 'M', u'c'),
- (0xFF24, 'M', u'd'),
- (0xFF25, 'M', u'e'),
- (0xFF26, 'M', u'f'),
- (0xFF27, 'M', u'g'),
- (0xFF28, 'M', u'h'),
- (0xFF29, 'M', u'i'),
- (0xFF2A, 'M', u'j'),
- (0xFF2B, 'M', u'k'),
- (0xFF2C, 'M', u'l'),
- (0xFF2D, 'M', u'm'),
- (0xFF2E, 'M', u'n'),
- (0xFF2F, 'M', u'o'),
- (0xFF30, 'M', u'p'),
- (0xFF31, 'M', u'q'),
- (0xFF32, 'M', u'r'),
- (0xFF33, 'M', u's'),
- (0xFF34, 'M', u't'),
- (0xFF35, 'M', u'u'),
- (0xFF36, 'M', u'v'),
- (0xFF37, 'M', u'w'),
- (0xFF38, 'M', u'x'),
- (0xFF39, 'M', u'y'),
- (0xFF3A, 'M', u'z'),
- (0xFF3B, '3', u'['),
- (0xFF3C, '3', u'\\'),
- (0xFF3D, '3', u']'),
- (0xFF3E, '3', u'^'),
- (0xFF3F, '3', u'_'),
- (0xFF40, '3', u'`'),
- (0xFF41, 'M', u'a'),
- (0xFF42, 'M', u'b'),
- (0xFF43, 'M', u'c'),
- (0xFF44, 'M', u'd'),
- (0xFF45, 'M', u'e'),
- (0xFF46, 'M', u'f'),
- (0xFF47, 'M', u'g'),
- (0xFF48, 'M', u'h'),
- (0xFF49, 'M', u'i'),
- (0xFF4A, 'M', u'j'),
- (0xFF4B, 'M', u'k'),
- (0xFF4C, 'M', u'l'),
- (0xFF4D, 'M', u'm'),
- (0xFF4E, 'M', u'n'),
- (0xFF4F, 'M', u'o'),
- (0xFF50, 'M', u'p'),
- (0xFF51, 'M', u'q'),
- (0xFF52, 'M', u'r'),
- (0xFF53, 'M', u's'),
- (0xFF54, 'M', u't'),
- (0xFF55, 'M', u'u'),
- (0xFF56, 'M', u'v'),
- (0xFF57, 'M', u'w'),
- (0xFF58, 'M', u'x'),
- (0xFF59, 'M', u'y'),
- (0xFF5A, 'M', u'z'),
- (0xFF5B, '3', u'{'),
- (0xFF5C, '3', u'|'),
- (0xFF5D, '3', u'}'),
- (0xFF5E, '3', u'~'),
- (0xFF5F, 'M', u'⦅'),
- (0xFF60, 'M', u'⦆'),
- (0xFF61, 'M', u'.'),
- (0xFF62, 'M', u'「'),
- (0xFF63, 'M', u'」'),
- (0xFF64, 'M', u'、'),
- (0xFF65, 'M', u'・'),
- (0xFF66, 'M', u'ヲ'),
- ]
-
-def _seg_50():
- return [
- (0xFF67, 'M', u'ァ'),
- (0xFF68, 'M', u'ィ'),
- (0xFF69, 'M', u'ゥ'),
- (0xFF6A, 'M', u'ェ'),
- (0xFF6B, 'M', u'ォ'),
- (0xFF6C, 'M', u'ャ'),
- (0xFF6D, 'M', u'ュ'),
- (0xFF6E, 'M', u'ョ'),
- (0xFF6F, 'M', u'ッ'),
- (0xFF70, 'M', u'ー'),
- (0xFF71, 'M', u'ア'),
- (0xFF72, 'M', u'イ'),
- (0xFF73, 'M', u'ウ'),
- (0xFF74, 'M', u'エ'),
- (0xFF75, 'M', u'オ'),
- (0xFF76, 'M', u'カ'),
- (0xFF77, 'M', u'キ'),
- (0xFF78, 'M', u'ク'),
- (0xFF79, 'M', u'ケ'),
- (0xFF7A, 'M', u'コ'),
- (0xFF7B, 'M', u'サ'),
- (0xFF7C, 'M', u'シ'),
- (0xFF7D, 'M', u'ス'),
- (0xFF7E, 'M', u'セ'),
- (0xFF7F, 'M', u'ソ'),
- (0xFF80, 'M', u'タ'),
- (0xFF81, 'M', u'チ'),
- (0xFF82, 'M', u'ツ'),
- (0xFF83, 'M', u'テ'),
- (0xFF84, 'M', u'ト'),
- (0xFF85, 'M', u'ナ'),
- (0xFF86, 'M', u'ニ'),
- (0xFF87, 'M', u'ヌ'),
- (0xFF88, 'M', u'ネ'),
- (0xFF89, 'M', u'ノ'),
- (0xFF8A, 'M', u'ハ'),
- (0xFF8B, 'M', u'ヒ'),
- (0xFF8C, 'M', u'フ'),
- (0xFF8D, 'M', u'ヘ'),
- (0xFF8E, 'M', u'ホ'),
- (0xFF8F, 'M', u'マ'),
- (0xFF90, 'M', u'ミ'),
- (0xFF91, 'M', u'ム'),
- (0xFF92, 'M', u'メ'),
- (0xFF93, 'M', u'モ'),
- (0xFF94, 'M', u'ヤ'),
- (0xFF95, 'M', u'ユ'),
- (0xFF96, 'M', u'ヨ'),
- (0xFF97, 'M', u'ラ'),
- (0xFF98, 'M', u'リ'),
- (0xFF99, 'M', u'ル'),
- (0xFF9A, 'M', u'レ'),
- (0xFF9B, 'M', u'ロ'),
- (0xFF9C, 'M', u'ワ'),
- (0xFF9D, 'M', u'ン'),
- (0xFF9E, 'M', u'゙'),
- (0xFF9F, 'M', u'゚'),
- (0xFFA0, 'X'),
- (0xFFA1, 'M', u'ᄀ'),
- (0xFFA2, 'M', u'ᄁ'),
- (0xFFA3, 'M', u'ᆪ'),
- (0xFFA4, 'M', u'ᄂ'),
- (0xFFA5, 'M', u'ᆬ'),
- (0xFFA6, 'M', u'ᆭ'),
- (0xFFA7, 'M', u'ᄃ'),
- (0xFFA8, 'M', u'ᄄ'),
- (0xFFA9, 'M', u'ᄅ'),
- (0xFFAA, 'M', u'ᆰ'),
- (0xFFAB, 'M', u'ᆱ'),
- (0xFFAC, 'M', u'ᆲ'),
- (0xFFAD, 'M', u'ᆳ'),
- (0xFFAE, 'M', u'ᆴ'),
- (0xFFAF, 'M', u'ᆵ'),
- (0xFFB0, 'M', u'ᄚ'),
- (0xFFB1, 'M', u'ᄆ'),
- (0xFFB2, 'M', u'ᄇ'),
- (0xFFB3, 'M', u'ᄈ'),
- (0xFFB4, 'M', u'ᄡ'),
- (0xFFB5, 'M', u'ᄉ'),
- (0xFFB6, 'M', u'ᄊ'),
- (0xFFB7, 'M', u'ᄋ'),
- (0xFFB8, 'M', u'ᄌ'),
- (0xFFB9, 'M', u'ᄍ'),
- (0xFFBA, 'M', u'ᄎ'),
- (0xFFBB, 'M', u'ᄏ'),
- (0xFFBC, 'M', u'ᄐ'),
- (0xFFBD, 'M', u'ᄑ'),
- (0xFFBE, 'M', u'ᄒ'),
- (0xFFBF, 'X'),
- (0xFFC2, 'M', u'ᅡ'),
- (0xFFC3, 'M', u'ᅢ'),
- (0xFFC4, 'M', u'ᅣ'),
- (0xFFC5, 'M', u'ᅤ'),
- (0xFFC6, 'M', u'ᅥ'),
- (0xFFC7, 'M', u'ᅦ'),
- (0xFFC8, 'X'),
- (0xFFCA, 'M', u'ᅧ'),
- (0xFFCB, 'M', u'ᅨ'),
- (0xFFCC, 'M', u'ᅩ'),
- (0xFFCD, 'M', u'ᅪ'),
- ]
-
-def _seg_51():
- return [
- (0xFFCE, 'M', u'ᅫ'),
- (0xFFCF, 'M', u'ᅬ'),
- (0xFFD0, 'X'),
- (0xFFD2, 'M', u'ᅭ'),
- (0xFFD3, 'M', u'ᅮ'),
- (0xFFD4, 'M', u'ᅯ'),
- (0xFFD5, 'M', u'ᅰ'),
- (0xFFD6, 'M', u'ᅱ'),
- (0xFFD7, 'M', u'ᅲ'),
- (0xFFD8, 'X'),
- (0xFFDA, 'M', u'ᅳ'),
- (0xFFDB, 'M', u'ᅴ'),
- (0xFFDC, 'M', u'ᅵ'),
- (0xFFDD, 'X'),
- (0xFFE0, 'M', u'¢'),
- (0xFFE1, 'M', u'£'),
- (0xFFE2, 'M', u'¬'),
- (0xFFE3, '3', u' ̄'),
- (0xFFE4, 'M', u'¦'),
- (0xFFE5, 'M', u'¥'),
- (0xFFE6, 'M', u'₩'),
- (0xFFE7, 'X'),
- (0xFFE8, 'M', u'│'),
- (0xFFE9, 'M', u'←'),
- (0xFFEA, 'M', u'↑'),
- (0xFFEB, 'M', u'→'),
- (0xFFEC, 'M', u'↓'),
- (0xFFED, 'M', u'■'),
- (0xFFEE, 'M', u'○'),
- (0xFFEF, 'X'),
- (0x10000, 'V'),
- (0x1000C, 'X'),
- (0x1000D, 'V'),
- (0x10027, 'X'),
- (0x10028, 'V'),
- (0x1003B, 'X'),
- (0x1003C, 'V'),
- (0x1003E, 'X'),
- (0x1003F, 'V'),
- (0x1004E, 'X'),
- (0x10050, 'V'),
- (0x1005E, 'X'),
- (0x10080, 'V'),
- (0x100FB, 'X'),
- (0x10100, 'V'),
- (0x10103, 'X'),
- (0x10107, 'V'),
- (0x10134, 'X'),
- (0x10137, 'V'),
- (0x1018B, 'X'),
- (0x10190, 'V'),
- (0x1019C, 'X'),
- (0x101D0, 'V'),
- (0x101FE, 'X'),
- (0x10280, 'V'),
- (0x1029D, 'X'),
- (0x102A0, 'V'),
- (0x102D1, 'X'),
- (0x10300, 'V'),
- (0x1031F, 'X'),
- (0x10320, 'V'),
- (0x10324, 'X'),
- (0x10330, 'V'),
- (0x1034B, 'X'),
- (0x10380, 'V'),
- (0x1039E, 'X'),
- (0x1039F, 'V'),
- (0x103C4, 'X'),
- (0x103C8, 'V'),
- (0x103D6, 'X'),
- (0x10400, 'M', u'𐐨'),
- (0x10401, 'M', u'𐐩'),
- (0x10402, 'M', u'𐐪'),
- (0x10403, 'M', u'𐐫'),
- (0x10404, 'M', u'𐐬'),
- (0x10405, 'M', u'𐐭'),
- (0x10406, 'M', u'𐐮'),
- (0x10407, 'M', u'𐐯'),
- (0x10408, 'M', u'𐐰'),
- (0x10409, 'M', u'𐐱'),
- (0x1040A, 'M', u'𐐲'),
- (0x1040B, 'M', u'𐐳'),
- (0x1040C, 'M', u'𐐴'),
- (0x1040D, 'M', u'𐐵'),
- (0x1040E, 'M', u'𐐶'),
- (0x1040F, 'M', u'𐐷'),
- (0x10410, 'M', u'𐐸'),
- (0x10411, 'M', u'𐐹'),
- (0x10412, 'M', u'𐐺'),
- (0x10413, 'M', u'𐐻'),
- (0x10414, 'M', u'𐐼'),
- (0x10415, 'M', u'𐐽'),
- (0x10416, 'M', u'𐐾'),
- (0x10417, 'M', u'𐐿'),
- (0x10418, 'M', u'𐑀'),
- (0x10419, 'M', u'𐑁'),
- (0x1041A, 'M', u'𐑂'),
- (0x1041B, 'M', u'𐑃'),
- (0x1041C, 'M', u'𐑄'),
- (0x1041D, 'M', u'𐑅'),
- ]
-
-def _seg_52():
- return [
- (0x1041E, 'M', u'𐑆'),
- (0x1041F, 'M', u'𐑇'),
- (0x10420, 'M', u'𐑈'),
- (0x10421, 'M', u'𐑉'),
- (0x10422, 'M', u'𐑊'),
- (0x10423, 'M', u'𐑋'),
- (0x10424, 'M', u'𐑌'),
- (0x10425, 'M', u'𐑍'),
- (0x10426, 'M', u'𐑎'),
- (0x10427, 'M', u'𐑏'),
- (0x10428, 'V'),
- (0x1049E, 'X'),
- (0x104A0, 'V'),
- (0x104AA, 'X'),
- (0x10800, 'V'),
- (0x10806, 'X'),
- (0x10808, 'V'),
- (0x10809, 'X'),
- (0x1080A, 'V'),
- (0x10836, 'X'),
- (0x10837, 'V'),
- (0x10839, 'X'),
- (0x1083C, 'V'),
- (0x1083D, 'X'),
- (0x1083F, 'V'),
- (0x10856, 'X'),
- (0x10857, 'V'),
- (0x10860, 'X'),
- (0x10900, 'V'),
- (0x1091C, 'X'),
- (0x1091F, 'V'),
- (0x1093A, 'X'),
- (0x1093F, 'V'),
- (0x10940, 'X'),
- (0x10980, 'V'),
- (0x109B8, 'X'),
- (0x109BE, 'V'),
- (0x109C0, 'X'),
- (0x10A00, 'V'),
- (0x10A04, 'X'),
- (0x10A05, 'V'),
- (0x10A07, 'X'),
- (0x10A0C, 'V'),
- (0x10A14, 'X'),
- (0x10A15, 'V'),
- (0x10A18, 'X'),
- (0x10A19, 'V'),
- (0x10A34, 'X'),
- (0x10A38, 'V'),
- (0x10A3B, 'X'),
- (0x10A3F, 'V'),
- (0x10A48, 'X'),
- (0x10A50, 'V'),
- (0x10A59, 'X'),
- (0x10A60, 'V'),
- (0x10A80, 'X'),
- (0x10B00, 'V'),
- (0x10B36, 'X'),
- (0x10B39, 'V'),
- (0x10B56, 'X'),
- (0x10B58, 'V'),
- (0x10B73, 'X'),
- (0x10B78, 'V'),
- (0x10B80, 'X'),
- (0x10C00, 'V'),
- (0x10C49, 'X'),
- (0x10E60, 'V'),
- (0x10E7F, 'X'),
- (0x11000, 'V'),
- (0x1104E, 'X'),
- (0x11052, 'V'),
- (0x11070, 'X'),
- (0x11080, 'V'),
- (0x110BD, 'X'),
- (0x110BE, 'V'),
- (0x110C2, 'X'),
- (0x110D0, 'V'),
- (0x110E9, 'X'),
- (0x110F0, 'V'),
- (0x110FA, 'X'),
- (0x11100, 'V'),
- (0x11135, 'X'),
- (0x11136, 'V'),
- (0x11144, 'X'),
- (0x11180, 'V'),
- (0x111C9, 'X'),
- (0x111D0, 'V'),
- (0x111DA, 'X'),
- (0x11680, 'V'),
- (0x116B8, 'X'),
- (0x116C0, 'V'),
- (0x116CA, 'X'),
- (0x12000, 'V'),
- (0x1236F, 'X'),
- (0x12400, 'V'),
- (0x12463, 'X'),
- (0x12470, 'V'),
- (0x12474, 'X'),
- (0x13000, 'V'),
- (0x1342F, 'X'),
- ]
-
-def _seg_53():
- return [
- (0x16800, 'V'),
- (0x16A39, 'X'),
- (0x16F00, 'V'),
- (0x16F45, 'X'),
- (0x16F50, 'V'),
- (0x16F7F, 'X'),
- (0x16F8F, 'V'),
- (0x16FA0, 'X'),
- (0x1B000, 'V'),
- (0x1B002, 'X'),
- (0x1D000, 'V'),
- (0x1D0F6, 'X'),
- (0x1D100, 'V'),
- (0x1D127, 'X'),
- (0x1D129, 'V'),
- (0x1D15E, 'M', u'𝅗𝅥'),
- (0x1D15F, 'M', u'𝅘𝅥'),
- (0x1D160, 'M', u'𝅘𝅥𝅮'),
- (0x1D161, 'M', u'𝅘𝅥𝅯'),
- (0x1D162, 'M', u'𝅘𝅥𝅰'),
- (0x1D163, 'M', u'𝅘𝅥𝅱'),
- (0x1D164, 'M', u'𝅘𝅥𝅲'),
- (0x1D165, 'V'),
- (0x1D173, 'X'),
- (0x1D17B, 'V'),
- (0x1D1BB, 'M', u'𝆹𝅥'),
- (0x1D1BC, 'M', u'𝆺𝅥'),
- (0x1D1BD, 'M', u'𝆹𝅥𝅮'),
- (0x1D1BE, 'M', u'𝆺𝅥𝅮'),
- (0x1D1BF, 'M', u'𝆹𝅥𝅯'),
- (0x1D1C0, 'M', u'𝆺𝅥𝅯'),
- (0x1D1C1, 'V'),
- (0x1D1DE, 'X'),
- (0x1D200, 'V'),
- (0x1D246, 'X'),
- (0x1D300, 'V'),
- (0x1D357, 'X'),
- (0x1D360, 'V'),
- (0x1D372, 'X'),
- (0x1D400, 'M', u'a'),
- (0x1D401, 'M', u'b'),
- (0x1D402, 'M', u'c'),
- (0x1D403, 'M', u'd'),
- (0x1D404, 'M', u'e'),
- (0x1D405, 'M', u'f'),
- (0x1D406, 'M', u'g'),
- (0x1D407, 'M', u'h'),
- (0x1D408, 'M', u'i'),
- (0x1D409, 'M', u'j'),
- (0x1D40A, 'M', u'k'),
- (0x1D40B, 'M', u'l'),
- (0x1D40C, 'M', u'm'),
- (0x1D40D, 'M', u'n'),
- (0x1D40E, 'M', u'o'),
- (0x1D40F, 'M', u'p'),
- (0x1D410, 'M', u'q'),
- (0x1D411, 'M', u'r'),
- (0x1D412, 'M', u's'),
- (0x1D413, 'M', u't'),
- (0x1D414, 'M', u'u'),
- (0x1D415, 'M', u'v'),
- (0x1D416, 'M', u'w'),
- (0x1D417, 'M', u'x'),
- (0x1D418, 'M', u'y'),
- (0x1D419, 'M', u'z'),
- (0x1D41A, 'M', u'a'),
- (0x1D41B, 'M', u'b'),
- (0x1D41C, 'M', u'c'),
- (0x1D41D, 'M', u'd'),
- (0x1D41E, 'M', u'e'),
- (0x1D41F, 'M', u'f'),
- (0x1D420, 'M', u'g'),
- (0x1D421, 'M', u'h'),
- (0x1D422, 'M', u'i'),
- (0x1D423, 'M', u'j'),
- (0x1D424, 'M', u'k'),
- (0x1D425, 'M', u'l'),
- (0x1D426, 'M', u'm'),
- (0x1D427, 'M', u'n'),
- (0x1D428, 'M', u'o'),
- (0x1D429, 'M', u'p'),
- (0x1D42A, 'M', u'q'),
- (0x1D42B, 'M', u'r'),
- (0x1D42C, 'M', u's'),
- (0x1D42D, 'M', u't'),
- (0x1D42E, 'M', u'u'),
- (0x1D42F, 'M', u'v'),
- (0x1D430, 'M', u'w'),
- (0x1D431, 'M', u'x'),
- (0x1D432, 'M', u'y'),
- (0x1D433, 'M', u'z'),
- (0x1D434, 'M', u'a'),
- (0x1D435, 'M', u'b'),
- (0x1D436, 'M', u'c'),
- (0x1D437, 'M', u'd'),
- (0x1D438, 'M', u'e'),
- (0x1D439, 'M', u'f'),
- (0x1D43A, 'M', u'g'),
- (0x1D43B, 'M', u'h'),
- (0x1D43C, 'M', u'i'),
- ]
-
-def _seg_54():
- return [
- (0x1D43D, 'M', u'j'),
- (0x1D43E, 'M', u'k'),
- (0x1D43F, 'M', u'l'),
- (0x1D440, 'M', u'm'),
- (0x1D441, 'M', u'n'),
- (0x1D442, 'M', u'o'),
- (0x1D443, 'M', u'p'),
- (0x1D444, 'M', u'q'),
- (0x1D445, 'M', u'r'),
- (0x1D446, 'M', u's'),
- (0x1D447, 'M', u't'),
- (0x1D448, 'M', u'u'),
- (0x1D449, 'M', u'v'),
- (0x1D44A, 'M', u'w'),
- (0x1D44B, 'M', u'x'),
- (0x1D44C, 'M', u'y'),
- (0x1D44D, 'M', u'z'),
- (0x1D44E, 'M', u'a'),
- (0x1D44F, 'M', u'b'),
- (0x1D450, 'M', u'c'),
- (0x1D451, 'M', u'd'),
- (0x1D452, 'M', u'e'),
- (0x1D453, 'M', u'f'),
- (0x1D454, 'M', u'g'),
- (0x1D455, 'X'),
- (0x1D456, 'M', u'i'),
- (0x1D457, 'M', u'j'),
- (0x1D458, 'M', u'k'),
- (0x1D459, 'M', u'l'),
- (0x1D45A, 'M', u'm'),
- (0x1D45B, 'M', u'n'),
- (0x1D45C, 'M', u'o'),
- (0x1D45D, 'M', u'p'),
- (0x1D45E, 'M', u'q'),
- (0x1D45F, 'M', u'r'),
- (0x1D460, 'M', u's'),
- (0x1D461, 'M', u't'),
- (0x1D462, 'M', u'u'),
- (0x1D463, 'M', u'v'),
- (0x1D464, 'M', u'w'),
- (0x1D465, 'M', u'x'),
- (0x1D466, 'M', u'y'),
- (0x1D467, 'M', u'z'),
- (0x1D468, 'M', u'a'),
- (0x1D469, 'M', u'b'),
- (0x1D46A, 'M', u'c'),
- (0x1D46B, 'M', u'd'),
- (0x1D46C, 'M', u'e'),
- (0x1D46D, 'M', u'f'),
- (0x1D46E, 'M', u'g'),
- (0x1D46F, 'M', u'h'),
- (0x1D470, 'M', u'i'),
- (0x1D471, 'M', u'j'),
- (0x1D472, 'M', u'k'),
- (0x1D473, 'M', u'l'),
- (0x1D474, 'M', u'm'),
- (0x1D475, 'M', u'n'),
- (0x1D476, 'M', u'o'),
- (0x1D477, 'M', u'p'),
- (0x1D478, 'M', u'q'),
- (0x1D479, 'M', u'r'),
- (0x1D47A, 'M', u's'),
- (0x1D47B, 'M', u't'),
- (0x1D47C, 'M', u'u'),
- (0x1D47D, 'M', u'v'),
- (0x1D47E, 'M', u'w'),
- (0x1D47F, 'M', u'x'),
- (0x1D480, 'M', u'y'),
- (0x1D481, 'M', u'z'),
- (0x1D482, 'M', u'a'),
- (0x1D483, 'M', u'b'),
- (0x1D484, 'M', u'c'),
- (0x1D485, 'M', u'd'),
- (0x1D486, 'M', u'e'),
- (0x1D487, 'M', u'f'),
- (0x1D488, 'M', u'g'),
- (0x1D489, 'M', u'h'),
- (0x1D48A, 'M', u'i'),
- (0x1D48B, 'M', u'j'),
- (0x1D48C, 'M', u'k'),
- (0x1D48D, 'M', u'l'),
- (0x1D48E, 'M', u'm'),
- (0x1D48F, 'M', u'n'),
- (0x1D490, 'M', u'o'),
- (0x1D491, 'M', u'p'),
- (0x1D492, 'M', u'q'),
- (0x1D493, 'M', u'r'),
- (0x1D494, 'M', u's'),
- (0x1D495, 'M', u't'),
- (0x1D496, 'M', u'u'),
- (0x1D497, 'M', u'v'),
- (0x1D498, 'M', u'w'),
- (0x1D499, 'M', u'x'),
- (0x1D49A, 'M', u'y'),
- (0x1D49B, 'M', u'z'),
- (0x1D49C, 'M', u'a'),
- (0x1D49D, 'X'),
- (0x1D49E, 'M', u'c'),
- (0x1D49F, 'M', u'd'),
- (0x1D4A0, 'X'),
- ]
-
-def _seg_55():
- return [
- (0x1D4A2, 'M', u'g'),
- (0x1D4A3, 'X'),
- (0x1D4A5, 'M', u'j'),
- (0x1D4A6, 'M', u'k'),
- (0x1D4A7, 'X'),
- (0x1D4A9, 'M', u'n'),
- (0x1D4AA, 'M', u'o'),
- (0x1D4AB, 'M', u'p'),
- (0x1D4AC, 'M', u'q'),
- (0x1D4AD, 'X'),
- (0x1D4AE, 'M', u's'),
- (0x1D4AF, 'M', u't'),
- (0x1D4B0, 'M', u'u'),
- (0x1D4B1, 'M', u'v'),
- (0x1D4B2, 'M', u'w'),
- (0x1D4B3, 'M', u'x'),
- (0x1D4B4, 'M', u'y'),
- (0x1D4B5, 'M', u'z'),
- (0x1D4B6, 'M', u'a'),
- (0x1D4B7, 'M', u'b'),
- (0x1D4B8, 'M', u'c'),
- (0x1D4B9, 'M', u'd'),
- (0x1D4BA, 'X'),
- (0x1D4BB, 'M', u'f'),
- (0x1D4BC, 'X'),
- (0x1D4BD, 'M', u'h'),
- (0x1D4BE, 'M', u'i'),
- (0x1D4BF, 'M', u'j'),
- (0x1D4C0, 'M', u'k'),
- (0x1D4C1, 'M', u'l'),
- (0x1D4C2, 'M', u'm'),
- (0x1D4C3, 'M', u'n'),
- (0x1D4C4, 'X'),
- (0x1D4C5, 'M', u'p'),
- (0x1D4C6, 'M', u'q'),
- (0x1D4C7, 'M', u'r'),
- (0x1D4C8, 'M', u's'),
- (0x1D4C9, 'M', u't'),
- (0x1D4CA, 'M', u'u'),
- (0x1D4CB, 'M', u'v'),
- (0x1D4CC, 'M', u'w'),
- (0x1D4CD, 'M', u'x'),
- (0x1D4CE, 'M', u'y'),
- (0x1D4CF, 'M', u'z'),
- (0x1D4D0, 'M', u'a'),
- (0x1D4D1, 'M', u'b'),
- (0x1D4D2, 'M', u'c'),
- (0x1D4D3, 'M', u'd'),
- (0x1D4D4, 'M', u'e'),
- (0x1D4D5, 'M', u'f'),
- (0x1D4D6, 'M', u'g'),
- (0x1D4D7, 'M', u'h'),
- (0x1D4D8, 'M', u'i'),
- (0x1D4D9, 'M', u'j'),
- (0x1D4DA, 'M', u'k'),
- (0x1D4DB, 'M', u'l'),
- (0x1D4DC, 'M', u'm'),
- (0x1D4DD, 'M', u'n'),
- (0x1D4DE, 'M', u'o'),
- (0x1D4DF, 'M', u'p'),
- (0x1D4E0, 'M', u'q'),
- (0x1D4E1, 'M', u'r'),
- (0x1D4E2, 'M', u's'),
- (0x1D4E3, 'M', u't'),
- (0x1D4E4, 'M', u'u'),
- (0x1D4E5, 'M', u'v'),
- (0x1D4E6, 'M', u'w'),
- (0x1D4E7, 'M', u'x'),
- (0x1D4E8, 'M', u'y'),
- (0x1D4E9, 'M', u'z'),
- (0x1D4EA, 'M', u'a'),
- (0x1D4EB, 'M', u'b'),
- (0x1D4EC, 'M', u'c'),
- (0x1D4ED, 'M', u'd'),
- (0x1D4EE, 'M', u'e'),
- (0x1D4EF, 'M', u'f'),
- (0x1D4F0, 'M', u'g'),
- (0x1D4F1, 'M', u'h'),
- (0x1D4F2, 'M', u'i'),
- (0x1D4F3, 'M', u'j'),
- (0x1D4F4, 'M', u'k'),
- (0x1D4F5, 'M', u'l'),
- (0x1D4F6, 'M', u'm'),
- (0x1D4F7, 'M', u'n'),
- (0x1D4F8, 'M', u'o'),
- (0x1D4F9, 'M', u'p'),
- (0x1D4FA, 'M', u'q'),
- (0x1D4FB, 'M', u'r'),
- (0x1D4FC, 'M', u's'),
- (0x1D4FD, 'M', u't'),
- (0x1D4FE, 'M', u'u'),
- (0x1D4FF, 'M', u'v'),
- (0x1D500, 'M', u'w'),
- (0x1D501, 'M', u'x'),
- (0x1D502, 'M', u'y'),
- (0x1D503, 'M', u'z'),
- (0x1D504, 'M', u'a'),
- (0x1D505, 'M', u'b'),
- (0x1D506, 'X'),
- (0x1D507, 'M', u'd'),
- ]
-
-def _seg_56():
- return [
- (0x1D508, 'M', u'e'),
- (0x1D509, 'M', u'f'),
- (0x1D50A, 'M', u'g'),
- (0x1D50B, 'X'),
- (0x1D50D, 'M', u'j'),
- (0x1D50E, 'M', u'k'),
- (0x1D50F, 'M', u'l'),
- (0x1D510, 'M', u'm'),
- (0x1D511, 'M', u'n'),
- (0x1D512, 'M', u'o'),
- (0x1D513, 'M', u'p'),
- (0x1D514, 'M', u'q'),
- (0x1D515, 'X'),
- (0x1D516, 'M', u's'),
- (0x1D517, 'M', u't'),
- (0x1D518, 'M', u'u'),
- (0x1D519, 'M', u'v'),
- (0x1D51A, 'M', u'w'),
- (0x1D51B, 'M', u'x'),
- (0x1D51C, 'M', u'y'),
- (0x1D51D, 'X'),
- (0x1D51E, 'M', u'a'),
- (0x1D51F, 'M', u'b'),
- (0x1D520, 'M', u'c'),
- (0x1D521, 'M', u'd'),
- (0x1D522, 'M', u'e'),
- (0x1D523, 'M', u'f'),
- (0x1D524, 'M', u'g'),
- (0x1D525, 'M', u'h'),
- (0x1D526, 'M', u'i'),
- (0x1D527, 'M', u'j'),
- (0x1D528, 'M', u'k'),
- (0x1D529, 'M', u'l'),
- (0x1D52A, 'M', u'm'),
- (0x1D52B, 'M', u'n'),
- (0x1D52C, 'M', u'o'),
- (0x1D52D, 'M', u'p'),
- (0x1D52E, 'M', u'q'),
- (0x1D52F, 'M', u'r'),
- (0x1D530, 'M', u's'),
- (0x1D531, 'M', u't'),
- (0x1D532, 'M', u'u'),
- (0x1D533, 'M', u'v'),
- (0x1D534, 'M', u'w'),
- (0x1D535, 'M', u'x'),
- (0x1D536, 'M', u'y'),
- (0x1D537, 'M', u'z'),
- (0x1D538, 'M', u'a'),
- (0x1D539, 'M', u'b'),
- (0x1D53A, 'X'),
- (0x1D53B, 'M', u'd'),
- (0x1D53C, 'M', u'e'),
- (0x1D53D, 'M', u'f'),
- (0x1D53E, 'M', u'g'),
- (0x1D53F, 'X'),
- (0x1D540, 'M', u'i'),
- (0x1D541, 'M', u'j'),
- (0x1D542, 'M', u'k'),
- (0x1D543, 'M', u'l'),
- (0x1D544, 'M', u'm'),
- (0x1D545, 'X'),
- (0x1D546, 'M', u'o'),
- (0x1D547, 'X'),
- (0x1D54A, 'M', u's'),
- (0x1D54B, 'M', u't'),
- (0x1D54C, 'M', u'u'),
- (0x1D54D, 'M', u'v'),
- (0x1D54E, 'M', u'w'),
- (0x1D54F, 'M', u'x'),
- (0x1D550, 'M', u'y'),
- (0x1D551, 'X'),
- (0x1D552, 'M', u'a'),
- (0x1D553, 'M', u'b'),
- (0x1D554, 'M', u'c'),
- (0x1D555, 'M', u'd'),
- (0x1D556, 'M', u'e'),
- (0x1D557, 'M', u'f'),
- (0x1D558, 'M', u'g'),
- (0x1D559, 'M', u'h'),
- (0x1D55A, 'M', u'i'),
- (0x1D55B, 'M', u'j'),
- (0x1D55C, 'M', u'k'),
- (0x1D55D, 'M', u'l'),
- (0x1D55E, 'M', u'm'),
- (0x1D55F, 'M', u'n'),
- (0x1D560, 'M', u'o'),
- (0x1D561, 'M', u'p'),
- (0x1D562, 'M', u'q'),
- (0x1D563, 'M', u'r'),
- (0x1D564, 'M', u's'),
- (0x1D565, 'M', u't'),
- (0x1D566, 'M', u'u'),
- (0x1D567, 'M', u'v'),
- (0x1D568, 'M', u'w'),
- (0x1D569, 'M', u'x'),
- (0x1D56A, 'M', u'y'),
- (0x1D56B, 'M', u'z'),
- (0x1D56C, 'M', u'a'),
- (0x1D56D, 'M', u'b'),
- (0x1D56E, 'M', u'c'),
- ]
-
-def _seg_57():
- return [
- (0x1D56F, 'M', u'd'),
- (0x1D570, 'M', u'e'),
- (0x1D571, 'M', u'f'),
- (0x1D572, 'M', u'g'),
- (0x1D573, 'M', u'h'),
- (0x1D574, 'M', u'i'),
- (0x1D575, 'M', u'j'),
- (0x1D576, 'M', u'k'),
- (0x1D577, 'M', u'l'),
- (0x1D578, 'M', u'm'),
- (0x1D579, 'M', u'n'),
- (0x1D57A, 'M', u'o'),
- (0x1D57B, 'M', u'p'),
- (0x1D57C, 'M', u'q'),
- (0x1D57D, 'M', u'r'),
- (0x1D57E, 'M', u's'),
- (0x1D57F, 'M', u't'),
- (0x1D580, 'M', u'u'),
- (0x1D581, 'M', u'v'),
- (0x1D582, 'M', u'w'),
- (0x1D583, 'M', u'x'),
- (0x1D584, 'M', u'y'),
- (0x1D585, 'M', u'z'),
- (0x1D586, 'M', u'a'),
- (0x1D587, 'M', u'b'),
- (0x1D588, 'M', u'c'),
- (0x1D589, 'M', u'd'),
- (0x1D58A, 'M', u'e'),
- (0x1D58B, 'M', u'f'),
- (0x1D58C, 'M', u'g'),
- (0x1D58D, 'M', u'h'),
- (0x1D58E, 'M', u'i'),
- (0x1D58F, 'M', u'j'),
- (0x1D590, 'M', u'k'),
- (0x1D591, 'M', u'l'),
- (0x1D592, 'M', u'm'),
- (0x1D593, 'M', u'n'),
- (0x1D594, 'M', u'o'),
- (0x1D595, 'M', u'p'),
- (0x1D596, 'M', u'q'),
- (0x1D597, 'M', u'r'),
- (0x1D598, 'M', u's'),
- (0x1D599, 'M', u't'),
- (0x1D59A, 'M', u'u'),
- (0x1D59B, 'M', u'v'),
- (0x1D59C, 'M', u'w'),
- (0x1D59D, 'M', u'x'),
- (0x1D59E, 'M', u'y'),
- (0x1D59F, 'M', u'z'),
- (0x1D5A0, 'M', u'a'),
- (0x1D5A1, 'M', u'b'),
- (0x1D5A2, 'M', u'c'),
- (0x1D5A3, 'M', u'd'),
- (0x1D5A4, 'M', u'e'),
- (0x1D5A5, 'M', u'f'),
- (0x1D5A6, 'M', u'g'),
- (0x1D5A7, 'M', u'h'),
- (0x1D5A8, 'M', u'i'),
- (0x1D5A9, 'M', u'j'),
- (0x1D5AA, 'M', u'k'),
- (0x1D5AB, 'M', u'l'),
- (0x1D5AC, 'M', u'm'),
- (0x1D5AD, 'M', u'n'),
- (0x1D5AE, 'M', u'o'),
- (0x1D5AF, 'M', u'p'),
- (0x1D5B0, 'M', u'q'),
- (0x1D5B1, 'M', u'r'),
- (0x1D5B2, 'M', u's'),
- (0x1D5B3, 'M', u't'),
- (0x1D5B4, 'M', u'u'),
- (0x1D5B5, 'M', u'v'),
- (0x1D5B6, 'M', u'w'),
- (0x1D5B7, 'M', u'x'),
- (0x1D5B8, 'M', u'y'),
- (0x1D5B9, 'M', u'z'),
- (0x1D5BA, 'M', u'a'),
- (0x1D5BB, 'M', u'b'),
- (0x1D5BC, 'M', u'c'),
- (0x1D5BD, 'M', u'd'),
- (0x1D5BE, 'M', u'e'),
- (0x1D5BF, 'M', u'f'),
- (0x1D5C0, 'M', u'g'),
- (0x1D5C1, 'M', u'h'),
- (0x1D5C2, 'M', u'i'),
- (0x1D5C3, 'M', u'j'),
- (0x1D5C4, 'M', u'k'),
- (0x1D5C5, 'M', u'l'),
- (0x1D5C6, 'M', u'm'),
- (0x1D5C7, 'M', u'n'),
- (0x1D5C8, 'M', u'o'),
- (0x1D5C9, 'M', u'p'),
- (0x1D5CA, 'M', u'q'),
- (0x1D5CB, 'M', u'r'),
- (0x1D5CC, 'M', u's'),
- (0x1D5CD, 'M', u't'),
- (0x1D5CE, 'M', u'u'),
- (0x1D5CF, 'M', u'v'),
- (0x1D5D0, 'M', u'w'),
- (0x1D5D1, 'M', u'x'),
- (0x1D5D2, 'M', u'y'),
- ]
-
-def _seg_58():
- return [
- (0x1D5D3, 'M', u'z'),
- (0x1D5D4, 'M', u'a'),
- (0x1D5D5, 'M', u'b'),
- (0x1D5D6, 'M', u'c'),
- (0x1D5D7, 'M', u'd'),
- (0x1D5D8, 'M', u'e'),
- (0x1D5D9, 'M', u'f'),
- (0x1D5DA, 'M', u'g'),
- (0x1D5DB, 'M', u'h'),
- (0x1D5DC, 'M', u'i'),
- (0x1D5DD, 'M', u'j'),
- (0x1D5DE, 'M', u'k'),
- (0x1D5DF, 'M', u'l'),
- (0x1D5E0, 'M', u'm'),
- (0x1D5E1, 'M', u'n'),
- (0x1D5E2, 'M', u'o'),
- (0x1D5E3, 'M', u'p'),
- (0x1D5E4, 'M', u'q'),
- (0x1D5E5, 'M', u'r'),
- (0x1D5E6, 'M', u's'),
- (0x1D5E7, 'M', u't'),
- (0x1D5E8, 'M', u'u'),
- (0x1D5E9, 'M', u'v'),
- (0x1D5EA, 'M', u'w'),
- (0x1D5EB, 'M', u'x'),
- (0x1D5EC, 'M', u'y'),
- (0x1D5ED, 'M', u'z'),
- (0x1D5EE, 'M', u'a'),
- (0x1D5EF, 'M', u'b'),
- (0x1D5F0, 'M', u'c'),
- (0x1D5F1, 'M', u'd'),
- (0x1D5F2, 'M', u'e'),
- (0x1D5F3, 'M', u'f'),
- (0x1D5F4, 'M', u'g'),
- (0x1D5F5, 'M', u'h'),
- (0x1D5F6, 'M', u'i'),
- (0x1D5F7, 'M', u'j'),
- (0x1D5F8, 'M', u'k'),
- (0x1D5F9, 'M', u'l'),
- (0x1D5FA, 'M', u'm'),
- (0x1D5FB, 'M', u'n'),
- (0x1D5FC, 'M', u'o'),
- (0x1D5FD, 'M', u'p'),
- (0x1D5FE, 'M', u'q'),
- (0x1D5FF, 'M', u'r'),
- (0x1D600, 'M', u's'),
- (0x1D601, 'M', u't'),
- (0x1D602, 'M', u'u'),
- (0x1D603, 'M', u'v'),
- (0x1D604, 'M', u'w'),
- (0x1D605, 'M', u'x'),
- (0x1D606, 'M', u'y'),
- (0x1D607, 'M', u'z'),
- (0x1D608, 'M', u'a'),
- (0x1D609, 'M', u'b'),
- (0x1D60A, 'M', u'c'),
- (0x1D60B, 'M', u'd'),
- (0x1D60C, 'M', u'e'),
- (0x1D60D, 'M', u'f'),
- (0x1D60E, 'M', u'g'),
- (0x1D60F, 'M', u'h'),
- (0x1D610, 'M', u'i'),
- (0x1D611, 'M', u'j'),
- (0x1D612, 'M', u'k'),
- (0x1D613, 'M', u'l'),
- (0x1D614, 'M', u'm'),
- (0x1D615, 'M', u'n'),
- (0x1D616, 'M', u'o'),
- (0x1D617, 'M', u'p'),
- (0x1D618, 'M', u'q'),
- (0x1D619, 'M', u'r'),
- (0x1D61A, 'M', u's'),
- (0x1D61B, 'M', u't'),
- (0x1D61C, 'M', u'u'),
- (0x1D61D, 'M', u'v'),
- (0x1D61E, 'M', u'w'),
- (0x1D61F, 'M', u'x'),
- (0x1D620, 'M', u'y'),
- (0x1D621, 'M', u'z'),
- (0x1D622, 'M', u'a'),
- (0x1D623, 'M', u'b'),
- (0x1D624, 'M', u'c'),
- (0x1D625, 'M', u'd'),
- (0x1D626, 'M', u'e'),
- (0x1D627, 'M', u'f'),
- (0x1D628, 'M', u'g'),
- (0x1D629, 'M', u'h'),
- (0x1D62A, 'M', u'i'),
- (0x1D62B, 'M', u'j'),
- (0x1D62C, 'M', u'k'),
- (0x1D62D, 'M', u'l'),
- (0x1D62E, 'M', u'm'),
- (0x1D62F, 'M', u'n'),
- (0x1D630, 'M', u'o'),
- (0x1D631, 'M', u'p'),
- (0x1D632, 'M', u'q'),
- (0x1D633, 'M', u'r'),
- (0x1D634, 'M', u's'),
- (0x1D635, 'M', u't'),
- (0x1D636, 'M', u'u'),
- ]
-
-def _seg_59():
- return [
- (0x1D637, 'M', u'v'),
- (0x1D638, 'M', u'w'),
- (0x1D639, 'M', u'x'),
- (0x1D63A, 'M', u'y'),
- (0x1D63B, 'M', u'z'),
- (0x1D63C, 'M', u'a'),
- (0x1D63D, 'M', u'b'),
- (0x1D63E, 'M', u'c'),
- (0x1D63F, 'M', u'd'),
- (0x1D640, 'M', u'e'),
- (0x1D641, 'M', u'f'),
- (0x1D642, 'M', u'g'),
- (0x1D643, 'M', u'h'),
- (0x1D644, 'M', u'i'),
- (0x1D645, 'M', u'j'),
- (0x1D646, 'M', u'k'),
- (0x1D647, 'M', u'l'),
- (0x1D648, 'M', u'm'),
- (0x1D649, 'M', u'n'),
- (0x1D64A, 'M', u'o'),
- (0x1D64B, 'M', u'p'),
- (0x1D64C, 'M', u'q'),
- (0x1D64D, 'M', u'r'),
- (0x1D64E, 'M', u's'),
- (0x1D64F, 'M', u't'),
- (0x1D650, 'M', u'u'),
- (0x1D651, 'M', u'v'),
- (0x1D652, 'M', u'w'),
- (0x1D653, 'M', u'x'),
- (0x1D654, 'M', u'y'),
- (0x1D655, 'M', u'z'),
- (0x1D656, 'M', u'a'),
- (0x1D657, 'M', u'b'),
- (0x1D658, 'M', u'c'),
- (0x1D659, 'M', u'd'),
- (0x1D65A, 'M', u'e'),
- (0x1D65B, 'M', u'f'),
- (0x1D65C, 'M', u'g'),
- (0x1D65D, 'M', u'h'),
- (0x1D65E, 'M', u'i'),
- (0x1D65F, 'M', u'j'),
- (0x1D660, 'M', u'k'),
- (0x1D661, 'M', u'l'),
- (0x1D662, 'M', u'm'),
- (0x1D663, 'M', u'n'),
- (0x1D664, 'M', u'o'),
- (0x1D665, 'M', u'p'),
- (0x1D666, 'M', u'q'),
- (0x1D667, 'M', u'r'),
- (0x1D668, 'M', u's'),
- (0x1D669, 'M', u't'),
- (0x1D66A, 'M', u'u'),
- (0x1D66B, 'M', u'v'),
- (0x1D66C, 'M', u'w'),
- (0x1D66D, 'M', u'x'),
- (0x1D66E, 'M', u'y'),
- (0x1D66F, 'M', u'z'),
- (0x1D670, 'M', u'a'),
- (0x1D671, 'M', u'b'),
- (0x1D672, 'M', u'c'),
- (0x1D673, 'M', u'd'),
- (0x1D674, 'M', u'e'),
- (0x1D675, 'M', u'f'),
- (0x1D676, 'M', u'g'),
- (0x1D677, 'M', u'h'),
- (0x1D678, 'M', u'i'),
- (0x1D679, 'M', u'j'),
- (0x1D67A, 'M', u'k'),
- (0x1D67B, 'M', u'l'),
- (0x1D67C, 'M', u'm'),
- (0x1D67D, 'M', u'n'),
- (0x1D67E, 'M', u'o'),
- (0x1D67F, 'M', u'p'),
- (0x1D680, 'M', u'q'),
- (0x1D681, 'M', u'r'),
- (0x1D682, 'M', u's'),
- (0x1D683, 'M', u't'),
- (0x1D684, 'M', u'u'),
- (0x1D685, 'M', u'v'),
- (0x1D686, 'M', u'w'),
- (0x1D687, 'M', u'x'),
- (0x1D688, 'M', u'y'),
- (0x1D689, 'M', u'z'),
- (0x1D68A, 'M', u'a'),
- (0x1D68B, 'M', u'b'),
- (0x1D68C, 'M', u'c'),
- (0x1D68D, 'M', u'd'),
- (0x1D68E, 'M', u'e'),
- (0x1D68F, 'M', u'f'),
- (0x1D690, 'M', u'g'),
- (0x1D691, 'M', u'h'),
- (0x1D692, 'M', u'i'),
- (0x1D693, 'M', u'j'),
- (0x1D694, 'M', u'k'),
- (0x1D695, 'M', u'l'),
- (0x1D696, 'M', u'm'),
- (0x1D697, 'M', u'n'),
- (0x1D698, 'M', u'o'),
- (0x1D699, 'M', u'p'),
- (0x1D69A, 'M', u'q'),
- ]
-
-def _seg_60():
- return [
- (0x1D69B, 'M', u'r'),
- (0x1D69C, 'M', u's'),
- (0x1D69D, 'M', u't'),
- (0x1D69E, 'M', u'u'),
- (0x1D69F, 'M', u'v'),
- (0x1D6A0, 'M', u'w'),
- (0x1D6A1, 'M', u'x'),
- (0x1D6A2, 'M', u'y'),
- (0x1D6A3, 'M', u'z'),
- (0x1D6A4, 'M', u'ı'),
- (0x1D6A5, 'M', u'ȷ'),
- (0x1D6A6, 'X'),
- (0x1D6A8, 'M', u'α'),
- (0x1D6A9, 'M', u'β'),
- (0x1D6AA, 'M', u'γ'),
- (0x1D6AB, 'M', u'δ'),
- (0x1D6AC, 'M', u'ε'),
- (0x1D6AD, 'M', u'ζ'),
- (0x1D6AE, 'M', u'η'),
- (0x1D6AF, 'M', u'θ'),
- (0x1D6B0, 'M', u'ι'),
- (0x1D6B1, 'M', u'κ'),
- (0x1D6B2, 'M', u'λ'),
- (0x1D6B3, 'M', u'μ'),
- (0x1D6B4, 'M', u'ν'),
- (0x1D6B5, 'M', u'ξ'),
- (0x1D6B6, 'M', u'ο'),
- (0x1D6B7, 'M', u'π'),
- (0x1D6B8, 'M', u'ρ'),
- (0x1D6B9, 'M', u'θ'),
- (0x1D6BA, 'M', u'σ'),
- (0x1D6BB, 'M', u'τ'),
- (0x1D6BC, 'M', u'υ'),
- (0x1D6BD, 'M', u'φ'),
- (0x1D6BE, 'M', u'χ'),
- (0x1D6BF, 'M', u'ψ'),
- (0x1D6C0, 'M', u'ω'),
- (0x1D6C1, 'M', u'∇'),
- (0x1D6C2, 'M', u'α'),
- (0x1D6C3, 'M', u'β'),
- (0x1D6C4, 'M', u'γ'),
- (0x1D6C5, 'M', u'δ'),
- (0x1D6C6, 'M', u'ε'),
- (0x1D6C7, 'M', u'ζ'),
- (0x1D6C8, 'M', u'η'),
- (0x1D6C9, 'M', u'θ'),
- (0x1D6CA, 'M', u'ι'),
- (0x1D6CB, 'M', u'κ'),
- (0x1D6CC, 'M', u'λ'),
- (0x1D6CD, 'M', u'μ'),
- (0x1D6CE, 'M', u'ν'),
- (0x1D6CF, 'M', u'ξ'),
- (0x1D6D0, 'M', u'ο'),
- (0x1D6D1, 'M', u'π'),
- (0x1D6D2, 'M', u'ρ'),
- (0x1D6D3, 'M', u'σ'),
- (0x1D6D5, 'M', u'τ'),
- (0x1D6D6, 'M', u'υ'),
- (0x1D6D7, 'M', u'φ'),
- (0x1D6D8, 'M', u'χ'),
- (0x1D6D9, 'M', u'ψ'),
- (0x1D6DA, 'M', u'ω'),
- (0x1D6DB, 'M', u'∂'),
- (0x1D6DC, 'M', u'ε'),
- (0x1D6DD, 'M', u'θ'),
- (0x1D6DE, 'M', u'κ'),
- (0x1D6DF, 'M', u'φ'),
- (0x1D6E0, 'M', u'ρ'),
- (0x1D6E1, 'M', u'π'),
- (0x1D6E2, 'M', u'α'),
- (0x1D6E3, 'M', u'β'),
- (0x1D6E4, 'M', u'γ'),
- (0x1D6E5, 'M', u'δ'),
- (0x1D6E6, 'M', u'ε'),
- (0x1D6E7, 'M', u'ζ'),
- (0x1D6E8, 'M', u'η'),
- (0x1D6E9, 'M', u'θ'),
- (0x1D6EA, 'M', u'ι'),
- (0x1D6EB, 'M', u'κ'),
- (0x1D6EC, 'M', u'λ'),
- (0x1D6ED, 'M', u'μ'),
- (0x1D6EE, 'M', u'ν'),
- (0x1D6EF, 'M', u'ξ'),
- (0x1D6F0, 'M', u'ο'),
- (0x1D6F1, 'M', u'π'),
- (0x1D6F2, 'M', u'ρ'),
- (0x1D6F3, 'M', u'θ'),
- (0x1D6F4, 'M', u'σ'),
- (0x1D6F5, 'M', u'τ'),
- (0x1D6F6, 'M', u'υ'),
- (0x1D6F7, 'M', u'φ'),
- (0x1D6F8, 'M', u'χ'),
- (0x1D6F9, 'M', u'ψ'),
- (0x1D6FA, 'M', u'ω'),
- (0x1D6FB, 'M', u'∇'),
- (0x1D6FC, 'M', u'α'),
- (0x1D6FD, 'M', u'β'),
- (0x1D6FE, 'M', u'γ'),
- (0x1D6FF, 'M', u'δ'),
- (0x1D700, 'M', u'ε'),
- ]
-
-def _seg_61():
- return [
- (0x1D701, 'M', u'ζ'),
- (0x1D702, 'M', u'η'),
- (0x1D703, 'M', u'θ'),
- (0x1D704, 'M', u'ι'),
- (0x1D705, 'M', u'κ'),
- (0x1D706, 'M', u'λ'),
- (0x1D707, 'M', u'μ'),
- (0x1D708, 'M', u'ν'),
- (0x1D709, 'M', u'ξ'),
- (0x1D70A, 'M', u'ο'),
- (0x1D70B, 'M', u'π'),
- (0x1D70C, 'M', u'ρ'),
- (0x1D70D, 'M', u'σ'),
- (0x1D70F, 'M', u'τ'),
- (0x1D710, 'M', u'υ'),
- (0x1D711, 'M', u'φ'),
- (0x1D712, 'M', u'χ'),
- (0x1D713, 'M', u'ψ'),
- (0x1D714, 'M', u'ω'),
- (0x1D715, 'M', u'∂'),
- (0x1D716, 'M', u'ε'),
- (0x1D717, 'M', u'θ'),
- (0x1D718, 'M', u'κ'),
- (0x1D719, 'M', u'φ'),
- (0x1D71A, 'M', u'ρ'),
- (0x1D71B, 'M', u'π'),
- (0x1D71C, 'M', u'α'),
- (0x1D71D, 'M', u'β'),
- (0x1D71E, 'M', u'γ'),
- (0x1D71F, 'M', u'δ'),
- (0x1D720, 'M', u'ε'),
- (0x1D721, 'M', u'ζ'),
- (0x1D722, 'M', u'η'),
- (0x1D723, 'M', u'θ'),
- (0x1D724, 'M', u'ι'),
- (0x1D725, 'M', u'κ'),
- (0x1D726, 'M', u'λ'),
- (0x1D727, 'M', u'μ'),
- (0x1D728, 'M', u'ν'),
- (0x1D729, 'M', u'ξ'),
- (0x1D72A, 'M', u'ο'),
- (0x1D72B, 'M', u'π'),
- (0x1D72C, 'M', u'ρ'),
- (0x1D72D, 'M', u'θ'),
- (0x1D72E, 'M', u'σ'),
- (0x1D72F, 'M', u'τ'),
- (0x1D730, 'M', u'υ'),
- (0x1D731, 'M', u'φ'),
- (0x1D732, 'M', u'χ'),
- (0x1D733, 'M', u'ψ'),
- (0x1D734, 'M', u'ω'),
- (0x1D735, 'M', u'∇'),
- (0x1D736, 'M', u'α'),
- (0x1D737, 'M', u'β'),
- (0x1D738, 'M', u'γ'),
- (0x1D739, 'M', u'δ'),
- (0x1D73A, 'M', u'ε'),
- (0x1D73B, 'M', u'ζ'),
- (0x1D73C, 'M', u'η'),
- (0x1D73D, 'M', u'θ'),
- (0x1D73E, 'M', u'ι'),
- (0x1D73F, 'M', u'κ'),
- (0x1D740, 'M', u'λ'),
- (0x1D741, 'M', u'μ'),
- (0x1D742, 'M', u'ν'),
- (0x1D743, 'M', u'ξ'),
- (0x1D744, 'M', u'ο'),
- (0x1D745, 'M', u'π'),
- (0x1D746, 'M', u'ρ'),
- (0x1D747, 'M', u'σ'),
- (0x1D749, 'M', u'τ'),
- (0x1D74A, 'M', u'υ'),
- (0x1D74B, 'M', u'φ'),
- (0x1D74C, 'M', u'χ'),
- (0x1D74D, 'M', u'ψ'),
- (0x1D74E, 'M', u'ω'),
- (0x1D74F, 'M', u'∂'),
- (0x1D750, 'M', u'ε'),
- (0x1D751, 'M', u'θ'),
- (0x1D752, 'M', u'κ'),
- (0x1D753, 'M', u'φ'),
- (0x1D754, 'M', u'ρ'),
- (0x1D755, 'M', u'π'),
- (0x1D756, 'M', u'α'),
- (0x1D757, 'M', u'β'),
- (0x1D758, 'M', u'γ'),
- (0x1D759, 'M', u'δ'),
- (0x1D75A, 'M', u'ε'),
- (0x1D75B, 'M', u'ζ'),
- (0x1D75C, 'M', u'η'),
- (0x1D75D, 'M', u'θ'),
- (0x1D75E, 'M', u'ι'),
- (0x1D75F, 'M', u'κ'),
- (0x1D760, 'M', u'λ'),
- (0x1D761, 'M', u'μ'),
- (0x1D762, 'M', u'ν'),
- (0x1D763, 'M', u'ξ'),
- (0x1D764, 'M', u'ο'),
- (0x1D765, 'M', u'π'),
- (0x1D766, 'M', u'ρ'),
- ]
-
-def _seg_62():
- return [
- (0x1D767, 'M', u'θ'),
- (0x1D768, 'M', u'σ'),
- (0x1D769, 'M', u'τ'),
- (0x1D76A, 'M', u'υ'),
- (0x1D76B, 'M', u'φ'),
- (0x1D76C, 'M', u'χ'),
- (0x1D76D, 'M', u'ψ'),
- (0x1D76E, 'M', u'ω'),
- (0x1D76F, 'M', u'∇'),
- (0x1D770, 'M', u'α'),
- (0x1D771, 'M', u'β'),
- (0x1D772, 'M', u'γ'),
- (0x1D773, 'M', u'δ'),
- (0x1D774, 'M', u'ε'),
- (0x1D775, 'M', u'ζ'),
- (0x1D776, 'M', u'η'),
- (0x1D777, 'M', u'θ'),
- (0x1D778, 'M', u'ι'),
- (0x1D779, 'M', u'κ'),
- (0x1D77A, 'M', u'λ'),
- (0x1D77B, 'M', u'μ'),
- (0x1D77C, 'M', u'ν'),
- (0x1D77D, 'M', u'ξ'),
- (0x1D77E, 'M', u'ο'),
- (0x1D77F, 'M', u'π'),
- (0x1D780, 'M', u'ρ'),
- (0x1D781, 'M', u'σ'),
- (0x1D783, 'M', u'τ'),
- (0x1D784, 'M', u'υ'),
- (0x1D785, 'M', u'φ'),
- (0x1D786, 'M', u'χ'),
- (0x1D787, 'M', u'ψ'),
- (0x1D788, 'M', u'ω'),
- (0x1D789, 'M', u'∂'),
- (0x1D78A, 'M', u'ε'),
- (0x1D78B, 'M', u'θ'),
- (0x1D78C, 'M', u'κ'),
- (0x1D78D, 'M', u'φ'),
- (0x1D78E, 'M', u'ρ'),
- (0x1D78F, 'M', u'π'),
- (0x1D790, 'M', u'α'),
- (0x1D791, 'M', u'β'),
- (0x1D792, 'M', u'γ'),
- (0x1D793, 'M', u'δ'),
- (0x1D794, 'M', u'ε'),
- (0x1D795, 'M', u'ζ'),
- (0x1D796, 'M', u'η'),
- (0x1D797, 'M', u'θ'),
- (0x1D798, 'M', u'ι'),
- (0x1D799, 'M', u'κ'),
- (0x1D79A, 'M', u'λ'),
- (0x1D79B, 'M', u'μ'),
- (0x1D79C, 'M', u'ν'),
- (0x1D79D, 'M', u'ξ'),
- (0x1D79E, 'M', u'ο'),
- (0x1D79F, 'M', u'π'),
- (0x1D7A0, 'M', u'ρ'),
- (0x1D7A1, 'M', u'θ'),
- (0x1D7A2, 'M', u'σ'),
- (0x1D7A3, 'M', u'τ'),
- (0x1D7A4, 'M', u'υ'),
- (0x1D7A5, 'M', u'φ'),
- (0x1D7A6, 'M', u'χ'),
- (0x1D7A7, 'M', u'ψ'),
- (0x1D7A8, 'M', u'ω'),
- (0x1D7A9, 'M', u'∇'),
- (0x1D7AA, 'M', u'α'),
- (0x1D7AB, 'M', u'β'),
- (0x1D7AC, 'M', u'γ'),
- (0x1D7AD, 'M', u'δ'),
- (0x1D7AE, 'M', u'ε'),
- (0x1D7AF, 'M', u'ζ'),
- (0x1D7B0, 'M', u'η'),
- (0x1D7B1, 'M', u'θ'),
- (0x1D7B2, 'M', u'ι'),
- (0x1D7B3, 'M', u'κ'),
- (0x1D7B4, 'M', u'λ'),
- (0x1D7B5, 'M', u'μ'),
- (0x1D7B6, 'M', u'ν'),
- (0x1D7B7, 'M', u'ξ'),
- (0x1D7B8, 'M', u'ο'),
- (0x1D7B9, 'M', u'π'),
- (0x1D7BA, 'M', u'ρ'),
- (0x1D7BB, 'M', u'σ'),
- (0x1D7BD, 'M', u'τ'),
- (0x1D7BE, 'M', u'υ'),
- (0x1D7BF, 'M', u'φ'),
- (0x1D7C0, 'M', u'χ'),
- (0x1D7C1, 'M', u'ψ'),
- (0x1D7C2, 'M', u'ω'),
- (0x1D7C3, 'M', u'∂'),
- (0x1D7C4, 'M', u'ε'),
- (0x1D7C5, 'M', u'θ'),
- (0x1D7C6, 'M', u'κ'),
- (0x1D7C7, 'M', u'φ'),
- (0x1D7C8, 'M', u'ρ'),
- (0x1D7C9, 'M', u'π'),
- (0x1D7CA, 'M', u'ϝ'),
- (0x1D7CC, 'X'),
- (0x1D7CE, 'M', u'0'),
- ]
-
-def _seg_63():
- return [
- (0x1D7CF, 'M', u'1'),
- (0x1D7D0, 'M', u'2'),
- (0x1D7D1, 'M', u'3'),
- (0x1D7D2, 'M', u'4'),
- (0x1D7D3, 'M', u'5'),
- (0x1D7D4, 'M', u'6'),
- (0x1D7D5, 'M', u'7'),
- (0x1D7D6, 'M', u'8'),
- (0x1D7D7, 'M', u'9'),
- (0x1D7D8, 'M', u'0'),
- (0x1D7D9, 'M', u'1'),
- (0x1D7DA, 'M', u'2'),
- (0x1D7DB, 'M', u'3'),
- (0x1D7DC, 'M', u'4'),
- (0x1D7DD, 'M', u'5'),
- (0x1D7DE, 'M', u'6'),
- (0x1D7DF, 'M', u'7'),
- (0x1D7E0, 'M', u'8'),
- (0x1D7E1, 'M', u'9'),
- (0x1D7E2, 'M', u'0'),
- (0x1D7E3, 'M', u'1'),
- (0x1D7E4, 'M', u'2'),
- (0x1D7E5, 'M', u'3'),
- (0x1D7E6, 'M', u'4'),
- (0x1D7E7, 'M', u'5'),
- (0x1D7E8, 'M', u'6'),
- (0x1D7E9, 'M', u'7'),
- (0x1D7EA, 'M', u'8'),
- (0x1D7EB, 'M', u'9'),
- (0x1D7EC, 'M', u'0'),
- (0x1D7ED, 'M', u'1'),
- (0x1D7EE, 'M', u'2'),
- (0x1D7EF, 'M', u'3'),
- (0x1D7F0, 'M', u'4'),
- (0x1D7F1, 'M', u'5'),
- (0x1D7F2, 'M', u'6'),
- (0x1D7F3, 'M', u'7'),
- (0x1D7F4, 'M', u'8'),
- (0x1D7F5, 'M', u'9'),
- (0x1D7F6, 'M', u'0'),
- (0x1D7F7, 'M', u'1'),
- (0x1D7F8, 'M', u'2'),
- (0x1D7F9, 'M', u'3'),
- (0x1D7FA, 'M', u'4'),
- (0x1D7FB, 'M', u'5'),
- (0x1D7FC, 'M', u'6'),
- (0x1D7FD, 'M', u'7'),
- (0x1D7FE, 'M', u'8'),
- (0x1D7FF, 'M', u'9'),
- (0x1D800, 'X'),
- (0x1EE00, 'M', u'ا'),
- (0x1EE01, 'M', u'ب'),
- (0x1EE02, 'M', u'ج'),
- (0x1EE03, 'M', u'د'),
- (0x1EE04, 'X'),
- (0x1EE05, 'M', u'و'),
- (0x1EE06, 'M', u'ز'),
- (0x1EE07, 'M', u'ح'),
- (0x1EE08, 'M', u'ط'),
- (0x1EE09, 'M', u'ي'),
- (0x1EE0A, 'M', u'ك'),
- (0x1EE0B, 'M', u'ل'),
- (0x1EE0C, 'M', u'م'),
- (0x1EE0D, 'M', u'ن'),
- (0x1EE0E, 'M', u'س'),
- (0x1EE0F, 'M', u'ع'),
- (0x1EE10, 'M', u'ف'),
- (0x1EE11, 'M', u'ص'),
- (0x1EE12, 'M', u'ق'),
- (0x1EE13, 'M', u'ر'),
- (0x1EE14, 'M', u'ش'),
- (0x1EE15, 'M', u'ت'),
- (0x1EE16, 'M', u'ث'),
- (0x1EE17, 'M', u'خ'),
- (0x1EE18, 'M', u'ذ'),
- (0x1EE19, 'M', u'ض'),
- (0x1EE1A, 'M', u'ظ'),
- (0x1EE1B, 'M', u'غ'),
- (0x1EE1C, 'M', u'ٮ'),
- (0x1EE1D, 'M', u'ں'),
- (0x1EE1E, 'M', u'ڡ'),
- (0x1EE1F, 'M', u'ٯ'),
- (0x1EE20, 'X'),
- (0x1EE21, 'M', u'ب'),
- (0x1EE22, 'M', u'ج'),
- (0x1EE23, 'X'),
- (0x1EE24, 'M', u'ه'),
- (0x1EE25, 'X'),
- (0x1EE27, 'M', u'ح'),
- (0x1EE28, 'X'),
- (0x1EE29, 'M', u'ي'),
- (0x1EE2A, 'M', u'ك'),
- (0x1EE2B, 'M', u'ل'),
- (0x1EE2C, 'M', u'م'),
- (0x1EE2D, 'M', u'ن'),
- (0x1EE2E, 'M', u'س'),
- (0x1EE2F, 'M', u'ع'),
- (0x1EE30, 'M', u'ف'),
- (0x1EE31, 'M', u'ص'),
- (0x1EE32, 'M', u'ق'),
- ]
-
-def _seg_64():
- return [
- (0x1EE33, 'X'),
- (0x1EE34, 'M', u'ش'),
- (0x1EE35, 'M', u'ت'),
- (0x1EE36, 'M', u'ث'),
- (0x1EE37, 'M', u'خ'),
- (0x1EE38, 'X'),
- (0x1EE39, 'M', u'ض'),
- (0x1EE3A, 'X'),
- (0x1EE3B, 'M', u'غ'),
- (0x1EE3C, 'X'),
- (0x1EE42, 'M', u'ج'),
- (0x1EE43, 'X'),
- (0x1EE47, 'M', u'ح'),
- (0x1EE48, 'X'),
- (0x1EE49, 'M', u'ي'),
- (0x1EE4A, 'X'),
- (0x1EE4B, 'M', u'ل'),
- (0x1EE4C, 'X'),
- (0x1EE4D, 'M', u'ن'),
- (0x1EE4E, 'M', u'س'),
- (0x1EE4F, 'M', u'ع'),
- (0x1EE50, 'X'),
- (0x1EE51, 'M', u'ص'),
- (0x1EE52, 'M', u'ق'),
- (0x1EE53, 'X'),
- (0x1EE54, 'M', u'ش'),
- (0x1EE55, 'X'),
- (0x1EE57, 'M', u'خ'),
- (0x1EE58, 'X'),
- (0x1EE59, 'M', u'ض'),
- (0x1EE5A, 'X'),
- (0x1EE5B, 'M', u'غ'),
- (0x1EE5C, 'X'),
- (0x1EE5D, 'M', u'ں'),
- (0x1EE5E, 'X'),
- (0x1EE5F, 'M', u'ٯ'),
- (0x1EE60, 'X'),
- (0x1EE61, 'M', u'ب'),
- (0x1EE62, 'M', u'ج'),
- (0x1EE63, 'X'),
- (0x1EE64, 'M', u'ه'),
- (0x1EE65, 'X'),
- (0x1EE67, 'M', u'ح'),
- (0x1EE68, 'M', u'ط'),
- (0x1EE69, 'M', u'ي'),
- (0x1EE6A, 'M', u'ك'),
- (0x1EE6B, 'X'),
- (0x1EE6C, 'M', u'م'),
- (0x1EE6D, 'M', u'ن'),
- (0x1EE6E, 'M', u'س'),
- (0x1EE6F, 'M', u'ع'),
- (0x1EE70, 'M', u'ف'),
- (0x1EE71, 'M', u'ص'),
- (0x1EE72, 'M', u'ق'),
- (0x1EE73, 'X'),
- (0x1EE74, 'M', u'ش'),
- (0x1EE75, 'M', u'ت'),
- (0x1EE76, 'M', u'ث'),
- (0x1EE77, 'M', u'خ'),
- (0x1EE78, 'X'),
- (0x1EE79, 'M', u'ض'),
- (0x1EE7A, 'M', u'ظ'),
- (0x1EE7B, 'M', u'غ'),
- (0x1EE7C, 'M', u'ٮ'),
- (0x1EE7D, 'X'),
- (0x1EE7E, 'M', u'ڡ'),
- (0x1EE7F, 'X'),
- (0x1EE80, 'M', u'ا'),
- (0x1EE81, 'M', u'ب'),
- (0x1EE82, 'M', u'ج'),
- (0x1EE83, 'M', u'د'),
- (0x1EE84, 'M', u'ه'),
- (0x1EE85, 'M', u'و'),
- (0x1EE86, 'M', u'ز'),
- (0x1EE87, 'M', u'ح'),
- (0x1EE88, 'M', u'ط'),
- (0x1EE89, 'M', u'ي'),
- (0x1EE8A, 'X'),
- (0x1EE8B, 'M', u'ل'),
- (0x1EE8C, 'M', u'م'),
- (0x1EE8D, 'M', u'ن'),
- (0x1EE8E, 'M', u'س'),
- (0x1EE8F, 'M', u'ع'),
- (0x1EE90, 'M', u'ف'),
- (0x1EE91, 'M', u'ص'),
- (0x1EE92, 'M', u'ق'),
- (0x1EE93, 'M', u'ر'),
- (0x1EE94, 'M', u'ش'),
- (0x1EE95, 'M', u'ت'),
- (0x1EE96, 'M', u'ث'),
- (0x1EE97, 'M', u'خ'),
- (0x1EE98, 'M', u'ذ'),
- (0x1EE99, 'M', u'ض'),
- (0x1EE9A, 'M', u'ظ'),
- (0x1EE9B, 'M', u'غ'),
- (0x1EE9C, 'X'),
- (0x1EEA1, 'M', u'ب'),
- (0x1EEA2, 'M', u'ج'),
- (0x1EEA3, 'M', u'د'),
- (0x1EEA4, 'X'),
- ]
-
-def _seg_65():
- return [
- (0x1EEA5, 'M', u'و'),
- (0x1EEA6, 'M', u'ز'),
- (0x1EEA7, 'M', u'ح'),
- (0x1EEA8, 'M', u'ط'),
- (0x1EEA9, 'M', u'ي'),
- (0x1EEAA, 'X'),
- (0x1EEAB, 'M', u'ل'),
- (0x1EEAC, 'M', u'م'),
- (0x1EEAD, 'M', u'ن'),
- (0x1EEAE, 'M', u'س'),
- (0x1EEAF, 'M', u'ع'),
- (0x1EEB0, 'M', u'ف'),
- (0x1EEB1, 'M', u'ص'),
- (0x1EEB2, 'M', u'ق'),
- (0x1EEB3, 'M', u'ر'),
- (0x1EEB4, 'M', u'ش'),
- (0x1EEB5, 'M', u'ت'),
- (0x1EEB6, 'M', u'ث'),
- (0x1EEB7, 'M', u'خ'),
- (0x1EEB8, 'M', u'ذ'),
- (0x1EEB9, 'M', u'ض'),
- (0x1EEBA, 'M', u'ظ'),
- (0x1EEBB, 'M', u'غ'),
- (0x1EEBC, 'X'),
- (0x1EEF0, 'V'),
- (0x1EEF2, 'X'),
- (0x1F000, 'V'),
- (0x1F02C, 'X'),
- (0x1F030, 'V'),
- (0x1F094, 'X'),
- (0x1F0A0, 'V'),
- (0x1F0AF, 'X'),
- (0x1F0B1, 'V'),
- (0x1F0BF, 'X'),
- (0x1F0C1, 'V'),
- (0x1F0D0, 'X'),
- (0x1F0D1, 'V'),
- (0x1F0E0, 'X'),
- (0x1F101, '3', u'0,'),
- (0x1F102, '3', u'1,'),
- (0x1F103, '3', u'2,'),
- (0x1F104, '3', u'3,'),
- (0x1F105, '3', u'4,'),
- (0x1F106, '3', u'5,'),
- (0x1F107, '3', u'6,'),
- (0x1F108, '3', u'7,'),
- (0x1F109, '3', u'8,'),
- (0x1F10A, '3', u'9,'),
- (0x1F10B, 'X'),
- (0x1F110, '3', u'(a)'),
- (0x1F111, '3', u'(b)'),
- (0x1F112, '3', u'(c)'),
- (0x1F113, '3', u'(d)'),
- (0x1F114, '3', u'(e)'),
- (0x1F115, '3', u'(f)'),
- (0x1F116, '3', u'(g)'),
- (0x1F117, '3', u'(h)'),
- (0x1F118, '3', u'(i)'),
- (0x1F119, '3', u'(j)'),
- (0x1F11A, '3', u'(k)'),
- (0x1F11B, '3', u'(l)'),
- (0x1F11C, '3', u'(m)'),
- (0x1F11D, '3', u'(n)'),
- (0x1F11E, '3', u'(o)'),
- (0x1F11F, '3', u'(p)'),
- (0x1F120, '3', u'(q)'),
- (0x1F121, '3', u'(r)'),
- (0x1F122, '3', u'(s)'),
- (0x1F123, '3', u'(t)'),
- (0x1F124, '3', u'(u)'),
- (0x1F125, '3', u'(v)'),
- (0x1F126, '3', u'(w)'),
- (0x1F127, '3', u'(x)'),
- (0x1F128, '3', u'(y)'),
- (0x1F129, '3', u'(z)'),
- (0x1F12A, 'M', u'〔s〕'),
- (0x1F12B, 'M', u'c'),
- (0x1F12C, 'M', u'r'),
- (0x1F12D, 'M', u'cd'),
- (0x1F12E, 'M', u'wz'),
- (0x1F12F, 'X'),
- (0x1F130, 'M', u'a'),
- (0x1F131, 'M', u'b'),
- (0x1F132, 'M', u'c'),
- (0x1F133, 'M', u'd'),
- (0x1F134, 'M', u'e'),
- (0x1F135, 'M', u'f'),
- (0x1F136, 'M', u'g'),
- (0x1F137, 'M', u'h'),
- (0x1F138, 'M', u'i'),
- (0x1F139, 'M', u'j'),
- (0x1F13A, 'M', u'k'),
- (0x1F13B, 'M', u'l'),
- (0x1F13C, 'M', u'm'),
- (0x1F13D, 'M', u'n'),
- (0x1F13E, 'M', u'o'),
- (0x1F13F, 'M', u'p'),
- (0x1F140, 'M', u'q'),
- (0x1F141, 'M', u'r'),
- (0x1F142, 'M', u's'),
- ]
-
-def _seg_66():
- return [
- (0x1F143, 'M', u't'),
- (0x1F144, 'M', u'u'),
- (0x1F145, 'M', u'v'),
- (0x1F146, 'M', u'w'),
- (0x1F147, 'M', u'x'),
- (0x1F148, 'M', u'y'),
- (0x1F149, 'M', u'z'),
- (0x1F14A, 'M', u'hv'),
- (0x1F14B, 'M', u'mv'),
- (0x1F14C, 'M', u'sd'),
- (0x1F14D, 'M', u'ss'),
- (0x1F14E, 'M', u'ppv'),
- (0x1F14F, 'M', u'wc'),
- (0x1F150, 'V'),
- (0x1F16A, 'M', u'mc'),
- (0x1F16B, 'M', u'md'),
- (0x1F16C, 'X'),
- (0x1F170, 'V'),
- (0x1F190, 'M', u'dj'),
- (0x1F191, 'V'),
- (0x1F19B, 'X'),
- (0x1F1E6, 'V'),
- (0x1F200, 'M', u'ほか'),
- (0x1F201, 'M', u'ココ'),
- (0x1F202, 'M', u'サ'),
- (0x1F203, 'X'),
- (0x1F210, 'M', u'手'),
- (0x1F211, 'M', u'字'),
- (0x1F212, 'M', u'双'),
- (0x1F213, 'M', u'デ'),
- (0x1F214, 'M', u'二'),
- (0x1F215, 'M', u'多'),
- (0x1F216, 'M', u'解'),
- (0x1F217, 'M', u'天'),
- (0x1F218, 'M', u'交'),
- (0x1F219, 'M', u'映'),
- (0x1F21A, 'M', u'無'),
- (0x1F21B, 'M', u'料'),
- (0x1F21C, 'M', u'前'),
- (0x1F21D, 'M', u'後'),
- (0x1F21E, 'M', u'再'),
- (0x1F21F, 'M', u'新'),
- (0x1F220, 'M', u'初'),
- (0x1F221, 'M', u'終'),
- (0x1F222, 'M', u'生'),
- (0x1F223, 'M', u'販'),
- (0x1F224, 'M', u'声'),
- (0x1F225, 'M', u'吹'),
- (0x1F226, 'M', u'演'),
- (0x1F227, 'M', u'投'),
- (0x1F228, 'M', u'捕'),
- (0x1F229, 'M', u'一'),
- (0x1F22A, 'M', u'三'),
- (0x1F22B, 'M', u'遊'),
- (0x1F22C, 'M', u'左'),
- (0x1F22D, 'M', u'中'),
- (0x1F22E, 'M', u'右'),
- (0x1F22F, 'M', u'指'),
- (0x1F230, 'M', u'走'),
- (0x1F231, 'M', u'打'),
- (0x1F232, 'M', u'禁'),
- (0x1F233, 'M', u'空'),
- (0x1F234, 'M', u'合'),
- (0x1F235, 'M', u'満'),
- (0x1F236, 'M', u'有'),
- (0x1F237, 'M', u'月'),
- (0x1F238, 'M', u'申'),
- (0x1F239, 'M', u'割'),
- (0x1F23A, 'M', u'営'),
- (0x1F23B, 'X'),
- (0x1F240, 'M', u'〔本〕'),
- (0x1F241, 'M', u'〔三〕'),
- (0x1F242, 'M', u'〔二〕'),
- (0x1F243, 'M', u'〔安〕'),
- (0x1F244, 'M', u'〔点〕'),
- (0x1F245, 'M', u'〔打〕'),
- (0x1F246, 'M', u'〔盗〕'),
- (0x1F247, 'M', u'〔勝〕'),
- (0x1F248, 'M', u'〔敗〕'),
- (0x1F249, 'X'),
- (0x1F250, 'M', u'得'),
- (0x1F251, 'M', u'可'),
- (0x1F252, 'X'),
- (0x1F300, 'V'),
- (0x1F321, 'X'),
- (0x1F330, 'V'),
- (0x1F336, 'X'),
- (0x1F337, 'V'),
- (0x1F37D, 'X'),
- (0x1F380, 'V'),
- (0x1F394, 'X'),
- (0x1F3A0, 'V'),
- (0x1F3C5, 'X'),
- (0x1F3C6, 'V'),
- (0x1F3CB, 'X'),
- (0x1F3E0, 'V'),
- (0x1F3F1, 'X'),
- (0x1F400, 'V'),
- (0x1F43F, 'X'),
- (0x1F440, 'V'),
- ]
-
-def _seg_67():
- return [
- (0x1F441, 'X'),
- (0x1F442, 'V'),
- (0x1F4F8, 'X'),
- (0x1F4F9, 'V'),
- (0x1F4FD, 'X'),
- (0x1F500, 'V'),
- (0x1F53E, 'X'),
- (0x1F540, 'V'),
- (0x1F544, 'X'),
- (0x1F550, 'V'),
- (0x1F568, 'X'),
- (0x1F5FB, 'V'),
- (0x1F641, 'X'),
- (0x1F645, 'V'),
- (0x1F650, 'X'),
- (0x1F680, 'V'),
- (0x1F6C6, 'X'),
- (0x1F700, 'V'),
- (0x1F774, 'X'),
- (0x20000, 'V'),
- (0x2A6D7, 'X'),
- (0x2A700, 'V'),
- (0x2B735, 'X'),
- (0x2B740, 'V'),
- (0x2B81E, 'X'),
- (0x2F800, 'M', u'丽'),
- (0x2F801, 'M', u'丸'),
- (0x2F802, 'M', u'乁'),
- (0x2F803, 'M', u'𠄢'),
- (0x2F804, 'M', u'你'),
- (0x2F805, 'M', u'侮'),
- (0x2F806, 'M', u'侻'),
- (0x2F807, 'M', u'倂'),
- (0x2F808, 'M', u'偺'),
- (0x2F809, 'M', u'備'),
- (0x2F80A, 'M', u'僧'),
- (0x2F80B, 'M', u'像'),
- (0x2F80C, 'M', u'㒞'),
- (0x2F80D, 'M', u'𠘺'),
- (0x2F80E, 'M', u'免'),
- (0x2F80F, 'M', u'兔'),
- (0x2F810, 'M', u'兤'),
- (0x2F811, 'M', u'具'),
- (0x2F812, 'M', u'𠔜'),
- (0x2F813, 'M', u'㒹'),
- (0x2F814, 'M', u'內'),
- (0x2F815, 'M', u'再'),
- (0x2F816, 'M', u'𠕋'),
- (0x2F817, 'M', u'冗'),
- (0x2F818, 'M', u'冤'),
- (0x2F819, 'M', u'仌'),
- (0x2F81A, 'M', u'冬'),
- (0x2F81B, 'M', u'况'),
- (0x2F81C, 'M', u'𩇟'),
- (0x2F81D, 'M', u'凵'),
- (0x2F81E, 'M', u'刃'),
- (0x2F81F, 'M', u'㓟'),
- (0x2F820, 'M', u'刻'),
- (0x2F821, 'M', u'剆'),
- (0x2F822, 'M', u'割'),
- (0x2F823, 'M', u'剷'),
- (0x2F824, 'M', u'㔕'),
- (0x2F825, 'M', u'勇'),
- (0x2F826, 'M', u'勉'),
- (0x2F827, 'M', u'勤'),
- (0x2F828, 'M', u'勺'),
- (0x2F829, 'M', u'包'),
- (0x2F82A, 'M', u'匆'),
- (0x2F82B, 'M', u'北'),
- (0x2F82C, 'M', u'卉'),
- (0x2F82D, 'M', u'卑'),
- (0x2F82E, 'M', u'博'),
- (0x2F82F, 'M', u'即'),
- (0x2F830, 'M', u'卽'),
- (0x2F831, 'M', u'卿'),
- (0x2F834, 'M', u'𠨬'),
- (0x2F835, 'M', u'灰'),
- (0x2F836, 'M', u'及'),
- (0x2F837, 'M', u'叟'),
- (0x2F838, 'M', u'𠭣'),
- (0x2F839, 'M', u'叫'),
- (0x2F83A, 'M', u'叱'),
- (0x2F83B, 'M', u'吆'),
- (0x2F83C, 'M', u'咞'),
- (0x2F83D, 'M', u'吸'),
- (0x2F83E, 'M', u'呈'),
- (0x2F83F, 'M', u'周'),
- (0x2F840, 'M', u'咢'),
- (0x2F841, 'M', u'哶'),
- (0x2F842, 'M', u'唐'),
- (0x2F843, 'M', u'啓'),
- (0x2F844, 'M', u'啣'),
- (0x2F845, 'M', u'善'),
- (0x2F847, 'M', u'喙'),
- (0x2F848, 'M', u'喫'),
- (0x2F849, 'M', u'喳'),
- (0x2F84A, 'M', u'嗂'),
- (0x2F84B, 'M', u'圖'),
- (0x2F84C, 'M', u'嘆'),
- (0x2F84D, 'M', u'圗'),
- ]
-
-def _seg_68():
- return [
- (0x2F84E, 'M', u'噑'),
- (0x2F84F, 'M', u'噴'),
- (0x2F850, 'M', u'切'),
- (0x2F851, 'M', u'壮'),
- (0x2F852, 'M', u'城'),
- (0x2F853, 'M', u'埴'),
- (0x2F854, 'M', u'堍'),
- (0x2F855, 'M', u'型'),
- (0x2F856, 'M', u'堲'),
- (0x2F857, 'M', u'報'),
- (0x2F858, 'M', u'墬'),
- (0x2F859, 'M', u'𡓤'),
- (0x2F85A, 'M', u'売'),
- (0x2F85B, 'M', u'壷'),
- (0x2F85C, 'M', u'夆'),
- (0x2F85D, 'M', u'多'),
- (0x2F85E, 'M', u'夢'),
- (0x2F85F, 'M', u'奢'),
- (0x2F860, 'M', u'𡚨'),
- (0x2F861, 'M', u'𡛪'),
- (0x2F862, 'M', u'姬'),
- (0x2F863, 'M', u'娛'),
- (0x2F864, 'M', u'娧'),
- (0x2F865, 'M', u'姘'),
- (0x2F866, 'M', u'婦'),
- (0x2F867, 'M', u'㛮'),
- (0x2F868, 'X'),
- (0x2F869, 'M', u'嬈'),
- (0x2F86A, 'M', u'嬾'),
- (0x2F86C, 'M', u'𡧈'),
- (0x2F86D, 'M', u'寃'),
- (0x2F86E, 'M', u'寘'),
- (0x2F86F, 'M', u'寧'),
- (0x2F870, 'M', u'寳'),
- (0x2F871, 'M', u'𡬘'),
- (0x2F872, 'M', u'寿'),
- (0x2F873, 'M', u'将'),
- (0x2F874, 'X'),
- (0x2F875, 'M', u'尢'),
- (0x2F876, 'M', u'㞁'),
- (0x2F877, 'M', u'屠'),
- (0x2F878, 'M', u'屮'),
- (0x2F879, 'M', u'峀'),
- (0x2F87A, 'M', u'岍'),
- (0x2F87B, 'M', u'𡷤'),
- (0x2F87C, 'M', u'嵃'),
- (0x2F87D, 'M', u'𡷦'),
- (0x2F87E, 'M', u'嵮'),
- (0x2F87F, 'M', u'嵫'),
- (0x2F880, 'M', u'嵼'),
- (0x2F881, 'M', u'巡'),
- (0x2F882, 'M', u'巢'),
- (0x2F883, 'M', u'㠯'),
- (0x2F884, 'M', u'巽'),
- (0x2F885, 'M', u'帨'),
- (0x2F886, 'M', u'帽'),
- (0x2F887, 'M', u'幩'),
- (0x2F888, 'M', u'㡢'),
- (0x2F889, 'M', u'𢆃'),
- (0x2F88A, 'M', u'㡼'),
- (0x2F88B, 'M', u'庰'),
- (0x2F88C, 'M', u'庳'),
- (0x2F88D, 'M', u'庶'),
- (0x2F88E, 'M', u'廊'),
- (0x2F88F, 'M', u'𪎒'),
- (0x2F890, 'M', u'廾'),
- (0x2F891, 'M', u'𢌱'),
- (0x2F893, 'M', u'舁'),
- (0x2F894, 'M', u'弢'),
- (0x2F896, 'M', u'㣇'),
- (0x2F897, 'M', u'𣊸'),
- (0x2F898, 'M', u'𦇚'),
- (0x2F899, 'M', u'形'),
- (0x2F89A, 'M', u'彫'),
- (0x2F89B, 'M', u'㣣'),
- (0x2F89C, 'M', u'徚'),
- (0x2F89D, 'M', u'忍'),
- (0x2F89E, 'M', u'志'),
- (0x2F89F, 'M', u'忹'),
- (0x2F8A0, 'M', u'悁'),
- (0x2F8A1, 'M', u'㤺'),
- (0x2F8A2, 'M', u'㤜'),
- (0x2F8A3, 'M', u'悔'),
- (0x2F8A4, 'M', u'𢛔'),
- (0x2F8A5, 'M', u'惇'),
- (0x2F8A6, 'M', u'慈'),
- (0x2F8A7, 'M', u'慌'),
- (0x2F8A8, 'M', u'慎'),
- (0x2F8A9, 'M', u'慌'),
- (0x2F8AA, 'M', u'慺'),
- (0x2F8AB, 'M', u'憎'),
- (0x2F8AC, 'M', u'憲'),
- (0x2F8AD, 'M', u'憤'),
- (0x2F8AE, 'M', u'憯'),
- (0x2F8AF, 'M', u'懞'),
- (0x2F8B0, 'M', u'懲'),
- (0x2F8B1, 'M', u'懶'),
- (0x2F8B2, 'M', u'成'),
- (0x2F8B3, 'M', u'戛'),
- (0x2F8B4, 'M', u'扝'),
- ]
-
-def _seg_69():
- return [
- (0x2F8B5, 'M', u'抱'),
- (0x2F8B6, 'M', u'拔'),
- (0x2F8B7, 'M', u'捐'),
- (0x2F8B8, 'M', u'𢬌'),
- (0x2F8B9, 'M', u'挽'),
- (0x2F8BA, 'M', u'拼'),
- (0x2F8BB, 'M', u'捨'),
- (0x2F8BC, 'M', u'掃'),
- (0x2F8BD, 'M', u'揤'),
- (0x2F8BE, 'M', u'𢯱'),
- (0x2F8BF, 'M', u'搢'),
- (0x2F8C0, 'M', u'揅'),
- (0x2F8C1, 'M', u'掩'),
- (0x2F8C2, 'M', u'㨮'),
- (0x2F8C3, 'M', u'摩'),
- (0x2F8C4, 'M', u'摾'),
- (0x2F8C5, 'M', u'撝'),
- (0x2F8C6, 'M', u'摷'),
- (0x2F8C7, 'M', u'㩬'),
- (0x2F8C8, 'M', u'敏'),
- (0x2F8C9, 'M', u'敬'),
- (0x2F8CA, 'M', u'𣀊'),
- (0x2F8CB, 'M', u'旣'),
- (0x2F8CC, 'M', u'書'),
- (0x2F8CD, 'M', u'晉'),
- (0x2F8CE, 'M', u'㬙'),
- (0x2F8CF, 'M', u'暑'),
- (0x2F8D0, 'M', u'㬈'),
- (0x2F8D1, 'M', u'㫤'),
- (0x2F8D2, 'M', u'冒'),
- (0x2F8D3, 'M', u'冕'),
- (0x2F8D4, 'M', u'最'),
- (0x2F8D5, 'M', u'暜'),
- (0x2F8D6, 'M', u'肭'),
- (0x2F8D7, 'M', u'䏙'),
- (0x2F8D8, 'M', u'朗'),
- (0x2F8D9, 'M', u'望'),
- (0x2F8DA, 'M', u'朡'),
- (0x2F8DB, 'M', u'杞'),
- (0x2F8DC, 'M', u'杓'),
- (0x2F8DD, 'M', u'𣏃'),
- (0x2F8DE, 'M', u'㭉'),
- (0x2F8DF, 'M', u'柺'),
- (0x2F8E0, 'M', u'枅'),
- (0x2F8E1, 'M', u'桒'),
- (0x2F8E2, 'M', u'梅'),
- (0x2F8E3, 'M', u'𣑭'),
- (0x2F8E4, 'M', u'梎'),
- (0x2F8E5, 'M', u'栟'),
- (0x2F8E6, 'M', u'椔'),
- (0x2F8E7, 'M', u'㮝'),
- (0x2F8E8, 'M', u'楂'),
- (0x2F8E9, 'M', u'榣'),
- (0x2F8EA, 'M', u'槪'),
- (0x2F8EB, 'M', u'檨'),
- (0x2F8EC, 'M', u'𣚣'),
- (0x2F8ED, 'M', u'櫛'),
- (0x2F8EE, 'M', u'㰘'),
- (0x2F8EF, 'M', u'次'),
- (0x2F8F0, 'M', u'𣢧'),
- (0x2F8F1, 'M', u'歔'),
- (0x2F8F2, 'M', u'㱎'),
- (0x2F8F3, 'M', u'歲'),
- (0x2F8F4, 'M', u'殟'),
- (0x2F8F5, 'M', u'殺'),
- (0x2F8F6, 'M', u'殻'),
- (0x2F8F7, 'M', u'𣪍'),
- (0x2F8F8, 'M', u'𡴋'),
- (0x2F8F9, 'M', u'𣫺'),
- (0x2F8FA, 'M', u'汎'),
- (0x2F8FB, 'M', u'𣲼'),
- (0x2F8FC, 'M', u'沿'),
- (0x2F8FD, 'M', u'泍'),
- (0x2F8FE, 'M', u'汧'),
- (0x2F8FF, 'M', u'洖'),
- (0x2F900, 'M', u'派'),
- (0x2F901, 'M', u'海'),
- (0x2F902, 'M', u'流'),
- (0x2F903, 'M', u'浩'),
- (0x2F904, 'M', u'浸'),
- (0x2F905, 'M', u'涅'),
- (0x2F906, 'M', u'𣴞'),
- (0x2F907, 'M', u'洴'),
- (0x2F908, 'M', u'港'),
- (0x2F909, 'M', u'湮'),
- (0x2F90A, 'M', u'㴳'),
- (0x2F90B, 'M', u'滋'),
- (0x2F90C, 'M', u'滇'),
- (0x2F90D, 'M', u'𣻑'),
- (0x2F90E, 'M', u'淹'),
- (0x2F90F, 'M', u'潮'),
- (0x2F910, 'M', u'𣽞'),
- (0x2F911, 'M', u'𣾎'),
- (0x2F912, 'M', u'濆'),
- (0x2F913, 'M', u'瀹'),
- (0x2F914, 'M', u'瀞'),
- (0x2F915, 'M', u'瀛'),
- (0x2F916, 'M', u'㶖'),
- (0x2F917, 'M', u'灊'),
- (0x2F918, 'M', u'災'),
- ]
-
-def _seg_70():
- return [
- (0x2F919, 'M', u'灷'),
- (0x2F91A, 'M', u'炭'),
- (0x2F91B, 'M', u'𠔥'),
- (0x2F91C, 'M', u'煅'),
- (0x2F91D, 'M', u'𤉣'),
- (0x2F91E, 'M', u'熜'),
- (0x2F91F, 'X'),
- (0x2F920, 'M', u'爨'),
- (0x2F921, 'M', u'爵'),
- (0x2F922, 'M', u'牐'),
- (0x2F923, 'M', u'𤘈'),
- (0x2F924, 'M', u'犀'),
- (0x2F925, 'M', u'犕'),
- (0x2F926, 'M', u'𤜵'),
- (0x2F927, 'M', u'𤠔'),
- (0x2F928, 'M', u'獺'),
- (0x2F929, 'M', u'王'),
- (0x2F92A, 'M', u'㺬'),
- (0x2F92B, 'M', u'玥'),
- (0x2F92C, 'M', u'㺸'),
- (0x2F92E, 'M', u'瑇'),
- (0x2F92F, 'M', u'瑜'),
- (0x2F930, 'M', u'瑱'),
- (0x2F931, 'M', u'璅'),
- (0x2F932, 'M', u'瓊'),
- (0x2F933, 'M', u'㼛'),
- (0x2F934, 'M', u'甤'),
- (0x2F935, 'M', u'𤰶'),
- (0x2F936, 'M', u'甾'),
- (0x2F937, 'M', u'𤲒'),
- (0x2F938, 'M', u'異'),
- (0x2F939, 'M', u'𢆟'),
- (0x2F93A, 'M', u'瘐'),
- (0x2F93B, 'M', u'𤾡'),
- (0x2F93C, 'M', u'𤾸'),
- (0x2F93D, 'M', u'𥁄'),
- (0x2F93E, 'M', u'㿼'),
- (0x2F93F, 'M', u'䀈'),
- (0x2F940, 'M', u'直'),
- (0x2F941, 'M', u'𥃳'),
- (0x2F942, 'M', u'𥃲'),
- (0x2F943, 'M', u'𥄙'),
- (0x2F944, 'M', u'𥄳'),
- (0x2F945, 'M', u'眞'),
- (0x2F946, 'M', u'真'),
- (0x2F948, 'M', u'睊'),
- (0x2F949, 'M', u'䀹'),
- (0x2F94A, 'M', u'瞋'),
- (0x2F94B, 'M', u'䁆'),
- (0x2F94C, 'M', u'䂖'),
- (0x2F94D, 'M', u'𥐝'),
- (0x2F94E, 'M', u'硎'),
- (0x2F94F, 'M', u'碌'),
- (0x2F950, 'M', u'磌'),
- (0x2F951, 'M', u'䃣'),
- (0x2F952, 'M', u'𥘦'),
- (0x2F953, 'M', u'祖'),
- (0x2F954, 'M', u'𥚚'),
- (0x2F955, 'M', u'𥛅'),
- (0x2F956, 'M', u'福'),
- (0x2F957, 'M', u'秫'),
- (0x2F958, 'M', u'䄯'),
- (0x2F959, 'M', u'穀'),
- (0x2F95A, 'M', u'穊'),
- (0x2F95B, 'M', u'穏'),
- (0x2F95C, 'M', u'𥥼'),
- (0x2F95D, 'M', u'𥪧'),
- (0x2F95F, 'X'),
- (0x2F960, 'M', u'䈂'),
- (0x2F961, 'M', u'𥮫'),
- (0x2F962, 'M', u'篆'),
- (0x2F963, 'M', u'築'),
- (0x2F964, 'M', u'䈧'),
- (0x2F965, 'M', u'𥲀'),
- (0x2F966, 'M', u'糒'),
- (0x2F967, 'M', u'䊠'),
- (0x2F968, 'M', u'糨'),
- (0x2F969, 'M', u'糣'),
- (0x2F96A, 'M', u'紀'),
- (0x2F96B, 'M', u'𥾆'),
- (0x2F96C, 'M', u'絣'),
- (0x2F96D, 'M', u'䌁'),
- (0x2F96E, 'M', u'緇'),
- (0x2F96F, 'M', u'縂'),
- (0x2F970, 'M', u'繅'),
- (0x2F971, 'M', u'䌴'),
- (0x2F972, 'M', u'𦈨'),
- (0x2F973, 'M', u'𦉇'),
- (0x2F974, 'M', u'䍙'),
- (0x2F975, 'M', u'𦋙'),
- (0x2F976, 'M', u'罺'),
- (0x2F977, 'M', u'𦌾'),
- (0x2F978, 'M', u'羕'),
- (0x2F979, 'M', u'翺'),
- (0x2F97A, 'M', u'者'),
- (0x2F97B, 'M', u'𦓚'),
- (0x2F97C, 'M', u'𦔣'),
- (0x2F97D, 'M', u'聠'),
- (0x2F97E, 'M', u'𦖨'),
- (0x2F97F, 'M', u'聰'),
- ]
-
-def _seg_71():
- return [
- (0x2F980, 'M', u'𣍟'),
- (0x2F981, 'M', u'䏕'),
- (0x2F982, 'M', u'育'),
- (0x2F983, 'M', u'脃'),
- (0x2F984, 'M', u'䐋'),
- (0x2F985, 'M', u'脾'),
- (0x2F986, 'M', u'媵'),
- (0x2F987, 'M', u'𦞧'),
- (0x2F988, 'M', u'𦞵'),
- (0x2F989, 'M', u'𣎓'),
- (0x2F98A, 'M', u'𣎜'),
- (0x2F98B, 'M', u'舁'),
- (0x2F98C, 'M', u'舄'),
- (0x2F98D, 'M', u'辞'),
- (0x2F98E, 'M', u'䑫'),
- (0x2F98F, 'M', u'芑'),
- (0x2F990, 'M', u'芋'),
- (0x2F991, 'M', u'芝'),
- (0x2F992, 'M', u'劳'),
- (0x2F993, 'M', u'花'),
- (0x2F994, 'M', u'芳'),
- (0x2F995, 'M', u'芽'),
- (0x2F996, 'M', u'苦'),
- (0x2F997, 'M', u'𦬼'),
- (0x2F998, 'M', u'若'),
- (0x2F999, 'M', u'茝'),
- (0x2F99A, 'M', u'荣'),
- (0x2F99B, 'M', u'莭'),
- (0x2F99C, 'M', u'茣'),
- (0x2F99D, 'M', u'莽'),
- (0x2F99E, 'M', u'菧'),
- (0x2F99F, 'M', u'著'),
- (0x2F9A0, 'M', u'荓'),
- (0x2F9A1, 'M', u'菊'),
- (0x2F9A2, 'M', u'菌'),
- (0x2F9A3, 'M', u'菜'),
- (0x2F9A4, 'M', u'𦰶'),
- (0x2F9A5, 'M', u'𦵫'),
- (0x2F9A6, 'M', u'𦳕'),
- (0x2F9A7, 'M', u'䔫'),
- (0x2F9A8, 'M', u'蓱'),
- (0x2F9A9, 'M', u'蓳'),
- (0x2F9AA, 'M', u'蔖'),
- (0x2F9AB, 'M', u'𧏊'),
- (0x2F9AC, 'M', u'蕤'),
- (0x2F9AD, 'M', u'𦼬'),
- (0x2F9AE, 'M', u'䕝'),
- (0x2F9AF, 'M', u'䕡'),
- (0x2F9B0, 'M', u'𦾱'),
- (0x2F9B1, 'M', u'𧃒'),
- (0x2F9B2, 'M', u'䕫'),
- (0x2F9B3, 'M', u'虐'),
- (0x2F9B4, 'M', u'虜'),
- (0x2F9B5, 'M', u'虧'),
- (0x2F9B6, 'M', u'虩'),
- (0x2F9B7, 'M', u'蚩'),
- (0x2F9B8, 'M', u'蚈'),
- (0x2F9B9, 'M', u'蜎'),
- (0x2F9BA, 'M', u'蛢'),
- (0x2F9BB, 'M', u'蝹'),
- (0x2F9BC, 'M', u'蜨'),
- (0x2F9BD, 'M', u'蝫'),
- (0x2F9BE, 'M', u'螆'),
- (0x2F9BF, 'X'),
- (0x2F9C0, 'M', u'蟡'),
- (0x2F9C1, 'M', u'蠁'),
- (0x2F9C2, 'M', u'䗹'),
- (0x2F9C3, 'M', u'衠'),
- (0x2F9C4, 'M', u'衣'),
- (0x2F9C5, 'M', u'𧙧'),
- (0x2F9C6, 'M', u'裗'),
- (0x2F9C7, 'M', u'裞'),
- (0x2F9C8, 'M', u'䘵'),
- (0x2F9C9, 'M', u'裺'),
- (0x2F9CA, 'M', u'㒻'),
- (0x2F9CB, 'M', u'𧢮'),
- (0x2F9CC, 'M', u'𧥦'),
- (0x2F9CD, 'M', u'䚾'),
- (0x2F9CE, 'M', u'䛇'),
- (0x2F9CF, 'M', u'誠'),
- (0x2F9D0, 'M', u'諭'),
- (0x2F9D1, 'M', u'變'),
- (0x2F9D2, 'M', u'豕'),
- (0x2F9D3, 'M', u'𧲨'),
- (0x2F9D4, 'M', u'貫'),
- (0x2F9D5, 'M', u'賁'),
- (0x2F9D6, 'M', u'贛'),
- (0x2F9D7, 'M', u'起'),
- (0x2F9D8, 'M', u'𧼯'),
- (0x2F9D9, 'M', u'𠠄'),
- (0x2F9DA, 'M', u'跋'),
- (0x2F9DB, 'M', u'趼'),
- (0x2F9DC, 'M', u'跰'),
- (0x2F9DD, 'M', u'𠣞'),
- (0x2F9DE, 'M', u'軔'),
- (0x2F9DF, 'M', u'輸'),
- (0x2F9E0, 'M', u'𨗒'),
- (0x2F9E1, 'M', u'𨗭'),
- (0x2F9E2, 'M', u'邔'),
- (0x2F9E3, 'M', u'郱'),
- ]
-
-def _seg_72():
- return [
- (0x2F9E4, 'M', u'鄑'),
- (0x2F9E5, 'M', u'𨜮'),
- (0x2F9E6, 'M', u'鄛'),
- (0x2F9E7, 'M', u'鈸'),
- (0x2F9E8, 'M', u'鋗'),
- (0x2F9E9, 'M', u'鋘'),
- (0x2F9EA, 'M', u'鉼'),
- (0x2F9EB, 'M', u'鏹'),
- (0x2F9EC, 'M', u'鐕'),
- (0x2F9ED, 'M', u'𨯺'),
- (0x2F9EE, 'M', u'開'),
- (0x2F9EF, 'M', u'䦕'),
- (0x2F9F0, 'M', u'閷'),
- (0x2F9F1, 'M', u'𨵷'),
- (0x2F9F2, 'M', u'䧦'),
- (0x2F9F3, 'M', u'雃'),
- (0x2F9F4, 'M', u'嶲'),
- (0x2F9F5, 'M', u'霣'),
- (0x2F9F6, 'M', u'𩅅'),
- (0x2F9F7, 'M', u'𩈚'),
- (0x2F9F8, 'M', u'䩮'),
- (0x2F9F9, 'M', u'䩶'),
- (0x2F9FA, 'M', u'韠'),
- (0x2F9FB, 'M', u'𩐊'),
- (0x2F9FC, 'M', u'䪲'),
- (0x2F9FD, 'M', u'𩒖'),
- (0x2F9FE, 'M', u'頋'),
- (0x2FA00, 'M', u'頩'),
- (0x2FA01, 'M', u'𩖶'),
- (0x2FA02, 'M', u'飢'),
- (0x2FA03, 'M', u'䬳'),
- (0x2FA04, 'M', u'餩'),
- (0x2FA05, 'M', u'馧'),
- (0x2FA06, 'M', u'駂'),
- (0x2FA07, 'M', u'駾'),
- (0x2FA08, 'M', u'䯎'),
- (0x2FA09, 'M', u'𩬰'),
- (0x2FA0A, 'M', u'鬒'),
- (0x2FA0B, 'M', u'鱀'),
- (0x2FA0C, 'M', u'鳽'),
- (0x2FA0D, 'M', u'䳎'),
- (0x2FA0E, 'M', u'䳭'),
- (0x2FA0F, 'M', u'鵧'),
- (0x2FA10, 'M', u'𪃎'),
- (0x2FA11, 'M', u'䳸'),
- (0x2FA12, 'M', u'𪄅'),
- (0x2FA13, 'M', u'𪈎'),
- (0x2FA14, 'M', u'𪊑'),
- (0x2FA15, 'M', u'麻'),
- (0x2FA16, 'M', u'䵖'),
- (0x2FA17, 'M', u'黹'),
- (0x2FA18, 'M', u'黾'),
- (0x2FA19, 'M', u'鼅'),
- (0x2FA1A, 'M', u'鼏'),
- (0x2FA1B, 'M', u'鼖'),
- (0x2FA1C, 'M', u'鼻'),
- (0x2FA1D, 'M', u'𪘀'),
- (0x2FA1E, 'X'),
- (0xE0100, 'I'),
- (0xE01F0, 'X'),
- ]
-
-uts46data = tuple(
- _seg_0()
- + _seg_1()
- + _seg_2()
- + _seg_3()
- + _seg_4()
- + _seg_5()
- + _seg_6()
- + _seg_7()
- + _seg_8()
- + _seg_9()
- + _seg_10()
- + _seg_11()
- + _seg_12()
- + _seg_13()
- + _seg_14()
- + _seg_15()
- + _seg_16()
- + _seg_17()
- + _seg_18()
- + _seg_19()
- + _seg_20()
- + _seg_21()
- + _seg_22()
- + _seg_23()
- + _seg_24()
- + _seg_25()
- + _seg_26()
- + _seg_27()
- + _seg_28()
- + _seg_29()
- + _seg_30()
- + _seg_31()
- + _seg_32()
- + _seg_33()
- + _seg_34()
- + _seg_35()
- + _seg_36()
- + _seg_37()
- + _seg_38()
- + _seg_39()
- + _seg_40()
- + _seg_41()
- + _seg_42()
- + _seg_43()
- + _seg_44()
- + _seg_45()
- + _seg_46()
- + _seg_47()
- + _seg_48()
- + _seg_49()
- + _seg_50()
- + _seg_51()
- + _seg_52()
- + _seg_53()
- + _seg_54()
- + _seg_55()
- + _seg_56()
- + _seg_57()
- + _seg_58()
- + _seg_59()
- + _seg_60()
- + _seg_61()
- + _seg_62()
- + _seg_63()
- + _seg_64()
- + _seg_65()
- + _seg_66()
- + _seg_67()
- + _seg_68()
- + _seg_69()
- + _seg_70()
- + _seg_71()
- + _seg_72()
-)
diff --git a/functions/source/CreateSSHKey/ipaddress.py b/functions/source/CreateSSHKey/ipaddress.py
deleted file mode 100644
index f602c71..0000000
--- a/functions/source/CreateSSHKey/ipaddress.py
+++ /dev/null
@@ -1,2425 +0,0 @@
-# Copyright 2007 Google Inc.
-# Licensed to PSF under a Contributor Agreement.
-
-"""A fast, lightweight IPv4/IPv6 manipulation library in Python.
-
-This library is used to create/poke/manipulate IPv4 and IPv6 addresses
-and networks.
-
-"""
-
-from __future__ import unicode_literals
-
-
-import itertools
-import struct
-
-__version__ = '1.0.18'
-
-# Compatibility functions
-_compat_int_types = (int,)
-try:
- _compat_int_types = (int, long)
-except NameError:
- pass
-try:
- _compat_str = unicode
-except NameError:
- _compat_str = str
- assert bytes != str
-if b'\0'[0] == 0: # Python 3 semantics
- def _compat_bytes_to_byte_vals(byt):
- return byt
-else:
- def _compat_bytes_to_byte_vals(byt):
- return [struct.unpack(b'!B', b)[0] for b in byt]
-try:
- _compat_int_from_byte_vals = int.from_bytes
-except AttributeError:
- def _compat_int_from_byte_vals(bytvals, endianess):
- assert endianess == 'big'
- res = 0
- for bv in bytvals:
- assert isinstance(bv, _compat_int_types)
- res = (res << 8) + bv
- return res
-
-
-def _compat_to_bytes(intval, length, endianess):
- assert isinstance(intval, _compat_int_types)
- assert endianess == 'big'
- if length == 4:
- if intval < 0 or intval >= 2 ** 32:
- raise struct.error("integer out of range for 'I' format code")
- return struct.pack(b'!I', intval)
- elif length == 16:
- if intval < 0 or intval >= 2 ** 128:
- raise struct.error("integer out of range for 'QQ' format code")
- return struct.pack(b'!QQ', intval >> 64, intval & 0xffffffffffffffff)
- else:
- raise NotImplementedError()
-
-
-if hasattr(int, 'bit_length'):
- # Not int.bit_length, since that won't work in 2.7 where long exists
- def _compat_bit_length(i):
- return i.bit_length()
-else:
- def _compat_bit_length(i):
- for res in itertools.count():
- if i >> res == 0:
- return res
-
-
-def _compat_range(start, end, step=1):
- assert step > 0
- i = start
- while i < end:
- yield i
- i += step
-
-
-class _TotalOrderingMixin(object):
- __slots__ = ()
-
- # Helper that derives the other comparison operations from
- # __lt__ and __eq__
- # We avoid functools.total_ordering because it doesn't handle
- # NotImplemented correctly yet (http://bugs.python.org/issue10042)
- def __eq__(self, other):
- raise NotImplementedError
-
- def __ne__(self, other):
- equal = self.__eq__(other)
- if equal is NotImplemented:
- return NotImplemented
- return not equal
-
- def __lt__(self, other):
- raise NotImplementedError
-
- def __le__(self, other):
- less = self.__lt__(other)
- if less is NotImplemented or not less:
- return self.__eq__(other)
- return less
-
- def __gt__(self, other):
- less = self.__lt__(other)
- if less is NotImplemented:
- return NotImplemented
- equal = self.__eq__(other)
- if equal is NotImplemented:
- return NotImplemented
- return not (less or equal)
-
- def __ge__(self, other):
- less = self.__lt__(other)
- if less is NotImplemented:
- return NotImplemented
- return not less
-
-
-IPV4LENGTH = 32
-IPV6LENGTH = 128
-
-
-class AddressValueError(ValueError):
- """A Value Error related to the address."""
-
-
-class NetmaskValueError(ValueError):
- """A Value Error related to the netmask."""
-
-
-def ip_address(address):
- """Take an IP string/int and return an object of the correct type.
-
- Args:
- address: A string or integer, the IP address. Either IPv4 or
- IPv6 addresses may be supplied; integers less than 2**32 will
- be considered to be IPv4 by default.
-
- Returns:
- An IPv4Address or IPv6Address object.
-
- Raises:
- ValueError: if the *address* passed isn't either a v4 or a v6
- address
-
- """
- try:
- return IPv4Address(address)
- except (AddressValueError, NetmaskValueError):
- pass
-
- try:
- return IPv6Address(address)
- except (AddressValueError, NetmaskValueError):
- pass
-
- if isinstance(address, bytes):
- raise AddressValueError(
- '%r does not appear to be an IPv4 or IPv6 address. '
- 'Did you pass in a bytes (str in Python 2) instead of'
- ' a unicode object?' % address)
-
- raise ValueError('%r does not appear to be an IPv4 or IPv6 address' %
- address)
-
-
-def ip_network(address, strict=True):
- """Take an IP string/int and return an object of the correct type.
-
- Args:
- address: A string or integer, the IP network. Either IPv4 or
- IPv6 networks may be supplied; integers less than 2**32 will
- be considered to be IPv4 by default.
-
- Returns:
- An IPv4Network or IPv6Network object.
-
- Raises:
- ValueError: if the string passed isn't either a v4 or a v6
- address. Or if the network has host bits set.
-
- """
- try:
- return IPv4Network(address, strict)
- except (AddressValueError, NetmaskValueError):
- pass
-
- try:
- return IPv6Network(address, strict)
- except (AddressValueError, NetmaskValueError):
- pass
-
- if isinstance(address, bytes):
- raise AddressValueError(
- '%r does not appear to be an IPv4 or IPv6 network. '
- 'Did you pass in a bytes (str in Python 2) instead of'
- ' a unicode object?' % address)
-
- raise ValueError('%r does not appear to be an IPv4 or IPv6 network' %
- address)
-
-
-def ip_interface(address):
- """Take an IP string/int and return an object of the correct type.
-
- Args:
- address: A string or integer, the IP address. Either IPv4 or
- IPv6 addresses may be supplied; integers less than 2**32 will
- be considered to be IPv4 by default.
-
- Returns:
- An IPv4Interface or IPv6Interface object.
-
- Raises:
- ValueError: if the string passed isn't either a v4 or a v6
- address.
-
- Notes:
- The IPv?Interface classes describe an Address on a particular
- Network, so they're basically a combination of both the Address
- and Network classes.
-
- """
- try:
- return IPv4Interface(address)
- except (AddressValueError, NetmaskValueError):
- pass
-
- try:
- return IPv6Interface(address)
- except (AddressValueError, NetmaskValueError):
- pass
-
- raise ValueError('%r does not appear to be an IPv4 or IPv6 interface' %
- address)
-
-
-def v4_int_to_packed(address):
- """Represent an address as 4 packed bytes in network (big-endian) order.
-
- Args:
- address: An integer representation of an IPv4 IP address.
-
- Returns:
- The integer address packed as 4 bytes in network (big-endian) order.
-
- Raises:
- ValueError: If the integer is negative or too large to be an
- IPv4 IP address.
-
- """
- try:
- return _compat_to_bytes(address, 4, 'big')
- except (struct.error, OverflowError):
- raise ValueError("Address negative or too large for IPv4")
-
-
-def v6_int_to_packed(address):
- """Represent an address as 16 packed bytes in network (big-endian) order.
-
- Args:
- address: An integer representation of an IPv6 IP address.
-
- Returns:
- The integer address packed as 16 bytes in network (big-endian) order.
-
- """
- try:
- return _compat_to_bytes(address, 16, 'big')
- except (struct.error, OverflowError):
- raise ValueError("Address negative or too large for IPv6")
-
-
-def _split_optional_netmask(address):
- """Helper to split the netmask and raise AddressValueError if needed"""
- addr = _compat_str(address).split('/')
- if len(addr) > 2:
- raise AddressValueError("Only one '/' permitted in %r" % address)
- return addr
-
-
-def _find_address_range(addresses):
- """Find a sequence of sorted deduplicated IPv#Address.
-
- Args:
- addresses: a list of IPv#Address objects.
-
- Yields:
- A tuple containing the first and last IP addresses in the sequence.
-
- """
- it = iter(addresses)
- first = last = next(it)
- for ip in it:
- if ip._ip != last._ip + 1:
- yield first, last
- first = ip
- last = ip
- yield first, last
-
-
-def _count_righthand_zero_bits(number, bits):
- """Count the number of zero bits on the right hand side.
-
- Args:
- number: an integer.
- bits: maximum number of bits to count.
-
- Returns:
- The number of zero bits on the right hand side of the number.
-
- """
- if number == 0:
- return bits
- return min(bits, _compat_bit_length(~number & (number - 1)))
-
-
-def summarize_address_range(first, last):
- """Summarize a network range given the first and last IP addresses.
-
- Example:
- >>> list(summarize_address_range(IPv4Address('192.0.2.0'),
- ... IPv4Address('192.0.2.130')))
- ... #doctest: +NORMALIZE_WHITESPACE
- [IPv4Network('192.0.2.0/25'), IPv4Network('192.0.2.128/31'),
- IPv4Network('192.0.2.130/32')]
-
- Args:
- first: the first IPv4Address or IPv6Address in the range.
- last: the last IPv4Address or IPv6Address in the range.
-
- Returns:
- An iterator of the summarized IPv(4|6) network objects.
-
- Raises:
- TypeError:
- If the first and last objects are not IP addresses.
- If the first and last objects are not the same version.
- ValueError:
- If the last object is not greater than the first.
- If the version of the first address is not 4 or 6.
-
- """
- if (not (isinstance(first, _BaseAddress) and
- isinstance(last, _BaseAddress))):
- raise TypeError('first and last must be IP addresses, not networks')
- if first.version != last.version:
- raise TypeError("%s and %s are not of the same version" % (
- first, last))
- if first > last:
- raise ValueError('last IP address must be greater than first')
-
- if first.version == 4:
- ip = IPv4Network
- elif first.version == 6:
- ip = IPv6Network
- else:
- raise ValueError('unknown IP version')
-
- ip_bits = first._max_prefixlen
- first_int = first._ip
- last_int = last._ip
- while first_int <= last_int:
- nbits = min(_count_righthand_zero_bits(first_int, ip_bits),
- _compat_bit_length(last_int - first_int + 1) - 1)
- net = ip((first_int, ip_bits - nbits))
- yield net
- first_int += 1 << nbits
- if first_int - 1 == ip._ALL_ONES:
- break
-
-
-def _collapse_addresses_internal(addresses):
- """Loops through the addresses, collapsing concurrent netblocks.
-
- Example:
-
- ip1 = IPv4Network('192.0.2.0/26')
- ip2 = IPv4Network('192.0.2.64/26')
- ip3 = IPv4Network('192.0.2.128/26')
- ip4 = IPv4Network('192.0.2.192/26')
-
- _collapse_addresses_internal([ip1, ip2, ip3, ip4]) ->
- [IPv4Network('192.0.2.0/24')]
-
- This shouldn't be called directly; it is called via
- collapse_addresses([]).
-
- Args:
- addresses: A list of IPv4Network's or IPv6Network's
-
- Returns:
- A list of IPv4Network's or IPv6Network's depending on what we were
- passed.
-
- """
- # First merge
- to_merge = list(addresses)
- subnets = {}
- while to_merge:
- net = to_merge.pop()
- supernet = net.supernet()
- existing = subnets.get(supernet)
- if existing is None:
- subnets[supernet] = net
- elif existing != net:
- # Merge consecutive subnets
- del subnets[supernet]
- to_merge.append(supernet)
- # Then iterate over resulting networks, skipping subsumed subnets
- last = None
- for net in sorted(subnets.values()):
- if last is not None:
- # Since they are sorted,
- # last.network_address <= net.network_address is a given.
- if last.broadcast_address >= net.broadcast_address:
- continue
- yield net
- last = net
-
-
-def collapse_addresses(addresses):
- """Collapse a list of IP objects.
-
- Example:
- collapse_addresses([IPv4Network('192.0.2.0/25'),
- IPv4Network('192.0.2.128/25')]) ->
- [IPv4Network('192.0.2.0/24')]
-
- Args:
- addresses: An iterator of IPv4Network or IPv6Network objects.
-
- Returns:
- An iterator of the collapsed IPv(4|6)Network objects.
-
- Raises:
- TypeError: If passed a list of mixed version objects.
-
- """
- addrs = []
- ips = []
- nets = []
-
- # split IP addresses and networks
- for ip in addresses:
- if isinstance(ip, _BaseAddress):
- if ips and ips[-1]._version != ip._version:
- raise TypeError("%s and %s are not of the same version" % (
- ip, ips[-1]))
- ips.append(ip)
- elif ip._prefixlen == ip._max_prefixlen:
- if ips and ips[-1]._version != ip._version:
- raise TypeError("%s and %s are not of the same version" % (
- ip, ips[-1]))
- try:
- ips.append(ip.ip)
- except AttributeError:
- ips.append(ip.network_address)
- else:
- if nets and nets[-1]._version != ip._version:
- raise TypeError("%s and %s are not of the same version" % (
- ip, nets[-1]))
- nets.append(ip)
-
- # sort and dedup
- ips = sorted(set(ips))
-
- # find consecutive address ranges in the sorted sequence and summarize them
- if ips:
- for first, last in _find_address_range(ips):
- addrs.extend(summarize_address_range(first, last))
-
- return _collapse_addresses_internal(addrs + nets)
-
-
-def get_mixed_type_key(obj):
- """Return a key suitable for sorting between networks and addresses.
-
- Address and Network objects are not sortable by default; they're
- fundamentally different so the expression
-
- IPv4Address('192.0.2.0') <= IPv4Network('192.0.2.0/24')
-
- doesn't make any sense. There are some times, however, where you may wish
- to have ipaddress sort these for you anyway. If you need to do this, you
- can use this function as the key= argument to sorted().
-
- Args:
- obj: either a Network or Address object.
- Returns:
- appropriate key.
-
- """
- if isinstance(obj, _BaseNetwork):
- return obj._get_networks_key()
- elif isinstance(obj, _BaseAddress):
- return obj._get_address_key()
- return NotImplemented
-
-
-class _IPAddressBase(_TotalOrderingMixin):
-
- """The mother class."""
-
- __slots__ = ()
-
- @property
- def exploded(self):
- """Return the longhand version of the IP address as a string."""
- return self._explode_shorthand_ip_string()
-
- @property
- def compressed(self):
- """Return the shorthand version of the IP address as a string."""
- return _compat_str(self)
-
- @property
- def reverse_pointer(self):
- """The name of the reverse DNS pointer for the IP address, e.g.:
- >>> ipaddress.ip_address("127.0.0.1").reverse_pointer
- '1.0.0.127.in-addr.arpa'
- >>> ipaddress.ip_address("2001:db8::1").reverse_pointer
- '1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa'
-
- """
- return self._reverse_pointer()
-
- @property
- def version(self):
- msg = '%200s has no version specified' % (type(self),)
- raise NotImplementedError(msg)
-
- def _check_int_address(self, address):
- if address < 0:
- msg = "%d (< 0) is not permitted as an IPv%d address"
- raise AddressValueError(msg % (address, self._version))
- if address > self._ALL_ONES:
- msg = "%d (>= 2**%d) is not permitted as an IPv%d address"
- raise AddressValueError(msg % (address, self._max_prefixlen,
- self._version))
-
- def _check_packed_address(self, address, expected_len):
- address_len = len(address)
- if address_len != expected_len:
- msg = (
- '%r (len %d != %d) is not permitted as an IPv%d address. '
- 'Did you pass in a bytes (str in Python 2) instead of'
- ' a unicode object?')
- raise AddressValueError(msg % (address, address_len,
- expected_len, self._version))
-
- @classmethod
- def _ip_int_from_prefix(cls, prefixlen):
- """Turn the prefix length into a bitwise netmask
-
- Args:
- prefixlen: An integer, the prefix length.
-
- Returns:
- An integer.
-
- """
- return cls._ALL_ONES ^ (cls._ALL_ONES >> prefixlen)
-
- @classmethod
- def _prefix_from_ip_int(cls, ip_int):
- """Return prefix length from the bitwise netmask.
-
- Args:
- ip_int: An integer, the netmask in expanded bitwise format
-
- Returns:
- An integer, the prefix length.
-
- Raises:
- ValueError: If the input intermingles zeroes & ones
- """
- trailing_zeroes = _count_righthand_zero_bits(ip_int,
- cls._max_prefixlen)
- prefixlen = cls._max_prefixlen - trailing_zeroes
- leading_ones = ip_int >> trailing_zeroes
- all_ones = (1 << prefixlen) - 1
- if leading_ones != all_ones:
- byteslen = cls._max_prefixlen // 8
- details = _compat_to_bytes(ip_int, byteslen, 'big')
- msg = 'Netmask pattern %r mixes zeroes & ones'
- raise ValueError(msg % details)
- return prefixlen
-
- @classmethod
- def _report_invalid_netmask(cls, netmask_str):
- msg = '%r is not a valid netmask' % netmask_str
- raise NetmaskValueError(msg)
-
- @classmethod
- def _prefix_from_prefix_string(cls, prefixlen_str):
- """Return prefix length from a numeric string
-
- Args:
- prefixlen_str: The string to be converted
-
- Returns:
- An integer, the prefix length.
-
- Raises:
- NetmaskValueError: If the input is not a valid netmask
- """
- # int allows a leading +/- as well as surrounding whitespace,
- # so we ensure that isn't the case
- if not _BaseV4._DECIMAL_DIGITS.issuperset(prefixlen_str):
- cls._report_invalid_netmask(prefixlen_str)
- try:
- prefixlen = int(prefixlen_str)
- except ValueError:
- cls._report_invalid_netmask(prefixlen_str)
- if not (0 <= prefixlen <= cls._max_prefixlen):
- cls._report_invalid_netmask(prefixlen_str)
- return prefixlen
-
- @classmethod
- def _prefix_from_ip_string(cls, ip_str):
- """Turn a netmask/hostmask string into a prefix length
-
- Args:
- ip_str: The netmask/hostmask to be converted
-
- Returns:
- An integer, the prefix length.
-
- Raises:
- NetmaskValueError: If the input is not a valid netmask/hostmask
- """
- # Parse the netmask/hostmask like an IP address.
- try:
- ip_int = cls._ip_int_from_string(ip_str)
- except AddressValueError:
- cls._report_invalid_netmask(ip_str)
-
- # Try matching a netmask (this would be /1*0*/ as a bitwise regexp).
- # Note that the two ambiguous cases (all-ones and all-zeroes) are
- # treated as netmasks.
- try:
- return cls._prefix_from_ip_int(ip_int)
- except ValueError:
- pass
-
- # Invert the bits, and try matching a /0+1+/ hostmask instead.
- ip_int ^= cls._ALL_ONES
- try:
- return cls._prefix_from_ip_int(ip_int)
- except ValueError:
- cls._report_invalid_netmask(ip_str)
-
- def __reduce__(self):
- return self.__class__, (_compat_str(self),)
-
-
-class _BaseAddress(_IPAddressBase):
-
- """A generic IP object.
-
- This IP class contains the version independent methods which are
- used by single IP addresses.
- """
-
- __slots__ = ()
-
- def __int__(self):
- return self._ip
-
- def __eq__(self, other):
- try:
- return (self._ip == other._ip and
- self._version == other._version)
- except AttributeError:
- return NotImplemented
-
- def __lt__(self, other):
- if not isinstance(other, _IPAddressBase):
- return NotImplemented
- if not isinstance(other, _BaseAddress):
- raise TypeError('%s and %s are not of the same type' % (
- self, other))
- if self._version != other._version:
- raise TypeError('%s and %s are not of the same version' % (
- self, other))
- if self._ip != other._ip:
- return self._ip < other._ip
- return False
-
- # Shorthand for Integer addition and subtraction. This is not
- # meant to ever support addition/subtraction of addresses.
- def __add__(self, other):
- if not isinstance(other, _compat_int_types):
- return NotImplemented
- return self.__class__(int(self) + other)
-
- def __sub__(self, other):
- if not isinstance(other, _compat_int_types):
- return NotImplemented
- return self.__class__(int(self) - other)
-
- def __repr__(self):
- return '%s(%r)' % (self.__class__.__name__, _compat_str(self))
-
- def __str__(self):
- return _compat_str(self._string_from_ip_int(self._ip))
-
- def __hash__(self):
- return hash(hex(int(self._ip)))
-
- def _get_address_key(self):
- return (self._version, self)
-
- def __reduce__(self):
- return self.__class__, (self._ip,)
-
-
-class _BaseNetwork(_IPAddressBase):
-
- """A generic IP network object.
-
- This IP class contains the version independent methods which are
- used by networks.
-
- """
- def __init__(self, address):
- self._cache = {}
-
- def __repr__(self):
- return '%s(%r)' % (self.__class__.__name__, _compat_str(self))
-
- def __str__(self):
- return '%s/%d' % (self.network_address, self.prefixlen)
-
- def hosts(self):
- """Generate Iterator over usable hosts in a network.
-
- This is like __iter__ except it doesn't return the network
- or broadcast addresses.
-
- """
- network = int(self.network_address)
- broadcast = int(self.broadcast_address)
- for x in _compat_range(network + 1, broadcast):
- yield self._address_class(x)
-
- def __iter__(self):
- network = int(self.network_address)
- broadcast = int(self.broadcast_address)
- for x in _compat_range(network, broadcast + 1):
- yield self._address_class(x)
-
- def __getitem__(self, n):
- network = int(self.network_address)
- broadcast = int(self.broadcast_address)
- if n >= 0:
- if network + n > broadcast:
- raise IndexError('address out of range')
- return self._address_class(network + n)
- else:
- n += 1
- if broadcast + n < network:
- raise IndexError('address out of range')
- return self._address_class(broadcast + n)
-
- def __lt__(self, other):
- if not isinstance(other, _IPAddressBase):
- return NotImplemented
- if not isinstance(other, _BaseNetwork):
- raise TypeError('%s and %s are not of the same type' % (
- self, other))
- if self._version != other._version:
- raise TypeError('%s and %s are not of the same version' % (
- self, other))
- if self.network_address != other.network_address:
- return self.network_address < other.network_address
- if self.netmask != other.netmask:
- return self.netmask < other.netmask
- return False
-
- def __eq__(self, other):
- try:
- return (self._version == other._version and
- self.network_address == other.network_address and
- int(self.netmask) == int(other.netmask))
- except AttributeError:
- return NotImplemented
-
- def __hash__(self):
- return hash(int(self.network_address) ^ int(self.netmask))
-
- def __contains__(self, other):
- # always false if one is v4 and the other is v6.
- if self._version != other._version:
- return False
- # dealing with another network.
- if isinstance(other, _BaseNetwork):
- return False
- # dealing with another address
- else:
- # address
- return (int(self.network_address) <= int(other._ip) <=
- int(self.broadcast_address))
-
- def overlaps(self, other):
- """Tell if self is partly contained in other."""
- return self.network_address in other or (
- self.broadcast_address in other or (
- other.network_address in self or (
- other.broadcast_address in self)))
-
- @property
- def broadcast_address(self):
- x = self._cache.get('broadcast_address')
- if x is None:
- x = self._address_class(int(self.network_address) |
- int(self.hostmask))
- self._cache['broadcast_address'] = x
- return x
-
- @property
- def hostmask(self):
- x = self._cache.get('hostmask')
- if x is None:
- x = self._address_class(int(self.netmask) ^ self._ALL_ONES)
- self._cache['hostmask'] = x
- return x
-
- @property
- def with_prefixlen(self):
- return '%s/%d' % (self.network_address, self._prefixlen)
-
- @property
- def with_netmask(self):
- return '%s/%s' % (self.network_address, self.netmask)
-
- @property
- def with_hostmask(self):
- return '%s/%s' % (self.network_address, self.hostmask)
-
- @property
- def num_addresses(self):
- """Number of hosts in the current subnet."""
- return int(self.broadcast_address) - int(self.network_address) + 1
-
- @property
- def _address_class(self):
- # Returning bare address objects (rather than interfaces) allows for
- # more consistent behaviour across the network address, broadcast
- # address and individual host addresses.
- msg = '%200s has no associated address class' % (type(self),)
- raise NotImplementedError(msg)
-
- @property
- def prefixlen(self):
- return self._prefixlen
-
- def address_exclude(self, other):
- """Remove an address from a larger block.
-
- For example:
-
- addr1 = ip_network('192.0.2.0/28')
- addr2 = ip_network('192.0.2.1/32')
- list(addr1.address_exclude(addr2)) =
- [IPv4Network('192.0.2.0/32'), IPv4Network('192.0.2.2/31'),
- IPv4Network('192.0.2.4/30'), IPv4Network('192.0.2.8/29')]
-
- or IPv6:
-
- addr1 = ip_network('2001:db8::1/32')
- addr2 = ip_network('2001:db8::1/128')
- list(addr1.address_exclude(addr2)) =
- [ip_network('2001:db8::1/128'),
- ip_network('2001:db8::2/127'),
- ip_network('2001:db8::4/126'),
- ip_network('2001:db8::8/125'),
- ...
- ip_network('2001:db8:8000::/33')]
-
- Args:
- other: An IPv4Network or IPv6Network object of the same type.
-
- Returns:
- An iterator of the IPv(4|6)Network objects which is self
- minus other.
-
- Raises:
- TypeError: If self and other are of differing address
- versions, or if other is not a network object.
- ValueError: If other is not completely contained by self.
-
- """
- if not self._version == other._version:
- raise TypeError("%s and %s are not of the same version" % (
- self, other))
-
- if not isinstance(other, _BaseNetwork):
- raise TypeError("%s is not a network object" % other)
-
- if not other.subnet_of(self):
- raise ValueError('%s not contained in %s' % (other, self))
- if other == self:
- return
-
- # Make sure we're comparing the network of other.
- other = other.__class__('%s/%s' % (other.network_address,
- other.prefixlen))
-
- s1, s2 = self.subnets()
- while s1 != other and s2 != other:
- if other.subnet_of(s1):
- yield s2
- s1, s2 = s1.subnets()
- elif other.subnet_of(s2):
- yield s1
- s1, s2 = s2.subnets()
- else:
- # If we got here, there's a bug somewhere.
- raise AssertionError('Error performing exclusion: '
- 's1: %s s2: %s other: %s' %
- (s1, s2, other))
- if s1 == other:
- yield s2
- elif s2 == other:
- yield s1
- else:
- # If we got here, there's a bug somewhere.
- raise AssertionError('Error performing exclusion: '
- 's1: %s s2: %s other: %s' %
- (s1, s2, other))
-
- def compare_networks(self, other):
- """Compare two IP objects.
-
- This is only concerned about the comparison of the integer
- representation of the network addresses. This means that the
- host bits aren't considered at all in this method. If you want
- to compare host bits, you can easily enough do a
- 'HostA._ip < HostB._ip'
-
- Args:
- other: An IP object.
-
- Returns:
- If the IP versions of self and other are the same, returns:
-
- -1 if self < other:
- eg: IPv4Network('192.0.2.0/25') < IPv4Network('192.0.2.128/25')
- IPv6Network('2001:db8::1000/124') <
- IPv6Network('2001:db8::2000/124')
- 0 if self == other
- eg: IPv4Network('192.0.2.0/24') == IPv4Network('192.0.2.0/24')
- IPv6Network('2001:db8::1000/124') ==
- IPv6Network('2001:db8::1000/124')
- 1 if self > other
- eg: IPv4Network('192.0.2.128/25') > IPv4Network('192.0.2.0/25')
- IPv6Network('2001:db8::2000/124') >
- IPv6Network('2001:db8::1000/124')
-
- Raises:
- TypeError if the IP versions are different.
-
- """
- # does this need to raise a ValueError?
- if self._version != other._version:
- raise TypeError('%s and %s are not of the same type' % (
- self, other))
- # self._version == other._version below here:
- if self.network_address < other.network_address:
- return -1
- if self.network_address > other.network_address:
- return 1
- # self.network_address == other.network_address below here:
- if self.netmask < other.netmask:
- return -1
- if self.netmask > other.netmask:
- return 1
- return 0
-
- def _get_networks_key(self):
- """Network-only key function.
-
- Returns an object that identifies this address' network and
- netmask. This function is a suitable "key" argument for sorted()
- and list.sort().
-
- """
- return (self._version, self.network_address, self.netmask)
-
- def subnets(self, prefixlen_diff=1, new_prefix=None):
- """The subnets which join to make the current subnet.
-
- In the case that self contains only one IP
- (self._prefixlen == 32 for IPv4 or self._prefixlen == 128
- for IPv6), yield an iterator with just ourself.
-
- Args:
- prefixlen_diff: An integer, the amount the prefix length
- should be increased by. This should not be set if
- new_prefix is also set.
- new_prefix: The desired new prefix length. This must be a
- larger number (smaller prefix) than the existing prefix.
- This should not be set if prefixlen_diff is also set.
-
- Returns:
- An iterator of IPv(4|6) objects.
-
- Raises:
- ValueError: The prefixlen_diff is too small or too large.
- OR
- prefixlen_diff and new_prefix are both set or new_prefix
- is a smaller number than the current prefix (smaller
- number means a larger network)
-
- """
- if self._prefixlen == self._max_prefixlen:
- yield self
- return
-
- if new_prefix is not None:
- if new_prefix < self._prefixlen:
- raise ValueError('new prefix must be longer')
- if prefixlen_diff != 1:
- raise ValueError('cannot set prefixlen_diff and new_prefix')
- prefixlen_diff = new_prefix - self._prefixlen
-
- if prefixlen_diff < 0:
- raise ValueError('prefix length diff must be > 0')
- new_prefixlen = self._prefixlen + prefixlen_diff
-
- if new_prefixlen > self._max_prefixlen:
- raise ValueError(
- 'prefix length diff %d is invalid for netblock %s' % (
- new_prefixlen, self))
-
- start = int(self.network_address)
- end = int(self.broadcast_address) + 1
- step = (int(self.hostmask) + 1) >> prefixlen_diff
- for new_addr in _compat_range(start, end, step):
- current = self.__class__((new_addr, new_prefixlen))
- yield current
-
- def supernet(self, prefixlen_diff=1, new_prefix=None):
- """The supernet containing the current network.
-
- Args:
- prefixlen_diff: An integer, the amount the prefix length of
- the network should be decreased by. For example, given a
- /24 network and a prefixlen_diff of 3, a supernet with a
- /21 netmask is returned.
-
- Returns:
- An IPv4 network object.
-
- Raises:
- ValueError: If self.prefixlen - prefixlen_diff < 0. I.e., you have
- a negative prefix length.
- OR
- If prefixlen_diff and new_prefix are both set or new_prefix is a
- larger number than the current prefix (larger number means a
- smaller network)
-
- """
- if self._prefixlen == 0:
- return self
-
- if new_prefix is not None:
- if new_prefix > self._prefixlen:
- raise ValueError('new prefix must be shorter')
- if prefixlen_diff != 1:
- raise ValueError('cannot set prefixlen_diff and new_prefix')
- prefixlen_diff = self._prefixlen - new_prefix
-
- new_prefixlen = self.prefixlen - prefixlen_diff
- if new_prefixlen < 0:
- raise ValueError(
- 'current prefixlen is %d, cannot have a prefixlen_diff of %d' %
- (self.prefixlen, prefixlen_diff))
- return self.__class__((
- int(self.network_address) & (int(self.netmask) << prefixlen_diff),
- new_prefixlen))
-
- @property
- def is_multicast(self):
- """Test if the address is reserved for multicast use.
-
- Returns:
- A boolean, True if the address is a multicast address.
- See RFC 2373 2.7 for details.
-
- """
- return (self.network_address.is_multicast and
- self.broadcast_address.is_multicast)
-
- def subnet_of(self, other):
- # always false if one is v4 and the other is v6.
- if self._version != other._version:
- return False
- # dealing with another network.
- if (hasattr(other, 'network_address') and
- hasattr(other, 'broadcast_address')):
- return (other.network_address <= self.network_address and
- other.broadcast_address >= self.broadcast_address)
- # dealing with another address
- else:
- raise TypeError('Unable to test subnet containment with element '
- 'of type %s' % type(other))
-
- def supernet_of(self, other):
- # always false if one is v4 and the other is v6.
- if self._version != other._version:
- return False
- # dealing with another network.
- if (hasattr(other, 'network_address') and
- hasattr(other, 'broadcast_address')):
- return (other.network_address >= self.network_address and
- other.broadcast_address <= self.broadcast_address)
- # dealing with another address
- else:
- raise TypeError('Unable to test subnet containment with element '
- 'of type %s' % type(other))
-
- @property
- def is_reserved(self):
- """Test if the address is otherwise IETF reserved.
-
- Returns:
- A boolean, True if the address is within one of the
- reserved IPv6 Network ranges.
-
- """
- return (self.network_address.is_reserved and
- self.broadcast_address.is_reserved)
-
- @property
- def is_link_local(self):
- """Test if the address is reserved for link-local.
-
- Returns:
- A boolean, True if the address is reserved per RFC 4291.
-
- """
- return (self.network_address.is_link_local and
- self.broadcast_address.is_link_local)
-
- @property
- def is_private(self):
- """Test if this address is allocated for private networks.
-
- Returns:
- A boolean, True if the address is reserved per
- iana-ipv4-special-registry or iana-ipv6-special-registry.
-
- """
- return (self.network_address.is_private and
- self.broadcast_address.is_private)
-
- @property
- def is_global(self):
- """Test if this address is allocated for public networks.
-
- Returns:
- A boolean, True if the address is not reserved per
- iana-ipv4-special-registry or iana-ipv6-special-registry.
-
- """
- return not self.is_private
-
- @property
- def is_unspecified(self):
- """Test if the address is unspecified.
-
- Returns:
- A boolean, True if this is the unspecified address as defined in
- RFC 2373 2.5.2.
-
- """
- return (self.network_address.is_unspecified and
- self.broadcast_address.is_unspecified)
-
- @property
- def is_loopback(self):
- """Test if the address is a loopback address.
-
- Returns:
- A boolean, True if the address is a loopback address as defined in
- RFC 2373 2.5.3.
-
- """
- return (self.network_address.is_loopback and
- self.broadcast_address.is_loopback)
-
-
-class _BaseV4(object):
-
- """Base IPv4 object.
-
- The following methods are used by IPv4 objects in both single IP
- addresses and networks.
-
- """
-
- __slots__ = ()
- _version = 4
- # Equivalent to 255.255.255.255 or 32 bits of 1's.
- _ALL_ONES = (2 ** IPV4LENGTH) - 1
- _DECIMAL_DIGITS = frozenset('0123456789')
-
- # the valid octets for host and netmasks. only useful for IPv4.
- _valid_mask_octets = frozenset([255, 254, 252, 248, 240, 224, 192, 128, 0])
-
- _max_prefixlen = IPV4LENGTH
- # There are only a handful of valid v4 netmasks, so we cache them all
- # when constructed (see _make_netmask()).
- _netmask_cache = {}
-
- def _explode_shorthand_ip_string(self):
- return _compat_str(self)
-
- @classmethod
- def _make_netmask(cls, arg):
- """Make a (netmask, prefix_len) tuple from the given argument.
-
- Argument can be:
- - an integer (the prefix length)
- - a string representing the prefix length (e.g. "24")
- - a string representing the prefix netmask (e.g. "255.255.255.0")
- """
- if arg not in cls._netmask_cache:
- if isinstance(arg, _compat_int_types):
- prefixlen = arg
- else:
- try:
- # Check for a netmask in prefix length form
- prefixlen = cls._prefix_from_prefix_string(arg)
- except NetmaskValueError:
- # Check for a netmask or hostmask in dotted-quad form.
- # This may raise NetmaskValueError.
- prefixlen = cls._prefix_from_ip_string(arg)
- netmask = IPv4Address(cls._ip_int_from_prefix(prefixlen))
- cls._netmask_cache[arg] = netmask, prefixlen
- return cls._netmask_cache[arg]
-
- @classmethod
- def _ip_int_from_string(cls, ip_str):
- """Turn the given IP string into an integer for comparison.
-
- Args:
- ip_str: A string, the IP ip_str.
-
- Returns:
- The IP ip_str as an integer.
-
- Raises:
- AddressValueError: if ip_str isn't a valid IPv4 Address.
-
- """
- if not ip_str:
- raise AddressValueError('Address cannot be empty')
-
- octets = ip_str.split('.')
- if len(octets) != 4:
- raise AddressValueError("Expected 4 octets in %r" % ip_str)
-
- try:
- return _compat_int_from_byte_vals(
- map(cls._parse_octet, octets), 'big')
- except ValueError as exc:
- raise AddressValueError("%s in %r" % (exc, ip_str))
-
- @classmethod
- def _parse_octet(cls, octet_str):
- """Convert a decimal octet into an integer.
-
- Args:
- octet_str: A string, the number to parse.
-
- Returns:
- The octet as an integer.
-
- Raises:
- ValueError: if the octet isn't strictly a decimal from [0..255].
-
- """
- if not octet_str:
- raise ValueError("Empty octet not permitted")
- # Whitelist the characters, since int() allows a lot of bizarre stuff.
- if not cls._DECIMAL_DIGITS.issuperset(octet_str):
- msg = "Only decimal digits permitted in %r"
- raise ValueError(msg % octet_str)
- # We do the length check second, since the invalid character error
- # is likely to be more informative for the user
- if len(octet_str) > 3:
- msg = "At most 3 characters permitted in %r"
- raise ValueError(msg % octet_str)
- # Convert to integer (we know digits are legal)
- octet_int = int(octet_str, 10)
- # Any octets that look like they *might* be written in octal,
- # and which don't look exactly the same in both octal and
- # decimal are rejected as ambiguous
- if octet_int > 7 and octet_str[0] == '0':
- msg = "Ambiguous (octal/decimal) value in %r not permitted"
- raise ValueError(msg % octet_str)
- if octet_int > 255:
- raise ValueError("Octet %d (> 255) not permitted" % octet_int)
- return octet_int
-
- @classmethod
- def _string_from_ip_int(cls, ip_int):
- """Turns a 32-bit integer into dotted decimal notation.
-
- Args:
- ip_int: An integer, the IP address.
-
- Returns:
- The IP address as a string in dotted decimal notation.
-
- """
- return '.'.join(_compat_str(struct.unpack(b'!B', b)[0]
- if isinstance(b, bytes)
- else b)
- for b in _compat_to_bytes(ip_int, 4, 'big'))
-
- def _is_hostmask(self, ip_str):
- """Test if the IP string is a hostmask (rather than a netmask).
-
- Args:
- ip_str: A string, the potential hostmask.
-
- Returns:
- A boolean, True if the IP string is a hostmask.
-
- """
- bits = ip_str.split('.')
- try:
- parts = [x for x in map(int, bits) if x in self._valid_mask_octets]
- except ValueError:
- return False
- if len(parts) != len(bits):
- return False
- if parts[0] < parts[-1]:
- return True
- return False
-
- def _reverse_pointer(self):
- """Return the reverse DNS pointer name for the IPv4 address.
-
- This implements the method described in RFC1035 3.5.
-
- """
- reverse_octets = _compat_str(self).split('.')[::-1]
- return '.'.join(reverse_octets) + '.in-addr.arpa'
-
- @property
- def max_prefixlen(self):
- return self._max_prefixlen
-
- @property
- def version(self):
- return self._version
-
-
-class IPv4Address(_BaseV4, _BaseAddress):
-
- """Represent and manipulate single IPv4 Addresses."""
-
- __slots__ = ('_ip', '__weakref__')
-
- def __init__(self, address):
-
- """
- Args:
- address: A string or integer representing the IP
-
- Additionally, an integer can be passed, so
- IPv4Address('192.0.2.1') == IPv4Address(3221225985).
- or, more generally
- IPv4Address(int(IPv4Address('192.0.2.1'))) ==
- IPv4Address('192.0.2.1')
-
- Raises:
- AddressValueError: If ipaddress isn't a valid IPv4 address.
-
- """
- # Efficient constructor from integer.
- if isinstance(address, _compat_int_types):
- self._check_int_address(address)
- self._ip = address
- return
-
- # Constructing from a packed address
- if isinstance(address, bytes):
- self._check_packed_address(address, 4)
- bvs = _compat_bytes_to_byte_vals(address)
- self._ip = _compat_int_from_byte_vals(bvs, 'big')
- return
-
- # Assume input argument to be string or any object representation
- # which converts into a formatted IP string.
- addr_str = _compat_str(address)
- if '/' in addr_str:
- raise AddressValueError("Unexpected '/' in %r" % address)
- self._ip = self._ip_int_from_string(addr_str)
-
- @property
- def packed(self):
- """The binary representation of this address."""
- return v4_int_to_packed(self._ip)
-
- @property
- def is_reserved(self):
- """Test if the address is otherwise IETF reserved.
-
- Returns:
- A boolean, True if the address is within the
- reserved IPv4 Network range.
-
- """
- return self in self._constants._reserved_network
-
- @property
- def is_private(self):
- """Test if this address is allocated for private networks.
-
- Returns:
- A boolean, True if the address is reserved per
- iana-ipv4-special-registry.
-
- """
- return any(self in net for net in self._constants._private_networks)
-
- @property
- def is_global(self):
- return (
- self not in self._constants._public_network and
- not self.is_private)
-
- @property
- def is_multicast(self):
- """Test if the address is reserved for multicast use.
-
- Returns:
- A boolean, True if the address is multicast.
- See RFC 3171 for details.
-
- """
- return self in self._constants._multicast_network
-
- @property
- def is_unspecified(self):
- """Test if the address is unspecified.
-
- Returns:
- A boolean, True if this is the unspecified address as defined in
- RFC 5735 3.
-
- """
- return self == self._constants._unspecified_address
-
- @property
- def is_loopback(self):
- """Test if the address is a loopback address.
-
- Returns:
- A boolean, True if the address is a loopback per RFC 3330.
-
- """
- return self in self._constants._loopback_network
-
- @property
- def is_link_local(self):
- """Test if the address is reserved for link-local.
-
- Returns:
- A boolean, True if the address is link-local per RFC 3927.
-
- """
- return self in self._constants._linklocal_network
-
-
-class IPv4Interface(IPv4Address):
-
- def __init__(self, address):
- if isinstance(address, (bytes, _compat_int_types)):
- IPv4Address.__init__(self, address)
- self.network = IPv4Network(self._ip)
- self._prefixlen = self._max_prefixlen
- return
-
- if isinstance(address, tuple):
- IPv4Address.__init__(self, address[0])
- if len(address) > 1:
- self._prefixlen = int(address[1])
- else:
- self._prefixlen = self._max_prefixlen
-
- self.network = IPv4Network(address, strict=False)
- self.netmask = self.network.netmask
- self.hostmask = self.network.hostmask
- return
-
- addr = _split_optional_netmask(address)
- IPv4Address.__init__(self, addr[0])
-
- self.network = IPv4Network(address, strict=False)
- self._prefixlen = self.network._prefixlen
-
- self.netmask = self.network.netmask
- self.hostmask = self.network.hostmask
-
- def __str__(self):
- return '%s/%d' % (self._string_from_ip_int(self._ip),
- self.network.prefixlen)
-
- def __eq__(self, other):
- address_equal = IPv4Address.__eq__(self, other)
- if not address_equal or address_equal is NotImplemented:
- return address_equal
- try:
- return self.network == other.network
- except AttributeError:
- # An interface with an associated network is NOT the
- # same as an unassociated address. That's why the hash
- # takes the extra info into account.
- return False
-
- def __lt__(self, other):
- address_less = IPv4Address.__lt__(self, other)
- if address_less is NotImplemented:
- return NotImplemented
- try:
- return self.network < other.network
- except AttributeError:
- # We *do* allow addresses and interfaces to be sorted. The
- # unassociated address is considered less than all interfaces.
- return False
-
- def __hash__(self):
- return self._ip ^ self._prefixlen ^ int(self.network.network_address)
-
- __reduce__ = _IPAddressBase.__reduce__
-
- @property
- def ip(self):
- return IPv4Address(self._ip)
-
- @property
- def with_prefixlen(self):
- return '%s/%s' % (self._string_from_ip_int(self._ip),
- self._prefixlen)
-
- @property
- def with_netmask(self):
- return '%s/%s' % (self._string_from_ip_int(self._ip),
- self.netmask)
-
- @property
- def with_hostmask(self):
- return '%s/%s' % (self._string_from_ip_int(self._ip),
- self.hostmask)
-
-
-class IPv4Network(_BaseV4, _BaseNetwork):
-
- """This class represents and manipulates 32-bit IPv4 network + addresses..
-
- Attributes: [examples for IPv4Network('192.0.2.0/27')]
- .network_address: IPv4Address('192.0.2.0')
- .hostmask: IPv4Address('0.0.0.31')
- .broadcast_address: IPv4Address('192.0.2.32')
- .netmask: IPv4Address('255.255.255.224')
- .prefixlen: 27
-
- """
- # Class to use when creating address objects
- _address_class = IPv4Address
-
- def __init__(self, address, strict=True):
-
- """Instantiate a new IPv4 network object.
-
- Args:
- address: A string or integer representing the IP [& network].
- '192.0.2.0/24'
- '192.0.2.0/255.255.255.0'
- '192.0.0.2/0.0.0.255'
- are all functionally the same in IPv4. Similarly,
- '192.0.2.1'
- '192.0.2.1/255.255.255.255'
- '192.0.2.1/32'
- are also functionally equivalent. That is to say, failing to
- provide a subnetmask will create an object with a mask of /32.
-
- If the mask (portion after the / in the argument) is given in
- dotted quad form, it is treated as a netmask if it starts with a
- non-zero field (e.g. /255.0.0.0 == /8) and as a hostmask if it
- starts with a zero field (e.g. 0.255.255.255 == /8), with the
- single exception of an all-zero mask which is treated as a
- netmask == /0. If no mask is given, a default of /32 is used.
-
- Additionally, an integer can be passed, so
- IPv4Network('192.0.2.1') == IPv4Network(3221225985)
- or, more generally
- IPv4Interface(int(IPv4Interface('192.0.2.1'))) ==
- IPv4Interface('192.0.2.1')
-
- Raises:
- AddressValueError: If ipaddress isn't a valid IPv4 address.
- NetmaskValueError: If the netmask isn't valid for
- an IPv4 address.
- ValueError: If strict is True and a network address is not
- supplied.
-
- """
- _BaseNetwork.__init__(self, address)
-
- # Constructing from a packed address or integer
- if isinstance(address, (_compat_int_types, bytes)):
- self.network_address = IPv4Address(address)
- self.netmask, self._prefixlen = self._make_netmask(
- self._max_prefixlen)
- # fixme: address/network test here.
- return
-
- if isinstance(address, tuple):
- if len(address) > 1:
- arg = address[1]
- else:
- # We weren't given an address[1]
- arg = self._max_prefixlen
- self.network_address = IPv4Address(address[0])
- self.netmask, self._prefixlen = self._make_netmask(arg)
- packed = int(self.network_address)
- if packed & int(self.netmask) != packed:
- if strict:
- raise ValueError('%s has host bits set' % self)
- else:
- self.network_address = IPv4Address(packed &
- int(self.netmask))
- return
-
- # Assume input argument to be string or any object representation
- # which converts into a formatted IP prefix string.
- addr = _split_optional_netmask(address)
- self.network_address = IPv4Address(self._ip_int_from_string(addr[0]))
-
- if len(addr) == 2:
- arg = addr[1]
- else:
- arg = self._max_prefixlen
- self.netmask, self._prefixlen = self._make_netmask(arg)
-
- if strict:
- if (IPv4Address(int(self.network_address) & int(self.netmask)) !=
- self.network_address):
- raise ValueError('%s has host bits set' % self)
- self.network_address = IPv4Address(int(self.network_address) &
- int(self.netmask))
-
- if self._prefixlen == (self._max_prefixlen - 1):
- self.hosts = self.__iter__
-
- @property
- def is_global(self):
- """Test if this address is allocated for public networks.
-
- Returns:
- A boolean, True if the address is not reserved per
- iana-ipv4-special-registry.
-
- """
- return (not (self.network_address in IPv4Network('100.64.0.0/10') and
- self.broadcast_address in IPv4Network('100.64.0.0/10')) and
- not self.is_private)
-
-
-class _IPv4Constants(object):
-
- _linklocal_network = IPv4Network('169.254.0.0/16')
-
- _loopback_network = IPv4Network('127.0.0.0/8')
-
- _multicast_network = IPv4Network('224.0.0.0/4')
-
- _public_network = IPv4Network('100.64.0.0/10')
-
- _private_networks = [
- IPv4Network('0.0.0.0/8'),
- IPv4Network('10.0.0.0/8'),
- IPv4Network('127.0.0.0/8'),
- IPv4Network('169.254.0.0/16'),
- IPv4Network('172.16.0.0/12'),
- IPv4Network('192.0.0.0/29'),
- IPv4Network('192.0.0.170/31'),
- IPv4Network('192.0.2.0/24'),
- IPv4Network('192.168.0.0/16'),
- IPv4Network('198.18.0.0/15'),
- IPv4Network('198.51.100.0/24'),
- IPv4Network('203.0.113.0/24'),
- IPv4Network('240.0.0.0/4'),
- IPv4Network('255.255.255.255/32'),
- ]
-
- _reserved_network = IPv4Network('240.0.0.0/4')
-
- _unspecified_address = IPv4Address('0.0.0.0')
-
-
-IPv4Address._constants = _IPv4Constants
-
-
-class _BaseV6(object):
-
- """Base IPv6 object.
-
- The following methods are used by IPv6 objects in both single IP
- addresses and networks.
-
- """
-
- __slots__ = ()
- _version = 6
- _ALL_ONES = (2 ** IPV6LENGTH) - 1
- _HEXTET_COUNT = 8
- _HEX_DIGITS = frozenset('0123456789ABCDEFabcdef')
- _max_prefixlen = IPV6LENGTH
-
- # There are only a bunch of valid v6 netmasks, so we cache them all
- # when constructed (see _make_netmask()).
- _netmask_cache = {}
-
- @classmethod
- def _make_netmask(cls, arg):
- """Make a (netmask, prefix_len) tuple from the given argument.
-
- Argument can be:
- - an integer (the prefix length)
- - a string representing the prefix length (e.g. "24")
- - a string representing the prefix netmask (e.g. "255.255.255.0")
- """
- if arg not in cls._netmask_cache:
- if isinstance(arg, _compat_int_types):
- prefixlen = arg
- else:
- prefixlen = cls._prefix_from_prefix_string(arg)
- netmask = IPv6Address(cls._ip_int_from_prefix(prefixlen))
- cls._netmask_cache[arg] = netmask, prefixlen
- return cls._netmask_cache[arg]
-
- @classmethod
- def _ip_int_from_string(cls, ip_str):
- """Turn an IPv6 ip_str into an integer.
-
- Args:
- ip_str: A string, the IPv6 ip_str.
-
- Returns:
- An int, the IPv6 address
-
- Raises:
- AddressValueError: if ip_str isn't a valid IPv6 Address.
-
- """
- if not ip_str:
- raise AddressValueError('Address cannot be empty')
-
- parts = ip_str.split(':')
-
- # An IPv6 address needs at least 2 colons (3 parts).
- _min_parts = 3
- if len(parts) < _min_parts:
- msg = "At least %d parts expected in %r" % (_min_parts, ip_str)
- raise AddressValueError(msg)
-
- # If the address has an IPv4-style suffix, convert it to hexadecimal.
- if '.' in parts[-1]:
- try:
- ipv4_int = IPv4Address(parts.pop())._ip
- except AddressValueError as exc:
- raise AddressValueError("%s in %r" % (exc, ip_str))
- parts.append('%x' % ((ipv4_int >> 16) & 0xFFFF))
- parts.append('%x' % (ipv4_int & 0xFFFF))
-
- # An IPv6 address can't have more than 8 colons (9 parts).
- # The extra colon comes from using the "::" notation for a single
- # leading or trailing zero part.
- _max_parts = cls._HEXTET_COUNT + 1
- if len(parts) > _max_parts:
- msg = "At most %d colons permitted in %r" % (
- _max_parts - 1, ip_str)
- raise AddressValueError(msg)
-
- # Disregarding the endpoints, find '::' with nothing in between.
- # This indicates that a run of zeroes has been skipped.
- skip_index = None
- for i in _compat_range(1, len(parts) - 1):
- if not parts[i]:
- if skip_index is not None:
- # Can't have more than one '::'
- msg = "At most one '::' permitted in %r" % ip_str
- raise AddressValueError(msg)
- skip_index = i
-
- # parts_hi is the number of parts to copy from above/before the '::'
- # parts_lo is the number of parts to copy from below/after the '::'
- if skip_index is not None:
- # If we found a '::', then check if it also covers the endpoints.
- parts_hi = skip_index
- parts_lo = len(parts) - skip_index - 1
- if not parts[0]:
- parts_hi -= 1
- if parts_hi:
- msg = "Leading ':' only permitted as part of '::' in %r"
- raise AddressValueError(msg % ip_str) # ^: requires ^::
- if not parts[-1]:
- parts_lo -= 1
- if parts_lo:
- msg = "Trailing ':' only permitted as part of '::' in %r"
- raise AddressValueError(msg % ip_str) # :$ requires ::$
- parts_skipped = cls._HEXTET_COUNT - (parts_hi + parts_lo)
- if parts_skipped < 1:
- msg = "Expected at most %d other parts with '::' in %r"
- raise AddressValueError(msg % (cls._HEXTET_COUNT - 1, ip_str))
- else:
- # Otherwise, allocate the entire address to parts_hi. The
- # endpoints could still be empty, but _parse_hextet() will check
- # for that.
- if len(parts) != cls._HEXTET_COUNT:
- msg = "Exactly %d parts expected without '::' in %r"
- raise AddressValueError(msg % (cls._HEXTET_COUNT, ip_str))
- if not parts[0]:
- msg = "Leading ':' only permitted as part of '::' in %r"
- raise AddressValueError(msg % ip_str) # ^: requires ^::
- if not parts[-1]:
- msg = "Trailing ':' only permitted as part of '::' in %r"
- raise AddressValueError(msg % ip_str) # :$ requires ::$
- parts_hi = len(parts)
- parts_lo = 0
- parts_skipped = 0
-
- try:
- # Now, parse the hextets into a 128-bit integer.
- ip_int = 0
- for i in range(parts_hi):
- ip_int <<= 16
- ip_int |= cls._parse_hextet(parts[i])
- ip_int <<= 16 * parts_skipped
- for i in range(-parts_lo, 0):
- ip_int <<= 16
- ip_int |= cls._parse_hextet(parts[i])
- return ip_int
- except ValueError as exc:
- raise AddressValueError("%s in %r" % (exc, ip_str))
-
- @classmethod
- def _parse_hextet(cls, hextet_str):
- """Convert an IPv6 hextet string into an integer.
-
- Args:
- hextet_str: A string, the number to parse.
-
- Returns:
- The hextet as an integer.
-
- Raises:
- ValueError: if the input isn't strictly a hex number from
- [0..FFFF].
-
- """
- # Whitelist the characters, since int() allows a lot of bizarre stuff.
- if not cls._HEX_DIGITS.issuperset(hextet_str):
- raise ValueError("Only hex digits permitted in %r" % hextet_str)
- # We do the length check second, since the invalid character error
- # is likely to be more informative for the user
- if len(hextet_str) > 4:
- msg = "At most 4 characters permitted in %r"
- raise ValueError(msg % hextet_str)
- # Length check means we can skip checking the integer value
- return int(hextet_str, 16)
-
- @classmethod
- def _compress_hextets(cls, hextets):
- """Compresses a list of hextets.
-
- Compresses a list of strings, replacing the longest continuous
- sequence of "0" in the list with "" and adding empty strings at
- the beginning or at the end of the string such that subsequently
- calling ":".join(hextets) will produce the compressed version of
- the IPv6 address.
-
- Args:
- hextets: A list of strings, the hextets to compress.
-
- Returns:
- A list of strings.
-
- """
- best_doublecolon_start = -1
- best_doublecolon_len = 0
- doublecolon_start = -1
- doublecolon_len = 0
- for index, hextet in enumerate(hextets):
- if hextet == '0':
- doublecolon_len += 1
- if doublecolon_start == -1:
- # Start of a sequence of zeros.
- doublecolon_start = index
- if doublecolon_len > best_doublecolon_len:
- # This is the longest sequence of zeros so far.
- best_doublecolon_len = doublecolon_len
- best_doublecolon_start = doublecolon_start
- else:
- doublecolon_len = 0
- doublecolon_start = -1
-
- if best_doublecolon_len > 1:
- best_doublecolon_end = (best_doublecolon_start +
- best_doublecolon_len)
- # For zeros at the end of the address.
- if best_doublecolon_end == len(hextets):
- hextets += ['']
- hextets[best_doublecolon_start:best_doublecolon_end] = ['']
- # For zeros at the beginning of the address.
- if best_doublecolon_start == 0:
- hextets = [''] + hextets
-
- return hextets
-
- @classmethod
- def _string_from_ip_int(cls, ip_int=None):
- """Turns a 128-bit integer into hexadecimal notation.
-
- Args:
- ip_int: An integer, the IP address.
-
- Returns:
- A string, the hexadecimal representation of the address.
-
- Raises:
- ValueError: The address is bigger than 128 bits of all ones.
-
- """
- if ip_int is None:
- ip_int = int(cls._ip)
-
- if ip_int > cls._ALL_ONES:
- raise ValueError('IPv6 address is too large')
-
- hex_str = '%032x' % ip_int
- hextets = ['%x' % int(hex_str[x:x + 4], 16) for x in range(0, 32, 4)]
-
- hextets = cls._compress_hextets(hextets)
- return ':'.join(hextets)
-
- def _explode_shorthand_ip_string(self):
- """Expand a shortened IPv6 address.
-
- Args:
- ip_str: A string, the IPv6 address.
-
- Returns:
- A string, the expanded IPv6 address.
-
- """
- if isinstance(self, IPv6Network):
- ip_str = _compat_str(self.network_address)
- elif isinstance(self, IPv6Interface):
- ip_str = _compat_str(self.ip)
- else:
- ip_str = _compat_str(self)
-
- ip_int = self._ip_int_from_string(ip_str)
- hex_str = '%032x' % ip_int
- parts = [hex_str[x:x + 4] for x in range(0, 32, 4)]
- if isinstance(self, (_BaseNetwork, IPv6Interface)):
- return '%s/%d' % (':'.join(parts), self._prefixlen)
- return ':'.join(parts)
-
- def _reverse_pointer(self):
- """Return the reverse DNS pointer name for the IPv6 address.
-
- This implements the method described in RFC3596 2.5.
-
- """
- reverse_chars = self.exploded[::-1].replace(':', '')
- return '.'.join(reverse_chars) + '.ip6.arpa'
-
- @property
- def max_prefixlen(self):
- return self._max_prefixlen
-
- @property
- def version(self):
- return self._version
-
-
-class IPv6Address(_BaseV6, _BaseAddress):
-
- """Represent and manipulate single IPv6 Addresses."""
-
- __slots__ = ('_ip', '__weakref__')
-
- def __init__(self, address):
- """Instantiate a new IPv6 address object.
-
- Args:
- address: A string or integer representing the IP
-
- Additionally, an integer can be passed, so
- IPv6Address('2001:db8::') ==
- IPv6Address(42540766411282592856903984951653826560)
- or, more generally
- IPv6Address(int(IPv6Address('2001:db8::'))) ==
- IPv6Address('2001:db8::')
-
- Raises:
- AddressValueError: If address isn't a valid IPv6 address.
-
- """
- # Efficient constructor from integer.
- if isinstance(address, _compat_int_types):
- self._check_int_address(address)
- self._ip = address
- return
-
- # Constructing from a packed address
- if isinstance(address, bytes):
- self._check_packed_address(address, 16)
- bvs = _compat_bytes_to_byte_vals(address)
- self._ip = _compat_int_from_byte_vals(bvs, 'big')
- return
-
- # Assume input argument to be string or any object representation
- # which converts into a formatted IP string.
- addr_str = _compat_str(address)
- if '/' in addr_str:
- raise AddressValueError("Unexpected '/' in %r" % address)
- self._ip = self._ip_int_from_string(addr_str)
-
- @property
- def packed(self):
- """The binary representation of this address."""
- return v6_int_to_packed(self._ip)
-
- @property
- def is_multicast(self):
- """Test if the address is reserved for multicast use.
-
- Returns:
- A boolean, True if the address is a multicast address.
- See RFC 2373 2.7 for details.
-
- """
- return self in self._constants._multicast_network
-
- @property
- def is_reserved(self):
- """Test if the address is otherwise IETF reserved.
-
- Returns:
- A boolean, True if the address is within one of the
- reserved IPv6 Network ranges.
-
- """
- return any(self in x for x in self._constants._reserved_networks)
-
- @property
- def is_link_local(self):
- """Test if the address is reserved for link-local.
-
- Returns:
- A boolean, True if the address is reserved per RFC 4291.
-
- """
- return self in self._constants._linklocal_network
-
- @property
- def is_site_local(self):
- """Test if the address is reserved for site-local.
-
- Note that the site-local address space has been deprecated by RFC 3879.
- Use is_private to test if this address is in the space of unique local
- addresses as defined by RFC 4193.
-
- Returns:
- A boolean, True if the address is reserved per RFC 3513 2.5.6.
-
- """
- return self in self._constants._sitelocal_network
-
- @property
- def is_private(self):
- """Test if this address is allocated for private networks.
-
- Returns:
- A boolean, True if the address is reserved per
- iana-ipv6-special-registry.
-
- """
- return any(self in net for net in self._constants._private_networks)
-
- @property
- def is_global(self):
- """Test if this address is allocated for public networks.
-
- Returns:
- A boolean, true if the address is not reserved per
- iana-ipv6-special-registry.
-
- """
- return not self.is_private
-
- @property
- def is_unspecified(self):
- """Test if the address is unspecified.
-
- Returns:
- A boolean, True if this is the unspecified address as defined in
- RFC 2373 2.5.2.
-
- """
- return self._ip == 0
-
- @property
- def is_loopback(self):
- """Test if the address is a loopback address.
-
- Returns:
- A boolean, True if the address is a loopback address as defined in
- RFC 2373 2.5.3.
-
- """
- return self._ip == 1
-
- @property
- def ipv4_mapped(self):
- """Return the IPv4 mapped address.
-
- Returns:
- If the IPv6 address is a v4 mapped address, return the
- IPv4 mapped address. Return None otherwise.
-
- """
- if (self._ip >> 32) != 0xFFFF:
- return None
- return IPv4Address(self._ip & 0xFFFFFFFF)
-
- @property
- def teredo(self):
- """Tuple of embedded teredo IPs.
-
- Returns:
- Tuple of the (server, client) IPs or None if the address
- doesn't appear to be a teredo address (doesn't start with
- 2001::/32)
-
- """
- if (self._ip >> 96) != 0x20010000:
- return None
- return (IPv4Address((self._ip >> 64) & 0xFFFFFFFF),
- IPv4Address(~self._ip & 0xFFFFFFFF))
-
- @property
- def sixtofour(self):
- """Return the IPv4 6to4 embedded address.
-
- Returns:
- The IPv4 6to4-embedded address if present or None if the
- address doesn't appear to contain a 6to4 embedded address.
-
- """
- if (self._ip >> 112) != 0x2002:
- return None
- return IPv4Address((self._ip >> 80) & 0xFFFFFFFF)
-
-
-class IPv6Interface(IPv6Address):
-
- def __init__(self, address):
- if isinstance(address, (bytes, _compat_int_types)):
- IPv6Address.__init__(self, address)
- self.network = IPv6Network(self._ip)
- self._prefixlen = self._max_prefixlen
- return
- if isinstance(address, tuple):
- IPv6Address.__init__(self, address[0])
- if len(address) > 1:
- self._prefixlen = int(address[1])
- else:
- self._prefixlen = self._max_prefixlen
- self.network = IPv6Network(address, strict=False)
- self.netmask = self.network.netmask
- self.hostmask = self.network.hostmask
- return
-
- addr = _split_optional_netmask(address)
- IPv6Address.__init__(self, addr[0])
- self.network = IPv6Network(address, strict=False)
- self.netmask = self.network.netmask
- self._prefixlen = self.network._prefixlen
- self.hostmask = self.network.hostmask
-
- def __str__(self):
- return '%s/%d' % (self._string_from_ip_int(self._ip),
- self.network.prefixlen)
-
- def __eq__(self, other):
- address_equal = IPv6Address.__eq__(self, other)
- if not address_equal or address_equal is NotImplemented:
- return address_equal
- try:
- return self.network == other.network
- except AttributeError:
- # An interface with an associated network is NOT the
- # same as an unassociated address. That's why the hash
- # takes the extra info into account.
- return False
-
- def __lt__(self, other):
- address_less = IPv6Address.__lt__(self, other)
- if address_less is NotImplemented:
- return NotImplemented
- try:
- return self.network < other.network
- except AttributeError:
- # We *do* allow addresses and interfaces to be sorted. The
- # unassociated address is considered less than all interfaces.
- return False
-
- def __hash__(self):
- return self._ip ^ self._prefixlen ^ int(self.network.network_address)
-
- __reduce__ = _IPAddressBase.__reduce__
-
- @property
- def ip(self):
- return IPv6Address(self._ip)
-
- @property
- def with_prefixlen(self):
- return '%s/%s' % (self._string_from_ip_int(self._ip),
- self._prefixlen)
-
- @property
- def with_netmask(self):
- return '%s/%s' % (self._string_from_ip_int(self._ip),
- self.netmask)
-
- @property
- def with_hostmask(self):
- return '%s/%s' % (self._string_from_ip_int(self._ip),
- self.hostmask)
-
- @property
- def is_unspecified(self):
- return self._ip == 0 and self.network.is_unspecified
-
- @property
- def is_loopback(self):
- return self._ip == 1 and self.network.is_loopback
-
-
-class IPv6Network(_BaseV6, _BaseNetwork):
-
- """This class represents and manipulates 128-bit IPv6 networks.
-
- Attributes: [examples for IPv6('2001:db8::1000/124')]
- .network_address: IPv6Address('2001:db8::1000')
- .hostmask: IPv6Address('::f')
- .broadcast_address: IPv6Address('2001:db8::100f')
- .netmask: IPv6Address('ffff:ffff:ffff:ffff:ffff:ffff:ffff:fff0')
- .prefixlen: 124
-
- """
-
- # Class to use when creating address objects
- _address_class = IPv6Address
-
- def __init__(self, address, strict=True):
- """Instantiate a new IPv6 Network object.
-
- Args:
- address: A string or integer representing the IPv6 network or the
- IP and prefix/netmask.
- '2001:db8::/128'
- '2001:db8:0000:0000:0000:0000:0000:0000/128'
- '2001:db8::'
- are all functionally the same in IPv6. That is to say,
- failing to provide a subnetmask will create an object with
- a mask of /128.
-
- Additionally, an integer can be passed, so
- IPv6Network('2001:db8::') ==
- IPv6Network(42540766411282592856903984951653826560)
- or, more generally
- IPv6Network(int(IPv6Network('2001:db8::'))) ==
- IPv6Network('2001:db8::')
-
- strict: A boolean. If true, ensure that we have been passed
- A true network address, eg, 2001:db8::1000/124 and not an
- IP address on a network, eg, 2001:db8::1/124.
-
- Raises:
- AddressValueError: If address isn't a valid IPv6 address.
- NetmaskValueError: If the netmask isn't valid for
- an IPv6 address.
- ValueError: If strict was True and a network address was not
- supplied.
-
- """
- _BaseNetwork.__init__(self, address)
-
- # Efficient constructor from integer or packed address
- if isinstance(address, (bytes, _compat_int_types)):
- self.network_address = IPv6Address(address)
- self.netmask, self._prefixlen = self._make_netmask(
- self._max_prefixlen)
- return
-
- if isinstance(address, tuple):
- if len(address) > 1:
- arg = address[1]
- else:
- arg = self._max_prefixlen
- self.netmask, self._prefixlen = self._make_netmask(arg)
- self.network_address = IPv6Address(address[0])
- packed = int(self.network_address)
- if packed & int(self.netmask) != packed:
- if strict:
- raise ValueError('%s has host bits set' % self)
- else:
- self.network_address = IPv6Address(packed &
- int(self.netmask))
- return
-
- # Assume input argument to be string or any object representation
- # which converts into a formatted IP prefix string.
- addr = _split_optional_netmask(address)
-
- self.network_address = IPv6Address(self._ip_int_from_string(addr[0]))
-
- if len(addr) == 2:
- arg = addr[1]
- else:
- arg = self._max_prefixlen
- self.netmask, self._prefixlen = self._make_netmask(arg)
-
- if strict:
- if (IPv6Address(int(self.network_address) & int(self.netmask)) !=
- self.network_address):
- raise ValueError('%s has host bits set' % self)
- self.network_address = IPv6Address(int(self.network_address) &
- int(self.netmask))
-
- if self._prefixlen == (self._max_prefixlen - 1):
- self.hosts = self.__iter__
-
- def hosts(self):
- """Generate Iterator over usable hosts in a network.
-
- This is like __iter__ except it doesn't return the
- Subnet-Router anycast address.
-
- """
- network = int(self.network_address)
- broadcast = int(self.broadcast_address)
- for x in _compat_range(network + 1, broadcast + 1):
- yield self._address_class(x)
-
- @property
- def is_site_local(self):
- """Test if the address is reserved for site-local.
-
- Note that the site-local address space has been deprecated by RFC 3879.
- Use is_private to test if this address is in the space of unique local
- addresses as defined by RFC 4193.
-
- Returns:
- A boolean, True if the address is reserved per RFC 3513 2.5.6.
-
- """
- return (self.network_address.is_site_local and
- self.broadcast_address.is_site_local)
-
-
-class _IPv6Constants(object):
-
- _linklocal_network = IPv6Network('fe80::/10')
-
- _multicast_network = IPv6Network('ff00::/8')
-
- _private_networks = [
- IPv6Network('::1/128'),
- IPv6Network('::/128'),
- IPv6Network('::ffff:0:0/96'),
- IPv6Network('100::/64'),
- IPv6Network('2001::/23'),
- IPv6Network('2001:2::/48'),
- IPv6Network('2001:db8::/32'),
- IPv6Network('2001:10::/28'),
- IPv6Network('fc00::/7'),
- IPv6Network('fe80::/10'),
- ]
-
- _reserved_networks = [
- IPv6Network('::/8'), IPv6Network('100::/8'),
- IPv6Network('200::/7'), IPv6Network('400::/6'),
- IPv6Network('800::/5'), IPv6Network('1000::/4'),
- IPv6Network('4000::/3'), IPv6Network('6000::/3'),
- IPv6Network('8000::/3'), IPv6Network('A000::/3'),
- IPv6Network('C000::/3'), IPv6Network('E000::/4'),
- IPv6Network('F000::/5'), IPv6Network('F800::/6'),
- IPv6Network('FE00::/9'),
- ]
-
- _sitelocal_network = IPv6Network('fec0::/10')
-
-
-IPv6Address._constants = _IPv6Constants
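
The entire vendored ipaddress backport is dropped above. Assuming the bundle now targets the Python 3.8 runtime suggested by the cpython-38 artifacts elsewhere in this change, the standard-library ipaddress module offers the same interface; a minimal sketch of the equivalent stdlib usage, reusing the values from the address_exclude docstring above:

    # Minimal sketch: the stdlib ipaddress module (Python 3.3+) covers what the
    # deleted backport provided. Values mirror the address_exclude docstring.
    import ipaddress

    net = ipaddress.ip_network('192.0.2.0/28')
    host = ipaddress.ip_network('192.0.2.1/32')
    print(sorted(net.address_exclude(host)))
    # [IPv4Network('192.0.2.0/32'), IPv4Network('192.0.2.2/31'),
    #  IPv4Network('192.0.2.4/30'), IPv4Network('192.0.2.8/29')]
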
diff --git a/functions/source/CreateSSHKey/lambda_function.py b/functions/source/CreateSSHKey/lambda_function.py
index 9797010..42fc73d 100644
--- a/functions/source/CreateSSHKey/lambda_function.py
+++ b/functions/source/CreateSSHKey/lambda_function.py
@@ -1,45 +1,60 @@
-# Copyright 2016 Amazon Web Services, Inc. or its affiliates. All Rights Reserved.
-# This file is licensed to you under the AWS Customer Agreement (the "License").
+# Copyright 2020 Amazon Web Services, Inc. or its affiliates.
+# All Rights Reserved.
+# This file is licensed to you under the AWS Customer Agreement
+# (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at http://aws.amazon.com/agreement/ .
-# This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, express or implied.
-# See the License for the specific language governing permissions and limitations under the License.
+# This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
+# CONDITIONS OF ANY KIND, express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
import cfnresponse
import traceback
import boto3
-from cryptography.hazmat.primitives import serialization as crypto_serialization
+from cryptography.hazmat.primitives import serialization as \
+ crypto_serialization
from cryptography.hazmat.primitives.asymmetric import rsa
-from cryptography.hazmat.backends import default_backend as crypto_default_backend
+from cryptography.hazmat.backends import default_backend as \
+ crypto_default_backend
+
def lambda_handler(event,context):
try:
if event['RequestType'] == 'Create':
# Generate keys
- new_key = rsa.generate_private_key(backend=crypto_default_backend(), public_exponent=65537, key_size=2048)
- priv_key = new_key.private_bytes(
+ new_key = rsa.generate_private_key(
+ backend=crypto_default_backend(), public_exponent=65537,
+ key_size=2048)
+ priv_key = str(new_key.private_bytes(
crypto_serialization.Encoding.PEM,
crypto_serialization.PrivateFormat.PKCS8,
crypto_serialization.NoEncryption()
- )
- pub_key = new_key.public_key().public_bytes(
+ ), 'utf-8')
+ pub_key = str(new_key.public_key().public_bytes(
crypto_serialization.Encoding.OpenSSH,
crypto_serialization.PublicFormat.OpenSSH
- )
+ ), 'utf-8')
print(priv_key)
print(pub_key)
# Encrypt private key
- kms = boto3.client('kms',region_name=event["ResourceProperties"]["Region"])
- enc_key = kms.encrypt(KeyId=event["ResourceProperties"]["KMSKey"],Plaintext=priv_key)['CiphertextBlob']
- f = open('/tmp/enc_key','wb')
+ kms = boto3.client(
+ 'kms', region_name=event["ResourceProperties"]["Region"])
+ enc_key = kms.encrypt(
+ KeyId=event["ResourceProperties"]["KMSKey"],
+ Plaintext=priv_key)['CiphertextBlob']
+ f = open('/tmp/enc_key', 'wb')
f.write(enc_key)
f.close()
             # Upload encrypted private key to S3
s3 = boto3.client('s3')
- s3.upload_file('/tmp/enc_key',event["ResourceProperties"]["KeyBucket"],'enc_key')
+ s3.upload_file('/tmp/enc_key',
+ event["ResourceProperties"]["KeyBucket"], 'enc_key')
else:
pub_key = event['PhysicalResourceId']
cfnresponse.send(event, context, cfnresponse.SUCCESS, {}, pub_key)
except:
traceback.print_exc()
cfnresponse.send(event, context, cfnresponse.FAILED, {}, '')
+
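
For reference, the rewritten handler above is driven by a CloudFormation custom-resource event whose ResourceProperties carry Region, KMSKey and KeyBucket. The sketch below builds such a Create event by hand for local experimentation; the key alias, bucket name, ResponseURL and stub context are hypothetical placeholders, and the call only completes against a real KMS key, S3 bucket and reachable response URL.

    # Sketch only: drives lambda_handler with a hand-built Create event.
    # All resource names below are placeholders, not values from this change.
    from lambda_function import lambda_handler

    fake_event = {
        "RequestType": "Create",
        "ResponseURL": "https://example.invalid/cfn-response",  # placeholder
        "StackId": "arn:aws:cloudformation:us-east-1:123456789012:stack/demo/abc",
        "RequestId": "demo-request-id",
        "LogicalResourceId": "SSHKeyGen",
        "ResourceType": "Custom::CreateSSHKey",
        "ResourceProperties": {
            "Region": "us-east-1",               # region hosting the KMS key
            "KMSKey": "alias/demo-ssh-key",      # hypothetical CMK alias
            "KeyBucket": "demo-ssh-key-bucket",  # hypothetical S3 bucket
        },
    }


    class FakeContext(object):
        # The stock cfnresponse helper reads log_stream_name from the context
        # object when building its response body.
        log_stream_name = "local-test"


    lambda_handler(fake_event, FakeContext())
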
diff --git a/functions/source/CreateSSHKey/pycparser-2.20.dist-info/INSTALLER b/functions/source/CreateSSHKey/pycparser-2.20.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/functions/source/CreateSSHKey/pycparser-2.20.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/functions/source/CreateSSHKey/pycparser-2.20.dist-info/LICENSE b/functions/source/CreateSSHKey/pycparser-2.20.dist-info/LICENSE
new file mode 100644
index 0000000..79b7547
--- /dev/null
+++ b/functions/source/CreateSSHKey/pycparser-2.20.dist-info/LICENSE
@@ -0,0 +1,27 @@
+pycparser -- A C parser in Python
+
+Copyright (c) 2008-2017, Eli Bendersky
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+* Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+* Neither the name of Eli Bendersky nor the names of its contributors may
+ be used to endorse or promote products derived from this software without
+ specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
+GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/functions/source/CreateSSHKey/pycparser-2.20.dist-info/METADATA b/functions/source/CreateSSHKey/pycparser-2.20.dist-info/METADATA
new file mode 100644
index 0000000..a3939e0
--- /dev/null
+++ b/functions/source/CreateSSHKey/pycparser-2.20.dist-info/METADATA
@@ -0,0 +1,27 @@
+Metadata-Version: 2.1
+Name: pycparser
+Version: 2.20
+Summary: C parser in Python
+Home-page: https://github.com/eliben/pycparser
+Author: Eli Bendersky
+Author-email: eliben@gmail.com
+Maintainer: Eli Bendersky
+License: BSD
+Platform: Cross Platform
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*
+
+
+pycparser is a complete parser of the C language, written in
+pure Python using the PLY parsing library.
+It parses C code into an AST and can serve as a front-end for
+C compilers or analysis tools.
+
+
diff --git a/functions/source/CreateSSHKey/pycparser-2.20.dist-info/RECORD b/functions/source/CreateSSHKey/pycparser-2.20.dist-info/RECORD
new file mode 100644
index 0000000..f47a551
--- /dev/null
+++ b/functions/source/CreateSSHKey/pycparser-2.20.dist-info/RECORD
@@ -0,0 +1,41 @@
+pycparser-2.20.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+pycparser-2.20.dist-info/LICENSE,sha256=PHZimICuwvhXjtkUcBpP-eXai2CsuLfsZ1q_g8kMUWg,1536
+pycparser-2.20.dist-info/METADATA,sha256=5_RDLTEfmg8dh29oc053jTNp_OL82PllsggkGQTU_Ds,907
+pycparser-2.20.dist-info/RECORD,,
+pycparser-2.20.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110
+pycparser-2.20.dist-info/top_level.txt,sha256=c-lPcS74L_8KoH7IE6PQF5ofyirRQNV4VhkbSFIPeWM,10
+pycparser/__init__.py,sha256=O2ajDXgU2_NI52hUFV8WeAjCR5L-sclmaXerpcxqgPo,2815
+pycparser/__pycache__/__init__.cpython-38.pyc,,
+pycparser/__pycache__/_ast_gen.cpython-38.pyc,,
+pycparser/__pycache__/_build_tables.cpython-38.pyc,,
+pycparser/__pycache__/ast_transforms.cpython-38.pyc,,
+pycparser/__pycache__/c_ast.cpython-38.pyc,,
+pycparser/__pycache__/c_generator.cpython-38.pyc,,
+pycparser/__pycache__/c_lexer.cpython-38.pyc,,
+pycparser/__pycache__/c_parser.cpython-38.pyc,,
+pycparser/__pycache__/lextab.cpython-38.pyc,,
+pycparser/__pycache__/plyparser.cpython-38.pyc,,
+pycparser/__pycache__/yacctab.cpython-38.pyc,,
+pycparser/_ast_gen.py,sha256=_LbRr_kKa2EHeb7y0gV525JV29nzCUbTH4oZ-9I4qIs,10607
+pycparser/_build_tables.py,sha256=oZCd3Plhq-vkV-QuEsaahcf-jUI6-HgKsrAL9gvFzuU,1039
+pycparser/_c_ast.cfg,sha256=1W8-DHa5RqZvyhha_0b4VvKL0CEYv9W0xFs_YwiyEHY,4206
+pycparser/ast_transforms.py,sha256=93ENKENTlugzFehnrQ0fdprijVdNt_ACCPygMxH4v7Q,3648
+pycparser/c_ast.py,sha256=JdDyC3QUZBfX9wVu2ENOrQQPbc737Jmf8Vtozhukayo,30233
+pycparser/c_generator.py,sha256=AwzNyE_rOFK2gzK0J5pCWDqfk7V8KL54ITFRf9m4GlY,15365
+pycparser/c_lexer.py,sha256=GWPUkwFe6F00gTAKIPAx4xs8-J-at_oGwEHnrKF4teM,16208
+pycparser/c_parser.py,sha256=w74N4tFGQ3TETIqUwClZIcbl-V4hFeJSPG2halVgUVs,69746
+pycparser/lextab.py,sha256=FyjRIsaq2wViDqJNYScURuc7GDW5F12VuYxOJLh1j4g,7011
+pycparser/ply/__init__.py,sha256=q4s86QwRsYRa20L9ueSxfh-hPihpftBjDOvYa2_SS2Y,102
+pycparser/ply/__pycache__/__init__.cpython-38.pyc,,
+pycparser/ply/__pycache__/cpp.cpython-38.pyc,,
+pycparser/ply/__pycache__/ctokens.cpython-38.pyc,,
+pycparser/ply/__pycache__/lex.cpython-38.pyc,,
+pycparser/ply/__pycache__/yacc.cpython-38.pyc,,
+pycparser/ply/__pycache__/ygen.cpython-38.pyc,,
+pycparser/ply/cpp.py,sha256=UtC3ylTWp5_1MKA-PLCuwKQR8zSOnlGuGGIdzj8xS98,33282
+pycparser/ply/ctokens.py,sha256=MKksnN40TehPhgVfxCJhjj_BjL943apreABKYz-bl0Y,3177
+pycparser/ply/lex.py,sha256=7Qol57x702HZwjA3ZLp-84CUEWq1EehW-N67Wzghi-M,42918
+pycparser/ply/yacc.py,sha256=eatSDkRLgRr6X3-hoDk_SQQv065R0BdL2K7fQ54CgVM,137323
+pycparser/ply/ygen.py,sha256=2JYNeYtrPz1JzLSLO3d4GsS8zJU8jY_I_CR1VI9gWrA,2251
+pycparser/plyparser.py,sha256=saGNjpsgncQz-hHEh45f28BLqopTxHffaJg_9BCZhi8,4873
+pycparser/yacctab.py,sha256=KOewsHNgbSYaYrLvDJr7K3jXj-7qou0ngyNEnhDmyB4,169715
diff --git a/functions/source/CreateSSHKey/pycparser-2.20.dist-info/WHEEL b/functions/source/CreateSSHKey/pycparser-2.20.dist-info/WHEEL
new file mode 100644
index 0000000..ef99c6c
--- /dev/null
+++ b/functions/source/CreateSSHKey/pycparser-2.20.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.34.2)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/functions/source/CreateSSHKey/pycparser-2.20.dist-info/top_level.txt b/functions/source/CreateSSHKey/pycparser-2.20.dist-info/top_level.txt
new file mode 100644
index 0000000..dc1c9e1
--- /dev/null
+++ b/functions/source/CreateSSHKey/pycparser-2.20.dist-info/top_level.txt
@@ -0,0 +1 @@
+pycparser
diff --git a/functions/source/CreateSSHKey/pycparser/__init__.py b/functions/source/CreateSSHKey/pycparser/__init__.py
index e089166..6e86e9f 100644
--- a/functions/source/CreateSSHKey/pycparser/__init__.py
+++ b/functions/source/CreateSSHKey/pycparser/__init__.py
@@ -4,13 +4,14 @@
# This package file exports some convenience functions for
# interacting with pycparser
#
-# Eli Bendersky [http://eli.thegreenplace.net]
+# Eli Bendersky [https://eli.thegreenplace.net/]
# License: BSD
#-----------------------------------------------------------------
__all__ = ['c_lexer', 'c_parser', 'c_ast']
-__version__ = '2.18'
+__version__ = '2.20'
-from subprocess import Popen, PIPE
+import io
+from subprocess import check_output
from .c_parser import CParser
@@ -38,11 +39,7 @@ def preprocess_file(filename, cpp_path='cpp', cpp_args=''):
try:
# Note the use of universal_newlines to treat all newlines
# as \n for Python's purpose
- #
- pipe = Popen( path_list,
- stdout=PIPE,
- universal_newlines=True)
- text = pipe.communicate()[0]
+ text = check_output(path_list, universal_newlines=True)
except OSError as e:
raise RuntimeError("Unable to invoke 'cpp'. " +
'Make sure its path was passed correctly\n' +
@@ -85,7 +82,7 @@ def parse_file(filename, use_cpp=False, cpp_path='cpp', cpp_args='',
if use_cpp:
text = preprocess_file(filename, cpp_path, cpp_args)
else:
- with open(filename, 'rU') as f:
+ with io.open(filename) as f:
text = f.read()
if parser is None:
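
For reference, a minimal usage sketch of the updated entry points, assuming the vendored package at functions/source/CreateSSHKey/pycparser is importable as pycparser: parse_file() now reads the file with io.open() when use_cpp is False, and shells out to the preprocessor via subprocess.check_output() when it is True.

import tempfile
import pycparser

# A tiny C file to parse; use_cpp=False exercises the io.open() path in the
# updated parse_file(); use_cpp=True would invoke 'cpp' via check_output().
source = "int add(int a, int b) { return a + b; }\n"
with tempfile.NamedTemporaryFile("w", suffix=".c", delete=False) as f:
    f.write(source)
    path = f.name

ast = pycparser.parse_file(path, use_cpp=False)
ast.show()  # prints the FileAST for the translation unit
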
diff --git a/functions/source/CreateSSHKey/pycparser/__pycache__/__init__.cpython-38.pyc b/functions/source/CreateSSHKey/pycparser/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000..f8499f5
Binary files /dev/null and b/functions/source/CreateSSHKey/pycparser/__pycache__/__init__.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/pycparser/__pycache__/_ast_gen.cpython-38.pyc b/functions/source/CreateSSHKey/pycparser/__pycache__/_ast_gen.cpython-38.pyc
new file mode 100644
index 0000000..d61a637
Binary files /dev/null and b/functions/source/CreateSSHKey/pycparser/__pycache__/_ast_gen.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/pycparser/__pycache__/_build_tables.cpython-38.pyc b/functions/source/CreateSSHKey/pycparser/__pycache__/_build_tables.cpython-38.pyc
new file mode 100644
index 0000000..d338534
Binary files /dev/null and b/functions/source/CreateSSHKey/pycparser/__pycache__/_build_tables.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/pycparser/__pycache__/ast_transforms.cpython-38.pyc b/functions/source/CreateSSHKey/pycparser/__pycache__/ast_transforms.cpython-38.pyc
new file mode 100644
index 0000000..ea1614d
Binary files /dev/null and b/functions/source/CreateSSHKey/pycparser/__pycache__/ast_transforms.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/pycparser/__pycache__/c_ast.cpython-38.pyc b/functions/source/CreateSSHKey/pycparser/__pycache__/c_ast.cpython-38.pyc
new file mode 100644
index 0000000..70c0143
Binary files /dev/null and b/functions/source/CreateSSHKey/pycparser/__pycache__/c_ast.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/pycparser/__pycache__/c_generator.cpython-38.pyc b/functions/source/CreateSSHKey/pycparser/__pycache__/c_generator.cpython-38.pyc
new file mode 100644
index 0000000..bdb3f6d
Binary files /dev/null and b/functions/source/CreateSSHKey/pycparser/__pycache__/c_generator.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/pycparser/__pycache__/c_lexer.cpython-38.pyc b/functions/source/CreateSSHKey/pycparser/__pycache__/c_lexer.cpython-38.pyc
new file mode 100644
index 0000000..3d249e6
Binary files /dev/null and b/functions/source/CreateSSHKey/pycparser/__pycache__/c_lexer.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/pycparser/__pycache__/c_parser.cpython-38.pyc b/functions/source/CreateSSHKey/pycparser/__pycache__/c_parser.cpython-38.pyc
new file mode 100644
index 0000000..73c7e79
Binary files /dev/null and b/functions/source/CreateSSHKey/pycparser/__pycache__/c_parser.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/pycparser/__pycache__/lextab.cpython-38.pyc b/functions/source/CreateSSHKey/pycparser/__pycache__/lextab.cpython-38.pyc
new file mode 100644
index 0000000..23048c4
Binary files /dev/null and b/functions/source/CreateSSHKey/pycparser/__pycache__/lextab.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/pycparser/__pycache__/plyparser.cpython-38.pyc b/functions/source/CreateSSHKey/pycparser/__pycache__/plyparser.cpython-38.pyc
new file mode 100644
index 0000000..080aadb
Binary files /dev/null and b/functions/source/CreateSSHKey/pycparser/__pycache__/plyparser.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/pycparser/__pycache__/yacctab.cpython-38.pyc b/functions/source/CreateSSHKey/pycparser/__pycache__/yacctab.cpython-38.pyc
new file mode 100644
index 0000000..10fa436
Binary files /dev/null and b/functions/source/CreateSSHKey/pycparser/__pycache__/yacctab.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/pycparser/_ast_gen.py b/functions/source/CreateSSHKey/pycparser/_ast_gen.py
index 669c303..5ec2d3d 100644
--- a/functions/source/CreateSSHKey/pycparser/_ast_gen.py
+++ b/functions/source/CreateSSHKey/pycparser/_ast_gen.py
@@ -7,7 +7,7 @@
# The design of this module was inspired by astgen.py from the
# Python 2.5 code-base.
#
-# Eli Bendersky [http://eli.thegreenplace.net]
+# Eli Bendersky [https://eli.thegreenplace.net/]
# License: BSD
#-----------------------------------------------------------------
import pprint
@@ -63,6 +63,7 @@ class NodeCfg(object):
contents: a list of contents - attributes and child nodes
See comment at the top of the configuration file for details.
"""
+
def __init__(self, name, contents):
self.name = name
self.all_entries = []
@@ -84,6 +85,8 @@ def __init__(self, name, contents):
def generate_source(self):
src = self._gen_init()
src += '\n' + self._gen_children()
+ src += '\n' + self._gen_iter()
+
src += '\n' + self._gen_attr_names()
return src
@@ -131,6 +134,33 @@ def _gen_children(self):
return src
+ def _gen_iter(self):
+ src = ' def __iter__(self):\n'
+
+ if self.all_entries:
+ for child in self.child:
+ src += (
+ ' if self.%(child)s is not None:\n' +
+ ' yield self.%(child)s\n') % (dict(child=child))
+
+ for seq_child in self.seq_child:
+ src += (
+ ' for child in (self.%(child)s or []):\n'
+ ' yield child\n') % (dict(child=seq_child))
+
+ if not (self.child or self.seq_child):
+ # Empty generator
+ src += (
+ ' return\n' +
+ ' yield\n')
+ else:
+ # Empty generator
+ src += (
+ ' return\n' +
+ ' yield\n')
+
+ return src
+
def _gen_attr_names(self):
src = " attr_names = (" + ''.join("%r, " % nm for nm in self.attr) + ')'
return src
@@ -150,7 +180,7 @@ def _gen_attr_names(self):
#
# AST Node classes.
#
-# Eli Bendersky [http://eli.thegreenplace.net]
+# Eli Bendersky [https://eli.thegreenplace.net/]
# License: BSD
#-----------------------------------------------------------------
@@ -159,11 +189,38 @@ def _gen_attr_names(self):
_PROLOGUE_CODE = r'''
import sys
+def _repr(obj):
+ """
+ Get the representation of an object, with dedicated pprint-like format for lists.
+ """
+ if isinstance(obj, list):
+ return '[' + (',\n '.join((_repr(e).replace('\n', '\n ') for e in obj))) + '\n]'
+ else:
+ return repr(obj)
class Node(object):
__slots__ = ()
""" Abstract base class for AST nodes.
"""
+ def __repr__(self):
+ """ Generates a python representation of the current node
+ """
+ result = self.__class__.__name__ + '('
+
+ indent = ''
+ separator = ''
+ for name in self.__slots__[:-2]:
+ result += separator
+ result += indent
+ result += name + '=' + (_repr(getattr(self, name)).replace('\n', '\n ' + (' ' * (len(name) + len(self.__class__.__name__)))))
+
+ separator = ','
+ indent = '\n ' + (' ' * len(self.__class__.__name__))
+
+ result += indent + ')'
+
+ return result
+
def children(self):
""" A sequence of all children that are Nodes
"""
@@ -253,26 +310,29 @@ def visit_Constant(self, node):
* Modeled after Python's own AST visiting facilities
(the ast module of Python 3.0)
"""
+
+ _method_cache = None
+
def visit(self, node):
""" Visit a node.
"""
- method = 'visit_' + node.__class__.__name__
- visitor = getattr(self, method, self.generic_visit)
+
+ if self._method_cache is None:
+ self._method_cache = {}
+
+ visitor = self._method_cache.get(node.__class__.__name__, None)
+ if visitor is None:
+ method = 'visit_' + node.__class__.__name__
+ visitor = getattr(self, method, self.generic_visit)
+ self._method_cache[node.__class__.__name__] = visitor
+
return visitor(node)
def generic_visit(self, node):
""" Called if no explicit visitor function exists for a
node. Implements preorder visiting of the node.
"""
- for c_name, c in node.children():
+ for c in node:
self.visit(c)
-
'''
-
-
-if __name__ == "__main__":
- import sys
- ast_gen = ASTCodeGenerator('_c_ast.cfg')
- ast_gen.generate(open('c_ast.py', 'w'))
-
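
The generated __iter__ bodies rely on the return-before-yield idiom for leaf nodes; a self-contained sketch of the two shapes _gen_iter() emits and why the leaf form behaves as an empty generator:

# For a node with children the generated body yields each non-None child;
# for a leaf node the bare `yield` after `return` marks the method as a
# generator that simply produces no values.
def leaf_iter():
    return
    yield  # unreachable, but makes this function a generator

def parent_iter(left, right):
    if left is not None:
        yield left
    if right is not None:
        yield right

assert list(leaf_iter()) == []
assert list(parent_iter("lhs", None)) == ["lhs"]
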
diff --git a/functions/source/CreateSSHKey/pycparser/_build_tables.py b/functions/source/CreateSSHKey/pycparser/_build_tables.py
index a8a9dcf..958381a 100644
--- a/functions/source/CreateSSHKey/pycparser/_build_tables.py
+++ b/functions/source/CreateSSHKey/pycparser/_build_tables.py
@@ -6,17 +6,21 @@
# Also generates AST code from the configuration file.
# Should be called from the pycparser directory.
#
-# Eli Bendersky [http://eli.thegreenplace.net]
+# Eli Bendersky [https://eli.thegreenplace.net/]
# License: BSD
#-----------------------------------------------------------------
+# Insert '.' and '..' as first entries to the search path for modules.
+# Restricted environments like embeddable python do not include the
+# current working directory on startup.
+import sys
+sys.path[0:0] = ['.', '..']
+
# Generate c_ast.py
from _ast_gen import ASTCodeGenerator
ast_gen = ASTCodeGenerator('_c_ast.cfg')
ast_gen.generate(open('c_ast.py', 'w'))
-import sys
-sys.path[0:0] = ['.', '..']
from pycparser import c_parser
# Generates the tables
diff --git a/functions/source/CreateSSHKey/pycparser/_c_ast.cfg b/functions/source/CreateSSHKey/pycparser/_c_ast.cfg
index 7dfcd0c..b93d50b 100644
--- a/functions/source/CreateSSHKey/pycparser/_c_ast.cfg
+++ b/functions/source/CreateSSHKey/pycparser/_c_ast.cfg
@@ -9,7 +9,7 @@
# ** - a sequence of child nodes
# - an attribute
#
-# Eli Bendersky [http://eli.thegreenplace.net]
+# Eli Bendersky [https://eli.thegreenplace.net/]
# License: BSD
#-----------------------------------------------------------------
diff --git a/functions/source/CreateSSHKey/pycparser/ast_transforms.py b/functions/source/CreateSSHKey/pycparser/ast_transforms.py
index 623821d..0aeb88f 100644
--- a/functions/source/CreateSSHKey/pycparser/ast_transforms.py
+++ b/functions/source/CreateSSHKey/pycparser/ast_transforms.py
@@ -3,7 +3,7 @@
#
# Some utilities used by the parser to create a friendlier AST.
#
-# Eli Bendersky [http://eli.thegreenplace.net]
+# Eli Bendersky [https://eli.thegreenplace.net/]
# License: BSD
#------------------------------------------------------------------------------
@@ -74,7 +74,8 @@ def fix_switch_cases(switch_node):
# Goes over the children of the Compound below the Switch, adding them
# either directly below new_compound or below the last Case as appropriate
- for child in switch_node.stmt.block_items:
+ # (for `switch(cond) {}`, block_items would have been None)
+ for child in (switch_node.stmt.block_items or []):
if isinstance(child, (c_ast.Case, c_ast.Default)):
# If it's a Case/Default:
# 1. Add it to the Compound and mark as "last case"
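
A short sketch of the guard added above: with `switch (cond) {}` the Compound's block_items is None, and `or []` turns the loop into a no-op instead of raising TypeError. Assumes the vendored pycparser is importable.

from pycparser import c_parser

# An empty switch body parses to a Compound whose block_items is None;
# fix_switch_cases() now iterates `(block_items or [])`, so normalizing the
# Case/Default children of this switch no longer crashes.
src = "void f(int x) { switch (x) {} }"
ast = c_parser.CParser().parse(src)
ast.show()
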
diff --git a/functions/source/CreateSSHKey/pycparser/c_ast.py b/functions/source/CreateSSHKey/pycparser/c_ast.py
index 5e81648..b7bbbee 100644
--- a/functions/source/CreateSSHKey/pycparser/c_ast.py
+++ b/functions/source/CreateSSHKey/pycparser/c_ast.py
@@ -11,18 +11,45 @@
#
# AST Node classes.
#
-# Eli Bendersky [http://eli.thegreenplace.net]
+# Eli Bendersky [https://eli.thegreenplace.net/]
# License: BSD
#-----------------------------------------------------------------
import sys
+def _repr(obj):
+ """
+ Get the representation of an object, with dedicated pprint-like format for lists.
+ """
+ if isinstance(obj, list):
+ return '[' + (',\n '.join((_repr(e).replace('\n', '\n ') for e in obj))) + '\n]'
+ else:
+ return repr(obj)
class Node(object):
__slots__ = ()
""" Abstract base class for AST nodes.
"""
+ def __repr__(self):
+ """ Generates a python representation of the current node
+ """
+ result = self.__class__.__name__ + '('
+
+ indent = ''
+ separator = ''
+ for name in self.__slots__[:-2]:
+ result += separator
+ result += indent
+ result += name + '=' + (_repr(getattr(self, name)).replace('\n', '\n ' + (' ' * (len(name) + len(self.__class__.__name__)))))
+
+ separator = ','
+ indent = '\n ' + (' ' * len(self.__class__.__name__))
+
+ result += indent + ')'
+
+ return result
+
def children(self):
""" A sequence of all children that are Nodes
"""
@@ -112,21 +139,31 @@ def visit_Constant(self, node):
* Modeled after Python's own AST visiting facilities
(the ast module of Python 3.0)
"""
+
+ _method_cache = None
+
def visit(self, node):
""" Visit a node.
"""
- method = 'visit_' + node.__class__.__name__
- visitor = getattr(self, method, self.generic_visit)
+
+ if self._method_cache is None:
+ self._method_cache = {}
+
+ visitor = self._method_cache.get(node.__class__.__name__, None)
+ if visitor is None:
+ method = 'visit_' + node.__class__.__name__
+ visitor = getattr(self, method, self.generic_visit)
+ self._method_cache[node.__class__.__name__] = visitor
+
return visitor(node)
def generic_visit(self, node):
""" Called if no explicit visitor function exists for a
node. Implements preorder visiting of the node.
"""
- for c_name, c in node.children():
+ for c in node:
self.visit(c)
-
class ArrayDecl(Node):
__slots__ = ('type', 'dim', 'dim_quals', 'coord', '__weakref__')
def __init__(self, type, dim, dim_quals, coord=None):
@@ -141,6 +178,12 @@ def children(self):
if self.dim is not None: nodelist.append(("dim", self.dim))
return tuple(nodelist)
+ def __iter__(self):
+ if self.type is not None:
+ yield self.type
+ if self.dim is not None:
+ yield self.dim
+
attr_names = ('dim_quals', )
class ArrayRef(Node):
@@ -156,6 +199,12 @@ def children(self):
if self.subscript is not None: nodelist.append(("subscript", self.subscript))
return tuple(nodelist)
+ def __iter__(self):
+ if self.name is not None:
+ yield self.name
+ if self.subscript is not None:
+ yield self.subscript
+
attr_names = ()
class Assignment(Node):
@@ -172,6 +221,12 @@ def children(self):
if self.rvalue is not None: nodelist.append(("rvalue", self.rvalue))
return tuple(nodelist)
+ def __iter__(self):
+ if self.lvalue is not None:
+ yield self.lvalue
+ if self.rvalue is not None:
+ yield self.rvalue
+
attr_names = ('op', )
class BinaryOp(Node):
@@ -188,6 +243,12 @@ def children(self):
if self.right is not None: nodelist.append(("right", self.right))
return tuple(nodelist)
+ def __iter__(self):
+ if self.left is not None:
+ yield self.left
+ if self.right is not None:
+ yield self.right
+
attr_names = ('op', )
class Break(Node):
@@ -198,6 +259,10 @@ def __init__(self, coord=None):
def children(self):
return ()
+ def __iter__(self):
+ return
+ yield
+
attr_names = ()
class Case(Node):
@@ -214,6 +279,12 @@ def children(self):
nodelist.append(("stmts[%d]" % i, child))
return tuple(nodelist)
+ def __iter__(self):
+ if self.expr is not None:
+ yield self.expr
+ for child in (self.stmts or []):
+ yield child
+
attr_names = ()
class Cast(Node):
@@ -229,6 +300,12 @@ def children(self):
if self.expr is not None: nodelist.append(("expr", self.expr))
return tuple(nodelist)
+ def __iter__(self):
+ if self.to_type is not None:
+ yield self.to_type
+ if self.expr is not None:
+ yield self.expr
+
attr_names = ()
class Compound(Node):
@@ -243,6 +320,10 @@ def children(self):
nodelist.append(("block_items[%d]" % i, child))
return tuple(nodelist)
+ def __iter__(self):
+ for child in (self.block_items or []):
+ yield child
+
attr_names = ()
class CompoundLiteral(Node):
@@ -258,6 +339,12 @@ def children(self):
if self.init is not None: nodelist.append(("init", self.init))
return tuple(nodelist)
+ def __iter__(self):
+ if self.type is not None:
+ yield self.type
+ if self.init is not None:
+ yield self.init
+
attr_names = ()
class Constant(Node):
@@ -271,6 +358,10 @@ def children(self):
nodelist = []
return tuple(nodelist)
+ def __iter__(self):
+ return
+ yield
+
attr_names = ('type', 'value', )
class Continue(Node):
@@ -281,6 +372,10 @@ def __init__(self, coord=None):
def children(self):
return ()
+ def __iter__(self):
+ return
+ yield
+
attr_names = ()
class Decl(Node):
@@ -302,6 +397,14 @@ def children(self):
if self.bitsize is not None: nodelist.append(("bitsize", self.bitsize))
return tuple(nodelist)
+ def __iter__(self):
+ if self.type is not None:
+ yield self.type
+ if self.init is not None:
+ yield self.init
+ if self.bitsize is not None:
+ yield self.bitsize
+
attr_names = ('name', 'quals', 'storage', 'funcspec', )
class DeclList(Node):
@@ -316,6 +419,10 @@ def children(self):
nodelist.append(("decls[%d]" % i, child))
return tuple(nodelist)
+ def __iter__(self):
+ for child in (self.decls or []):
+ yield child
+
attr_names = ()
class Default(Node):
@@ -330,6 +437,10 @@ def children(self):
nodelist.append(("stmts[%d]" % i, child))
return tuple(nodelist)
+ def __iter__(self):
+ for child in (self.stmts or []):
+ yield child
+
attr_names = ()
class DoWhile(Node):
@@ -345,6 +456,12 @@ def children(self):
if self.stmt is not None: nodelist.append(("stmt", self.stmt))
return tuple(nodelist)
+ def __iter__(self):
+ if self.cond is not None:
+ yield self.cond
+ if self.stmt is not None:
+ yield self.stmt
+
attr_names = ()
class EllipsisParam(Node):
@@ -355,6 +472,10 @@ def __init__(self, coord=None):
def children(self):
return ()
+ def __iter__(self):
+ return
+ yield
+
attr_names = ()
class EmptyStatement(Node):
@@ -365,6 +486,10 @@ def __init__(self, coord=None):
def children(self):
return ()
+ def __iter__(self):
+ return
+ yield
+
attr_names = ()
class Enum(Node):
@@ -379,6 +504,10 @@ def children(self):
if self.values is not None: nodelist.append(("values", self.values))
return tuple(nodelist)
+ def __iter__(self):
+ if self.values is not None:
+ yield self.values
+
attr_names = ('name', )
class Enumerator(Node):
@@ -393,6 +522,10 @@ def children(self):
if self.value is not None: nodelist.append(("value", self.value))
return tuple(nodelist)
+ def __iter__(self):
+ if self.value is not None:
+ yield self.value
+
attr_names = ('name', )
class EnumeratorList(Node):
@@ -407,6 +540,10 @@ def children(self):
nodelist.append(("enumerators[%d]" % i, child))
return tuple(nodelist)
+ def __iter__(self):
+ for child in (self.enumerators or []):
+ yield child
+
attr_names = ()
class ExprList(Node):
@@ -421,6 +558,10 @@ def children(self):
nodelist.append(("exprs[%d]" % i, child))
return tuple(nodelist)
+ def __iter__(self):
+ for child in (self.exprs or []):
+ yield child
+
attr_names = ()
class FileAST(Node):
@@ -435,6 +576,10 @@ def children(self):
nodelist.append(("ext[%d]" % i, child))
return tuple(nodelist)
+ def __iter__(self):
+ for child in (self.ext or []):
+ yield child
+
attr_names = ()
class For(Node):
@@ -454,6 +599,16 @@ def children(self):
if self.stmt is not None: nodelist.append(("stmt", self.stmt))
return tuple(nodelist)
+ def __iter__(self):
+ if self.init is not None:
+ yield self.init
+ if self.cond is not None:
+ yield self.cond
+ if self.next is not None:
+ yield self.next
+ if self.stmt is not None:
+ yield self.stmt
+
attr_names = ()
class FuncCall(Node):
@@ -469,6 +624,12 @@ def children(self):
if self.args is not None: nodelist.append(("args", self.args))
return tuple(nodelist)
+ def __iter__(self):
+ if self.name is not None:
+ yield self.name
+ if self.args is not None:
+ yield self.args
+
attr_names = ()
class FuncDecl(Node):
@@ -484,6 +645,12 @@ def children(self):
if self.type is not None: nodelist.append(("type", self.type))
return tuple(nodelist)
+ def __iter__(self):
+ if self.args is not None:
+ yield self.args
+ if self.type is not None:
+ yield self.type
+
attr_names = ()
class FuncDef(Node):
@@ -502,6 +669,14 @@ def children(self):
nodelist.append(("param_decls[%d]" % i, child))
return tuple(nodelist)
+ def __iter__(self):
+ if self.decl is not None:
+ yield self.decl
+ if self.body is not None:
+ yield self.body
+ for child in (self.param_decls or []):
+ yield child
+
attr_names = ()
class Goto(Node):
@@ -514,6 +689,10 @@ def children(self):
nodelist = []
return tuple(nodelist)
+ def __iter__(self):
+ return
+ yield
+
attr_names = ('name', )
class ID(Node):
@@ -526,6 +705,10 @@ def children(self):
nodelist = []
return tuple(nodelist)
+ def __iter__(self):
+ return
+ yield
+
attr_names = ('name', )
class IdentifierType(Node):
@@ -538,6 +721,10 @@ def children(self):
nodelist = []
return tuple(nodelist)
+ def __iter__(self):
+ return
+ yield
+
attr_names = ('names', )
class If(Node):
@@ -555,6 +742,14 @@ def children(self):
if self.iffalse is not None: nodelist.append(("iffalse", self.iffalse))
return tuple(nodelist)
+ def __iter__(self):
+ if self.cond is not None:
+ yield self.cond
+ if self.iftrue is not None:
+ yield self.iftrue
+ if self.iffalse is not None:
+ yield self.iffalse
+
attr_names = ()
class InitList(Node):
@@ -569,6 +764,10 @@ def children(self):
nodelist.append(("exprs[%d]" % i, child))
return tuple(nodelist)
+ def __iter__(self):
+ for child in (self.exprs or []):
+ yield child
+
attr_names = ()
class Label(Node):
@@ -583,6 +782,10 @@ def children(self):
if self.stmt is not None: nodelist.append(("stmt", self.stmt))
return tuple(nodelist)
+ def __iter__(self):
+ if self.stmt is not None:
+ yield self.stmt
+
attr_names = ('name', )
class NamedInitializer(Node):
@@ -599,6 +802,12 @@ def children(self):
nodelist.append(("name[%d]" % i, child))
return tuple(nodelist)
+ def __iter__(self):
+ if self.expr is not None:
+ yield self.expr
+ for child in (self.name or []):
+ yield child
+
attr_names = ()
class ParamList(Node):
@@ -613,6 +822,10 @@ def children(self):
nodelist.append(("params[%d]" % i, child))
return tuple(nodelist)
+ def __iter__(self):
+ for child in (self.params or []):
+ yield child
+
attr_names = ()
class PtrDecl(Node):
@@ -627,6 +840,10 @@ def children(self):
if self.type is not None: nodelist.append(("type", self.type))
return tuple(nodelist)
+ def __iter__(self):
+ if self.type is not None:
+ yield self.type
+
attr_names = ('quals', )
class Return(Node):
@@ -640,6 +857,10 @@ def children(self):
if self.expr is not None: nodelist.append(("expr", self.expr))
return tuple(nodelist)
+ def __iter__(self):
+ if self.expr is not None:
+ yield self.expr
+
attr_names = ()
class Struct(Node):
@@ -655,6 +876,10 @@ def children(self):
nodelist.append(("decls[%d]" % i, child))
return tuple(nodelist)
+ def __iter__(self):
+ for child in (self.decls or []):
+ yield child
+
attr_names = ('name', )
class StructRef(Node):
@@ -671,6 +896,12 @@ def children(self):
if self.field is not None: nodelist.append(("field", self.field))
return tuple(nodelist)
+ def __iter__(self):
+ if self.name is not None:
+ yield self.name
+ if self.field is not None:
+ yield self.field
+
attr_names = ('type', )
class Switch(Node):
@@ -686,6 +917,12 @@ def children(self):
if self.stmt is not None: nodelist.append(("stmt", self.stmt))
return tuple(nodelist)
+ def __iter__(self):
+ if self.cond is not None:
+ yield self.cond
+ if self.stmt is not None:
+ yield self.stmt
+
attr_names = ()
class TernaryOp(Node):
@@ -703,6 +940,14 @@ def children(self):
if self.iffalse is not None: nodelist.append(("iffalse", self.iffalse))
return tuple(nodelist)
+ def __iter__(self):
+ if self.cond is not None:
+ yield self.cond
+ if self.iftrue is not None:
+ yield self.iftrue
+ if self.iffalse is not None:
+ yield self.iffalse
+
attr_names = ()
class TypeDecl(Node):
@@ -718,6 +963,10 @@ def children(self):
if self.type is not None: nodelist.append(("type", self.type))
return tuple(nodelist)
+ def __iter__(self):
+ if self.type is not None:
+ yield self.type
+
attr_names = ('declname', 'quals', )
class Typedef(Node):
@@ -734,6 +983,10 @@ def children(self):
if self.type is not None: nodelist.append(("type", self.type))
return tuple(nodelist)
+ def __iter__(self):
+ if self.type is not None:
+ yield self.type
+
attr_names = ('name', 'quals', 'storage', )
class Typename(Node):
@@ -749,6 +1002,10 @@ def children(self):
if self.type is not None: nodelist.append(("type", self.type))
return tuple(nodelist)
+ def __iter__(self):
+ if self.type is not None:
+ yield self.type
+
attr_names = ('name', 'quals', )
class UnaryOp(Node):
@@ -763,6 +1020,10 @@ def children(self):
if self.expr is not None: nodelist.append(("expr", self.expr))
return tuple(nodelist)
+ def __iter__(self):
+ if self.expr is not None:
+ yield self.expr
+
attr_names = ('op', )
class Union(Node):
@@ -778,6 +1039,10 @@ def children(self):
nodelist.append(("decls[%d]" % i, child))
return tuple(nodelist)
+ def __iter__(self):
+ for child in (self.decls or []):
+ yield child
+
attr_names = ('name', )
class While(Node):
@@ -793,6 +1058,12 @@ def children(self):
if self.stmt is not None: nodelist.append(("stmt", self.stmt))
return tuple(nodelist)
+ def __iter__(self):
+ if self.cond is not None:
+ yield self.cond
+ if self.stmt is not None:
+ yield self.stmt
+
attr_names = ()
class Pragma(Node):
@@ -805,5 +1076,9 @@ def children(self):
nodelist = []
return tuple(nodelist)
+ def __iter__(self):
+ return
+ yield
+
attr_names = ('string', )
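
A short sketch of what the regenerated node classes add in practice: every node now supports direct iteration over its child nodes and a structured repr, and NodeVisitor.visit() caches visit_* lookups per node class. Assumes the vendored pycparser.c_ast module.

from pycparser import c_ast

# Build the expression x + 3 by hand and use the new __iter__/__repr__ support.
binop = c_ast.BinaryOp(op='+',
                       left=c_ast.ID(name='x'),
                       right=c_ast.Constant(type='int', value='3'))

print(list(binop))   # children via the new __iter__: [ID, Constant]
print(repr(binop))   # multi-line repr from the new __repr__

class IDCollector(c_ast.NodeVisitor):
    """Collects identifier names; visit() resolves visit_ID through the new
    per-class _method_cache, and generic_visit() walks `for c in node`."""
    def __init__(self):
        self.names = []
    def visit_ID(self, node):
        self.names.append(node.name)

collector = IDCollector()
collector.visit(binop)
print(collector.names)   # ['x']
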
diff --git a/functions/source/CreateSSHKey/pycparser/c_generator.py b/functions/source/CreateSSHKey/pycparser/c_generator.py
index 73e7f1b..973d24a 100644
--- a/functions/source/CreateSSHKey/pycparser/c_generator.py
+++ b/functions/source/CreateSSHKey/pycparser/c_generator.py
@@ -3,7 +3,7 @@
#
# C code generator from pycparser AST nodes.
#
-# Eli Bendersky [http://eli.thegreenplace.net]
+# Eli Bendersky [https://eli.thegreenplace.net/]
# License: BSD
#------------------------------------------------------------------------------
from . import c_ast
@@ -39,7 +39,7 @@ def visit_Constant(self, n):
def visit_ID(self, n):
return n.name
-
+
def visit_Pragma(self, n):
ret = '#pragma'
if n.string:
@@ -119,7 +119,7 @@ def visit_Typedef(self, n):
return s
def visit_Cast(self, n):
- s = '(' + self._generate_type(n.to_type) + ')'
+ s = '(' + self._generate_type(n.to_type, emit_declname=False) + ')'
return s + ' ' + self._parenthesize_unless_simple(n.expr)
def visit_ExprList(self, n):
@@ -135,18 +135,20 @@ def visit_InitList(self, n):
return ', '.join(visited_subexprs)
def visit_Enum(self, n):
- s = 'enum'
- if n.name: s += ' ' + n.name
- if n.values:
- s += ' {'
- for i, enumerator in enumerate(n.values.enumerators):
- s += enumerator.name
- if enumerator.value:
- s += ' = ' + self.visit(enumerator.value)
- if i != len(n.values.enumerators) - 1:
- s += ', '
- s += '}'
- return s
+ return self._generate_struct_union_enum(n, name='enum')
+
+ def visit_Enumerator(self, n):
+ if not n.value:
+ return '{indent}{name},\n'.format(
+ indent=self._make_indent(),
+ name=n.name,
+ )
+ else:
+ return '{indent}{name} = {value},\n'.format(
+ indent=self._make_indent(),
+ name=n.name,
+ value=self.visit(n.value),
+ )
def visit_FuncDef(self, n):
decl = self.visit(n.decl)
@@ -268,43 +270,67 @@ def visit_EllipsisParam(self, n):
return '...'
def visit_Struct(self, n):
- return self._generate_struct_union(n, 'struct')
+ return self._generate_struct_union_enum(n, 'struct')
def visit_Typename(self, n):
return self._generate_type(n.type)
def visit_Union(self, n):
- return self._generate_struct_union(n, 'union')
+ return self._generate_struct_union_enum(n, 'union')
def visit_NamedInitializer(self, n):
s = ''
for name in n.name:
if isinstance(name, c_ast.ID):
s += '.' + name.name
- elif isinstance(name, c_ast.Constant):
- s += '[' + name.value + ']'
+ else:
+ s += '[' + self.visit(name) + ']'
s += ' = ' + self._visit_expr(n.expr)
return s
def visit_FuncDecl(self, n):
return self._generate_type(n)
- def _generate_struct_union(self, n, name):
- """ Generates code for structs and unions. name should be either
- 'struct' or union.
+ def visit_ArrayDecl(self, n):
+ return self._generate_type(n, emit_declname=False)
+
+ def visit_TypeDecl(self, n):
+ return self._generate_type(n, emit_declname=False)
+
+ def visit_PtrDecl(self, n):
+ return self._generate_type(n, emit_declname=False)
+
+ def _generate_struct_union_enum(self, n, name):
+ """ Generates code for structs, unions, and enums. name should be
+ 'struct', 'union', or 'enum'.
"""
+ if name in ('struct', 'union'):
+ members = n.decls
+ body_function = self._generate_struct_union_body
+ else:
+ assert name == 'enum'
+ members = None if n.values is None else n.values.enumerators
+ body_function = self._generate_enum_body
s = name + ' ' + (n.name or '')
- if n.decls:
+ if members is not None:
+ # None means no members
+ # Empty sequence means an empty list of members
s += '\n'
s += self._make_indent()
self.indent_level += 2
s += '{\n'
- for decl in n.decls:
- s += self._generate_stmt(decl)
+ s += body_function(members)
self.indent_level -= 2
s += self._make_indent() + '}'
return s
+ def _generate_struct_union_body(self, members):
+ return ''.join(self._generate_stmt(decl) for decl in members)
+
+ def _generate_enum_body(self, members):
+ # `[:-2] + '\n'` removes the final `,` from the enumerator list
+ return ''.join(self.visit(value) for value in members)[:-2] + '\n'
+
def _generate_stmt(self, n, add_indent=False):
""" Generation from a statement node. This method exists as a wrapper
for individual visit_* methods to handle different treatment of
@@ -342,7 +368,7 @@ def _generate_decl(self, n):
s += self._generate_type(n.type)
return s
- def _generate_type(self, n, modifiers=[]):
+ def _generate_type(self, n, modifiers=[], emit_declname = True):
""" Recursive generation from a type node. n is the type node.
modifiers collects the PtrDecl, ArrayDecl and FuncDecl modifiers
encountered on the way down to a TypeDecl, to allow proper
@@ -356,23 +382,29 @@ def _generate_type(self, n, modifiers=[]):
if n.quals: s += ' '.join(n.quals) + ' '
s += self.visit(n.type)
- nstr = n.declname if n.declname else ''
+ nstr = n.declname if n.declname and emit_declname else ''
# Resolve modifiers.
# Wrap in parens to distinguish pointer to array and pointer to
# function syntax.
#
for i, modifier in enumerate(modifiers):
if isinstance(modifier, c_ast.ArrayDecl):
- if (i != 0 and isinstance(modifiers[i - 1], c_ast.PtrDecl)):
- nstr = '(' + nstr + ')'
- nstr += '[' + self.visit(modifier.dim) + ']'
+ if (i != 0 and
+ isinstance(modifiers[i - 1], c_ast.PtrDecl)):
+ nstr = '(' + nstr + ')'
+ nstr += '['
+ if modifier.dim_quals:
+ nstr += ' '.join(modifier.dim_quals) + ' '
+ nstr += self.visit(modifier.dim) + ']'
elif isinstance(modifier, c_ast.FuncDecl):
- if (i != 0 and isinstance(modifiers[i - 1], c_ast.PtrDecl)):
- nstr = '(' + nstr + ')'
+ if (i != 0 and
+ isinstance(modifiers[i - 1], c_ast.PtrDecl)):
+ nstr = '(' + nstr + ')'
nstr += '(' + self.visit(modifier.args) + ')'
elif isinstance(modifier, c_ast.PtrDecl):
if modifier.quals:
- nstr = '* %s %s' % (' '.join(modifier.quals), nstr)
+ nstr = '* %s%s' % (' '.join(modifier.quals),
+ ' ' + nstr if nstr else '')
else:
nstr = '*' + nstr
if nstr: s += ' ' + nstr
@@ -380,11 +412,12 @@ def _generate_type(self, n, modifiers=[]):
elif typ == c_ast.Decl:
return self._generate_decl(n.type)
elif typ == c_ast.Typename:
- return self._generate_type(n.type)
+ return self._generate_type(n.type, emit_declname = emit_declname)
elif typ == c_ast.IdentifierType:
return ' '.join(n.names) + ' '
elif typ in (c_ast.ArrayDecl, c_ast.PtrDecl, c_ast.FuncDecl):
- return self._generate_type(n.type, modifiers + [n])
+ return self._generate_type(n.type, modifiers + [n],
+ emit_declname = emit_declname)
else:
return self.visit(n)
@@ -407,5 +440,5 @@ def _is_simple_node(self, n):
""" Returns True for nodes that are "simple" - i.e. nodes that always
have higher precedence than operators.
"""
- return isinstance(n,( c_ast.Constant, c_ast.ID, c_ast.ArrayRef,
- c_ast.StructRef, c_ast.FuncCall))
+ return isinstance(n, (c_ast.Constant, c_ast.ID, c_ast.ArrayRef,
+ c_ast.StructRef, c_ast.FuncCall))
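
A round-trip sketch that exercises the regenerator changes above: the cast goes through the new emit_declname=False path and the enum is emitted by _generate_struct_union_enum(). Assumes the vendored pycparser is importable.

from pycparser import c_parser, c_generator

src = """
enum color { RED, GREEN = 3 };
int f(void *p) { return (int)(long)p; }
"""

# Parse, then emit C again; enums now print one enumerator per line and
# casts no longer leak declarator names into the emitted type.
ast = c_parser.CParser().parse(src)
print(c_generator.CGenerator().visit(ast))
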
diff --git a/functions/source/CreateSSHKey/pycparser/c_lexer.py b/functions/source/CreateSSHKey/pycparser/c_lexer.py
index d9941c1..045d24e 100644
--- a/functions/source/CreateSSHKey/pycparser/c_lexer.py
+++ b/functions/source/CreateSSHKey/pycparser/c_lexer.py
@@ -3,7 +3,7 @@
#
# CLexer class: lexer for the C language
#
-# Eli Bendersky [http://eli.thegreenplace.net]
+# Eli Bendersky [https://eli.thegreenplace.net/]
# License: BSD
#------------------------------------------------------------------------------
import re
@@ -19,7 +19,7 @@ class CLexer(object):
tokens.
The public attribute filename can be set to an initial
- filaneme, but the lexer will update it upon #line
+ filename, but the lexer will update it upon #line
directives.
"""
def __init__(self, error_func, on_lbrace_func, on_rbrace_func,
@@ -130,7 +130,7 @@ def _make_tok_location(self, token):
'TYPEID',
# constants
- 'INT_CONST_DEC', 'INT_CONST_OCT', 'INT_CONST_HEX', 'INT_CONST_BIN',
+ 'INT_CONST_DEC', 'INT_CONST_OCT', 'INT_CONST_HEX', 'INT_CONST_BIN', 'INT_CONST_CHAR',
'FLOAT_CONST', 'HEX_FLOAT_CONST',
'CHAR_CONST',
'WCHAR_CONST',
@@ -205,23 +205,49 @@ def _make_tok_location(self, token):
# parse all correct code, even if it means to sometimes parse incorrect
# code.
#
- simple_escape = r"""([a-zA-Z._~!=&\^\-\\?'"])"""
- decimal_escape = r"""(\d+)"""
- hex_escape = r"""(x[0-9a-fA-F]+)"""
- bad_escape = r"""([\\][^a-zA-Z._~^!=&\^\-\\?'"x0-7])"""
+ # The original regexes were taken verbatim from the C syntax definition,
+ # and were later modified to avoid worst-case exponential running time.
+ #
+ # simple_escape = r"""([a-zA-Z._~!=&\^\-\\?'"])"""
+ # decimal_escape = r"""(\d+)"""
+ # hex_escape = r"""(x[0-9a-fA-F]+)"""
+ # bad_escape = r"""([\\][^a-zA-Z._~^!=&\^\-\\?'"x0-7])"""
+ #
+ # The following modifications were made to avoid the ambiguity that allowed backtracking:
+ # (https://github.com/eliben/pycparser/issues/61)
+ #
+ # - \x was removed from simple_escape, unless it was not followed by a hex digit, to avoid ambiguity with hex_escape.
+ # - hex_escape allows one or more hex characters, but requires that the next character(if any) is not hex
+ # - decimal_escape allows one or more decimal characters, but requires that the next character(if any) is not a decimal
+ # - bad_escape does not allow any decimals (8-9), to avoid conflicting with the permissive decimal_escape.
+ #
+ # Without this change, python's `re` module would recursively try parsing each ambiguous escape sequence in multiple ways.
+ # e.g. `\123` could be parsed as `\1`+`23`, `\12`+`3`, and `\123`.
+
+ simple_escape = r"""([a-wyzA-Z._~!=&\^\-\\?'"]|x(?![0-9a-fA-F]))"""
+ decimal_escape = r"""(\d+)(?!\d)"""
+ hex_escape = r"""(x[0-9a-fA-F]+)(?![0-9a-fA-F])"""
+ bad_escape = r"""([\\][^a-zA-Z._~^!=&\^\-\\?'"x0-9])"""
escape_sequence = r"""(\\("""+simple_escape+'|'+decimal_escape+'|'+hex_escape+'))'
+
+ # This complicated regex with lookahead might be slow for strings, so because all of the valid escapes (including \x) allowed
+ # 0 or more non-escaped characters after the first character, simple_escape+decimal_escape+hex_escape got simplified to
+
+ escape_sequence_start_in_string = r"""(\\[0-9a-zA-Z._~!=&\^\-\\?'"])"""
+
cconst_char = r"""([^'\\\n]|"""+escape_sequence+')'
char_const = "'"+cconst_char+"'"
wchar_const = 'L'+char_const
+ multicharacter_constant = "'"+cconst_char+"{2,4}'"
unmatched_quote = "('"+cconst_char+"*\\n)|('"+cconst_char+"*$)"
bad_char_const = r"""('"""+cconst_char+"""[^'\n]+')|('')|('"""+bad_escape+r"""[^'\n]*')"""
# string literals (K&R2: A.2.6)
- string_char = r"""([^"\\\n]|"""+escape_sequence+')'
+ string_char = r"""([^"\\\n]|"""+escape_sequence_start_in_string+')'
string_literal = '"'+string_char+'*"'
wstring_literal = 'L'+string_literal
- bad_string_literal = '"'+string_char+'*?'+bad_escape+string_char+'*"'
+ bad_string_literal = '"'+string_char+'*'+bad_escape+string_char+'*"'
# floating constants (K&R2: A.2.5.3)
exponent_part = r"""([eE][-+]?[0-9]+)"""
@@ -443,6 +469,10 @@ def t_INT_CONST_DEC(self, t):
# Must come before bad_char_const, to prevent it from
# catching valid char constants as invalid
#
+ @TOKEN(multicharacter_constant)
+ def t_INT_CONST_CHAR(self, t):
+ return t
+
@TOKEN(char_const)
def t_CHAR_CONST(self, t):
return t
@@ -482,4 +512,3 @@ def t_ID(self, t):
def t_error(self, t):
msg = 'Illegal character %s' % repr(t.value[0])
self._error(msg, t)
-
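
An illustrative sketch of the backtracking problem the rewritten escape regexes avoid: the lookaheads force each escape to match in exactly one way, so an input like \123 can no longer be tried as \1+23, \12+3, and \123 during failed match attempts. The regex bodies are copied from the hunk above.

import re

# Copied from the new lexer definitions: the (?!...) lookaheads make the
# digit/hex runs unambiguous instead of relying on greedy backtracking.
decimal_escape = r"""(\d+)(?!\d)"""
hex_escape = r"""(x[0-9a-fA-F]+)(?![0-9a-fA-F])"""

assert re.fullmatch(r"\\" + decimal_escape, r"\123") is not None
assert re.fullmatch(r"\\" + hex_escape, r"\x1F") is not None
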
diff --git a/functions/source/CreateSSHKey/pycparser/c_parser.py b/functions/source/CreateSSHKey/pycparser/c_parser.py
index f84d6bc..744ede8 100644
--- a/functions/source/CreateSSHKey/pycparser/c_parser.py
+++ b/functions/source/CreateSSHKey/pycparser/c_parser.py
@@ -3,7 +3,7 @@
#
# CParser class: Parser and AST builder for the C language
#
-# Eli Bendersky [http://eli.thegreenplace.net]
+# Eli Bendersky [https://eli.thegreenplace.net/]
# License: BSD
#------------------------------------------------------------------------------
import re
@@ -529,8 +529,7 @@ def p_translation_unit_1(self, p):
def p_translation_unit_2(self, p):
""" translation_unit : translation_unit external_declaration
"""
- if p[2] is not None:
- p[1].extend(p[2])
+ p[1].extend(p[2])
p[0] = p[1]
# Declarations always come as lists (because they can be
@@ -557,7 +556,7 @@ def p_external_declaration_3(self, p):
def p_external_declaration_4(self, p):
""" external_declaration : SEMI
"""
- p[0] = None
+ p[0] = []
def p_pp_directive(self, p):
""" pp_directive : PPHASH
@@ -616,6 +615,59 @@ def p_statement(self, p):
"""
p[0] = p[1]
+ # A pragma is generally considered a decorator rather than an actual statement.
+ # Still, for the purposes of analyzing an abstract syntax tree of C code,
+ # pragma's should not be ignored and were previously treated as a statement.
+ # This presents a problem for constructs that take a statement such as labeled_statements,
+ # selection_statements, and iteration_statements, causing a misleading structure
+ # in the AST. For example, consider the following C code.
+ #
+ # for (int i = 0; i < 3; i++)
+ # #pragma omp critical
+ # sum += 1;
+ #
+ # This code will compile and execute "sum += 1;" as the body of the for loop.
+ # Previous implementations of PyCParser would render the AST for this
+ # block of code as follows:
+ #
+ # For:
+ # DeclList:
+ # Decl: i, [], [], []
+ # TypeDecl: i, []
+ # IdentifierType: ['int']
+ # Constant: int, 0
+ # BinaryOp: <
+ # ID: i
+ # Constant: int, 3
+ # UnaryOp: p++
+ # ID: i
+ # Pragma: omp critical
+ # Assignment: +=
+ # ID: sum
+ # Constant: int, 1
+ #
+ # This AST misleadingly takes the Pragma as the body of the loop and the
+ # assignment then becomes a sibling of the loop.
+ #
+ # To solve edge cases like these, the pragmacomp_or_statement rule groups
+ # a pragma and its following statement (which would otherwise be orphaned)
+ # using a compound block, effectively turning the above code into:
+ #
+ # for (int i = 0; i < 3; i++) {
+ # #pragma omp critical
+ # sum += 1;
+ # }
+ def p_pragmacomp_or_statement(self, p):
+ """ pragmacomp_or_statement : pppragma_directive statement
+ | statement
+ """
+ if isinstance(p[1], c_ast.Pragma) and len(p) == 3:
+ p[0] = c_ast.Compound(
+ block_items=[p[1], p[2]],
+ coord=self._token_coord(p, 1))
+ else:
+ p[0] = p[1]
+
# In C, declarations can come several in a line:
# int x, *px, romulo = 5;
#
@@ -855,6 +907,7 @@ def p_struct_or_union_specifier_1(self, p):
| struct_or_union TYPEID
"""
klass = self._select_struct_union_class(p[1])
+ # None means no list of members
p[0] = klass(
name=p[2],
decls=None,
@@ -862,22 +915,40 @@ def p_struct_or_union_specifier_1(self, p):
def p_struct_or_union_specifier_2(self, p):
""" struct_or_union_specifier : struct_or_union brace_open struct_declaration_list brace_close
+ | struct_or_union brace_open brace_close
"""
klass = self._select_struct_union_class(p[1])
- p[0] = klass(
- name=None,
- decls=p[3],
- coord=self._token_coord(p, 2))
+ if len(p) == 4:
+ # Empty sequence means an empty list of members
+ p[0] = klass(
+ name=None,
+ decls=[],
+ coord=self._token_coord(p, 2))
+ else:
+ p[0] = klass(
+ name=None,
+ decls=p[3],
+ coord=self._token_coord(p, 2))
+
def p_struct_or_union_specifier_3(self, p):
""" struct_or_union_specifier : struct_or_union ID brace_open struct_declaration_list brace_close
+ | struct_or_union ID brace_open brace_close
| struct_or_union TYPEID brace_open struct_declaration_list brace_close
+ | struct_or_union TYPEID brace_open brace_close
"""
klass = self._select_struct_union_class(p[1])
- p[0] = klass(
- name=p[2],
- decls=p[4],
- coord=self._token_coord(p, 2))
+ if len(p) == 5:
+ # Empty sequence means an empty list of members
+ p[0] = klass(
+ name=p[2],
+ decls=[],
+ coord=self._token_coord(p, 2))
+ else:
+ p[0] = klass(
+ name=p[2],
+ decls=p[4],
+ coord=self._token_coord(p, 2))
def p_struct_or_union(self, p):
""" struct_or_union : STRUCT
@@ -939,6 +1010,11 @@ def p_struct_declaration_2(self, p):
"""
p[0] = None
+ def p_struct_declaration_3(self, p):
+ """ struct_declaration : pppragma_directive
+ """
+ p[0] = [p[1]]
+
def p_struct_declarator_list(self, p):
""" struct_declarator_list : struct_declarator
| struct_declarator_list COMMA struct_declarator
@@ -1334,12 +1410,13 @@ def p_direct_abstract_declarator_2(self, p):
p[0] = self._type_modify_decl(decl=p[1], modifier=arr)
def p_direct_abstract_declarator_3(self, p):
- """ direct_abstract_declarator : LBRACKET assignment_expression_opt RBRACKET
+ """ direct_abstract_declarator : LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET
"""
+ quals = (p[2] if len(p) > 4 else []) or []
p[0] = c_ast.ArrayDecl(
type=c_ast.TypeDecl(None, None, None),
- dim=p[2],
- dim_quals=[],
+ dim=p[3] if len(p) > 4 else p[2],
+ dim_quals=quals,
coord=self._token_coord(p, 1))
def p_direct_abstract_declarator_4(self, p):
@@ -1405,44 +1482,44 @@ def p_compound_statement_1(self, p):
coord=self._token_coord(p, 1))
def p_labeled_statement_1(self, p):
- """ labeled_statement : ID COLON statement """
+ """ labeled_statement : ID COLON pragmacomp_or_statement """
p[0] = c_ast.Label(p[1], p[3], self._token_coord(p, 1))
def p_labeled_statement_2(self, p):
- """ labeled_statement : CASE constant_expression COLON statement """
+ """ labeled_statement : CASE constant_expression COLON pragmacomp_or_statement """
p[0] = c_ast.Case(p[2], [p[4]], self._token_coord(p, 1))
def p_labeled_statement_3(self, p):
- """ labeled_statement : DEFAULT COLON statement """
+ """ labeled_statement : DEFAULT COLON pragmacomp_or_statement """
p[0] = c_ast.Default([p[3]], self._token_coord(p, 1))
def p_selection_statement_1(self, p):
- """ selection_statement : IF LPAREN expression RPAREN statement """
+ """ selection_statement : IF LPAREN expression RPAREN pragmacomp_or_statement """
p[0] = c_ast.If(p[3], p[5], None, self._token_coord(p, 1))
def p_selection_statement_2(self, p):
- """ selection_statement : IF LPAREN expression RPAREN statement ELSE statement """
+ """ selection_statement : IF LPAREN expression RPAREN statement ELSE pragmacomp_or_statement """
p[0] = c_ast.If(p[3], p[5], p[7], self._token_coord(p, 1))
def p_selection_statement_3(self, p):
- """ selection_statement : SWITCH LPAREN expression RPAREN statement """
+ """ selection_statement : SWITCH LPAREN expression RPAREN pragmacomp_or_statement """
p[0] = fix_switch_cases(
c_ast.Switch(p[3], p[5], self._token_coord(p, 1)))
def p_iteration_statement_1(self, p):
- """ iteration_statement : WHILE LPAREN expression RPAREN statement """
+ """ iteration_statement : WHILE LPAREN expression RPAREN pragmacomp_or_statement """
p[0] = c_ast.While(p[3], p[5], self._token_coord(p, 1))
def p_iteration_statement_2(self, p):
- """ iteration_statement : DO statement WHILE LPAREN expression RPAREN SEMI """
+ """ iteration_statement : DO pragmacomp_or_statement WHILE LPAREN expression RPAREN SEMI """
p[0] = c_ast.DoWhile(p[5], p[2], self._token_coord(p, 1))
def p_iteration_statement_3(self, p):
- """ iteration_statement : FOR LPAREN expression_opt SEMI expression_opt SEMI expression_opt RPAREN statement """
+ """ iteration_statement : FOR LPAREN expression_opt SEMI expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement """
p[0] = c_ast.For(p[3], p[5], p[7], p[9], self._token_coord(p, 1))
def p_iteration_statement_4(self, p):
- """ iteration_statement : FOR LPAREN declaration expression_opt SEMI expression_opt RPAREN statement """
+ """ iteration_statement : FOR LPAREN declaration expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement """
p[0] = c_ast.For(c_ast.DeclList(p[3], self._token_coord(p, 1)),
p[4], p[6], p[8], self._token_coord(p, 1))
@@ -1663,8 +1740,7 @@ def p_offsetof_member_designator(self, p):
if len(p) == 2:
p[0] = p[1]
elif len(p) == 4:
- field = c_ast.ID(p[3], self._token_coord(p, 3))
- p[0] = c_ast.StructRef(p[1], p[2], field, p[1].coord)
+ p[0] = c_ast.StructRef(p[1], p[2], p[3], p[1].coord)
elif len(p) == 5:
p[0] = c_ast.ArrayRef(p[1], p[3], p[1].coord)
else:
@@ -1689,16 +1765,40 @@ def p_constant_1(self, p):
| INT_CONST_OCT
| INT_CONST_HEX
| INT_CONST_BIN
- """
+ | INT_CONST_CHAR
+ """
+ uCount = 0
+ lCount = 0
+ for x in p[1][-3:]:
+ if x in ('l', 'L'):
+ lCount += 1
+ elif x in ('u', 'U'):
+ uCount += 1
+ t = ''
+ if uCount > 1:
+ raise ValueError('Constant cannot have more than one u/U suffix.')
+ elif lCount > 2:
+ raise ValueError('Constant cannot have more than two l/L suffix.')
+ prefix = 'unsigned ' * uCount + 'long ' * lCount
p[0] = c_ast.Constant(
- 'int', p[1], self._token_coord(p, 1))
+ prefix + 'int', p[1], self._token_coord(p, 1))
def p_constant_2(self, p):
""" constant : FLOAT_CONST
| HEX_FLOAT_CONST
"""
+ if 'x' in p[1].lower():
+ t = 'float'
+ else:
+ if p[1][-1] in ('f', 'F'):
+ t = 'float'
+ elif p[1][-1] in ('l', 'L'):
+ t = 'long double'
+ else:
+ t = 'double'
+
p[0] = c_ast.Constant(
- 'float', p[1], self._token_coord(p, 1))
+ t, p[1], self._token_coord(p, 1))
def p_constant_3(self, p):
""" constant : CHAR_CONST
@@ -1761,22 +1861,3 @@ def p_error(self, p):
column=self.clex.find_tok_column(p)))
else:
self._parse_error('At end of input', self.clex.filename)
-
-
-#------------------------------------------------------------------------------
-if __name__ == "__main__":
- import pprint
- import time, sys
-
- #t1 = time.time()
- #parser = CParser(lex_optimize=True, yacc_debug=True, yacc_optimize=False)
- #sys.write(time.time() - t1)
-
- #buf = '''
- #int (*k)(int);
- #'''
-
- ## set debuglevel to 2 for debugging
- #t = parser.parse(buf, 'x.c', debuglevel=0)
- #t.show(showcoord=True)
-
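
A sketch of two user-visible effects of the parser changes above: integer and float suffixes now influence Constant.type, and a #pragma directly under a loop is grouped with the following statement into a Compound instead of stealing the loop body. Assumes the vendored pycparser is importable.

from pycparser import c_parser

parser = c_parser.CParser()

# Literal suffixes are reflected in Constant.type (p_constant_1/p_constant_2).
decls = parser.parse("unsigned long a = 10UL; float b = 1.5f;")
for decl in decls.ext:
    print(decl.name, '->', decl.init.type)
# a -> unsigned long int
# b -> float

# pragmacomp_or_statement: the pragma and the assignment become one Compound
# forming the loop body, so the assignment stays inside the for loop.
loop = parser.parse("""
void g(int sum) {
    for (int i = 0; i < 3; i++)
        #pragma omp critical
        sum += 1;
}
""")
loop.show()
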
diff --git a/functions/source/CreateSSHKey/pycparser/ply/__pycache__/__init__.cpython-38.pyc b/functions/source/CreateSSHKey/pycparser/ply/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000..8e9ed41
Binary files /dev/null and b/functions/source/CreateSSHKey/pycparser/ply/__pycache__/__init__.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/pycparser/ply/__pycache__/cpp.cpython-38.pyc b/functions/source/CreateSSHKey/pycparser/ply/__pycache__/cpp.cpython-38.pyc
new file mode 100644
index 0000000..e39a87e
Binary files /dev/null and b/functions/source/CreateSSHKey/pycparser/ply/__pycache__/cpp.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/pycparser/ply/__pycache__/ctokens.cpython-38.pyc b/functions/source/CreateSSHKey/pycparser/ply/__pycache__/ctokens.cpython-38.pyc
new file mode 100644
index 0000000..81eb1c1
Binary files /dev/null and b/functions/source/CreateSSHKey/pycparser/ply/__pycache__/ctokens.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/pycparser/ply/__pycache__/lex.cpython-38.pyc b/functions/source/CreateSSHKey/pycparser/ply/__pycache__/lex.cpython-38.pyc
new file mode 100644
index 0000000..7767411
Binary files /dev/null and b/functions/source/CreateSSHKey/pycparser/ply/__pycache__/lex.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/pycparser/ply/__pycache__/yacc.cpython-38.pyc b/functions/source/CreateSSHKey/pycparser/ply/__pycache__/yacc.cpython-38.pyc
new file mode 100644
index 0000000..d29d345
Binary files /dev/null and b/functions/source/CreateSSHKey/pycparser/ply/__pycache__/yacc.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/pycparser/ply/__pycache__/ygen.cpython-38.pyc b/functions/source/CreateSSHKey/pycparser/ply/__pycache__/ygen.cpython-38.pyc
new file mode 100644
index 0000000..b5f5524
Binary files /dev/null and b/functions/source/CreateSSHKey/pycparser/ply/__pycache__/ygen.cpython-38.pyc differ
diff --git a/functions/source/CreateSSHKey/pycparser/ply/cpp.py b/functions/source/CreateSSHKey/pycparser/ply/cpp.py
index 8ff0be1..86273ea 100644
--- a/functions/source/CreateSSHKey/pycparser/ply/cpp.py
+++ b/functions/source/CreateSSHKey/pycparser/ply/cpp.py
@@ -7,8 +7,6 @@
#
# This module implements an ANSI-C style lexical preprocessor for PLY.
# -----------------------------------------------------------------------------
-from __future__ import generators
-
import sys
# Some Python 3 compatibility shims
diff --git a/functions/source/CreateSSHKey/pycparser/ply/yacc.py b/functions/source/CreateSSHKey/pycparser/ply/yacc.py
index 03bd86e..20b4f28 100644
--- a/functions/source/CreateSSHKey/pycparser/ply/yacc.py
+++ b/functions/source/CreateSSHKey/pycparser/ply/yacc.py
@@ -309,7 +309,7 @@ def restart(self):
# certain kinds of advanced parsing situations where the lexer and parser interact with
# each other or change states (i.e., manipulation of scope, lexer states, etc.).
#
- # See: http://www.gnu.org/software/bison/manual/html_node/Default-Reductions.html#Default-Reductions
+ # See: https://www.gnu.org/software/bison/manual/html_node/Default-Reductions.html#Default-Reductions
def set_defaulted_states(self):
self.defaulted_states = {}
for state, actions in self.action.items():
diff --git a/functions/source/CreateSSHKey/pycparser/plyparser.py b/functions/source/CreateSSHKey/pycparser/plyparser.py
index af91922..6222c0e 100644
--- a/functions/source/CreateSSHKey/pycparser/plyparser.py
+++ b/functions/source/CreateSSHKey/pycparser/plyparser.py
@@ -4,10 +4,11 @@
# PLYParser class and other utilites for simplifying programming
# parsers with PLY
#
-# Eli Bendersky [http://eli.thegreenplace.net]
+# Eli Bendersky [https://eli.thegreenplace.net/]
# License: BSD
#-----------------------------------------------------------------
+import warnings
class Coord(object):
""" Coordinates of a syntactic element. Consists of:
@@ -87,12 +88,28 @@ def template(cls):
See `parameterized` for more information on parameterized rules.
"""
+ issued_nodoc_warning = False
for attr_name in dir(cls):
if attr_name.startswith('p_'):
method = getattr(cls, attr_name)
if hasattr(method, '_params'):
- delattr(cls, attr_name) # Remove template method
- _create_param_rules(cls, method)
+ # Remove the template method
+ delattr(cls, attr_name)
+ # Create parameterized rules from this method; only run this if
+ # the method has a docstring. This is to address an issue when
+ # pycparser's users are installed in -OO mode which strips
+ # docstrings away.
+ # See: https://github.com/eliben/pycparser/pull/198/ and
+ # https://github.com/eliben/pycparser/issues/197
+ # for discussion.
+ if method.__doc__ is not None:
+ _create_param_rules(cls, method)
+ elif not issued_nodoc_warning:
+ warnings.warn(
+ 'parsing methods must have __doc__ for pycparser to work properly',
+ RuntimeWarning,
+ stacklevel=2)
+ issued_nodoc_warning = True
return cls
diff --git a/functions/source/CreateSSHKey/pycparser/yacctab.py b/functions/source/CreateSSHKey/pycparser/yacctab.py
index a244322..7fbdef9 100644
--- a/functions/source/CreateSSHKey/pycparser/yacctab.py
+++ b/functions/source/CreateSSHKey/pycparser/yacctab.py
@@ -5,9 +5,9 @@
_lr_method = 'LALR'
-_lr_signature = 'translation_unit_or_emptyleftLORleftLANDleftORleftXORleftANDleftEQNEleftGTGELTLEleftRSHIFTLSHIFTleftPLUSMINUSleftTIMESDIVIDEMOD_BOOL _COMPLEX AUTO BREAK CASE CHAR CONST CONTINUE DEFAULT DO DOUBLE ELSE ENUM EXTERN FLOAT FOR GOTO IF INLINE INT LONG REGISTER OFFSETOF RESTRICT RETURN SHORT SIGNED SIZEOF STATIC STRUCT SWITCH TYPEDEF UNION UNSIGNED VOID VOLATILE WHILE __INT128 ID TYPEID INT_CONST_DEC INT_CONST_OCT INT_CONST_HEX INT_CONST_BIN FLOAT_CONST HEX_FLOAT_CONST CHAR_CONST WCHAR_CONST STRING_LITERAL WSTRING_LITERAL PLUS MINUS TIMES DIVIDE MOD OR AND NOT XOR LSHIFT RSHIFT LOR LAND LNOT LT LE GT GE EQ NE EQUALS TIMESEQUAL DIVEQUAL MODEQUAL PLUSEQUAL MINUSEQUAL LSHIFTEQUAL RSHIFTEQUAL ANDEQUAL XOREQUAL OREQUAL PLUSPLUS MINUSMINUS ARROW CONDOP LPAREN RPAREN LBRACKET RBRACKET LBRACE RBRACE COMMA PERIOD SEMI COLON ELLIPSIS PPHASH PPPRAGMA PPPRAGMASTRabstract_declarator_opt : empty\n| abstract_declaratorassignment_expression_opt : empty\n| assignment_expressionblock_item_list_opt : empty\n| block_item_listdeclaration_list_opt : empty\n| declaration_listdeclaration_specifiers_no_type_opt : empty\n| declaration_specifiers_no_typedesignation_opt : empty\n| designationexpression_opt : empty\n| expressionid_init_declarator_list_opt : empty\n| id_init_declarator_listidentifier_list_opt : empty\n| identifier_listinit_declarator_list_opt : empty\n| init_declarator_listinitializer_list_opt : empty\n| initializer_listparameter_type_list_opt : empty\n| parameter_type_liststruct_declarator_list_opt : empty\n| struct_declarator_listtype_qualifier_list_opt : empty\n| type_qualifier_list direct_id_declarator : ID\n direct_id_declarator : LPAREN id_declarator RPAREN\n direct_id_declarator : direct_id_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET\n direct_id_declarator : direct_id_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET\n | direct_id_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET\n direct_id_declarator : direct_id_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET\n direct_id_declarator : direct_id_declarator LPAREN parameter_type_list RPAREN\n | direct_id_declarator LPAREN identifier_list_opt RPAREN\n direct_typeid_declarator : TYPEID\n direct_typeid_declarator : LPAREN typeid_declarator RPAREN\n direct_typeid_declarator : direct_typeid_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET\n direct_typeid_declarator : direct_typeid_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET\n | direct_typeid_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET\n direct_typeid_declarator : direct_typeid_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET\n direct_typeid_declarator : direct_typeid_declarator LPAREN parameter_type_list RPAREN\n | direct_typeid_declarator LPAREN identifier_list_opt RPAREN\n direct_typeid_noparen_declarator : TYPEID\n direct_typeid_noparen_declarator : direct_typeid_noparen_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET\n direct_typeid_noparen_declarator : direct_typeid_noparen_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET\n | direct_typeid_noparen_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET\n direct_typeid_noparen_declarator : direct_typeid_noparen_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET\n direct_typeid_noparen_declarator : direct_typeid_noparen_declarator 
LPAREN parameter_type_list RPAREN\n | direct_typeid_noparen_declarator LPAREN identifier_list_opt RPAREN\n id_declarator : direct_id_declarator\n id_declarator : pointer direct_id_declarator\n typeid_declarator : direct_typeid_declarator\n typeid_declarator : pointer direct_typeid_declarator\n typeid_noparen_declarator : direct_typeid_noparen_declarator\n typeid_noparen_declarator : pointer direct_typeid_noparen_declarator\n translation_unit_or_empty : translation_unit\n | empty\n translation_unit : external_declaration\n translation_unit : translation_unit external_declaration\n external_declaration : function_definition\n external_declaration : declaration\n external_declaration : pp_directive\n | pppragma_directive\n external_declaration : SEMI\n pp_directive : PPHASH\n pppragma_directive : PPPRAGMA\n | PPPRAGMA PPPRAGMASTR\n function_definition : id_declarator declaration_list_opt compound_statement\n function_definition : declaration_specifiers id_declarator declaration_list_opt compound_statement\n statement : labeled_statement\n | expression_statement\n | compound_statement\n | selection_statement\n | iteration_statement\n | jump_statement\n | pppragma_directive\n decl_body : declaration_specifiers init_declarator_list_opt\n | declaration_specifiers_no_type id_init_declarator_list_opt\n declaration : decl_body SEMI\n declaration_list : declaration\n | declaration_list declaration\n declaration_specifiers_no_type : type_qualifier declaration_specifiers_no_type_opt\n declaration_specifiers_no_type : storage_class_specifier declaration_specifiers_no_type_opt\n declaration_specifiers_no_type : function_specifier declaration_specifiers_no_type_opt\n declaration_specifiers : declaration_specifiers type_qualifier\n declaration_specifiers : declaration_specifiers storage_class_specifier\n declaration_specifiers : declaration_specifiers function_specifier\n declaration_specifiers : declaration_specifiers type_specifier_no_typeid\n declaration_specifiers : type_specifier\n declaration_specifiers : declaration_specifiers_no_type type_specifier\n storage_class_specifier : AUTO\n | REGISTER\n | STATIC\n | EXTERN\n | TYPEDEF\n function_specifier : INLINE\n type_specifier_no_typeid : VOID\n | _BOOL\n | CHAR\n | SHORT\n | INT\n | LONG\n | FLOAT\n | DOUBLE\n | _COMPLEX\n | SIGNED\n | UNSIGNED\n | __INT128\n type_specifier : typedef_name\n | enum_specifier\n | struct_or_union_specifier\n | type_specifier_no_typeid\n type_qualifier : CONST\n | RESTRICT\n | VOLATILE\n init_declarator_list : init_declarator\n | init_declarator_list COMMA init_declarator\n init_declarator : declarator\n | declarator EQUALS initializer\n id_init_declarator_list : id_init_declarator\n | id_init_declarator_list COMMA init_declarator\n id_init_declarator : id_declarator\n | id_declarator EQUALS initializer\n specifier_qualifier_list : specifier_qualifier_list type_specifier_no_typeid\n specifier_qualifier_list : specifier_qualifier_list type_qualifier\n specifier_qualifier_list : type_specifier\n specifier_qualifier_list : type_qualifier_list type_specifier\n struct_or_union_specifier : struct_or_union ID\n | struct_or_union TYPEID\n struct_or_union_specifier : struct_or_union brace_open struct_declaration_list brace_close\n struct_or_union_specifier : struct_or_union ID brace_open struct_declaration_list brace_close\n | struct_or_union TYPEID brace_open struct_declaration_list brace_close\n struct_or_union : STRUCT\n | UNION\n struct_declaration_list : struct_declaration\n | struct_declaration_list struct_declaration\n 
struct_declaration : specifier_qualifier_list struct_declarator_list_opt SEMI\n struct_declaration : SEMI\n struct_declarator_list : struct_declarator\n | struct_declarator_list COMMA struct_declarator\n struct_declarator : declarator\n struct_declarator : declarator COLON constant_expression\n | COLON constant_expression\n enum_specifier : ENUM ID\n | ENUM TYPEID\n enum_specifier : ENUM brace_open enumerator_list brace_close\n enum_specifier : ENUM ID brace_open enumerator_list brace_close\n | ENUM TYPEID brace_open enumerator_list brace_close\n enumerator_list : enumerator\n | enumerator_list COMMA\n | enumerator_list COMMA enumerator\n enumerator : ID\n | ID EQUALS constant_expression\n declarator : id_declarator\n | typeid_declarator\n pointer : TIMES type_qualifier_list_opt\n | TIMES type_qualifier_list_opt pointer\n type_qualifier_list : type_qualifier\n | type_qualifier_list type_qualifier\n parameter_type_list : parameter_list\n | parameter_list COMMA ELLIPSIS\n parameter_list : parameter_declaration\n | parameter_list COMMA parameter_declaration\n parameter_declaration : declaration_specifiers id_declarator\n | declaration_specifiers typeid_noparen_declarator\n parameter_declaration : declaration_specifiers abstract_declarator_opt\n identifier_list : identifier\n | identifier_list COMMA identifier\n initializer : assignment_expression\n initializer : brace_open initializer_list_opt brace_close\n | brace_open initializer_list COMMA brace_close\n initializer_list : designation_opt initializer\n | initializer_list COMMA designation_opt initializer\n designation : designator_list EQUALS\n designator_list : designator\n | designator_list designator\n designator : LBRACKET constant_expression RBRACKET\n | PERIOD identifier\n type_name : specifier_qualifier_list abstract_declarator_opt\n abstract_declarator : pointer\n abstract_declarator : pointer direct_abstract_declarator\n abstract_declarator : direct_abstract_declarator\n direct_abstract_declarator : LPAREN abstract_declarator RPAREN direct_abstract_declarator : direct_abstract_declarator LBRACKET assignment_expression_opt RBRACKET\n direct_abstract_declarator : LBRACKET assignment_expression_opt RBRACKET\n direct_abstract_declarator : direct_abstract_declarator LBRACKET TIMES RBRACKET\n direct_abstract_declarator : LBRACKET TIMES RBRACKET\n direct_abstract_declarator : direct_abstract_declarator LPAREN parameter_type_list_opt RPAREN\n direct_abstract_declarator : LPAREN parameter_type_list_opt RPAREN\n block_item : declaration\n | statement\n block_item_list : block_item\n | block_item_list block_item\n compound_statement : brace_open block_item_list_opt brace_close labeled_statement : ID COLON statement labeled_statement : CASE constant_expression COLON statement labeled_statement : DEFAULT COLON statement selection_statement : IF LPAREN expression RPAREN statement selection_statement : IF LPAREN expression RPAREN statement ELSE statement selection_statement : SWITCH LPAREN expression RPAREN statement iteration_statement : WHILE LPAREN expression RPAREN statement iteration_statement : DO statement WHILE LPAREN expression RPAREN SEMI iteration_statement : FOR LPAREN expression_opt SEMI expression_opt SEMI expression_opt RPAREN statement iteration_statement : FOR LPAREN declaration expression_opt SEMI expression_opt RPAREN statement jump_statement : GOTO ID SEMI jump_statement : BREAK SEMI jump_statement : CONTINUE SEMI jump_statement : RETURN expression SEMI\n | RETURN SEMI\n expression_statement : expression_opt SEMI expression : 
assignment_expression\n | expression COMMA assignment_expression\n typedef_name : TYPEID assignment_expression : conditional_expression\n | unary_expression assignment_operator assignment_expression\n assignment_operator : EQUALS\n | XOREQUAL\n | TIMESEQUAL\n | DIVEQUAL\n | MODEQUAL\n | PLUSEQUAL\n | MINUSEQUAL\n | LSHIFTEQUAL\n | RSHIFTEQUAL\n | ANDEQUAL\n | OREQUAL\n constant_expression : conditional_expression conditional_expression : binary_expression\n | binary_expression CONDOP expression COLON conditional_expression\n binary_expression : cast_expression\n | binary_expression TIMES binary_expression\n | binary_expression DIVIDE binary_expression\n | binary_expression MOD binary_expression\n | binary_expression PLUS binary_expression\n | binary_expression MINUS binary_expression\n | binary_expression RSHIFT binary_expression\n | binary_expression LSHIFT binary_expression\n | binary_expression LT binary_expression\n | binary_expression LE binary_expression\n | binary_expression GE binary_expression\n | binary_expression GT binary_expression\n | binary_expression EQ binary_expression\n | binary_expression NE binary_expression\n | binary_expression AND binary_expression\n | binary_expression OR binary_expression\n | binary_expression XOR binary_expression\n | binary_expression LAND binary_expression\n | binary_expression LOR binary_expression\n cast_expression : unary_expression cast_expression : LPAREN type_name RPAREN cast_expression unary_expression : postfix_expression unary_expression : PLUSPLUS unary_expression\n | MINUSMINUS unary_expression\n | unary_operator cast_expression\n unary_expression : SIZEOF unary_expression\n | SIZEOF LPAREN type_name RPAREN\n unary_operator : AND\n | TIMES\n | PLUS\n | MINUS\n | NOT\n | LNOT\n postfix_expression : primary_expression postfix_expression : postfix_expression LBRACKET expression RBRACKET postfix_expression : postfix_expression LPAREN argument_expression_list RPAREN\n | postfix_expression LPAREN RPAREN\n postfix_expression : postfix_expression PERIOD ID\n | postfix_expression PERIOD TYPEID\n | postfix_expression ARROW ID\n | postfix_expression ARROW TYPEID\n postfix_expression : postfix_expression PLUSPLUS\n | postfix_expression MINUSMINUS\n postfix_expression : LPAREN type_name RPAREN brace_open initializer_list brace_close\n | LPAREN type_name RPAREN brace_open initializer_list COMMA brace_close\n primary_expression : identifier primary_expression : constant primary_expression : unified_string_literal\n | unified_wstring_literal\n primary_expression : LPAREN expression RPAREN primary_expression : OFFSETOF LPAREN type_name COMMA offsetof_member_designator RPAREN\n offsetof_member_designator : identifier\n | offsetof_member_designator PERIOD identifier\n | offsetof_member_designator LBRACKET expression RBRACKET\n argument_expression_list : assignment_expression\n | argument_expression_list COMMA assignment_expression\n identifier : ID constant : INT_CONST_DEC\n | INT_CONST_OCT\n | INT_CONST_HEX\n | INT_CONST_BIN\n constant : FLOAT_CONST\n | HEX_FLOAT_CONST\n constant : CHAR_CONST\n | WCHAR_CONST\n unified_string_literal : STRING_LITERAL\n | unified_string_literal STRING_LITERAL\n unified_wstring_literal : WSTRING_LITERAL\n | unified_wstring_literal WSTRING_LITERAL\n brace_open : LBRACE\n brace_close : RBRACE\n empty : '
+_lr_signature = 'translation_unit_or_emptyleftLORleftLANDleftORleftXORleftANDleftEQNEleftGTGELTLEleftRSHIFTLSHIFTleftPLUSMINUSleftTIMESDIVIDEMOD_BOOL _COMPLEX AUTO BREAK CASE CHAR CONST CONTINUE DEFAULT DO DOUBLE ELSE ENUM EXTERN FLOAT FOR GOTO IF INLINE INT LONG REGISTER OFFSETOF RESTRICT RETURN SHORT SIGNED SIZEOF STATIC STRUCT SWITCH TYPEDEF UNION UNSIGNED VOID VOLATILE WHILE __INT128 ID TYPEID INT_CONST_DEC INT_CONST_OCT INT_CONST_HEX INT_CONST_BIN FLOAT_CONST HEX_FLOAT_CONST CHAR_CONST WCHAR_CONST STRING_LITERAL WSTRING_LITERAL PLUS MINUS TIMES DIVIDE MOD OR AND NOT XOR LSHIFT RSHIFT LOR LAND LNOT LT LE GT GE EQ NE EQUALS TIMESEQUAL DIVEQUAL MODEQUAL PLUSEQUAL MINUSEQUAL LSHIFTEQUAL RSHIFTEQUAL ANDEQUAL XOREQUAL OREQUAL PLUSPLUS MINUSMINUS ARROW CONDOP LPAREN RPAREN LBRACKET RBRACKET LBRACE RBRACE COMMA PERIOD SEMI COLON ELLIPSIS PPHASH PPPRAGMA PPPRAGMASTRabstract_declarator_opt : empty\n| abstract_declaratorassignment_expression_opt : empty\n| assignment_expressionblock_item_list_opt : empty\n| block_item_listdeclaration_list_opt : empty\n| declaration_listdeclaration_specifiers_no_type_opt : empty\n| declaration_specifiers_no_typedesignation_opt : empty\n| designationexpression_opt : empty\n| expressionid_init_declarator_list_opt : empty\n| id_init_declarator_listidentifier_list_opt : empty\n| identifier_listinit_declarator_list_opt : empty\n| init_declarator_listinitializer_list_opt : empty\n| initializer_listparameter_type_list_opt : empty\n| parameter_type_liststruct_declarator_list_opt : empty\n| struct_declarator_listtype_qualifier_list_opt : empty\n| type_qualifier_list direct_id_declarator : ID\n direct_id_declarator : LPAREN id_declarator RPAREN\n direct_id_declarator : direct_id_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET\n direct_id_declarator : direct_id_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET\n | direct_id_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET\n direct_id_declarator : direct_id_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET\n direct_id_declarator : direct_id_declarator LPAREN parameter_type_list RPAREN\n | direct_id_declarator LPAREN identifier_list_opt RPAREN\n direct_typeid_declarator : TYPEID\n direct_typeid_declarator : LPAREN typeid_declarator RPAREN\n direct_typeid_declarator : direct_typeid_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET\n direct_typeid_declarator : direct_typeid_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET\n | direct_typeid_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET\n direct_typeid_declarator : direct_typeid_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET\n direct_typeid_declarator : direct_typeid_declarator LPAREN parameter_type_list RPAREN\n | direct_typeid_declarator LPAREN identifier_list_opt RPAREN\n direct_typeid_noparen_declarator : TYPEID\n direct_typeid_noparen_declarator : direct_typeid_noparen_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET\n direct_typeid_noparen_declarator : direct_typeid_noparen_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET\n | direct_typeid_noparen_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET\n direct_typeid_noparen_declarator : direct_typeid_noparen_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET\n direct_typeid_noparen_declarator : direct_typeid_noparen_declarator 
LPAREN parameter_type_list RPAREN\n | direct_typeid_noparen_declarator LPAREN identifier_list_opt RPAREN\n id_declarator : direct_id_declarator\n id_declarator : pointer direct_id_declarator\n typeid_declarator : direct_typeid_declarator\n typeid_declarator : pointer direct_typeid_declarator\n typeid_noparen_declarator : direct_typeid_noparen_declarator\n typeid_noparen_declarator : pointer direct_typeid_noparen_declarator\n translation_unit_or_empty : translation_unit\n | empty\n translation_unit : external_declaration\n translation_unit : translation_unit external_declaration\n external_declaration : function_definition\n external_declaration : declaration\n external_declaration : pp_directive\n | pppragma_directive\n external_declaration : SEMI\n pp_directive : PPHASH\n pppragma_directive : PPPRAGMA\n | PPPRAGMA PPPRAGMASTR\n function_definition : id_declarator declaration_list_opt compound_statement\n function_definition : declaration_specifiers id_declarator declaration_list_opt compound_statement\n statement : labeled_statement\n | expression_statement\n | compound_statement\n | selection_statement\n | iteration_statement\n | jump_statement\n | pppragma_directive\n pragmacomp_or_statement : pppragma_directive statement\n | statement\n decl_body : declaration_specifiers init_declarator_list_opt\n | declaration_specifiers_no_type id_init_declarator_list_opt\n declaration : decl_body SEMI\n declaration_list : declaration\n | declaration_list declaration\n declaration_specifiers_no_type : type_qualifier declaration_specifiers_no_type_opt\n declaration_specifiers_no_type : storage_class_specifier declaration_specifiers_no_type_opt\n declaration_specifiers_no_type : function_specifier declaration_specifiers_no_type_opt\n declaration_specifiers : declaration_specifiers type_qualifier\n declaration_specifiers : declaration_specifiers storage_class_specifier\n declaration_specifiers : declaration_specifiers function_specifier\n declaration_specifiers : declaration_specifiers type_specifier_no_typeid\n declaration_specifiers : type_specifier\n declaration_specifiers : declaration_specifiers_no_type type_specifier\n storage_class_specifier : AUTO\n | REGISTER\n | STATIC\n | EXTERN\n | TYPEDEF\n function_specifier : INLINE\n type_specifier_no_typeid : VOID\n | _BOOL\n | CHAR\n | SHORT\n | INT\n | LONG\n | FLOAT\n | DOUBLE\n | _COMPLEX\n | SIGNED\n | UNSIGNED\n | __INT128\n type_specifier : typedef_name\n | enum_specifier\n | struct_or_union_specifier\n | type_specifier_no_typeid\n type_qualifier : CONST\n | RESTRICT\n | VOLATILE\n init_declarator_list : init_declarator\n | init_declarator_list COMMA init_declarator\n init_declarator : declarator\n | declarator EQUALS initializer\n id_init_declarator_list : id_init_declarator\n | id_init_declarator_list COMMA init_declarator\n id_init_declarator : id_declarator\n | id_declarator EQUALS initializer\n specifier_qualifier_list : specifier_qualifier_list type_specifier_no_typeid\n specifier_qualifier_list : specifier_qualifier_list type_qualifier\n specifier_qualifier_list : type_specifier\n specifier_qualifier_list : type_qualifier_list type_specifier\n struct_or_union_specifier : struct_or_union ID\n | struct_or_union TYPEID\n struct_or_union_specifier : struct_or_union brace_open struct_declaration_list brace_close\n | struct_or_union brace_open brace_close\n struct_or_union_specifier : struct_or_union ID brace_open struct_declaration_list brace_close\n | struct_or_union ID brace_open brace_close\n | struct_or_union TYPEID brace_open 
struct_declaration_list brace_close\n | struct_or_union TYPEID brace_open brace_close\n struct_or_union : STRUCT\n | UNION\n struct_declaration_list : struct_declaration\n | struct_declaration_list struct_declaration\n struct_declaration : specifier_qualifier_list struct_declarator_list_opt SEMI\n struct_declaration : SEMI\n struct_declaration : pppragma_directive\n struct_declarator_list : struct_declarator\n | struct_declarator_list COMMA struct_declarator\n struct_declarator : declarator\n struct_declarator : declarator COLON constant_expression\n | COLON constant_expression\n enum_specifier : ENUM ID\n | ENUM TYPEID\n enum_specifier : ENUM brace_open enumerator_list brace_close\n enum_specifier : ENUM ID brace_open enumerator_list brace_close\n | ENUM TYPEID brace_open enumerator_list brace_close\n enumerator_list : enumerator\n | enumerator_list COMMA\n | enumerator_list COMMA enumerator\n enumerator : ID\n | ID EQUALS constant_expression\n declarator : id_declarator\n | typeid_declarator\n pointer : TIMES type_qualifier_list_opt\n | TIMES type_qualifier_list_opt pointer\n type_qualifier_list : type_qualifier\n | type_qualifier_list type_qualifier\n parameter_type_list : parameter_list\n | parameter_list COMMA ELLIPSIS\n parameter_list : parameter_declaration\n | parameter_list COMMA parameter_declaration\n parameter_declaration : declaration_specifiers id_declarator\n | declaration_specifiers typeid_noparen_declarator\n parameter_declaration : declaration_specifiers abstract_declarator_opt\n identifier_list : identifier\n | identifier_list COMMA identifier\n initializer : assignment_expression\n initializer : brace_open initializer_list_opt brace_close\n | brace_open initializer_list COMMA brace_close\n initializer_list : designation_opt initializer\n | initializer_list COMMA designation_opt initializer\n designation : designator_list EQUALS\n designator_list : designator\n | designator_list designator\n designator : LBRACKET constant_expression RBRACKET\n | PERIOD identifier\n type_name : specifier_qualifier_list abstract_declarator_opt\n abstract_declarator : pointer\n abstract_declarator : pointer direct_abstract_declarator\n abstract_declarator : direct_abstract_declarator\n direct_abstract_declarator : LPAREN abstract_declarator RPAREN direct_abstract_declarator : direct_abstract_declarator LBRACKET assignment_expression_opt RBRACKET\n direct_abstract_declarator : LBRACKET assignment_expression_opt RBRACKET\n direct_abstract_declarator : direct_abstract_declarator LBRACKET TIMES RBRACKET\n direct_abstract_declarator : LBRACKET TIMES RBRACKET\n direct_abstract_declarator : direct_abstract_declarator LPAREN parameter_type_list_opt RPAREN\n direct_abstract_declarator : LPAREN parameter_type_list_opt RPAREN\n block_item : declaration\n | statement\n block_item_list : block_item\n | block_item_list block_item\n compound_statement : brace_open block_item_list_opt brace_close labeled_statement : ID COLON pragmacomp_or_statement labeled_statement : CASE constant_expression COLON pragmacomp_or_statement labeled_statement : DEFAULT COLON pragmacomp_or_statement selection_statement : IF LPAREN expression RPAREN pragmacomp_or_statement selection_statement : IF LPAREN expression RPAREN statement ELSE pragmacomp_or_statement selection_statement : SWITCH LPAREN expression RPAREN pragmacomp_or_statement iteration_statement : WHILE LPAREN expression RPAREN pragmacomp_or_statement iteration_statement : DO pragmacomp_or_statement WHILE LPAREN expression RPAREN SEMI iteration_statement : FOR LPAREN 
expression_opt SEMI expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement iteration_statement : FOR LPAREN declaration expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement jump_statement : GOTO ID SEMI jump_statement : BREAK SEMI jump_statement : CONTINUE SEMI jump_statement : RETURN expression SEMI\n | RETURN SEMI\n expression_statement : expression_opt SEMI expression : assignment_expression\n | expression COMMA assignment_expression\n typedef_name : TYPEID assignment_expression : conditional_expression\n | unary_expression assignment_operator assignment_expression\n assignment_operator : EQUALS\n | XOREQUAL\n | TIMESEQUAL\n | DIVEQUAL\n | MODEQUAL\n | PLUSEQUAL\n | MINUSEQUAL\n | LSHIFTEQUAL\n | RSHIFTEQUAL\n | ANDEQUAL\n | OREQUAL\n constant_expression : conditional_expression conditional_expression : binary_expression\n | binary_expression CONDOP expression COLON conditional_expression\n binary_expression : cast_expression\n | binary_expression TIMES binary_expression\n | binary_expression DIVIDE binary_expression\n | binary_expression MOD binary_expression\n | binary_expression PLUS binary_expression\n | binary_expression MINUS binary_expression\n | binary_expression RSHIFT binary_expression\n | binary_expression LSHIFT binary_expression\n | binary_expression LT binary_expression\n | binary_expression LE binary_expression\n | binary_expression GE binary_expression\n | binary_expression GT binary_expression\n | binary_expression EQ binary_expression\n | binary_expression NE binary_expression\n | binary_expression AND binary_expression\n | binary_expression OR binary_expression\n | binary_expression XOR binary_expression\n | binary_expression LAND binary_expression\n | binary_expression LOR binary_expression\n cast_expression : unary_expression cast_expression : LPAREN type_name RPAREN cast_expression unary_expression : postfix_expression unary_expression : PLUSPLUS unary_expression\n | MINUSMINUS unary_expression\n | unary_operator cast_expression\n unary_expression : SIZEOF unary_expression\n | SIZEOF LPAREN type_name RPAREN\n unary_operator : AND\n | TIMES\n | PLUS\n | MINUS\n | NOT\n | LNOT\n postfix_expression : primary_expression postfix_expression : postfix_expression LBRACKET expression RBRACKET postfix_expression : postfix_expression LPAREN argument_expression_list RPAREN\n | postfix_expression LPAREN RPAREN\n postfix_expression : postfix_expression PERIOD ID\n | postfix_expression PERIOD TYPEID\n | postfix_expression ARROW ID\n | postfix_expression ARROW TYPEID\n postfix_expression : postfix_expression PLUSPLUS\n | postfix_expression MINUSMINUS\n postfix_expression : LPAREN type_name RPAREN brace_open initializer_list brace_close\n | LPAREN type_name RPAREN brace_open initializer_list COMMA brace_close\n primary_expression : identifier primary_expression : constant primary_expression : unified_string_literal\n | unified_wstring_literal\n primary_expression : LPAREN expression RPAREN primary_expression : OFFSETOF LPAREN type_name COMMA offsetof_member_designator RPAREN\n offsetof_member_designator : identifier\n | offsetof_member_designator PERIOD identifier\n | offsetof_member_designator LBRACKET expression RBRACKET\n argument_expression_list : assignment_expression\n | argument_expression_list COMMA assignment_expression\n identifier : ID constant : INT_CONST_DEC\n | INT_CONST_OCT\n | INT_CONST_HEX\n | INT_CONST_BIN\n constant : FLOAT_CONST\n | HEX_FLOAT_CONST\n constant : CHAR_CONST\n | WCHAR_CONST\n unified_string_literal : STRING_LITERAL\n | 
unified_string_literal STRING_LITERAL\n unified_wstring_literal : WSTRING_LITERAL\n | unified_wstring_literal WSTRING_LITERAL\n brace_open : LBRACE\n brace_close : RBRACE\n empty : '
-_lr_action_items = {'VOID':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,136,142,170,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,228,229,230,231,232,233,234,240,245,254,273,282,283,284,287,289,292,323,327,332,333,335,336,342,345,347,350,351,354,356,357,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[6,-303,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,6,-94,-109,-104,-65,-93,-110,6,-215,-107,-111,6,-63,-116,6,-29,-105,-62,-101,-67,-112,-106,-303,-108,-303,-103,-117,-68,-98,-85,-10,-9,6,-53,6,-82,6,6,-61,-131,-301,-130,6,-147,-146,-160,-88,-90,6,-87,-89,-92,-81,-84,-86,-69,-30,6,6,-70,6,-83,6,6,-128,-140,-137,6,6,6,-161,6,6,-36,-35,6,6,-73,-76,-72,-74,6,-78,-193,-192,-77,-194,-75,6,6,-129,-132,-138,-302,-126,-127,-148,-71,6,-31,6,6,6,-34,6,6,6,-212,-211,6,-209,-195,-208,-196,-134,-133,-139,-150,-149,6,-33,-32,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'LBRACKET':([2,3,5,6,7,8,9,10,11,15,16,19,20,21,24,25,29,30,31,32,35,37,39,41,44,45,48,50,51,54,61,69,70,71,73,74,76,77,78,79,80,83,85,88,91,92,96,105,113,115,125,136,137,140,147,150,151,152,153,154,155,158,159,167,169,172,174,175,177,178,184,185,186,189,190,193,196,226,230,231,233,234,240,245,247,256,272,275,276,278,282,289,292,315,320,321,350,351,356,357,364,365,368,373,377,378,379,380,383,388,391,392,412,413,414,415,421,422,440,441,445,447,449,452,453,459,465,466,467,468,469,477,478,479,484,485,488,489,499,502,503,504,505,510,512,517,],[-102,-115,-113,-99,-97,59,-95,-114,-96,-100,-91,-94,-109,-104,-93,-110,-215,-107,-303,-111,-116,-29,-105,-101,-112,-106,-108,-103,-117,-98,59,-131,-301,-130,-147,-146,-28,-158,-160,-27,-88,-90,141,-37,-87,-89,-92,-30,195,-288,-128,-161,-159,141,-292,-280,-295,-299,-296,-293,-278,-279,280,-291,-265,-297,-289,-277,-294,-290,-36,-35,195,195,322,-45,326,-288,-129,-132,-302,-126,-127,-148,-38,370,-300,-298,-274,-273,-31,-34,195,195,322,326,-134,-133,-150,-149,-44,-43,-177,370,-272,-271,-270,-269,-268,-281,195,195,-33,-32,-191,-185,-187,-189,-39,-42,-180,370,-178,-266,-267,370,-51,-50,-186,-188,-190,-41,-40,-179,501,-283,-46,-49,-282,370,-275,-48,-47,-284,-276,-285,]),'WCHAR_CONST':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,136,141,144,145,148,156,157,163,164,166,168,170,171,173,181,182,183,195,199,200,203,204,205,211,212,214,215,216,217,219,221,222,225,233,235,246,249,250,251,256,260,261,262,263,264,265,266,267,268,269,270,271,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,355,361,362,363,366,370,372,374,389,416,417,418,424,426,427,429,431,433,436,447,450,454,456,459,460,462,463,464,470,472,473,474,475,476,480,481,493,494,495,497,501,502,506,509,514,515,516,518,519,520,],[-115,-116,-117,-68,-303,-301,-28,-160,-27,-81,-69,153,-28,-303,153,-161,-303,153,153,-264,153,-262,153,-261,153,-260,153,153,-259,-263,153,153,153,-73,-76,-72,153,-74,153,153,-78,-193,-192,-77,-194,153,-75,-260,-302,153,153,153,-28,-303,-303,-221,-224,-222,-218,-219,-223,-225,153,-227,-228,-220,-226,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,-303,-260,153,-212,-211,153,-209,153,153,153,-195,153,-208,-196,153,153,153,-260,153,153,-12,153,153,-11,153,153,-28,-303,-260,-207,-210,153,-199,153,-
197,-303,-176,153,153,-303,153,-260,153,153,153,153,-198,153,153,153,153,-11,153,-203,-202,-200,153,-303,153,153,153,-204,-201,153,-206,-205,]),'FLOAT_CONST':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,136,141,144,145,148,156,157,163,164,166,168,170,171,173,181,182,183,195,199,200,203,204,205,211,212,214,215,216,217,219,221,222,225,233,235,246,249,250,251,256,260,261,262,263,264,265,266,267,268,269,270,271,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,355,361,362,363,366,370,372,374,389,416,417,418,424,426,427,429,431,433,436,447,450,454,456,459,460,462,463,464,470,472,473,474,475,476,480,481,493,494,495,497,501,502,506,509,514,515,516,518,519,520,],[-115,-116,-117,-68,-303,-301,-28,-160,-27,-81,-69,154,-28,-303,154,-161,-303,154,154,-264,154,-262,154,-261,154,-260,154,154,-259,-263,154,154,154,-73,-76,-72,154,-74,154,154,-78,-193,-192,-77,-194,154,-75,-260,-302,154,154,154,-28,-303,-303,-221,-224,-222,-218,-219,-223,-225,154,-227,-228,-220,-226,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,-303,-260,154,-212,-211,154,-209,154,154,154,-195,154,-208,-196,154,154,154,-260,154,154,-12,154,154,-11,154,154,-28,-303,-260,-207,-210,154,-199,154,-197,-303,-176,154,154,-303,154,-260,154,154,154,154,-198,154,154,154,154,-11,154,-203,-202,-200,154,-303,154,154,154,-204,-201,154,-206,-205,]),'MINUS':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,115,121,136,141,144,145,147,148,149,150,151,152,153,154,155,156,157,158,159,161,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,181,182,183,195,199,200,203,204,205,211,212,214,215,216,217,219,221,222,225,226,233,235,246,249,250,251,256,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,278,280,281,284,285,286,287,288,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,355,361,362,363,366,370,372,374,377,378,379,380,383,388,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,416,417,418,424,426,427,429,431,433,436,447,450,451,452,453,454,456,458,459,460,462,463,464,470,472,473,474,475,476,480,481,493,494,495,497,499,501,502,503,506,509,512,514,515,516,518,519,520,],[-115,-116,-117,-68,-303,-301,-28,-160,-27,-81,-69,157,-28,-303,-288,157,-161,-303,157,157,-292,-264,-251,-280,-295,-299,-296,-293,-278,157,-262,-279,-253,-232,157,-261,157,-291,-260,-265,157,157,-297,-259,-289,-277,297,-294,-290,-263,157,157,157,-73,-76,-72,157,-74,157,157,-78,-193,-192,-77,-194,157,-75,-260,-288,-302,157,157,157,-28,-303,-303,-221,-224,-222,-218,-219,-223,-225,157,-227,-228,-220,-226,-300,157,-257,-298,-274,-273,157,157,157,-251,-256,157,-254,-255,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,-303,-260,157,-212,-211,157,-209,157,157,157,-195,157,-208,-196,157,157,157,-260,157,157,-12,157,157,-11,-272,-271,-270,-269,-268,-281,157,297,297,297,-237,297,297,297,-236,297,297,-234,-233,297,297,297,297,297,-235,157,-28,-303,-260,-207,-210,157,-199,157,-197,-303,-176,-258,-266,-267,157,157,-252,-303,157,-260,157,157,157,157,-198,157,157,157,157,-11,157,-203,-202,-200,-282,157,-303,-275,157,157,-276,157,-204,-201,157,-206,-205,]),'RPAREN':([2,3,5,6,7,8,9,10,11,15,16,19,20,21,24,25,29,30,31,32,35,37,39,41,44,45,48,50,51,54,58,60,61,69,71,73,74,76,77,78,79,80,83,85,88,91,92,96,105,109,110,111,112,113,114,115,116,118,125,136,137,138,140,142,147,149,150,
151,152,153,154,155,158,159,161,167,169,172,174,175,176,177,178,180,184,185,186,187,188,189,190,191,192,193,194,196,208,224,230,231,233,234,240,245,247,252,253,272,274,275,276,278,281,282,285,286,288,289,290,291,292,293,315,316,317,318,319,320,321,323,327,328,329,330,343,350,351,356,357,364,365,375,376,377,378,379,380,382,383,384,386,387,388,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,412,413,414,415,419,420,421,422,425,430,432,434,437,440,441,451,452,453,458,465,466,467,468,469,477,478,483,484,485,487,488,489,493,496,499,503,504,505,506,507,510,512,513,517,],[-102,-115,-113,-99,-97,-52,-95,-114,-96,-100,-91,-94,-109,-104,-93,-110,-215,-107,-303,-111,-116,-29,-105,-101,-112,-106,-108,-103,-117,-98,105,-303,-53,-131,-130,-147,-146,-28,-158,-160,-27,-88,-90,-54,-37,-87,-89,-92,-30,184,-17,185,-164,-303,-18,-288,-162,-169,-128,-161,-159,247,-55,-303,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,-232,-291,-265,-297,-289,-277,-230,-294,-290,-216,-36,-35,-303,-168,-2,-182,-56,-166,-1,-45,-167,-184,-14,-213,-129,-132,-302,-126,-127,-148,-38,364,365,-300,-257,-298,-274,-273,383,-31,-251,-256,-254,-34,388,389,-303,-255,-182,-23,-24,414,415,-57,-183,-303,-303,-170,-163,-165,-13,-134,-133,-150,-149,-44,-43,-217,451,-272,-271,-270,-269,-286,-268,453,456,457,-281,-181,-182,-303,-238,-250,-239,-237,-241,-245,-240,-236,-243,-248,-234,-233,-242,-249,-244,-246,-247,-235,-33,-32,-191,-185,465,466,-187,-189,469,-214,472,474,476,-39,-42,-258,-266,-267,-252,-51,-50,-186,-188,-190,-41,-40,-287,499,-283,-231,-46,-49,-303,508,-282,-275,-48,-47,-303,514,-284,-276,518,-285,]),'LONG':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,136,142,170,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,228,229,230,231,232,233,234,240,245,254,273,282,283,284,287,289,292,323,327,332,333,335,336,342,345,347,350,351,354,356,357,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[21,-303,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,21,-94,-109,-104,-65,-93,-110,21,-215,-107,-111,21,-63,-116,21,-29,-105,-62,-101,-67,-112,-106,-303,-108,-303,-103,-117,-68,-98,-85,-10,-9,21,-53,21,-82,21,21,-61,-131,-301,-130,21,-147,-146,-160,-88,-90,21,-87,-89,-92,-81,-84,-86,-69,-30,21,21,-70,21,-83,21,21,-128,-140,-137,21,21,21,-161,21,21,-36,-35,21,21,-73,-76,-72,-74,21,-78,-193,-192,-77,-194,-75,21,21,-129,-132,-138,-302,-126,-127,-148,-71,21,-31,21,21,21,-34,21,21,21,-212,-211,21,-209,-195,-208,-196,-134,-133,-139,-150,-149,21,-33,-32,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'PLUS':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,115,121,136,141,144,145,147,148,149,150,151,152,153,154,155,156,157,158,159,161,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,181,182,183,195,199,200,203,204,205,211,212,214,215,216,217,219,221,222,225,226,233,235,246,249,250,251,256,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,278,280,281,284,285,286,287,288,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,355,361,362,363,366,370,372,374,377,378,379,380,383,388,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,416,417,418,424,426,427,429,431,433,436,447,450,451,452,453,454,456,458,459,460,462,463,
464,470,472,473,474,475,476,480,481,493,494,495,497,499,501,502,503,506,509,512,514,515,516,518,519,520,],[-115,-116,-117,-68,-303,-301,-28,-160,-27,-81,-69,164,-28,-303,-288,164,-161,-303,164,164,-292,-264,-251,-280,-295,-299,-296,-293,-278,164,-262,-279,-253,-232,164,-261,164,-291,-260,-265,164,164,-297,-259,-289,-277,301,-294,-290,-263,164,164,164,-73,-76,-72,164,-74,164,164,-78,-193,-192,-77,-194,164,-75,-260,-288,-302,164,164,164,-28,-303,-303,-221,-224,-222,-218,-219,-223,-225,164,-227,-228,-220,-226,-300,164,-257,-298,-274,-273,164,164,164,-251,-256,164,-254,-255,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,-303,-260,164,-212,-211,164,-209,164,164,164,-195,164,-208,-196,164,164,164,-260,164,164,-12,164,164,-11,-272,-271,-270,-269,-268,-281,164,301,301,301,-237,301,301,301,-236,301,301,-234,-233,301,301,301,301,301,-235,164,-28,-303,-260,-207,-210,164,-199,164,-197,-303,-176,-258,-266,-267,164,164,-252,-303,164,-260,164,164,164,164,-198,164,164,164,164,-11,164,-203,-202,-200,-282,164,-303,-275,164,164,-276,164,-204,-201,164,-206,-205,]),'ELLIPSIS':([198,],[329,]),'GT':([115,147,149,150,151,152,153,154,155,158,159,161,167,169,172,174,175,176,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,451,452,453,458,499,503,512,],[-288,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,-232,-291,-265,-297,-289,-277,302,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-238,302,-239,-237,-241,302,-240,-236,-243,302,-234,-233,-242,302,302,302,302,-235,-258,-266,-267,-252,-282,-275,-276,]),'GOTO':([53,70,101,104,121,199,200,203,205,212,214,215,216,217,219,221,222,233,332,333,336,338,342,345,347,348,426,427,431,433,436,472,473,474,476,494,495,497,509,514,515,516,518,519,520,],[-68,-301,-81,-69,201,-73,-76,-72,-74,201,-78,-193,-192,-77,-194,201,-75,-302,-212,-211,-209,201,-195,-208,-196,201,-207,-210,-199,201,-197,201,-198,201,201,-203,-202,-200,201,201,-204,-201,201,-206,-205,]),'ENUM':([0,1,3,7,8,9,11,12,14,17,18,19,23,24,26,34,35,36,37,40,42,47,49,51,53,54,55,56,57,60,61,64,65,67,68,70,72,78,87,101,102,103,104,105,117,120,121,122,123,124,126,127,128,129,136,142,170,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,228,229,232,233,254,273,282,283,284,287,289,323,327,332,333,335,336,342,345,347,354,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[28,-303,-115,-97,-52,-95,-96,-64,-60,-66,28,-94,-65,-93,28,-63,-116,28,-29,-62,-67,-303,-303,-117,-68,-98,-85,-10,-9,28,-53,-82,28,28,-61,-301,28,-160,28,-81,-84,-86,-69,-30,28,-70,28,-83,28,28,-140,-137,28,28,-161,28,28,-36,-35,28,28,-73,-76,-72,-74,28,-78,-193,-192,-77,-194,-75,28,28,-138,-302,-71,28,-31,28,28,28,-34,28,28,-212,-211,28,-209,-195,-208,-196,-139,28,-33,-32,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'PERIOD':([70,115,147,150,151,152,153,154,155,158,159,167,169,172,174,175,177,178,226,233,256,272,275,276,278,368,373,377,378,379,380,383,388,445,447,449,452,453,459,479,484,485,499,502,503,510,512,517,],[-301,-288,-292,-280,-295,-299,-296,-293,-278,-279,279,-291,-265,-297,-289,-277,-294,-290,-288,-302,369,-300,-298,-274,-273,-177,369,-272,-271,-270,-269,-268,-281,-180,369,-178,-266,-267,369,-179,500,-283,-282,369,-275,-284,-276,-285,]),'GE':([115,147,149,150,151,152,153,154,155,158,159,161,167,169,172,174,175,176,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,393,394,395,396,397,398,399,400,401,402,403,4
04,405,406,407,408,410,411,451,452,453,458,499,503,512,],[-288,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,-232,-291,-265,-297,-289,-277,306,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-238,306,-239,-237,-241,306,-240,-236,-243,306,-234,-233,-242,306,306,306,306,-235,-258,-266,-267,-252,-282,-275,-276,]),'INT_CONST_DEC':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,136,141,144,145,148,156,157,163,164,166,168,170,171,173,181,182,183,195,199,200,203,204,205,211,212,214,215,216,217,219,221,222,225,233,235,246,249,250,251,256,260,261,262,263,264,265,266,267,268,269,270,271,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,355,361,362,363,366,370,372,374,389,416,417,418,424,426,427,429,431,433,436,447,450,454,456,459,460,462,463,464,470,472,473,474,475,476,480,481,493,494,495,497,501,502,506,509,514,515,516,518,519,520,],[-115,-116,-117,-68,-303,-301,-28,-160,-27,-81,-69,174,-28,-303,174,-161,-303,174,174,-264,174,-262,174,-261,174,-260,174,174,-259,-263,174,174,174,-73,-76,-72,174,-74,174,174,-78,-193,-192,-77,-194,174,-75,-260,-302,174,174,174,-28,-303,-303,-221,-224,-222,-218,-219,-223,-225,174,-227,-228,-220,-226,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,-303,-260,174,-212,-211,174,-209,174,174,174,-195,174,-208,-196,174,174,174,-260,174,174,-12,174,174,-11,174,174,-28,-303,-260,-207,-210,174,-199,174,-197,-303,-176,174,174,-303,174,-260,174,174,174,174,-198,174,174,174,174,-11,174,-203,-202,-200,174,-303,174,174,174,-204,-201,174,-206,-205,]),'ARROW':([115,147,150,151,152,153,154,155,158,159,167,169,172,174,175,177,178,226,233,272,275,276,278,377,378,379,380,383,388,452,453,499,503,512,],[-288,-292,-280,-295,-299,-296,-293,-278,-279,277,-291,-265,-297,-289,-277,-294,-290,-288,-302,-300,-298,-274,-273,-272,-271,-270,-269,-268,-281,-266,-267,-282,-275,-276,]),'CHAR':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,136,142,170,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,228,229,230,231,232,233,234,240,245,254,273,282,283,284,287,289,292,323,327,332,333,335,336,342,345,347,350,351,354,356,357,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[41,-303,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,41,-94,-109,-104,-65,-93,-110,41,-215,-107,-111,41,-63,-116,41,-29,-105,-62,-101,-67,-112,-106,-303,-108,-303,-103,-117,-68,-98,-85,-10,-9,41,-53,41,-82,41,41,-61,-131,-301,-130,41,-147,-146,-160,-88,-90,41,-87,-89,-92,-81,-84,-86,-69,-30,41,41,-70,41,-83,41,41,-128,-140,-137,41,41,41,-161,41,41,-36,-35,41,41,-73,-76,-72,-74,41,-78,-193,-192,-77,-194,-75,41,41,-129,-132,-138,-302,-126,-127,-148,-71,41,-31,41,41,41,-34,41,41,41,-212,-211,41,-209,-195,-208,-196,-134,-133,-139,-150,-149,41,-33,-32,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'HEX_FLOAT_CONST':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,136,141,144,145,148,156,157,163,164,166,168,170,171,173,181,182,183,195,199,200,203,204,205,211,212,214,215,216,217,219,221,222,225,233,235,246,249,250,251,256,260,261,262,263,264,265,266,267,268,269,270,271,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312
,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,355,361,362,363,366,370,372,374,389,416,417,418,424,426,427,429,431,433,436,447,450,454,456,459,460,462,463,464,470,472,473,474,475,476,480,481,493,494,495,497,501,502,506,509,514,515,516,518,519,520,],[-115,-116,-117,-68,-303,-301,-28,-160,-27,-81,-69,177,-28,-303,177,-161,-303,177,177,-264,177,-262,177,-261,177,-260,177,177,-259,-263,177,177,177,-73,-76,-72,177,-74,177,177,-78,-193,-192,-77,-194,177,-75,-260,-302,177,177,177,-28,-303,-303,-221,-224,-222,-218,-219,-223,-225,177,-227,-228,-220,-226,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,-303,-260,177,-212,-211,177,-209,177,177,177,-195,177,-208,-196,177,177,177,-260,177,177,-12,177,177,-11,177,177,-28,-303,-260,-207,-210,177,-199,177,-197,-303,-176,177,177,-303,177,-260,177,177,177,177,-198,177,177,177,177,-11,177,-203,-202,-200,177,-303,177,177,177,-204,-201,177,-206,-205,]),'DOUBLE':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,136,142,170,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,228,229,230,231,232,233,234,240,245,254,273,282,283,284,287,289,292,323,327,332,333,335,336,342,345,347,350,351,354,356,357,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[45,-303,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,45,-94,-109,-104,-65,-93,-110,45,-215,-107,-111,45,-63,-116,45,-29,-105,-62,-101,-67,-112,-106,-303,-108,-303,-103,-117,-68,-98,-85,-10,-9,45,-53,45,-82,45,45,-61,-131,-301,-130,45,-147,-146,-160,-88,-90,45,-87,-89,-92,-81,-84,-86,-69,-30,45,45,-70,45,-83,45,45,-128,-140,-137,45,45,45,-161,45,45,-36,-35,45,45,-73,-76,-72,-74,45,-78,-193,-192,-77,-194,-75,45,45,-129,-132,-138,-302,-126,-127,-148,-71,45,-31,45,45,45,-34,45,45,45,-212,-211,45,-209,-195,-208,-196,-134,-133,-139,-150,-149,45,-33,-32,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'MINUSEQUAL':([115,147,149,150,151,152,153,154,155,158,159,167,169,172,174,175,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,451,452,453,458,499,503,512,],[-288,-292,261,-280,-295,-299,-296,-293,-278,-279,-253,-291,-265,-297,-289,-277,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-258,-266,-267,-252,-282,-275,-276,]),'INT_CONST_OCT':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,136,141,144,145,148,156,157,163,164,166,168,170,171,173,181,182,183,195,199,200,203,204,205,211,212,214,215,216,217,219,221,222,225,233,235,246,249,250,251,256,260,261,262,263,264,265,266,267,268,269,270,271,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,355,361,362,363,366,370,372,374,389,416,417,418,424,426,427,429,431,433,436,447,450,454,456,459,460,462,463,464,470,472,473,474,475,476,480,481,493,494,495,497,501,502,506,509,514,515,516,518,519,520,],[-115,-116,-117,-68,-303,-301,-28,-160,-27,-81,-69,178,-28,-303,178,-161,-303,178,178,-264,178,-262,178,-261,178,-260,178,178,-259,-263,178,178,178,-73,-76,-72,178,-74,178,178,-78,-193,-192,-77,-194,178,-75,-260,-302,178,178,178,-28,-303,-303,-221,-224,-222,-218,-219,-223,-225,178,-227,-228,-220,-226,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,
178,178,-303,-260,178,-212,-211,178,-209,178,178,178,-195,178,-208,-196,178,178,178,-260,178,178,-12,178,178,-11,178,178,-28,-303,-260,-207,-210,178,-199,178,-197,-303,-176,178,178,-303,178,-260,178,178,178,178,-198,178,178,178,178,-11,178,-203,-202,-200,178,-303,178,178,178,-204,-201,178,-206,-205,]),'TIMESEQUAL':([115,147,149,150,151,152,153,154,155,158,159,167,169,172,174,175,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,451,452,453,458,499,503,512,],[-288,-292,270,-280,-295,-299,-296,-293,-278,-279,-253,-291,-265,-297,-289,-277,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-258,-266,-267,-252,-282,-275,-276,]),'OR':([115,147,149,150,151,152,153,154,155,158,159,161,167,169,172,174,175,176,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,451,452,453,458,499,503,512,],[-288,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,-232,-291,-265,-297,-289,-277,311,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-238,311,-239,-237,-241,-245,-240,-236,-243,-248,-234,-233,-242,311,-244,-246,-247,-235,-258,-266,-267,-252,-282,-275,-276,]),'SHORT':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,136,142,170,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,228,229,230,231,232,233,234,240,245,254,273,282,283,284,287,289,292,323,327,332,333,335,336,342,345,347,350,351,354,356,357,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[2,-303,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,2,-94,-109,-104,-65,-93,-110,2,-215,-107,-111,2,-63,-116,2,-29,-105,-62,-101,-67,-112,-106,-303,-108,-303,-103,-117,-68,-98,-85,-10,-9,2,-53,2,-82,2,2,-61,-131,-301,-130,2,-147,-146,-160,-88,-90,2,-87,-89,-92,-81,-84,-86,-69,-30,2,2,-70,2,-83,2,2,-128,-140,-137,2,2,2,-161,2,2,-36,-35,2,2,-73,-76,-72,-74,2,-78,-193,-192,-77,-194,-75,2,2,-129,-132,-138,-302,-126,-127,-148,-71,2,-31,2,2,2,-34,2,2,2,-212,-211,2,-209,-195,-208,-196,-134,-133,-139,-150,-149,2,-33,-32,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'RETURN':([53,70,101,104,121,199,200,203,205,212,214,215,216,217,219,221,222,233,332,333,336,338,342,345,347,348,426,427,431,433,436,472,473,474,476,494,495,497,509,514,515,516,518,519,520,],[-68,-301,-81,-69,204,-73,-76,-72,-74,204,-78,-193,-192,-77,-194,204,-75,-302,-212,-211,-209,204,-195,-208,-196,204,-207,-210,-199,204,-197,204,-198,204,204,-203,-202,-200,204,204,-204,-201,204,-206,-205,]),'RSHIFTEQUAL':([115,147,149,150,151,152,153,154,155,158,159,167,169,172,174,175,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,451,452,453,458,499,503,512,],[-288,-292,271,-280,-295,-299,-296,-293,-278,-279,-253,-291,-265,-297,-289,-277,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-258,-266,-267,-252,-282,-275,-276,]),'RESTRICT':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,31,32,33,34,35,37,39,40,41,42,44,45,47,48,49,50,51,53,54,59,60,61,63,64,67,68,69,70,71,72,73,74,76,78,80,83,87,91,92,96,101,104,105,107,108,113,120,121,122,123,124,125,126,127,128,129,130,136,141,142,170,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,228,229,23
0,231,232,233,234,240,245,250,251,254,273,282,283,284,287,289,292,322,323,327,332,333,335,336,342,345,347,350,351,354,356,357,392,412,413,417,418,426,427,431,436,473,494,495,497,515,516,519,520,],[35,35,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,35,-94,-109,-104,-65,-93,-110,35,-215,-107,35,-111,35,-63,-116,-29,-105,-62,-101,-67,-112,-106,35,-108,35,-103,-117,-68,-98,35,35,-53,35,-82,35,-61,-131,-301,-130,35,-147,-146,35,-160,-88,-90,35,-87,-89,-92,-81,-69,-30,35,35,35,-70,35,-83,35,35,-128,-140,-137,35,35,35,-161,35,35,35,-36,-35,35,35,-73,-76,-72,-74,35,-78,-193,-192,-77,-194,-75,35,35,-129,-132,-138,-302,-126,-127,-148,35,35,-71,35,-31,35,35,35,-34,35,35,35,35,-212,-211,35,-209,-195,-208,-196,-134,-133,-139,-150,-149,35,-33,-32,35,35,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'STATIC':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,37,39,40,41,42,44,45,47,48,49,50,51,53,54,59,60,61,63,64,67,68,69,70,71,73,74,78,80,83,87,91,92,96,101,104,105,107,113,120,121,122,136,141,142,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,231,233,245,250,254,282,289,322,323,327,332,333,335,336,342,345,347,350,351,356,357,392,412,413,417,426,427,431,436,473,494,495,497,515,516,519,520,],[9,9,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,9,-94,-109,-104,-65,-93,-110,9,-215,-107,-111,9,-63,-116,-29,-105,-62,-101,-67,-112,-106,9,-108,9,-103,-117,-68,-98,108,9,-53,9,-82,9,-61,-131,-301,-130,-147,-146,-160,-88,-90,9,-87,-89,-92,-81,-69,-30,182,9,-70,9,-83,-161,251,9,-36,-35,9,9,-73,-76,-72,-74,9,-78,-193,-192,-77,-194,-75,-132,-302,-148,362,-71,-31,-34,418,9,9,-212,-211,9,-209,-195,-208,-196,-134,-133,-150,-149,9,-33,-32,463,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'SIZEOF':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,136,141,144,145,148,156,157,163,164,166,168,170,171,173,181,182,183,195,199,200,203,204,205,211,212,214,215,216,217,219,221,222,225,233,235,246,249,250,251,256,260,261,262,263,264,265,266,267,268,269,270,271,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,355,361,362,363,366,370,372,374,389,416,417,418,424,426,427,429,431,433,436,447,450,454,456,459,460,462,463,464,470,472,473,474,475,476,480,481,493,494,495,497,501,502,506,509,514,515,516,518,519,520,],[-115,-116,-117,-68,-303,-301,-28,-160,-27,-81,-69,156,-28,-303,156,-161,-303,156,156,-264,156,-262,156,-261,156,-260,156,156,-259,-263,156,156,156,-73,-76,-72,156,-74,156,156,-78,-193,-192,-77,-194,156,-75,-260,-302,156,156,156,-28,-303,-303,-221,-224,-222,-218,-219,-223,-225,156,-227,-228,-220,-226,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,-303,-260,156,-212,-211,156,-209,156,156,156,-195,156,-208,-196,156,156,156,-260,156,156,-12,156,156,-11,156,156,-28,-303,-260,-207,-210,156,-199,156,-197,-303,-176,156,156,-303,156,-260,156,156,156,156,-198,156,156,156,156,-11,156,-203,-202,-200,156,-303,156,156,156,-204,-201,156,-206,-205,]),'UNSIGNED':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,136,142,170,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,228,229,230,231,232,233,234,240,245,254,273,282,283,284,287,289,292,323,327,332,333,33
5,336,342,345,347,350,351,354,356,357,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[20,-303,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,20,-94,-109,-104,-65,-93,-110,20,-215,-107,-111,20,-63,-116,20,-29,-105,-62,-101,-67,-112,-106,-303,-108,-303,-103,-117,-68,-98,-85,-10,-9,20,-53,20,-82,20,20,-61,-131,-301,-130,20,-147,-146,-160,-88,-90,20,-87,-89,-92,-81,-84,-86,-69,-30,20,20,-70,20,-83,20,20,-128,-140,-137,20,20,20,-161,20,20,-36,-35,20,20,-73,-76,-72,-74,20,-78,-193,-192,-77,-194,-75,20,20,-129,-132,-138,-302,-126,-127,-148,-71,20,-31,20,20,20,-34,20,20,20,-212,-211,20,-209,-195,-208,-196,-134,-133,-139,-150,-149,20,-33,-32,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'UNION':([0,1,3,7,8,9,11,12,14,17,18,19,23,24,26,34,35,36,37,40,42,47,49,51,53,54,55,56,57,60,61,64,65,67,68,70,72,78,87,101,102,103,104,105,117,120,121,122,123,124,126,127,128,129,136,142,170,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,228,229,232,233,254,273,282,283,284,287,289,323,327,332,333,335,336,342,345,347,354,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[22,-303,-115,-97,-52,-95,-96,-64,-60,-66,22,-94,-65,-93,22,-63,-116,22,-29,-62,-67,-303,-303,-117,-68,-98,-85,-10,-9,22,-53,-82,22,22,-61,-301,22,-160,22,-81,-84,-86,-69,-30,22,-70,22,-83,22,22,-140,-137,22,22,-161,22,22,-36,-35,22,22,-73,-76,-72,-74,22,-78,-193,-192,-77,-194,-75,22,22,-138,-302,-71,22,-31,22,22,22,-34,22,22,-212,-211,22,-209,-195,-208,-196,-139,22,-33,-32,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'COLON':([2,3,5,6,8,10,15,20,21,25,29,30,32,35,37,39,41,44,45,48,50,51,61,69,71,73,74,85,86,88,105,115,119,125,130,140,147,149,150,151,152,153,154,155,158,159,161,167,169,172,174,175,176,177,178,180,184,185,209,224,226,230,231,233,234,240,241,245,247,272,274,275,276,278,282,285,286,288,289,293,340,341,350,351,353,356,357,364,365,375,377,378,379,380,383,388,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,430,440,441,451,452,453,458,477,478,487,499,503,512,],[-102,-115,-113,-99,-52,-114,-100,-109,-104,-110,-215,-107,-111,-116,-29,-105,-101,-112,-106,-108,-103,-117,-53,-131,-130,-147,-146,-54,-157,-37,-30,-288,-156,-128,235,-55,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,-232,-291,-265,-297,-289,-277,-230,-294,-290,-216,-36,-35,338,-213,348,-129,-132,-302,-126,-127,355,-148,-38,-300,-257,-298,-274,-273,-31,-251,-256,-254,-34,-255,433,-229,-134,-133,235,-150,-149,-44,-43,-217,-272,-271,-270,-269,-268,-281,-238,-250,-239,-237,-241,-245,-240,-236,-243,-248,-234,-233,-242,-249,-244,-246,460,-247,-235,-33,-32,-214,-39,-42,-258,-266,-267,-252,-41,-40,-231,-282,-275,-276,]),'$end':([0,12,14,17,23,26,34,40,42,43,52,53,68,101,104,120,233,254,347,],[-303,-64,-60,-66,-65,-58,-63,-62,-67,0,-59,-68,-61,-81,-69,-70,-302,-71,-196,]),'WSTRING_LITERAL':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,136,141,144,145,148,150,152,156,157,163,164,166,168,170,171,173,181,182,183,195,199,200,203,204,205,211,212,214,215,216,217,219,221,222,225,233,235,246,249,250,251,256,260,261,262,263,264,265,266,267,268,269,270,271,272,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,355,361,362,363,366,370,372,374,389,416,417,418,424,426,427,429,431,433,436,447,450,454,456,459,460,462,463,464,470,472,473,474,475,476,480,481,493,494,495,497,501,502,506,509,514,515,516,518,519,520,],[-115,-116,-117,-68,-303,-301,-28,-160,-27,-81,-69,152
,-28,-303,152,-161,-303,152,152,-264,272,-299,152,-262,152,-261,152,-260,152,152,-259,-263,152,152,152,-73,-76,-72,152,-74,152,152,-78,-193,-192,-77,-194,152,-75,-260,-302,152,152,152,-28,-303,-303,-221,-224,-222,-218,-219,-223,-225,152,-227,-228,-220,-226,-300,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,-303,-260,152,-212,-211,152,-209,152,152,152,-195,152,-208,-196,152,152,152,-260,152,152,-12,152,152,-11,152,152,-28,-303,-260,-207,-210,152,-199,152,-197,-303,-176,152,152,-303,152,-260,152,152,152,152,-198,152,152,152,152,-11,152,-203,-202,-200,152,-303,152,152,152,-204,-201,152,-206,-205,]),'DIVIDE':([115,147,149,150,151,152,153,154,155,158,159,161,167,169,172,174,175,176,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,451,452,453,458,499,503,512,],[-288,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,-232,-291,-265,-297,-289,-277,304,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,304,304,304,304,304,304,304,304,304,304,-234,-233,304,304,304,304,304,-235,-258,-266,-267,-252,-282,-275,-276,]),'FOR':([53,70,101,104,121,199,200,203,205,212,214,215,216,217,219,221,222,233,332,333,336,338,342,345,347,348,426,427,431,433,436,472,473,474,476,494,495,497,509,514,515,516,518,519,520,],[-68,-301,-81,-69,206,-73,-76,-72,-74,206,-78,-193,-192,-77,-194,206,-75,-302,-212,-211,-209,206,-195,-208,-196,206,-207,-210,-199,206,-197,206,-198,206,206,-203,-202,-200,206,206,-204,-201,206,-206,-205,]),'PLUSPLUS':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,115,121,136,141,144,145,147,148,150,151,152,153,154,155,156,157,158,159,163,164,166,167,168,169,170,171,172,173,174,175,177,178,181,182,183,195,199,200,203,204,205,211,212,214,215,216,217,219,221,222,225,226,233,235,246,249,250,251,256,260,261,262,263,264,265,266,267,268,269,270,271,272,273,275,276,278,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,355,361,362,363,366,370,372,374,377,378,379,380,383,388,389,416,417,418,424,426,427,429,431,433,436,447,450,452,453,454,456,459,460,462,463,464,470,472,473,474,475,476,480,481,493,494,495,497,499,501,502,503,506,509,512,514,515,516,518,519,520,],[-115,-116,-117,-68,-303,-301,-28,-160,-27,-81,-69,166,-28,-303,-288,166,-161,-303,166,166,-292,-264,-280,-295,-299,-296,-293,-278,166,-262,-279,278,166,-261,166,-291,-260,-265,166,166,-297,-259,-289,-277,-294,-290,-263,166,166,166,-73,-76,-72,166,-74,166,166,-78,-193,-192,-77,-194,166,-75,-260,-288,-302,166,166,166,-28,-303,-303,-221,-224,-222,-218,-219,-223,-225,166,-227,-228,-220,-226,-300,166,-298,-274,-273,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,-303,-260,166,-212,-211,166,-209,166,166,166,-195,166,-208,-196,166,166,166,-260,166,166,-12,166,166,-11,-272,-271,-270,-269,-268,-281,166,166,-28,-303,-260,-207,-210,166,-199,166,-197,-303,-176,-266,-267,166,166,-303,166,-260,166,166,166,166,-198,166,166,166,166,-11,166,-203,-202,-200,-282,166,-303,-275,166,166,-276,166,-204,-201,166,-206,-205,]),'EQUALS':([8,37,61,85,86,87,88,89,97,105,115,119,135,140,147,149,150,151,152,153,154,155,158,159,167,169,172,174,175,177,178,184,185,226,233,247,272,274,275,276,278,282,285,286,288,289,293,364,365,368,373,377,378,379,380,383,388,412,413,440,441,445,449,451,452,453,458,477,478,479,499,503,512,],[-52,-29,-53,-54,-157,-156,-37,
144,145,-30,-288,-156,246,-55,-292,263,-280,-295,-299,-296,-293,-278,-279,-253,-291,-265,-297,-289,-277,-294,-290,-36,-35,-288,-302,-38,-300,-257,-298,-274,-273,-31,-251,-256,-254,-34,-255,-44,-43,-177,450,-272,-271,-270,-269,-268,-281,-33,-32,-39,-42,-180,-178,-258,-266,-267,-252,-41,-40,-179,-282,-275,-276,]),'ELSE':([53,104,199,200,203,205,214,217,222,233,332,333,336,345,347,426,427,431,436,473,494,495,497,515,516,519,520,],[-68,-69,-73,-76,-72,-74,-78,-77,-75,-302,-212,-211,-209,-208,-196,-207,-210,-199,-197,-198,-203,-202,509,-204,-201,-206,-205,]),'ANDEQUAL':([115,147,149,150,151,152,153,154,155,158,159,167,169,172,174,175,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,451,452,453,458,499,503,512,],[-288,-292,268,-280,-295,-299,-296,-293,-278,-279,-253,-291,-265,-297,-289,-277,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-258,-266,-267,-252,-282,-275,-276,]),'EQ':([115,147,149,150,151,152,153,154,155,158,159,161,167,169,172,174,175,176,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,451,452,453,458,499,503,512,],[-288,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,-232,-291,-265,-297,-289,-277,308,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-238,308,-239,-237,-241,-245,-240,-236,-243,308,-234,-233,-242,308,-244,308,308,-235,-258,-266,-267,-252,-282,-275,-276,]),'AND':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,115,121,136,141,144,145,147,148,149,150,151,152,153,154,155,156,157,158,159,161,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,181,182,183,195,199,200,203,204,205,211,212,214,215,216,217,219,221,222,225,226,233,235,246,249,250,251,256,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,278,280,281,284,285,286,287,288,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,355,361,362,363,366,370,372,374,377,378,379,380,383,388,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,416,417,418,424,426,427,429,431,433,436,447,450,451,452,453,454,456,458,459,460,462,463,464,470,472,473,474,475,476,480,481,493,494,495,497,499,501,502,503,506,509,512,514,515,516,518,519,520,],[-115,-116,-117,-68,-303,-301,-28,-160,-27,-81,-69,173,-28,-303,-288,173,-161,-303,173,173,-292,-264,-251,-280,-295,-299,-296,-293,-278,173,-262,-279,-253,-232,173,-261,173,-291,-260,-265,173,173,-297,-259,-289,-277,309,-294,-290,-263,173,173,173,-73,-76,-72,173,-74,173,173,-78,-193,-192,-77,-194,173,-75,-260,-288,-302,173,173,173,-28,-303,-303,-221,-224,-222,-218,-219,-223,-225,173,-227,-228,-220,-226,-300,173,-257,-298,-274,-273,173,173,173,-251,-256,173,-254,-255,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,-303,-260,173,-212,-211,173,-209,173,173,173,-195,173,-208,-196,173,173,173,-260,173,173,-12,173,173,-11,-272,-271,-270,-269,-268,-281,173,-238,309,-239,-237,-241,-245,-240,-236,-243,309,-234,-233,-242,309,-244,-246,309,-235,173,-28,-303,-260,-207,-210,173,-199,173,-197,-303,-176,-258,-266,-267,173,173,-252,-303,173,-260,173,173,173,173,-198,173,173,173,173,-11,173,-203,-202,-200,-282,173,-303,-275,173,173,-276,173,-204,-201,173,-206,-205,]),'TYPEID':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63
,64,65,67,68,69,70,71,72,73,74,76,77,78,79,80,81,83,84,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,136,137,139,142,146,170,184,185,186,189,198,199,200,203,205,212,214,215,216,217,219,222,228,229,230,231,232,233,234,240,245,254,273,277,279,282,283,284,287,289,323,327,332,333,335,336,342,345,347,350,351,353,354,356,357,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[29,-303,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,29,-94,-109,-104,-136,-65,-93,-110,29,69,73,-215,-107,-303,-111,88,-63,-116,29,-29,-135,-105,-62,-101,-67,-112,-106,-303,-108,-303,-103,-117,-68,-98,-85,-10,-9,29,-53,88,-82,29,29,-61,-131,-301,-130,29,-147,-146,-28,-158,-160,-27,-88,88,-90,88,29,-87,-89,-92,-81,-84,-86,-69,-30,193,29,-70,29,-83,29,29,-128,-140,-137,29,29,88,-161,-159,88,29,88,29,-36,-35,29,193,29,-73,-76,-72,-74,29,-78,-193,-192,-77,-194,-75,29,29,-129,-132,-138,-302,-126,-127,-148,-71,29,377,379,-31,29,29,29,-34,29,29,-212,-211,29,-209,-195,-208,-196,-134,-133,88,-139,-150,-149,29,-33,-32,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'LBRACE':([8,18,22,27,28,37,38,53,61,62,64,66,67,69,70,71,73,74,87,101,104,105,121,122,143,144,145,184,185,199,200,203,205,212,214,215,216,217,219,221,222,233,256,282,289,332,333,336,338,342,345,347,348,366,372,374,389,412,413,426,427,431,433,436,447,450,451,456,457,459,472,473,474,476,480,481,494,495,497,502,509,514,515,516,518,519,520,],[-52,-303,-136,70,70,-29,-135,-68,-53,-7,-82,70,-8,70,-301,70,70,70,-303,-81,-69,-30,70,-83,70,70,70,-36,-35,-73,-76,-72,-74,70,-78,-193,-192,-77,-194,70,-75,-302,-303,-31,-34,-212,-211,-209,70,-195,-208,-196,70,-12,70,-11,70,-33,-32,-207,-210,-199,70,-197,-303,-176,70,70,70,-303,70,-198,70,70,70,-11,-203,-202,-200,-303,70,70,-204,-201,70,-206,-205,]),'PPHASH':([0,12,14,17,23,26,34,40,42,53,68,101,104,120,233,254,347,],[42,-64,-60,-66,-65,42,-63,-62,-67,-68,-61,-81,-69,-70,-302,-71,-196,]),'INT':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,136,142,170,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,228,229,230,231,232,233,234,240,245,254,273,282,283,284,287,289,292,323,327,332,333,335,336,342,345,347,350,351,354,356,357,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[50,-303,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,50,-94,-109,-104,-65,-93,-110,50,-215,-107,-111,50,-63,-116,50,-29,-105,-62,-101,-67,-112,-106,-303,-108,-303,-103,-117,-68,-98,-85,-10,-9,50,-53,50,-82,50,50,-61,-131,-301,-130,50,-147,-146,-160,-88,-90,50,-87,-89,-92,-81,-84,-86,-69,-30,50,50,-70,50,-83,50,50,-128,-140,-137,50,50,50,-161,50,50,-36,-35,50,50,-73,-76,-72,-74,50,-78,-193,-192,-77,-194,-75,50,50,-129,-132,-138,-302,-126,-127,-148,-71,50,-31,50,50,50,-34,50,50,50,-212,-211,50,-209,-195,-208,-196,-134,-133,-139,-150,-149,50,-33,-32,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'SIGNED':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,136,142,170,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,228,229,230,231,232,233,234,240,245,254,273,282,283,284,287,289,292,323,327,332,333,3
35,336,342,345,347,350,351,354,356,357,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[48,-303,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,48,-94,-109,-104,-65,-93,-110,48,-215,-107,-111,48,-63,-116,48,-29,-105,-62,-101,-67,-112,-106,-303,-108,-303,-103,-117,-68,-98,-85,-10,-9,48,-53,48,-82,48,48,-61,-131,-301,-130,48,-147,-146,-160,-88,-90,48,-87,-89,-92,-81,-84,-86,-69,-30,48,48,-70,48,-83,48,48,-128,-140,-137,48,48,48,-161,48,48,-36,-35,48,48,-73,-76,-72,-74,48,-78,-193,-192,-77,-194,-75,48,48,-129,-132,-138,-302,-126,-127,-148,-71,48,-31,48,48,48,-34,48,48,48,-212,-211,48,-209,-195,-208,-196,-134,-133,-139,-150,-149,48,-33,-32,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'CONTINUE':([53,70,101,104,121,199,200,203,205,212,214,215,216,217,219,221,222,233,332,333,336,338,342,345,347,348,426,427,431,433,436,472,473,474,476,494,495,497,509,514,515,516,518,519,520,],[-68,-301,-81,-69,207,-73,-76,-72,-74,207,-78,-193,-192,-77,-194,207,-75,-302,-212,-211,-209,207,-195,-208,-196,207,-207,-210,-199,207,-197,207,-198,207,207,-203,-202,-200,207,207,-204,-201,207,-206,-205,]),'NOT':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,136,141,144,145,148,156,157,163,164,166,168,170,171,173,181,182,183,195,199,200,203,204,205,211,212,214,215,216,217,219,221,222,225,233,235,246,249,250,251,256,260,261,262,263,264,265,266,267,268,269,270,271,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,355,361,362,363,366,370,372,374,389,416,417,418,424,426,427,429,431,433,436,447,450,454,456,459,460,462,463,464,470,472,473,474,475,476,480,481,493,494,495,497,501,502,506,509,514,515,516,518,519,520,],[-115,-116,-117,-68,-303,-301,-28,-160,-27,-81,-69,181,-28,-303,181,-161,-303,181,181,-264,181,-262,181,-261,181,-260,181,181,-259,-263,181,181,181,-73,-76,-72,181,-74,181,181,-78,-193,-192,-77,-194,181,-75,-260,-302,181,181,181,-28,-303,-303,-221,-224,-222,-218,-219,-223,-225,181,-227,-228,-220,-226,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,-303,-260,181,-212,-211,181,-209,181,181,181,-195,181,-208,-196,181,181,181,-260,181,181,-12,181,181,-11,181,181,-28,-303,-260,-207,-210,181,-199,181,-197,-303,-176,181,181,-303,181,-260,181,181,181,181,-198,181,181,181,181,-11,181,-203,-202,-200,181,-303,181,181,181,-204,-201,181,-206,-205,]),'OREQUAL':([115,147,149,150,151,152,153,154,155,158,159,167,169,172,174,175,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,451,452,453,458,499,503,512,],[-288,-292,269,-280,-295,-299,-296,-293,-278,-279,-253,-291,-265,-297,-289,-277,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-258,-266,-267,-252,-282,-275,-276,]),'MOD':([115,147,149,150,151,152,153,154,155,158,159,161,167,169,172,174,175,176,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,451,452,453,458,499,503,512,],[-288,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,-232,-291,-265,-297,-289,-277,312,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,312,312,312,312,312,312,312,312,312,312,-234,-233,312,312,312,312,312,-235,-258,-266,-267,-252,-282,-275,-276,]),'RSHIFT':([115,147,149,150,151,152,153,154,155,158,159,161,167,169,172,174,175,176,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,38
0,383,388,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,451,452,453,458,499,503,512,],[-288,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,-232,-291,-265,-297,-289,-277,294,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-238,294,-239,-237,294,294,294,-236,294,294,-234,-233,294,294,294,294,294,-235,-258,-266,-267,-252,-282,-275,-276,]),'DEFAULT':([53,70,101,104,121,199,200,203,205,212,214,215,216,217,219,221,222,233,332,333,336,338,342,345,347,348,426,427,431,433,436,472,473,474,476,494,495,497,509,514,515,516,518,519,520,],[-68,-301,-81,-69,209,-73,-76,-72,-74,209,-78,-193,-192,-77,-194,209,-75,-302,-212,-211,-209,209,-195,-208,-196,209,-207,-210,-199,209,-197,209,-198,209,209,-203,-202,-200,209,209,-204,-201,209,-206,-205,]),'__INT128':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,136,142,170,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,228,229,230,231,232,233,234,240,245,254,273,282,283,284,287,289,292,323,327,332,333,335,336,342,345,347,350,351,354,356,357,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[25,-303,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,25,-94,-109,-104,-65,-93,-110,25,-215,-107,-111,25,-63,-116,25,-29,-105,-62,-101,-67,-112,-106,-303,-108,-303,-103,-117,-68,-98,-85,-10,-9,25,-53,25,-82,25,25,-61,-131,-301,-130,25,-147,-146,-160,-88,-90,25,-87,-89,-92,-81,-84,-86,-69,-30,25,25,-70,25,-83,25,25,-128,-140,-137,25,25,25,-161,25,25,-36,-35,25,25,-73,-76,-72,-74,25,-78,-193,-192,-77,-194,-75,25,25,-129,-132,-138,-302,-126,-127,-148,-71,25,-31,25,25,25,-34,25,25,25,-212,-211,25,-209,-195,-208,-196,-134,-133,-139,-150,-149,25,-33,-32,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'WHILE':([53,70,101,104,121,199,200,203,205,212,214,215,216,217,219,221,222,233,332,333,336,338,342,345,346,347,348,426,427,431,433,436,472,473,474,476,494,495,497,509,514,515,516,518,519,520,],[-68,-301,-81,-69,210,-73,-76,-72,-74,210,-78,-193,-192,-77,-194,210,-75,-302,-212,-211,-209,210,-195,-208,435,-196,210,-207,-210,-199,210,-197,210,-198,210,210,-203,-202,-200,210,210,-204,-201,210,-206,-205,]),'DIVEQUAL':([115,147,149,150,151,152,153,154,155,158,159,167,169,172,174,175,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,451,452,453,458,499,503,512,],[-288,-292,260,-280,-295,-299,-296,-293,-278,-279,-253,-291,-265,-297,-289,-277,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-258,-266,-267,-252,-282,-275,-276,]),'EXTERN':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,37,39,40,41,42,44,45,47,48,49,50,51,53,54,60,61,63,64,67,68,69,70,71,73,74,80,83,87,91,92,96,101,104,105,113,120,121,122,142,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,231,233,245,254,282,289,323,327,332,333,335,336,342,345,347,350,351,356,357,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[11,11,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,11,-94,-109,-104,-65,-93,-110,11,-215,-107,-111,11,-63,-116,-29,-105,-62,-101,-67,-112,-106,11,-108,11,-103,-117,-68,-98,11,-53,11,-82,11,-61,-131,-301,-130,-147,-146,-88,-90,11,-87,-89,-92,-81,-69,-30,11,-70,11,-83,11,-36,-35,11,11,-73,-76,-72,-74,11,-78,-193,-192,-77,-194,-75,-132,-30
2,-148,-71,-31,-34,11,11,-212,-211,11,-209,-195,-208,-196,-134,-133,-150,-149,11,-33,-32,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'CASE':([53,70,101,104,121,199,200,203,205,212,214,215,216,217,219,221,222,233,332,333,336,338,342,345,347,348,426,427,431,433,436,472,473,474,476,494,495,497,509,514,515,516,518,519,520,],[-68,-301,-81,-69,211,-73,-76,-72,-74,211,-78,-193,-192,-77,-194,211,-75,-302,-212,-211,-209,211,-195,-208,-196,211,-207,-210,-199,211,-197,211,-198,211,211,-203,-202,-200,211,211,-204,-201,211,-206,-205,]),'LAND':([115,147,149,150,151,152,153,154,155,158,159,161,167,169,172,174,175,176,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,451,452,453,458,499,503,512,],[-288,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,-232,-291,-265,-297,-289,-277,307,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-238,307,-239,-237,-241,-245,-240,-236,-243,-248,-234,-233,-242,-249,-244,-246,-247,-235,-258,-266,-267,-252,-282,-275,-276,]),'REGISTER':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,37,39,40,41,42,44,45,47,48,49,50,51,53,54,60,61,63,64,67,68,69,70,71,73,74,80,83,87,91,92,96,101,104,105,113,120,121,122,142,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,231,233,245,254,282,289,323,327,332,333,335,336,342,345,347,350,351,356,357,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[19,19,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,19,-94,-109,-104,-65,-93,-110,19,-215,-107,-111,19,-63,-116,-29,-105,-62,-101,-67,-112,-106,19,-108,19,-103,-117,-68,-98,19,-53,19,-82,19,-61,-131,-301,-130,-147,-146,-88,-90,19,-87,-89,-92,-81,-69,-30,19,-70,19,-83,19,-36,-35,19,19,-73,-76,-72,-74,19,-78,-193,-192,-77,-194,-75,-132,-302,-148,-71,-31,-34,19,19,-212,-211,19,-209,-195,-208,-196,-134,-133,-150,-149,19,-33,-32,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'MODEQUAL':([115,147,149,150,151,152,153,154,155,158,159,167,169,172,174,175,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,451,452,453,458,499,503,512,],[-288,-292,262,-280,-295,-299,-296,-293,-278,-279,-253,-291,-265,-297,-289,-277,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-258,-266,-267,-252,-282,-275,-276,]),'NE':([115,147,149,150,151,152,153,154,155,158,159,161,167,169,172,174,175,176,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,451,452,453,458,499,503,512,],[-288,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,-232,-291,-265,-297,-289,-277,299,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-238,299,-239,-237,-241,-245,-240,-236,-243,299,-234,-233,-242,299,-244,299,299,-235,-258,-266,-267,-252,-282,-275,-276,]),'SWITCH':([53,70,101,104,121,199,200,203,205,212,214,215,216,217,219,221,222,233,332,333,336,338,342,345,347,348,426,427,431,433,436,472,473,474,476,494,495,497,509,514,515,516,518,519,520,],[-68,-301,-81,-69,213,-73,-76,-72,-74,213,-78,-193,-192,-77,-194,213,-75,-302,-212,-211,-209,213,-195,-208,-196,213,-207,-210,-199,213,-197,213,-198,213,213,-203,-202,-200,213,213,-204,-201,213,-206,-205,]),'INT_CONST_HEX':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,136,141,144,145,148,156,157,163,164,166,168,170,171,173,181,182,183,195,199,200,203,2
04,205,211,212,214,215,216,217,219,221,222,225,233,235,246,249,250,251,256,260,261,262,263,264,265,266,267,268,269,270,271,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,355,361,362,363,366,370,372,374,389,416,417,418,424,426,427,429,431,433,436,447,450,454,456,459,460,462,463,464,470,472,473,474,475,476,480,481,493,494,495,497,501,502,506,509,514,515,516,518,519,520,],[-115,-116,-117,-68,-303,-301,-28,-160,-27,-81,-69,167,-28,-303,167,-161,-303,167,167,-264,167,-262,167,-261,167,-260,167,167,-259,-263,167,167,167,-73,-76,-72,167,-74,167,167,-78,-193,-192,-77,-194,167,-75,-260,-302,167,167,167,-28,-303,-303,-221,-224,-222,-218,-219,-223,-225,167,-227,-228,-220,-226,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,-303,-260,167,-212,-211,167,-209,167,167,167,-195,167,-208,-196,167,167,167,-260,167,167,-12,167,167,-11,167,167,-28,-303,-260,-207,-210,167,-199,167,-197,-303,-176,167,167,-303,167,-260,167,167,167,167,-198,167,167,167,167,-11,167,-203,-202,-200,167,-303,167,167,167,-204,-201,167,-206,-205,]),'_COMPLEX':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,136,142,170,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,228,229,230,231,232,233,234,240,245,254,273,282,283,284,287,289,292,323,327,332,333,335,336,342,345,347,350,351,354,356,357,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[30,-303,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,30,-94,-109,-104,-65,-93,-110,30,-215,-107,-111,30,-63,-116,30,-29,-105,-62,-101,-67,-112,-106,-303,-108,-303,-103,-117,-68,-98,-85,-10,-9,30,-53,30,-82,30,30,-61,-131,-301,-130,30,-147,-146,-160,-88,-90,30,-87,-89,-92,-81,-84,-86,-69,-30,30,30,-70,30,-83,30,30,-128,-140,-137,30,30,30,-161,30,30,-36,-35,30,30,-73,-76,-72,-74,30,-78,-193,-192,-77,-194,-75,30,30,-129,-132,-138,-302,-126,-127,-148,-71,30,-31,30,30,30,-34,30,30,30,-212,-211,30,-209,-195,-208,-196,-134,-133,-139,-150,-149,30,-33,-32,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'PPPRAGMASTR':([53,],[104,]),'PLUSEQUAL':([115,147,149,150,151,152,153,154,155,158,159,167,169,172,174,175,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,451,452,453,458,499,503,512,],[-288,-292,265,-280,-295,-299,-296,-293,-278,-279,-253,-291,-265,-297,-289,-277,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-258,-266,-267,-252,-282,-275,-276,]),'STRUCT':([0,1,3,7,8,9,11,12,14,17,18,19,23,24,26,34,35,36,37,40,42,47,49,51,53,54,55,56,57,60,61,64,65,67,68,70,72,78,87,101,102,103,104,105,117,120,121,122,123,124,126,127,128,129,136,142,170,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,228,229,232,233,254,273,282,283,284,287,289,323,327,332,333,335,336,342,345,347,354,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[38,-303,-115,-97,-52,-95,-96,-64,-60,-66,38,-94,-65,-93,38,-63,-116,38,-29,-62,-67,-303,-303,-117,-68,-98,-85,-10,-9,38,-53,-82,38,38,-61,-301,38,-160,38,-81,-84,-86,-69,-30,38,-70,38,-83,38,38,-140,-137,38,38,-161,38,38,-36,-35,38,38,-73,-76,-72,-74,38,-78,-193,-192,-77,-194,-75,38,38,-138,-302,-71,38,-31,38,38,38,-34,38,38,-212,-211,38,-209,-195,-208,-196,-139,38,-33,-32,-20
7,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'CONDOP':([115,147,149,150,151,152,153,154,155,158,159,161,167,169,172,174,175,176,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,451,452,453,458,499,503,512,],[-288,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,-232,-291,-265,-297,-289,-277,310,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-238,-250,-239,-237,-241,-245,-240,-236,-243,-248,-234,-233,-242,-249,-244,-246,-247,-235,-258,-266,-267,-252,-282,-275,-276,]),'BREAK':([53,70,101,104,121,199,200,203,205,212,214,215,216,217,219,221,222,233,332,333,336,338,342,345,347,348,426,427,431,433,436,472,473,474,476,494,495,497,509,514,515,516,518,519,520,],[-68,-301,-81,-69,218,-73,-76,-72,-74,218,-78,-193,-192,-77,-194,218,-75,-302,-212,-211,-209,218,-195,-208,-196,218,-207,-210,-199,218,-197,218,-198,218,218,-203,-202,-200,218,218,-204,-201,218,-206,-205,]),'VOLATILE':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,31,32,33,34,35,37,39,40,41,42,44,45,47,48,49,50,51,53,54,59,60,61,63,64,67,68,69,70,71,72,73,74,76,78,80,83,87,91,92,96,101,104,105,107,108,113,120,121,122,123,124,125,126,127,128,129,130,136,141,142,170,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,228,229,230,231,232,233,234,240,245,250,251,254,273,282,283,284,287,289,292,322,323,327,332,333,335,336,342,345,347,350,351,354,356,357,392,412,413,417,418,426,427,431,436,473,494,495,497,515,516,519,520,],[51,51,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,51,-94,-109,-104,-65,-93,-110,51,-215,-107,51,-111,51,-63,-116,-29,-105,-62,-101,-67,-112,-106,51,-108,51,-103,-117,-68,-98,51,51,-53,51,-82,51,-61,-131,-301,-130,51,-147,-146,51,-160,-88,-90,51,-87,-89,-92,-81,-69,-30,51,51,51,-70,51,-83,51,51,-128,-140,-137,51,51,51,-161,51,51,51,-36,-35,51,51,-73,-76,-72,-74,51,-78,-193,-192,-77,-194,-75,51,51,-129,-132,-138,-302,-126,-127,-148,51,51,-71,51,-31,51,51,51,-34,51,51,51,51,-212,-211,51,-209,-195,-208,-196,-134,-133,-139,-150,-149,51,-33,-32,51,51,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'PPPRAGMA':([0,12,14,17,23,26,34,40,42,53,68,70,101,104,120,121,199,200,203,205,212,214,215,216,217,219,221,222,233,254,332,333,336,338,342,345,347,348,426,427,431,433,436,472,473,474,476,494,495,497,509,514,515,516,518,519,520,],[53,-64,-60,-66,-65,53,-63,-62,-67,-68,-61,-301,-81,-69,-70,53,-73,-76,-72,-74,53,-78,-193,-192,-77,-194,53,-75,-302,-71,-212,-211,-209,53,-195,-208,-196,53,-207,-210,-199,53,-197,53,-198,53,53,-203,-202,-200,53,53,-204,-201,53,-206,-205,]),'INLINE':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,37,39,40,41,42,44,45,47,48,49,50,51,53,54,60,61,63,64,67,68,69,70,71,73,74,80,83,87,91,92,96,101,104,105,113,120,121,122,142,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,231,233,245,254,282,289,323,327,332,333,335,336,342,345,347,350,351,356,357,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[54,54,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,54,-94,-109,-104,-65,-93,-110,54,-215,-107,-111,54,-63,-116,-29,-105,-62,-101,-67,-112,-106,54,-108,54,-103,-117,-68,-98,54,-53,54,-82,54,-61,-131,-301,-130,-147,-146,-88,-90,54,-87,-89,-92,-81,-69,-30,54,-70,54,-83,54,-36,-35,54,54,-73,-76,-72,-74,54,-78,-193,-192,-77,-194,-75,-132,-302,-148,-71,-31,-34,54,54,-212,-211,54,-209,-195,-208,-196,-134,-133,-150,-149,54,-33,-32,-207,-210,
-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'INT_CONST_BIN':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,136,141,144,145,148,156,157,163,164,166,168,170,171,173,181,182,183,195,199,200,203,204,205,211,212,214,215,216,217,219,221,222,225,233,235,246,249,250,251,256,260,261,262,263,264,265,266,267,268,269,270,271,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,355,361,362,363,366,370,372,374,389,416,417,418,424,426,427,429,431,433,436,447,450,454,456,459,460,462,463,464,470,472,473,474,475,476,480,481,493,494,495,497,501,502,506,509,514,515,516,518,519,520,],[-115,-116,-117,-68,-303,-301,-28,-160,-27,-81,-69,147,-28,-303,147,-161,-303,147,147,-264,147,-262,147,-261,147,-260,147,147,-259,-263,147,147,147,-73,-76,-72,147,-74,147,147,-78,-193,-192,-77,-194,147,-75,-260,-302,147,147,147,-28,-303,-303,-221,-224,-222,-218,-219,-223,-225,147,-227,-228,-220,-226,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,-303,-260,147,-212,-211,147,-209,147,147,147,-195,147,-208,-196,147,147,147,-260,147,147,-12,147,147,-11,147,147,-28,-303,-260,-207,-210,147,-199,147,-197,-303,-176,147,147,-303,147,-260,147,147,147,147,-198,147,147,147,147,-11,147,-203,-202,-200,147,-303,147,147,147,-204,-201,147,-206,-205,]),'DO':([53,70,101,104,121,199,200,203,205,212,214,215,216,217,219,221,222,233,332,333,336,338,342,345,347,348,426,427,431,433,436,472,473,474,476,494,495,497,509,514,515,516,518,519,520,],[-68,-301,-81,-69,221,-73,-76,-72,-74,221,-78,-193,-192,-77,-194,221,-75,-302,-212,-211,-209,221,-195,-208,-196,221,-207,-210,-199,221,-197,221,-198,221,221,-203,-202,-200,221,221,-204,-201,221,-206,-205,]),'LNOT':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,136,141,144,145,148,156,157,163,164,166,168,170,171,173,181,182,183,195,199,200,203,204,205,211,212,214,215,216,217,219,221,222,225,233,235,246,249,250,251,256,260,261,262,263,264,265,266,267,268,269,270,271,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,355,361,362,363,366,370,372,374,389,416,417,418,424,426,427,429,431,433,436,447,450,454,456,459,460,462,463,464,470,472,473,474,475,476,480,481,493,494,495,497,501,502,506,509,514,515,516,518,519,520,],[-115,-116,-117,-68,-303,-301,-28,-160,-27,-81,-69,148,-28,-303,148,-161,-303,148,148,-264,148,-262,148,-261,148,-260,148,148,-259,-263,148,148,148,-73,-76,-72,148,-74,148,148,-78,-193,-192,-77,-194,148,-75,-260,-302,148,148,148,-28,-303,-303,-221,-224,-222,-218,-219,-223,-225,148,-227,-228,-220,-226,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,-303,-260,148,-212,-211,148,-209,148,148,148,-195,148,-208,-196,148,148,148,-260,148,148,-12,148,148,-11,148,148,-28,-303,-260,-207,-210,148,-199,148,-197,-303,-176,148,148,-303,148,-260,148,148,148,148,-198,148,148,148,148,-11,148,-203,-202,-200,148,-303,148,148,148,-204,-201,148,-206,-205,]),'CONST':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,31,32,33,34,35,37,39,40,41,42,44,45,47,48,49,50,51,53,54,59,60,61,63,64,67,68,69,70,71,72,73,74,76,78,80,83,87,91,92,96,101,104,105,107,108,113,120,121,122,123,124,125,126,127,128,129,130,136,141,142,170,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,228,229,230,231,232,233,234,240,245,250,251,254,273,282,283,284,287,289,292,322,323,327,332,333,335,336,342,345,
347,350,351,354,356,357,392,412,413,417,418,426,427,431,436,473,494,495,497,515,516,519,520,],[3,3,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,3,-94,-109,-104,-65,-93,-110,3,-215,-107,3,-111,3,-63,-116,-29,-105,-62,-101,-67,-112,-106,3,-108,3,-103,-117,-68,-98,3,3,-53,3,-82,3,-61,-131,-301,-130,3,-147,-146,3,-160,-88,-90,3,-87,-89,-92,-81,-69,-30,3,3,3,-70,3,-83,3,3,-128,-140,-137,3,3,3,-161,3,3,3,-36,-35,3,3,-73,-76,-72,-74,3,-78,-193,-192,-77,-194,-75,3,3,-129,-132,-138,-302,-126,-127,-148,3,3,-71,3,-31,3,3,3,-34,3,3,3,3,-212,-211,3,-209,-195,-208,-196,-134,-133,-139,-150,-149,3,-33,-32,3,3,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'LOR':([115,147,149,150,151,152,153,154,155,158,159,161,167,169,172,174,175,176,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,451,452,453,458,499,503,512,],[-288,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,-232,-291,-265,-297,-289,-277,295,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-238,-250,-239,-237,-241,-245,-240,-236,-243,-248,-234,-233,-242,-249,-244,-246,-247,-235,-258,-266,-267,-252,-282,-275,-276,]),'CHAR_CONST':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,136,141,144,145,148,156,157,163,164,166,168,170,171,173,181,182,183,195,199,200,203,204,205,211,212,214,215,216,217,219,221,222,225,233,235,246,249,250,251,256,260,261,262,263,264,265,266,267,268,269,270,271,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,355,361,362,363,366,370,372,374,389,416,417,418,424,426,427,429,431,433,436,447,450,454,456,459,460,462,463,464,470,472,473,474,475,476,480,481,493,494,495,497,501,502,506,509,514,515,516,518,519,520,],[-115,-116,-117,-68,-303,-301,-28,-160,-27,-81,-69,151,-28,-303,151,-161,-303,151,151,-264,151,-262,151,-261,151,-260,151,151,-259,-263,151,151,151,-73,-76,-72,151,-74,151,151,-78,-193,-192,-77,-194,151,-75,-260,-302,151,151,151,-28,-303,-303,-221,-224,-222,-218,-219,-223,-225,151,-227,-228,-220,-226,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,-303,-260,151,-212,-211,151,-209,151,151,151,-195,151,-208,-196,151,151,151,-260,151,151,-12,151,151,-11,151,151,-28,-303,-260,-207,-210,151,-199,151,-197,-303,-176,151,151,-303,151,-260,151,151,151,151,-198,151,151,151,151,-11,151,-203,-202,-200,151,-303,151,151,151,-204,-201,151,-206,-205,]),'LSHIFT':([115,147,149,150,151,152,153,154,155,158,159,161,167,169,172,174,175,176,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,451,452,453,458,499,503,512,],[-288,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,-232,-291,-265,-297,-289,-277,296,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-238,296,-239,-237,296,296,296,-236,296,296,-234,-233,296,296,296,296,296,-235,-258,-266,-267,-252,-282,-275,-276,]),'RBRACE':([53,70,101,104,115,121,126,127,129,133,134,135,147,149,150,151,152,153,154,155,158,159,161,167,169,172,174,175,176,177,178,180,199,200,203,205,212,214,215,216,217,219,220,222,223,228,229,232,233,242,243,244,256,257,272,274,275,276,278,285,286,288,293,332,333,336,341,342,345,347,354,358,359,367,371,374,375,377,378,379,380,383,388,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,
426,427,431,436,444,447,448,451,452,453,458,473,482,486,487,494,495,497,498,499,502,503,512,515,516,519,520,],[-68,-301,-81,-69,-288,-303,-140,-137,233,-151,233,-154,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,-232,-291,-265,-297,-289,-277,-230,-294,-290,-216,-73,-76,-72,-74,-6,-78,-193,-192,-77,-194,-5,-75,233,233,233,-138,-302,233,233,-152,-303,-171,-300,-257,-298,-274,-273,-251,-256,-254,-255,-212,-211,-209,-229,-195,-208,-196,-139,-153,-155,233,-22,-21,-217,-272,-271,-270,-269,-268,-281,-238,-250,-239,-237,-241,-245,-240,-236,-243,-248,-234,-233,-242,-249,-244,-246,-247,-235,-207,-210,-199,-197,-172,233,-174,-258,-266,-267,-252,-198,-173,233,-231,-203,-202,-200,-175,-282,233,-275,-276,-204,-201,-206,-205,]),'_BOOL':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,136,142,170,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,228,229,230,231,232,233,234,240,245,254,273,282,283,284,287,289,292,323,327,332,333,335,336,342,345,347,350,351,354,356,357,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[15,-303,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,15,-94,-109,-104,-65,-93,-110,15,-215,-107,-111,15,-63,-116,15,-29,-105,-62,-101,-67,-112,-106,-303,-108,-303,-103,-117,-68,-98,-85,-10,-9,15,-53,15,-82,15,15,-61,-131,-301,-130,15,-147,-146,-160,-88,-90,15,-87,-89,-92,-81,-84,-86,-69,-30,15,15,-70,15,-83,15,15,-128,-140,-137,15,15,15,-161,15,15,-36,-35,15,15,-73,-76,-72,-74,15,-78,-193,-192,-77,-194,-75,15,15,-129,-132,-138,-302,-126,-127,-148,-71,15,-31,15,15,15,-34,15,15,15,-212,-211,15,-209,-195,-208,-196,-134,-133,-139,-150,-149,15,-33,-32,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'LE':([115,147,149,150,151,152,153,154,155,158,159,161,167,169,172,174,175,176,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,451,452,453,458,499,503,512,],[-288,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,-232,-291,-265,-297,-289,-277,298,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-238,298,-239,-237,-241,298,-240,-236,-243,298,-234,-233,-242,298,298,298,298,-235,-258,-266,-267,-252,-282,-275,-276,]),'SEMI':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,46,47,48,49,50,51,53,54,55,56,57,61,63,65,68,69,70,71,72,73,74,80,82,83,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,115,119,120,121,123,124,125,126,127,129,130,140,147,149,150,151,152,153,154,155,158,159,161,167,169,172,174,175,176,177,178,180,184,185,199,200,202,203,204,205,207,208,212,214,215,216,217,218,219,220,221,222,224,226,228,229,230,231,232,233,234,236,237,238,239,240,241,245,247,248,254,255,257,258,259,272,274,275,276,278,282,285,286,288,289,293,331,332,333,334,335,336,338,341,342,343,345,347,348,350,351,352,354,356,357,364,365,375,377,378,379,380,383,388,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,412,413,426,427,428,429,430,431,433,436,438,439,440,441,444,451,452,453,458,470,471,472,473,474,476,477,478,482,487,492,494,495,497,499,503,508,509,512,514,515,516,518,519,520,],[17,-303,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,-94,-109,-104,-65,-93,-110,17,-215,-107,-111,-303,-63,-116,-303,-29,-
105,-62,-101,-67,-112,-106,101,-303,-108,-303,-103,-117,-68,-98,-85,-10,-9,-53,-303,-303,-61,-131,-301,-130,126,-147,-146,-88,-20,-90,-54,-157,-156,-37,-120,-79,-87,-89,-19,-118,-122,-92,-124,-16,-80,-15,-81,-84,-86,-69,-30,-288,-156,-70,-303,126,126,-128,-140,-137,126,-303,-55,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,-232,-291,-265,-297,-289,-277,-230,-294,-290,-216,-36,-35,-73,-76,332,-72,333,-74,336,-14,-303,-78,-193,-192,-77,345,-194,-13,-303,-75,-213,-288,126,126,-129,-132,-138,-302,-126,-26,-25,354,-141,-127,-143,-148,-38,-119,-71,-121,-171,-125,-123,-300,-257,-298,-274,-273,-31,-251,-256,-254,-34,-255,426,-212,-211,427,-303,-209,-303,-229,-195,-13,-208,-196,-303,-134,-133,-145,-139,-150,-149,-44,-43,-217,-272,-271,-270,-269,-268,-281,-238,-250,-239,-237,-241,-245,-240,-236,-243,-248,-234,-233,-242,-249,-244,-246,-247,-235,-33,-32,-207,-210,470,-303,-214,-199,-303,-197,-142,-144,-39,-42,-172,-258,-266,-267,-252,-303,493,-303,-198,-303,-303,-41,-40,-173,-231,506,-203,-202,-200,-282,-275,515,-303,-276,-303,-204,-201,-303,-206,-205,]),'LT':([115,147,149,150,151,152,153,154,155,158,159,161,167,169,172,174,175,176,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,451,452,453,458,499,503,512,],[-288,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,-232,-291,-265,-297,-289,-277,300,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-238,300,-239,-237,-241,300,-240,-236,-243,300,-234,-233,-242,300,300,300,300,-235,-258,-266,-267,-252,-282,-275,-276,]),'COMMA':([2,3,5,6,7,8,9,10,11,15,16,19,20,21,24,25,29,30,31,32,35,37,39,41,44,45,48,50,51,54,61,69,71,73,74,76,77,78,79,80,82,83,85,86,87,88,89,91,92,94,95,96,97,98,105,112,113,114,115,116,118,119,125,133,134,135,136,137,140,147,149,150,151,152,153,154,155,158,159,161,167,169,172,174,175,176,177,178,180,184,185,187,188,189,190,191,192,193,194,196,208,224,226,230,231,233,234,236,239,240,241,242,243,244,245,247,248,255,257,258,259,272,274,275,276,278,282,285,286,288,289,290,292,293,320,321,328,330,334,341,350,351,352,356,357,358,359,364,365,371,375,377,378,379,380,381,382,383,384,385,388,390,391,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,421,422,430,432,434,437,438,439,440,441,444,448,451,452,453,458,465,466,467,468,469,477,478,482,483,486,487,488,489,496,498,499,503,504,505,511,512,],[-102,-115,-113,-99,-97,-52,-95,-114,-96,-100,-91,-94,-109,-104,-93,-110,-215,-107,-303,-111,-116,-29,-105,-101,-112,-106,-108,-103,-117,-98,-53,-131,-130,-147,-146,-28,-158,-160,-27,-88,139,-90,-54,-157,-156,-37,-120,-87,-89,-118,-122,-92,-124,146,-30,-164,-303,197,-288,198,-169,-156,-128,-151,244,-154,-161,-159,-55,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,-232,-291,-265,-297,-289,-277,-230,-294,-290,-216,-36,-35,-168,-2,-182,-56,-166,-1,-45,-167,-184,337,-213,-288,-129,-132,-302,-126,353,-141,-127,-143,244,244,-152,-148,-38,-119,-121,-171,-125,-123,-300,-257,-298,-274,-273,-31,-251,-256,-254,-34,337,-303,-255,-57,-183,-170,-165,337,-229,-134,-133,-145,-150,-149,-153,-155,-44,-43,447,-217,-272,-271,-270,-269,337,-286,-268,454,455,-281,-181,-182,-238,-250,-239,-237,-241,-245,-240,-236,-243,-248,-234,-233,-242,-249,-244,-246,337,-247,-235,-33,-32,-191,-185,-187,-189,-214,337,337,337,-142,-144,-39,-42,-172,-174,-258,-266,-267,-252,-51,-50,-186,-188,-190,-41,-40,-173,-287,502,-231,-46,-49,337,-175,-282,-275,-48,-47,337,-276,]),'OFFSETOF':([3,35,51,53,59,70,76,78,7
9,101,104,106,107,108,121,136,141,144,145,148,156,157,163,164,166,168,170,171,173,181,182,183,195,199,200,203,204,205,211,212,214,215,216,217,219,221,222,225,233,235,246,249,250,251,256,260,261,262,263,264,265,266,267,268,269,270,271,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,355,361,362,363,366,370,372,374,389,416,417,418,424,426,427,429,431,433,436,447,450,454,456,459,460,462,463,464,470,472,473,474,475,476,480,481,493,494,495,497,501,502,506,509,514,515,516,518,519,520,],[-115,-116,-117,-68,-303,-301,-28,-160,-27,-81,-69,162,-28,-303,162,-161,-303,162,162,-264,162,-262,162,-261,162,-260,162,162,-259,-263,162,162,162,-73,-76,-72,162,-74,162,162,-78,-193,-192,-77,-194,162,-75,-260,-302,162,162,162,-28,-303,-303,-221,-224,-222,-218,-219,-223,-225,162,-227,-228,-220,-226,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,-303,-260,162,-212,-211,162,-209,162,162,162,-195,162,-208,-196,162,162,162,-260,162,162,-12,162,162,-11,162,162,-28,-303,-260,-207,-210,162,-199,162,-197,-303,-176,162,162,-303,162,-260,162,162,162,162,-198,162,162,162,162,-11,162,-203,-202,-200,162,-303,162,162,162,-204,-201,162,-206,-205,]),'TYPEDEF':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,37,39,40,41,42,44,45,47,48,49,50,51,53,54,60,61,63,64,67,68,69,70,71,73,74,80,83,87,91,92,96,101,104,105,113,120,121,122,142,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,231,233,245,254,282,289,323,327,332,333,335,336,342,345,347,350,351,356,357,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[7,7,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,7,-94,-109,-104,-65,-93,-110,7,-215,-107,-111,7,-63,-116,-29,-105,-62,-101,-67,-112,-106,7,-108,7,-103,-117,-68,-98,7,-53,7,-82,7,-61,-131,-301,-130,-147,-146,-88,-90,7,-87,-89,-92,-81,-69,-30,7,-70,7,-83,7,-36,-35,7,7,-73,-76,-72,-74,7,-78,-193,-192,-77,-194,-75,-132,-302,-148,-71,-31,-34,7,7,-212,-211,7,-209,-195,-208,-196,-134,-133,-150,-149,7,-33,-32,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'XOR':([115,147,149,150,151,152,153,154,155,158,159,161,167,169,172,174,175,176,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,451,452,453,458,499,503,512,],[-288,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,-232,-291,-265,-297,-289,-277,303,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-238,303,-239,-237,-241,-245,-240,-236,-243,-248,-234,-233,-242,303,-244,-246,303,-235,-258,-266,-267,-252,-282,-275,-276,]),'AUTO':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,37,39,40,41,42,44,45,47,48,49,50,51,53,54,60,61,63,64,67,68,69,70,71,73,74,80,83,87,91,92,96,101,104,105,113,120,121,122,142,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,231,233,245,254,282,289,323,327,332,333,335,336,342,345,347,350,351,356,357,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[24,24,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,24,-94,-109,-104,-65,-93,-110,24,-215,-107,-111,24,-63,-116,-29,-105,-62,-101,-67,-112,-106,24,-108,24,-103,-117,-68,-98,24,-53,24,-82,24,-61,-131,-301,-130,-147,-146,-88,-90,24,-87,-89,-92,-81,-69,-30,24,-70,24,-83,24,-36,-35,24,24,-73,-76,-72,-74,24,-78,-193,-192,-77,-194,-75,-132,-302,-148,-71,-31,-34,24,24,-21
2,-211,24,-209,-195,-208,-196,-134,-133,-150,-149,24,-33,-32,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'TIMES':([0,1,2,3,4,5,6,7,9,10,11,12,14,15,16,17,19,20,21,23,24,25,26,29,30,31,32,33,34,35,36,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,59,63,65,68,69,70,71,73,74,76,77,78,79,80,81,83,91,92,96,101,102,103,104,106,107,108,113,115,120,121,125,130,136,139,141,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,161,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,181,182,183,186,195,199,200,203,204,205,211,212,214,215,216,217,219,221,222,225,226,230,231,233,234,235,240,245,246,249,250,251,254,256,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,278,280,281,284,285,286,287,288,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,350,351,353,355,356,357,361,362,363,366,370,372,374,377,378,379,380,383,388,389,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,416,417,418,424,426,427,429,431,433,436,447,450,451,452,453,454,456,458,459,460,462,463,464,470,472,473,474,475,476,480,481,493,494,495,497,499,501,502,503,506,509,512,514,515,516,518,519,520,],[31,-303,-102,-115,31,-113,-99,-97,-95,-114,-96,-64,-60,-100,-91,-66,-94,-109,-104,-65,-93,-110,31,-215,-107,-303,-111,31,-63,-116,31,-105,-62,-101,-67,-112,-106,-303,-108,-303,-103,-117,-68,-98,-85,-10,-9,-303,31,31,-61,-131,-301,-130,-147,-146,-28,31,-160,-27,-88,31,-90,-87,-89,-92,-81,-84,-86,-69,168,-28,-303,31,-288,-70,225,-128,31,-161,31,-303,225,225,31,-292,-264,-251,-280,-295,-299,-296,-293,-278,225,-262,-279,-253,-232,225,-261,225,-291,-260,-265,225,225,-297,-259,-289,-277,305,-294,-290,-263,225,225,31,325,-73,-76,-72,225,-74,225,225,-78,-193,-192,-77,-194,225,-75,-260,-288,-129,-132,-302,-126,225,-127,-148,225,361,-28,-303,-71,-303,-221,-224,-222,-218,-219,-223,-225,225,-227,-228,-220,-226,-300,225,-257,-298,-274,-273,225,225,225,-251,-256,225,-254,31,-255,225,225,225,225,225,225,225,225,225,225,225,225,225,225,225,225,225,225,225,-303,-260,424,-212,-211,225,-209,225,225,225,-195,225,-208,-196,225,225,-134,-133,31,225,-150,-149,-260,225,225,-12,225,225,-11,-272,-271,-270,-269,-268,-281,225,31,305,305,305,305,305,305,305,305,305,305,-234,-233,305,305,305,305,305,-235,462,-28,-303,-260,-207,-210,225,-199,225,-197,-303,-176,-258,-266,-267,225,225,-252,-303,225,-260,225,225,225,225,-198,225,225,225,225,-11,225,-203,-202,-200,-282,225,-303,-275,225,225,-276,225,-204,-201,225,-206,-205,]),'LPAREN':([0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,19,20,21,23,24,25,26,29,30,31,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,59,61,63,65,68,69,70,71,73,74,76,77,78,79,80,81,83,84,85,88,91,92,96,101,102,103,104,105,106,107,108,113,115,120,121,125,130,136,137,139,140,141,144,145,146,147,148,150,151,152,153,154,155,156,157,158,159,162,163,164,166,167,168,169,170,171,172,173,174,175,177,178,181,182,183,184,185,186,189,190,193,195,196,199,200,203,204,205,206,210,211,212,213,214,215,216,217,219,221,222,225,226,227,230,231,233,234,235,240,245,246,247,249,250,251,254,256,260,261,262,263,264,265,266,267,268,269,270,271,272,273,275,276,278,280,281,282,284,287,289,292,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,315,320,321,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,350,351,353,355,356,357,361,362,363,364,365,366,370,372,374,377,378,379,380,383,388,389,391,392,412,413,414,415,416,417,418,421,422,424,426,427,429,
431,433,435,436,440,441,447,450,452,453,454,456,459,460,462,463,464,465,466,467,468,469,470,472,473,474,475,476,477,478,480,481,488,489,493,494,495,497,499,501,502,503,504,505,506,509,512,514,515,516,518,519,520,],[4,-303,-102,-115,4,-113,-99,-97,60,-95,-114,-96,-64,4,-60,-100,-91,-66,-94,-109,-104,-65,-93,-110,4,-215,-107,-303,-111,81,-63,-116,4,-29,-105,-62,-101,-67,-112,-106,-303,-108,-303,-103,-117,-68,-98,-85,-10,-9,-303,60,81,4,-61,-131,-301,-130,-147,-146,-28,-158,-160,-27,-88,81,-90,81,142,-37,-87,-89,-92,-81,-84,-86,-69,-30,170,-28,-303,186,-288,-70,170,-128,81,-161,-159,81,142,-303,170,170,81,-292,-264,-280,-295,-299,-296,-293,-278,273,-262,-279,281,283,284,-261,287,-291,-260,-265,170,287,-297,-259,-289,-277,-294,-290,-263,170,170,-36,-35,186,186,323,-45,170,327,-73,-76,-72,170,-74,335,339,284,170,344,-78,-193,-192,-77,-194,170,-75,-260,-288,349,-129,-132,-302,-126,284,-127,-148,284,-38,170,-28,-303,-71,-303,-221,-224,-222,-218,-219,-223,-225,170,-227,-228,-220,-226,-300,170,-298,-274,-273,170,170,-31,170,170,-34,392,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,170,284,284,186,323,327,-303,-260,170,-212,-211,170,-209,170,170,170,-195,170,-208,-196,170,170,-134,-133,81,284,-150,-149,-260,170,170,-44,-43,-12,284,170,-11,-272,-271,-270,-269,-268,-281,284,392,392,-33,-32,-191,-185,170,-28,-303,-187,-189,-260,-207,-210,170,-199,170,475,-197,-39,-42,-303,-176,-266,-267,170,284,-303,284,-260,170,170,-51,-50,-186,-188,-190,170,170,-198,170,170,170,-41,-40,170,-11,-46,-49,170,-203,-202,-200,-282,170,-303,-275,-48,-47,170,170,-276,170,-204,-201,170,-206,-205,]),'MINUSMINUS':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,115,121,136,141,144,145,147,148,150,151,152,153,154,155,156,157,158,159,163,164,166,167,168,169,170,171,172,173,174,175,177,178,181,182,183,195,199,200,203,204,205,211,212,214,215,216,217,219,221,222,225,226,233,235,246,249,250,251,256,260,261,262,263,264,265,266,267,268,269,270,271,272,273,275,276,278,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,355,361,362,363,366,370,372,374,377,378,379,380,383,388,389,416,417,418,424,426,427,429,431,433,436,447,450,452,453,454,456,459,460,462,463,464,470,472,473,474,475,476,480,481,493,494,495,497,499,501,502,503,506,509,512,514,515,516,518,519,520,],[-115,-116,-117,-68,-303,-301,-28,-160,-27,-81,-69,171,-28,-303,-288,171,-161,-303,171,171,-292,-264,-280,-295,-299,-296,-293,-278,171,-262,-279,276,171,-261,171,-291,-260,-265,171,171,-297,-259,-289,-277,-294,-290,-263,171,171,171,-73,-76,-72,171,-74,171,171,-78,-193,-192,-77,-194,171,-75,-260,-288,-302,171,171,171,-28,-303,-303,-221,-224,-222,-218,-219,-223,-225,171,-227,-228,-220,-226,-300,171,-298,-274,-273,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,-303,-260,171,-212,-211,171,-209,171,171,171,-195,171,-208,-196,171,171,171,-260,171,171,-12,171,171,-11,-272,-271,-270,-269,-268,-281,171,171,-28,-303,-260,-207,-210,171,-199,171,-197,-303,-176,-266,-267,171,171,-303,171,-260,171,171,171,171,-198,171,171,171,171,-11,171,-203,-202,-200,-282,171,-303,-275,171,171,-276,171,-204,-201,171,-206,-205,]),'ID':([0,1,2,3,4,5,6,7,9,10,11,12,13,14,15,16,17,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,38,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,59,60,63,65,68,69,70,71,73,74,75,76,77,78,79,80,81,83,84,91,92,96,101,102,103,104,106,107,108,113,120,121,125,130,131,132,136,137,139,141,142,144,145,146,148,156,157,163,
164,166,168,170,171,173,181,182,183,186,189,195,197,199,200,201,203,204,205,211,212,214,215,216,217,219,221,222,225,230,231,233,234,235,240,244,245,246,249,250,251,254,256,260,261,262,263,264,265,266,267,268,269,270,271,273,277,279,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,315,322,323,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,350,351,353,355,356,357,361,362,363,366,369,370,372,374,389,416,417,418,424,426,427,429,431,433,436,447,450,454,455,456,459,460,462,463,464,470,472,473,474,475,476,480,481,493,494,495,497,500,501,502,506,509,514,515,516,518,519,520,],[37,-303,-102,-115,37,-113,-99,-97,-95,-114,-96,-64,37,-60,-100,-91,-66,-94,-109,-104,-136,-65,-93,-110,37,71,74,-215,-107,-303,-111,37,-63,-116,37,-135,-105,-62,-101,-67,-112,-106,-303,-108,-303,-103,-117,-68,-98,-85,-10,-9,-303,115,37,37,-61,-131,-301,-130,-147,-146,135,-28,-158,-160,-27,-88,37,-90,37,-87,-89,-92,-81,-84,-86,-69,115,-28,-303,37,-70,226,-128,37,135,135,-161,-159,37,-303,115,115,115,37,-264,115,-262,115,-261,115,-260,115,115,-259,-263,115,115,37,37,115,115,-73,-76,331,-72,115,-74,115,226,-78,-193,-192,-77,-194,226,-75,-260,-129,-132,-302,-126,115,-127,135,-148,115,115,-28,-303,-71,-303,-221,-224,-222,-218,-219,-223,-225,115,-227,-228,-220,-226,115,378,380,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,37,-303,115,-260,115,-212,-211,115,-209,115,226,115,-195,115,-208,-196,226,115,-134,-133,37,115,-150,-149,-260,115,115,-12,115,115,115,-11,115,115,-28,-303,-260,-207,-210,115,-199,226,-197,-303,-176,115,115,115,-303,115,-260,115,115,115,226,-198,226,115,226,115,-11,115,-203,-202,-200,115,115,-303,115,226,226,-204,-201,226,-206,-205,]),'IF':([53,70,101,104,121,199,200,203,205,212,214,215,216,217,219,221,222,233,332,333,336,338,342,345,347,348,426,427,431,433,436,472,473,474,476,494,495,497,509,514,515,516,518,519,520,],[-68,-301,-81,-69,227,-73,-76,-72,-74,227,-78,-193,-192,-77,-194,227,-75,-302,-212,-211,-209,227,-195,-208,-196,227,-207,-210,-199,227,-197,227,-198,227,227,-203,-202,-200,227,227,-204,-201,227,-206,-205,]),'STRING_LITERAL':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,136,141,144,145,148,156,157,158,163,164,166,168,170,171,172,173,181,182,183,195,199,200,203,204,205,211,212,214,215,216,217,219,221,222,225,233,235,246,249,250,251,256,260,261,262,263,264,265,266,267,268,269,270,271,273,275,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,355,361,362,363,366,370,372,374,389,416,417,418,424,426,427,429,431,433,436,447,450,454,456,459,460,462,463,464,470,472,473,474,475,476,480,481,493,494,495,497,501,502,506,509,514,515,516,518,519,520,],[-115,-116,-117,-68,-303,-301,-28,-160,-27,-81,-69,172,-28,-303,172,-161,-303,172,172,-264,172,-262,275,172,-261,172,-260,172,172,-297,-259,-263,172,172,172,-73,-76,-72,172,-74,172,172,-78,-193,-192,-77,-194,172,-75,-260,-302,172,172,172,-28,-303,-303,-221,-224,-222,-218,-219,-223,-225,172,-227,-228,-220,-226,172,-298,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,-303,-260,172,-212,-211,172,-209,172,172,172,-195,172,-208,-196,172,172,172,-260,172,172,-12,172,172,-11,172,172,-28,-303,-260,-207,-210,172,-199,172,-197,-303,-176,172,172,-303,172,-260,172,172,172,172,-198,172,172,172,172,-11,172,-203,-202,-200,172,-303,172,172,172,-204,-201,172,-206,-205,]),'FLOAT':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23
,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,136,142,170,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,228,229,230,231,232,233,234,240,245,254,273,282,283,284,287,289,292,323,327,332,333,335,336,342,345,347,350,351,354,356,357,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[39,-303,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,39,-94,-109,-104,-65,-93,-110,39,-215,-107,-111,39,-63,-116,39,-29,-105,-62,-101,-67,-112,-106,-303,-108,-303,-103,-117,-68,-98,-85,-10,-9,39,-53,39,-82,39,39,-61,-131,-301,-130,39,-147,-146,-160,-88,-90,39,-87,-89,-92,-81,-84,-86,-69,-30,39,39,-70,39,-83,39,39,-128,-140,-137,39,39,39,-161,39,39,-36,-35,39,39,-73,-76,-72,-74,39,-78,-193,-192,-77,-194,-75,39,39,-129,-132,-138,-302,-126,-127,-148,-71,39,-31,39,39,39,-34,39,39,39,-212,-211,39,-209,-195,-208,-196,-134,-133,-139,-150,-149,39,-33,-32,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'XOREQUAL':([115,147,149,150,151,152,153,154,155,158,159,167,169,172,174,175,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,451,452,453,458,499,503,512,],[-288,-292,264,-280,-295,-299,-296,-293,-278,-279,-253,-291,-265,-297,-289,-277,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-258,-266,-267,-252,-282,-275,-276,]),'LSHIFTEQUAL':([115,147,149,150,151,152,153,154,155,158,159,167,169,172,174,175,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,451,452,453,458,499,503,512,],[-288,-292,266,-280,-295,-299,-296,-293,-278,-279,-253,-291,-265,-297,-289,-277,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-258,-266,-267,-252,-282,-275,-276,]),'RBRACKET':([3,35,51,59,78,79,106,107,115,136,141,147,149,150,151,152,153,154,155,158,159,160,161,165,167,168,169,172,174,175,176,177,178,179,180,195,224,233,249,250,272,274,275,276,278,285,286,288,293,313,314,322,324,325,326,341,360,361,375,377,378,379,380,381,383,388,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,416,417,423,424,430,442,443,446,451,452,453,458,461,462,487,490,491,499,503,511,512,],[-115,-116,-117,-303,-160,-27,-303,-28,-288,-161,-303,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,282,-232,-4,-291,289,-265,-297,-289,-277,-230,-294,-290,-3,-216,-303,-213,-302,-303,-28,-300,-257,-298,-274,-273,-251,-256,-254,-255,412,413,-303,421,422,-303,-229,440,441,-217,-272,-271,-270,-269,452,-268,-281,-238,-250,-239,-237,-241,-245,-240,-236,-243,-248,-234,-233,-242,-249,-244,-246,-247,-235,-303,-28,467,468,-214,477,478,479,-258,-266,-267,-252,488,489,-231,504,505,-282,-275,517,-276,]),}
+_lr_action_items = {'VOID':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,131,132,133,139,145,173,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,231,232,233,234,235,236,237,238,244,249,258,277,286,287,288,291,293,296,327,331,336,337,339,340,346,349,351,352,353,356,357,360,362,363,398,418,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[6,-309,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,6,-96,-111,-106,-65,-95,-112,6,-221,-109,-113,6,-63,-118,6,-29,-107,-62,-103,-67,-114,-108,-309,-110,-309,-105,-119,-68,-100,-87,-10,-9,6,-53,6,-84,6,6,-61,-133,-307,-132,6,-153,-152,-166,-90,-92,6,-89,-91,-94,-83,-86,-88,-69,-30,6,6,-70,6,-85,6,6,6,-135,-130,-145,-146,-142,-308,6,6,-167,6,6,-36,-35,6,6,-73,-76,-72,-74,6,-78,-199,-198,-77,-200,-75,6,-139,6,-137,-134,-143,-131,-128,-129,-154,-71,6,-31,6,6,6,-34,6,6,6,-218,-217,6,-215,-201,-214,-78,-80,-202,-138,-136,-144,-156,-155,6,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'LBRACKET':([2,3,5,6,7,8,9,10,11,15,16,19,20,21,24,25,29,30,31,32,35,37,39,41,44,45,48,50,51,54,61,69,70,71,73,74,76,77,78,79,80,83,85,88,91,92,96,105,113,115,126,127,131,139,140,143,150,153,154,155,156,157,158,161,162,170,172,175,177,178,180,181,187,188,189,192,193,196,199,229,232,234,235,237,238,244,249,251,260,276,279,280,282,286,293,296,319,324,325,356,357,362,363,370,371,374,379,383,384,385,386,389,394,397,398,418,419,420,421,427,428,447,448,452,454,456,459,460,466,472,473,474,475,476,484,485,486,491,492,495,496,507,510,511,512,513,518,520,525,],[-104,-117,-115,-101,-99,59,-97,-116,-98,-102,-93,-96,-111,-106,-95,-112,-221,-109,-309,-113,-118,-29,-107,-103,-114,-108,-110,-105,-119,-100,59,-133,-307,-132,-153,-152,-28,-164,-166,-27,-90,-92,144,-37,-89,-91,-94,-30,198,-294,-135,-130,-308,-167,-165,144,-298,-286,-301,-305,-302,-299,-284,-285,284,-297,-271,-303,-295,-283,-300,-296,-36,-35,198,198,326,-45,330,-294,-139,-137,-134,-131,-128,-129,-154,-38,376,-306,-304,-280,-279,-31,-34,198,198,326,330,-138,-136,-156,-155,-44,-43,-183,376,-278,-277,-276,-275,-274,-287,198,198,-33,-32,-197,-191,-193,-195,-39,-42,-186,376,-184,-272,-273,376,-51,-50,-192,-194,-196,-41,-40,-185,509,-289,-46,-49,-288,376,-281,-48,-47,-290,-282,-291,]),'WCHAR_CONST':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,131,139,144,147,148,151,159,160,166,167,169,171,173,174,176,184,185,186,198,202,203,206,207,208,214,215,217,218,219,220,222,224,225,228,239,250,253,254,255,260,264,265,266,267,268,269,270,271,272,273,274,275,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,361,367,368,369,372,376,378,380,395,422,423,424,430,432,433,435,437,439,442,443,454,457,461,463,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,504,505,509,510,514,517,522,523,524,526,527,528,],[-117,-118,-119,-68,-309,-307,-28,-166,-27,-83,-69,156,-28,-309,156,-308,-167,-309,156,156,-270,156,-268,156,-267,156,-266,156,156,-265,-269,156,156,156,-73,-76,-72,156,-74,156,156,-78,-199,-198,-77,-200,156,-75,-266,156,156,156,-28,-309,-309,-227,-230,-228,-224,-225,-229,-231,156,-233,-234,-226,-232,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,-309,-266,156,-218,-217,156,-215,156
,156,156,-201,156,-214,156,-80,-202,156,156,156,-266,156,156,-12,156,156,-11,156,156,-28,-309,-266,-213,-216,156,-205,156,-79,-203,-309,-182,156,156,-309,156,-266,156,156,156,156,-204,156,156,156,156,-11,156,-209,-208,-206,-80,156,-309,156,156,156,-210,-207,156,-212,-211,]),'FLOAT_CONST':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,131,139,144,147,148,151,159,160,166,167,169,171,173,174,176,184,185,186,198,202,203,206,207,208,214,215,217,218,219,220,222,224,225,228,239,250,253,254,255,260,264,265,266,267,268,269,270,271,272,273,274,275,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,361,367,368,369,372,376,378,380,395,422,423,424,430,432,433,435,437,439,442,443,454,457,461,463,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,504,505,509,510,514,517,522,523,524,526,527,528,],[-117,-118,-119,-68,-309,-307,-28,-166,-27,-83,-69,157,-28,-309,157,-308,-167,-309,157,157,-270,157,-268,157,-267,157,-266,157,157,-265,-269,157,157,157,-73,-76,-72,157,-74,157,157,-78,-199,-198,-77,-200,157,-75,-266,157,157,157,-28,-309,-309,-227,-230,-228,-224,-225,-229,-231,157,-233,-234,-226,-232,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,-309,-266,157,-218,-217,157,-215,157,157,157,-201,157,-214,157,-80,-202,157,157,157,-266,157,157,-12,157,157,-11,157,157,-28,-309,-266,-213,-216,157,-205,157,-79,-203,-309,-182,157,157,-309,157,-266,157,157,157,157,-204,157,157,157,157,-11,157,-209,-208,-206,-80,157,-309,157,157,157,-210,-207,157,-212,-211,]),'MINUS':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,115,121,131,139,144,147,148,150,151,152,153,154,155,156,157,158,159,160,161,162,164,166,167,169,170,171,172,173,174,175,176,177,178,179,180,181,184,185,186,198,202,203,206,207,208,214,215,217,218,219,220,222,224,225,228,229,239,250,253,254,255,260,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,282,284,285,288,289,290,291,292,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,361,367,368,369,372,376,378,380,383,384,385,386,389,394,395,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,422,423,424,430,432,433,435,437,439,442,443,454,457,458,459,460,461,463,465,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,504,505,507,509,510,511,514,517,520,522,523,524,526,527,528,],[-117,-118,-119,-68,-309,-307,-28,-166,-27,-83,-69,160,-28,-309,-294,160,-308,-167,-309,160,160,-298,-270,-257,-286,-301,-305,-302,-299,-284,160,-268,-285,-259,-238,160,-267,160,-297,-266,-271,160,160,-303,-265,-295,-283,301,-300,-296,-269,160,160,160,-73,-76,-72,160,-74,160,160,-78,-199,-198,-77,-200,160,-75,-266,-294,160,160,160,-28,-309,-309,-227,-230,-228,-224,-225,-229,-231,160,-233,-234,-226,-232,-306,160,-263,-304,-280,-279,160,160,160,-257,-262,160,-260,-261,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,-309,-266,160,-218,-217,160,-215,160,160,160,-201,160,-214,160,-80,-202,160,160,160,-266,160,160,-12,160,160,-11,-278,-277,-276,-275,-274,-287,160,301,301,301,-243,301,301,301,-242,301,301,-240,-239,301,301,301,301,301,-241,160,-28,-309,-266,-213,-216,160,-205,160,-79,-203,-309,-182,-264,-272,-273,160,160,-258,-309,160,-266,160,160,160,160,-204,160,160,160,160,-11,160,-209,-208,-206,-80,-288,160,-309,-281,160,160,-282,160,-210,-207,160,-212,-211,]),'RPAREN':([2,3,5,6,7,8,9,10,1
1,15,16,19,20,21,24,25,29,30,31,32,35,37,39,41,44,45,48,50,51,54,58,60,61,69,71,73,74,76,77,78,79,80,83,85,88,91,92,96,105,109,110,111,112,113,114,115,116,118,126,127,131,139,140,141,143,145,150,152,153,154,155,156,157,158,161,162,164,170,172,175,177,178,179,180,181,183,187,188,189,190,191,192,193,194,195,196,197,199,211,227,232,234,235,237,238,244,249,251,256,257,276,278,279,280,282,285,286,289,290,292,293,294,295,296,297,319,320,321,322,323,324,325,327,331,332,333,334,347,356,357,362,363,370,371,381,382,383,384,385,386,388,389,390,392,393,394,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,418,419,420,421,425,426,427,428,431,436,438,440,444,447,448,458,459,460,465,472,473,474,475,476,484,485,490,491,492,494,495,496,500,503,507,511,512,513,514,515,518,520,521,525,],[-104,-117,-115,-101,-99,-52,-97,-116,-98,-102,-93,-96,-111,-106,-95,-112,-221,-109,-309,-113,-118,-29,-107,-103,-114,-108,-110,-105,-119,-100,105,-309,-53,-133,-132,-153,-152,-28,-164,-166,-27,-90,-92,-54,-37,-89,-91,-94,-30,187,-17,188,-170,-309,-18,-294,-168,-175,-135,-130,-308,-167,-165,251,-55,-309,-298,-257,-286,-301,-305,-302,-299,-284,-285,-259,-238,-297,-271,-303,-295,-283,-236,-300,-296,-222,-36,-35,-309,-174,-2,-188,-56,-172,-1,-45,-173,-190,-14,-219,-139,-137,-134,-131,-128,-129,-154,-38,370,371,-306,-263,-304,-280,-279,389,-31,-257,-262,-260,-34,394,395,-309,-261,-188,-23,-24,420,421,-57,-189,-309,-309,-176,-169,-171,-13,-138,-136,-156,-155,-44,-43,-223,458,-278,-277,-276,-275,-292,-274,460,463,464,-287,-187,-188,-309,-244,-256,-245,-243,-247,-251,-246,-242,-249,-254,-240,-239,-248,-255,-250,-252,-253,-241,-33,-32,-197,-191,472,473,-193,-195,476,-220,479,481,483,-39,-42,-264,-272,-273,-258,-51,-50,-192,-194,-196,-41,-40,-293,507,-289,-237,-46,-49,-309,516,-288,-281,-48,-47,-309,522,-290,-282,526,-291,]),'LONG':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,131,132,133,139,145,173,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,231,232,233,234,235,236,237,238,244,249,258,277,286,287,288,291,293,296,327,331,336,337,339,340,346,349,351,352,353,356,357,360,362,363,398,418,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[21,-309,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,21,-96,-111,-106,-65,-95,-112,21,-221,-109,-113,21,-63,-118,21,-29,-107,-62,-103,-67,-114,-108,-309,-110,-309,-105,-119,-68,-100,-87,-10,-9,21,-53,21,-84,21,21,-61,-133,-307,-132,21,-153,-152,-166,-90,-92,21,-89,-91,-94,-83,-86,-88,-69,-30,21,21,-70,21,-85,21,21,21,-135,-130,-145,-146,-142,-308,21,21,-167,21,21,-36,-35,21,21,-73,-76,-72,-74,21,-78,-199,-198,-77,-200,-75,21,-139,21,-137,-134,-143,-131,-128,-129,-154,-71,21,-31,21,21,21,-34,21,21,21,-218,-217,21,-215,-201,-214,-78,-80,-202,-138,-136,-144,-156,-155,21,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'PLUS':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,115,121,131,139,144,147,148,150,151,152,153,154,155,156,157,158,159,160,161,162,164,166,167,169,170,171,172,173,174,175,176,177,178,179,180,181,184,185,186,198,202,203,206,207,208,214,215,217,218,219,220,222,224,225,228,229,239,250,253,254,255,260,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,282,284,285,288,289,290,291,292,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,31
4,315,316,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,361,367,368,369,372,376,378,380,383,384,385,386,389,394,395,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,422,423,424,430,432,433,435,437,439,442,443,454,457,458,459,460,461,463,465,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,504,505,507,509,510,511,514,517,520,522,523,524,526,527,528,],[-117,-118,-119,-68,-309,-307,-28,-166,-27,-83,-69,167,-28,-309,-294,167,-308,-167,-309,167,167,-298,-270,-257,-286,-301,-305,-302,-299,-284,167,-268,-285,-259,-238,167,-267,167,-297,-266,-271,167,167,-303,-265,-295,-283,305,-300,-296,-269,167,167,167,-73,-76,-72,167,-74,167,167,-78,-199,-198,-77,-200,167,-75,-266,-294,167,167,167,-28,-309,-309,-227,-230,-228,-224,-225,-229,-231,167,-233,-234,-226,-232,-306,167,-263,-304,-280,-279,167,167,167,-257,-262,167,-260,-261,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,-309,-266,167,-218,-217,167,-215,167,167,167,-201,167,-214,167,-80,-202,167,167,167,-266,167,167,-12,167,167,-11,-278,-277,-276,-275,-274,-287,167,305,305,305,-243,305,305,305,-242,305,305,-240,-239,305,305,305,305,305,-241,167,-28,-309,-266,-213,-216,167,-205,167,-79,-203,-309,-182,-264,-272,-273,167,167,-258,-309,167,-266,167,167,167,167,-204,167,167,167,167,-11,167,-209,-208,-206,-80,-288,167,-309,-281,167,167,-282,167,-210,-207,167,-212,-211,]),'ELLIPSIS':([201,],[333,]),'GT':([115,131,150,152,153,154,155,156,157,158,161,162,164,170,172,175,177,178,179,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,458,459,460,465,507,511,520,],[-294,-308,-298,-257,-286,-301,-305,-302,-299,-284,-285,-259,-238,-297,-271,-303,-295,-283,306,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-244,306,-245,-243,-247,306,-246,-242,-249,306,-240,-239,-248,306,306,306,306,-241,-264,-272,-273,-258,-288,-281,-282,]),'GOTO':([53,70,101,104,121,131,202,203,206,208,215,217,218,219,220,222,224,225,336,337,340,342,346,349,351,352,353,354,432,433,437,439,442,443,479,480,481,483,501,502,504,505,517,522,523,524,526,527,528,],[-68,-307,-83,-69,204,-308,-73,-76,-72,-74,204,-78,-199,-198,-77,-200,204,-75,-218,-217,-215,204,-201,-214,204,-80,-202,204,-213,-216,-205,204,-79,-203,204,-204,204,204,-209,-208,-206,-80,204,204,-210,-207,204,-212,-211,]),'ENUM':([0,1,3,7,8,9,11,12,14,17,18,19,23,24,26,34,35,36,37,40,42,47,49,51,53,54,55,56,57,60,61,64,65,67,68,70,72,78,87,101,102,103,104,105,117,120,121,122,123,124,125,128,129,130,131,132,139,145,173,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,231,233,236,258,277,286,287,288,291,293,327,331,336,337,339,340,346,349,351,352,353,360,398,418,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[28,-309,-117,-99,-52,-97,-98,-64,-60,-66,28,-96,-65,-95,28,-63,-118,28,-29,-62,-67,-309,-309,-119,-68,-100,-87,-10,-9,28,-53,-84,28,28,-61,-307,28,-166,28,-83,-86,-88,-69,-30,28,-70,28,-85,28,28,28,-145,-146,-142,-308,28,-167,28,28,-36,-35,28,28,-73,-76,-72,-74,28,-78,-199,-198,-77,-200,-75,28,28,-143,-71,28,-31,28,28,28,-34,28,28,-218,-217,28,-215,-201,-214,-78,-80,-202,-144,28,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'PERIOD':([70,115,131,150,153,154,155,156,157,158,161,162,170,172,175,177,178,180,181,229,260,276,279,280,282,374,379,383,384,385,386,389,394,452,454,456,459,460,466,486,491,492,507,510,511,518,520,525,],[-307,-294,-308,-298,-286,-301,-305,-
302,-299,-284,-285,283,-297,-271,-303,-295,-283,-300,-296,-294,375,-306,-304,-280,-279,-183,375,-278,-277,-276,-275,-274,-287,-186,375,-184,-272,-273,375,-185,508,-289,-288,375,-281,-290,-282,-291,]),'GE':([115,131,150,152,153,154,155,156,157,158,161,162,164,170,172,175,177,178,179,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,458,459,460,465,507,511,520,],[-294,-308,-298,-257,-286,-301,-305,-302,-299,-284,-285,-259,-238,-297,-271,-303,-295,-283,310,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-244,310,-245,-243,-247,310,-246,-242,-249,310,-240,-239,-248,310,310,310,310,-241,-264,-272,-273,-258,-288,-281,-282,]),'INT_CONST_DEC':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,131,139,144,147,148,151,159,160,166,167,169,171,173,174,176,184,185,186,198,202,203,206,207,208,214,215,217,218,219,220,222,224,225,228,239,250,253,254,255,260,264,265,266,267,268,269,270,271,272,273,274,275,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,361,367,368,369,372,376,378,380,395,422,423,424,430,432,433,435,437,439,442,443,454,457,461,463,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,504,505,509,510,514,517,522,523,524,526,527,528,],[-117,-118,-119,-68,-309,-307,-28,-166,-27,-83,-69,177,-28,-309,177,-308,-167,-309,177,177,-270,177,-268,177,-267,177,-266,177,177,-265,-269,177,177,177,-73,-76,-72,177,-74,177,177,-78,-199,-198,-77,-200,177,-75,-266,177,177,177,-28,-309,-309,-227,-230,-228,-224,-225,-229,-231,177,-233,-234,-226,-232,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,-309,-266,177,-218,-217,177,-215,177,177,177,-201,177,-214,177,-80,-202,177,177,177,-266,177,177,-12,177,177,-11,177,177,-28,-309,-266,-213,-216,177,-205,177,-79,-203,-309,-182,177,177,-309,177,-266,177,177,177,177,-204,177,177,177,177,-11,177,-209,-208,-206,-80,177,-309,177,177,177,-210,-207,177,-212,-211,]),'ARROW':([115,131,150,153,154,155,156,157,158,161,162,170,172,175,177,178,180,181,229,276,279,280,282,383,384,385,386,389,394,459,460,507,511,520,],[-294,-308,-298,-286,-301,-305,-302,-299,-284,-285,281,-297,-271,-303,-295,-283,-300,-296,-294,-306,-304,-280,-279,-278,-277,-276,-275,-274,-287,-272,-273,-288,-281,-282,]),'CHAR':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,131,132,133,139,145,173,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,231,232,233,234,235,236,237,238,244,249,258,277,286,287,288,291,293,296,327,331,336,337,339,340,346,349,351,352,353,356,357,360,362,363,398,418,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[41,-309,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,41,-96,-111,-106,-65,-95,-112,41,-221,-109,-113,41,-63,-118,41,-29,-107,-62,-103,-67,-114,-108,-309,-110,-309,-105,-119,-68,-100,-87,-10,-9,41,-53,41,-84,41,41,-61,-133,-307,-132,41,-153,-152,-166,-90,-92,41,-89,-91,-94,-83,-86,-88,-69,-30,41,41,-70,41,-85,41,41,41,-135,-130,-145,-146,-142,-308,41,41,-167,41,41,-36,-35,41,41,-73,-76,-72,-74,41,-78,-199,-198,-77,-200,-75,41,-139,41,-137,-134,-143,-131,-128,-129,-154,-71,41,-31,41,41,41,-34,41,41,41,-218,-217,41,-215,-
201,-214,-78,-80,-202,-138,-136,-144,-156,-155,41,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'HEX_FLOAT_CONST':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,131,139,144,147,148,151,159,160,166,167,169,171,173,174,176,184,185,186,198,202,203,206,207,208,214,215,217,218,219,220,222,224,225,228,239,250,253,254,255,260,264,265,266,267,268,269,270,271,272,273,274,275,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,361,367,368,369,372,376,378,380,395,422,423,424,430,432,433,435,437,439,442,443,454,457,461,463,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,504,505,509,510,514,517,522,523,524,526,527,528,],[-117,-118,-119,-68,-309,-307,-28,-166,-27,-83,-69,180,-28,-309,180,-308,-167,-309,180,180,-270,180,-268,180,-267,180,-266,180,180,-265,-269,180,180,180,-73,-76,-72,180,-74,180,180,-78,-199,-198,-77,-200,180,-75,-266,180,180,180,-28,-309,-309,-227,-230,-228,-224,-225,-229,-231,180,-233,-234,-226,-232,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,-309,-266,180,-218,-217,180,-215,180,180,180,-201,180,-214,180,-80,-202,180,180,180,-266,180,180,-12,180,180,-11,180,180,-28,-309,-266,-213,-216,180,-205,180,-79,-203,-309,-182,180,180,-309,180,-266,180,180,180,180,-204,180,180,180,180,-11,180,-209,-208,-206,-80,180,-309,180,180,180,-210,-207,180,-212,-211,]),'DOUBLE':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,131,132,133,139,145,173,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,231,232,233,234,235,236,237,238,244,249,258,277,286,287,288,291,293,296,327,331,336,337,339,340,346,349,351,352,353,356,357,360,362,363,398,418,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[45,-309,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,45,-96,-111,-106,-65,-95,-112,45,-221,-109,-113,45,-63,-118,45,-29,-107,-62,-103,-67,-114,-108,-309,-110,-309,-105,-119,-68,-100,-87,-10,-9,45,-53,45,-84,45,45,-61,-133,-307,-132,45,-153,-152,-166,-90,-92,45,-89,-91,-94,-83,-86,-88,-69,-30,45,45,-70,45,-85,45,45,45,-135,-130,-145,-146,-142,-308,45,45,-167,45,45,-36,-35,45,45,-73,-76,-72,-74,45,-78,-199,-198,-77,-200,-75,45,-139,45,-137,-134,-143,-131,-128,-129,-154,-71,45,-31,45,45,45,-34,45,45,45,-218,-217,45,-215,-201,-214,-78,-80,-202,-138,-136,-144,-156,-155,45,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'MINUSEQUAL':([115,131,150,152,153,154,155,156,157,158,161,162,170,172,175,177,178,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,458,459,460,465,507,511,520,],[-294,-308,-298,265,-286,-301,-305,-302,-299,-284,-285,-259,-297,-271,-303,-295,-283,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-264,-272,-273,-258,-288,-281,-282,]),'INT_CONST_OCT':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,131,139,144,147,148,151,159,160,166,167,169,171,173,174,176,184,185,186,198,202,203,206,207,208,214,215,217,218,219,220,222,224,225,228,239,250,253,254,255,260,264,265,266,267,268,269,270,271,272,273,274,275,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,326,329,330,336,337,339,340,341,342,343,346,348,349,
351,352,353,354,355,361,367,368,369,372,376,378,380,395,422,423,424,430,432,433,435,437,439,442,443,454,457,461,463,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,504,505,509,510,514,517,522,523,524,526,527,528,],[-117,-118,-119,-68,-309,-307,-28,-166,-27,-83,-69,181,-28,-309,181,-308,-167,-309,181,181,-270,181,-268,181,-267,181,-266,181,181,-265,-269,181,181,181,-73,-76,-72,181,-74,181,181,-78,-199,-198,-77,-200,181,-75,-266,181,181,181,-28,-309,-309,-227,-230,-228,-224,-225,-229,-231,181,-233,-234,-226,-232,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,-309,-266,181,-218,-217,181,-215,181,181,181,-201,181,-214,181,-80,-202,181,181,181,-266,181,181,-12,181,181,-11,181,181,-28,-309,-266,-213,-216,181,-205,181,-79,-203,-309,-182,181,181,-309,181,-266,181,181,181,181,-204,181,181,181,181,-11,181,-209,-208,-206,-80,181,-309,181,181,181,-210,-207,181,-212,-211,]),'TIMESEQUAL':([115,131,150,152,153,154,155,156,157,158,161,162,170,172,175,177,178,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,458,459,460,465,507,511,520,],[-294,-308,-298,274,-286,-301,-305,-302,-299,-284,-285,-259,-297,-271,-303,-295,-283,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-264,-272,-273,-258,-288,-281,-282,]),'OR':([115,131,150,152,153,154,155,156,157,158,161,162,164,170,172,175,177,178,179,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,458,459,460,465,507,511,520,],[-294,-308,-298,-257,-286,-301,-305,-302,-299,-284,-285,-259,-238,-297,-271,-303,-295,-283,315,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-244,315,-245,-243,-247,-251,-246,-242,-249,-254,-240,-239,-248,315,-250,-252,-253,-241,-264,-272,-273,-258,-288,-281,-282,]),'SHORT':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,131,132,133,139,145,173,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,231,232,233,234,235,236,237,238,244,249,258,277,286,287,288,291,293,296,327,331,336,337,339,340,346,349,351,352,353,356,357,360,362,363,398,418,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[2,-309,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,2,-96,-111,-106,-65,-95,-112,2,-221,-109,-113,2,-63,-118,2,-29,-107,-62,-103,-67,-114,-108,-309,-110,-309,-105,-119,-68,-100,-87,-10,-9,2,-53,2,-84,2,2,-61,-133,-307,-132,2,-153,-152,-166,-90,-92,2,-89,-91,-94,-83,-86,-88,-69,-30,2,2,-70,2,-85,2,2,2,-135,-130,-145,-146,-142,-308,2,2,-167,2,2,-36,-35,2,2,-73,-76,-72,-74,2,-78,-199,-198,-77,-200,-75,2,-139,2,-137,-134,-143,-131,-128,-129,-154,-71,2,-31,2,2,2,-34,2,2,2,-218,-217,2,-215,-201,-214,-78,-80,-202,-138,-136,-144,-156,-155,2,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'RETURN':([53,70,101,104,121,131,202,203,206,208,215,217,218,219,220,222,224,225,336,337,340,342,346,349,351,352,353,354,432,433,437,439,442,443,479,480,481,483,501,502,504,505,517,522,523,524,526,527,528,],[-68,-307,-83,-69,207,-308,-73,-76,-72,-74,207,-78,-199,-198,-77,-200,207,-75,-218,-217,-215,207,-201,-214,207,-80,-202,207,-213,-216,-205,207,-79,-203,207,-204,207,207,-209,-208,-206,-80,207,207,-210,-207,207,-212,-211,]),'RSHIFTEQUAL':([1
15,131,150,152,153,154,155,156,157,158,161,162,170,172,175,177,178,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,458,459,460,465,507,511,520,],[-294,-308,-298,275,-286,-301,-305,-302,-299,-284,-285,-259,-297,-271,-303,-295,-283,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-264,-272,-273,-258,-288,-281,-282,]),'RESTRICT':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,31,32,33,34,35,37,39,40,41,42,44,45,47,48,49,50,51,53,54,59,60,61,63,64,67,68,69,70,71,72,73,74,76,78,80,83,87,91,92,96,101,104,105,107,108,113,120,121,122,123,124,125,126,127,128,129,130,131,132,133,139,144,145,173,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,231,232,233,234,235,236,237,238,244,249,254,255,258,277,286,287,288,291,293,296,326,327,331,336,337,339,340,346,349,351,352,353,356,357,360,362,363,398,418,419,423,424,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[35,35,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,35,-96,-111,-106,-65,-95,-112,35,-221,-109,35,-113,35,-63,-118,-29,-107,-62,-103,-67,-114,-108,35,-110,35,-105,-119,-68,-100,35,35,-53,35,-84,35,-61,-133,-307,-132,35,-153,-152,35,-166,-90,-92,35,-89,-91,-94,-83,-69,-30,35,35,35,-70,35,-85,35,35,35,-135,-130,-145,-146,-142,-308,35,35,-167,35,35,35,-36,-35,35,35,-73,-76,-72,-74,35,-78,-199,-198,-77,-200,-75,35,-139,35,-137,-134,-143,-131,-128,-129,-154,35,35,-71,35,-31,35,35,35,-34,35,35,35,35,-218,-217,35,-215,-201,-214,-78,-80,-202,-138,-136,-144,-156,-155,35,-33,-32,35,35,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'STATIC':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,37,39,40,41,42,44,45,47,48,49,50,51,53,54,59,60,61,63,64,67,68,69,70,71,73,74,78,80,83,87,91,92,96,101,104,105,107,113,120,121,122,126,131,139,144,145,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,232,234,235,249,254,258,286,293,326,327,331,336,337,339,340,346,349,351,352,353,356,357,362,363,398,418,419,423,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[9,9,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,9,-96,-111,-106,-65,-95,-112,9,-221,-109,-113,9,-63,-118,-29,-107,-62,-103,-67,-114,-108,9,-110,9,-105,-119,-68,-100,108,9,-53,9,-84,9,-61,-133,-307,-132,-153,-152,-166,-90,-92,9,-89,-91,-94,-83,-69,-30,185,9,-70,9,-85,-135,-308,-167,255,9,-36,-35,9,9,-73,-76,-72,-74,9,-78,-199,-198,-77,-200,-75,-139,-137,-134,-154,368,-71,-31,-34,424,9,9,-218,-217,9,-215,-201,-214,-78,-80,-202,-138,-136,-156,-155,9,-33,-32,470,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'SIZEOF':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,131,139,144,147,148,151,159,160,166,167,169,171,173,174,176,184,185,186,198,202,203,206,207,208,214,215,217,218,219,220,222,224,225,228,239,250,253,254,255,260,264,265,266,267,268,269,270,271,272,273,274,275,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,361,367,368,369,372,376,378,380,395,422,423,424,430,432,433,435,437,439,442,443,454,457,461,463,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,504,505,509,510,514,517,522,523,524,526,527,528,],[-117,-118,-119,-68,-309,-307,-28,-166,-27,-83,-69,159,-28,-309,159,-308,-167,-309,159,159,-270,159,-268,159,-267,159,-266,159,159,-265,-269,159,159,159,-73,-76,-72,159,-74,159,159,-78,-199,-198,-77,-200,159,-75,-266,159,159,159,-28,-309,-309,-22
7,-230,-228,-224,-225,-229,-231,159,-233,-234,-226,-232,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,-309,-266,159,-218,-217,159,-215,159,159,159,-201,159,-214,159,-80,-202,159,159,159,-266,159,159,-12,159,159,-11,159,159,-28,-309,-266,-213,-216,159,-205,159,-79,-203,-309,-182,159,159,-309,159,-266,159,159,159,159,-204,159,159,159,159,-11,159,-209,-208,-206,-80,159,-309,159,159,159,-210,-207,159,-212,-211,]),'UNSIGNED':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,131,132,133,139,145,173,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,231,232,233,234,235,236,237,238,244,249,258,277,286,287,288,291,293,296,327,331,336,337,339,340,346,349,351,352,353,356,357,360,362,363,398,418,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[20,-309,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,20,-96,-111,-106,-65,-95,-112,20,-221,-109,-113,20,-63,-118,20,-29,-107,-62,-103,-67,-114,-108,-309,-110,-309,-105,-119,-68,-100,-87,-10,-9,20,-53,20,-84,20,20,-61,-133,-307,-132,20,-153,-152,-166,-90,-92,20,-89,-91,-94,-83,-86,-88,-69,-30,20,20,-70,20,-85,20,20,20,-135,-130,-145,-146,-142,-308,20,20,-167,20,20,-36,-35,20,20,-73,-76,-72,-74,20,-78,-199,-198,-77,-200,-75,20,-139,20,-137,-134,-143,-131,-128,-129,-154,-71,20,-31,20,20,20,-34,20,20,20,-218,-217,20,-215,-201,-214,-78,-80,-202,-138,-136,-144,-156,-155,20,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'UNION':([0,1,3,7,8,9,11,12,14,17,18,19,23,24,26,34,35,36,37,40,42,47,49,51,53,54,55,56,57,60,61,64,65,67,68,70,72,78,87,101,102,103,104,105,117,120,121,122,123,124,125,128,129,130,131,132,139,145,173,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,231,233,236,258,277,286,287,288,291,293,327,331,336,337,339,340,346,349,351,352,353,360,398,418,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[22,-309,-117,-99,-52,-97,-98,-64,-60,-66,22,-96,-65,-95,22,-63,-118,22,-29,-62,-67,-309,-309,-119,-68,-100,-87,-10,-9,22,-53,-84,22,22,-61,-307,22,-166,22,-83,-86,-88,-69,-30,22,-70,22,-85,22,22,22,-145,-146,-142,-308,22,-167,22,22,-36,-35,22,22,-73,-76,-72,-74,22,-78,-199,-198,-77,-200,-75,22,22,-143,-71,22,-31,22,22,22,-34,22,22,-218,-217,22,-215,-201,-214,-78,-80,-202,-144,22,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'COLON':([2,3,5,6,8,10,15,20,21,25,29,30,32,35,37,39,41,44,45,48,50,51,61,69,71,73,74,85,86,88,105,115,119,126,127,131,133,143,150,152,153,154,155,156,157,158,161,162,164,170,172,175,177,178,179,180,181,183,187,188,212,227,229,232,234,235,237,238,244,245,249,251,276,278,279,280,282,286,289,290,292,293,297,344,345,356,357,359,362,363,370,371,381,383,384,385,386,389,394,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,436,447,448,458,459,460,465,484,485,494,507,511,520,],[-104,-117,-115,-101,-52,-116,-102,-111,-106,-112,-221,-109,-113,-118,-29,-107,-103,-114,-108,-110,-105,-119,-53,-133,-132,-153,-152,-54,-163,-37,-30,-294,-162,-135,-130,-308,239,-55,-298,-257,-286,-301,-305,-302,-299,-284,-285,-259,-238,-297,-271,-303,-295,-283,-236,-300,-296,-222,-36,-35,342,-219,354,-139,-137,-134,-131,-128,-129,361,-154,-38,-306,-263,-304,-280,-279,-31,-257,-262,-260,-34,-261,439,-235,-138,-136,239,-156,-155,-44,-43,-223,-278,-277,-276,
-275,-274,-287,-244,-256,-245,-243,-247,-251,-246,-242,-249,-254,-240,-239,-248,-255,-250,-252,467,-253,-241,-33,-32,-220,-39,-42,-264,-272,-273,-258,-41,-40,-237,-288,-281,-282,]),'$end':([0,12,14,17,23,26,34,40,42,43,52,53,68,101,104,120,131,258,353,],[-309,-64,-60,-66,-65,-58,-63,-62,-67,0,-59,-68,-61,-83,-69,-70,-308,-71,-202,]),'WSTRING_LITERAL':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,131,139,144,147,148,151,153,155,159,160,166,167,169,171,173,174,176,184,185,186,198,202,203,206,207,208,214,215,217,218,219,220,222,224,225,228,239,250,253,254,255,260,264,265,266,267,268,269,270,271,272,273,274,275,276,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,361,367,368,369,372,376,378,380,395,422,423,424,430,432,433,435,437,439,442,443,454,457,461,463,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,504,505,509,510,514,517,522,523,524,526,527,528,],[-117,-118,-119,-68,-309,-307,-28,-166,-27,-83,-69,155,-28,-309,155,-308,-167,-309,155,155,-270,276,-305,155,-268,155,-267,155,-266,155,155,-265,-269,155,155,155,-73,-76,-72,155,-74,155,155,-78,-199,-198,-77,-200,155,-75,-266,155,155,155,-28,-309,-309,-227,-230,-228,-224,-225,-229,-231,155,-233,-234,-226,-232,-306,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,-309,-266,155,-218,-217,155,-215,155,155,155,-201,155,-214,155,-80,-202,155,155,155,-266,155,155,-12,155,155,-11,155,155,-28,-309,-266,-213,-216,155,-205,155,-79,-203,-309,-182,155,155,-309,155,-266,155,155,155,155,-204,155,155,155,155,-11,155,-209,-208,-206,-80,155,-309,155,155,155,-210,-207,155,-212,-211,]),'DIVIDE':([115,131,150,152,153,154,155,156,157,158,161,162,164,170,172,175,177,178,179,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,458,459,460,465,507,511,520,],[-294,-308,-298,-257,-286,-301,-305,-302,-299,-284,-285,-259,-238,-297,-271,-303,-295,-283,308,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,308,308,308,308,308,308,308,308,308,308,-240,-239,308,308,308,308,308,-241,-264,-272,-273,-258,-288,-281,-282,]),'FOR':([53,70,101,104,121,131,202,203,206,208,215,217,218,219,220,222,224,225,336,337,340,342,346,349,351,352,353,354,432,433,437,439,442,443,479,480,481,483,501,502,504,505,517,522,523,524,526,527,528,],[-68,-307,-83,-69,209,-308,-73,-76,-72,-74,209,-78,-199,-198,-77,-200,209,-75,-218,-217,-215,209,-201,-214,209,-80,-202,209,-213,-216,-205,209,-79,-203,209,-204,209,209,-209,-208,-206,-80,209,209,-210,-207,209,-212,-211,]),'PLUSPLUS':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,115,121,131,139,144,147,148,150,151,153,154,155,156,157,158,159,160,161,162,166,167,169,170,171,172,173,174,175,176,177,178,180,181,184,185,186,198,202,203,206,207,208,214,215,217,218,219,220,222,224,225,228,229,239,250,253,254,255,260,264,265,266,267,268,269,270,271,272,273,274,275,276,277,279,280,282,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,361,367,368,369,372,376,378,380,383,384,385,386,389,394,395,422,423,424,430,432,433,435,437,439,442,443,454,457,459,460,461,463,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,504,505,507,509,510,511,514,517,520,522,523,524,526,527,528,],[-117,-118,-119,-68,-309,-307,-28,-166,-27,-83,-69,169,-28,-30
9,-294,169,-308,-167,-309,169,169,-298,-270,-286,-301,-305,-302,-299,-284,169,-268,-285,282,169,-267,169,-297,-266,-271,169,169,-303,-265,-295,-283,-300,-296,-269,169,169,169,-73,-76,-72,169,-74,169,169,-78,-199,-198,-77,-200,169,-75,-266,-294,169,169,169,-28,-309,-309,-227,-230,-228,-224,-225,-229,-231,169,-233,-234,-226,-232,-306,169,-304,-280,-279,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,-309,-266,169,-218,-217,169,-215,169,169,169,-201,169,-214,169,-80,-202,169,169,169,-266,169,169,-12,169,169,-11,-278,-277,-276,-275,-274,-287,169,169,-28,-309,-266,-213,-216,169,-205,169,-79,-203,-309,-182,-272,-273,169,169,-309,169,-266,169,169,169,169,-204,169,169,169,169,-11,169,-209,-208,-206,-80,-288,169,-309,-281,169,169,-282,169,-210,-207,169,-212,-211,]),'EQUALS':([8,37,61,85,86,87,88,89,97,105,115,119,131,138,143,150,152,153,154,155,156,157,158,161,162,170,172,175,177,178,180,181,187,188,229,251,276,278,279,280,282,286,289,290,292,293,297,370,371,374,379,383,384,385,386,389,394,418,419,447,448,452,456,458,459,460,465,484,485,486,507,511,520,],[-52,-29,-53,-54,-163,-162,-37,147,148,-30,-294,-162,-308,250,-55,-298,267,-286,-301,-305,-302,-299,-284,-285,-259,-297,-271,-303,-295,-283,-300,-296,-36,-35,-294,-38,-306,-263,-304,-280,-279,-31,-257,-262,-260,-34,-261,-44,-43,-183,457,-278,-277,-276,-275,-274,-287,-33,-32,-39,-42,-186,-184,-264,-272,-273,-258,-41,-40,-185,-288,-281,-282,]),'ELSE':([53,104,131,202,203,206,208,217,220,225,336,337,340,349,351,352,353,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[-68,-69,-308,-73,-76,-72,-74,-78,-77,-75,-218,-217,-215,-214,-78,-80,-202,-213,-216,-205,-79,-203,-204,-209,-208,-206,517,-210,-207,-212,-211,]),'ANDEQUAL':([115,131,150,152,153,154,155,156,157,158,161,162,170,172,175,177,178,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,458,459,460,465,507,511,520,],[-294,-308,-298,272,-286,-301,-305,-302,-299,-284,-285,-259,-297,-271,-303,-295,-283,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-264,-272,-273,-258,-288,-281,-282,]),'EQ':([115,131,150,152,153,154,155,156,157,158,161,162,164,170,172,175,177,178,179,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,458,459,460,465,507,511,520,],[-294,-308,-298,-257,-286,-301,-305,-302,-299,-284,-285,-259,-238,-297,-271,-303,-295,-283,312,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-244,312,-245,-243,-247,-251,-246,-242,-249,312,-240,-239,-248,312,-250,312,312,-241,-264,-272,-273,-258,-288,-281,-282,]),'AND':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,115,121,131,139,144,147,148,150,151,152,153,154,155,156,157,158,159,160,161,162,164,166,167,169,170,171,172,173,174,175,176,177,178,179,180,181,184,185,186,198,202,203,206,207,208,214,215,217,218,219,220,222,224,225,228,229,239,250,253,254,255,260,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,282,284,285,288,289,290,291,292,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,361,367,368,369,372,376,378,380,383,384,385,386,389,394,395,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,422,423,424,430,432,433,435,437,439,442,443,454,457,458,459,460,461,463,465,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,504,505,507,509,510,511,514,517,520,52
2,523,524,526,527,528,],[-117,-118,-119,-68,-309,-307,-28,-166,-27,-83,-69,176,-28,-309,-294,176,-308,-167,-309,176,176,-298,-270,-257,-286,-301,-305,-302,-299,-284,176,-268,-285,-259,-238,176,-267,176,-297,-266,-271,176,176,-303,-265,-295,-283,313,-300,-296,-269,176,176,176,-73,-76,-72,176,-74,176,176,-78,-199,-198,-77,-200,176,-75,-266,-294,176,176,176,-28,-309,-309,-227,-230,-228,-224,-225,-229,-231,176,-233,-234,-226,-232,-306,176,-263,-304,-280,-279,176,176,176,-257,-262,176,-260,-261,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,-309,-266,176,-218,-217,176,-215,176,176,176,-201,176,-214,176,-80,-202,176,176,176,-266,176,176,-12,176,176,-11,-278,-277,-276,-275,-274,-287,176,-244,313,-245,-243,-247,-251,-246,-242,-249,313,-240,-239,-248,313,-250,-252,313,-241,176,-28,-309,-266,-213,-216,176,-205,176,-79,-203,-309,-182,-264,-272,-273,176,176,-258,-309,176,-266,176,176,176,176,-204,176,176,176,176,-11,176,-209,-208,-206,-80,-288,176,-309,-281,176,176,-282,176,-210,-207,176,-212,-211,]),'TYPEID':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,76,77,78,79,80,81,83,84,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,131,132,133,139,140,142,145,149,173,187,188,189,192,201,202,203,206,208,215,217,218,219,220,222,225,231,232,233,234,235,236,237,238,244,249,258,277,281,283,286,287,288,291,293,327,331,336,337,339,340,346,349,351,352,353,356,357,359,360,362,363,398,418,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[29,-309,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,29,-96,-111,-106,-141,-65,-95,-112,29,69,73,-221,-109,-309,-113,88,-63,-118,29,-29,-140,-107,-62,-103,-67,-114,-108,-309,-110,-309,-105,-119,-68,-100,-87,-10,-9,29,-53,88,-84,29,29,-61,-133,-307,-132,29,-153,-152,-28,-164,-166,-27,-90,88,-92,88,29,-89,-91,-94,-83,-86,-88,-69,-30,196,29,-70,29,-85,29,29,29,-135,-130,-145,-146,-142,-308,29,88,-167,-165,88,29,88,29,-36,-35,29,196,29,-73,-76,-72,-74,29,-78,-199,-198,-77,-200,-75,29,-139,29,-137,-134,-143,-131,-128,-129,-154,-71,29,383,385,-31,29,29,29,-34,29,29,-218,-217,29,-215,-201,-214,-78,-80,-202,-138,-136,88,-144,-156,-155,29,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'LBRACE':([8,18,22,27,28,37,38,53,61,62,64,66,67,69,70,71,73,74,87,101,104,105,121,122,131,146,147,148,187,188,202,203,206,208,215,217,218,219,220,222,224,225,260,286,293,336,337,340,342,346,349,351,352,353,354,372,378,380,395,418,419,432,433,437,439,442,443,454,457,458,463,464,466,479,480,481,483,487,488,501,502,504,505,510,517,522,523,524,526,527,528,],[-52,-309,-141,70,70,-29,-140,-68,-53,-7,-84,70,-8,70,-307,70,70,70,-309,-83,-69,-30,70,-85,-308,70,70,70,-36,-35,-73,-76,-72,-74,70,-78,-199,-198,-77,-200,70,-75,-309,-31,-34,-218,-217,-215,70,-201,-214,70,-80,-202,70,-12,70,-11,70,-33,-32,-213,-216,-205,70,-79,-203,-309,-182,70,70,70,-309,70,-204,70,70,70,-11,-209,-208,-206,-80,-309,70,70,-210,-207,70,-212,-211,]),'PPHASH':([0,12,14,17,23,26,34,40,42,53,68,101,104,120,131,258,353,],[42,-64,-60,-66,-65,42,-63,-62,-67,-68,-61,-83,-69,-70,-308,-71,-202,]),'INT':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,131,132,133,139,145,173
,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,231,232,233,234,235,236,237,238,244,249,258,277,286,287,288,291,293,296,327,331,336,337,339,340,346,349,351,352,353,356,357,360,362,363,398,418,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[50,-309,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,50,-96,-111,-106,-65,-95,-112,50,-221,-109,-113,50,-63,-118,50,-29,-107,-62,-103,-67,-114,-108,-309,-110,-309,-105,-119,-68,-100,-87,-10,-9,50,-53,50,-84,50,50,-61,-133,-307,-132,50,-153,-152,-166,-90,-92,50,-89,-91,-94,-83,-86,-88,-69,-30,50,50,-70,50,-85,50,50,50,-135,-130,-145,-146,-142,-308,50,50,-167,50,50,-36,-35,50,50,-73,-76,-72,-74,50,-78,-199,-198,-77,-200,-75,50,-139,50,-137,-134,-143,-131,-128,-129,-154,-71,50,-31,50,50,50,-34,50,50,50,-218,-217,50,-215,-201,-214,-78,-80,-202,-138,-136,-144,-156,-155,50,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'SIGNED':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,131,132,133,139,145,173,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,231,232,233,234,235,236,237,238,244,249,258,277,286,287,288,291,293,296,327,331,336,337,339,340,346,349,351,352,353,356,357,360,362,363,398,418,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[48,-309,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,48,-96,-111,-106,-65,-95,-112,48,-221,-109,-113,48,-63,-118,48,-29,-107,-62,-103,-67,-114,-108,-309,-110,-309,-105,-119,-68,-100,-87,-10,-9,48,-53,48,-84,48,48,-61,-133,-307,-132,48,-153,-152,-166,-90,-92,48,-89,-91,-94,-83,-86,-88,-69,-30,48,48,-70,48,-85,48,48,48,-135,-130,-145,-146,-142,-308,48,48,-167,48,48,-36,-35,48,48,-73,-76,-72,-74,48,-78,-199,-198,-77,-200,-75,48,-139,48,-137,-134,-143,-131,-128,-129,-154,-71,48,-31,48,48,48,-34,48,48,48,-218,-217,48,-215,-201,-214,-78,-80,-202,-138,-136,-144,-156,-155,48,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'CONTINUE':([53,70,101,104,121,131,202,203,206,208,215,217,218,219,220,222,224,225,336,337,340,342,346,349,351,352,353,354,432,433,437,439,442,443,479,480,481,483,501,502,504,505,517,522,523,524,526,527,528,],[-68,-307,-83,-69,210,-308,-73,-76,-72,-74,210,-78,-199,-198,-77,-200,210,-75,-218,-217,-215,210,-201,-214,210,-80,-202,210,-213,-216,-205,210,-79,-203,210,-204,210,210,-209,-208,-206,-80,210,210,-210,-207,210,-212,-211,]),'NOT':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,131,139,144,147,148,151,159,160,166,167,169,171,173,174,176,184,185,186,198,202,203,206,207,208,214,215,217,218,219,220,222,224,225,228,239,250,253,254,255,260,264,265,266,267,268,269,270,271,272,273,274,275,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,361,367,368,369,372,376,378,380,395,422,423,424,430,432,433,435,437,439,442,443,454,457,461,463,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,504,505,509,510,514,517,522,523,524,526,527,528,],[-117,-118,-119,-68,-309,-307,-28,-166,-27,-83,-69,184,-28,-309,184,-308,-167,-309,184,184,-270,184,-268,184,-267,184,-266,184,184,-265,-269,184,184,184,-73,-76,-72,184,-74,184,184,-78,-199,-198,-77,-200,184,-75,-266,184,184,184,-28,-309,-309,-227,-230,-228,-224,-225,-229,-231,184,-233,-234,-226,-232,
184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,-309,-266,184,-218,-217,184,-215,184,184,184,-201,184,-214,184,-80,-202,184,184,184,-266,184,184,-12,184,184,-11,184,184,-28,-309,-266,-213,-216,184,-205,184,-79,-203,-309,-182,184,184,-309,184,-266,184,184,184,184,-204,184,184,184,184,-11,184,-209,-208,-206,-80,184,-309,184,184,184,-210,-207,184,-212,-211,]),'OREQUAL':([115,131,150,152,153,154,155,156,157,158,161,162,170,172,175,177,178,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,458,459,460,465,507,511,520,],[-294,-308,-298,273,-286,-301,-305,-302,-299,-284,-285,-259,-297,-271,-303,-295,-283,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-264,-272,-273,-258,-288,-281,-282,]),'MOD':([115,131,150,152,153,154,155,156,157,158,161,162,164,170,172,175,177,178,179,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,458,459,460,465,507,511,520,],[-294,-308,-298,-257,-286,-301,-305,-302,-299,-284,-285,-259,-238,-297,-271,-303,-295,-283,316,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,316,316,316,316,316,316,316,316,316,316,-240,-239,316,316,316,316,316,-241,-264,-272,-273,-258,-288,-281,-282,]),'RSHIFT':([115,131,150,152,153,154,155,156,157,158,161,162,164,170,172,175,177,178,179,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,458,459,460,465,507,511,520,],[-294,-308,-298,-257,-286,-301,-305,-302,-299,-284,-285,-259,-238,-297,-271,-303,-295,-283,298,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-244,298,-245,-243,298,298,298,-242,298,298,-240,-239,298,298,298,298,298,-241,-264,-272,-273,-258,-288,-281,-282,]),'DEFAULT':([53,70,101,104,121,131,202,203,206,208,215,217,218,219,220,222,224,225,336,337,340,342,346,349,351,352,353,354,432,433,437,439,442,443,479,480,481,483,501,502,504,505,517,522,523,524,526,527,528,],[-68,-307,-83,-69,212,-308,-73,-76,-72,-74,212,-78,-199,-198,-77,-200,212,-75,-218,-217,-215,212,-201,-214,212,-80,-202,212,-213,-216,-205,212,-79,-203,212,-204,212,212,-209,-208,-206,-80,212,212,-210,-207,212,-212,-211,]),'__INT128':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,131,132,133,139,145,173,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,231,232,233,234,235,236,237,238,244,249,258,277,286,287,288,291,293,296,327,331,336,337,339,340,346,349,351,352,353,356,357,360,362,363,398,418,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[25,-309,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,25,-96,-111,-106,-65,-95,-112,25,-221,-109,-113,25,-63,-118,25,-29,-107,-62,-103,-67,-114,-108,-309,-110,-309,-105,-119,-68,-100,-87,-10,-9,25,-53,25,-84,25,25,-61,-133,-307,-132,25,-153,-152,-166,-90,-92,25,-89,-91,-94,-83,-86,-88,-69,-30,25,25,-70,25,-85,25,25,25,-135,-130,-145,-146,-142,-308,25,25,-167,25,25,-36,-35,25,25,-73,-76,-72,-74,25,-78,-199,-198,-77,-200,-75,25,-139,25,-137,-134,-143,-131,-128,-129,-154,-71,25,-31,25,25,25,-34,25,25,25,-218,-217,25,-215,-201,-214,-78,-80,-202,-138,-136,-144,-156,-155,25,-33,-32,-213,-216,-205,-79,-203,-204,-209
,-208,-206,-80,-210,-207,-212,-211,]),'WHILE':([53,70,101,104,121,131,202,203,206,208,215,217,218,219,220,222,224,225,336,337,340,342,346,349,350,351,352,353,354,432,433,437,439,442,443,479,480,481,483,501,502,504,505,517,522,523,524,526,527,528,],[-68,-307,-83,-69,213,-308,-73,-76,-72,-74,213,-78,-199,-198,-77,-200,213,-75,-218,-217,-215,213,-201,-214,441,213,-80,-202,213,-213,-216,-205,213,-79,-203,213,-204,213,213,-209,-208,-206,-80,213,213,-210,-207,213,-212,-211,]),'DIVEQUAL':([115,131,150,152,153,154,155,156,157,158,161,162,170,172,175,177,178,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,458,459,460,465,507,511,520,],[-294,-308,-298,264,-286,-301,-305,-302,-299,-284,-285,-259,-297,-271,-303,-295,-283,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-264,-272,-273,-258,-288,-281,-282,]),'EXTERN':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,37,39,40,41,42,44,45,47,48,49,50,51,53,54,60,61,63,64,67,68,69,70,71,73,74,80,83,87,91,92,96,101,104,105,113,120,121,122,126,131,145,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,232,234,235,249,258,286,293,327,331,336,337,339,340,346,349,351,352,353,356,357,362,363,398,418,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[11,11,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,11,-96,-111,-106,-65,-95,-112,11,-221,-109,-113,11,-63,-118,-29,-107,-62,-103,-67,-114,-108,11,-110,11,-105,-119,-68,-100,11,-53,11,-84,11,-61,-133,-307,-132,-153,-152,-90,-92,11,-89,-91,-94,-83,-69,-30,11,-70,11,-85,-135,-308,11,-36,-35,11,11,-73,-76,-72,-74,11,-78,-199,-198,-77,-200,-75,-139,-137,-134,-154,-71,-31,-34,11,11,-218,-217,11,-215,-201,-214,-78,-80,-202,-138,-136,-156,-155,11,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'CASE':([53,70,101,104,121,131,202,203,206,208,215,217,218,219,220,222,224,225,336,337,340,342,346,349,351,352,353,354,432,433,437,439,442,443,479,480,481,483,501,502,504,505,517,522,523,524,526,527,528,],[-68,-307,-83,-69,214,-308,-73,-76,-72,-74,214,-78,-199,-198,-77,-200,214,-75,-218,-217,-215,214,-201,-214,214,-80,-202,214,-213,-216,-205,214,-79,-203,214,-204,214,214,-209,-208,-206,-80,214,214,-210,-207,214,-212,-211,]),'LAND':([115,131,150,152,153,154,155,156,157,158,161,162,164,170,172,175,177,178,179,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,458,459,460,465,507,511,520,],[-294,-308,-298,-257,-286,-301,-305,-302,-299,-284,-285,-259,-238,-297,-271,-303,-295,-283,311,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-244,311,-245,-243,-247,-251,-246,-242,-249,-254,-240,-239,-248,-255,-250,-252,-253,-241,-264,-272,-273,-258,-288,-281,-282,]),'REGISTER':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,37,39,40,41,42,44,45,47,48,49,50,51,53,54,60,61,63,64,67,68,69,70,71,73,74,80,83,87,91,92,96,101,104,105,113,120,121,122,126,131,145,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,232,234,235,249,258,286,293,327,331,336,337,339,340,346,349,351,352,353,356,357,362,363,398,418,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[19,19,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,19,-96,-111,-106,-65,-95,-112,19,-221,-109,-113,19,-63,-118,-29,-107,-62,-103,-67,-114,-108,19,-110,19,-105,-119,-68,-100,19,-53,19,-84,19,-61,-133,-307,-132,-153,-152,-90,-92,19,-89,-91,-
94,-83,-69,-30,19,-70,19,-85,-135,-308,19,-36,-35,19,19,-73,-76,-72,-74,19,-78,-199,-198,-77,-200,-75,-139,-137,-134,-154,-71,-31,-34,19,19,-218,-217,19,-215,-201,-214,-78,-80,-202,-138,-136,-156,-155,19,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'MODEQUAL':([115,131,150,152,153,154,155,156,157,158,161,162,170,172,175,177,178,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,458,459,460,465,507,511,520,],[-294,-308,-298,266,-286,-301,-305,-302,-299,-284,-285,-259,-297,-271,-303,-295,-283,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-264,-272,-273,-258,-288,-281,-282,]),'NE':([115,131,150,152,153,154,155,156,157,158,161,162,164,170,172,175,177,178,179,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,458,459,460,465,507,511,520,],[-294,-308,-298,-257,-286,-301,-305,-302,-299,-284,-285,-259,-238,-297,-271,-303,-295,-283,303,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-244,303,-245,-243,-247,-251,-246,-242,-249,303,-240,-239,-248,303,-250,303,303,-241,-264,-272,-273,-258,-288,-281,-282,]),'SWITCH':([53,70,101,104,121,131,202,203,206,208,215,217,218,219,220,222,224,225,336,337,340,342,346,349,351,352,353,354,432,433,437,439,442,443,479,480,481,483,501,502,504,505,517,522,523,524,526,527,528,],[-68,-307,-83,-69,216,-308,-73,-76,-72,-74,216,-78,-199,-198,-77,-200,216,-75,-218,-217,-215,216,-201,-214,216,-80,-202,216,-213,-216,-205,216,-79,-203,216,-204,216,216,-209,-208,-206,-80,216,216,-210,-207,216,-212,-211,]),'INT_CONST_HEX':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,131,139,144,147,148,151,159,160,166,167,169,171,173,174,176,184,185,186,198,202,203,206,207,208,214,215,217,218,219,220,222,224,225,228,239,250,253,254,255,260,264,265,266,267,268,269,270,271,272,273,274,275,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,361,367,368,369,372,376,378,380,395,422,423,424,430,432,433,435,437,439,442,443,454,457,461,463,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,504,505,509,510,514,517,522,523,524,526,527,528,],[-117,-118,-119,-68,-309,-307,-28,-166,-27,-83,-69,170,-28,-309,170,-308,-167,-309,170,170,-270,170,-268,170,-267,170,-266,170,170,-265,-269,170,170,170,-73,-76,-72,170,-74,170,170,-78,-199,-198,-77,-200,170,-75,-266,170,170,170,-28,-309,-309,-227,-230,-228,-224,-225,-229,-231,170,-233,-234,-226,-232,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,-309,-266,170,-218,-217,170,-215,170,170,170,-201,170,-214,170,-80,-202,170,170,170,-266,170,170,-12,170,170,-11,170,170,-28,-309,-266,-213,-216,170,-205,170,-79,-203,-309,-182,170,170,-309,170,-266,170,170,170,170,-204,170,170,170,170,-11,170,-209,-208,-206,-80,170,-309,170,170,170,-210,-207,170,-212,-211,]),'_COMPLEX':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,131,132,133,139,145,173,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,231,232,233,234,235,236,237,238,244,249,258,277,286,287,288,291,293,296,327,331,336,337,339,340,346,349,351,352,353,356,357,360,362,363,398,418,419,432,43
3,437,442,443,480,501,502,504,505,523,524,527,528,],[30,-309,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,30,-96,-111,-106,-65,-95,-112,30,-221,-109,-113,30,-63,-118,30,-29,-107,-62,-103,-67,-114,-108,-309,-110,-309,-105,-119,-68,-100,-87,-10,-9,30,-53,30,-84,30,30,-61,-133,-307,-132,30,-153,-152,-166,-90,-92,30,-89,-91,-94,-83,-86,-88,-69,-30,30,30,-70,30,-85,30,30,30,-135,-130,-145,-146,-142,-308,30,30,-167,30,30,-36,-35,30,30,-73,-76,-72,-74,30,-78,-199,-198,-77,-200,-75,30,-139,30,-137,-134,-143,-131,-128,-129,-154,-71,30,-31,30,30,30,-34,30,30,30,-218,-217,30,-215,-201,-214,-78,-80,-202,-138,-136,-144,-156,-155,30,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'PPPRAGMASTR':([53,],[104,]),'PLUSEQUAL':([115,131,150,152,153,154,155,156,157,158,161,162,170,172,175,177,178,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,458,459,460,465,507,511,520,],[-294,-308,-298,269,-286,-301,-305,-302,-299,-284,-285,-259,-297,-271,-303,-295,-283,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-264,-272,-273,-258,-288,-281,-282,]),'STRUCT':([0,1,3,7,8,9,11,12,14,17,18,19,23,24,26,34,35,36,37,40,42,47,49,51,53,54,55,56,57,60,61,64,65,67,68,70,72,78,87,101,102,103,104,105,117,120,121,122,123,124,125,128,129,130,131,132,139,145,173,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,231,233,236,258,277,286,287,288,291,293,327,331,336,337,339,340,346,349,351,352,353,360,398,418,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[38,-309,-117,-99,-52,-97,-98,-64,-60,-66,38,-96,-65,-95,38,-63,-118,38,-29,-62,-67,-309,-309,-119,-68,-100,-87,-10,-9,38,-53,-84,38,38,-61,-307,38,-166,38,-83,-86,-88,-69,-30,38,-70,38,-85,38,38,38,-145,-146,-142,-308,38,-167,38,38,-36,-35,38,38,-73,-76,-72,-74,38,-78,-199,-198,-77,-200,-75,38,38,-143,-71,38,-31,38,38,38,-34,38,38,-218,-217,38,-215,-201,-214,-78,-80,-202,-144,38,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'CONDOP':([115,131,150,152,153,154,155,156,157,158,161,162,164,170,172,175,177,178,179,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,458,459,460,465,507,511,520,],[-294,-308,-298,-257,-286,-301,-305,-302,-299,-284,-285,-259,-238,-297,-271,-303,-295,-283,314,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-244,-256,-245,-243,-247,-251,-246,-242,-249,-254,-240,-239,-248,-255,-250,-252,-253,-241,-264,-272,-273,-258,-288,-281,-282,]),'BREAK':([53,70,101,104,121,131,202,203,206,208,215,217,218,219,220,222,224,225,336,337,340,342,346,349,351,352,353,354,432,433,437,439,442,443,479,480,481,483,501,502,504,505,517,522,523,524,526,527,528,],[-68,-307,-83,-69,221,-308,-73,-76,-72,-74,221,-78,-199,-198,-77,-200,221,-75,-218,-217,-215,221,-201,-214,221,-80,-202,221,-213,-216,-205,221,-79,-203,221,-204,221,221,-209,-208,-206,-80,221,221,-210,-207,221,-212,-211,]),'VOLATILE':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,31,32,33,34,35,37,39,40,41,42,44,45,47,48,49,50,51,53,54,59,60,61,63,64,67,68,69,70,71,72,73,74,76,78,80,83,87,91,92,96,101,104,105,107,108,113,120,121,122,123,124,125,126,127,128,129,130,131,132,133,139,144,145,173,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,231,232,233,234,235,236,237,238,244,249,254,255,258,277,286,287,288,291,293,296,326,327,331,336,337,339,340,346,349,351,352,353,356,357,360,362,363,398,418,419,423
,424,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[51,51,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,51,-96,-111,-106,-65,-95,-112,51,-221,-109,51,-113,51,-63,-118,-29,-107,-62,-103,-67,-114,-108,51,-110,51,-105,-119,-68,-100,51,51,-53,51,-84,51,-61,-133,-307,-132,51,-153,-152,51,-166,-90,-92,51,-89,-91,-94,-83,-69,-30,51,51,51,-70,51,-85,51,51,51,-135,-130,-145,-146,-142,-308,51,51,-167,51,51,51,-36,-35,51,51,-73,-76,-72,-74,51,-78,-199,-198,-77,-200,-75,51,-139,51,-137,-134,-143,-131,-128,-129,-154,51,51,-71,51,-31,51,51,51,-34,51,51,51,51,-218,-217,51,-215,-201,-214,-78,-80,-202,-138,-136,-144,-156,-155,51,-33,-32,51,51,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'PPPRAGMA':([0,12,14,17,23,26,34,40,42,53,68,70,72,101,104,120,121,123,124,125,128,129,130,131,202,203,206,208,215,217,218,219,220,222,224,225,231,233,236,258,336,337,340,342,346,349,351,352,353,354,360,432,433,437,439,442,443,479,480,481,483,501,502,504,505,517,522,523,524,526,527,528,],[53,-64,-60,-66,-65,53,-63,-62,-67,-68,-61,-307,53,-83,-69,-70,53,53,53,53,-145,-146,-142,-308,-73,-76,-72,-74,53,-78,-199,-198,-77,-200,53,-75,53,53,-143,-71,-218,-217,-215,53,-201,-214,53,-80,-202,53,-144,-213,-216,-205,53,-79,-203,53,-204,53,53,-209,-208,-206,-80,53,53,-210,-207,53,-212,-211,]),'INLINE':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,37,39,40,41,42,44,45,47,48,49,50,51,53,54,60,61,63,64,67,68,69,70,71,73,74,80,83,87,91,92,96,101,104,105,113,120,121,122,126,131,145,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,232,234,235,249,258,286,293,327,331,336,337,339,340,346,349,351,352,353,356,357,362,363,398,418,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[54,54,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,54,-96,-111,-106,-65,-95,-112,54,-221,-109,-113,54,-63,-118,-29,-107,-62,-103,-67,-114,-108,54,-110,54,-105,-119,-68,-100,54,-53,54,-84,54,-61,-133,-307,-132,-153,-152,-90,-92,54,-89,-91,-94,-83,-69,-30,54,-70,54,-85,-135,-308,54,-36,-35,54,54,-73,-76,-72,-74,54,-78,-199,-198,-77,-200,-75,-139,-137,-134,-154,-71,-31,-34,54,54,-218,-217,54,-215,-201,-214,-78,-80,-202,-138,-136,-156,-155,54,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'INT_CONST_BIN':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,131,139,144,147,148,151,159,160,166,167,169,171,173,174,176,184,185,186,198,202,203,206,207,208,214,215,217,218,219,220,222,224,225,228,239,250,253,254,255,260,264,265,266,267,268,269,270,271,272,273,274,275,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,361,367,368,369,372,376,378,380,395,422,423,424,430,432,433,435,437,439,442,443,454,457,461,463,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,504,505,509,510,514,517,522,523,524,526,527,528,],[-117,-118,-119,-68,-309,-307,-28,-166,-27,-83,-69,150,-28,-309,150,-308,-167,-309,150,150,-270,150,-268,150,-267,150,-266,150,150,-265,-269,150,150,150,-73,-76,-72,150,-74,150,150,-78,-199,-198,-77,-200,150,-75,-266,150,150,150,-28,-309,-309,-227,-230,-228,-224,-225,-229,-231,150,-233,-234,-226,-232,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,-309,-266,150,-218,-217,150,-215,150,150,150,-201,150,-214,150,-80,-202,150,150,150,-266,150,150,-12,150,150,-11,150,150,-28,-309,-266,-213,-216,150,-205,150,-79,-203,-309,-182,150,150,-309,150,-266,
150,150,150,150,-204,150,150,150,150,-11,150,-209,-208,-206,-80,150,-309,150,150,150,-210,-207,150,-212,-211,]),'DO':([53,70,101,104,121,131,202,203,206,208,215,217,218,219,220,222,224,225,336,337,340,342,346,349,351,352,353,354,432,433,437,439,442,443,479,480,481,483,501,502,504,505,517,522,523,524,526,527,528,],[-68,-307,-83,-69,224,-308,-73,-76,-72,-74,224,-78,-199,-198,-77,-200,224,-75,-218,-217,-215,224,-201,-214,224,-80,-202,224,-213,-216,-205,224,-79,-203,224,-204,224,224,-209,-208,-206,-80,224,224,-210,-207,224,-212,-211,]),'LNOT':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,131,139,144,147,148,151,159,160,166,167,169,171,173,174,176,184,185,186,198,202,203,206,207,208,214,215,217,218,219,220,222,224,225,228,239,250,253,254,255,260,264,265,266,267,268,269,270,271,272,273,274,275,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,361,367,368,369,372,376,378,380,395,422,423,424,430,432,433,435,437,439,442,443,454,457,461,463,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,504,505,509,510,514,517,522,523,524,526,527,528,],[-117,-118,-119,-68,-309,-307,-28,-166,-27,-83,-69,151,-28,-309,151,-308,-167,-309,151,151,-270,151,-268,151,-267,151,-266,151,151,-265,-269,151,151,151,-73,-76,-72,151,-74,151,151,-78,-199,-198,-77,-200,151,-75,-266,151,151,151,-28,-309,-309,-227,-230,-228,-224,-225,-229,-231,151,-233,-234,-226,-232,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,-309,-266,151,-218,-217,151,-215,151,151,151,-201,151,-214,151,-80,-202,151,151,151,-266,151,151,-12,151,151,-11,151,151,-28,-309,-266,-213,-216,151,-205,151,-79,-203,-309,-182,151,151,-309,151,-266,151,151,151,151,-204,151,151,151,151,-11,151,-209,-208,-206,-80,151,-309,151,151,151,-210,-207,151,-212,-211,]),'CONST':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,31,32,33,34,35,37,39,40,41,42,44,45,47,48,49,50,51,53,54,59,60,61,63,64,67,68,69,70,71,72,73,74,76,78,80,83,87,91,92,96,101,104,105,107,108,113,120,121,122,123,124,125,126,127,128,129,130,131,132,133,139,144,145,173,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,231,232,233,234,235,236,237,238,244,249,254,255,258,277,286,287,288,291,293,296,326,327,331,336,337,339,340,346,349,351,352,353,356,357,360,362,363,398,418,419,423,424,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[3,3,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,3,-96,-111,-106,-65,-95,-112,3,-221,-109,3,-113,3,-63,-118,-29,-107,-62,-103,-67,-114,-108,3,-110,3,-105,-119,-68,-100,3,3,-53,3,-84,3,-61,-133,-307,-132,3,-153,-152,3,-166,-90,-92,3,-89,-91,-94,-83,-69,-30,3,3,3,-70,3,-85,3,3,3,-135,-130,-145,-146,-142,-308,3,3,-167,3,3,3,-36,-35,3,3,-73,-76,-72,-74,3,-78,-199,-198,-77,-200,-75,3,-139,3,-137,-134,-143,-131,-128,-129,-154,3,3,-71,3,-31,3,3,3,-34,3,3,3,3,-218,-217,3,-215,-201,-214,-78,-80,-202,-138,-136,-144,-156,-155,3,-33,-32,3,3,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'LOR':([115,131,150,152,153,154,155,156,157,158,161,162,164,170,172,175,177,178,179,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,458,459,460,465,507,511,520,],[-294,-308,-298,-257,-286,-301,-305,-302,-299,-284,-285,-259,-238,-297,-271,-303,-295,-283,299,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-244,-256,-245,
-243,-247,-251,-246,-242,-249,-254,-240,-239,-248,-255,-250,-252,-253,-241,-264,-272,-273,-258,-288,-281,-282,]),'CHAR_CONST':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,131,139,144,147,148,151,159,160,166,167,169,171,173,174,176,184,185,186,198,202,203,206,207,208,214,215,217,218,219,220,222,224,225,228,239,250,253,254,255,260,264,265,266,267,268,269,270,271,272,273,274,275,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,361,367,368,369,372,376,378,380,395,422,423,424,430,432,433,435,437,439,442,443,454,457,461,463,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,504,505,509,510,514,517,522,523,524,526,527,528,],[-117,-118,-119,-68,-309,-307,-28,-166,-27,-83,-69,154,-28,-309,154,-308,-167,-309,154,154,-270,154,-268,154,-267,154,-266,154,154,-265,-269,154,154,154,-73,-76,-72,154,-74,154,154,-78,-199,-198,-77,-200,154,-75,-266,154,154,154,-28,-309,-309,-227,-230,-228,-224,-225,-229,-231,154,-233,-234,-226,-232,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,-309,-266,154,-218,-217,154,-215,154,154,154,-201,154,-214,154,-80,-202,154,154,154,-266,154,154,-12,154,154,-11,154,154,-28,-309,-266,-213,-216,154,-205,154,-79,-203,-309,-182,154,154,-309,154,-266,154,154,154,154,-204,154,154,154,154,-11,154,-209,-208,-206,-80,154,-309,154,154,154,-210,-207,154,-212,-211,]),'LSHIFT':([115,131,150,152,153,154,155,156,157,158,161,162,164,170,172,175,177,178,179,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,458,459,460,465,507,511,520,],[-294,-308,-298,-257,-286,-301,-305,-302,-299,-284,-285,-259,-238,-297,-271,-303,-295,-283,300,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-244,300,-245,-243,300,300,300,-242,300,300,-240,-239,300,300,300,300,300,-241,-264,-272,-273,-258,-288,-281,-282,]),'RBRACE':([53,70,72,101,104,115,121,123,124,125,128,129,130,131,136,137,138,150,152,153,154,155,156,157,158,161,162,164,170,172,175,177,178,179,180,181,183,202,203,206,208,215,217,218,219,220,222,223,225,226,231,233,236,246,247,248,260,261,276,278,279,280,282,289,290,292,297,336,337,340,345,346,349,351,352,353,360,364,365,373,377,380,381,383,384,385,386,389,394,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,432,433,437,442,443,451,454,455,458,459,460,465,480,489,493,494,501,502,504,505,506,507,510,511,520,523,524,527,528,],[-68,-307,131,-83,-69,-294,-309,131,131,131,-145,-146,-142,-308,-157,131,-160,-298,-257,-286,-301,-305,-302,-299,-284,-285,-259,-238,-297,-271,-303,-295,-283,-236,-300,-296,-222,-73,-76,-72,-74,-6,-78,-199,-198,-77,-200,-5,-75,131,131,131,-143,131,131,-158,-309,-177,-306,-263,-304,-280,-279,-257,-262,-260,-261,-218,-217,-215,-235,-201,-214,-78,-80,-202,-144,-159,-161,131,-22,-21,-223,-278,-277,-276,-275,-274,-287,-244,-256,-245,-243,-247,-251,-246,-242,-249,-254,-240,-239,-248,-255,-250,-252,-253,-241,-213,-216,-205,-79,-203,-178,131,-180,-264,-272,-273,-258,-204,-179,131,-237,-209,-208,-206,-80,-181,-288,131,-281,-282,-210,-207,-212,-211,]),'_BOOL':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,131,132,133,139,145,173,187,188,189,20
1,202,203,206,208,215,217,218,219,220,222,225,231,232,233,234,235,236,237,238,244,249,258,277,286,287,288,291,293,296,327,331,336,337,339,340,346,349,351,352,353,356,357,360,362,363,398,418,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[15,-309,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,15,-96,-111,-106,-65,-95,-112,15,-221,-109,-113,15,-63,-118,15,-29,-107,-62,-103,-67,-114,-108,-309,-110,-309,-105,-119,-68,-100,-87,-10,-9,15,-53,15,-84,15,15,-61,-133,-307,-132,15,-153,-152,-166,-90,-92,15,-89,-91,-94,-83,-86,-88,-69,-30,15,15,-70,15,-85,15,15,15,-135,-130,-145,-146,-142,-308,15,15,-167,15,15,-36,-35,15,15,-73,-76,-72,-74,15,-78,-199,-198,-77,-200,-75,15,-139,15,-137,-134,-143,-131,-128,-129,-154,-71,15,-31,15,15,15,-34,15,15,15,-218,-217,15,-215,-201,-214,-78,-80,-202,-138,-136,-144,-156,-155,15,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'LE':([115,131,150,152,153,154,155,156,157,158,161,162,164,170,172,175,177,178,179,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,458,459,460,465,507,511,520,],[-294,-308,-298,-257,-286,-301,-305,-302,-299,-284,-285,-259,-238,-297,-271,-303,-295,-283,302,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-244,302,-245,-243,-247,302,-246,-242,-249,302,-240,-239,-248,302,302,302,302,-241,-264,-272,-273,-258,-288,-281,-282,]),'SEMI':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,46,47,48,49,50,51,53,54,55,56,57,61,63,65,68,69,70,71,72,73,74,80,82,83,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,115,119,120,121,123,124,125,126,127,128,129,130,131,133,143,150,152,153,154,155,156,157,158,161,162,164,170,172,175,177,178,179,180,181,183,187,188,202,203,205,206,207,208,210,211,215,217,218,219,220,221,222,223,224,225,227,229,231,232,233,234,235,236,237,238,240,241,242,243,244,245,249,251,252,258,259,261,262,263,276,278,279,280,282,286,289,290,292,293,297,335,336,337,338,339,340,342,345,346,347,349,351,352,353,354,356,357,358,360,362,363,370,371,381,383,384,385,386,389,394,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,418,419,432,433,434,435,436,437,439,442,443,445,446,447,448,451,458,459,460,465,477,478,479,480,481,483,484,485,489,494,499,501,502,504,505,507,511,516,517,520,522,523,524,526,527,528,],[17,-309,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,-96,-111,-106,-65,-95,-112,17,-221,-109,-113,-309,-63,-118,-309,-29,-107,-62,-103,-67,-114,-108,101,-309,-110,-309,-105,-119,-68,-100,-87,-10,-9,-53,-309,-309,-61,-133,-307,-132,128,-153,-152,-90,-20,-92,-54,-163,-162,-37,-122,-81,-89,-91,-19,-120,-124,-94,-126,-16,-82,-15,-83,-86,-88,-69,-30,-294,-162,-70,-309,128,128,128,-135,-130,-145,-146,-142,-308,-309,-55,-298,-257,-286,-301,-305,-302,-299,-284,-285,-259,-238,-297,-271,-303,-295,-283,-236,-300,-296,-222,-36,-35,-73,-76,336,-72,337,-74,340,-14,-309,-78,-199,-198,-77,349,-200,-13,-309,-75,-219,-294,128,-139,128,-137,-134,-143,-131,-128,-26,-25,360,-147,-129,-149,-154,-38,-121,-71,-123,-177,-127,-125,-306,-263,-304,-280,-279,-31,-257,-262,-260,-34,-261,432,-218,-217,433,-309,-215,-309,-235,-201,-13,-214,-78,-80,-202,-309,-138,-136,-151,-144,-156,-155,-44,-43,-223,-278,-277,-276,-275,-274,-287,-244,-256,-245,-243,-247,-251,-246,-242,-249,-254,-240,-239,-248,-255,-250,-252,-253,-241,-33,-32,-213,-216,477,-309,-220,-205,-309,-79,-203,-148,-150
,-39,-42,-178,-264,-272,-273,-258,-309,500,-309,-204,-309,-309,-41,-40,-179,-237,514,-209,-208,-206,-80,-288,-281,523,-309,-282,-309,-210,-207,-309,-212,-211,]),'LT':([115,131,150,152,153,154,155,156,157,158,161,162,164,170,172,175,177,178,179,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,458,459,460,465,507,511,520,],[-294,-308,-298,-257,-286,-301,-305,-302,-299,-284,-285,-259,-238,-297,-271,-303,-295,-283,304,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-244,304,-245,-243,-247,304,-246,-242,-249,304,-240,-239,-248,304,304,304,304,-241,-264,-272,-273,-258,-288,-281,-282,]),'COMMA':([2,3,5,6,7,8,9,10,11,15,16,19,20,21,24,25,29,30,31,32,35,37,39,41,44,45,48,50,51,54,61,69,71,73,74,76,77,78,79,80,82,83,85,86,87,88,89,91,92,94,95,96,97,98,105,112,113,114,115,116,118,119,126,127,131,136,137,138,139,140,143,150,152,153,154,155,156,157,158,161,162,164,170,172,175,177,178,179,180,181,183,187,188,190,191,192,193,194,195,196,197,199,211,227,229,232,234,235,237,238,240,243,244,245,246,247,248,249,251,252,259,261,262,263,276,278,279,280,282,286,289,290,292,293,294,296,297,324,325,332,334,338,345,356,357,358,362,363,364,365,370,371,377,381,383,384,385,386,387,388,389,390,391,394,396,397,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,427,428,436,438,440,444,445,446,447,448,451,455,458,459,460,465,472,473,474,475,476,484,485,489,490,493,494,495,496,503,506,507,511,512,513,519,520,],[-104,-117,-115,-101,-99,-52,-97,-116,-98,-102,-93,-96,-111,-106,-95,-112,-221,-109,-309,-113,-118,-29,-107,-103,-114,-108,-110,-105,-119,-100,-53,-133,-132,-153,-152,-28,-164,-166,-27,-90,142,-92,-54,-163,-162,-37,-122,-89,-91,-120,-124,-94,-126,149,-30,-170,-309,200,-294,201,-175,-162,-135,-130,-308,-157,248,-160,-167,-165,-55,-298,-257,-286,-301,-305,-302,-299,-284,-285,-259,-238,-297,-271,-303,-295,-283,-236,-300,-296,-222,-36,-35,-174,-2,-188,-56,-172,-1,-45,-173,-190,341,-219,-294,-139,-137,-134,-131,-128,359,-147,-129,-149,248,248,-158,-154,-38,-121,-123,-177,-127,-125,-306,-263,-304,-280,-279,-31,-257,-262,-260,-34,341,-309,-261,-57,-189,-176,-171,341,-235,-138,-136,-151,-156,-155,-159,-161,-44,-43,454,-223,-278,-277,-276,-275,341,-292,-274,461,462,-287,-187,-188,-244,-256,-245,-243,-247,-251,-246,-242,-249,-254,-240,-239,-248,-255,-250,-252,341,-253,-241,-33,-32,-197,-191,-193,-195,-220,341,341,341,-148,-150,-39,-42,-178,-180,-264,-272,-273,-258,-51,-50,-192,-194,-196,-41,-40,-179,-293,510,-237,-46,-49,341,-181,-288,-281,-48,-47,341,-282,]),'OFFSETOF':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,131,139,144,147,148,151,159,160,166,167,169,171,173,174,176,184,185,186,198,202,203,206,207,208,214,215,217,218,219,220,222,224,225,228,239,250,253,254,255,260,264,265,266,267,268,269,270,271,272,273,274,275,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,361,367,368,369,372,376,378,380,395,422,423,424,430,432,433,435,437,439,442,443,454,457,461,463,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,504,505,509,510,514,517,522,523,524,526,527,528,],[-117,-118,-119,-68,-309,-307,-28,-166,-27,-83,-69,165,-28,-309,165,-308,-167,-309,165,165,-270,165,-268,165,-267,165,-266,165,165,-265,-269,165,165,165,-73,-76,-72,165,-74,165,165,-78,-199,-198,-77,-200,165,-75,-266,165,165,165,-28,-309,-309,-227,-230,-228,-22
4,-225,-229,-231,165,-233,-234,-226,-232,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,-309,-266,165,-218,-217,165,-215,165,165,165,-201,165,-214,165,-80,-202,165,165,165,-266,165,165,-12,165,165,-11,165,165,-28,-309,-266,-213,-216,165,-205,165,-79,-203,-309,-182,165,165,-309,165,-266,165,165,165,165,-204,165,165,165,165,-11,165,-209,-208,-206,-80,165,-309,165,165,165,-210,-207,165,-212,-211,]),'TYPEDEF':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,37,39,40,41,42,44,45,47,48,49,50,51,53,54,60,61,63,64,67,68,69,70,71,73,74,80,83,87,91,92,96,101,104,105,113,120,121,122,126,131,145,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,232,234,235,249,258,286,293,327,331,336,337,339,340,346,349,351,352,353,356,357,362,363,398,418,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[7,7,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,7,-96,-111,-106,-65,-95,-112,7,-221,-109,-113,7,-63,-118,-29,-107,-62,-103,-67,-114,-108,7,-110,7,-105,-119,-68,-100,7,-53,7,-84,7,-61,-133,-307,-132,-153,-152,-90,-92,7,-89,-91,-94,-83,-69,-30,7,-70,7,-85,-135,-308,7,-36,-35,7,7,-73,-76,-72,-74,7,-78,-199,-198,-77,-200,-75,-139,-137,-134,-154,-71,-31,-34,7,7,-218,-217,7,-215,-201,-214,-78,-80,-202,-138,-136,-156,-155,7,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'XOR':([115,131,150,152,153,154,155,156,157,158,161,162,164,170,172,175,177,178,179,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,458,459,460,465,507,511,520,],[-294,-308,-298,-257,-286,-301,-305,-302,-299,-284,-285,-259,-238,-297,-271,-303,-295,-283,307,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-244,307,-245,-243,-247,-251,-246,-242,-249,-254,-240,-239,-248,307,-250,-252,307,-241,-264,-272,-273,-258,-288,-281,-282,]),'AUTO':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,37,39,40,41,42,44,45,47,48,49,50,51,53,54,60,61,63,64,67,68,69,70,71,73,74,80,83,87,91,92,96,101,104,105,113,120,121,122,126,131,145,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,232,234,235,249,258,286,293,327,331,336,337,339,340,346,349,351,352,353,356,357,362,363,398,418,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[24,24,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,24,-96,-111,-106,-65,-95,-112,24,-221,-109,-113,24,-63,-118,-29,-107,-62,-103,-67,-114,-108,24,-110,24,-105,-119,-68,-100,24,-53,24,-84,24,-61,-133,-307,-132,-153,-152,-90,-92,24,-89,-91,-94,-83,-69,-30,24,-70,24,-85,-135,-308,24,-36,-35,24,24,-73,-76,-72,-74,24,-78,-199,-198,-77,-200,-75,-139,-137,-134,-154,-71,-31,-34,24,24,-218,-217,24,-215,-201,-214,-78,-80,-202,-138,-136,-156,-155,24,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'TIMES':([0,1,2,3,4,5,6,7,9,10,11,12,14,15,16,17,19,20,21,23,24,25,26,29,30,31,32,33,34,35,36,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,59,63,65,68,69,70,71,73,74,76,77,78,79,80,81,83,91,92,96,101,102,103,104,106,107,108,113,115,120,121,126,127,131,133,139,142,144,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,164,166,167,169,170,171,172,173,174,175,176,177,178,179,180,181,184,185,186,189,198,202,203,206,207,208,214,215,217,218,219,220,222,224,225,228,229,232,234,235,237,238,239,244,249,250,253,254,255,258,260,264,265,266,267,268,269,270,271,272,273,274,275,276,277,
278,279,280,282,284,285,288,289,290,291,292,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,356,357,359,361,362,363,367,368,369,372,376,378,380,383,384,385,386,389,394,395,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,422,423,424,430,432,433,435,437,439,442,443,454,457,458,459,460,461,463,465,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,504,505,507,509,510,511,514,517,520,522,523,524,526,527,528,],[31,-309,-104,-117,31,-115,-101,-99,-97,-116,-98,-64,-60,-102,-93,-66,-96,-111,-106,-65,-95,-112,31,-221,-109,-309,-113,31,-63,-118,31,-107,-62,-103,-67,-114,-108,-309,-110,-309,-105,-119,-68,-100,-87,-10,-9,-309,31,31,-61,-133,-307,-132,-153,-152,-28,31,-166,-27,-90,31,-92,-89,-91,-94,-83,-86,-88,-69,171,-28,-309,31,-294,-70,228,-135,-130,-308,31,-167,31,-309,228,228,31,-298,-270,-257,-286,-301,-305,-302,-299,-284,228,-268,-285,-259,-238,228,-267,228,-297,-266,-271,228,228,-303,-265,-295,-283,309,-300,-296,-269,228,228,31,329,-73,-76,-72,228,-74,228,228,-78,-199,-198,-77,-200,228,-75,-266,-294,-139,-137,-134,-131,-128,228,-129,-154,228,367,-28,-309,-71,-309,-227,-230,-228,-224,-225,-229,-231,228,-233,-234,-226,-232,-306,228,-263,-304,-280,-279,228,228,228,-257,-262,228,-260,31,-261,228,228,228,228,228,228,228,228,228,228,228,228,228,228,228,228,228,228,228,-309,-266,430,-218,-217,228,-215,228,228,228,-201,228,-214,228,-80,-202,228,228,-138,-136,31,228,-156,-155,-266,228,228,-12,228,228,-11,-278,-277,-276,-275,-274,-287,228,31,309,309,309,309,309,309,309,309,309,309,-240,-239,309,309,309,309,309,-241,469,-28,-309,-266,-213,-216,228,-205,228,-79,-203,-309,-182,-264,-272,-273,228,228,-258,-309,228,-266,228,228,228,228,-204,228,228,228,228,-11,228,-209,-208,-206,-80,-288,228,-309,-281,228,228,-282,228,-210,-207,228,-212,-211,]),'LPAREN':([0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,19,20,21,23,24,25,26,29,30,31,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,59,61,63,65,68,69,70,71,73,74,76,77,78,79,80,81,83,84,85,88,91,92,96,101,102,103,104,105,106,107,108,113,115,120,121,126,127,131,133,139,140,142,143,144,147,148,149,150,151,153,154,155,156,157,158,159,160,161,162,165,166,167,169,170,171,172,173,174,175,176,177,178,180,181,184,185,186,187,188,189,192,193,196,198,199,202,203,206,207,208,209,213,214,215,216,217,218,219,220,222,224,225,228,229,230,232,234,235,237,238,239,244,249,250,251,253,254,255,258,260,264,265,266,267,268,269,270,271,272,273,274,275,276,277,279,280,282,284,285,286,288,291,293,296,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,319,324,325,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,356,357,359,361,362,363,367,368,369,370,371,372,376,378,380,383,384,385,386,389,394,395,397,398,418,419,420,421,422,423,424,427,428,430,432,433,435,437,439,441,442,443,447,448,454,457,459,460,461,463,466,467,469,470,471,472,473,474,475,476,477,479,480,481,482,483,484,485,487,488,495,496,500,501,502,504,505,507,509,510,511,512,513,514,517,520,522,523,524,526,527,528,],[4,-309,-104,-117,4,-115,-101,-99,60,-97,-116,-98,-64,4,-60,-102,-93,-66,-96,-111,-106,-65,-95,-112,4,-221,-109,-309,-113,81,-63,-118,4,-29,-107,-62,-103,-67,-114,-108,-309,-110,-309,-105,-119,-68,-100,-87,-10,-9,-309,60,81,4,-61,-133,-307,-132,-153,-152,-28,-164,-166,-27,-90,81,-92,81,145,-37,-89,-91,-94,-83,-86,-88,-69,-30,173,-28,-309,189,-294,-70,173,-135,-130,-308,81,-167,-165,81,145,-309,173,173,81,-298,-27
0,-286,-301,-305,-302,-299,-284,277,-268,-285,285,287,288,-267,291,-297,-266,-271,173,291,-303,-265,-295,-283,-300,-296,-269,173,173,-36,-35,189,189,327,-45,173,331,-73,-76,-72,173,-74,339,343,288,173,348,-78,-199,-198,-77,-200,173,-75,-266,-294,355,-139,-137,-134,-131,-128,288,-129,-154,288,-38,173,-28,-309,-71,-309,-227,-230,-228,-224,-225,-229,-231,173,-233,-234,-226,-232,-306,173,-304,-280,-279,173,173,-31,173,173,-34,398,288,288,288,288,288,288,288,288,288,288,288,288,288,288,288,288,173,288,288,189,327,331,-309,-266,173,-218,-217,173,-215,173,173,173,-201,173,-214,173,-80,-202,173,173,-138,-136,81,288,-156,-155,-266,173,173,-44,-43,-12,288,173,-11,-278,-277,-276,-275,-274,-287,288,398,398,-33,-32,-197,-191,173,-28,-309,-193,-195,-266,-213,-216,173,-205,173,482,-79,-203,-39,-42,-309,-182,-272,-273,173,288,-309,288,-266,173,173,-51,-50,-192,-194,-196,173,173,-204,173,173,173,-41,-40,173,-11,-46,-49,173,-209,-208,-206,-80,-288,173,-309,-281,-48,-47,173,173,-282,173,-210,-207,173,-212,-211,]),'MINUSMINUS':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,115,121,131,139,144,147,148,150,151,153,154,155,156,157,158,159,160,161,162,166,167,169,170,171,172,173,174,175,176,177,178,180,181,184,185,186,198,202,203,206,207,208,214,215,217,218,219,220,222,224,225,228,229,239,250,253,254,255,260,264,265,266,267,268,269,270,271,272,273,274,275,276,277,279,280,282,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,361,367,368,369,372,376,378,380,383,384,385,386,389,394,395,422,423,424,430,432,433,435,437,439,442,443,454,457,459,460,461,463,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,504,505,507,509,510,511,514,517,520,522,523,524,526,527,528,],[-117,-118,-119,-68,-309,-307,-28,-166,-27,-83,-69,174,-28,-309,-294,174,-308,-167,-309,174,174,-298,-270,-286,-301,-305,-302,-299,-284,174,-268,-285,280,174,-267,174,-297,-266,-271,174,174,-303,-265,-295,-283,-300,-296,-269,174,174,174,-73,-76,-72,174,-74,174,174,-78,-199,-198,-77,-200,174,-75,-266,-294,174,174,174,-28,-309,-309,-227,-230,-228,-224,-225,-229,-231,174,-233,-234,-226,-232,-306,174,-304,-280,-279,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,-309,-266,174,-218,-217,174,-215,174,174,174,-201,174,-214,174,-80,-202,174,174,174,-266,174,174,-12,174,174,-11,-278,-277,-276,-275,-274,-287,174,174,-28,-309,-266,-213,-216,174,-205,174,-79,-203,-309,-182,-272,-273,174,174,-309,174,-266,174,174,174,174,-204,174,174,174,174,-11,174,-209,-208,-206,-80,-288,174,-309,-281,174,174,-282,174,-210,-207,174,-212,-211,]),'ID':([0,1,2,3,4,5,6,7,9,10,11,12,13,14,15,16,17,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,38,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,59,60,63,65,68,69,70,71,73,74,75,76,77,78,79,80,81,83,84,91,92,96,101,102,103,104,106,107,108,113,120,121,126,127,131,133,134,135,139,140,142,144,145,147,148,149,151,159,160,166,167,169,171,173,174,176,184,185,186,189,192,198,200,202,203,204,206,207,208,214,215,217,218,219,220,222,224,225,228,232,234,235,237,238,239,244,248,249,250,253,254,255,258,260,264,265,266,267,268,269,270,271,272,273,274,275,277,281,283,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,319,326,327,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,356,357,359,361,362,363,367,368,369,372,375,376,378,380,395,422,423,424,430,432,433,435,437,439,442,443,454,457,461,462,463,466,467,469,470,471,477,4
79,480,481,482,483,487,488,500,501,502,504,505,508,509,510,514,517,522,523,524,526,527,528,],[37,-309,-104,-117,37,-115,-101,-99,-97,-116,-98,-64,37,-60,-102,-93,-66,-96,-111,-106,-141,-65,-95,-112,37,71,74,-221,-109,-309,-113,37,-63,-118,37,-140,-107,-62,-103,-67,-114,-108,-309,-110,-309,-105,-119,-68,-100,-87,-10,-9,-309,115,37,37,-61,-133,-307,-132,-153,-152,138,-28,-164,-166,-27,-90,37,-92,37,-89,-91,-94,-83,-86,-88,-69,115,-28,-309,37,-70,229,-135,-130,-308,37,138,138,-167,-165,37,-309,115,115,115,37,-270,115,-268,115,-267,115,-266,115,115,-265,-269,115,115,37,37,115,115,-73,-76,335,-72,115,-74,115,229,-78,-199,-198,-77,-200,229,-75,-266,-139,-137,-134,-131,-128,115,-129,138,-154,115,115,-28,-309,-71,-309,-227,-230,-228,-224,-225,-229,-231,115,-233,-234,-226,-232,115,384,386,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,37,-309,115,-266,115,-218,-217,115,-215,115,229,115,-201,115,-214,229,-80,-202,229,115,-138,-136,37,115,-156,-155,-266,115,115,-12,115,115,115,-11,115,115,-28,-309,-266,-213,-216,115,-205,229,-79,-203,-309,-182,115,115,115,-309,115,-266,115,115,115,229,-204,229,115,229,115,-11,115,-209,-208,-206,-80,115,115,-309,115,229,229,-210,-207,229,-212,-211,]),'IF':([53,70,101,104,121,131,202,203,206,208,215,217,218,219,220,222,224,225,336,337,340,342,346,349,351,352,353,354,432,433,437,439,442,443,479,480,481,483,501,502,504,505,517,522,523,524,526,527,528,],[-68,-307,-83,-69,230,-308,-73,-76,-72,-74,230,-78,-199,-198,-77,-200,230,-75,-218,-217,-215,230,-201,-214,230,-80,-202,230,-213,-216,-205,230,-79,-203,230,-204,230,230,-209,-208,-206,-80,230,230,-210,-207,230,-212,-211,]),'STRING_LITERAL':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,131,139,144,147,148,151,159,160,161,166,167,169,171,173,174,175,176,184,185,186,198,202,203,206,207,208,214,215,217,218,219,220,222,224,225,228,239,250,253,254,255,260,264,265,266,267,268,269,270,271,272,273,274,275,277,279,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,361,367,368,369,372,376,378,380,395,422,423,424,430,432,433,435,437,439,442,443,454,457,461,463,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,504,505,509,510,514,517,522,523,524,526,527,528,],[-117,-118,-119,-68,-309,-307,-28,-166,-27,-83,-69,175,-28,-309,175,-308,-167,-309,175,175,-270,175,-268,279,175,-267,175,-266,175,175,-303,-265,-269,175,175,175,-73,-76,-72,175,-74,175,175,-78,-199,-198,-77,-200,175,-75,-266,175,175,175,-28,-309,-309,-227,-230,-228,-224,-225,-229,-231,175,-233,-234,-226,-232,175,-304,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,-309,-266,175,-218,-217,175,-215,175,175,175,-201,175,-214,175,-80,-202,175,175,175,-266,175,175,-12,175,175,-11,175,175,-28,-309,-266,-213,-216,175,-205,175,-79,-203,-309,-182,175,175,-309,175,-266,175,175,175,175,-204,175,175,175,175,-11,175,-209,-208,-206,-80,175,-309,175,175,175,-210,-207,175,-212,-211,]),'FLOAT':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,131,132,133,139,145,173,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,231,232,233,234,235,236,237,238,244,249,258,277,286,287,288,291,293,296,327,331,336,337,339,340,346,349,351,352,353,356,357,360,362,363,398,41
8,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[39,-309,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,39,-96,-111,-106,-65,-95,-112,39,-221,-109,-113,39,-63,-118,39,-29,-107,-62,-103,-67,-114,-108,-309,-110,-309,-105,-119,-68,-100,-87,-10,-9,39,-53,39,-84,39,39,-61,-133,-307,-132,39,-153,-152,-166,-90,-92,39,-89,-91,-94,-83,-86,-88,-69,-30,39,39,-70,39,-85,39,39,39,-135,-130,-145,-146,-142,-308,39,39,-167,39,39,-36,-35,39,39,-73,-76,-72,-74,39,-78,-199,-198,-77,-200,-75,39,-139,39,-137,-134,-143,-131,-128,-129,-154,-71,39,-31,39,39,39,-34,39,39,39,-218,-217,39,-215,-201,-214,-78,-80,-202,-138,-136,-144,-156,-155,39,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'XOREQUAL':([115,131,150,152,153,154,155,156,157,158,161,162,170,172,175,177,178,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,458,459,460,465,507,511,520,],[-294,-308,-298,268,-286,-301,-305,-302,-299,-284,-285,-259,-297,-271,-303,-295,-283,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-264,-272,-273,-258,-288,-281,-282,]),'LSHIFTEQUAL':([115,131,150,152,153,154,155,156,157,158,161,162,170,172,175,177,178,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,458,459,460,465,507,511,520,],[-294,-308,-298,270,-286,-301,-305,-302,-299,-284,-285,-259,-297,-271,-303,-295,-283,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-264,-272,-273,-258,-288,-281,-282,]),'RBRACKET':([3,35,51,59,78,79,106,107,115,131,139,144,150,152,153,154,155,156,157,158,161,162,163,164,168,170,171,172,175,177,178,179,180,181,182,183,198,227,253,254,276,278,279,280,282,289,290,292,297,317,318,326,328,329,330,345,366,367,381,383,384,385,386,387,389,394,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,422,423,429,430,436,449,450,453,458,459,460,465,468,469,494,497,498,507,511,519,520,],[-117,-118,-119,-309,-166,-27,-309,-28,-294,-308,-167,-309,-298,-257,-286,-301,-305,-302,-299,-284,-285,-259,286,-238,-4,-297,293,-271,-303,-295,-283,-236,-300,-296,-3,-222,-309,-219,-309,-28,-306,-263,-304,-280,-279,-257,-262,-260,-261,418,419,-309,427,428,-309,-235,447,448,-223,-278,-277,-276,-275,459,-274,-287,-244,-256,-245,-243,-247,-251,-246,-242,-249,-254,-240,-239,-248,-255,-250,-252,-253,-241,-309,-28,474,475,-220,484,485,486,-264,-272,-273,-258,495,496,-237,512,513,-288,-281,525,-282,]),}
_lr_action = {}
for _k, _v in _lr_action_items.items():
@@ -16,7 +16,7 @@
_lr_action[_x][_k] = _y
del _lr_action_items
-_lr_goto_items = {'expression_statement':([121,212,221,338,348,433,472,474,476,509,514,518,],[199,199,199,199,199,199,199,199,199,199,199,199,]),'struct_or_union_specifier':([0,18,26,36,60,65,67,72,87,117,121,123,124,128,129,142,170,186,198,212,228,229,273,283,284,287,323,327,335,392,],[5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,]),'init_declarator_list':([33,63,],[82,82,]),'init_declarator_list_opt':([33,63,],[90,90,]),'iteration_statement':([121,212,221,338,348,433,472,474,476,509,514,518,],[200,200,200,200,200,200,200,200,200,200,200,200,]),'unified_string_literal':([106,121,144,145,156,163,166,170,171,182,183,195,204,211,212,221,235,246,249,267,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,326,335,337,338,339,344,348,349,355,362,363,370,372,389,416,429,433,454,456,460,463,464,470,472,474,475,476,480,493,501,506,509,514,518,],[158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,]),'assignment_expression_opt':([106,195,249,326,416,],[160,324,360,423,461,]),'brace_open':([27,28,66,69,71,73,74,121,143,144,145,212,221,338,348,372,389,433,451,456,457,472,474,476,480,509,514,518,],[72,75,121,123,124,131,132,121,121,256,256,121,121,121,121,256,459,121,459,459,459,121,121,121,256,121,121,121,]),'enumerator':([75,131,132,244,],[133,133,133,358,]),'typeid_noparen_declarator':([113,],[194,]),'type_qualifier_list_opt':([31,59,108,141,251,322,418,],[77,106,183,249,363,416,464,]),'declaration_specifiers_no_type_opt':([1,47,49,],[55,102,103,]),'expression_opt':([121,212,221,335,338,348,429,433,470,472,474,476,493,506,509,514,518,],[202,202,202,428,202,202,471,202,492,202,202,202,507,513,202,202,202,]),'designation':([256,447,459,502,],[366,366,366,366,]),'parameter_list':([60,142,186,323,327,392,],[116,116,116,116,116,116,]),'labeled_statement':([121,212,221,338,348,433,472,474,476,509,514,518,],[203,203,203,203,203,203,203,203,203,203,203,203,]),'abstract_declarator':([113,186,292,392,],[188,319,188,319,]),'translation_unit':([0,],[26,]),'init_declarator':([33,63,139,146,],[94,94,248,259,]),'direct_abstract_declarator':([113,186,189,292,315,391,392,],[196,196,321,196,321,321,196,]),'designator_list':([256,447,459,502,],[373,373,373,373,]),'identifier':([60,106,121,142,144,145,156,163,166,170,171,182,183,195,197,204,211,212,221,235,246,249,267,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,323,326,335,337,338,339,344,348,349,355,362,363,369,370,372,389,416,429,433,454,455,456,460,463,464,470,472,474,475,476,480,493,500,501,506,509,514,518,],[118,175,175,118,175,175,175,175,175,175,175,175,175,175,328,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,118,175,175,175,175,175,175,175,175,175,175,175,445,175,175,175,175,175,175,175,485,175,175,175,175,175,175,175,175,175,175,175,510,175,175,175,175,175,]),'offsetof_member_designator':([455,],[484,]),'unary_expression':([106,121,144,145,156,163,166,170,171,182,183,195,204,211,212,221,235,246,249,267,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,326,335,337,338,339,344,348,349,355,362,363,370,372,389,416,429,433,454,456,460,463,464,470,472,474,475,476,480,493,501
,506,509,514,518,],[149,149,149,149,274,285,288,149,293,149,149,149,149,285,149,149,285,285,149,149,149,149,149,149,149,285,285,285,285,285,285,285,285,285,285,285,285,285,285,285,285,149,285,285,149,149,149,149,149,149,149,149,285,149,149,285,149,285,149,149,149,149,285,285,149,149,149,149,149,149,149,149,149,149,149,149,149,149,]),'abstract_declarator_opt':([113,292,],[187,390,]),'initializer':([144,145,372,480,],[255,258,448,498,]),'direct_id_declarator':([0,4,13,26,33,36,63,65,81,84,113,130,139,146,186,189,315,353,],[8,8,61,8,8,8,8,8,8,61,8,8,8,8,8,61,61,8,]),'struct_declaration_list':([72,123,124,],[129,228,229,]),'pp_directive':([0,26,],[12,12,]),'declaration_list':([18,87,],[67,67,]),'id_init_declarator':([36,65,],[95,95,]),'type_specifier':([0,18,26,36,60,65,67,72,87,117,121,123,124,128,129,142,170,186,198,212,228,229,273,283,284,287,323,327,335,392,],[16,16,16,96,16,96,16,125,16,96,16,125,125,230,125,16,125,16,16,16,125,125,125,125,125,125,16,16,16,16,]),'compound_statement':([66,121,143,212,221,338,348,433,472,474,476,509,514,518,],[120,205,254,205,205,205,205,205,205,205,205,205,205,205,]),'pointer':([0,4,26,33,36,63,65,77,81,113,130,139,146,186,292,353,392,],[13,13,13,84,13,84,13,137,84,189,84,84,84,315,391,84,391,]),'typeid_declarator':([33,63,81,130,139,146,353,],[86,86,138,86,86,86,86,]),'id_init_declarator_list':([36,65,],[98,98,]),'declarator':([33,63,130,139,146,353,],[89,89,241,89,89,241,]),'argument_expression_list':([281,],[384,]),'struct_declarator_list_opt':([130,],[238,]),'typedef_name':([0,18,26,36,60,65,67,72,87,117,121,123,124,128,129,142,170,186,198,212,228,229,273,283,284,287,323,327,335,392,],[32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,]),'parameter_type_list_opt':([186,327,392,],[318,425,318,]),'struct_declarator':([130,353,],[239,438,]),'type_qualifier':([0,1,18,26,31,33,47,49,59,60,63,67,72,76,87,107,108,113,121,123,124,128,129,130,141,142,170,186,198,212,228,229,250,251,273,283,284,287,292,322,323,327,335,392,417,418,],[47,47,47,47,78,91,47,47,78,47,91,47,78,136,47,136,78,91,47,78,78,136,78,240,78,47,78,47,47,47,78,78,136,78,78,78,78,78,240,78,47,47,47,47,136,78,]),'assignment_operator':([149,],[267,]),'expression':([121,170,204,212,221,273,280,284,287,310,335,338,339,344,348,349,429,433,470,472,474,475,476,493,501,506,509,514,518,],[208,290,334,208,208,290,381,290,290,409,208,208,432,434,208,437,208,208,208,208,208,496,208,208,511,208,208,208,208,]),'storage_class_specifier':([0,1,18,26,33,47,49,60,63,67,87,113,121,142,186,198,212,323,327,335,392,],[1,1,1,1,80,1,1,1,80,1,1,80,1,1,1,1,1,1,1,1,1,]),'unified_wstring_literal':([106,121,144,145,156,163,166,170,171,182,183,195,204,211,212,221,235,246,249,267,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,326,335,337,338,339,344,348,349,355,362,363,370,372,389,416,429,433,454,456,460,463,464,470,472,474,475,476,480,493,501,506,509,514,518,],[150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,]),'translation_unit_or_empty':([0,],[43,]),'initializer_list_opt':([256,],[367,]),'brace_close':([129,134,223,228,229,242,243,367,447,486,502,],[231,245,347,350,351,356,357,444,482,503,512,]),'direct_typeid_declarator':([33,63,81,84,130,139,146,353,],[85,85,85,140,85,85,85,8
5,]),'external_declaration':([0,26,],[14,68,]),'type_name':([170,273,283,284,287,],[291,376,385,386,387,]),'block_item_list':([121,],[212,]),'pppragma_directive':([0,26,121,212,221,338,348,433,472,474,476,509,514,518,],[23,23,214,214,214,214,214,214,214,214,214,214,214,214,]),'statement':([121,212,221,338,348,433,472,474,476,509,514,518,],[215,215,346,431,436,473,494,495,497,516,519,520,]),'cast_expression':([106,121,144,145,163,170,182,183,195,204,211,212,221,235,246,249,267,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,326,335,337,338,339,344,348,349,355,362,363,370,372,389,416,429,433,454,456,460,463,464,470,472,474,475,476,480,493,501,506,509,514,518,],[161,161,161,161,286,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,458,161,161,161,161,458,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,]),'struct_declarator_list':([130,],[236,]),'empty':([0,1,18,31,33,36,47,49,59,60,63,65,87,106,108,113,121,130,141,142,186,195,212,221,249,251,256,292,322,323,326,327,335,338,348,392,416,418,429,433,447,459,470,472,474,476,493,502,506,509,514,518,],[52,57,62,79,93,100,57,57,79,110,93,100,62,179,79,192,220,237,79,110,316,179,343,343,179,79,374,192,79,110,179,316,343,343,343,316,179,79,343,343,481,481,343,343,343,343,343,481,343,343,343,343,]),'parameter_declaration':([60,142,186,198,323,327,392,],[112,112,112,330,112,112,112,]),'primary_expression':([106,121,144,145,156,163,166,170,171,182,183,195,204,211,212,221,235,246,249,267,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,326,335,337,338,339,344,348,349,355,362,363,370,372,389,416,429,433,454,456,460,463,464,470,472,474,475,476,480,493,501,506,509,514,518,],[169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,]),'declaration':([0,18,26,67,87,121,212,335,],[34,64,34,122,64,216,216,429,]),'declaration_specifiers_no_type':([0,1,18,26,47,49,60,67,87,121,142,186,198,212,323,327,335,392,],[36,56,65,36,56,56,117,65,65,65,117,117,117,65,117,117,65,117,]),'jump_statement':([121,212,221,338,348,433,472,474,476,509,514,518,],[217,217,217,217,217,217,217,217,217,217,217,217,]),'enumerator_list':([75,131,132,],[134,242,243,]),'block_item':([121,212,],[219,342,]),'constant_expression':([211,235,246,355,370,],[340,352,359,439,446,]),'identifier_list_opt':([60,142,323,],[109,252,419,]),'constant':([106,121,144,145,156,163,166,170,171,182,183,195,204,211,212,221,235,246,249,267,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,326,335,337,338,339,344,348,349,355,362,363,370,372,389,416,429,433,454,456,460,463,464,470,472,474,475,476,480,493,501,506,509,514,518,],[155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,]),'type_specifier_no_typeid':([0,18,26,33,36,60,63,65,67,72,87,113,117,121,123,124,128,129,130,142,170,186,198,212,228,229,273,283,284,287,292,
323,327,335,392,],[10,10,10,83,10,10,83,10,10,10,10,83,10,10,10,10,10,10,234,10,10,10,10,10,10,10,10,10,10,10,234,10,10,10,10,]),'struct_declaration':([72,123,124,129,228,229,],[127,127,127,232,232,232,]),'direct_typeid_noparen_declarator':([113,189,],[190,320,]),'id_declarator':([0,4,26,33,36,63,65,81,113,130,139,146,186,353,],[18,58,18,87,97,119,97,58,191,119,119,119,58,119,]),'selection_statement':([121,212,221,338,348,433,472,474,476,509,514,518,],[222,222,222,222,222,222,222,222,222,222,222,222,]),'postfix_expression':([106,121,144,145,156,163,166,170,171,182,183,195,204,211,212,221,235,246,249,267,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,326,335,337,338,339,344,348,349,355,362,363,370,372,389,416,429,433,454,456,460,463,464,470,472,474,475,476,480,493,501,506,509,514,518,],[159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,]),'initializer_list':([256,459,],[371,486,]),'unary_operator':([106,121,144,145,156,163,166,170,171,182,183,195,204,211,212,221,235,246,249,267,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,326,335,337,338,339,344,348,349,355,362,363,370,372,389,416,429,433,454,456,460,463,464,470,472,474,475,476,480,493,501,506,509,514,518,],[163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,]),'struct_or_union':([0,18,26,36,60,65,67,72,87,117,121,123,124,128,129,142,170,186,198,212,228,229,273,283,284,287,323,327,335,392,],[27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,]),'block_item_list_opt':([121,],[223,]),'assignment_expression':([106,121,144,145,170,182,183,195,204,212,221,249,267,273,280,281,284,287,310,326,335,337,338,339,344,348,349,362,363,372,416,429,433,454,463,464,470,472,474,475,476,480,493,501,506,509,514,518,],[165,224,257,257,224,313,314,165,224,224,224,165,375,224,224,382,224,224,224,165,224,430,224,224,224,224,224,442,443,257,165,224,224,483,490,491,224,224,224,224,224,257,224,224,224,224,224,224,]),'designation_opt':([256,447,459,502,],[372,480,372,480,]),'parameter_type_list':([60,142,186,323,327,392,],[111,253,317,420,317,317,]),'type_qualifier_list':([31,59,72,108,123,124,129,141,170,228,229,251,273,283,284,287,322,418,],[76,107,128,76,128,128,128,250,128,128,128,76,128,128,128,128,417,76,]),'designator':([256,373,447,459,502,],[368,449,368,368,368,]),'id_init_declarator_list_opt':([36,65,],[99,99,]),'declaration_specifiers':([0,18,26,60,67,87,121,142,186,198,212,323,327,335,392,],[33,63,33,113,63,63,63,113,113,113,63,113,113,63,113,]),'identifier_list':([60,142,323,],[114,114,114,]),'declaration_list_opt':([18,87,],[66,143,]),'function_definition':([0,26,],[40,40,]),'binary_expression':([106,121,144,145,170,182,183,195,204,211,212,221,235,246,249,267,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,326,335,337,338,339,344,348,349,355,362,363,370,372,416,429,433,454,460,463,464,470,472,474,475,476,480,493,501,506,509,514,518,],[176,176,176,176,176,176,
176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,176,410,411,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,]),'enum_specifier':([0,18,26,36,60,65,67,72,87,117,121,123,124,128,129,142,170,186,198,212,228,229,273,283,284,287,323,327,335,392,],[44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,]),'decl_body':([0,18,26,67,87,121,212,335,],[46,46,46,46,46,46,46,46,]),'function_specifier':([0,1,18,26,33,47,49,60,63,67,87,113,121,142,186,198,212,323,327,335,392,],[49,49,49,49,92,49,49,49,92,49,49,92,49,49,49,49,49,49,49,49,49,]),'specifier_qualifier_list':([72,123,124,129,170,228,229,273,283,284,287,],[130,130,130,130,292,130,130,292,292,292,292,]),'conditional_expression':([106,121,144,145,170,182,183,195,204,211,212,221,235,246,249,267,273,280,281,284,287,310,326,335,337,338,339,344,348,349,355,362,363,370,372,416,429,433,454,460,463,464,470,472,474,475,476,480,493,501,506,509,514,518,],[180,180,180,180,180,180,180,180,180,341,180,180,341,341,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,341,180,180,341,180,180,180,180,180,487,180,180,180,180,180,180,180,180,180,180,180,180,180,180,]),}
+_lr_goto_items = {'expression_statement':([121,215,224,342,351,354,439,479,481,483,517,522,526,],[202,202,202,202,202,202,202,202,202,202,202,202,202,]),'struct_or_union_specifier':([0,18,26,36,60,65,67,72,87,117,121,123,124,125,132,145,173,189,201,215,231,233,277,287,288,291,327,331,339,398,],[5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,]),'init_declarator_list':([33,63,],[82,82,]),'init_declarator_list_opt':([33,63,],[90,90,]),'iteration_statement':([121,215,224,342,351,354,439,479,481,483,517,522,526,],[203,203,203,203,203,203,203,203,203,203,203,203,203,]),'unified_string_literal':([106,121,147,148,159,166,169,173,174,185,186,198,207,214,215,224,239,250,253,271,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,330,339,341,342,343,348,351,354,355,361,368,369,376,378,395,422,435,439,461,463,467,470,471,477,479,481,482,483,487,500,509,514,517,522,526,],[161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,]),'assignment_expression_opt':([106,198,253,330,422,],[163,328,366,429,468,]),'brace_open':([27,28,66,69,71,73,74,121,146,147,148,215,224,342,351,354,378,395,439,458,463,464,479,481,483,487,517,522,526,],[72,75,121,123,124,134,135,121,121,260,260,121,121,121,121,121,260,466,121,466,466,466,121,121,121,260,121,121,121,]),'enumerator':([75,134,135,248,],[136,136,136,364,]),'typeid_noparen_declarator':([113,],[197,]),'type_qualifier_list_opt':([31,59,108,144,255,326,424,],[77,106,186,253,369,422,471,]),'declaration_specifiers_no_type_opt':([1,47,49,],[55,102,103,]),'expression_opt':([121,215,224,339,342,351,354,435,439,477,479,481,483,500,514,517,522,526,],[205,205,205,434,205,205,205,478,205,499,205,205,205,515,521,205,205,205,]),'designation':([260,454,466,510,],[372,372,372,372,]),'parameter_list':([60,145,189,327,331,398,],[116,116,116,116,116,116,]),'labeled_statement':([121,215,224,342,351,354,439,479,481,483,517,522,526,],[206,206,206,206,206,206,206,206,206,206,206,206,206,]),'abstract_declarator':([113,189,296,398,],[191,323,191,323,]),'translation_unit':([0,],[26,]),'init_declarator':([33,63,142,149,],[94,94,252,263,]),'direct_abstract_declarator':([113,189,192,296,319,397,398,],[199,199,325,199,325,325,199,]),'designator_list':([260,454,466,510,],[379,379,379,379,]),'identifier':([60,106,121,145,147,148,159,166,169,173,174,185,186,198,200,207,214,215,224,239,250,253,271,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,327,330,339,341,342,343,348,351,354,355,361,368,369,375,376,378,395,422,435,439,461,462,463,467,470,471,477,479,481,482,483,487,500,508,509,514,517,522,526,],[118,178,178,118,178,178,178,178,178,178,178,178,178,178,332,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,118,178,178,178,178,178,178,178,178,178,178,178,178,452,178,178,178,178,178,178,178,492,178,178,178,178,178,178,178,178,178,178,178,518,178,178,178,178,178,]),'offsetof_member_designator':([462,],[491,]),'unary_expression':([106,121,147,148,159,166,169,173,174,185,186,198,207,214,215,224,239,250,253,271,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,330,339,341,342,343,348,351,354,355,361,368,369,376,378,395,422
,435,439,461,463,467,470,471,477,479,481,482,483,487,500,509,514,517,522,526,],[152,152,152,152,278,289,292,152,297,152,152,152,152,289,152,152,289,289,152,152,152,152,152,152,152,289,289,289,289,289,289,289,289,289,289,289,289,289,289,289,289,152,289,289,152,152,152,152,152,152,152,152,152,289,152,152,289,152,289,152,152,152,152,289,289,152,152,152,152,152,152,152,152,152,152,152,152,152,152,]),'abstract_declarator_opt':([113,296,],[190,396,]),'initializer':([147,148,378,487,],[259,262,455,506,]),'direct_id_declarator':([0,4,13,26,33,36,63,65,81,84,113,133,142,149,189,192,319,359,],[8,8,61,8,8,8,8,8,8,61,8,8,8,8,8,61,61,8,]),'struct_declaration_list':([72,123,124,],[125,231,233,]),'pp_directive':([0,26,],[12,12,]),'declaration_list':([18,87,],[67,67,]),'id_init_declarator':([36,65,],[95,95,]),'type_specifier':([0,18,26,36,60,65,67,72,87,117,121,123,124,125,132,145,173,189,201,215,231,233,277,287,288,291,327,331,339,398,],[16,16,16,96,16,96,16,127,16,96,16,127,127,127,237,16,127,16,16,16,127,127,127,127,127,127,16,16,16,16,]),'compound_statement':([66,121,146,215,224,342,351,354,439,479,481,483,517,522,526,],[120,208,258,208,208,208,208,208,208,208,208,208,208,208,208,]),'pointer':([0,4,26,33,36,63,65,77,81,113,133,142,149,189,296,359,398,],[13,13,13,84,13,84,13,140,84,192,84,84,84,319,397,84,397,]),'typeid_declarator':([33,63,81,133,142,149,359,],[86,86,141,86,86,86,86,]),'id_init_declarator_list':([36,65,],[98,98,]),'declarator':([33,63,133,142,149,359,],[89,89,245,89,89,245,]),'argument_expression_list':([285,],[390,]),'struct_declarator_list_opt':([133,],[242,]),'typedef_name':([0,18,26,36,60,65,67,72,87,117,121,123,124,125,132,145,173,189,201,215,231,233,277,287,288,291,327,331,339,398,],[32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,]),'parameter_type_list_opt':([189,331,398,],[322,431,322,]),'struct_declarator':([133,359,],[243,445,]),'type_qualifier':([0,1,18,26,31,33,47,49,59,60,63,67,72,76,87,107,108,113,121,123,124,125,132,133,144,145,173,189,201,215,231,233,254,255,277,287,288,291,296,326,327,331,339,398,423,424,],[47,47,47,47,78,91,47,47,78,47,91,47,78,139,47,139,78,91,47,78,78,78,139,244,78,47,78,47,47,47,78,78,139,78,78,78,78,78,244,78,47,47,47,47,139,78,]),'assignment_operator':([152,],[271,]),'expression':([121,173,207,215,224,277,284,288,291,314,339,342,343,348,351,354,355,435,439,477,479,481,482,483,500,509,514,517,522,526,],[211,294,338,211,211,294,387,294,294,415,211,211,438,440,211,211,444,211,211,211,211,211,503,211,211,519,211,211,211,211,]),'storage_class_specifier':([0,1,18,26,33,47,49,60,63,67,87,113,121,145,189,201,215,327,331,339,398,],[1,1,1,1,80,1,1,1,80,1,1,80,1,1,1,1,1,1,1,1,1,]),'unified_wstring_literal':([106,121,147,148,159,166,169,173,174,185,186,198,207,214,215,224,239,250,253,271,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,330,339,341,342,343,348,351,354,355,361,368,369,376,378,395,422,435,439,461,463,467,470,471,477,479,481,482,483,487,500,509,514,517,522,526,],[153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,]),'translation_unit_or_empty':([0,],[43,]),'initializer_list_opt':([260,],[373,]),'brace_close':([72,123,124,125,137,226,231,233,246,247,373,454,493,510,],[126,232,234,235,249,353,356,357
,362,363,451,489,511,520,]),'direct_typeid_declarator':([33,63,81,84,133,142,149,359,],[85,85,85,143,85,85,85,85,]),'external_declaration':([0,26,],[14,68,]),'pragmacomp_or_statement':([224,342,354,439,479,481,483,517,522,526,],[350,437,443,480,501,502,504,524,527,528,]),'type_name':([173,277,287,288,291,],[295,382,391,392,393,]),'block_item_list':([121,],[215,]),'pppragma_directive':([0,26,72,121,123,124,125,215,224,231,233,342,351,354,439,479,481,483,517,522,526,],[23,23,129,217,129,129,129,217,351,129,129,351,217,351,351,351,351,351,351,351,351,]),'statement':([121,215,224,342,351,354,439,479,481,483,517,522,526,],[218,218,352,352,442,352,352,352,352,505,352,352,352,]),'cast_expression':([106,121,147,148,166,173,185,186,198,207,214,215,224,239,250,253,271,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,330,339,341,342,343,348,351,354,355,361,368,369,376,378,395,422,435,439,461,463,467,470,471,477,479,481,482,483,487,500,509,514,517,522,526,],[164,164,164,164,290,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,465,164,164,164,164,465,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,]),'struct_declarator_list':([133,],[240,]),'empty':([0,1,18,31,33,36,47,49,59,60,63,65,87,106,108,113,121,133,144,145,189,198,215,224,253,255,260,296,326,327,330,331,339,342,351,354,398,422,424,435,439,454,466,477,479,481,483,500,510,514,517,522,526,],[52,57,62,79,93,100,57,57,79,110,93,100,62,182,79,195,223,241,79,110,320,182,347,347,182,79,380,195,79,110,182,320,347,347,347,347,320,182,79,347,347,488,488,347,347,347,347,347,488,347,347,347,347,]),'parameter_declaration':([60,145,189,201,327,331,398,],[112,112,112,334,112,112,112,]),'primary_expression':([106,121,147,148,159,166,169,173,174,185,186,198,207,214,215,224,239,250,253,271,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,330,339,341,342,343,348,351,354,355,361,368,369,376,378,395,422,435,439,461,463,467,470,471,477,479,481,482,483,487,500,509,514,517,522,526,],[172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,]),'declaration':([0,18,26,67,87,121,215,339,],[34,64,34,122,64,219,219,435,]),'declaration_specifiers_no_type':([0,1,18,26,47,49,60,67,87,121,145,189,201,215,327,331,339,398,],[36,56,65,36,56,56,117,65,65,65,117,117,117,65,117,117,65,117,]),'jump_statement':([121,215,224,342,351,354,439,479,481,483,517,522,526,],[220,220,220,220,220,220,220,220,220,220,220,220,220,]),'enumerator_list':([75,134,135,],[137,246,247,]),'block_item':([121,215,],[222,346,]),'constant_expression':([214,239,250,361,376,],[344,358,365,446,453,]),'identifier_list_opt':([60,145,327,],[109,256,425,]),'constant':([106,121,147,148,159,166,169,173,174,185,186,198,207,214,215,224,239,250,253,271,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,330,339,341,342,343,348,351,354,355,361,368,369,376,378,395,422,435,439,461,463,467,470,471,477,479,481,482,483,487,500,509,514,517,522,526,],[158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,
158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,]),'type_specifier_no_typeid':([0,18,26,33,36,60,63,65,67,72,87,113,117,121,123,124,125,132,133,145,173,189,201,215,231,233,277,287,288,291,296,327,331,339,398,],[10,10,10,83,10,10,83,10,10,10,10,83,10,10,10,10,10,10,238,10,10,10,10,10,10,10,10,10,10,10,238,10,10,10,10,]),'struct_declaration':([72,123,124,125,231,233,],[130,130,130,236,236,236,]),'direct_typeid_noparen_declarator':([113,192,],[193,324,]),'id_declarator':([0,4,26,33,36,63,65,81,113,133,142,149,189,359,],[18,58,18,87,97,119,97,58,194,119,119,119,58,119,]),'selection_statement':([121,215,224,342,351,354,439,479,481,483,517,522,526,],[225,225,225,225,225,225,225,225,225,225,225,225,225,]),'postfix_expression':([106,121,147,148,159,166,169,173,174,185,186,198,207,214,215,224,239,250,253,271,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,330,339,341,342,343,348,351,354,355,361,368,369,376,378,395,422,435,439,461,463,467,470,471,477,479,481,482,483,487,500,509,514,517,522,526,],[162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,]),'initializer_list':([260,466,],[377,493,]),'unary_operator':([106,121,147,148,159,166,169,173,174,185,186,198,207,214,215,224,239,250,253,271,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,330,339,341,342,343,348,351,354,355,361,368,369,376,378,395,422,435,439,461,463,467,470,471,477,479,481,482,483,487,500,509,514,517,522,526,],[166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,]),'struct_or_union':([0,18,26,36,60,65,67,72,87,117,121,123,124,125,132,145,173,189,201,215,231,233,277,287,288,291,327,331,339,398,],[27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,]),'block_item_list_opt':([121,],[226,]),'assignment_expression':([106,121,147,148,173,185,186,198,207,215,224,253,271,277,284,285,288,291,314,330,339,341,342,343,348,351,354,355,368,369,378,422,435,439,461,470,471,477,479,481,482,483,487,500,509,514,517,522,526,],[168,227,261,261,227,317,318,168,227,227,227,168,381,227,227,388,227,227,227,168,227,436,227,227,227,227,227,227,449,450,261,168,227,227,490,497,498,227,227,227,227,227,261,227,227,227,227,227,227,]),'designation_opt':([260,454,466,510,],[378,487,378,487,]),'parameter_type_list':([60,145,189,327,331,398,],[111,257,321,426,321,321,]),'type_qualifier_list':([31,59,72,108,123,124,125,144,173,231,233,255,277,287,288,291,326,424,],[76,107,132,76,132,132,132,254,132,132,132,76,132,132,132,132,423,76,]),'designator':([260,379,454,466,510,],[374,456,374,374,374,]),'id_init_declarator_list_opt':([36,65,],[99,99,]),'declaration_specifiers':([0,18,26,60,67,87,121,145,189,201,215,327,331,339,398,],[33,63,33,113,63,63,63,113,113,113,63,113,113,63,113,]),'identifier_list':([60,145,327,],[114,114,114,]),'declaration_list_opt':([18,87,],[66,146,]),'function_definiti
on':([0,26,],[40,40,]),'binary_expression':([106,121,147,148,173,185,186,198,207,214,215,224,239,250,253,271,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,330,339,341,342,343,348,351,354,355,361,368,369,376,378,422,435,439,461,467,470,471,477,479,481,482,483,487,500,509,514,517,522,526,],[179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,179,416,417,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,]),'enum_specifier':([0,18,26,36,60,65,67,72,87,117,121,123,124,125,132,145,173,189,201,215,231,233,277,287,288,291,327,331,339,398,],[44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,]),'decl_body':([0,18,26,67,87,121,215,339,],[46,46,46,46,46,46,46,46,]),'function_specifier':([0,1,18,26,33,47,49,60,63,67,87,113,121,145,189,201,215,327,331,339,398,],[49,49,49,49,92,49,49,49,92,49,49,92,49,49,49,49,49,49,49,49,49,]),'specifier_qualifier_list':([72,123,124,125,173,231,233,277,287,288,291,],[133,133,133,133,296,133,133,296,296,296,296,]),'conditional_expression':([106,121,147,148,173,185,186,198,207,214,215,224,239,250,253,271,277,284,285,288,291,314,330,339,341,342,343,348,351,354,355,361,368,369,376,378,422,435,439,461,467,470,471,477,479,481,482,483,487,500,509,514,517,522,526,],[183,183,183,183,183,183,183,183,183,345,183,183,345,345,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,345,183,183,345,183,183,183,183,183,494,183,183,183,183,183,183,183,183,183,183,183,183,183,183,]),}
_lr_goto = {}
for _k, _v in _lr_goto_items.items():
@@ -26,63 +26,63 @@
del _lr_goto_items
_lr_productions = [
("S' -> translation_unit_or_empty","S'",1,None,None,None),
- ('abstract_declarator_opt -> empty','abstract_declarator_opt',1,'p_abstract_declarator_opt','plyparser.py',42),
- ('abstract_declarator_opt -> abstract_declarator','abstract_declarator_opt',1,'p_abstract_declarator_opt','plyparser.py',43),
- ('assignment_expression_opt -> empty','assignment_expression_opt',1,'p_assignment_expression_opt','plyparser.py',42),
- ('assignment_expression_opt -> assignment_expression','assignment_expression_opt',1,'p_assignment_expression_opt','plyparser.py',43),
- ('block_item_list_opt -> empty','block_item_list_opt',1,'p_block_item_list_opt','plyparser.py',42),
- ('block_item_list_opt -> block_item_list','block_item_list_opt',1,'p_block_item_list_opt','plyparser.py',43),
- ('declaration_list_opt -> empty','declaration_list_opt',1,'p_declaration_list_opt','plyparser.py',42),
- ('declaration_list_opt -> declaration_list','declaration_list_opt',1,'p_declaration_list_opt','plyparser.py',43),
- ('declaration_specifiers_no_type_opt -> empty','declaration_specifiers_no_type_opt',1,'p_declaration_specifiers_no_type_opt','plyparser.py',42),
- ('declaration_specifiers_no_type_opt -> declaration_specifiers_no_type','declaration_specifiers_no_type_opt',1,'p_declaration_specifiers_no_type_opt','plyparser.py',43),
- ('designation_opt -> empty','designation_opt',1,'p_designation_opt','plyparser.py',42),
- ('designation_opt -> designation','designation_opt',1,'p_designation_opt','plyparser.py',43),
- ('expression_opt -> empty','expression_opt',1,'p_expression_opt','plyparser.py',42),
- ('expression_opt -> expression','expression_opt',1,'p_expression_opt','plyparser.py',43),
- ('id_init_declarator_list_opt -> empty','id_init_declarator_list_opt',1,'p_id_init_declarator_list_opt','plyparser.py',42),
- ('id_init_declarator_list_opt -> id_init_declarator_list','id_init_declarator_list_opt',1,'p_id_init_declarator_list_opt','plyparser.py',43),
- ('identifier_list_opt -> empty','identifier_list_opt',1,'p_identifier_list_opt','plyparser.py',42),
- ('identifier_list_opt -> identifier_list','identifier_list_opt',1,'p_identifier_list_opt','plyparser.py',43),
- ('init_declarator_list_opt -> empty','init_declarator_list_opt',1,'p_init_declarator_list_opt','plyparser.py',42),
- ('init_declarator_list_opt -> init_declarator_list','init_declarator_list_opt',1,'p_init_declarator_list_opt','plyparser.py',43),
- ('initializer_list_opt -> empty','initializer_list_opt',1,'p_initializer_list_opt','plyparser.py',42),
- ('initializer_list_opt -> initializer_list','initializer_list_opt',1,'p_initializer_list_opt','plyparser.py',43),
- ('parameter_type_list_opt -> empty','parameter_type_list_opt',1,'p_parameter_type_list_opt','plyparser.py',42),
- ('parameter_type_list_opt -> parameter_type_list','parameter_type_list_opt',1,'p_parameter_type_list_opt','plyparser.py',43),
- ('struct_declarator_list_opt -> empty','struct_declarator_list_opt',1,'p_struct_declarator_list_opt','plyparser.py',42),
- ('struct_declarator_list_opt -> struct_declarator_list','struct_declarator_list_opt',1,'p_struct_declarator_list_opt','plyparser.py',43),
- ('type_qualifier_list_opt -> empty','type_qualifier_list_opt',1,'p_type_qualifier_list_opt','plyparser.py',42),
- ('type_qualifier_list_opt -> type_qualifier_list','type_qualifier_list_opt',1,'p_type_qualifier_list_opt','plyparser.py',43),
- ('direct_id_declarator -> ID','direct_id_declarator',1,'p_direct_id_declarator_1','plyparser.py',109),
- ('direct_id_declarator -> LPAREN id_declarator RPAREN','direct_id_declarator',3,'p_direct_id_declarator_2','plyparser.py',109),
- ('direct_id_declarator -> direct_id_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET','direct_id_declarator',5,'p_direct_id_declarator_3','plyparser.py',109),
- ('direct_id_declarator -> direct_id_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET','direct_id_declarator',6,'p_direct_id_declarator_4','plyparser.py',109),
- ('direct_id_declarator -> direct_id_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET','direct_id_declarator',6,'p_direct_id_declarator_4','plyparser.py',110),
- ('direct_id_declarator -> direct_id_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET','direct_id_declarator',5,'p_direct_id_declarator_5','plyparser.py',109),
- ('direct_id_declarator -> direct_id_declarator LPAREN parameter_type_list RPAREN','direct_id_declarator',4,'p_direct_id_declarator_6','plyparser.py',109),
- ('direct_id_declarator -> direct_id_declarator LPAREN identifier_list_opt RPAREN','direct_id_declarator',4,'p_direct_id_declarator_6','plyparser.py',110),
- ('direct_typeid_declarator -> TYPEID','direct_typeid_declarator',1,'p_direct_typeid_declarator_1','plyparser.py',109),
- ('direct_typeid_declarator -> LPAREN typeid_declarator RPAREN','direct_typeid_declarator',3,'p_direct_typeid_declarator_2','plyparser.py',109),
- ('direct_typeid_declarator -> direct_typeid_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET','direct_typeid_declarator',5,'p_direct_typeid_declarator_3','plyparser.py',109),
- ('direct_typeid_declarator -> direct_typeid_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET','direct_typeid_declarator',6,'p_direct_typeid_declarator_4','plyparser.py',109),
- ('direct_typeid_declarator -> direct_typeid_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET','direct_typeid_declarator',6,'p_direct_typeid_declarator_4','plyparser.py',110),
- ('direct_typeid_declarator -> direct_typeid_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET','direct_typeid_declarator',5,'p_direct_typeid_declarator_5','plyparser.py',109),
- ('direct_typeid_declarator -> direct_typeid_declarator LPAREN parameter_type_list RPAREN','direct_typeid_declarator',4,'p_direct_typeid_declarator_6','plyparser.py',109),
- ('direct_typeid_declarator -> direct_typeid_declarator LPAREN identifier_list_opt RPAREN','direct_typeid_declarator',4,'p_direct_typeid_declarator_6','plyparser.py',110),
- ('direct_typeid_noparen_declarator -> TYPEID','direct_typeid_noparen_declarator',1,'p_direct_typeid_noparen_declarator_1','plyparser.py',109),
- ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET','direct_typeid_noparen_declarator',5,'p_direct_typeid_noparen_declarator_3','plyparser.py',109),
- ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET','direct_typeid_noparen_declarator',6,'p_direct_typeid_noparen_declarator_4','plyparser.py',109),
- ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET','direct_typeid_noparen_declarator',6,'p_direct_typeid_noparen_declarator_4','plyparser.py',110),
- ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET','direct_typeid_noparen_declarator',5,'p_direct_typeid_noparen_declarator_5','plyparser.py',109),
- ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LPAREN parameter_type_list RPAREN','direct_typeid_noparen_declarator',4,'p_direct_typeid_noparen_declarator_6','plyparser.py',109),
- ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LPAREN identifier_list_opt RPAREN','direct_typeid_noparen_declarator',4,'p_direct_typeid_noparen_declarator_6','plyparser.py',110),
- ('id_declarator -> direct_id_declarator','id_declarator',1,'p_id_declarator_1','plyparser.py',109),
- ('id_declarator -> pointer direct_id_declarator','id_declarator',2,'p_id_declarator_2','plyparser.py',109),
- ('typeid_declarator -> direct_typeid_declarator','typeid_declarator',1,'p_typeid_declarator_1','plyparser.py',109),
- ('typeid_declarator -> pointer direct_typeid_declarator','typeid_declarator',2,'p_typeid_declarator_2','plyparser.py',109),
- ('typeid_noparen_declarator -> direct_typeid_noparen_declarator','typeid_noparen_declarator',1,'p_typeid_noparen_declarator_1','plyparser.py',109),
- ('typeid_noparen_declarator -> pointer direct_typeid_noparen_declarator','typeid_noparen_declarator',2,'p_typeid_noparen_declarator_2','plyparser.py',109),
+ ('abstract_declarator_opt -> empty','abstract_declarator_opt',1,'p_abstract_declarator_opt','plyparser.py',43),
+ ('abstract_declarator_opt -> abstract_declarator','abstract_declarator_opt',1,'p_abstract_declarator_opt','plyparser.py',44),
+ ('assignment_expression_opt -> empty','assignment_expression_opt',1,'p_assignment_expression_opt','plyparser.py',43),
+ ('assignment_expression_opt -> assignment_expression','assignment_expression_opt',1,'p_assignment_expression_opt','plyparser.py',44),
+ ('block_item_list_opt -> empty','block_item_list_opt',1,'p_block_item_list_opt','plyparser.py',43),
+ ('block_item_list_opt -> block_item_list','block_item_list_opt',1,'p_block_item_list_opt','plyparser.py',44),
+ ('declaration_list_opt -> empty','declaration_list_opt',1,'p_declaration_list_opt','plyparser.py',43),
+ ('declaration_list_opt -> declaration_list','declaration_list_opt',1,'p_declaration_list_opt','plyparser.py',44),
+ ('declaration_specifiers_no_type_opt -> empty','declaration_specifiers_no_type_opt',1,'p_declaration_specifiers_no_type_opt','plyparser.py',43),
+ ('declaration_specifiers_no_type_opt -> declaration_specifiers_no_type','declaration_specifiers_no_type_opt',1,'p_declaration_specifiers_no_type_opt','plyparser.py',44),
+ ('designation_opt -> empty','designation_opt',1,'p_designation_opt','plyparser.py',43),
+ ('designation_opt -> designation','designation_opt',1,'p_designation_opt','plyparser.py',44),
+ ('expression_opt -> empty','expression_opt',1,'p_expression_opt','plyparser.py',43),
+ ('expression_opt -> expression','expression_opt',1,'p_expression_opt','plyparser.py',44),
+ ('id_init_declarator_list_opt -> empty','id_init_declarator_list_opt',1,'p_id_init_declarator_list_opt','plyparser.py',43),
+ ('id_init_declarator_list_opt -> id_init_declarator_list','id_init_declarator_list_opt',1,'p_id_init_declarator_list_opt','plyparser.py',44),
+ ('identifier_list_opt -> empty','identifier_list_opt',1,'p_identifier_list_opt','plyparser.py',43),
+ ('identifier_list_opt -> identifier_list','identifier_list_opt',1,'p_identifier_list_opt','plyparser.py',44),
+ ('init_declarator_list_opt -> empty','init_declarator_list_opt',1,'p_init_declarator_list_opt','plyparser.py',43),
+ ('init_declarator_list_opt -> init_declarator_list','init_declarator_list_opt',1,'p_init_declarator_list_opt','plyparser.py',44),
+ ('initializer_list_opt -> empty','initializer_list_opt',1,'p_initializer_list_opt','plyparser.py',43),
+ ('initializer_list_opt -> initializer_list','initializer_list_opt',1,'p_initializer_list_opt','plyparser.py',44),
+ ('parameter_type_list_opt -> empty','parameter_type_list_opt',1,'p_parameter_type_list_opt','plyparser.py',43),
+ ('parameter_type_list_opt -> parameter_type_list','parameter_type_list_opt',1,'p_parameter_type_list_opt','plyparser.py',44),
+ ('struct_declarator_list_opt -> empty','struct_declarator_list_opt',1,'p_struct_declarator_list_opt','plyparser.py',43),
+ ('struct_declarator_list_opt -> struct_declarator_list','struct_declarator_list_opt',1,'p_struct_declarator_list_opt','plyparser.py',44),
+ ('type_qualifier_list_opt -> empty','type_qualifier_list_opt',1,'p_type_qualifier_list_opt','plyparser.py',43),
+ ('type_qualifier_list_opt -> type_qualifier_list','type_qualifier_list_opt',1,'p_type_qualifier_list_opt','plyparser.py',44),
+ ('direct_id_declarator -> ID','direct_id_declarator',1,'p_direct_id_declarator_1','plyparser.py',126),
+ ('direct_id_declarator -> LPAREN id_declarator RPAREN','direct_id_declarator',3,'p_direct_id_declarator_2','plyparser.py',126),
+ ('direct_id_declarator -> direct_id_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET','direct_id_declarator',5,'p_direct_id_declarator_3','plyparser.py',126),
+ ('direct_id_declarator -> direct_id_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET','direct_id_declarator',6,'p_direct_id_declarator_4','plyparser.py',126),
+ ('direct_id_declarator -> direct_id_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET','direct_id_declarator',6,'p_direct_id_declarator_4','plyparser.py',127),
+ ('direct_id_declarator -> direct_id_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET','direct_id_declarator',5,'p_direct_id_declarator_5','plyparser.py',126),
+ ('direct_id_declarator -> direct_id_declarator LPAREN parameter_type_list RPAREN','direct_id_declarator',4,'p_direct_id_declarator_6','plyparser.py',126),
+ ('direct_id_declarator -> direct_id_declarator LPAREN identifier_list_opt RPAREN','direct_id_declarator',4,'p_direct_id_declarator_6','plyparser.py',127),
+ ('direct_typeid_declarator -> TYPEID','direct_typeid_declarator',1,'p_direct_typeid_declarator_1','plyparser.py',126),
+ ('direct_typeid_declarator -> LPAREN typeid_declarator RPAREN','direct_typeid_declarator',3,'p_direct_typeid_declarator_2','plyparser.py',126),
+ ('direct_typeid_declarator -> direct_typeid_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET','direct_typeid_declarator',5,'p_direct_typeid_declarator_3','plyparser.py',126),
+ ('direct_typeid_declarator -> direct_typeid_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET','direct_typeid_declarator',6,'p_direct_typeid_declarator_4','plyparser.py',126),
+ ('direct_typeid_declarator -> direct_typeid_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET','direct_typeid_declarator',6,'p_direct_typeid_declarator_4','plyparser.py',127),
+ ('direct_typeid_declarator -> direct_typeid_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET','direct_typeid_declarator',5,'p_direct_typeid_declarator_5','plyparser.py',126),
+ ('direct_typeid_declarator -> direct_typeid_declarator LPAREN parameter_type_list RPAREN','direct_typeid_declarator',4,'p_direct_typeid_declarator_6','plyparser.py',126),
+ ('direct_typeid_declarator -> direct_typeid_declarator LPAREN identifier_list_opt RPAREN','direct_typeid_declarator',4,'p_direct_typeid_declarator_6','plyparser.py',127),
+ ('direct_typeid_noparen_declarator -> TYPEID','direct_typeid_noparen_declarator',1,'p_direct_typeid_noparen_declarator_1','plyparser.py',126),
+ ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET','direct_typeid_noparen_declarator',5,'p_direct_typeid_noparen_declarator_3','plyparser.py',126),
+ ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET','direct_typeid_noparen_declarator',6,'p_direct_typeid_noparen_declarator_4','plyparser.py',126),
+ ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET','direct_typeid_noparen_declarator',6,'p_direct_typeid_noparen_declarator_4','plyparser.py',127),
+ ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET','direct_typeid_noparen_declarator',5,'p_direct_typeid_noparen_declarator_5','plyparser.py',126),
+ ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LPAREN parameter_type_list RPAREN','direct_typeid_noparen_declarator',4,'p_direct_typeid_noparen_declarator_6','plyparser.py',126),
+ ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LPAREN identifier_list_opt RPAREN','direct_typeid_noparen_declarator',4,'p_direct_typeid_noparen_declarator_6','plyparser.py',127),
+ ('id_declarator -> direct_id_declarator','id_declarator',1,'p_id_declarator_1','plyparser.py',126),
+ ('id_declarator -> pointer direct_id_declarator','id_declarator',2,'p_id_declarator_2','plyparser.py',126),
+ ('typeid_declarator -> direct_typeid_declarator','typeid_declarator',1,'p_typeid_declarator_1','plyparser.py',126),
+ ('typeid_declarator -> pointer direct_typeid_declarator','typeid_declarator',2,'p_typeid_declarator_2','plyparser.py',126),
+ ('typeid_noparen_declarator -> direct_typeid_noparen_declarator','typeid_noparen_declarator',1,'p_typeid_noparen_declarator_1','plyparser.py',126),
+ ('typeid_noparen_declarator -> pointer direct_typeid_noparen_declarator','typeid_noparen_declarator',2,'p_typeid_noparen_declarator_2','plyparser.py',126),
('translation_unit_or_empty -> translation_unit','translation_unit_or_empty',1,'p_translation_unit_or_empty','c_parser.py',514),
('translation_unit_or_empty -> empty','translation_unit_or_empty',1,'p_translation_unit_or_empty','c_parser.py',515),
('translation_unit -> external_declaration','translation_unit',1,'p_translation_unit_1','c_parser.py',523),
@@ -104,229 +104,235 @@
('statement -> iteration_statement','statement',1,'p_statement','c_parser.py',613),
('statement -> jump_statement','statement',1,'p_statement','c_parser.py',614),
('statement -> pppragma_directive','statement',1,'p_statement','c_parser.py',615),
- ('decl_body -> declaration_specifiers init_declarator_list_opt','decl_body',2,'p_decl_body','c_parser.py',629),
- ('decl_body -> declaration_specifiers_no_type id_init_declarator_list_opt','decl_body',2,'p_decl_body','c_parser.py',630),
- ('declaration -> decl_body SEMI','declaration',2,'p_declaration','c_parser.py',689),
- ('declaration_list -> declaration','declaration_list',1,'p_declaration_list','c_parser.py',698),
- ('declaration_list -> declaration_list declaration','declaration_list',2,'p_declaration_list','c_parser.py',699),
- ('declaration_specifiers_no_type -> type_qualifier declaration_specifiers_no_type_opt','declaration_specifiers_no_type',2,'p_declaration_specifiers_no_type_1','c_parser.py',709),
- ('declaration_specifiers_no_type -> storage_class_specifier declaration_specifiers_no_type_opt','declaration_specifiers_no_type',2,'p_declaration_specifiers_no_type_2','c_parser.py',714),
- ('declaration_specifiers_no_type -> function_specifier declaration_specifiers_no_type_opt','declaration_specifiers_no_type',2,'p_declaration_specifiers_no_type_3','c_parser.py',719),
- ('declaration_specifiers -> declaration_specifiers type_qualifier','declaration_specifiers',2,'p_declaration_specifiers_1','c_parser.py',725),
- ('declaration_specifiers -> declaration_specifiers storage_class_specifier','declaration_specifiers',2,'p_declaration_specifiers_2','c_parser.py',730),
- ('declaration_specifiers -> declaration_specifiers function_specifier','declaration_specifiers',2,'p_declaration_specifiers_3','c_parser.py',735),
- ('declaration_specifiers -> declaration_specifiers type_specifier_no_typeid','declaration_specifiers',2,'p_declaration_specifiers_4','c_parser.py',740),
- ('declaration_specifiers -> type_specifier','declaration_specifiers',1,'p_declaration_specifiers_5','c_parser.py',745),
- ('declaration_specifiers -> declaration_specifiers_no_type type_specifier','declaration_specifiers',2,'p_declaration_specifiers_6','c_parser.py',750),
- ('storage_class_specifier -> AUTO','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',756),
- ('storage_class_specifier -> REGISTER','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',757),
- ('storage_class_specifier -> STATIC','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',758),
- ('storage_class_specifier -> EXTERN','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',759),
- ('storage_class_specifier -> TYPEDEF','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',760),
- ('function_specifier -> INLINE','function_specifier',1,'p_function_specifier','c_parser.py',765),
- ('type_specifier_no_typeid -> VOID','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',770),
- ('type_specifier_no_typeid -> _BOOL','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',771),
- ('type_specifier_no_typeid -> CHAR','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',772),
- ('type_specifier_no_typeid -> SHORT','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',773),
- ('type_specifier_no_typeid -> INT','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',774),
- ('type_specifier_no_typeid -> LONG','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',775),
- ('type_specifier_no_typeid -> FLOAT','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',776),
- ('type_specifier_no_typeid -> DOUBLE','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',777),
- ('type_specifier_no_typeid -> _COMPLEX','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',778),
- ('type_specifier_no_typeid -> SIGNED','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',779),
- ('type_specifier_no_typeid -> UNSIGNED','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',780),
- ('type_specifier_no_typeid -> __INT128','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',781),
- ('type_specifier -> typedef_name','type_specifier',1,'p_type_specifier','c_parser.py',786),
- ('type_specifier -> enum_specifier','type_specifier',1,'p_type_specifier','c_parser.py',787),
- ('type_specifier -> struct_or_union_specifier','type_specifier',1,'p_type_specifier','c_parser.py',788),
- ('type_specifier -> type_specifier_no_typeid','type_specifier',1,'p_type_specifier','c_parser.py',789),
- ('type_qualifier -> CONST','type_qualifier',1,'p_type_qualifier','c_parser.py',794),
- ('type_qualifier -> RESTRICT','type_qualifier',1,'p_type_qualifier','c_parser.py',795),
- ('type_qualifier -> VOLATILE','type_qualifier',1,'p_type_qualifier','c_parser.py',796),
- ('init_declarator_list -> init_declarator','init_declarator_list',1,'p_init_declarator_list','c_parser.py',801),
- ('init_declarator_list -> init_declarator_list COMMA init_declarator','init_declarator_list',3,'p_init_declarator_list','c_parser.py',802),
- ('init_declarator -> declarator','init_declarator',1,'p_init_declarator','c_parser.py',810),
- ('init_declarator -> declarator EQUALS initializer','init_declarator',3,'p_init_declarator','c_parser.py',811),
- ('id_init_declarator_list -> id_init_declarator','id_init_declarator_list',1,'p_id_init_declarator_list','c_parser.py',816),
- ('id_init_declarator_list -> id_init_declarator_list COMMA init_declarator','id_init_declarator_list',3,'p_id_init_declarator_list','c_parser.py',817),
- ('id_init_declarator -> id_declarator','id_init_declarator',1,'p_id_init_declarator','c_parser.py',822),
- ('id_init_declarator -> id_declarator EQUALS initializer','id_init_declarator',3,'p_id_init_declarator','c_parser.py',823),
- ('specifier_qualifier_list -> specifier_qualifier_list type_specifier_no_typeid','specifier_qualifier_list',2,'p_specifier_qualifier_list_1','c_parser.py',830),
- ('specifier_qualifier_list -> specifier_qualifier_list type_qualifier','specifier_qualifier_list',2,'p_specifier_qualifier_list_2','c_parser.py',835),
- ('specifier_qualifier_list -> type_specifier','specifier_qualifier_list',1,'p_specifier_qualifier_list_3','c_parser.py',840),
- ('specifier_qualifier_list -> type_qualifier_list type_specifier','specifier_qualifier_list',2,'p_specifier_qualifier_list_4','c_parser.py',845),
- ('struct_or_union_specifier -> struct_or_union ID','struct_or_union_specifier',2,'p_struct_or_union_specifier_1','c_parser.py',854),
- ('struct_or_union_specifier -> struct_or_union TYPEID','struct_or_union_specifier',2,'p_struct_or_union_specifier_1','c_parser.py',855),
- ('struct_or_union_specifier -> struct_or_union brace_open struct_declaration_list brace_close','struct_or_union_specifier',4,'p_struct_or_union_specifier_2','c_parser.py',864),
- ('struct_or_union_specifier -> struct_or_union ID brace_open struct_declaration_list brace_close','struct_or_union_specifier',5,'p_struct_or_union_specifier_3','c_parser.py',873),
- ('struct_or_union_specifier -> struct_or_union TYPEID brace_open struct_declaration_list brace_close','struct_or_union_specifier',5,'p_struct_or_union_specifier_3','c_parser.py',874),
- ('struct_or_union -> STRUCT','struct_or_union',1,'p_struct_or_union','c_parser.py',883),
- ('struct_or_union -> UNION','struct_or_union',1,'p_struct_or_union','c_parser.py',884),
- ('struct_declaration_list -> struct_declaration','struct_declaration_list',1,'p_struct_declaration_list','c_parser.py',891),
- ('struct_declaration_list -> struct_declaration_list struct_declaration','struct_declaration_list',2,'p_struct_declaration_list','c_parser.py',892),
- ('struct_declaration -> specifier_qualifier_list struct_declarator_list_opt SEMI','struct_declaration',3,'p_struct_declaration_1','c_parser.py',900),
- ('struct_declaration -> SEMI','struct_declaration',1,'p_struct_declaration_2','c_parser.py',938),
- ('struct_declarator_list -> struct_declarator','struct_declarator_list',1,'p_struct_declarator_list','c_parser.py',943),
- ('struct_declarator_list -> struct_declarator_list COMMA struct_declarator','struct_declarator_list',3,'p_struct_declarator_list','c_parser.py',944),
- ('struct_declarator -> declarator','struct_declarator',1,'p_struct_declarator_1','c_parser.py',952),
- ('struct_declarator -> declarator COLON constant_expression','struct_declarator',3,'p_struct_declarator_2','c_parser.py',957),
- ('struct_declarator -> COLON constant_expression','struct_declarator',2,'p_struct_declarator_2','c_parser.py',958),
- ('enum_specifier -> ENUM ID','enum_specifier',2,'p_enum_specifier_1','c_parser.py',966),
- ('enum_specifier -> ENUM TYPEID','enum_specifier',2,'p_enum_specifier_1','c_parser.py',967),
- ('enum_specifier -> ENUM brace_open enumerator_list brace_close','enum_specifier',4,'p_enum_specifier_2','c_parser.py',972),
- ('enum_specifier -> ENUM ID brace_open enumerator_list brace_close','enum_specifier',5,'p_enum_specifier_3','c_parser.py',977),
- ('enum_specifier -> ENUM TYPEID brace_open enumerator_list brace_close','enum_specifier',5,'p_enum_specifier_3','c_parser.py',978),
- ('enumerator_list -> enumerator','enumerator_list',1,'p_enumerator_list','c_parser.py',983),
- ('enumerator_list -> enumerator_list COMMA','enumerator_list',2,'p_enumerator_list','c_parser.py',984),
- ('enumerator_list -> enumerator_list COMMA enumerator','enumerator_list',3,'p_enumerator_list','c_parser.py',985),
- ('enumerator -> ID','enumerator',1,'p_enumerator','c_parser.py',996),
- ('enumerator -> ID EQUALS constant_expression','enumerator',3,'p_enumerator','c_parser.py',997),
- ('declarator -> id_declarator','declarator',1,'p_declarator','c_parser.py',1012),
- ('declarator -> typeid_declarator','declarator',1,'p_declarator','c_parser.py',1013),
- ('pointer -> TIMES type_qualifier_list_opt','pointer',2,'p_pointer','c_parser.py',1124),
- ('pointer -> TIMES type_qualifier_list_opt pointer','pointer',3,'p_pointer','c_parser.py',1125),
- ('type_qualifier_list -> type_qualifier','type_qualifier_list',1,'p_type_qualifier_list','c_parser.py',1154),
- ('type_qualifier_list -> type_qualifier_list type_qualifier','type_qualifier_list',2,'p_type_qualifier_list','c_parser.py',1155),
- ('parameter_type_list -> parameter_list','parameter_type_list',1,'p_parameter_type_list','c_parser.py',1160),
- ('parameter_type_list -> parameter_list COMMA ELLIPSIS','parameter_type_list',3,'p_parameter_type_list','c_parser.py',1161),
- ('parameter_list -> parameter_declaration','parameter_list',1,'p_parameter_list','c_parser.py',1169),
- ('parameter_list -> parameter_list COMMA parameter_declaration','parameter_list',3,'p_parameter_list','c_parser.py',1170),
- ('parameter_declaration -> declaration_specifiers id_declarator','parameter_declaration',2,'p_parameter_declaration_1','c_parser.py',1189),
- ('parameter_declaration -> declaration_specifiers typeid_noparen_declarator','parameter_declaration',2,'p_parameter_declaration_1','c_parser.py',1190),
- ('parameter_declaration -> declaration_specifiers abstract_declarator_opt','parameter_declaration',2,'p_parameter_declaration_2','c_parser.py',1201),
- ('identifier_list -> identifier','identifier_list',1,'p_identifier_list','c_parser.py',1232),
- ('identifier_list -> identifier_list COMMA identifier','identifier_list',3,'p_identifier_list','c_parser.py',1233),
- ('initializer -> assignment_expression','initializer',1,'p_initializer_1','c_parser.py',1242),
- ('initializer -> brace_open initializer_list_opt brace_close','initializer',3,'p_initializer_2','c_parser.py',1247),
- ('initializer -> brace_open initializer_list COMMA brace_close','initializer',4,'p_initializer_2','c_parser.py',1248),
- ('initializer_list -> designation_opt initializer','initializer_list',2,'p_initializer_list','c_parser.py',1256),
- ('initializer_list -> initializer_list COMMA designation_opt initializer','initializer_list',4,'p_initializer_list','c_parser.py',1257),
- ('designation -> designator_list EQUALS','designation',2,'p_designation','c_parser.py',1268),
- ('designator_list -> designator','designator_list',1,'p_designator_list','c_parser.py',1276),
- ('designator_list -> designator_list designator','designator_list',2,'p_designator_list','c_parser.py',1277),
- ('designator -> LBRACKET constant_expression RBRACKET','designator',3,'p_designator','c_parser.py',1282),
- ('designator -> PERIOD identifier','designator',2,'p_designator','c_parser.py',1283),
- ('type_name -> specifier_qualifier_list abstract_declarator_opt','type_name',2,'p_type_name','c_parser.py',1288),
- ('abstract_declarator -> pointer','abstract_declarator',1,'p_abstract_declarator_1','c_parser.py',1299),
- ('abstract_declarator -> pointer direct_abstract_declarator','abstract_declarator',2,'p_abstract_declarator_2','c_parser.py',1307),
- ('abstract_declarator -> direct_abstract_declarator','abstract_declarator',1,'p_abstract_declarator_3','c_parser.py',1312),
- ('direct_abstract_declarator -> LPAREN abstract_declarator RPAREN','direct_abstract_declarator',3,'p_direct_abstract_declarator_1','c_parser.py',1322),
- ('direct_abstract_declarator -> direct_abstract_declarator LBRACKET assignment_expression_opt RBRACKET','direct_abstract_declarator',4,'p_direct_abstract_declarator_2','c_parser.py',1326),
- ('direct_abstract_declarator -> LBRACKET assignment_expression_opt RBRACKET','direct_abstract_declarator',3,'p_direct_abstract_declarator_3','c_parser.py',1337),
- ('direct_abstract_declarator -> direct_abstract_declarator LBRACKET TIMES RBRACKET','direct_abstract_declarator',4,'p_direct_abstract_declarator_4','c_parser.py',1346),
- ('direct_abstract_declarator -> LBRACKET TIMES RBRACKET','direct_abstract_declarator',3,'p_direct_abstract_declarator_5','c_parser.py',1357),
- ('direct_abstract_declarator -> direct_abstract_declarator LPAREN parameter_type_list_opt RPAREN','direct_abstract_declarator',4,'p_direct_abstract_declarator_6','c_parser.py',1366),
- ('direct_abstract_declarator -> LPAREN parameter_type_list_opt RPAREN','direct_abstract_declarator',3,'p_direct_abstract_declarator_7','c_parser.py',1376),
- ('block_item -> declaration','block_item',1,'p_block_item','c_parser.py',1387),
- ('block_item -> statement','block_item',1,'p_block_item','c_parser.py',1388),
- ('block_item_list -> block_item','block_item_list',1,'p_block_item_list','c_parser.py',1395),
- ('block_item_list -> block_item_list block_item','block_item_list',2,'p_block_item_list','c_parser.py',1396),
- ('compound_statement -> brace_open block_item_list_opt brace_close','compound_statement',3,'p_compound_statement_1','c_parser.py',1402),
- ('labeled_statement -> ID COLON statement','labeled_statement',3,'p_labeled_statement_1','c_parser.py',1408),
- ('labeled_statement -> CASE constant_expression COLON statement','labeled_statement',4,'p_labeled_statement_2','c_parser.py',1412),
- ('labeled_statement -> DEFAULT COLON statement','labeled_statement',3,'p_labeled_statement_3','c_parser.py',1416),
- ('selection_statement -> IF LPAREN expression RPAREN statement','selection_statement',5,'p_selection_statement_1','c_parser.py',1420),
- ('selection_statement -> IF LPAREN expression RPAREN statement ELSE statement','selection_statement',7,'p_selection_statement_2','c_parser.py',1424),
- ('selection_statement -> SWITCH LPAREN expression RPAREN statement','selection_statement',5,'p_selection_statement_3','c_parser.py',1428),
- ('iteration_statement -> WHILE LPAREN expression RPAREN statement','iteration_statement',5,'p_iteration_statement_1','c_parser.py',1433),
- ('iteration_statement -> DO statement WHILE LPAREN expression RPAREN SEMI','iteration_statement',7,'p_iteration_statement_2','c_parser.py',1437),
- ('iteration_statement -> FOR LPAREN expression_opt SEMI expression_opt SEMI expression_opt RPAREN statement','iteration_statement',9,'p_iteration_statement_3','c_parser.py',1441),
- ('iteration_statement -> FOR LPAREN declaration expression_opt SEMI expression_opt RPAREN statement','iteration_statement',8,'p_iteration_statement_4','c_parser.py',1445),
- ('jump_statement -> GOTO ID SEMI','jump_statement',3,'p_jump_statement_1','c_parser.py',1450),
- ('jump_statement -> BREAK SEMI','jump_statement',2,'p_jump_statement_2','c_parser.py',1454),
- ('jump_statement -> CONTINUE SEMI','jump_statement',2,'p_jump_statement_3','c_parser.py',1458),
- ('jump_statement -> RETURN expression SEMI','jump_statement',3,'p_jump_statement_4','c_parser.py',1462),
- ('jump_statement -> RETURN SEMI','jump_statement',2,'p_jump_statement_4','c_parser.py',1463),
- ('expression_statement -> expression_opt SEMI','expression_statement',2,'p_expression_statement','c_parser.py',1468),
- ('expression -> assignment_expression','expression',1,'p_expression','c_parser.py',1475),
- ('expression -> expression COMMA assignment_expression','expression',3,'p_expression','c_parser.py',1476),
- ('typedef_name -> TYPEID','typedef_name',1,'p_typedef_name','c_parser.py',1488),
- ('assignment_expression -> conditional_expression','assignment_expression',1,'p_assignment_expression','c_parser.py',1492),
- ('assignment_expression -> unary_expression assignment_operator assignment_expression','assignment_expression',3,'p_assignment_expression','c_parser.py',1493),
- ('assignment_operator -> EQUALS','assignment_operator',1,'p_assignment_operator','c_parser.py',1506),
- ('assignment_operator -> XOREQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1507),
- ('assignment_operator -> TIMESEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1508),
- ('assignment_operator -> DIVEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1509),
- ('assignment_operator -> MODEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1510),
- ('assignment_operator -> PLUSEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1511),
- ('assignment_operator -> MINUSEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1512),
- ('assignment_operator -> LSHIFTEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1513),
- ('assignment_operator -> RSHIFTEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1514),
- ('assignment_operator -> ANDEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1515),
- ('assignment_operator -> OREQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1516),
- ('constant_expression -> conditional_expression','constant_expression',1,'p_constant_expression','c_parser.py',1521),
- ('conditional_expression -> binary_expression','conditional_expression',1,'p_conditional_expression','c_parser.py',1525),
- ('conditional_expression -> binary_expression CONDOP expression COLON conditional_expression','conditional_expression',5,'p_conditional_expression','c_parser.py',1526),
- ('binary_expression -> cast_expression','binary_expression',1,'p_binary_expression','c_parser.py',1534),
- ('binary_expression -> binary_expression TIMES binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1535),
- ('binary_expression -> binary_expression DIVIDE binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1536),
- ('binary_expression -> binary_expression MOD binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1537),
- ('binary_expression -> binary_expression PLUS binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1538),
- ('binary_expression -> binary_expression MINUS binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1539),
- ('binary_expression -> binary_expression RSHIFT binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1540),
- ('binary_expression -> binary_expression LSHIFT binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1541),
- ('binary_expression -> binary_expression LT binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1542),
- ('binary_expression -> binary_expression LE binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1543),
- ('binary_expression -> binary_expression GE binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1544),
- ('binary_expression -> binary_expression GT binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1545),
- ('binary_expression -> binary_expression EQ binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1546),
- ('binary_expression -> binary_expression NE binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1547),
- ('binary_expression -> binary_expression AND binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1548),
- ('binary_expression -> binary_expression OR binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1549),
- ('binary_expression -> binary_expression XOR binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1550),
- ('binary_expression -> binary_expression LAND binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1551),
- ('binary_expression -> binary_expression LOR binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1552),
- ('cast_expression -> unary_expression','cast_expression',1,'p_cast_expression_1','c_parser.py',1560),
- ('cast_expression -> LPAREN type_name RPAREN cast_expression','cast_expression',4,'p_cast_expression_2','c_parser.py',1564),
- ('unary_expression -> postfix_expression','unary_expression',1,'p_unary_expression_1','c_parser.py',1568),
- ('unary_expression -> PLUSPLUS unary_expression','unary_expression',2,'p_unary_expression_2','c_parser.py',1572),
- ('unary_expression -> MINUSMINUS unary_expression','unary_expression',2,'p_unary_expression_2','c_parser.py',1573),
- ('unary_expression -> unary_operator cast_expression','unary_expression',2,'p_unary_expression_2','c_parser.py',1574),
- ('unary_expression -> SIZEOF unary_expression','unary_expression',2,'p_unary_expression_3','c_parser.py',1579),
- ('unary_expression -> SIZEOF LPAREN type_name RPAREN','unary_expression',4,'p_unary_expression_3','c_parser.py',1580),
- ('unary_operator -> AND','unary_operator',1,'p_unary_operator','c_parser.py',1588),
- ('unary_operator -> TIMES','unary_operator',1,'p_unary_operator','c_parser.py',1589),
- ('unary_operator -> PLUS','unary_operator',1,'p_unary_operator','c_parser.py',1590),
- ('unary_operator -> MINUS','unary_operator',1,'p_unary_operator','c_parser.py',1591),
- ('unary_operator -> NOT','unary_operator',1,'p_unary_operator','c_parser.py',1592),
- ('unary_operator -> LNOT','unary_operator',1,'p_unary_operator','c_parser.py',1593),
- ('postfix_expression -> primary_expression','postfix_expression',1,'p_postfix_expression_1','c_parser.py',1598),
- ('postfix_expression -> postfix_expression LBRACKET expression RBRACKET','postfix_expression',4,'p_postfix_expression_2','c_parser.py',1602),
- ('postfix_expression -> postfix_expression LPAREN argument_expression_list RPAREN','postfix_expression',4,'p_postfix_expression_3','c_parser.py',1606),
- ('postfix_expression -> postfix_expression LPAREN RPAREN','postfix_expression',3,'p_postfix_expression_3','c_parser.py',1607),
- ('postfix_expression -> postfix_expression PERIOD ID','postfix_expression',3,'p_postfix_expression_4','c_parser.py',1612),
- ('postfix_expression -> postfix_expression PERIOD TYPEID','postfix_expression',3,'p_postfix_expression_4','c_parser.py',1613),
- ('postfix_expression -> postfix_expression ARROW ID','postfix_expression',3,'p_postfix_expression_4','c_parser.py',1614),
- ('postfix_expression -> postfix_expression ARROW TYPEID','postfix_expression',3,'p_postfix_expression_4','c_parser.py',1615),
- ('postfix_expression -> postfix_expression PLUSPLUS','postfix_expression',2,'p_postfix_expression_5','c_parser.py',1621),
- ('postfix_expression -> postfix_expression MINUSMINUS','postfix_expression',2,'p_postfix_expression_5','c_parser.py',1622),
- ('postfix_expression -> LPAREN type_name RPAREN brace_open initializer_list brace_close','postfix_expression',6,'p_postfix_expression_6','c_parser.py',1627),
- ('postfix_expression -> LPAREN type_name RPAREN brace_open initializer_list COMMA brace_close','postfix_expression',7,'p_postfix_expression_6','c_parser.py',1628),
- ('primary_expression -> identifier','primary_expression',1,'p_primary_expression_1','c_parser.py',1633),
- ('primary_expression -> constant','primary_expression',1,'p_primary_expression_2','c_parser.py',1637),
- ('primary_expression -> unified_string_literal','primary_expression',1,'p_primary_expression_3','c_parser.py',1641),
- ('primary_expression -> unified_wstring_literal','primary_expression',1,'p_primary_expression_3','c_parser.py',1642),
- ('primary_expression -> LPAREN expression RPAREN','primary_expression',3,'p_primary_expression_4','c_parser.py',1647),
- ('primary_expression -> OFFSETOF LPAREN type_name COMMA offsetof_member_designator RPAREN','primary_expression',6,'p_primary_expression_5','c_parser.py',1651),
- ('offsetof_member_designator -> identifier','offsetof_member_designator',1,'p_offsetof_member_designator','c_parser.py',1659),
- ('offsetof_member_designator -> offsetof_member_designator PERIOD identifier','offsetof_member_designator',3,'p_offsetof_member_designator','c_parser.py',1660),
- ('offsetof_member_designator -> offsetof_member_designator LBRACKET expression RBRACKET','offsetof_member_designator',4,'p_offsetof_member_designator','c_parser.py',1661),
- ('argument_expression_list -> assignment_expression','argument_expression_list',1,'p_argument_expression_list','c_parser.py',1674),
- ('argument_expression_list -> argument_expression_list COMMA assignment_expression','argument_expression_list',3,'p_argument_expression_list','c_parser.py',1675),
- ('identifier -> ID','identifier',1,'p_identifier','c_parser.py',1684),
- ('constant -> INT_CONST_DEC','constant',1,'p_constant_1','c_parser.py',1688),
- ('constant -> INT_CONST_OCT','constant',1,'p_constant_1','c_parser.py',1689),
- ('constant -> INT_CONST_HEX','constant',1,'p_constant_1','c_parser.py',1690),
- ('constant -> INT_CONST_BIN','constant',1,'p_constant_1','c_parser.py',1691),
- ('constant -> FLOAT_CONST','constant',1,'p_constant_2','c_parser.py',1697),
- ('constant -> HEX_FLOAT_CONST','constant',1,'p_constant_2','c_parser.py',1698),
- ('constant -> CHAR_CONST','constant',1,'p_constant_3','c_parser.py',1704),
- ('constant -> WCHAR_CONST','constant',1,'p_constant_3','c_parser.py',1705),
- ('unified_string_literal -> STRING_LITERAL','unified_string_literal',1,'p_unified_string_literal','c_parser.py',1716),
- ('unified_string_literal -> unified_string_literal STRING_LITERAL','unified_string_literal',2,'p_unified_string_literal','c_parser.py',1717),
- ('unified_wstring_literal -> WSTRING_LITERAL','unified_wstring_literal',1,'p_unified_wstring_literal','c_parser.py',1727),
- ('unified_wstring_literal -> unified_wstring_literal WSTRING_LITERAL','unified_wstring_literal',2,'p_unified_wstring_literal','c_parser.py',1728),
- ('brace_open -> LBRACE','brace_open',1,'p_brace_open','c_parser.py',1738),
- ('brace_close -> RBRACE','brace_close',1,'p_brace_close','c_parser.py',1744),
- ('empty -> ','empty',0,'p_empty','c_parser.py',1750),
+ ('pragmacomp_or_statement -> pppragma_directive statement','pragmacomp_or_statement',2,'p_pragmacomp_or_statement','c_parser.py',662),
+ ('pragmacomp_or_statement -> statement','pragmacomp_or_statement',1,'p_pragmacomp_or_statement','c_parser.py',663),
+ ('decl_body -> declaration_specifiers init_declarator_list_opt','decl_body',2,'p_decl_body','c_parser.py',682),
+ ('decl_body -> declaration_specifiers_no_type id_init_declarator_list_opt','decl_body',2,'p_decl_body','c_parser.py',683),
+ ('declaration -> decl_body SEMI','declaration',2,'p_declaration','c_parser.py',742),
+ ('declaration_list -> declaration','declaration_list',1,'p_declaration_list','c_parser.py',751),
+ ('declaration_list -> declaration_list declaration','declaration_list',2,'p_declaration_list','c_parser.py',752),
+ ('declaration_specifiers_no_type -> type_qualifier declaration_specifiers_no_type_opt','declaration_specifiers_no_type',2,'p_declaration_specifiers_no_type_1','c_parser.py',762),
+ ('declaration_specifiers_no_type -> storage_class_specifier declaration_specifiers_no_type_opt','declaration_specifiers_no_type',2,'p_declaration_specifiers_no_type_2','c_parser.py',767),
+ ('declaration_specifiers_no_type -> function_specifier declaration_specifiers_no_type_opt','declaration_specifiers_no_type',2,'p_declaration_specifiers_no_type_3','c_parser.py',772),
+ ('declaration_specifiers -> declaration_specifiers type_qualifier','declaration_specifiers',2,'p_declaration_specifiers_1','c_parser.py',778),
+ ('declaration_specifiers -> declaration_specifiers storage_class_specifier','declaration_specifiers',2,'p_declaration_specifiers_2','c_parser.py',783),
+ ('declaration_specifiers -> declaration_specifiers function_specifier','declaration_specifiers',2,'p_declaration_specifiers_3','c_parser.py',788),
+ ('declaration_specifiers -> declaration_specifiers type_specifier_no_typeid','declaration_specifiers',2,'p_declaration_specifiers_4','c_parser.py',793),
+ ('declaration_specifiers -> type_specifier','declaration_specifiers',1,'p_declaration_specifiers_5','c_parser.py',798),
+ ('declaration_specifiers -> declaration_specifiers_no_type type_specifier','declaration_specifiers',2,'p_declaration_specifiers_6','c_parser.py',803),
+ ('storage_class_specifier -> AUTO','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',809),
+ ('storage_class_specifier -> REGISTER','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',810),
+ ('storage_class_specifier -> STATIC','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',811),
+ ('storage_class_specifier -> EXTERN','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',812),
+ ('storage_class_specifier -> TYPEDEF','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',813),
+ ('function_specifier -> INLINE','function_specifier',1,'p_function_specifier','c_parser.py',818),
+ ('type_specifier_no_typeid -> VOID','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',823),
+ ('type_specifier_no_typeid -> _BOOL','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',824),
+ ('type_specifier_no_typeid -> CHAR','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',825),
+ ('type_specifier_no_typeid -> SHORT','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',826),
+ ('type_specifier_no_typeid -> INT','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',827),
+ ('type_specifier_no_typeid -> LONG','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',828),
+ ('type_specifier_no_typeid -> FLOAT','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',829),
+ ('type_specifier_no_typeid -> DOUBLE','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',830),
+ ('type_specifier_no_typeid -> _COMPLEX','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',831),
+ ('type_specifier_no_typeid -> SIGNED','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',832),
+ ('type_specifier_no_typeid -> UNSIGNED','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',833),
+ ('type_specifier_no_typeid -> __INT128','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',834),
+ ('type_specifier -> typedef_name','type_specifier',1,'p_type_specifier','c_parser.py',839),
+ ('type_specifier -> enum_specifier','type_specifier',1,'p_type_specifier','c_parser.py',840),
+ ('type_specifier -> struct_or_union_specifier','type_specifier',1,'p_type_specifier','c_parser.py',841),
+ ('type_specifier -> type_specifier_no_typeid','type_specifier',1,'p_type_specifier','c_parser.py',842),
+ ('type_qualifier -> CONST','type_qualifier',1,'p_type_qualifier','c_parser.py',847),
+ ('type_qualifier -> RESTRICT','type_qualifier',1,'p_type_qualifier','c_parser.py',848),
+ ('type_qualifier -> VOLATILE','type_qualifier',1,'p_type_qualifier','c_parser.py',849),
+ ('init_declarator_list -> init_declarator','init_declarator_list',1,'p_init_declarator_list','c_parser.py',854),
+ ('init_declarator_list -> init_declarator_list COMMA init_declarator','init_declarator_list',3,'p_init_declarator_list','c_parser.py',855),
+ ('init_declarator -> declarator','init_declarator',1,'p_init_declarator','c_parser.py',863),
+ ('init_declarator -> declarator EQUALS initializer','init_declarator',3,'p_init_declarator','c_parser.py',864),
+ ('id_init_declarator_list -> id_init_declarator','id_init_declarator_list',1,'p_id_init_declarator_list','c_parser.py',869),
+ ('id_init_declarator_list -> id_init_declarator_list COMMA init_declarator','id_init_declarator_list',3,'p_id_init_declarator_list','c_parser.py',870),
+ ('id_init_declarator -> id_declarator','id_init_declarator',1,'p_id_init_declarator','c_parser.py',875),
+ ('id_init_declarator -> id_declarator EQUALS initializer','id_init_declarator',3,'p_id_init_declarator','c_parser.py',876),
+ ('specifier_qualifier_list -> specifier_qualifier_list type_specifier_no_typeid','specifier_qualifier_list',2,'p_specifier_qualifier_list_1','c_parser.py',883),
+ ('specifier_qualifier_list -> specifier_qualifier_list type_qualifier','specifier_qualifier_list',2,'p_specifier_qualifier_list_2','c_parser.py',888),
+ ('specifier_qualifier_list -> type_specifier','specifier_qualifier_list',1,'p_specifier_qualifier_list_3','c_parser.py',893),
+ ('specifier_qualifier_list -> type_qualifier_list type_specifier','specifier_qualifier_list',2,'p_specifier_qualifier_list_4','c_parser.py',898),
+ ('struct_or_union_specifier -> struct_or_union ID','struct_or_union_specifier',2,'p_struct_or_union_specifier_1','c_parser.py',907),
+ ('struct_or_union_specifier -> struct_or_union TYPEID','struct_or_union_specifier',2,'p_struct_or_union_specifier_1','c_parser.py',908),
+ ('struct_or_union_specifier -> struct_or_union brace_open struct_declaration_list brace_close','struct_or_union_specifier',4,'p_struct_or_union_specifier_2','c_parser.py',918),
+ ('struct_or_union_specifier -> struct_or_union brace_open brace_close','struct_or_union_specifier',3,'p_struct_or_union_specifier_2','c_parser.py',919),
+ ('struct_or_union_specifier -> struct_or_union ID brace_open struct_declaration_list brace_close','struct_or_union_specifier',5,'p_struct_or_union_specifier_3','c_parser.py',936),
+ ('struct_or_union_specifier -> struct_or_union ID brace_open brace_close','struct_or_union_specifier',4,'p_struct_or_union_specifier_3','c_parser.py',937),
+ ('struct_or_union_specifier -> struct_or_union TYPEID brace_open struct_declaration_list brace_close','struct_or_union_specifier',5,'p_struct_or_union_specifier_3','c_parser.py',938),
+ ('struct_or_union_specifier -> struct_or_union TYPEID brace_open brace_close','struct_or_union_specifier',4,'p_struct_or_union_specifier_3','c_parser.py',939),
+ ('struct_or_union -> STRUCT','struct_or_union',1,'p_struct_or_union','c_parser.py',955),
+ ('struct_or_union -> UNION','struct_or_union',1,'p_struct_or_union','c_parser.py',956),
+ ('struct_declaration_list -> struct_declaration','struct_declaration_list',1,'p_struct_declaration_list','c_parser.py',963),
+ ('struct_declaration_list -> struct_declaration_list struct_declaration','struct_declaration_list',2,'p_struct_declaration_list','c_parser.py',964),
+ ('struct_declaration -> specifier_qualifier_list struct_declarator_list_opt SEMI','struct_declaration',3,'p_struct_declaration_1','c_parser.py',972),
+ ('struct_declaration -> SEMI','struct_declaration',1,'p_struct_declaration_2','c_parser.py',1010),
+ ('struct_declaration -> pppragma_directive','struct_declaration',1,'p_struct_declaration_3','c_parser.py',1015),
+ ('struct_declarator_list -> struct_declarator','struct_declarator_list',1,'p_struct_declarator_list','c_parser.py',1020),
+ ('struct_declarator_list -> struct_declarator_list COMMA struct_declarator','struct_declarator_list',3,'p_struct_declarator_list','c_parser.py',1021),
+ ('struct_declarator -> declarator','struct_declarator',1,'p_struct_declarator_1','c_parser.py',1029),
+ ('struct_declarator -> declarator COLON constant_expression','struct_declarator',3,'p_struct_declarator_2','c_parser.py',1034),
+ ('struct_declarator -> COLON constant_expression','struct_declarator',2,'p_struct_declarator_2','c_parser.py',1035),
+ ('enum_specifier -> ENUM ID','enum_specifier',2,'p_enum_specifier_1','c_parser.py',1043),
+ ('enum_specifier -> ENUM TYPEID','enum_specifier',2,'p_enum_specifier_1','c_parser.py',1044),
+ ('enum_specifier -> ENUM brace_open enumerator_list brace_close','enum_specifier',4,'p_enum_specifier_2','c_parser.py',1049),
+ ('enum_specifier -> ENUM ID brace_open enumerator_list brace_close','enum_specifier',5,'p_enum_specifier_3','c_parser.py',1054),
+ ('enum_specifier -> ENUM TYPEID brace_open enumerator_list brace_close','enum_specifier',5,'p_enum_specifier_3','c_parser.py',1055),
+ ('enumerator_list -> enumerator','enumerator_list',1,'p_enumerator_list','c_parser.py',1060),
+ ('enumerator_list -> enumerator_list COMMA','enumerator_list',2,'p_enumerator_list','c_parser.py',1061),
+ ('enumerator_list -> enumerator_list COMMA enumerator','enumerator_list',3,'p_enumerator_list','c_parser.py',1062),
+ ('enumerator -> ID','enumerator',1,'p_enumerator','c_parser.py',1073),
+ ('enumerator -> ID EQUALS constant_expression','enumerator',3,'p_enumerator','c_parser.py',1074),
+ ('declarator -> id_declarator','declarator',1,'p_declarator','c_parser.py',1089),
+ ('declarator -> typeid_declarator','declarator',1,'p_declarator','c_parser.py',1090),
+ ('pointer -> TIMES type_qualifier_list_opt','pointer',2,'p_pointer','c_parser.py',1201),
+ ('pointer -> TIMES type_qualifier_list_opt pointer','pointer',3,'p_pointer','c_parser.py',1202),
+ ('type_qualifier_list -> type_qualifier','type_qualifier_list',1,'p_type_qualifier_list','c_parser.py',1231),
+ ('type_qualifier_list -> type_qualifier_list type_qualifier','type_qualifier_list',2,'p_type_qualifier_list','c_parser.py',1232),
+ ('parameter_type_list -> parameter_list','parameter_type_list',1,'p_parameter_type_list','c_parser.py',1237),
+ ('parameter_type_list -> parameter_list COMMA ELLIPSIS','parameter_type_list',3,'p_parameter_type_list','c_parser.py',1238),
+ ('parameter_list -> parameter_declaration','parameter_list',1,'p_parameter_list','c_parser.py',1246),
+ ('parameter_list -> parameter_list COMMA parameter_declaration','parameter_list',3,'p_parameter_list','c_parser.py',1247),
+ ('parameter_declaration -> declaration_specifiers id_declarator','parameter_declaration',2,'p_parameter_declaration_1','c_parser.py',1266),
+ ('parameter_declaration -> declaration_specifiers typeid_noparen_declarator','parameter_declaration',2,'p_parameter_declaration_1','c_parser.py',1267),
+ ('parameter_declaration -> declaration_specifiers abstract_declarator_opt','parameter_declaration',2,'p_parameter_declaration_2','c_parser.py',1278),
+ ('identifier_list -> identifier','identifier_list',1,'p_identifier_list','c_parser.py',1309),
+ ('identifier_list -> identifier_list COMMA identifier','identifier_list',3,'p_identifier_list','c_parser.py',1310),
+ ('initializer -> assignment_expression','initializer',1,'p_initializer_1','c_parser.py',1319),
+ ('initializer -> brace_open initializer_list_opt brace_close','initializer',3,'p_initializer_2','c_parser.py',1324),
+ ('initializer -> brace_open initializer_list COMMA brace_close','initializer',4,'p_initializer_2','c_parser.py',1325),
+ ('initializer_list -> designation_opt initializer','initializer_list',2,'p_initializer_list','c_parser.py',1333),
+ ('initializer_list -> initializer_list COMMA designation_opt initializer','initializer_list',4,'p_initializer_list','c_parser.py',1334),
+ ('designation -> designator_list EQUALS','designation',2,'p_designation','c_parser.py',1345),
+ ('designator_list -> designator','designator_list',1,'p_designator_list','c_parser.py',1353),
+ ('designator_list -> designator_list designator','designator_list',2,'p_designator_list','c_parser.py',1354),
+ ('designator -> LBRACKET constant_expression RBRACKET','designator',3,'p_designator','c_parser.py',1359),
+ ('designator -> PERIOD identifier','designator',2,'p_designator','c_parser.py',1360),
+ ('type_name -> specifier_qualifier_list abstract_declarator_opt','type_name',2,'p_type_name','c_parser.py',1365),
+ ('abstract_declarator -> pointer','abstract_declarator',1,'p_abstract_declarator_1','c_parser.py',1376),
+ ('abstract_declarator -> pointer direct_abstract_declarator','abstract_declarator',2,'p_abstract_declarator_2','c_parser.py',1384),
+ ('abstract_declarator -> direct_abstract_declarator','abstract_declarator',1,'p_abstract_declarator_3','c_parser.py',1389),
+ ('direct_abstract_declarator -> LPAREN abstract_declarator RPAREN','direct_abstract_declarator',3,'p_direct_abstract_declarator_1','c_parser.py',1399),
+ ('direct_abstract_declarator -> direct_abstract_declarator LBRACKET assignment_expression_opt RBRACKET','direct_abstract_declarator',4,'p_direct_abstract_declarator_2','c_parser.py',1403),
+ ('direct_abstract_declarator -> LBRACKET assignment_expression_opt RBRACKET','direct_abstract_declarator',3,'p_direct_abstract_declarator_3','c_parser.py',1414),
+ ('direct_abstract_declarator -> direct_abstract_declarator LBRACKET TIMES RBRACKET','direct_abstract_declarator',4,'p_direct_abstract_declarator_4','c_parser.py',1423),
+ ('direct_abstract_declarator -> LBRACKET TIMES RBRACKET','direct_abstract_declarator',3,'p_direct_abstract_declarator_5','c_parser.py',1434),
+ ('direct_abstract_declarator -> direct_abstract_declarator LPAREN parameter_type_list_opt RPAREN','direct_abstract_declarator',4,'p_direct_abstract_declarator_6','c_parser.py',1443),
+ ('direct_abstract_declarator -> LPAREN parameter_type_list_opt RPAREN','direct_abstract_declarator',3,'p_direct_abstract_declarator_7','c_parser.py',1453),
+ ('block_item -> declaration','block_item',1,'p_block_item','c_parser.py',1464),
+ ('block_item -> statement','block_item',1,'p_block_item','c_parser.py',1465),
+ ('block_item_list -> block_item','block_item_list',1,'p_block_item_list','c_parser.py',1472),
+ ('block_item_list -> block_item_list block_item','block_item_list',2,'p_block_item_list','c_parser.py',1473),
+ ('compound_statement -> brace_open block_item_list_opt brace_close','compound_statement',3,'p_compound_statement_1','c_parser.py',1479),
+ ('labeled_statement -> ID COLON pragmacomp_or_statement','labeled_statement',3,'p_labeled_statement_1','c_parser.py',1485),
+ ('labeled_statement -> CASE constant_expression COLON pragmacomp_or_statement','labeled_statement',4,'p_labeled_statement_2','c_parser.py',1489),
+ ('labeled_statement -> DEFAULT COLON pragmacomp_or_statement','labeled_statement',3,'p_labeled_statement_3','c_parser.py',1493),
+ ('selection_statement -> IF LPAREN expression RPAREN pragmacomp_or_statement','selection_statement',5,'p_selection_statement_1','c_parser.py',1497),
+ ('selection_statement -> IF LPAREN expression RPAREN statement ELSE pragmacomp_or_statement','selection_statement',7,'p_selection_statement_2','c_parser.py',1501),
+ ('selection_statement -> SWITCH LPAREN expression RPAREN pragmacomp_or_statement','selection_statement',5,'p_selection_statement_3','c_parser.py',1505),
+ ('iteration_statement -> WHILE LPAREN expression RPAREN pragmacomp_or_statement','iteration_statement',5,'p_iteration_statement_1','c_parser.py',1510),
+ ('iteration_statement -> DO pragmacomp_or_statement WHILE LPAREN expression RPAREN SEMI','iteration_statement',7,'p_iteration_statement_2','c_parser.py',1514),
+ ('iteration_statement -> FOR LPAREN expression_opt SEMI expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement','iteration_statement',9,'p_iteration_statement_3','c_parser.py',1518),
+ ('iteration_statement -> FOR LPAREN declaration expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement','iteration_statement',8,'p_iteration_statement_4','c_parser.py',1522),
+ ('jump_statement -> GOTO ID SEMI','jump_statement',3,'p_jump_statement_1','c_parser.py',1527),
+ ('jump_statement -> BREAK SEMI','jump_statement',2,'p_jump_statement_2','c_parser.py',1531),
+ ('jump_statement -> CONTINUE SEMI','jump_statement',2,'p_jump_statement_3','c_parser.py',1535),
+ ('jump_statement -> RETURN expression SEMI','jump_statement',3,'p_jump_statement_4','c_parser.py',1539),
+ ('jump_statement -> RETURN SEMI','jump_statement',2,'p_jump_statement_4','c_parser.py',1540),
+ ('expression_statement -> expression_opt SEMI','expression_statement',2,'p_expression_statement','c_parser.py',1545),
+ ('expression -> assignment_expression','expression',1,'p_expression','c_parser.py',1552),
+ ('expression -> expression COMMA assignment_expression','expression',3,'p_expression','c_parser.py',1553),
+ ('typedef_name -> TYPEID','typedef_name',1,'p_typedef_name','c_parser.py',1565),
+ ('assignment_expression -> conditional_expression','assignment_expression',1,'p_assignment_expression','c_parser.py',1569),
+ ('assignment_expression -> unary_expression assignment_operator assignment_expression','assignment_expression',3,'p_assignment_expression','c_parser.py',1570),
+ ('assignment_operator -> EQUALS','assignment_operator',1,'p_assignment_operator','c_parser.py',1583),
+ ('assignment_operator -> XOREQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1584),
+ ('assignment_operator -> TIMESEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1585),
+ ('assignment_operator -> DIVEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1586),
+ ('assignment_operator -> MODEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1587),
+ ('assignment_operator -> PLUSEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1588),
+ ('assignment_operator -> MINUSEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1589),
+ ('assignment_operator -> LSHIFTEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1590),
+ ('assignment_operator -> RSHIFTEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1591),
+ ('assignment_operator -> ANDEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1592),
+ ('assignment_operator -> OREQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1593),
+ ('constant_expression -> conditional_expression','constant_expression',1,'p_constant_expression','c_parser.py',1598),
+ ('conditional_expression -> binary_expression','conditional_expression',1,'p_conditional_expression','c_parser.py',1602),
+ ('conditional_expression -> binary_expression CONDOP expression COLON conditional_expression','conditional_expression',5,'p_conditional_expression','c_parser.py',1603),
+ ('binary_expression -> cast_expression','binary_expression',1,'p_binary_expression','c_parser.py',1611),
+ ('binary_expression -> binary_expression TIMES binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1612),
+ ('binary_expression -> binary_expression DIVIDE binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1613),
+ ('binary_expression -> binary_expression MOD binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1614),
+ ('binary_expression -> binary_expression PLUS binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1615),
+ ('binary_expression -> binary_expression MINUS binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1616),
+ ('binary_expression -> binary_expression RSHIFT binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1617),
+ ('binary_expression -> binary_expression LSHIFT binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1618),
+ ('binary_expression -> binary_expression LT binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1619),
+ ('binary_expression -> binary_expression LE binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1620),
+ ('binary_expression -> binary_expression GE binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1621),
+ ('binary_expression -> binary_expression GT binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1622),
+ ('binary_expression -> binary_expression EQ binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1623),
+ ('binary_expression -> binary_expression NE binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1624),
+ ('binary_expression -> binary_expression AND binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1625),
+ ('binary_expression -> binary_expression OR binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1626),
+ ('binary_expression -> binary_expression XOR binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1627),
+ ('binary_expression -> binary_expression LAND binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1628),
+ ('binary_expression -> binary_expression LOR binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1629),
+ ('cast_expression -> unary_expression','cast_expression',1,'p_cast_expression_1','c_parser.py',1637),
+ ('cast_expression -> LPAREN type_name RPAREN cast_expression','cast_expression',4,'p_cast_expression_2','c_parser.py',1641),
+ ('unary_expression -> postfix_expression','unary_expression',1,'p_unary_expression_1','c_parser.py',1645),
+ ('unary_expression -> PLUSPLUS unary_expression','unary_expression',2,'p_unary_expression_2','c_parser.py',1649),
+ ('unary_expression -> MINUSMINUS unary_expression','unary_expression',2,'p_unary_expression_2','c_parser.py',1650),
+ ('unary_expression -> unary_operator cast_expression','unary_expression',2,'p_unary_expression_2','c_parser.py',1651),
+ ('unary_expression -> SIZEOF unary_expression','unary_expression',2,'p_unary_expression_3','c_parser.py',1656),
+ ('unary_expression -> SIZEOF LPAREN type_name RPAREN','unary_expression',4,'p_unary_expression_3','c_parser.py',1657),
+ ('unary_operator -> AND','unary_operator',1,'p_unary_operator','c_parser.py',1665),
+ ('unary_operator -> TIMES','unary_operator',1,'p_unary_operator','c_parser.py',1666),
+ ('unary_operator -> PLUS','unary_operator',1,'p_unary_operator','c_parser.py',1667),
+ ('unary_operator -> MINUS','unary_operator',1,'p_unary_operator','c_parser.py',1668),
+ ('unary_operator -> NOT','unary_operator',1,'p_unary_operator','c_parser.py',1669),
+ ('unary_operator -> LNOT','unary_operator',1,'p_unary_operator','c_parser.py',1670),
+ ('postfix_expression -> primary_expression','postfix_expression',1,'p_postfix_expression_1','c_parser.py',1675),
+ ('postfix_expression -> postfix_expression LBRACKET expression RBRACKET','postfix_expression',4,'p_postfix_expression_2','c_parser.py',1679),
+ ('postfix_expression -> postfix_expression LPAREN argument_expression_list RPAREN','postfix_expression',4,'p_postfix_expression_3','c_parser.py',1683),
+ ('postfix_expression -> postfix_expression LPAREN RPAREN','postfix_expression',3,'p_postfix_expression_3','c_parser.py',1684),
+ ('postfix_expression -> postfix_expression PERIOD ID','postfix_expression',3,'p_postfix_expression_4','c_parser.py',1689),
+ ('postfix_expression -> postfix_expression PERIOD TYPEID','postfix_expression',3,'p_postfix_expression_4','c_parser.py',1690),
+ ('postfix_expression -> postfix_expression ARROW ID','postfix_expression',3,'p_postfix_expression_4','c_parser.py',1691),
+ ('postfix_expression -> postfix_expression ARROW TYPEID','postfix_expression',3,'p_postfix_expression_4','c_parser.py',1692),
+ ('postfix_expression -> postfix_expression PLUSPLUS','postfix_expression',2,'p_postfix_expression_5','c_parser.py',1698),
+ ('postfix_expression -> postfix_expression MINUSMINUS','postfix_expression',2,'p_postfix_expression_5','c_parser.py',1699),
+ ('postfix_expression -> LPAREN type_name RPAREN brace_open initializer_list brace_close','postfix_expression',6,'p_postfix_expression_6','c_parser.py',1704),
+ ('postfix_expression -> LPAREN type_name RPAREN brace_open initializer_list COMMA brace_close','postfix_expression',7,'p_postfix_expression_6','c_parser.py',1705),
+ ('primary_expression -> identifier','primary_expression',1,'p_primary_expression_1','c_parser.py',1710),
+ ('primary_expression -> constant','primary_expression',1,'p_primary_expression_2','c_parser.py',1714),
+ ('primary_expression -> unified_string_literal','primary_expression',1,'p_primary_expression_3','c_parser.py',1718),
+ ('primary_expression -> unified_wstring_literal','primary_expression',1,'p_primary_expression_3','c_parser.py',1719),
+ ('primary_expression -> LPAREN expression RPAREN','primary_expression',3,'p_primary_expression_4','c_parser.py',1724),
+ ('primary_expression -> OFFSETOF LPAREN type_name COMMA offsetof_member_designator RPAREN','primary_expression',6,'p_primary_expression_5','c_parser.py',1728),
+ ('offsetof_member_designator -> identifier','offsetof_member_designator',1,'p_offsetof_member_designator','c_parser.py',1736),
+ ('offsetof_member_designator -> offsetof_member_designator PERIOD identifier','offsetof_member_designator',3,'p_offsetof_member_designator','c_parser.py',1737),
+ ('offsetof_member_designator -> offsetof_member_designator LBRACKET expression RBRACKET','offsetof_member_designator',4,'p_offsetof_member_designator','c_parser.py',1738),
+ ('argument_expression_list -> assignment_expression','argument_expression_list',1,'p_argument_expression_list','c_parser.py',1751),
+ ('argument_expression_list -> argument_expression_list COMMA assignment_expression','argument_expression_list',3,'p_argument_expression_list','c_parser.py',1752),
+ ('identifier -> ID','identifier',1,'p_identifier','c_parser.py',1761),
+ ('constant -> INT_CONST_DEC','constant',1,'p_constant_1','c_parser.py',1765),
+ ('constant -> INT_CONST_OCT','constant',1,'p_constant_1','c_parser.py',1766),
+ ('constant -> INT_CONST_HEX','constant',1,'p_constant_1','c_parser.py',1767),
+ ('constant -> INT_CONST_BIN','constant',1,'p_constant_1','c_parser.py',1768),
+ ('constant -> FLOAT_CONST','constant',1,'p_constant_2','c_parser.py',1774),
+ ('constant -> HEX_FLOAT_CONST','constant',1,'p_constant_2','c_parser.py',1775),
+ ('constant -> CHAR_CONST','constant',1,'p_constant_3','c_parser.py',1791),
+ ('constant -> WCHAR_CONST','constant',1,'p_constant_3','c_parser.py',1792),
+ ('unified_string_literal -> STRING_LITERAL','unified_string_literal',1,'p_unified_string_literal','c_parser.py',1803),
+ ('unified_string_literal -> unified_string_literal STRING_LITERAL','unified_string_literal',2,'p_unified_string_literal','c_parser.py',1804),
+ ('unified_wstring_literal -> WSTRING_LITERAL','unified_wstring_literal',1,'p_unified_wstring_literal','c_parser.py',1814),
+ ('unified_wstring_literal -> unified_wstring_literal WSTRING_LITERAL','unified_wstring_literal',2,'p_unified_wstring_literal','c_parser.py',1815),
+ ('brace_open -> LBRACE','brace_open',1,'p_brace_open','c_parser.py',1825),
+ ('brace_close -> RBRACE','brace_close',1,'p_brace_close','c_parser.py',1831),
+ ('empty -> ','empty',0,'p_empty','c_parser.py',1837),
]
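The list closed above is the tail of a PLY-generated LR parser table (the grammar productions for pycparser's c_parser.py) vendored into the CreateSSHKey function; it is machine-generated output, not hand-written code. As a rough usage sketch, assuming the vendored pycparser package is importable from the Lambda bundle, these tables are what the parser loads when it is constructed:

    # Illustrative only: pycparser builds its C parser on top of the
    # PLY-generated tables whose productions are listed above.
    from pycparser import c_parser

    parser = c_parser.CParser()        # constructs the parser from the LR tables
    ast = parser.parse("int x = 1;")   # parse a small C snippet
    ast.show()                         # dump the resulting AST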
diff --git a/functions/source/CreateSSHKey/six-1.15.0.dist-info/INSTALLER b/functions/source/CreateSSHKey/six-1.15.0.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/functions/source/CreateSSHKey/six-1.15.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/functions/source/CreateSSHKey/six-1.15.0.dist-info/LICENSE b/functions/source/CreateSSHKey/six-1.15.0.dist-info/LICENSE
new file mode 100644
index 0000000..de66331
--- /dev/null
+++ b/functions/source/CreateSSHKey/six-1.15.0.dist-info/LICENSE
@@ -0,0 +1,18 @@
+Copyright (c) 2010-2020 Benjamin Peterson
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/functions/source/CreateSSHKey/six-1.15.0.dist-info/METADATA b/functions/source/CreateSSHKey/six-1.15.0.dist-info/METADATA
new file mode 100644
index 0000000..869bf25
--- /dev/null
+++ b/functions/source/CreateSSHKey/six-1.15.0.dist-info/METADATA
@@ -0,0 +1,49 @@
+Metadata-Version: 2.1
+Name: six
+Version: 1.15.0
+Summary: Python 2 and 3 compatibility utilities
+Home-page: https://github.com/benjaminp/six
+Author: Benjamin Peterson
+Author-email: benjamin@python.org
+License: MIT
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 3
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: Utilities
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*
+
+.. image:: https://img.shields.io/pypi/v/six.svg
+ :target: https://pypi.org/project/six/
+ :alt: six on PyPI
+
+.. image:: https://travis-ci.org/benjaminp/six.svg?branch=master
+ :target: https://travis-ci.org/benjaminp/six
+ :alt: six on TravisCI
+
+.. image:: https://readthedocs.org/projects/six/badge/?version=latest
+ :target: https://six.readthedocs.io/
+ :alt: six's documentation on Read the Docs
+
+.. image:: https://img.shields.io/badge/license-MIT-green.svg
+ :target: https://github.com/benjaminp/six/blob/master/LICENSE
+ :alt: MIT License badge
+
+Six is a Python 2 and 3 compatibility library. It provides utility functions
+for smoothing over the differences between the Python versions with the goal of
+writing Python code that is compatible on both Python versions. See the
+documentation for more information on what is provided.
+
+Six supports Python 2.7 and 3.3+. It is contained in only one Python
+file, so it can be easily copied into your project. (The copyright and license
+notice must be retained.)
+
+Online documentation is at https://six.readthedocs.io/.
+
+Bugs can be reported to https://github.com/benjaminp/six. The code can also
+be found there.
+
+
diff --git a/functions/source/CreateSSHKey/six-1.15.0.dist-info/RECORD b/functions/source/CreateSSHKey/six-1.15.0.dist-info/RECORD
new file mode 100644
index 0000000..80bf846
--- /dev/null
+++ b/functions/source/CreateSSHKey/six-1.15.0.dist-info/RECORD
@@ -0,0 +1,8 @@
+__pycache__/six.cpython-38.pyc,,
+six-1.15.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+six-1.15.0.dist-info/LICENSE,sha256=i7hQxWWqOJ_cFvOkaWWtI9gq3_YPI5P8J2K2MYXo5sk,1066
+six-1.15.0.dist-info/METADATA,sha256=W6rlyoeMZHXh6srP9NXNsm0rjAf_660re8WdH5TBT8E,1795
+six-1.15.0.dist-info/RECORD,,
+six-1.15.0.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110
+six-1.15.0.dist-info/top_level.txt,sha256=_iVH_iYEtEXnD8nYGQYpYFUvkUW9sEO1GYbkeKSAais,4
+six.py,sha256=U4Z_yv534W5CNyjY9i8V1OXY2SjAny8y2L5vDLhhThM,34159
diff --git a/functions/source/CreateSSHKey/six-1.15.0.dist-info/WHEEL b/functions/source/CreateSSHKey/six-1.15.0.dist-info/WHEEL
new file mode 100644
index 0000000..ef99c6c
--- /dev/null
+++ b/functions/source/CreateSSHKey/six-1.15.0.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.34.2)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/functions/source/CreateSSHKey/six-1.15.0.dist-info/top_level.txt b/functions/source/CreateSSHKey/six-1.15.0.dist-info/top_level.txt
new file mode 100644
index 0000000..ffe2fce
--- /dev/null
+++ b/functions/source/CreateSSHKey/six-1.15.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+six
diff --git a/functions/source/CreateSSHKey/six.py b/functions/source/CreateSSHKey/six.py
index 6bf4fd3..83f6978 100644
--- a/functions/source/CreateSSHKey/six.py
+++ b/functions/source/CreateSSHKey/six.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2010-2017 Benjamin Peterson
+# Copyright (c) 2010-2020 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
@@ -29,7 +29,7 @@
import types
__author__ = "Benjamin Peterson <benjamin@python.org>"
-__version__ = "1.11.0"
+__version__ = "1.15.0"
# Useful for very coarse version differentiation.
@@ -255,9 +255,11 @@ class _MovedItems(_LazyModule):
MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
MovedModule("builtins", "__builtin__"),
MovedModule("configparser", "ConfigParser"),
+ MovedModule("collections_abc", "collections", "collections.abc" if sys.version_info >= (3, 3) else "collections"),
MovedModule("copyreg", "copy_reg"),
MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
- MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
+ MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"),
+ MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread" if sys.version_info < (3, 9) else "_thread"),
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
MovedModule("http_cookies", "Cookie", "http.cookies"),
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
@@ -637,13 +639,16 @@ def u(s):
import io
StringIO = io.StringIO
BytesIO = io.BytesIO
+ del io
_assertCountEqual = "assertCountEqual"
if sys.version_info[1] <= 1:
_assertRaisesRegex = "assertRaisesRegexp"
_assertRegex = "assertRegexpMatches"
+ _assertNotRegex = "assertNotRegexpMatches"
else:
_assertRaisesRegex = "assertRaisesRegex"
_assertRegex = "assertRegex"
+ _assertNotRegex = "assertNotRegex"
else:
def b(s):
return s
@@ -665,6 +670,7 @@ def indexbytes(buf, i):
_assertCountEqual = "assertItemsEqual"
_assertRaisesRegex = "assertRaisesRegexp"
_assertRegex = "assertRegexpMatches"
+ _assertNotRegex = "assertNotRegexpMatches"
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
@@ -681,6 +687,10 @@ def assertRegex(self, *args, **kwargs):
return getattr(self, _assertRegex)(*args, **kwargs)
+def assertNotRegex(self, *args, **kwargs):
+ return getattr(self, _assertNotRegex)(*args, **kwargs)
+
+
if PY3:
exec_ = getattr(moves.builtins, "exec")
@@ -716,16 +726,7 @@ def exec_(_code_, _globs_=None, _locs_=None):
""")
-if sys.version_info[:2] == (3, 2):
- exec_("""def raise_from(value, from_value):
- try:
- if from_value is None:
- raise value
- raise value from from_value
- finally:
- value = None
-""")
-elif sys.version_info[:2] > (3, 2):
+if sys.version_info[:2] > (3,):
exec_("""def raise_from(value, from_value):
try:
raise value from from_value
@@ -805,13 +806,33 @@ def print_(*args, **kwargs):
_add_doc(reraise, """Reraise an exception.""")
if sys.version_info[0:2] < (3, 4):
+ # This does exactly the same what the :func:`py3:functools.update_wrapper`
+ # function does on Python versions after 3.2. It sets the ``__wrapped__``
+ # attribute on ``wrapper`` object and it doesn't raise an error if any of
+ # the attributes mentioned in ``assigned`` and ``updated`` are missing on
+ # ``wrapped`` object.
+ def _update_wrapper(wrapper, wrapped,
+ assigned=functools.WRAPPER_ASSIGNMENTS,
+ updated=functools.WRAPPER_UPDATES):
+ for attr in assigned:
+ try:
+ value = getattr(wrapped, attr)
+ except AttributeError:
+ continue
+ else:
+ setattr(wrapper, attr, value)
+ for attr in updated:
+ getattr(wrapper, attr).update(getattr(wrapped, attr, {}))
+ wrapper.__wrapped__ = wrapped
+ return wrapper
+ _update_wrapper.__doc__ = functools.update_wrapper.__doc__
+
def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
updated=functools.WRAPPER_UPDATES):
- def wrapper(f):
- f = functools.wraps(wrapped, assigned, updated)(f)
- f.__wrapped__ = wrapped
- return f
- return wrapper
+ return functools.partial(_update_wrapper, wrapped=wrapped,
+ assigned=assigned, updated=updated)
+ wraps.__doc__ = functools.wraps.__doc__
+
else:
wraps = functools.wraps
@@ -824,7 +845,15 @@ def with_metaclass(meta, *bases):
class metaclass(type):
def __new__(cls, name, this_bases, d):
- return meta(name, bases, d)
+ if sys.version_info[:2] >= (3, 7):
+ # This version introduced PEP 560 that requires a bit
+ # of extra care (we mimic what is done by __build_class__).
+ resolved_bases = types.resolve_bases(bases)
+ if resolved_bases is not bases:
+ d['__orig_bases__'] = bases
+ else:
+ resolved_bases = bases
+ return meta(name, resolved_bases, d)
@classmethod
def __prepare__(cls, name, this_bases):
@@ -844,13 +873,75 @@ def wrapper(cls):
orig_vars.pop(slots_var)
orig_vars.pop('__dict__', None)
orig_vars.pop('__weakref__', None)
+ if hasattr(cls, '__qualname__'):
+ orig_vars['__qualname__'] = cls.__qualname__
return metaclass(cls.__name__, cls.__bases__, orig_vars)
return wrapper
+def ensure_binary(s, encoding='utf-8', errors='strict'):
+ """Coerce **s** to six.binary_type.
+
+ For Python 2:
+ - `unicode` -> encoded to `str`
+ - `str` -> `str`
+
+ For Python 3:
+ - `str` -> encoded to `bytes`
+ - `bytes` -> `bytes`
+ """
+ if isinstance(s, binary_type):
+ return s
+ if isinstance(s, text_type):
+ return s.encode(encoding, errors)
+ raise TypeError("not expecting type '%s'" % type(s))
+
+
+def ensure_str(s, encoding='utf-8', errors='strict'):
+ """Coerce *s* to `str`.
+
+ For Python 2:
+ - `unicode` -> encoded to `str`
+ - `str` -> `str`
+
+ For Python 3:
+ - `str` -> `str`
+ - `bytes` -> decoded to `str`
+ """
+ # Optimization: Fast return for the common case.
+ if type(s) is str:
+ return s
+ if PY2 and isinstance(s, text_type):
+ return s.encode(encoding, errors)
+ elif PY3 and isinstance(s, binary_type):
+ return s.decode(encoding, errors)
+ elif not isinstance(s, (text_type, binary_type)):
+ raise TypeError("not expecting type '%s'" % type(s))
+ return s
+
+
+def ensure_text(s, encoding='utf-8', errors='strict'):
+ """Coerce *s* to six.text_type.
+
+ For Python 2:
+ - `unicode` -> `unicode`
+ - `str` -> `unicode`
+
+ For Python 3:
+ - `str` -> `str`
+ - `bytes` -> decoded to `str`
+ """
+ if isinstance(s, binary_type):
+ return s.decode(encoding, errors)
+ elif isinstance(s, text_type):
+ return s
+ else:
+ raise TypeError("not expecting type '%s'" % type(s))
+
+
def python_2_unicode_compatible(klass):
"""
- A decorator that defines __unicode__ and __str__ methods under Python 2.
+ A class decorator that defines __unicode__ and __str__ methods under Python 2.
Under Python 3 it does nothing.
To support Python 2 and 3 with a single code base, define a __str__ method
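The six.py hunks above track the upgrade from 1.11.0 to 1.15.0: new collections_abc/dbm_ndbm moved modules, assertNotRegex, a PEP 560-aware with_metaclass, a reworked wraps backport, and the ensure_binary/ensure_str/ensure_text helpers. A minimal usage sketch of the new ensure_* helpers, following the docstrings shown in the diff:

    import six

    # Coerce between text and bytes consistently on Python 2 and 3.
    assert six.ensure_binary("abc") == b"abc"   # text  -> bytes (encoded)
    assert six.ensure_text(b"abc") == "abc"     # bytes -> text  (decoded)
    assert six.ensure_str(b"abc") == "abc"      # always the native str type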
diff --git a/functions/source/DeleteBucketContents/cfnresponse.py b/functions/source/DeleteBucketContents/cfnresponse.py
index bb955a5..a146b3f 100644
--- a/functions/source/DeleteBucketContents/cfnresponse.py
+++ b/functions/source/DeleteBucketContents/cfnresponse.py
@@ -1,20 +1,20 @@
-# Copyright 2016 Amazon Web Services, Inc. or its affiliates. All Rights Reserved.
+# Copyright 2020 Amazon Web Services, Inc. or its affiliates. All Rights Reserved.
# This file is licensed to you under the AWS Customer Agreement (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at http://aws.amazon.com/agreement/ .
# This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, express or implied.
# See the License for the specific language governing permissions and limitations under the License.
-from botocore.vendored import requests
+import urllib3
import json
-
+http = urllib3.PoolManager()
SUCCESS = "SUCCESS"
FAILED = "FAILED"
-def send(event, context, responseStatus, responseData, physicalResourceId):
+def send(event, context, responseStatus, responseData, physicalResourceId=None, noEcho=False):
responseUrl = event['ResponseURL']
- print responseUrl
+ print(responseUrl)
responseBody = {}
responseBody['Status'] = responseStatus
@@ -23,21 +23,21 @@ def send(event, context, responseStatus, responseData, physicalResourceId):
responseBody['StackId'] = event['StackId']
responseBody['RequestId'] = event['RequestId']
responseBody['LogicalResourceId'] = event['LogicalResourceId']
+ responseBody['NoEcho'] = noEcho
responseBody['Data'] = responseData
json_responseBody = json.dumps(responseBody)
-
- print "Response body:\n" + json_responseBody
+
+ print("Response body:\n" + json_responseBody)
headers = {
- 'content-type' : '',
+ 'content-type' : '',
'content-length' : str(len(json_responseBody))
}
-
+
try:
- response = requests.put(responseUrl,
- data=json_responseBody,
- headers=headers)
- print "Status code: " + response.reason
+
+ response = http.request('PUT',responseUrl,body=json_responseBody.encode('utf-8'),headers=headers)
+ print("Status code: " + response.reason)
except Exception as e:
- print "send(..) failed executing requests.put(..): " + str(e)
+ print("send(..) failed executing requests.put(..): " + str(e))
\ No newline at end of file
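The cfnresponse.py rewrite above replaces the deprecated botocore.vendored.requests client with urllib3, switches to Python 3 print(), and extends send() with optional physicalResourceId and noEcho arguments. A sketch of how a custom-resource handler typically calls it (the handler body here is illustrative, not part of this repository):

    import cfnresponse

    def lambda_handler(event, context):
        try:
            # ... perform the Create/Update/Delete work here ...
            data = {"Message": "done"}
            cfnresponse.send(event, context, cfnresponse.SUCCESS, data,
                             physicalResourceId="my-resource-id")
        except Exception:
            cfnresponse.send(event, context, cfnresponse.FAILED, {})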
diff --git a/functions/source/DeleteBucketContents/lambda_function.py b/functions/source/DeleteBucketContents/lambda_function.py
index d082c83..75bd202 100644
--- a/functions/source/DeleteBucketContents/lambda_function.py
+++ b/functions/source/DeleteBucketContents/lambda_function.py
@@ -1,4 +1,4 @@
-# Copyright 2016 Amazon Web Services, Inc. or its affiliates. All Rights Reserved.
+# Copyright 2020 Amazon Web Services, Inc. or its affiliates. All Rights Reserved.
# This file is licensed to you under the AWS Customer Agreement (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at http://aws.amazon.com/agreement/ .
@@ -14,13 +14,13 @@ def lambda_handler(event,context):
if event['RequestType'] == 'Delete':
s3 = boto3.client('s3')
# Delete KeyBucket contents
- print 'Getting KeyBucket objects...'
+ print ('Getting KeyBucket objects...')
s3objects = s3.list_objects_v2(Bucket=event["ResourceProperties"]["KeyBucket"])
if 'Contents' in s3objects.keys():
- print 'Deleting KeyBucket objects %s...' % str([{'Key':key['Key']} for key in s3objects['Contents']])
+ print ('Deleting KeyBucket objects %s...' % str([{'Key':key['Key']} for key in s3objects['Contents']]))
s3.delete_objects(Bucket=event["ResourceProperties"]["KeyBucket"],Delete={'Objects':[{'Key':key['Key']} for key in s3objects['Contents']]})
# Delete Output bucket contents and versions
- print 'Getting OutputBucket objects...'
+ print ('Getting OutputBucket objects...')
objects=[]
versions=s3.list_object_versions(Bucket=event["ResourceProperties"]["OutputBucket"])
while versions:
@@ -38,5 +38,5 @@ def lambda_handler(event,context):
s3.delete_objects(Bucket=event["ResourceProperties"]["OutputBucket"],Delete={'Objects':objects})
cfnresponse.send(event, context, cfnresponse.SUCCESS, {}, '')
except:
- print traceback.print_exc()
+ print (traceback.print_exc())
cfnresponse.send(event, context, cfnresponse.FAILED, {}, '')
diff --git a/functions/source/GitPullS3/.libs_cffi_backend/libffi-45372312.so.6.0.4 b/functions/source/GitPullS3/.libs_cffi_backend/libffi-45372312.so.6.0.4
deleted file mode 100755
index 59e65c0..0000000
Binary files a/functions/source/GitPullS3/.libs_cffi_backend/libffi-45372312.so.6.0.4 and /dev/null differ
diff --git a/functions/source/GitPullS3/NOTICE.txt b/functions/source/GitPullS3/NOTICE.txt
index da7f14e..218d4e7 100644
--- a/functions/source/GitPullS3/NOTICE.txt
+++ b/functions/source/GitPullS3/NOTICE.txt
@@ -1,2 +1,2 @@
Git2S3-GitPullS3
-Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
\ No newline at end of file
diff --git a/functions/source/GitPullS3/_cffi_backend.so b/functions/source/GitPullS3/_cffi_backend.so
deleted file mode 100755
index 7c699f1..0000000
Binary files a/functions/source/GitPullS3/_cffi_backend.so and /dev/null differ
diff --git a/functions/source/GitPullS3/_pygit2.so b/functions/source/GitPullS3/_pygit2.so
deleted file mode 100755
index 86c3978..0000000
Binary files a/functions/source/GitPullS3/_pygit2.so and /dev/null differ
diff --git a/functions/source/GitPullS3/cffi/__init__.py b/functions/source/GitPullS3/cffi/__init__.py
deleted file mode 100644
index 9cbfe37..0000000
--- a/functions/source/GitPullS3/cffi/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-__all__ = ['FFI', 'VerificationError', 'VerificationMissing', 'CDefError',
- 'FFIError']
-
-from .api import FFI
-from .error import CDefError, FFIError, VerificationError, VerificationMissing
-
-__version__ = "1.11.5"
-__version_info__ = (1, 11, 5)
-
-# The verifier module file names are based on the CRC32 of a string that
-# contains the following version number. It may be older than __version__
-# if nothing is clearly incompatible.
-__version_verifier_modules__ = "0.8.6"
diff --git a/functions/source/GitPullS3/cffi/_cffi_errors.h b/functions/source/GitPullS3/cffi/_cffi_errors.h
deleted file mode 100644
index 60dcc3b..0000000
--- a/functions/source/GitPullS3/cffi/_cffi_errors.h
+++ /dev/null
@@ -1,145 +0,0 @@
-#ifndef CFFI_MESSAGEBOX
-# ifdef _MSC_VER
-# define CFFI_MESSAGEBOX 1
-# else
-# define CFFI_MESSAGEBOX 0
-# endif
-#endif
-
-
-#if CFFI_MESSAGEBOX
-/* Windows only: logic to take the Python-CFFI embedding logic
- initialization errors and display them in a background thread
- with MessageBox. The idea is that if the whole program closes
- as a result of this problem, then likely it is already a console
- program and you can read the stderr output in the console too.
- If it is not a console program, then it will likely show its own
- dialog to complain, or generally not abruptly close, and for this
- case the background thread should stay alive.
-*/
-static void *volatile _cffi_bootstrap_text;
-
-static PyObject *_cffi_start_error_capture(void)
-{
- PyObject *result = NULL;
- PyObject *x, *m, *bi;
-
- if (InterlockedCompareExchangePointer(&_cffi_bootstrap_text,
- (void *)1, NULL) != NULL)
- return (PyObject *)1;
-
- m = PyImport_AddModule("_cffi_error_capture");
- if (m == NULL)
- goto error;
-
- result = PyModule_GetDict(m);
- if (result == NULL)
- goto error;
-
-#if PY_MAJOR_VERSION >= 3
- bi = PyImport_ImportModule("builtins");
-#else
- bi = PyImport_ImportModule("__builtin__");
-#endif
- if (bi == NULL)
- goto error;
- PyDict_SetItemString(result, "__builtins__", bi);
- Py_DECREF(bi);
-
- x = PyRun_String(
- "import sys\n"
- "class FileLike:\n"
- " def write(self, x):\n"
- " of.write(x)\n"
- " self.buf += x\n"
- "fl = FileLike()\n"
- "fl.buf = ''\n"
- "of = sys.stderr\n"
- "sys.stderr = fl\n"
- "def done():\n"
- " sys.stderr = of\n"
- " return fl.buf\n", /* make sure the returned value stays alive */
- Py_file_input,
- result, result);
- Py_XDECREF(x);
-
- error:
- if (PyErr_Occurred())
- {
- PyErr_WriteUnraisable(Py_None);
- PyErr_Clear();
- }
- return result;
-}
-
-#pragma comment(lib, "user32.lib")
-
-static DWORD WINAPI _cffi_bootstrap_dialog(LPVOID ignored)
-{
- Sleep(666); /* may be interrupted if the whole process is closing */
-#if PY_MAJOR_VERSION >= 3
- MessageBoxW(NULL, (wchar_t *)_cffi_bootstrap_text,
- L"Python-CFFI error",
- MB_OK | MB_ICONERROR);
-#else
- MessageBoxA(NULL, (char *)_cffi_bootstrap_text,
- "Python-CFFI error",
- MB_OK | MB_ICONERROR);
-#endif
- _cffi_bootstrap_text = NULL;
- return 0;
-}
-
-static void _cffi_stop_error_capture(PyObject *ecap)
-{
- PyObject *s;
- void *text;
-
- if (ecap == (PyObject *)1)
- return;
-
- if (ecap == NULL)
- goto error;
-
- s = PyRun_String("done()", Py_eval_input, ecap, ecap);
- if (s == NULL)
- goto error;
-
- /* Show a dialog box, but in a background thread, and
- never show multiple dialog boxes at once. */
-#if PY_MAJOR_VERSION >= 3
- text = PyUnicode_AsWideCharString(s, NULL);
-#else
- text = PyString_AsString(s);
-#endif
-
- _cffi_bootstrap_text = text;
-
- if (text != NULL)
- {
- HANDLE h;
- h = CreateThread(NULL, 0, _cffi_bootstrap_dialog,
- NULL, 0, NULL);
- if (h != NULL)
- CloseHandle(h);
- }
- /* decref the string, but it should stay alive as 'fl.buf'
- in the small module above. It will really be freed only if
- we later get another similar error. So it's a leak of at
- most one copy of the small module. That's fine for this
- situation which is usually a "fatal error" anyway. */
- Py_DECREF(s);
- PyErr_Clear();
- return;
-
- error:
- _cffi_bootstrap_text = NULL;
- PyErr_Clear();
-}
-
-#else
-
-static PyObject *_cffi_start_error_capture(void) { return NULL; }
-static void _cffi_stop_error_capture(PyObject *ecap) { }
-
-#endif
diff --git a/functions/source/GitPullS3/cffi/_cffi_include.h b/functions/source/GitPullS3/cffi/_cffi_include.h
deleted file mode 100644
index 37ea74f..0000000
--- a/functions/source/GitPullS3/cffi/_cffi_include.h
+++ /dev/null
@@ -1,308 +0,0 @@
-#define _CFFI_
-
-/* We try to define Py_LIMITED_API before including Python.h.
-
- Mess: we can only define it if Py_DEBUG, Py_TRACE_REFS and
- Py_REF_DEBUG are not defined. This is a best-effort approximation:
- we can learn about Py_DEBUG from pyconfig.h, but it is unclear if
- the same works for the other two macros. Py_DEBUG implies them,
- but not the other way around.
-
- Issue #350 is still open: on Windows, the code here causes it to link
- with PYTHON36.DLL (for example) instead of PYTHON3.DLL. A fix was
- attempted in 164e526a5515 and 14ce6985e1c3, but reverted: virtualenv
- does not make PYTHON3.DLL available, and so the "correctly" compiled
- version would not run inside a virtualenv. We will re-apply the fix
- after virtualenv has been fixed for some time. For explanation, see
- issue #355. For a workaround if you want PYTHON3.DLL and don't worry
- about virtualenv, see issue #350. See also 'py_limited_api' in
- setuptools_ext.py.
-*/
-#if !defined(_CFFI_USE_EMBEDDING) && !defined(Py_LIMITED_API)
-# include <pyconfig.h>
-# if !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG)
-# define Py_LIMITED_API
-# endif
-#endif
-
-#include <Python.h>
-#ifdef __cplusplus
-extern "C" {
-#endif
-#include <stddef.h>
-#include "parse_c_type.h"
-
-/* this block of #ifs should be kept exactly identical between
- c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py
- and cffi/_cffi_include.h */
-#if defined(_MSC_VER)
-# include <malloc.h> /* for alloca() */
-# if _MSC_VER < 1600 /* MSVC < 2010 */
- typedef __int8 int8_t;
- typedef __int16 int16_t;
- typedef __int32 int32_t;
- typedef __int64 int64_t;
- typedef unsigned __int8 uint8_t;
- typedef unsigned __int16 uint16_t;
- typedef unsigned __int32 uint32_t;
- typedef unsigned __int64 uint64_t;
- typedef __int8 int_least8_t;
- typedef __int16 int_least16_t;
- typedef __int32 int_least32_t;
- typedef __int64 int_least64_t;
- typedef unsigned __int8 uint_least8_t;
- typedef unsigned __int16 uint_least16_t;
- typedef unsigned __int32 uint_least32_t;
- typedef unsigned __int64 uint_least64_t;
- typedef __int8 int_fast8_t;
- typedef __int16 int_fast16_t;
- typedef __int32 int_fast32_t;
- typedef __int64 int_fast64_t;
- typedef unsigned __int8 uint_fast8_t;
- typedef unsigned __int16 uint_fast16_t;
- typedef unsigned __int32 uint_fast32_t;
- typedef unsigned __int64 uint_fast64_t;
- typedef __int64 intmax_t;
- typedef unsigned __int64 uintmax_t;
-# else
-# include <stdint.h>
-# endif
-# if _MSC_VER < 1800 /* MSVC < 2013 */
-# ifndef __cplusplus
- typedef unsigned char _Bool;
-# endif
-# endif
-#else
-# include <stdint.h>
-# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux)
-# include <alloca.h>
-# endif
-#endif
-
-#ifdef __GNUC__
-# define _CFFI_UNUSED_FN __attribute__((unused))
-#else
-# define _CFFI_UNUSED_FN /* nothing */
-#endif
-
-#ifdef __cplusplus
-# ifndef _Bool
- typedef bool _Bool; /* semi-hackish: C++ has no _Bool; bool is builtin */
-# endif
-#endif
-
-/********** CPython-specific section **********/
-#ifndef PYPY_VERSION
-
-
-#if PY_MAJOR_VERSION >= 3
-# define PyInt_FromLong PyLong_FromLong
-#endif
-
-#define _cffi_from_c_double PyFloat_FromDouble
-#define _cffi_from_c_float PyFloat_FromDouble
-#define _cffi_from_c_long PyInt_FromLong
-#define _cffi_from_c_ulong PyLong_FromUnsignedLong
-#define _cffi_from_c_longlong PyLong_FromLongLong
-#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong
-#define _cffi_from_c__Bool PyBool_FromLong
-
-#define _cffi_to_c_double PyFloat_AsDouble
-#define _cffi_to_c_float PyFloat_AsDouble
-
-#define _cffi_from_c_int(x, type) \
- (((type)-1) > 0 ? /* unsigned */ \
- (sizeof(type) < sizeof(long) ? \
- PyInt_FromLong((long)x) : \
- sizeof(type) == sizeof(long) ? \
- PyLong_FromUnsignedLong((unsigned long)x) : \
- PyLong_FromUnsignedLongLong((unsigned long long)x)) : \
- (sizeof(type) <= sizeof(long) ? \
- PyInt_FromLong((long)x) : \
- PyLong_FromLongLong((long long)x)))
-
-#define _cffi_to_c_int(o, type) \
- ((type)( \
- sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \
- : (type)_cffi_to_c_i8(o)) : \
- sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \
- : (type)_cffi_to_c_i16(o)) : \
- sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \
- : (type)_cffi_to_c_i32(o)) : \
- sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o) \
- : (type)_cffi_to_c_i64(o)) : \
- (Py_FatalError("unsupported size for type " #type), (type)0)))
-
-#define _cffi_to_c_i8 \
- ((int(*)(PyObject *))_cffi_exports[1])
-#define _cffi_to_c_u8 \
- ((int(*)(PyObject *))_cffi_exports[2])
-#define _cffi_to_c_i16 \
- ((int(*)(PyObject *))_cffi_exports[3])
-#define _cffi_to_c_u16 \
- ((int(*)(PyObject *))_cffi_exports[4])
-#define _cffi_to_c_i32 \
- ((int(*)(PyObject *))_cffi_exports[5])
-#define _cffi_to_c_u32 \
- ((unsigned int(*)(PyObject *))_cffi_exports[6])
-#define _cffi_to_c_i64 \
- ((long long(*)(PyObject *))_cffi_exports[7])
-#define _cffi_to_c_u64 \
- ((unsigned long long(*)(PyObject *))_cffi_exports[8])
-#define _cffi_to_c_char \
- ((int(*)(PyObject *))_cffi_exports[9])
-#define _cffi_from_c_pointer \
- ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[10])
-#define _cffi_to_c_pointer \
- ((char *(*)(PyObject *, struct _cffi_ctypedescr *))_cffi_exports[11])
-#define _cffi_get_struct_layout \
- not used any more
-#define _cffi_restore_errno \
- ((void(*)(void))_cffi_exports[13])
-#define _cffi_save_errno \
- ((void(*)(void))_cffi_exports[14])
-#define _cffi_from_c_char \
- ((PyObject *(*)(char))_cffi_exports[15])
-#define _cffi_from_c_deref \
- ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[16])
-#define _cffi_to_c \
- ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[17])
-#define _cffi_from_c_struct \
- ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[18])
-#define _cffi_to_c_wchar_t \
- ((_cffi_wchar_t(*)(PyObject *))_cffi_exports[19])
-#define _cffi_from_c_wchar_t \
- ((PyObject *(*)(_cffi_wchar_t))_cffi_exports[20])
-#define _cffi_to_c_long_double \
- ((long double(*)(PyObject *))_cffi_exports[21])
-#define _cffi_to_c__Bool \
- ((_Bool(*)(PyObject *))_cffi_exports[22])
-#define _cffi_prepare_pointer_call_argument \
- ((Py_ssize_t(*)(struct _cffi_ctypedescr *, \
- PyObject *, char **))_cffi_exports[23])
-#define _cffi_convert_array_from_object \
- ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[24])
-#define _CFFI_CPIDX 25
-#define _cffi_call_python \
- ((void(*)(struct _cffi_externpy_s *, char *))_cffi_exports[_CFFI_CPIDX])
-#define _cffi_to_c_wchar3216_t \
- ((int(*)(PyObject *))_cffi_exports[26])
-#define _cffi_from_c_wchar3216_t \
- ((PyObject *(*)(int))_cffi_exports[27])
-#define _CFFI_NUM_EXPORTS 28
-
-struct _cffi_ctypedescr;
-
-static void *_cffi_exports[_CFFI_NUM_EXPORTS];
-
-#define _cffi_type(index) ( \
- assert((((uintptr_t)_cffi_types[index]) & 1) == 0), \
- (struct _cffi_ctypedescr *)_cffi_types[index])
-
-static PyObject *_cffi_init(const char *module_name, Py_ssize_t version,
- const struct _cffi_type_context_s *ctx)
-{
- PyObject *module, *o_arg, *new_module;
- void *raw[] = {
- (void *)module_name,
- (void *)version,
- (void *)_cffi_exports,
- (void *)ctx,
- };
-
- module = PyImport_ImportModule("_cffi_backend");
- if (module == NULL)
- goto failure;
-
- o_arg = PyLong_FromVoidPtr((void *)raw);
- if (o_arg == NULL)
- goto failure;
-
- new_module = PyObject_CallMethod(
- module, (char *)"_init_cffi_1_0_external_module", (char *)"O", o_arg);
-
- Py_DECREF(o_arg);
- Py_DECREF(module);
- return new_module;
-
- failure:
- Py_XDECREF(module);
- return NULL;
-}
-
-
-#ifdef HAVE_WCHAR_H
-typedef wchar_t _cffi_wchar_t;
-#else
-typedef uint16_t _cffi_wchar_t; /* same random pick as _cffi_backend.c */
-#endif
-
-_CFFI_UNUSED_FN static uint16_t _cffi_to_c_char16_t(PyObject *o)
-{
- if (sizeof(_cffi_wchar_t) == 2)
- return (uint16_t)_cffi_to_c_wchar_t(o);
- else
- return (uint16_t)_cffi_to_c_wchar3216_t(o);
-}
-
-_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char16_t(uint16_t x)
-{
- if (sizeof(_cffi_wchar_t) == 2)
- return _cffi_from_c_wchar_t((_cffi_wchar_t)x);
- else
- return _cffi_from_c_wchar3216_t((int)x);
-}
-
-_CFFI_UNUSED_FN static int _cffi_to_c_char32_t(PyObject *o)
-{
- if (sizeof(_cffi_wchar_t) == 4)
- return (int)_cffi_to_c_wchar_t(o);
- else
- return (int)_cffi_to_c_wchar3216_t(o);
-}
-
-_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char32_t(int x)
-{
- if (sizeof(_cffi_wchar_t) == 4)
- return _cffi_from_c_wchar_t((_cffi_wchar_t)x);
- else
- return _cffi_from_c_wchar3216_t(x);
-}
-
-
-/********** end CPython-specific section **********/
-#else
-_CFFI_UNUSED_FN
-static void (*_cffi_call_python_org)(struct _cffi_externpy_s *, char *);
-# define _cffi_call_python _cffi_call_python_org
-#endif
-
-
-#define _cffi_array_len(array) (sizeof(array) / sizeof((array)[0]))
-
-#define _cffi_prim_int(size, sign) \
- ((size) == 1 ? ((sign) ? _CFFI_PRIM_INT8 : _CFFI_PRIM_UINT8) : \
- (size) == 2 ? ((sign) ? _CFFI_PRIM_INT16 : _CFFI_PRIM_UINT16) : \
- (size) == 4 ? ((sign) ? _CFFI_PRIM_INT32 : _CFFI_PRIM_UINT32) : \
- (size) == 8 ? ((sign) ? _CFFI_PRIM_INT64 : _CFFI_PRIM_UINT64) : \
- _CFFI__UNKNOWN_PRIM)
-
-#define _cffi_prim_float(size) \
- ((size) == sizeof(float) ? _CFFI_PRIM_FLOAT : \
- (size) == sizeof(double) ? _CFFI_PRIM_DOUBLE : \
- (size) == sizeof(long double) ? _CFFI__UNKNOWN_LONG_DOUBLE : \
- _CFFI__UNKNOWN_FLOAT_PRIM)
-
-#define _cffi_check_int(got, got_nonpos, expected) \
- ((got_nonpos) == (expected <= 0) && \
- (got) == (unsigned long long)expected)
-
-#ifdef MS_WIN32
-# define _cffi_stdcall __stdcall
-#else
-# define _cffi_stdcall /* nothing */
-#endif
-
-#ifdef __cplusplus
-}
-#endif
diff --git a/functions/source/GitPullS3/cffi/_embedding.h b/functions/source/GitPullS3/cffi/_embedding.h
deleted file mode 100644
index b24652e..0000000
--- a/functions/source/GitPullS3/cffi/_embedding.h
+++ /dev/null
@@ -1,484 +0,0 @@
-
-/***** Support code for embedding *****/
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-
-#if defined(_WIN32)
-# define CFFI_DLLEXPORT __declspec(dllexport)
-#elif defined(__GNUC__)
-# define CFFI_DLLEXPORT __attribute__((visibility("default")))
-#else
-# define CFFI_DLLEXPORT /* nothing */
-#endif
-
-
-/* There are two global variables of type _cffi_call_python_fnptr:
-
- * _cffi_call_python, which we declare just below, is the one called
- by ``extern "Python"`` implementations.
-
- * _cffi_call_python_org, which on CPython is actually part of the
- _cffi_exports[] array, is the function pointer copied from
- _cffi_backend.
-
- After initialization is complete, both are equal. However, the
- first one remains equal to &_cffi_start_and_call_python until the
- very end of initialization, when we are (or should be) sure that
- concurrent threads also see a completely initialized world, and
- only then is it changed.
-*/
-#undef _cffi_call_python
-typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *);
-static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *);
-static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python;
-
-
-#ifndef _MSC_VER
- /* --- Assuming a GCC not infinitely old --- */
-# define cffi_compare_and_swap(l,o,n) __sync_bool_compare_and_swap(l,o,n)
-# define cffi_write_barrier() __sync_synchronize()
-# if !defined(__amd64__) && !defined(__x86_64__) && \
- !defined(__i386__) && !defined(__i386)
-# define cffi_read_barrier() __sync_synchronize()
-# else
-# define cffi_read_barrier() (void)0
-# endif
-#else
- /* --- Windows threads version --- */
-# include <windows.h>
-# define cffi_compare_and_swap(l,o,n) \
- (InterlockedCompareExchangePointer(l,n,o) == (o))
-# define cffi_write_barrier() InterlockedCompareExchange(&_cffi_dummy,0,0)
-# define cffi_read_barrier() (void)0
-static volatile LONG _cffi_dummy;
-#endif
-
-#ifdef WITH_THREAD
-# ifndef _MSC_VER
-# include <pthread.h>
- static pthread_mutex_t _cffi_embed_startup_lock;
-# else
- static CRITICAL_SECTION _cffi_embed_startup_lock;
-# endif
- static char _cffi_embed_startup_lock_ready = 0;
-#endif
-
-static void _cffi_acquire_reentrant_mutex(void)
-{
- static void *volatile lock = NULL;
-
- while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) {
- /* should ideally do a spin loop instruction here, but
- hard to do it portably and doesn't really matter I
- think: pthread_mutex_init() should be very fast, and
- this is only run at start-up anyway. */
- }
-
-#ifdef WITH_THREAD
- if (!_cffi_embed_startup_lock_ready) {
-# ifndef _MSC_VER
- pthread_mutexattr_t attr;
- pthread_mutexattr_init(&attr);
- pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE);
- pthread_mutex_init(&_cffi_embed_startup_lock, &attr);
-# else
- InitializeCriticalSection(&_cffi_embed_startup_lock);
-# endif
- _cffi_embed_startup_lock_ready = 1;
- }
-#endif
-
- while (!cffi_compare_and_swap(&lock, (void *)1, NULL))
- ;
-
-#ifndef _MSC_VER
- pthread_mutex_lock(&_cffi_embed_startup_lock);
-#else
- EnterCriticalSection(&_cffi_embed_startup_lock);
-#endif
-}
-
-static void _cffi_release_reentrant_mutex(void)
-{
-#ifndef _MSC_VER
- pthread_mutex_unlock(&_cffi_embed_startup_lock);
-#else
- LeaveCriticalSection(&_cffi_embed_startup_lock);
-#endif
-}
-
-
-/********** CPython-specific section **********/
-#ifndef PYPY_VERSION
-
-#include "_cffi_errors.h"
-
-
-#define _cffi_call_python_org _cffi_exports[_CFFI_CPIDX]
-
-PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void); /* forward */
-
-static void _cffi_py_initialize(void)
-{
- /* XXX use initsigs=0, which "skips initialization registration of
- signal handlers, which might be useful when Python is
- embedded" according to the Python docs. But review and think
- if it should be a user-controllable setting.
-
- XXX we should also give a way to write errors to a buffer
- instead of to stderr.
-
- XXX if importing 'site' fails, CPython (any version) calls
- exit(). Should we try to work around this behavior here?
- */
- Py_InitializeEx(0);
-}
-
-static int _cffi_initialize_python(void)
-{
- /* This initializes Python, imports _cffi_backend, and then the
- present .dll/.so is set up as a CPython C extension module.
- */
- int result;
- PyGILState_STATE state;
- PyObject *pycode=NULL, *global_dict=NULL, *x;
-
- state = PyGILState_Ensure();
-
- /* Call the initxxx() function from the present module. It will
- create and initialize us as a CPython extension module, instead
- of letting the startup Python code do it---it might reimport
- the same .dll/.so and get maybe confused on some platforms.
- It might also have troubles locating the .dll/.so again for all
- I know.
- */
- (void)_CFFI_PYTHON_STARTUP_FUNC();
- if (PyErr_Occurred())
- goto error;
-
- /* Now run the Python code provided to ffi.embedding_init_code().
- */
- pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE,
- "<init code for '" _CFFI_MODULE_NAME "'>",
- Py_file_input);
- if (pycode == NULL)
- goto error;
- global_dict = PyDict_New();
- if (global_dict == NULL)
- goto error;
- if (PyDict_SetItemString(global_dict, "__builtins__",
- PyThreadState_GET()->interp->builtins) < 0)
- goto error;
- x = PyEval_EvalCode(
-#if PY_MAJOR_VERSION < 3
- (PyCodeObject *)
-#endif
- pycode, global_dict, global_dict);
- if (x == NULL)
- goto error;
- Py_DECREF(x);
-
- /* Done! Now if we've been called from
- _cffi_start_and_call_python() in an ``extern "Python"``, we can
- only hope that the Python code did correctly set up the
- corresponding @ffi.def_extern() function. Otherwise, the
- general logic of ``extern "Python"`` functions (inside the
- _cffi_backend module) will find that the reference is still
- missing and print an error.
- */
- result = 0;
- done:
- Py_XDECREF(pycode);
- Py_XDECREF(global_dict);
- PyGILState_Release(state);
- return result;
-
- error:;
- {
- /* Print as much information as potentially useful.
- Debugging load-time failures with embedding is not fun
- */
- PyObject *ecap;
- PyObject *exception, *v, *tb, *f, *modules, *mod;
- PyErr_Fetch(&exception, &v, &tb);
- ecap = _cffi_start_error_capture();
- f = PySys_GetObject((char *)"stderr");
- if (f != NULL && f != Py_None) {
- PyFile_WriteString(
- "Failed to initialize the Python-CFFI embedding logic:\n\n", f);
- }
-
- if (exception != NULL) {
- PyErr_NormalizeException(&exception, &v, &tb);
- PyErr_Display(exception, v, tb);
- }
- Py_XDECREF(exception);
- Py_XDECREF(v);
- Py_XDECREF(tb);
-
- if (f != NULL && f != Py_None) {
- PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME
- "\ncompiled with cffi version: 1.11.5"
- "\n_cffi_backend module: ", f);
- modules = PyImport_GetModuleDict();
- mod = PyDict_GetItemString(modules, "_cffi_backend");
- if (mod == NULL) {
- PyFile_WriteString("not loaded", f);
- }
- else {
- v = PyObject_GetAttrString(mod, "__file__");
- PyFile_WriteObject(v, f, 0);
- Py_XDECREF(v);
- }
- PyFile_WriteString("\nsys.path: ", f);
- PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0);
- PyFile_WriteString("\n\n", f);
- }
- _cffi_stop_error_capture(ecap);
- }
- result = -1;
- goto done;
-}
-
-PyAPI_DATA(char *) _PyParser_TokenNames[]; /* from CPython */
-
-static int _cffi_carefully_make_gil(void)
-{
- /* This does the basic initialization of Python. It can be called
- completely concurrently from unrelated threads. It assumes
- that we don't hold the GIL before (if it exists), and we don't
- hold it afterwards.
-
- (What it really does used to be completely different in Python 2
- and Python 3, with the Python 2 solution avoiding the spin-lock
- around the Py_InitializeEx() call. However, after recent changes
- to CPython 2.7 (issue #358) it no longer works. So we use the
- Python 3 solution everywhere.)
-
- This initializes Python by calling Py_InitializeEx().
- Important: this must not be called concurrently at all.
- So we use a global variable as a simple spin lock. This global
- variable must be from 'libpythonX.Y.so', not from this
- cffi-based extension module, because it must be shared from
- different cffi-based extension modules. We choose
- _PyParser_TokenNames[0] as a completely arbitrary pointer value
- that is never written to. The default is to point to the
- string "ENDMARKER". We change it temporarily to point to the
- next character in that string. (Yes, I know it's REALLY
- obscure.)
- */
-
-#ifdef WITH_THREAD
- char *volatile *lock = (char *volatile *)_PyParser_TokenNames;
- char *old_value;
-
- while (1) { /* spin loop */
- old_value = *lock;
- if (old_value[0] == 'E') {
- assert(old_value[1] == 'N');
- if (cffi_compare_and_swap(lock, old_value, old_value + 1))
- break;
- }
- else {
- assert(old_value[0] == 'N');
- /* should ideally do a spin loop instruction here, but
- hard to do it portably and doesn't really matter I
- think: PyEval_InitThreads() should be very fast, and
- this is only run at start-up anyway. */
- }
- }
-#endif
-
- /* call Py_InitializeEx() */
- {
- PyGILState_STATE state = PyGILState_UNLOCKED;
- if (!Py_IsInitialized())
- _cffi_py_initialize();
- else
- state = PyGILState_Ensure();
-
- PyEval_InitThreads();
- PyGILState_Release(state);
- }
-
-#ifdef WITH_THREAD
- /* release the lock */
- while (!cffi_compare_and_swap(lock, old_value + 1, old_value))
- ;
-#endif
-
- return 0;
-}
-
-/********** end CPython-specific section **********/
-
-
-#else
-
-
-/********** PyPy-specific section **********/
-
-PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]); /* forward */
-
-static struct _cffi_pypy_init_s {
- const char *name;
- void (*func)(const void *[]);
- const char *code;
-} _cffi_pypy_init = {
- _CFFI_MODULE_NAME,
- (void(*)(const void *[]))_CFFI_PYTHON_STARTUP_FUNC,
- _CFFI_PYTHON_STARTUP_CODE,
-};
-
-extern int pypy_carefully_make_gil(const char *);
-extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *);
-
-static int _cffi_carefully_make_gil(void)
-{
- return pypy_carefully_make_gil(_CFFI_MODULE_NAME);
-}
-
-static int _cffi_initialize_python(void)
-{
- return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init);
-}
-
-/********** end PyPy-specific section **********/
-
-
-#endif
-
-
-#ifdef __GNUC__
-__attribute__((noinline))
-#endif
-static _cffi_call_python_fnptr _cffi_start_python(void)
-{
- /* Delicate logic to initialize Python. This function can be
- called multiple times concurrently, e.g. when the process calls
- its first ``extern "Python"`` functions in multiple threads at
- once. It can also be called recursively, in which case we must
- ignore it. We also have to consider what occurs if several
- different cffi-based extensions reach this code in parallel
- threads---it is a different copy of the code, then, and we
- can't have any shared global variable unless it comes from
- 'libpythonX.Y.so'.
-
- Idea:
-
- * _cffi_carefully_make_gil(): "carefully" call
- PyEval_InitThreads() (possibly with Py_InitializeEx() first).
-
- * then we use a (local) custom lock to make sure that a call to this
- cffi-based extension will wait if another call to the *same*
- extension is running the initialization in another thread.
- It is reentrant, so that a recursive call will not block, but
- only one from a different thread.
-
- * then we grab the GIL and (Python 2) we call Py_InitializeEx().
- At this point, concurrent calls to Py_InitializeEx() are not
- possible: we have the GIL.
-
- * do the rest of the specific initialization, which may
- temporarily release the GIL but not the custom lock.
- Only release the custom lock when we are done.
- */
- static char called = 0;
-
- if (_cffi_carefully_make_gil() != 0)
- return NULL;
-
- _cffi_acquire_reentrant_mutex();
-
- /* Here the GIL exists, but we don't have it. We're only protected
- from concurrency by the reentrant mutex. */
-
- /* This file only initializes the embedded module once, the first
- time this is called, even if there are subinterpreters. */
- if (!called) {
- called = 1; /* invoke _cffi_initialize_python() only once,
- but don't set '_cffi_call_python' right now,
- otherwise concurrent threads won't call
- this function at all (we need them to wait) */
- if (_cffi_initialize_python() == 0) {
- /* now initialization is finished. Switch to the fast-path. */
-
- /* We would like nobody to see the new value of
- '_cffi_call_python' without also seeing the rest of the
- data initialized. However, this is not possible. But
- the new value of '_cffi_call_python' is the function
- 'cffi_call_python()' from _cffi_backend. So: */
- cffi_write_barrier();
- /* ^^^ we put a write barrier here, and a corresponding
- read barrier at the start of cffi_call_python(). This
- ensures that after that read barrier, we see everything
- done here before the write barrier.
- */
-
- assert(_cffi_call_python_org != NULL);
- _cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org;
- }
- else {
- /* initialization failed. Reset this to NULL, even if it was
- already set to some other value. Future calls to
- _cffi_start_python() are still forced to occur, and will
- always return NULL from now on. */
- _cffi_call_python_org = NULL;
- }
- }
-
- _cffi_release_reentrant_mutex();
-
- return (_cffi_call_python_fnptr)_cffi_call_python_org;
-}
-
-static
-void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args)
-{
- _cffi_call_python_fnptr fnptr;
- int current_err = errno;
-#ifdef _MSC_VER
- int current_lasterr = GetLastError();
-#endif
- fnptr = _cffi_start_python();
- if (fnptr == NULL) {
- fprintf(stderr, "function %s() called, but initialization code "
- "failed. Returning 0.\n", externpy->name);
- memset(args, 0, externpy->size_of_result);
- }
-#ifdef _MSC_VER
- SetLastError(current_lasterr);
-#endif
- errno = current_err;
-
- if (fnptr != NULL)
- fnptr(externpy, args);
-}
-
-
-/* The cffi_start_python() function makes sure Python is initialized
- and our cffi module is set up. It can be called manually from the
- user C code. The same effect is obtained automatically from any
- dll-exported ``extern "Python"`` function. This function returns
- -1 if initialization failed, 0 if all is OK. */
-_CFFI_UNUSED_FN
-static int cffi_start_python(void)
-{
- if (_cffi_call_python == &_cffi_start_and_call_python) {
- if (_cffi_start_python() == NULL)
- return -1;
- }
- cffi_read_barrier();
- return 0;
-}
-
-#undef cffi_compare_and_swap
-#undef cffi_write_barrier
-#undef cffi_read_barrier
-
-#ifdef __cplusplus
-}
-#endif
diff --git a/functions/source/GitPullS3/cffi/api.py b/functions/source/GitPullS3/cffi/api.py
deleted file mode 100644
index 7b63ca7..0000000
--- a/functions/source/GitPullS3/cffi/api.py
+++ /dev/null
@@ -1,935 +0,0 @@
-import sys, types
-from .lock import allocate_lock
-from .error import CDefError
-from . import model
-
-try:
- callable
-except NameError:
- # Python 3.1
- from collections import Callable
- callable = lambda x: isinstance(x, Callable)
-
-try:
- basestring
-except NameError:
- # Python 3.x
- basestring = str
-
-
-
-class FFI(object):
- r'''
- The main top-level class that you instantiate once, or once per module.
-
- Example usage:
-
- ffi = FFI()
- ffi.cdef("""
- int printf(const char *, ...);
- """)
-
- C = ffi.dlopen(None) # standard library
- -or-
- C = ffi.verify() # use a C compiler: verify the decl above is right
-
- C.printf("hello, %s!\n", ffi.new("char[]", "world"))
- '''
-
- def __init__(self, backend=None):
- """Create an FFI instance. The 'backend' argument is used to
- select a non-default backend, mostly for tests.
- """
- if backend is None:
- # You need PyPy (>= 2.0 beta), or a CPython (>= 2.6) with
- # _cffi_backend.so compiled.
- import _cffi_backend as backend
- from . import __version__
- if backend.__version__ != __version__:
- # bad version! Try to be as explicit as possible.
- if hasattr(backend, '__file__'):
- # CPython
- raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. When we import the top-level '_cffi_backend' extension module, we get version %s, located in %r. The two versions should be equal; check your installation." % (
- __version__, __file__,
- backend.__version__, backend.__file__))
- else:
- # PyPy
- raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. This interpreter comes with a built-in '_cffi_backend' module, which is version %s. The two versions should be equal; check your installation." % (
- __version__, __file__, backend.__version__))
- # (If you insist you can also try to pass the option
- # 'backend=backend_ctypes.CTypesBackend()', but don't
- # rely on it! It's probably not going to work well.)
-
- from . import cparser
- self._backend = backend
- self._lock = allocate_lock()
- self._parser = cparser.Parser()
- self._cached_btypes = {}
- self._parsed_types = types.ModuleType('parsed_types').__dict__
- self._new_types = types.ModuleType('new_types').__dict__
- self._function_caches = []
- self._libraries = []
- self._cdefsources = []
- self._included_ffis = []
- self._windows_unicode = None
- self._init_once_cache = {}
- self._cdef_version = None
- self._embedding = None
- self._typecache = model.get_typecache(backend)
- if hasattr(backend, 'set_ffi'):
- backend.set_ffi(self)
- for name in list(backend.__dict__):
- if name.startswith('RTLD_'):
- setattr(self, name, getattr(backend, name))
- #
- with self._lock:
- self.BVoidP = self._get_cached_btype(model.voidp_type)
- self.BCharA = self._get_cached_btype(model.char_array_type)
- if isinstance(backend, types.ModuleType):
- # _cffi_backend: attach these constants to the class
- if not hasattr(FFI, 'NULL'):
- FFI.NULL = self.cast(self.BVoidP, 0)
- FFI.CData, FFI.CType = backend._get_types()
- else:
- # ctypes backend: attach these constants to the instance
- self.NULL = self.cast(self.BVoidP, 0)
- self.CData, self.CType = backend._get_types()
- self.buffer = backend.buffer
-
- def cdef(self, csource, override=False, packed=False):
- """Parse the given C source. This registers all declared functions,
- types, and global variables. The functions and global variables can
- then be accessed via either 'ffi.dlopen()' or 'ffi.verify()'.
- The types can be used in 'ffi.new()' and other functions.
- If 'packed' is specified as True, all structs declared inside this
- cdef are packed, i.e. laid out without any field alignment at all.
- """
- self._cdef(csource, override=override, packed=packed)
-
- def embedding_api(self, csource, packed=False):
- self._cdef(csource, packed=packed, dllexport=True)
- if self._embedding is None:
- self._embedding = ''
-
- def _cdef(self, csource, override=False, **options):
- if not isinstance(csource, str): # unicode, on Python 2
- if not isinstance(csource, basestring):
- raise TypeError("cdef() argument must be a string")
- csource = csource.encode('ascii')
- with self._lock:
- self._cdef_version = object()
- self._parser.parse(csource, override=override, **options)
- self._cdefsources.append(csource)
- if override:
- for cache in self._function_caches:
- cache.clear()
- finishlist = self._parser._recomplete
- if finishlist:
- self._parser._recomplete = []
- for tp in finishlist:
- tp.finish_backend_type(self, finishlist)
-
- def dlopen(self, name, flags=0):
- """Load and return a dynamic library identified by 'name'.
- The standard C library can be loaded by passing None.
- Note that functions and types declared by 'ffi.cdef()' are not
- linked to a particular library, just like C headers; in the
- library we only look for the actual (untyped) symbols.
- """
- assert isinstance(name, basestring) or name is None
- with self._lock:
- lib, function_cache = _make_ffi_library(self, name, flags)
- self._function_caches.append(function_cache)
- self._libraries.append(lib)
- return lib
-
- def dlclose(self, lib):
- """Close a library obtained with ffi.dlopen(). After this call,
- access to functions or variables from the library will fail
- (possibly with a segmentation fault).
- """
- type(lib).__cffi_close__(lib)
-
- def _typeof_locked(self, cdecl):
- # call me with the lock!
- key = cdecl
- if key in self._parsed_types:
- return self._parsed_types[key]
- #
- if not isinstance(cdecl, str): # unicode, on Python 2
- cdecl = cdecl.encode('ascii')
- #
- type = self._parser.parse_type(cdecl)
- really_a_function_type = type.is_raw_function
- if really_a_function_type:
- type = type.as_function_pointer()
- btype = self._get_cached_btype(type)
- result = btype, really_a_function_type
- self._parsed_types[key] = result
- return result
-
- def _typeof(self, cdecl, consider_function_as_funcptr=False):
- # string -> ctype object
- try:
- result = self._parsed_types[cdecl]
- except KeyError:
- with self._lock:
- result = self._typeof_locked(cdecl)
- #
- btype, really_a_function_type = result
- if really_a_function_type and not consider_function_as_funcptr:
- raise CDefError("the type %r is a function type, not a "
- "pointer-to-function type" % (cdecl,))
- return btype
-
- def typeof(self, cdecl):
- """Parse the C type given as a string and return the
- corresponding object.
- It can also be used on 'cdata' instance to get its C type.
- """
- if isinstance(cdecl, basestring):
- return self._typeof(cdecl)
- if isinstance(cdecl, self.CData):
- return self._backend.typeof(cdecl)
- if isinstance(cdecl, types.BuiltinFunctionType):
- res = _builtin_function_type(cdecl)
- if res is not None:
- return res
- if (isinstance(cdecl, types.FunctionType)
- and hasattr(cdecl, '_cffi_base_type')):
- with self._lock:
- return self._get_cached_btype(cdecl._cffi_base_type)
- raise TypeError(type(cdecl))
-
- def sizeof(self, cdecl):
- """Return the size in bytes of the argument. It can be a
- string naming a C type, or a 'cdata' instance.
- """
- if isinstance(cdecl, basestring):
- BType = self._typeof(cdecl)
- return self._backend.sizeof(BType)
- else:
- return self._backend.sizeof(cdecl)
-
- def alignof(self, cdecl):
- """Return the natural alignment size in bytes of the C type
- given as a string.
- """
- if isinstance(cdecl, basestring):
- cdecl = self._typeof(cdecl)
- return self._backend.alignof(cdecl)
-
- def offsetof(self, cdecl, *fields_or_indexes):
- """Return the offset of the named field inside the given
- structure or array, which must be given as a C type name.
- You can give several field names in case of nested structures.
- You can also give numeric values which correspond to array
- items, in case of an array type.
- """
- if isinstance(cdecl, basestring):
- cdecl = self._typeof(cdecl)
- return self._typeoffsetof(cdecl, *fields_or_indexes)[1]
-
- def new(self, cdecl, init=None):
- """Allocate an instance according to the specified C type and
- return a pointer to it. The specified C type must be either a
- pointer or an array: ``new('X *')`` allocates an X and returns
- a pointer to it, whereas ``new('X[n]')`` allocates an array of
- n X'es and returns an array referencing it (which works
- mostly like a pointer, like in C). You can also use
- ``new('X[]', n)`` to allocate an array of a non-constant
- length n.
-
- The memory is initialized following the rules of declaring a
- global variable in C: by default it is zero-initialized, but
- an explicit initializer can be given which can be used to
- fill all or part of the memory.
-
- When the returned object goes out of scope, the memory
- is freed. In other words the returned object has
- ownership of the value of type 'cdecl' that it points to. This
- means that the raw data can be used as long as this object is
- kept alive, but must not be used for a longer time. Be careful
- about that when copying the pointer to the memory somewhere
- else, e.g. into another structure.
- """
- if isinstance(cdecl, basestring):
- cdecl = self._typeof(cdecl)
- return self._backend.newp(cdecl, init)
-
- def new_allocator(self, alloc=None, free=None,
- should_clear_after_alloc=True):
- """Return a new allocator, i.e. a function that behaves like ffi.new()
- but uses the provided low-level 'alloc' and 'free' functions.
-
- 'alloc' is called with the size as argument. If it returns NULL, a
- MemoryError is raised. 'free' is called with the result of 'alloc'
- as argument. Both can be either Python function or directly C
- functions. If 'free' is None, then no free function is called.
- If both 'alloc' and 'free' are None, the default is used.
-
- If 'should_clear_after_alloc' is set to False, then the memory
- returned by 'alloc' is assumed to be already cleared (or you are
- fine with garbage); otherwise CFFI will clear it.
- """
- compiled_ffi = self._backend.FFI()
- allocator = compiled_ffi.new_allocator(alloc, free,
- should_clear_after_alloc)
- def allocate(cdecl, init=None):
- if isinstance(cdecl, basestring):
- cdecl = self._typeof(cdecl)
- return allocator(cdecl, init)
- return allocate
-
- def cast(self, cdecl, source):
- """Similar to a C cast: returns an instance of the named C
- type initialized with the given 'source'. The source is
- casted between integers or pointers of any type.
- """
- if isinstance(cdecl, basestring):
- cdecl = self._typeof(cdecl)
- return self._backend.cast(cdecl, source)
-
- def string(self, cdata, maxlen=-1):
- """Return a Python string (or unicode string) from the 'cdata'.
- If 'cdata' is a pointer or array of characters or bytes, returns
- the null-terminated string. The returned string extends until
- the first null character, or at most 'maxlen' characters. If
- 'cdata' is an array then 'maxlen' defaults to its length.
-
- If 'cdata' is a pointer or array of wchar_t, returns a unicode
- string following the same rules.
-
- If 'cdata' is a single character or byte or a wchar_t, returns
- it as a string or unicode string.
-
- If 'cdata' is an enum, returns the value of the enumerator as a
- string, or 'NUMBER' if the value is out of range.
- """
- return self._backend.string(cdata, maxlen)
-
- def unpack(self, cdata, length):
- """Unpack an array of C data of the given length,
- returning a Python string/unicode/list.
-
- If 'cdata' is a pointer to 'char', returns a byte string.
- It does not stop at the first null. This is equivalent to:
- ffi.buffer(cdata, length)[:]
-
- If 'cdata' is a pointer to 'wchar_t', returns a unicode string.
- 'length' is measured in wchar_t's; it is not the size in bytes.
-
- If 'cdata' is a pointer to anything else, returns a list of
- 'length' items. This is a faster equivalent to:
- [cdata[i] for i in range(length)]
- """
- return self._backend.unpack(cdata, length)
-
- #def buffer(self, cdata, size=-1):
- # """Return a read-write buffer object that references the raw C data
- # pointed to by the given 'cdata'. The 'cdata' must be a pointer or
- # an array. Can be passed to functions expecting a buffer, or directly
- # manipulated with:
- #
- # buf[:] get a copy of it in a regular string, or
- # buf[idx] as a single character
- # buf[:] = ...
- # buf[idx] = ... change the content
- # """
- # note that 'buffer' is a type, set on this instance by __init__
-
- def from_buffer(self, python_buffer):
- """Return a <cdata 'char[]'> that points to the data of the
- given Python object, which must support the buffer interface.
- Note that this is not meant to be used on the built-in types
- str or unicode (you can build 'char[]' arrays explicitly)
- but only on objects containing large quantities of raw data
- in some other format, like 'array.array' or numpy arrays.
- """
- return self._backend.from_buffer(self.BCharA, python_buffer)
-
- def memmove(self, dest, src, n):
- """ffi.memmove(dest, src, n) copies n bytes of memory from src to dest.
-
- Like the C function memmove(), the memory areas may overlap;
- apart from that it behaves like the C function memcpy().
-
- 'src' can be any cdata ptr or array, or any Python buffer object.
- 'dest' can be any cdata ptr or array, or a writable Python buffer
- object. The size to copy, 'n', is always measured in bytes.
-
- Unlike other methods, this one supports all Python buffer including
- byte strings and bytearrays---but it still does not support
- non-contiguous buffers.
- """
- return self._backend.memmove(dest, src, n)
-
- def callback(self, cdecl, python_callable=None, error=None, onerror=None):
- """Return a callback object or a decorator making such a
- callback object. 'cdecl' must name a C function pointer type.
- The callback invokes the specified 'python_callable' (which may
- be provided either directly or via a decorator). Important: the
- callback object must be manually kept alive for as long as the
- callback may be invoked from the C level.
- """
- def callback_decorator_wrap(python_callable):
- if not callable(python_callable):
- raise TypeError("the 'python_callable' argument "
- "is not callable")
- return self._backend.callback(cdecl, python_callable,
- error, onerror)
- if isinstance(cdecl, basestring):
- cdecl = self._typeof(cdecl, consider_function_as_funcptr=True)
- if python_callable is None:
- return callback_decorator_wrap # decorator mode
- else:
- return callback_decorator_wrap(python_callable) # direct mode
-
- def getctype(self, cdecl, replace_with=''):
- """Return a string giving the C type 'cdecl', which may be itself
- a string or a <ctype> object. If 'replace_with' is given, it gives
- extra text to append (or insert for more complicated C types), like
- a variable name, or '*' to get actually the C type 'pointer-to-cdecl'.
- """
- if isinstance(cdecl, basestring):
- cdecl = self._typeof(cdecl)
- replace_with = replace_with.strip()
- if (replace_with.startswith('*')
- and '&[' in self._backend.getcname(cdecl, '&')):
- replace_with = '(%s)' % replace_with
- elif replace_with and not replace_with[0] in '[(':
- replace_with = ' ' + replace_with
- return self._backend.getcname(cdecl, replace_with)
-
- def gc(self, cdata, destructor, size=0):
- """Return a new cdata object that points to the same
- data. Later, when this new cdata object is garbage-collected,
- 'destructor(old_cdata_object)' will be called.
-
- The optional 'size' gives an estimate of the size, used to
- trigger the garbage collection more eagerly. So far only used
- on PyPy. It tells the GC that the returned object keeps alive
- roughly 'size' bytes of external memory.
- """
- return self._backend.gcp(cdata, destructor, size)
-
- def _get_cached_btype(self, type):
- assert self._lock.acquire(False) is False
- # call me with the lock!
- try:
- BType = self._cached_btypes[type]
- except KeyError:
- finishlist = []
- BType = type.get_cached_btype(self, finishlist)
- for type in finishlist:
- type.finish_backend_type(self, finishlist)
- return BType
-
- def verify(self, source='', tmpdir=None, **kwargs):
- """Verify that the current ffi signatures compile on this
- machine, and return a dynamic library object. The dynamic
- library can be used to call functions and access global
- variables declared in this 'ffi'. The library is compiled
- by the C compiler: it gives you C-level API compatibility
- (including calling macros). This is unlike 'ffi.dlopen()',
- which requires binary compatibility in the signatures.
- """
- from .verifier import Verifier, _caller_dir_pycache
- #
- # If set_unicode(True) was called, insert the UNICODE and
- # _UNICODE macro declarations
- if self._windows_unicode:
- self._apply_windows_unicode(kwargs)
- #
- # Set the tmpdir here, and not in Verifier.__init__: it picks
- # up the caller's directory, which we want to be the caller of
- # ffi.verify(), as opposed to the caller of Verifier().
- tmpdir = tmpdir or _caller_dir_pycache()
- #
- # Make a Verifier() and use it to load the library.
- self.verifier = Verifier(self, source, tmpdir, **kwargs)
- lib = self.verifier.load_library()
- #
- # Save the loaded library for keep-alive purposes, even
- # if the caller doesn't keep it alive itself (it should).
- self._libraries.append(lib)
- return lib
-
- def _get_errno(self):
- return self._backend.get_errno()
- def _set_errno(self, errno):
- self._backend.set_errno(errno)
- errno = property(_get_errno, _set_errno, None,
- "the value of 'errno' from/to the C calls")
-
- def getwinerror(self, code=-1):
- return self._backend.getwinerror(code)
-
- def _pointer_to(self, ctype):
- with self._lock:
- return model.pointer_cache(self, ctype)
-
- def addressof(self, cdata, *fields_or_indexes):
- """Return the address of a <cdata 'struct-or-union'>.
- If 'fields_or_indexes' are given, returns the address of that
- field or array item in the structure or array, recursively in
- case of nested structures.
- """
- try:
- ctype = self._backend.typeof(cdata)
- except TypeError:
- if '__addressof__' in type(cdata).__dict__:
- return type(cdata).__addressof__(cdata, *fields_or_indexes)
- raise
- if fields_or_indexes:
- ctype, offset = self._typeoffsetof(ctype, *fields_or_indexes)
- else:
- if ctype.kind == "pointer":
- raise TypeError("addressof(pointer)")
- offset = 0
- ctypeptr = self._pointer_to(ctype)
- return self._backend.rawaddressof(ctypeptr, cdata, offset)
-
- def _typeoffsetof(self, ctype, field_or_index, *fields_or_indexes):
- ctype, offset = self._backend.typeoffsetof(ctype, field_or_index)
- for field1 in fields_or_indexes:
- ctype, offset1 = self._backend.typeoffsetof(ctype, field1, 1)
- offset += offset1
- return ctype, offset
-
- def include(self, ffi_to_include):
- """Includes the typedefs, structs, unions and enums defined
- in another FFI instance. Usage is similar to a #include in C,
- where a part of the program might include types defined in
- another part for its own usage. Note that the include()
- method has no effect on functions, constants and global
- variables, which must anyway be accessed directly from the
- lib object returned by the original FFI instance.
- """
- if not isinstance(ffi_to_include, FFI):
- raise TypeError("ffi.include() expects an argument that is also of"
- " type cffi.FFI, not %r" % (
- type(ffi_to_include).__name__,))
- if ffi_to_include is self:
- raise ValueError("self.include(self)")
- with ffi_to_include._lock:
- with self._lock:
- self._parser.include(ffi_to_include._parser)
- self._cdefsources.append('[')
- self._cdefsources.extend(ffi_to_include._cdefsources)
- self._cdefsources.append(']')
- self._included_ffis.append(ffi_to_include)
-
- def new_handle(self, x):
- return self._backend.newp_handle(self.BVoidP, x)
-
- def from_handle(self, x):
- return self._backend.from_handle(x)
-
- def set_unicode(self, enabled_flag):
- """Windows: if 'enabled_flag' is True, enable the UNICODE and
- _UNICODE defines in C, and declare the types like TCHAR and LPTCSTR
- to be (pointers to) wchar_t. If 'enabled_flag' is False,
- declare these types to be (pointers to) plain 8-bit characters.
- This is mostly for backward compatibility; you usually want True.
- """
- if self._windows_unicode is not None:
- raise ValueError("set_unicode() can only be called once")
- enabled_flag = bool(enabled_flag)
- if enabled_flag:
- self.cdef("typedef wchar_t TBYTE;"
- "typedef wchar_t TCHAR;"
- "typedef const wchar_t *LPCTSTR;"
- "typedef const wchar_t *PCTSTR;"
- "typedef wchar_t *LPTSTR;"
- "typedef wchar_t *PTSTR;"
- "typedef TBYTE *PTBYTE;"
- "typedef TCHAR *PTCHAR;")
- else:
- self.cdef("typedef char TBYTE;"
- "typedef char TCHAR;"
- "typedef const char *LPCTSTR;"
- "typedef const char *PCTSTR;"
- "typedef char *LPTSTR;"
- "typedef char *PTSTR;"
- "typedef TBYTE *PTBYTE;"
- "typedef TCHAR *PTCHAR;")
- self._windows_unicode = enabled_flag
-
- def _apply_windows_unicode(self, kwds):
- defmacros = kwds.get('define_macros', ())
- if not isinstance(defmacros, (list, tuple)):
- raise TypeError("'define_macros' must be a list or tuple")
- defmacros = list(defmacros) + [('UNICODE', '1'),
- ('_UNICODE', '1')]
- kwds['define_macros'] = defmacros
-
- def _apply_embedding_fix(self, kwds):
- # must include an argument like "-lpython2.7" for the compiler
- def ensure(key, value):
- lst = kwds.setdefault(key, [])
- if value not in lst:
- lst.append(value)
- #
- if '__pypy__' in sys.builtin_module_names:
- import os
- if sys.platform == "win32":
- # we need 'libpypy-c.lib'. Current distributions of
- # pypy (>= 4.1) contain it as 'libs/python27.lib'.
- pythonlib = "python27"
- if hasattr(sys, 'prefix'):
- ensure('library_dirs', os.path.join(sys.prefix, 'libs'))
- else:
- # we need 'libpypy-c.{so,dylib}', which should be by
- # default located in 'sys.prefix/bin' for installed
- # systems.
- if sys.version_info < (3,):
- pythonlib = "pypy-c"
- else:
- pythonlib = "pypy3-c"
- if hasattr(sys, 'prefix'):
- ensure('library_dirs', os.path.join(sys.prefix, 'bin'))
- # On uninstalled pypy's, the libpypy-c is typically found in
- # .../pypy/goal/.
- if hasattr(sys, 'prefix'):
- ensure('library_dirs', os.path.join(sys.prefix, 'pypy', 'goal'))
- else:
- if sys.platform == "win32":
- template = "python%d%d"
- if hasattr(sys, 'gettotalrefcount'):
- template += '_d'
- else:
- try:
- import sysconfig
- except ImportError: # 2.6
- from distutils import sysconfig
- template = "python%d.%d"
- if sysconfig.get_config_var('DEBUG_EXT'):
- template += sysconfig.get_config_var('DEBUG_EXT')
- pythonlib = (template %
- (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
- if hasattr(sys, 'abiflags'):
- pythonlib += sys.abiflags
- ensure('libraries', pythonlib)
- if sys.platform == "win32":
- ensure('extra_link_args', '/MANIFEST')
-
- def set_source(self, module_name, source, source_extension='.c', **kwds):
- import os
- if hasattr(self, '_assigned_source'):
- raise ValueError("set_source() cannot be called several times "
- "per ffi object")
- if not isinstance(module_name, basestring):
- raise TypeError("'module_name' must be a string")
- if os.sep in module_name or (os.altsep and os.altsep in module_name):
- raise ValueError("'module_name' must not contain '/': use a dotted "
- "name to make a 'package.module' location")
- self._assigned_source = (str(module_name), source,
- source_extension, kwds)
-
- def distutils_extension(self, tmpdir='build', verbose=True):
- from distutils.dir_util import mkpath
- from .recompiler import recompile
- #
- if not hasattr(self, '_assigned_source'):
- if hasattr(self, 'verifier'): # fallback, 'tmpdir' ignored
- return self.verifier.get_extension()
- raise ValueError("set_source() must be called before"
- " distutils_extension()")
- module_name, source, source_extension, kwds = self._assigned_source
- if source is None:
- raise TypeError("distutils_extension() is only for C extension "
- "modules, not for dlopen()-style pure Python "
- "modules")
- mkpath(tmpdir)
- ext, updated = recompile(self, module_name,
- source, tmpdir=tmpdir, extradir=tmpdir,
- source_extension=source_extension,
- call_c_compiler=False, **kwds)
- if verbose:
- if updated:
- sys.stderr.write("regenerated: %r\n" % (ext.sources[0],))
- else:
- sys.stderr.write("not modified: %r\n" % (ext.sources[0],))
- return ext
-
- def emit_c_code(self, filename):
- from .recompiler import recompile
- #
- if not hasattr(self, '_assigned_source'):
- raise ValueError("set_source() must be called before emit_c_code()")
- module_name, source, source_extension, kwds = self._assigned_source
- if source is None:
- raise TypeError("emit_c_code() is only for C extension modules, "
- "not for dlopen()-style pure Python modules")
- recompile(self, module_name, source,
- c_file=filename, call_c_compiler=False, **kwds)
-
- def emit_python_code(self, filename):
- from .recompiler import recompile
- #
- if not hasattr(self, '_assigned_source'):
- raise ValueError("set_source() must be called before emit_c_code()")
- module_name, source, source_extension, kwds = self._assigned_source
- if source is not None:
- raise TypeError("emit_python_code() is only for dlopen()-style "
- "pure Python modules, not for C extension modules")
- recompile(self, module_name, source,
- c_file=filename, call_c_compiler=False, **kwds)
-
- def compile(self, tmpdir='.', verbose=0, target=None, debug=None):
- """The 'target' argument gives the final file name of the
- compiled DLL. Use '*' to force distutils' choice, suitable for
- regular CPython C API modules. Use a file name ending in '.*'
- to ask for the system's default extension for dynamic libraries
- (.so/.dll/.dylib).
-
- The default is '*' when building a non-embedded C API extension,
- and (module_name + '.*') when building an embedded library.
- """
- from .recompiler import recompile
- #
- if not hasattr(self, '_assigned_source'):
- raise ValueError("set_source() must be called before compile()")
- module_name, source, source_extension, kwds = self._assigned_source
- return recompile(self, module_name, source, tmpdir=tmpdir,
- target=target, source_extension=source_extension,
- compiler_verbose=verbose, debug=debug, **kwds)
-
- def init_once(self, func, tag):
- # Read _init_once_cache[tag], which is either (False, lock) if
- # we're calling the function now in some thread, or (True, result).
- # Don't call setdefault() in most cases, to avoid allocating and
- # immediately freeing a lock; but still use setdefault() to avoid
- # races.
- try:
- x = self._init_once_cache[tag]
- except KeyError:
- x = self._init_once_cache.setdefault(tag, (False, allocate_lock()))
- # Common case: we got (True, result), so we return the result.
- if x[0]:
- return x[1]
- # Else, it's a lock. Acquire it to serialize the following tests.
- with x[1]:
- # Read again from _init_once_cache the current status.
- x = self._init_once_cache[tag]
- if x[0]:
- return x[1]
- # Call the function and store the result back.
- result = func()
- self._init_once_cache[tag] = (True, result)
- return result
-
- def embedding_init_code(self, pysource):
- if self._embedding:
- raise ValueError("embedding_init_code() can only be called once")
- # fix 'pysource' before it gets dumped into the C file:
- # - remove empty lines at the beginning, so it starts at "line 1"
- # - dedent, if all non-empty lines are indented
- # - check for SyntaxErrors
- import re
- match = re.match(r'\s*\n', pysource)
- if match:
- pysource = pysource[match.end():]
- lines = pysource.splitlines() or ['']
- prefix = re.match(r'\s*', lines[0]).group()
- for i in range(1, len(lines)):
- line = lines[i]
- if line.rstrip():
- while not line.startswith(prefix):
- prefix = prefix[:-1]
- i = len(prefix)
- lines = [line[i:]+'\n' for line in lines]
- pysource = ''.join(lines)
- #
- compile(pysource, "cffi_init", "exec")
- #
- self._embedding = pysource
-
- def def_extern(self, *args, **kwds):
- raise ValueError("ffi.def_extern() is only available on API-mode FFI "
- "objects")
-
- def list_types(self):
- """Returns the user type names known to this FFI instance.
- This returns a tuple containing three lists of names:
- (typedef_names, names_of_structs, names_of_unions)
- """
- typedefs = []
- structs = []
- unions = []
- for key in self._parser._declarations:
- if key.startswith('typedef '):
- typedefs.append(key[8:])
- elif key.startswith('struct '):
- structs.append(key[7:])
- elif key.startswith('union '):
- unions.append(key[6:])
- typedefs.sort()
- structs.sort()
- unions.sort()
- return (typedefs, structs, unions)
-
-
-def _load_backend_lib(backend, name, flags):
- import os
- if name is None:
- if sys.platform != "win32":
- return backend.load_library(None, flags)
- name = "c" # Windows: load_library(None) fails, but this works
- # on Python 2 (backward compatibility hack only)
- first_error = None
- if '.' in name or '/' in name or os.sep in name:
- try:
- return backend.load_library(name, flags)
- except OSError as e:
- first_error = e
- import ctypes.util
- path = ctypes.util.find_library(name)
- if path is None:
- if name == "c" and sys.platform == "win32" and sys.version_info >= (3,):
- raise OSError("dlopen(None) cannot work on Windows for Python 3 "
- "(see http://bugs.python.org/issue23606)")
- msg = ("ctypes.util.find_library() did not manage "
- "to locate a library called %r" % (name,))
- if first_error is not None:
- msg = "%s. Additionally, %s" % (first_error, msg)
- raise OSError(msg)
- return backend.load_library(path, flags)
-
-def _make_ffi_library(ffi, libname, flags):
- backend = ffi._backend
- backendlib = _load_backend_lib(backend, libname, flags)
- #
- def accessor_function(name):
- key = 'function ' + name
- tp, _ = ffi._parser._declarations[key]
- BType = ffi._get_cached_btype(tp)
- value = backendlib.load_function(BType, name)
- library.__dict__[name] = value
- #
- def accessor_variable(name):
- key = 'variable ' + name
- tp, _ = ffi._parser._declarations[key]
- BType = ffi._get_cached_btype(tp)
- read_variable = backendlib.read_variable
- write_variable = backendlib.write_variable
- setattr(FFILibrary, name, property(
- lambda self: read_variable(BType, name),
- lambda self, value: write_variable(BType, name, value)))
- #
- def addressof_var(name):
- try:
- return addr_variables[name]
- except KeyError:
- with ffi._lock:
- if name not in addr_variables:
- key = 'variable ' + name
- tp, _ = ffi._parser._declarations[key]
- BType = ffi._get_cached_btype(tp)
- if BType.kind != 'array':
- BType = model.pointer_cache(ffi, BType)
- p = backendlib.load_function(BType, name)
- addr_variables[name] = p
- return addr_variables[name]
- #
- def accessor_constant(name):
- raise NotImplementedError("non-integer constant '%s' cannot be "
- "accessed from a dlopen() library" % (name,))
- #
- def accessor_int_constant(name):
- library.__dict__[name] = ffi._parser._int_constants[name]
- #
- accessors = {}
- accessors_version = [False]
- addr_variables = {}
- #
- def update_accessors():
- if accessors_version[0] is ffi._cdef_version:
- return
- #
- for key, (tp, _) in ffi._parser._declarations.items():
- if not isinstance(tp, model.EnumType):
- tag, name = key.split(' ', 1)
- if tag == 'function':
- accessors[name] = accessor_function
- elif tag == 'variable':
- accessors[name] = accessor_variable
- elif tag == 'constant':
- accessors[name] = accessor_constant
- else:
- for i, enumname in enumerate(tp.enumerators):
- def accessor_enum(name, tp=tp, i=i):
- tp.check_not_partial()
- library.__dict__[name] = tp.enumvalues[i]
- accessors[enumname] = accessor_enum
- for name in ffi._parser._int_constants:
- accessors.setdefault(name, accessor_int_constant)
- accessors_version[0] = ffi._cdef_version
- #
- def make_accessor(name):
- with ffi._lock:
- if name in library.__dict__ or name in FFILibrary.__dict__:
- return # added by another thread while waiting for the lock
- if name not in accessors:
- update_accessors()
- if name not in accessors:
- raise AttributeError(name)
- accessors[name](name)
- #
- class FFILibrary(object):
- def __getattr__(self, name):
- make_accessor(name)
- return getattr(self, name)
- def __setattr__(self, name, value):
- try:
- property = getattr(self.__class__, name)
- except AttributeError:
- make_accessor(name)
- setattr(self, name, value)
- else:
- property.__set__(self, value)
- def __dir__(self):
- with ffi._lock:
- update_accessors()
- return accessors.keys()
- def __addressof__(self, name):
- if name in library.__dict__:
- return library.__dict__[name]
- if name in FFILibrary.__dict__:
- return addressof_var(name)
- make_accessor(name)
- if name in library.__dict__:
- return library.__dict__[name]
- if name in FFILibrary.__dict__:
- return addressof_var(name)
- raise AttributeError("cffi library has no function or "
- "global variable named '%s'" % (name,))
- def __cffi_close__(self):
- backendlib.close_lib()
- self.__dict__.clear()
- #
- if libname is not None:
- try:
- if not isinstance(libname, str): # unicode, on Python 2
- libname = libname.encode('utf-8')
- FFILibrary.__name__ = 'FFILibrary_%s' % libname
- except UnicodeError:
- pass
- library = FFILibrary()
- return library, library.__dict__
-
-def _builtin_function_type(func):
- # a hack to make at least ffi.typeof(builtin_function) work,
- # if the builtin function was obtained by 'vengine_cpy'.
- import sys
- try:
- module = sys.modules[func.__module__]
- ffi = module._cffi_original_ffi
- types_of_builtin_funcs = module._cffi_types_of_builtin_funcs
- tp = types_of_builtin_funcs[func]
- except (KeyError, AttributeError, TypeError):
- return None
- else:
- with ffi._lock:
- return ffi._get_cached_btype(tp)
diff --git a/functions/source/GitPullS3/cffi/backend_ctypes.py b/functions/source/GitPullS3/cffi/backend_ctypes.py
deleted file mode 100644
index 5ef3c13..0000000
--- a/functions/source/GitPullS3/cffi/backend_ctypes.py
+++ /dev/null
@@ -1,1114 +0,0 @@
-import ctypes, ctypes.util, operator, sys
-from . import model
-
-if sys.version_info < (3,):
- bytechr = chr
-else:
- unicode = str
- long = int
- xrange = range
- bytechr = lambda num: bytes([num])
-
-class CTypesType(type):
- pass
-
-class CTypesData(object):
- __metaclass__ = CTypesType
- __slots__ = ['__weakref__']
- __name__ = ''
-
- def __init__(self, *args):
- raise TypeError("cannot instantiate %r" % (self.__class__,))
-
- @classmethod
- def _newp(cls, init):
- raise TypeError("expected a pointer or array ctype, got '%s'"
- % (cls._get_c_name(),))
-
- @staticmethod
- def _to_ctypes(value):
- raise TypeError
-
- @classmethod
- def _arg_to_ctypes(cls, *value):
- try:
- ctype = cls._ctype
- except AttributeError:
- raise TypeError("cannot create an instance of %r" % (cls,))
- if value:
- res = cls._to_ctypes(*value)
- if not isinstance(res, ctype):
- res = cls._ctype(res)
- else:
- res = cls._ctype()
- return res
-
- @classmethod
- def _create_ctype_obj(cls, init):
- if init is None:
- return cls._arg_to_ctypes()
- else:
- return cls._arg_to_ctypes(init)
-
- @staticmethod
- def _from_ctypes(ctypes_value):
- raise TypeError
-
- @classmethod
- def _get_c_name(cls, replace_with=''):
- return cls._reftypename.replace(' &', replace_with)
-
- @classmethod
- def _fix_class(cls):
- cls.__name__ = 'CData<%s>' % (cls._get_c_name(),)
- cls.__qualname__ = 'CData<%s>' % (cls._get_c_name(),)
- cls.__module__ = 'ffi'
-
- def _get_own_repr(self):
- raise NotImplementedError
-
- def _addr_repr(self, address):
- if address == 0:
- return 'NULL'
- else:
- if address < 0:
- address += 1 << (8*ctypes.sizeof(ctypes.c_void_p))
- return '0x%x' % address
-
- def __repr__(self, c_name=None):
- own = self._get_own_repr()
- return '<cdata %r %s>' % (c_name or self._get_c_name(), own)
-
- def _convert_to_address(self, BClass):
- if BClass is None:
- raise TypeError("cannot convert %r to an address" % (
- self._get_c_name(),))
- else:
- raise TypeError("cannot convert %r to %r" % (
- self._get_c_name(), BClass._get_c_name()))
-
- @classmethod
- def _get_size(cls):
- return ctypes.sizeof(cls._ctype)
-
- def _get_size_of_instance(self):
- return ctypes.sizeof(self._ctype)
-
- @classmethod
- def _cast_from(cls, source):
- raise TypeError("cannot cast to %r" % (cls._get_c_name(),))
-
- def _cast_to_integer(self):
- return self._convert_to_address(None)
-
- @classmethod
- def _alignment(cls):
- return ctypes.alignment(cls._ctype)
-
- def __iter__(self):
- raise TypeError("cdata %r does not support iteration" % (
- self._get_c_name()),)
-
- def _make_cmp(name):
- cmpfunc = getattr(operator, name)
- def cmp(self, other):
- v_is_ptr = not isinstance(self, CTypesGenericPrimitive)
- w_is_ptr = (isinstance(other, CTypesData) and
- not isinstance(other, CTypesGenericPrimitive))
- if v_is_ptr and w_is_ptr:
- return cmpfunc(self._convert_to_address(None),
- other._convert_to_address(None))
- elif v_is_ptr or w_is_ptr:
- return NotImplemented
- else:
- if isinstance(self, CTypesGenericPrimitive):
- self = self._value
- if isinstance(other, CTypesGenericPrimitive):
- other = other._value
- return cmpfunc(self, other)
- cmp.func_name = name
- return cmp
-
- __eq__ = _make_cmp('__eq__')
- __ne__ = _make_cmp('__ne__')
- __lt__ = _make_cmp('__lt__')
- __le__ = _make_cmp('__le__')
- __gt__ = _make_cmp('__gt__')
- __ge__ = _make_cmp('__ge__')
-
- def __hash__(self):
- return hash(self._convert_to_address(None))
-
- def _to_string(self, maxlen):
- raise TypeError("string(): %r" % (self,))
-
-
-class CTypesGenericPrimitive(CTypesData):
- __slots__ = []
-
- def __hash__(self):
- return hash(self._value)
-
- def _get_own_repr(self):
- return repr(self._from_ctypes(self._value))
-
-
-class CTypesGenericArray(CTypesData):
- __slots__ = []
-
- @classmethod
- def _newp(cls, init):
- return cls(init)
-
- def __iter__(self):
- for i in xrange(len(self)):
- yield self[i]
-
- def _get_own_repr(self):
- return self._addr_repr(ctypes.addressof(self._blob))
-
-
-class CTypesGenericPtr(CTypesData):
- __slots__ = ['_address', '_as_ctype_ptr']
- _automatic_casts = False
- kind = "pointer"
-
- @classmethod
- def _newp(cls, init):
- return cls(init)
-
- @classmethod
- def _cast_from(cls, source):
- if source is None:
- address = 0
- elif isinstance(source, CTypesData):
- address = source._cast_to_integer()
- elif isinstance(source, (int, long)):
- address = source
- else:
- raise TypeError("bad type for cast to %r: %r" %
- (cls, type(source).__name__))
- return cls._new_pointer_at(address)
-
- @classmethod
- def _new_pointer_at(cls, address):
- self = cls.__new__(cls)
- self._address = address
- self._as_ctype_ptr = ctypes.cast(address, cls._ctype)
- return self
-
- def _get_own_repr(self):
- try:
- return self._addr_repr(self._address)
- except AttributeError:
- return '???'
-
- def _cast_to_integer(self):
- return self._address
-
- def __nonzero__(self):
- return bool(self._address)
- __bool__ = __nonzero__
-
- @classmethod
- def _to_ctypes(cls, value):
- if not isinstance(value, CTypesData):
- raise TypeError("unexpected %s object" % type(value).__name__)
- address = value._convert_to_address(cls)
- return ctypes.cast(address, cls._ctype)
-
- @classmethod
- def _from_ctypes(cls, ctypes_ptr):
- address = ctypes.cast(ctypes_ptr, ctypes.c_void_p).value or 0
- return cls._new_pointer_at(address)
-
- @classmethod
- def _initialize(cls, ctypes_ptr, value):
- if value:
- ctypes_ptr.contents = cls._to_ctypes(value).contents
-
- def _convert_to_address(self, BClass):
- if (BClass in (self.__class__, None) or BClass._automatic_casts
- or self._automatic_casts):
- return self._address
- else:
- return CTypesData._convert_to_address(self, BClass)
-
-
-class CTypesBaseStructOrUnion(CTypesData):
- __slots__ = ['_blob']
-
- @classmethod
- def _create_ctype_obj(cls, init):
- # may be overridden
- raise TypeError("cannot instantiate opaque type %s" % (cls,))
-
- def _get_own_repr(self):
- return self._addr_repr(ctypes.addressof(self._blob))
-
- @classmethod
- def _offsetof(cls, fieldname):
- return getattr(cls._ctype, fieldname).offset
-
- def _convert_to_address(self, BClass):
- if getattr(BClass, '_BItem', None) is self.__class__:
- return ctypes.addressof(self._blob)
- else:
- return CTypesData._convert_to_address(self, BClass)
-
- @classmethod
- def _from_ctypes(cls, ctypes_struct_or_union):
- self = cls.__new__(cls)
- self._blob = ctypes_struct_or_union
- return self
-
- @classmethod
- def _to_ctypes(cls, value):
- return value._blob
-
- def __repr__(self, c_name=None):
- return CTypesData.__repr__(self, c_name or self._get_c_name(' &'))
-
-
-class CTypesBackend(object):
-
- PRIMITIVE_TYPES = {
- 'char': ctypes.c_char,
- 'short': ctypes.c_short,
- 'int': ctypes.c_int,
- 'long': ctypes.c_long,
- 'long long': ctypes.c_longlong,
- 'signed char': ctypes.c_byte,
- 'unsigned char': ctypes.c_ubyte,
- 'unsigned short': ctypes.c_ushort,
- 'unsigned int': ctypes.c_uint,
- 'unsigned long': ctypes.c_ulong,
- 'unsigned long long': ctypes.c_ulonglong,
- 'float': ctypes.c_float,
- 'double': ctypes.c_double,
- '_Bool': ctypes.c_bool,
- }
-
- for _name in ['unsigned long long', 'unsigned long',
- 'unsigned int', 'unsigned short', 'unsigned char']:
- _size = ctypes.sizeof(PRIMITIVE_TYPES[_name])
- PRIMITIVE_TYPES['uint%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name]
- if _size == ctypes.sizeof(ctypes.c_void_p):
- PRIMITIVE_TYPES['uintptr_t'] = PRIMITIVE_TYPES[_name]
- if _size == ctypes.sizeof(ctypes.c_size_t):
- PRIMITIVE_TYPES['size_t'] = PRIMITIVE_TYPES[_name]
-
- for _name in ['long long', 'long', 'int', 'short', 'signed char']:
- _size = ctypes.sizeof(PRIMITIVE_TYPES[_name])
- PRIMITIVE_TYPES['int%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name]
- if _size == ctypes.sizeof(ctypes.c_void_p):
- PRIMITIVE_TYPES['intptr_t'] = PRIMITIVE_TYPES[_name]
- PRIMITIVE_TYPES['ptrdiff_t'] = PRIMITIVE_TYPES[_name]
- if _size == ctypes.sizeof(ctypes.c_size_t):
- PRIMITIVE_TYPES['ssize_t'] = PRIMITIVE_TYPES[_name]
-
-
- def __init__(self):
- self.RTLD_LAZY = 0 # not supported anyway by ctypes
- self.RTLD_NOW = 0
- self.RTLD_GLOBAL = ctypes.RTLD_GLOBAL
- self.RTLD_LOCAL = ctypes.RTLD_LOCAL
-
- def set_ffi(self, ffi):
- self.ffi = ffi
-
- def _get_types(self):
- return CTypesData, CTypesType
-
- def load_library(self, path, flags=0):
- cdll = ctypes.CDLL(path, flags)
- return CTypesLibrary(self, cdll)
-
- def new_void_type(self):
- class CTypesVoid(CTypesData):
- __slots__ = []
- _reftypename = 'void &'
- @staticmethod
- def _from_ctypes(novalue):
- return None
- @staticmethod
- def _to_ctypes(novalue):
- if novalue is not None:
- raise TypeError("None expected, got %s object" %
- (type(novalue).__name__,))
- return None
- CTypesVoid._fix_class()
- return CTypesVoid
-
- def new_primitive_type(self, name):
- if name == 'wchar_t':
- raise NotImplementedError(name)
- ctype = self.PRIMITIVE_TYPES[name]
- if name == 'char':
- kind = 'char'
- elif name in ('float', 'double'):
- kind = 'float'
- else:
- if name in ('signed char', 'unsigned char'):
- kind = 'byte'
- elif name == '_Bool':
- kind = 'bool'
- else:
- kind = 'int'
- is_signed = (ctype(-1).value == -1)
- #
- def _cast_source_to_int(source):
- if isinstance(source, (int, long, float)):
- source = int(source)
- elif isinstance(source, CTypesData):
- source = source._cast_to_integer()
- elif isinstance(source, bytes):
- source = ord(source)
- elif source is None:
- source = 0
- else:
- raise TypeError("bad type for cast to %r: %r" %
- (CTypesPrimitive, type(source).__name__))
- return source
- #
- kind1 = kind
- class CTypesPrimitive(CTypesGenericPrimitive):
- __slots__ = ['_value']
- _ctype = ctype
- _reftypename = '%s &' % name
- kind = kind1
-
- def __init__(self, value):
- self._value = value
-
- @staticmethod
- def _create_ctype_obj(init):
- if init is None:
- return ctype()
- return ctype(CTypesPrimitive._to_ctypes(init))
-
- if kind == 'int' or kind == 'byte':
- @classmethod
- def _cast_from(cls, source):
- source = _cast_source_to_int(source)
- source = ctype(source).value # cast within range
- return cls(source)
- def __int__(self):
- return self._value
-
- if kind == 'bool':
- @classmethod
- def _cast_from(cls, source):
- if not isinstance(source, (int, long, float)):
- source = _cast_source_to_int(source)
- return cls(bool(source))
- def __int__(self):
- return self._value
-
- if kind == 'char':
- @classmethod
- def _cast_from(cls, source):
- source = _cast_source_to_int(source)
- source = bytechr(source & 0xFF)
- return cls(source)
- def __int__(self):
- return ord(self._value)
-
- if kind == 'float':
- @classmethod
- def _cast_from(cls, source):
- if isinstance(source, float):
- pass
- elif isinstance(source, CTypesGenericPrimitive):
- if hasattr(source, '__float__'):
- source = float(source)
- else:
- source = int(source)
- else:
- source = _cast_source_to_int(source)
- source = ctype(source).value # fix precision
- return cls(source)
- def __int__(self):
- return int(self._value)
- def __float__(self):
- return self._value
-
- _cast_to_integer = __int__
-
- if kind == 'int' or kind == 'byte' or kind == 'bool':
- @staticmethod
- def _to_ctypes(x):
- if not isinstance(x, (int, long)):
- if isinstance(x, CTypesData):
- x = int(x)
- else:
- raise TypeError("integer expected, got %s" %
- type(x).__name__)
- if ctype(x).value != x:
- if not is_signed and x < 0:
- raise OverflowError("%s: negative integer" % name)
- else:
- raise OverflowError("%s: integer out of bounds"
- % name)
- return x
-
- if kind == 'char':
- @staticmethod
- def _to_ctypes(x):
- if isinstance(x, bytes) and len(x) == 1:
- return x
- if isinstance(x, CTypesPrimitive): # <CData <char>>
- return x._value
- raise TypeError("character expected, got %s" %
- type(x).__name__)
- def __nonzero__(self):
- return ord(self._value) != 0
- else:
- def __nonzero__(self):
- return self._value != 0
- __bool__ = __nonzero__
-
- if kind == 'float':
- @staticmethod
- def _to_ctypes(x):
- if not isinstance(x, (int, long, float, CTypesData)):
- raise TypeError("float expected, got %s" %
- type(x).__name__)
- return ctype(x).value
-
- @staticmethod
- def _from_ctypes(value):
- return getattr(value, 'value', value)
-
- @staticmethod
- def _initialize(blob, init):
- blob.value = CTypesPrimitive._to_ctypes(init)
-
- if kind == 'char':
- def _to_string(self, maxlen):
- return self._value
- if kind == 'byte':
- def _to_string(self, maxlen):
- return chr(self._value & 0xff)
- #
- CTypesPrimitive._fix_class()
- return CTypesPrimitive
-
- def new_pointer_type(self, BItem):
- getbtype = self.ffi._get_cached_btype
- if BItem is getbtype(model.PrimitiveType('char')):
- kind = 'charp'
- elif BItem in (getbtype(model.PrimitiveType('signed char')),
- getbtype(model.PrimitiveType('unsigned char'))):
- kind = 'bytep'
- elif BItem is getbtype(model.void_type):
- kind = 'voidp'
- else:
- kind = 'generic'
- #
- class CTypesPtr(CTypesGenericPtr):
- __slots__ = ['_own']
- if kind == 'charp':
- __slots__ += ['__as_strbuf']
- _BItem = BItem
- if hasattr(BItem, '_ctype'):
- _ctype = ctypes.POINTER(BItem._ctype)
- _bitem_size = ctypes.sizeof(BItem._ctype)
- else:
- _ctype = ctypes.c_void_p
- if issubclass(BItem, CTypesGenericArray):
- _reftypename = BItem._get_c_name('(* &)')
- else:
- _reftypename = BItem._get_c_name(' * &')
-
- def __init__(self, init):
- ctypeobj = BItem._create_ctype_obj(init)
- if kind == 'charp':
- self.__as_strbuf = ctypes.create_string_buffer(
- ctypeobj.value + b'\x00')
- self._as_ctype_ptr = ctypes.cast(
- self.__as_strbuf, self._ctype)
- else:
- self._as_ctype_ptr = ctypes.pointer(ctypeobj)
- self._address = ctypes.cast(self._as_ctype_ptr,
- ctypes.c_void_p).value
- self._own = True
-
- def __add__(self, other):
- if isinstance(other, (int, long)):
- return self._new_pointer_at(self._address +
- other * self._bitem_size)
- else:
- return NotImplemented
-
- def __sub__(self, other):
- if isinstance(other, (int, long)):
- return self._new_pointer_at(self._address -
- other * self._bitem_size)
- elif type(self) is type(other):
- return (self._address - other._address) // self._bitem_size
- else:
- return NotImplemented
-
- def __getitem__(self, index):
- if getattr(self, '_own', False) and index != 0:
- raise IndexError
- return BItem._from_ctypes(self._as_ctype_ptr[index])
-
- def __setitem__(self, index, value):
- self._as_ctype_ptr[index] = BItem._to_ctypes(value)
-
- if kind == 'charp' or kind == 'voidp':
- @classmethod
- def _arg_to_ctypes(cls, *value):
- if value and isinstance(value[0], bytes):
- return ctypes.c_char_p(value[0])
- else:
- return super(CTypesPtr, cls)._arg_to_ctypes(*value)
-
- if kind == 'charp' or kind == 'bytep':
- def _to_string(self, maxlen):
- if maxlen < 0:
- maxlen = sys.maxsize
- p = ctypes.cast(self._as_ctype_ptr,
- ctypes.POINTER(ctypes.c_char))
- n = 0
- while n < maxlen and p[n] != b'\x00':
- n += 1
- return b''.join([p[i] for i in range(n)])
-
- def _get_own_repr(self):
- if getattr(self, '_own', False):
- return 'owning %d bytes' % (
- ctypes.sizeof(self._as_ctype_ptr.contents),)
- return super(CTypesPtr, self)._get_own_repr()
- #
- if (BItem is self.ffi._get_cached_btype(model.void_type) or
- BItem is self.ffi._get_cached_btype(model.PrimitiveType('char'))):
- CTypesPtr._automatic_casts = True
- #
- CTypesPtr._fix_class()
- return CTypesPtr
-
- def new_array_type(self, CTypesPtr, length):
- if length is None:
- brackets = ' &[]'
- else:
- brackets = ' &[%d]' % length
- BItem = CTypesPtr._BItem
- getbtype = self.ffi._get_cached_btype
- if BItem is getbtype(model.PrimitiveType('char')):
- kind = 'char'
- elif BItem in (getbtype(model.PrimitiveType('signed char')),
- getbtype(model.PrimitiveType('unsigned char'))):
- kind = 'byte'
- else:
- kind = 'generic'
- #
- class CTypesArray(CTypesGenericArray):
- __slots__ = ['_blob', '_own']
- if length is not None:
- _ctype = BItem._ctype * length
- else:
- __slots__.append('_ctype')
- _reftypename = BItem._get_c_name(brackets)
- _declared_length = length
- _CTPtr = CTypesPtr
-
- def __init__(self, init):
- if length is None:
- if isinstance(init, (int, long)):
- len1 = init
- init = None
- elif kind == 'char' and isinstance(init, bytes):
- len1 = len(init) + 1 # extra null
- else:
- init = tuple(init)
- len1 = len(init)
- self._ctype = BItem._ctype * len1
- self._blob = self._ctype()
- self._own = True
- if init is not None:
- self._initialize(self._blob, init)
-
- @staticmethod
- def _initialize(blob, init):
- if isinstance(init, bytes):
- init = [init[i:i+1] for i in range(len(init))]
- else:
- init = tuple(init)
- if len(init) > len(blob):
- raise IndexError("too many initializers")
- addr = ctypes.cast(blob, ctypes.c_void_p).value
- PTR = ctypes.POINTER(BItem._ctype)
- itemsize = ctypes.sizeof(BItem._ctype)
- for i, value in enumerate(init):
- p = ctypes.cast(addr + i * itemsize, PTR)
- BItem._initialize(p.contents, value)
-
- def __len__(self):
- return len(self._blob)
-
- def __getitem__(self, index):
- if not (0 <= index < len(self._blob)):
- raise IndexError
- return BItem._from_ctypes(self._blob[index])
-
- def __setitem__(self, index, value):
- if not (0 <= index < len(self._blob)):
- raise IndexError
- self._blob[index] = BItem._to_ctypes(value)
-
- if kind == 'char' or kind == 'byte':
- def _to_string(self, maxlen):
- if maxlen < 0:
- maxlen = len(self._blob)
- p = ctypes.cast(self._blob,
- ctypes.POINTER(ctypes.c_char))
- n = 0
- while n < maxlen and p[n] != b'\x00':
- n += 1
- return b''.join([p[i] for i in range(n)])
-
- def _get_own_repr(self):
- if getattr(self, '_own', False):
- return 'owning %d bytes' % (ctypes.sizeof(self._blob),)
- return super(CTypesArray, self)._get_own_repr()
-
- def _convert_to_address(self, BClass):
- if BClass in (CTypesPtr, None) or BClass._automatic_casts:
- return ctypes.addressof(self._blob)
- else:
- return CTypesData._convert_to_address(self, BClass)
-
- @staticmethod
- def _from_ctypes(ctypes_array):
- self = CTypesArray.__new__(CTypesArray)
- self._blob = ctypes_array
- return self
-
- @staticmethod
- def _arg_to_ctypes(value):
- return CTypesPtr._arg_to_ctypes(value)
-
- def __add__(self, other):
- if isinstance(other, (int, long)):
- return CTypesPtr._new_pointer_at(
- ctypes.addressof(self._blob) +
- other * ctypes.sizeof(BItem._ctype))
- else:
- return NotImplemented
-
- @classmethod
- def _cast_from(cls, source):
- raise NotImplementedError("casting to %r" % (
- cls._get_c_name(),))
- #
- CTypesArray._fix_class()
- return CTypesArray
-
- def _new_struct_or_union(self, kind, name, base_ctypes_class):
- #
- class struct_or_union(base_ctypes_class):
- pass
- struct_or_union.__name__ = '%s_%s' % (kind, name)
- kind1 = kind
- #
- class CTypesStructOrUnion(CTypesBaseStructOrUnion):
- __slots__ = ['_blob']
- _ctype = struct_or_union
- _reftypename = '%s &' % (name,)
- _kind = kind = kind1
- #
- CTypesStructOrUnion._fix_class()
- return CTypesStructOrUnion
-
- def new_struct_type(self, name):
- return self._new_struct_or_union('struct', name, ctypes.Structure)
-
- def new_union_type(self, name):
- return self._new_struct_or_union('union', name, ctypes.Union)
-
- def complete_struct_or_union(self, CTypesStructOrUnion, fields, tp,
- totalsize=-1, totalalignment=-1, sflags=0):
- if totalsize >= 0 or totalalignment >= 0:
- raise NotImplementedError("the ctypes backend of CFFI does not support "
- "structures completed by verify(); please "
- "compile and install the _cffi_backend module.")
- struct_or_union = CTypesStructOrUnion._ctype
- fnames = [fname for (fname, BField, bitsize) in fields]
- btypes = [BField for (fname, BField, bitsize) in fields]
- bitfields = [bitsize for (fname, BField, bitsize) in fields]
- #
- bfield_types = {}
- cfields = []
- for (fname, BField, bitsize) in fields:
- if bitsize < 0:
- cfields.append((fname, BField._ctype))
- bfield_types[fname] = BField
- else:
- cfields.append((fname, BField._ctype, bitsize))
- bfield_types[fname] = Ellipsis
- if sflags & 8:
- struct_or_union._pack_ = 1
- struct_or_union._fields_ = cfields
- CTypesStructOrUnion._bfield_types = bfield_types
- #
- @staticmethod
- def _create_ctype_obj(init):
- result = struct_or_union()
- if init is not None:
- initialize(result, init)
- return result
- CTypesStructOrUnion._create_ctype_obj = _create_ctype_obj
- #
- def initialize(blob, init):
- if is_union:
- if len(init) > 1:
- raise ValueError("union initializer: %d items given, but "
- "only one supported (use a dict if needed)"
- % (len(init),))
- if not isinstance(init, dict):
- if isinstance(init, (bytes, unicode)):
- raise TypeError("union initializer: got a str")
- init = tuple(init)
- if len(init) > len(fnames):
- raise ValueError("too many values for %s initializer" %
- CTypesStructOrUnion._get_c_name())
- init = dict(zip(fnames, init))
- addr = ctypes.addressof(blob)
- for fname, value in init.items():
- BField, bitsize = name2fieldtype[fname]
- assert bitsize < 0, \
- "not implemented: initializer with bit fields"
- offset = CTypesStructOrUnion._offsetof(fname)
- PTR = ctypes.POINTER(BField._ctype)
- p = ctypes.cast(addr + offset, PTR)
- BField._initialize(p.contents, value)
- is_union = CTypesStructOrUnion._kind == 'union'
- name2fieldtype = dict(zip(fnames, zip(btypes, bitfields)))
- #
- for fname, BField, bitsize in fields:
- if fname == '':
- raise NotImplementedError("nested anonymous structs/unions")
- if hasattr(CTypesStructOrUnion, fname):
- raise ValueError("the field name %r conflicts in "
- "the ctypes backend" % fname)
- if bitsize < 0:
- def getter(self, fname=fname, BField=BField,
- offset=CTypesStructOrUnion._offsetof(fname),
- PTR=ctypes.POINTER(BField._ctype)):
- addr = ctypes.addressof(self._blob)
- p = ctypes.cast(addr + offset, PTR)
- return BField._from_ctypes(p.contents)
- def setter(self, value, fname=fname, BField=BField):
- setattr(self._blob, fname, BField._to_ctypes(value))
- #
- if issubclass(BField, CTypesGenericArray):
- setter = None
- if BField._declared_length == 0:
- def getter(self, fname=fname, BFieldPtr=BField._CTPtr,
- offset=CTypesStructOrUnion._offsetof(fname),
- PTR=ctypes.POINTER(BField._ctype)):
- addr = ctypes.addressof(self._blob)
- p = ctypes.cast(addr + offset, PTR)
- return BFieldPtr._from_ctypes(p)
- #
- else:
- def getter(self, fname=fname, BField=BField):
- return BField._from_ctypes(getattr(self._blob, fname))
- def setter(self, value, fname=fname, BField=BField):
- # xxx obscure workaround
- value = BField._to_ctypes(value)
- oldvalue = getattr(self._blob, fname)
- setattr(self._blob, fname, value)
- if value != getattr(self._blob, fname):
- setattr(self._blob, fname, oldvalue)
- raise OverflowError("value too large for bitfield")
- setattr(CTypesStructOrUnion, fname, property(getter, setter))
- #
- CTypesPtr = self.ffi._get_cached_btype(model.PointerType(tp))
- for fname in fnames:
- if hasattr(CTypesPtr, fname):
- raise ValueError("the field name %r conflicts in "
- "the ctypes backend" % fname)
- def getter(self, fname=fname):
- return getattr(self[0], fname)
- def setter(self, value, fname=fname):
- setattr(self[0], fname, value)
- setattr(CTypesPtr, fname, property(getter, setter))
-
- def new_function_type(self, BArgs, BResult, has_varargs):
- nameargs = [BArg._get_c_name() for BArg in BArgs]
- if has_varargs:
- nameargs.append('...')
- nameargs = ', '.join(nameargs)
- #
- class CTypesFunctionPtr(CTypesGenericPtr):
- __slots__ = ['_own_callback', '_name']
- _ctype = ctypes.CFUNCTYPE(getattr(BResult, '_ctype', None),
- *[BArg._ctype for BArg in BArgs],
- use_errno=True)
- _reftypename = BResult._get_c_name('(* &)(%s)' % (nameargs,))
-
- def __init__(self, init, error=None):
- # create a callback to the Python callable init()
- import traceback
- assert not has_varargs, "varargs not supported for callbacks"
- if getattr(BResult, '_ctype', None) is not None:
- error = BResult._from_ctypes(
- BResult._create_ctype_obj(error))
- else:
- error = None
- def callback(*args):
- args2 = []
- for arg, BArg in zip(args, BArgs):
- args2.append(BArg._from_ctypes(arg))
- try:
- res2 = init(*args2)
- res2 = BResult._to_ctypes(res2)
- except:
- traceback.print_exc()
- res2 = error
- if issubclass(BResult, CTypesGenericPtr):
- if res2:
- res2 = ctypes.cast(res2, ctypes.c_void_p).value
- # .value: http://bugs.python.org/issue1574593
- else:
- res2 = None
- #print repr(res2)
- return res2
- if issubclass(BResult, CTypesGenericPtr):
- # The only pointers callbacks can return are void*s:
- # http://bugs.python.org/issue5710
- callback_ctype = ctypes.CFUNCTYPE(
- ctypes.c_void_p,
- *[BArg._ctype for BArg in BArgs],
- use_errno=True)
- else:
- callback_ctype = CTypesFunctionPtr._ctype
- self._as_ctype_ptr = callback_ctype(callback)
- self._address = ctypes.cast(self._as_ctype_ptr,
- ctypes.c_void_p).value
- self._own_callback = init
-
- @staticmethod
- def _initialize(ctypes_ptr, value):
- if value:
- raise NotImplementedError("ctypes backend: not supported: "
- "initializers for function pointers")
-
- def __repr__(self):
- c_name = getattr(self, '_name', None)
- if c_name:
- i = self._reftypename.index('(* &)')
- if self._reftypename[i-1] not in ' )*':
- c_name = ' ' + c_name
- c_name = self._reftypename.replace('(* &)', c_name)
- return CTypesData.__repr__(self, c_name)
-
- def _get_own_repr(self):
- if getattr(self, '_own_callback', None) is not None:
- return 'calling %r' % (self._own_callback,)
- return super(CTypesFunctionPtr, self)._get_own_repr()
-
- def __call__(self, *args):
- if has_varargs:
- assert len(args) >= len(BArgs)
- extraargs = args[len(BArgs):]
- args = args[:len(BArgs)]
- else:
- assert len(args) == len(BArgs)
- ctypes_args = []
- for arg, BArg in zip(args, BArgs):
- ctypes_args.append(BArg._arg_to_ctypes(arg))
- if has_varargs:
- for i, arg in enumerate(extraargs):
- if arg is None:
- ctypes_args.append(ctypes.c_void_p(0)) # NULL
- continue
- if not isinstance(arg, CTypesData):
- raise TypeError(
- "argument %d passed in the variadic part "
- "needs to be a cdata object (got %s)" %
- (1 + len(BArgs) + i, type(arg).__name__))
- ctypes_args.append(arg._arg_to_ctypes(arg))
- result = self._as_ctype_ptr(*ctypes_args)
- return BResult._from_ctypes(result)
- #
- CTypesFunctionPtr._fix_class()
- return CTypesFunctionPtr
-
- def new_enum_type(self, name, enumerators, enumvalues, CTypesInt):
- assert isinstance(name, str)
- reverse_mapping = dict(zip(reversed(enumvalues),
- reversed(enumerators)))
- #
- class CTypesEnum(CTypesInt):
- __slots__ = []
- _reftypename = '%s &' % name
-
- def _get_own_repr(self):
- value = self._value
- try:
- return '%d: %s' % (value, reverse_mapping[value])
- except KeyError:
- return str(value)
-
- def _to_string(self, maxlen):
- value = self._value
- try:
- return reverse_mapping[value]
- except KeyError:
- return str(value)
- #
- CTypesEnum._fix_class()
- return CTypesEnum
-
- def get_errno(self):
- return ctypes.get_errno()
-
- def set_errno(self, value):
- ctypes.set_errno(value)
-
- def string(self, b, maxlen=-1):
- return b._to_string(maxlen)
-
- def buffer(self, bptr, size=-1):
- raise NotImplementedError("buffer() with ctypes backend")
-
- def sizeof(self, cdata_or_BType):
- if isinstance(cdata_or_BType, CTypesData):
- return cdata_or_BType._get_size_of_instance()
- else:
- assert issubclass(cdata_or_BType, CTypesData)
- return cdata_or_BType._get_size()
-
- def alignof(self, BType):
- assert issubclass(BType, CTypesData)
- return BType._alignment()
-
- def newp(self, BType, source):
- if not issubclass(BType, CTypesData):
- raise TypeError
- return BType._newp(source)
-
- def cast(self, BType, source):
- return BType._cast_from(source)
-
- def callback(self, BType, source, error, onerror):
- assert onerror is None # XXX not implemented
- return BType(source, error)
-
- _weakref_cache_ref = None
-
- def gcp(self, cdata, destructor, size=0):
- if self._weakref_cache_ref is None:
- import weakref
- class MyRef(weakref.ref):
- def __eq__(self, other):
- myref = self()
- return self is other or (
- myref is not None and myref is other())
- def __ne__(self, other):
- return not (self == other)
- def __hash__(self):
- try:
- return self._hash
- except AttributeError:
- self._hash = hash(self())
- return self._hash
- self._weakref_cache_ref = {}, MyRef
- weak_cache, MyRef = self._weakref_cache_ref
-
- if destructor is None:
- try:
- del weak_cache[MyRef(cdata)]
- except KeyError:
-                raise TypeError("Can remove destructor only on an object "
-                                "previously returned by ffi.gc()")
- return None
-
- def remove(k):
- cdata, destructor = weak_cache.pop(k, (None, None))
- if destructor is not None:
- destructor(cdata)
-
- new_cdata = self.cast(self.typeof(cdata), cdata)
- assert new_cdata is not cdata
- weak_cache[MyRef(new_cdata, remove)] = (cdata, destructor)
- return new_cdata
-
- typeof = type
-
- def getcname(self, BType, replace_with):
- return BType._get_c_name(replace_with)
-
- def typeoffsetof(self, BType, fieldname, num=0):
- if isinstance(fieldname, str):
- if num == 0 and issubclass(BType, CTypesGenericPtr):
- BType = BType._BItem
- if not issubclass(BType, CTypesBaseStructOrUnion):
- raise TypeError("expected a struct or union ctype")
- BField = BType._bfield_types[fieldname]
- if BField is Ellipsis:
- raise TypeError("not supported for bitfields")
- return (BField, BType._offsetof(fieldname))
- elif isinstance(fieldname, (int, long)):
- if issubclass(BType, CTypesGenericArray):
- BType = BType._CTPtr
- if not issubclass(BType, CTypesGenericPtr):
- raise TypeError("expected an array or ptr ctype")
- BItem = BType._BItem
- offset = BItem._get_size() * fieldname
- if offset > sys.maxsize:
- raise OverflowError
- return (BItem, offset)
- else:
- raise TypeError(type(fieldname))
-
- def rawaddressof(self, BTypePtr, cdata, offset=None):
- if isinstance(cdata, CTypesBaseStructOrUnion):
- ptr = ctypes.pointer(type(cdata)._to_ctypes(cdata))
- elif isinstance(cdata, CTypesGenericPtr):
- if offset is None or not issubclass(type(cdata)._BItem,
- CTypesBaseStructOrUnion):
- raise TypeError("unexpected cdata type")
- ptr = type(cdata)._to_ctypes(cdata)
- elif isinstance(cdata, CTypesGenericArray):
- ptr = type(cdata)._to_ctypes(cdata)
- else:
-            raise TypeError("expected a <cdata 'struct-or-union'>")
- if offset:
- ptr = ctypes.cast(
- ctypes.c_void_p(
- ctypes.cast(ptr, ctypes.c_void_p).value + offset),
- type(ptr))
- return BTypePtr._from_ctypes(ptr)
-
-
-class CTypesLibrary(object):
-
- def __init__(self, backend, cdll):
- self.backend = backend
- self.cdll = cdll
-
- def load_function(self, BType, name):
- c_func = getattr(self.cdll, name)
- funcobj = BType._from_ctypes(c_func)
- funcobj._name = name
- return funcobj
-
- def read_variable(self, BType, name):
- try:
- ctypes_obj = BType._ctype.in_dll(self.cdll, name)
- except AttributeError as e:
- raise NotImplementedError(e)
- return BType._from_ctypes(ctypes_obj)
-
- def write_variable(self, BType, name, value):
- new_ctypes_obj = BType._to_ctypes(value)
- ctypes_obj = BType._ctype.in_dll(self.cdll, name)
- ctypes.memmove(ctypes.addressof(ctypes_obj),
- ctypes.addressof(new_ctypes_obj),
- ctypes.sizeof(BType._ctype))
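
The integer conversion in CTypesPrimitive._to_ctypes in the removed backend_ctypes.py leans on ctypes' silent truncation: if round-tripping a value through the ctypes type changes it, the value did not fit. A minimal stand-alone sketch of that check (the helper name check_fits is made up here, purely for illustration):

import ctypes

def check_fits(ctype, x, is_signed, name):
    # mirrors the "ctype(x).value != x" test used by _to_ctypes above
    if ctype(x).value != x:
        if not is_signed and x < 0:
            raise OverflowError("%s: negative integer" % name)
        raise OverflowError("%s: integer out of bounds" % name)
    return x

print(check_fits(ctypes.c_ubyte, 200, False, "unsigned char"))  # 200
try:
    check_fits(ctypes.c_ubyte, 300, False, "unsigned char")
except OverflowError as exc:
    print(exc)  # unsigned char: integer out of bounds
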
diff --git a/functions/source/GitPullS3/cffi/cffi_opcode.py b/functions/source/GitPullS3/cffi/cffi_opcode.py
deleted file mode 100644
index a0df98d..0000000
--- a/functions/source/GitPullS3/cffi/cffi_opcode.py
+++ /dev/null
@@ -1,187 +0,0 @@
-from .error import VerificationError
-
-class CffiOp(object):
- def __init__(self, op, arg):
- self.op = op
- self.arg = arg
-
- def as_c_expr(self):
- if self.op is None:
- assert isinstance(self.arg, str)
- return '(_cffi_opcode_t)(%s)' % (self.arg,)
- classname = CLASS_NAME[self.op]
- return '_CFFI_OP(_CFFI_OP_%s, %s)' % (classname, self.arg)
-
- def as_python_bytes(self):
- if self.op is None and self.arg.isdigit():
- value = int(self.arg) # non-negative: '-' not in self.arg
- if value >= 2**31:
- raise OverflowError("cannot emit %r: limited to 2**31-1"
- % (self.arg,))
- return format_four_bytes(value)
- if isinstance(self.arg, str):
- raise VerificationError("cannot emit to Python: %r" % (self.arg,))
- return format_four_bytes((self.arg << 8) | self.op)
-
- def __str__(self):
- classname = CLASS_NAME.get(self.op, self.op)
- return '(%s %s)' % (classname, self.arg)
-
-def format_four_bytes(num):
- return '\\x%02X\\x%02X\\x%02X\\x%02X' % (
- (num >> 24) & 0xFF,
- (num >> 16) & 0xFF,
- (num >> 8) & 0xFF,
- (num ) & 0xFF)
-
-OP_PRIMITIVE = 1
-OP_POINTER = 3
-OP_ARRAY = 5
-OP_OPEN_ARRAY = 7
-OP_STRUCT_UNION = 9
-OP_ENUM = 11
-OP_FUNCTION = 13
-OP_FUNCTION_END = 15
-OP_NOOP = 17
-OP_BITFIELD = 19
-OP_TYPENAME = 21
-OP_CPYTHON_BLTN_V = 23 # varargs
-OP_CPYTHON_BLTN_N = 25 # noargs
-OP_CPYTHON_BLTN_O = 27 # O (i.e. a single arg)
-OP_CONSTANT = 29
-OP_CONSTANT_INT = 31
-OP_GLOBAL_VAR = 33
-OP_DLOPEN_FUNC = 35
-OP_DLOPEN_CONST = 37
-OP_GLOBAL_VAR_F = 39
-OP_EXTERN_PYTHON = 41
-
-PRIM_VOID = 0
-PRIM_BOOL = 1
-PRIM_CHAR = 2
-PRIM_SCHAR = 3
-PRIM_UCHAR = 4
-PRIM_SHORT = 5
-PRIM_USHORT = 6
-PRIM_INT = 7
-PRIM_UINT = 8
-PRIM_LONG = 9
-PRIM_ULONG = 10
-PRIM_LONGLONG = 11
-PRIM_ULONGLONG = 12
-PRIM_FLOAT = 13
-PRIM_DOUBLE = 14
-PRIM_LONGDOUBLE = 15
-
-PRIM_WCHAR = 16
-PRIM_INT8 = 17
-PRIM_UINT8 = 18
-PRIM_INT16 = 19
-PRIM_UINT16 = 20
-PRIM_INT32 = 21
-PRIM_UINT32 = 22
-PRIM_INT64 = 23
-PRIM_UINT64 = 24
-PRIM_INTPTR = 25
-PRIM_UINTPTR = 26
-PRIM_PTRDIFF = 27
-PRIM_SIZE = 28
-PRIM_SSIZE = 29
-PRIM_INT_LEAST8 = 30
-PRIM_UINT_LEAST8 = 31
-PRIM_INT_LEAST16 = 32
-PRIM_UINT_LEAST16 = 33
-PRIM_INT_LEAST32 = 34
-PRIM_UINT_LEAST32 = 35
-PRIM_INT_LEAST64 = 36
-PRIM_UINT_LEAST64 = 37
-PRIM_INT_FAST8 = 38
-PRIM_UINT_FAST8 = 39
-PRIM_INT_FAST16 = 40
-PRIM_UINT_FAST16 = 41
-PRIM_INT_FAST32 = 42
-PRIM_UINT_FAST32 = 43
-PRIM_INT_FAST64 = 44
-PRIM_UINT_FAST64 = 45
-PRIM_INTMAX = 46
-PRIM_UINTMAX = 47
-PRIM_FLOATCOMPLEX = 48
-PRIM_DOUBLECOMPLEX = 49
-PRIM_CHAR16 = 50
-PRIM_CHAR32 = 51
-
-_NUM_PRIM = 52
-_UNKNOWN_PRIM = -1
-_UNKNOWN_FLOAT_PRIM = -2
-_UNKNOWN_LONG_DOUBLE = -3
-
-_IO_FILE_STRUCT = -1
-
-PRIMITIVE_TO_INDEX = {
- 'char': PRIM_CHAR,
- 'short': PRIM_SHORT,
- 'int': PRIM_INT,
- 'long': PRIM_LONG,
- 'long long': PRIM_LONGLONG,
- 'signed char': PRIM_SCHAR,
- 'unsigned char': PRIM_UCHAR,
- 'unsigned short': PRIM_USHORT,
- 'unsigned int': PRIM_UINT,
- 'unsigned long': PRIM_ULONG,
- 'unsigned long long': PRIM_ULONGLONG,
- 'float': PRIM_FLOAT,
- 'double': PRIM_DOUBLE,
- 'long double': PRIM_LONGDOUBLE,
- 'float _Complex': PRIM_FLOATCOMPLEX,
- 'double _Complex': PRIM_DOUBLECOMPLEX,
- '_Bool': PRIM_BOOL,
- 'wchar_t': PRIM_WCHAR,
- 'char16_t': PRIM_CHAR16,
- 'char32_t': PRIM_CHAR32,
- 'int8_t': PRIM_INT8,
- 'uint8_t': PRIM_UINT8,
- 'int16_t': PRIM_INT16,
- 'uint16_t': PRIM_UINT16,
- 'int32_t': PRIM_INT32,
- 'uint32_t': PRIM_UINT32,
- 'int64_t': PRIM_INT64,
- 'uint64_t': PRIM_UINT64,
- 'intptr_t': PRIM_INTPTR,
- 'uintptr_t': PRIM_UINTPTR,
- 'ptrdiff_t': PRIM_PTRDIFF,
- 'size_t': PRIM_SIZE,
- 'ssize_t': PRIM_SSIZE,
- 'int_least8_t': PRIM_INT_LEAST8,
- 'uint_least8_t': PRIM_UINT_LEAST8,
- 'int_least16_t': PRIM_INT_LEAST16,
- 'uint_least16_t': PRIM_UINT_LEAST16,
- 'int_least32_t': PRIM_INT_LEAST32,
- 'uint_least32_t': PRIM_UINT_LEAST32,
- 'int_least64_t': PRIM_INT_LEAST64,
- 'uint_least64_t': PRIM_UINT_LEAST64,
- 'int_fast8_t': PRIM_INT_FAST8,
- 'uint_fast8_t': PRIM_UINT_FAST8,
- 'int_fast16_t': PRIM_INT_FAST16,
- 'uint_fast16_t': PRIM_UINT_FAST16,
- 'int_fast32_t': PRIM_INT_FAST32,
- 'uint_fast32_t': PRIM_UINT_FAST32,
- 'int_fast64_t': PRIM_INT_FAST64,
- 'uint_fast64_t': PRIM_UINT_FAST64,
- 'intmax_t': PRIM_INTMAX,
- 'uintmax_t': PRIM_UINTMAX,
- }
-
-F_UNION = 0x01
-F_CHECK_FIELDS = 0x02
-F_PACKED = 0x04
-F_EXTERNAL = 0x08
-F_OPAQUE = 0x10
-
-G_FLAGS = dict([('_CFFI_' + _key, globals()[_key])
- for _key in ['F_UNION', 'F_CHECK_FIELDS', 'F_PACKED',
- 'F_EXTERNAL', 'F_OPAQUE']])
-
-CLASS_NAME = {}
-for _name, _value in list(globals().items()):
- if _name.startswith('OP_') and isinstance(_value, int):
- CLASS_NAME[_value] = _name[3:]
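
For context on the file removed above: CffiOp.as_python_bytes() packs an opcode and its argument into one 32-bit word, (arg << 8) | op, and format_four_bytes() renders it as four escaped bytes, big-endian. A short sketch reusing that helper (the OP_POINTER/arg values below are just example inputs):

def format_four_bytes(num):
    # same helper as in the removed cffi_opcode.py
    return '\\x%02X\\x%02X\\x%02X\\x%02X' % (
        (num >> 24) & 0xFF,
        (num >> 16) & 0xFF,
        (num >> 8) & 0xFF,
        (num) & 0xFF)

OP_POINTER = 3
arg = 7                               # e.g. the index of the pointed-to type
packed = (arg << 8) | OP_POINTER
print(hex(packed))                    # 0x703
print(format_four_bytes(packed))      # \x00\x00\x07\x03
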
diff --git a/functions/source/GitPullS3/cffi/commontypes.py b/functions/source/GitPullS3/cffi/commontypes.py
deleted file mode 100644
index 8ec97c7..0000000
--- a/functions/source/GitPullS3/cffi/commontypes.py
+++ /dev/null
@@ -1,80 +0,0 @@
-import sys
-from . import model
-from .error import FFIError
-
-
-COMMON_TYPES = {}
-
-try:
- # fetch "bool" and all simple Windows types
- from _cffi_backend import _get_common_types
- _get_common_types(COMMON_TYPES)
-except ImportError:
- pass
-
-COMMON_TYPES['FILE'] = model.unknown_type('FILE', '_IO_FILE')
-COMMON_TYPES['bool'] = '_Bool' # in case we got ImportError above
-
-for _type in model.PrimitiveType.ALL_PRIMITIVE_TYPES:
- if _type.endswith('_t'):
- COMMON_TYPES[_type] = _type
-del _type
-
-_CACHE = {}
-
-def resolve_common_type(parser, commontype):
- try:
- return _CACHE[commontype]
- except KeyError:
- cdecl = COMMON_TYPES.get(commontype, commontype)
- if not isinstance(cdecl, str):
- result, quals = cdecl, 0 # cdecl is already a BaseType
- elif cdecl in model.PrimitiveType.ALL_PRIMITIVE_TYPES:
- result, quals = model.PrimitiveType(cdecl), 0
- elif cdecl == 'set-unicode-needed':
- raise FFIError("The Windows type %r is only available after "
- "you call ffi.set_unicode()" % (commontype,))
- else:
- if commontype == cdecl:
- raise FFIError(
- "Unsupported type: %r. Please look at "
- "http://cffi.readthedocs.io/en/latest/cdef.html#ffi-cdef-limitations "
- "and file an issue if you think this type should really "
- "be supported." % (commontype,))
- result, quals = parser.parse_type_and_quals(cdecl) # recursive
-
- assert isinstance(result, model.BaseTypeByIdentity)
- _CACHE[commontype] = result, quals
- return result, quals
-
-
-# ____________________________________________________________
-# extra types for Windows (most of them are in commontypes.c)
-
-
-def win_common_types():
- return {
- "UNICODE_STRING": model.StructType(
- "_UNICODE_STRING",
- ["Length",
- "MaximumLength",
- "Buffer"],
- [model.PrimitiveType("unsigned short"),
- model.PrimitiveType("unsigned short"),
- model.PointerType(model.PrimitiveType("wchar_t"))],
- [-1, -1, -1]),
- "PUNICODE_STRING": "UNICODE_STRING *",
- "PCUNICODE_STRING": "const UNICODE_STRING *",
-
- "TBYTE": "set-unicode-needed",
- "TCHAR": "set-unicode-needed",
- "LPCTSTR": "set-unicode-needed",
- "PCTSTR": "set-unicode-needed",
- "LPTSTR": "set-unicode-needed",
- "PTSTR": "set-unicode-needed",
- "PTBYTE": "set-unicode-needed",
- "PTCHAR": "set-unicode-needed",
- }
-
-if sys.platform == 'win32':
- COMMON_TYPES.update(win_common_types())
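
resolve_common_type() in the removed commontypes.py is essentially a memoized lookup: the first resolution of a type name is stored in _CACHE and reused by later cdef() calls. A stand-alone sketch of that pattern (the ALIASES table and the resolve() name are hypothetical, for illustration only):

_CACHE = {}
ALIASES = {'uint32_t': 'unsigned int', 'bool': '_Bool'}

def resolve(name):
    try:
        return _CACHE[name]              # hit: reuse the earlier result
    except KeyError:
        result = ALIASES.get(name, name) # fall back to the name itself
        _CACHE[name] = result
        return result

print(resolve('uint32_t'))  # 'unsigned int', now cached
print(resolve('uint32_t'))  # served from _CACHE on the second call
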
diff --git a/functions/source/GitPullS3/cffi/cparser.py b/functions/source/GitPullS3/cffi/cparser.py
deleted file mode 100644
index f7e2e35..0000000
--- a/functions/source/GitPullS3/cffi/cparser.py
+++ /dev/null
@@ -1,891 +0,0 @@
-from . import model
-from .commontypes import COMMON_TYPES, resolve_common_type
-from .error import FFIError, CDefError
-try:
- from . import _pycparser as pycparser
-except ImportError:
- import pycparser
-import weakref, re, sys
-
-try:
- if sys.version_info < (3,):
- import thread as _thread
- else:
- import _thread
- lock = _thread.allocate_lock()
-except ImportError:
- lock = None
-
-CDEF_SOURCE_STRING = "<cdef source string>"
-_r_comment = re.compile(r"/\*.*?\*/|//([^\n\\]|\\.)*?$",
- re.DOTALL | re.MULTILINE)
-_r_define = re.compile(r"^\s*#\s*define\s+([A-Za-z_][A-Za-z_0-9]*)"
- r"\b((?:[^\n\\]|\\.)*?)$",
- re.DOTALL | re.MULTILINE)
-_r_partial_enum = re.compile(r"=\s*\.\.\.\s*[,}]|\.\.\.\s*\}")
-_r_enum_dotdotdot = re.compile(r"__dotdotdot\d+__$")
-_r_partial_array = re.compile(r"\[\s*\.\.\.\s*\]")
-_r_words = re.compile(r"\w+|\S")
-_parser_cache = None
-_r_int_literal = re.compile(r"-?0?x?[0-9a-f]+[lu]*$", re.IGNORECASE)
-_r_stdcall1 = re.compile(r"\b(__stdcall|WINAPI)\b")
-_r_stdcall2 = re.compile(r"[(]\s*(__stdcall|WINAPI)\b")
-_r_cdecl = re.compile(r"\b__cdecl\b")
-_r_extern_python = re.compile(r'\bextern\s*"'
- r'(Python|Python\s*\+\s*C|C\s*\+\s*Python)"\s*.')
-_r_star_const_space = re.compile( # matches "* const "
- r"[*]\s*((const|volatile|restrict)\b\s*)+")
-_r_int_dotdotdot = re.compile(r"(\b(int|long|short|signed|unsigned|char)\s*)+"
- r"\.\.\.")
-_r_float_dotdotdot = re.compile(r"\b(double|float)\s*\.\.\.")
-
-def _get_parser():
- global _parser_cache
- if _parser_cache is None:
- _parser_cache = pycparser.CParser()
- return _parser_cache
-
-def _workaround_for_old_pycparser(csource):
- # Workaround for a pycparser issue (fixed between pycparser 2.10 and
- # 2.14): "char*const***" gives us a wrong syntax tree, the same as
- # for "char***(*const)". This means we can't tell the difference
- # afterwards. But "char(*const(***))" gives us the right syntax
- # tree. The issue only occurs if there are several stars in
-    # sequence with no parentheses in between, just possibly qualifiers.
- # Attempt to fix it by adding some parentheses in the source: each
- # time we see "* const" or "* const *", we add an opening
- # parenthesis before each star---the hard part is figuring out where
- # to close them.
- parts = []
- while True:
- match = _r_star_const_space.search(csource)
- if not match:
- break
- #print repr(''.join(parts)+csource), '=>',
- parts.append(csource[:match.start()])
- parts.append('('); closing = ')'
- parts.append(match.group()) # e.g. "* const "
- endpos = match.end()
- if csource.startswith('*', endpos):
- parts.append('('); closing += ')'
- level = 0
- i = endpos
- while i < len(csource):
- c = csource[i]
- if c == '(':
- level += 1
- elif c == ')':
- if level == 0:
- break
- level -= 1
- elif c in ',;=':
- if level == 0:
- break
- i += 1
- csource = csource[endpos:i] + closing + csource[i:]
- #print repr(''.join(parts)+csource)
- parts.append(csource)
- return ''.join(parts)
-
-def _preprocess_extern_python(csource):
- # input: `extern "Python" int foo(int);` or
- # `extern "Python" { int foo(int); }`
- # output:
- # void __cffi_extern_python_start;
- # int foo(int);
- # void __cffi_extern_python_stop;
- #
- # input: `extern "Python+C" int foo(int);`
- # output:
- # void __cffi_extern_python_plus_c_start;
- # int foo(int);
- # void __cffi_extern_python_stop;
- parts = []
- while True:
- match = _r_extern_python.search(csource)
- if not match:
- break
- endpos = match.end() - 1
- #print
- #print ''.join(parts)+csource
- #print '=>'
- parts.append(csource[:match.start()])
- if 'C' in match.group(1):
- parts.append('void __cffi_extern_python_plus_c_start; ')
- else:
- parts.append('void __cffi_extern_python_start; ')
- if csource[endpos] == '{':
- # grouping variant
- closing = csource.find('}', endpos)
- if closing < 0:
- raise CDefError("'extern \"Python\" {': no '}' found")
- if csource.find('{', endpos + 1, closing) >= 0:
- raise NotImplementedError("cannot use { } inside a block "
- "'extern \"Python\" { ... }'")
- parts.append(csource[endpos+1:closing])
- csource = csource[closing+1:]
- else:
- # non-grouping variant
- semicolon = csource.find(';', endpos)
- if semicolon < 0:
- raise CDefError("'extern \"Python\": no ';' found")
- parts.append(csource[endpos:semicolon+1])
- csource = csource[semicolon+1:]
- parts.append(' void __cffi_extern_python_stop;')
- #print ''.join(parts)+csource
- #print
- parts.append(csource)
- return ''.join(parts)
-
-def _preprocess(csource):
-    # Remove comments. NOTE: this only works because the cdef() section
- # should not contain any string literal!
- csource = _r_comment.sub(' ', csource)
- # Remove the "#define FOO x" lines
- macros = {}
- for match in _r_define.finditer(csource):
- macroname, macrovalue = match.groups()
- macrovalue = macrovalue.replace('\\\n', '').strip()
- macros[macroname] = macrovalue
- csource = _r_define.sub('', csource)
- #
- if pycparser.__version__ < '2.14':
- csource = _workaround_for_old_pycparser(csource)
- #
-    # BIG HACK: replace WINAPI or __stdcall with "volatile volatile const".
- # It doesn't make sense for the return type of a function to be
- # "volatile volatile const", so we abuse it to detect __stdcall...
- # Hack number 2 is that "int(volatile *fptr)();" is not valid C
- # syntax, so we place the "volatile" before the opening parenthesis.
- csource = _r_stdcall2.sub(' volatile volatile const(', csource)
- csource = _r_stdcall1.sub(' volatile volatile const ', csource)
- csource = _r_cdecl.sub(' ', csource)
- #
- # Replace `extern "Python"` with start/end markers
- csource = _preprocess_extern_python(csource)
- #
- # Replace "[...]" with "[__dotdotdotarray__]"
- csource = _r_partial_array.sub('[__dotdotdotarray__]', csource)
- #
- # Replace "...}" with "__dotdotdotNUM__}". This construction should
- # occur only at the end of enums; at the end of structs we have "...;}"
- # and at the end of vararg functions "...);". Also replace "=...[,}]"
- # with ",__dotdotdotNUM__[,}]": this occurs in the enums too, when
- # giving an unknown value.
- matches = list(_r_partial_enum.finditer(csource))
- for number, match in enumerate(reversed(matches)):
- p = match.start()
- if csource[p] == '=':
- p2 = csource.find('...', p, match.end())
- assert p2 > p
- csource = '%s,__dotdotdot%d__ %s' % (csource[:p], number,
- csource[p2+3:])
- else:
- assert csource[p:p+3] == '...'
- csource = '%s __dotdotdot%d__ %s' % (csource[:p], number,
- csource[p+3:])
- # Replace "int ..." or "unsigned long int..." with "__dotdotdotint__"
- csource = _r_int_dotdotdot.sub(' __dotdotdotint__ ', csource)
- # Replace "float ..." or "double..." with "__dotdotdotfloat__"
- csource = _r_float_dotdotdot.sub(' __dotdotdotfloat__ ', csource)
- # Replace all remaining "..." with the same name, "__dotdotdot__",
- # which is declared with a typedef for the purpose of C parsing.
- return csource.replace('...', ' __dotdotdot__ '), macros
-
-def _common_type_names(csource):
- # Look in the source for what looks like usages of types from the
- # list of common types. A "usage" is approximated here as the
- # appearance of the word, minus a "definition" of the type, which
-    # is the last word in a "typedef" statement. This is approximate
-    # only, but should be fine for all the common types.
- look_for_words = set(COMMON_TYPES)
- look_for_words.add(';')
- look_for_words.add(',')
- look_for_words.add('(')
- look_for_words.add(')')
- look_for_words.add('typedef')
- words_used = set()
- is_typedef = False
- paren = 0
- previous_word = ''
- for word in _r_words.findall(csource):
- if word in look_for_words:
- if word == ';':
- if is_typedef:
- words_used.discard(previous_word)
- look_for_words.discard(previous_word)
- is_typedef = False
- elif word == 'typedef':
- is_typedef = True
- paren = 0
- elif word == '(':
- paren += 1
- elif word == ')':
- paren -= 1
- elif word == ',':
- if is_typedef and paren == 0:
- words_used.discard(previous_word)
- look_for_words.discard(previous_word)
- else: # word in COMMON_TYPES
- words_used.add(word)
- previous_word = word
- return words_used
-
-
-class Parser(object):
-
- def __init__(self):
- self._declarations = {}
- self._included_declarations = set()
- self._anonymous_counter = 0
- self._structnode2type = weakref.WeakKeyDictionary()
- self._options = {}
- self._int_constants = {}
- self._recomplete = []
- self._uses_new_feature = None
-
- def _parse(self, csource):
- csource, macros = _preprocess(csource)
- # XXX: for more efficiency we would need to poke into the
- # internals of CParser... the following registers the
- # typedefs, because their presence or absence influences the
- # parsing itself (but what they are typedef'ed to plays no role)
- ctn = _common_type_names(csource)
- typenames = []
- for name in sorted(self._declarations):
- if name.startswith('typedef '):
- name = name[8:]
- typenames.append(name)
- ctn.discard(name)
- typenames += sorted(ctn)
- #
- csourcelines = []
-        csourcelines.append('# 1 "<cdef automatic initialization code>"')
- for typename in typenames:
- csourcelines.append('typedef int %s;' % typename)
- csourcelines.append('typedef int __dotdotdotint__, __dotdotdotfloat__,'
- ' __dotdotdot__;')
- # this forces pycparser to consider the following in the file
-        # called <cdef source string> from line 1
- csourcelines.append('# 1 "%s"' % (CDEF_SOURCE_STRING,))
- csourcelines.append(csource)
- fullcsource = '\n'.join(csourcelines)
- if lock is not None:
- lock.acquire() # pycparser is not thread-safe...
- try:
- ast = _get_parser().parse(fullcsource)
- except pycparser.c_parser.ParseError as e:
- self.convert_pycparser_error(e, csource)
- finally:
- if lock is not None:
- lock.release()
- # csource will be used to find buggy source text
- return ast, macros, csource
-
- def _convert_pycparser_error(self, e, csource):
- # xxx look for ":NUM:" at the start of str(e)
- # and interpret that as a line number. This will not work if
- # the user gives explicit ``# NUM "FILE"`` directives.
- line = None
- msg = str(e)
- match = re.match(r"%s:(\d+):" % (CDEF_SOURCE_STRING,), msg)
- if match:
- linenum = int(match.group(1), 10)
- csourcelines = csource.splitlines()
- if 1 <= linenum <= len(csourcelines):
- line = csourcelines[linenum-1]
- return line
-
- def convert_pycparser_error(self, e, csource):
- line = self._convert_pycparser_error(e, csource)
-
- msg = str(e)
- if line:
- msg = 'cannot parse "%s"\n%s' % (line.strip(), msg)
- else:
- msg = 'parse error\n%s' % (msg,)
- raise CDefError(msg)
-
- def parse(self, csource, override=False, packed=False, dllexport=False):
- prev_options = self._options
- try:
- self._options = {'override': override,
- 'packed': packed,
- 'dllexport': dllexport}
- self._internal_parse(csource)
- finally:
- self._options = prev_options
-
- def _internal_parse(self, csource):
- ast, macros, csource = self._parse(csource)
- # add the macros
- self._process_macros(macros)
- # find the first "__dotdotdot__" and use that as a separator
- # between the repeated typedefs and the real csource
- iterator = iter(ast.ext)
- for decl in iterator:
- if decl.name == '__dotdotdot__':
- break
- else:
- assert 0
- current_decl = None
- #
- try:
- self._inside_extern_python = '__cffi_extern_python_stop'
- for decl in iterator:
- current_decl = decl
- if isinstance(decl, pycparser.c_ast.Decl):
- self._parse_decl(decl)
- elif isinstance(decl, pycparser.c_ast.Typedef):
- if not decl.name:
- raise CDefError("typedef does not declare any name",
- decl)
- quals = 0
- if (isinstance(decl.type.type, pycparser.c_ast.IdentifierType) and
- decl.type.type.names[-1].startswith('__dotdotdot')):
- realtype = self._get_unknown_type(decl)
- elif (isinstance(decl.type, pycparser.c_ast.PtrDecl) and
- isinstance(decl.type.type, pycparser.c_ast.TypeDecl) and
- isinstance(decl.type.type.type,
- pycparser.c_ast.IdentifierType) and
- decl.type.type.type.names[-1].startswith('__dotdotdot')):
- realtype = self._get_unknown_ptr_type(decl)
- else:
- realtype, quals = self._get_type_and_quals(
- decl.type, name=decl.name, partial_length_ok=True)
- self._declare('typedef ' + decl.name, realtype, quals=quals)
- elif decl.__class__.__name__ == 'Pragma':
- pass # skip pragma, only in pycparser 2.15
- else:
- raise CDefError("unexpected <%s>: this construct is valid "
- "C but not valid in cdef()" %
- decl.__class__.__name__, decl)
- except CDefError as e:
- if len(e.args) == 1:
- e.args = e.args + (current_decl,)
- raise
- except FFIError as e:
- msg = self._convert_pycparser_error(e, csource)
- if msg:
- e.args = (e.args[0] + "\n *** Err: %s" % msg,)
- raise
-
- def _add_constants(self, key, val):
- if key in self._int_constants:
- if self._int_constants[key] == val:
- return # ignore identical double declarations
- raise FFIError(
- "multiple declarations of constant: %s" % (key,))
- self._int_constants[key] = val
-
- def _add_integer_constant(self, name, int_str):
- int_str = int_str.lower().rstrip("ul")
- neg = int_str.startswith('-')
- if neg:
- int_str = int_str[1:]
- # "010" is not valid oct in py3
- if (int_str.startswith("0") and int_str != '0'
- and not int_str.startswith("0x")):
- int_str = "0o" + int_str[1:]
- pyvalue = int(int_str, 0)
- if neg:
- pyvalue = -pyvalue
- self._add_constants(name, pyvalue)
- self._declare('macro ' + name, pyvalue)
-
- def _process_macros(self, macros):
- for key, value in macros.items():
- value = value.strip()
- if _r_int_literal.match(value):
- self._add_integer_constant(key, value)
- elif value == '...':
- self._declare('macro ' + key, value)
- else:
- raise CDefError(
- 'only supports one of the following syntax:\n'
- ' #define %s ... (literally dot-dot-dot)\n'
- ' #define %s NUMBER (with NUMBER an integer'
- ' constant, decimal/hex/octal)\n'
- 'got:\n'
- ' #define %s %s'
- % (key, key, key, value))
-
- def _declare_function(self, tp, quals, decl):
- tp = self._get_type_pointer(tp, quals)
- if self._options.get('dllexport'):
- tag = 'dllexport_python '
- elif self._inside_extern_python == '__cffi_extern_python_start':
- tag = 'extern_python '
- elif self._inside_extern_python == '__cffi_extern_python_plus_c_start':
- tag = 'extern_python_plus_c '
- else:
- tag = 'function '
- self._declare(tag + decl.name, tp)
-
- def _parse_decl(self, decl):
- node = decl.type
- if isinstance(node, pycparser.c_ast.FuncDecl):
- tp, quals = self._get_type_and_quals(node, name=decl.name)
- assert isinstance(tp, model.RawFunctionType)
- self._declare_function(tp, quals, decl)
- else:
- if isinstance(node, pycparser.c_ast.Struct):
- self._get_struct_union_enum_type('struct', node)
- elif isinstance(node, pycparser.c_ast.Union):
- self._get_struct_union_enum_type('union', node)
- elif isinstance(node, pycparser.c_ast.Enum):
- self._get_struct_union_enum_type('enum', node)
- elif not decl.name:
- raise CDefError("construct does not declare any variable",
- decl)
- #
- if decl.name:
- tp, quals = self._get_type_and_quals(node,
- partial_length_ok=True)
- if tp.is_raw_function:
- self._declare_function(tp, quals, decl)
- elif (tp.is_integer_type() and
- hasattr(decl, 'init') and
- hasattr(decl.init, 'value') and
- _r_int_literal.match(decl.init.value)):
- self._add_integer_constant(decl.name, decl.init.value)
- elif (tp.is_integer_type() and
- isinstance(decl.init, pycparser.c_ast.UnaryOp) and
- decl.init.op == '-' and
- hasattr(decl.init.expr, 'value') and
- _r_int_literal.match(decl.init.expr.value)):
- self._add_integer_constant(decl.name,
- '-' + decl.init.expr.value)
- elif (tp is model.void_type and
- decl.name.startswith('__cffi_extern_python_')):
- # hack: `extern "Python"` in the C source is replaced
- # with "void __cffi_extern_python_start;" and
- # "void __cffi_extern_python_stop;"
- self._inside_extern_python = decl.name
- else:
- if self._inside_extern_python !='__cffi_extern_python_stop':
- raise CDefError(
- "cannot declare constants or "
- "variables with 'extern \"Python\"'")
- if (quals & model.Q_CONST) and not tp.is_array_type:
- self._declare('constant ' + decl.name, tp, quals=quals)
- else:
- self._declare('variable ' + decl.name, tp, quals=quals)
-
- def parse_type(self, cdecl):
- return self.parse_type_and_quals(cdecl)[0]
-
- def parse_type_and_quals(self, cdecl):
- ast, macros = self._parse('void __dummy(\n%s\n);' % cdecl)[:2]
- assert not macros
- exprnode = ast.ext[-1].type.args.params[0]
- if isinstance(exprnode, pycparser.c_ast.ID):
- raise CDefError("unknown identifier '%s'" % (exprnode.name,))
- return self._get_type_and_quals(exprnode.type)
-
- def _declare(self, name, obj, included=False, quals=0):
- if name in self._declarations:
- prevobj, prevquals = self._declarations[name]
- if prevobj is obj and prevquals == quals:
- return
- if not self._options.get('override'):
- raise FFIError(
- "multiple declarations of %s (for interactive usage, "
- "try cdef(xx, override=True))" % (name,))
- assert '__dotdotdot__' not in name.split()
- self._declarations[name] = (obj, quals)
- if included:
- self._included_declarations.add(obj)
-
- def _extract_quals(self, type):
- quals = 0
- if isinstance(type, (pycparser.c_ast.TypeDecl,
- pycparser.c_ast.PtrDecl)):
- if 'const' in type.quals:
- quals |= model.Q_CONST
- if 'volatile' in type.quals:
- quals |= model.Q_VOLATILE
- if 'restrict' in type.quals:
- quals |= model.Q_RESTRICT
- return quals
-
- def _get_type_pointer(self, type, quals, declname=None):
- if isinstance(type, model.RawFunctionType):
- return type.as_function_pointer()
- if (isinstance(type, model.StructOrUnionOrEnum) and
- type.name.startswith('$') and type.name[1:].isdigit() and
- type.forcename is None and declname is not None):
- return model.NamedPointerType(type, declname, quals)
- return model.PointerType(type, quals)
-
- def _get_type_and_quals(self, typenode, name=None, partial_length_ok=False):
- # first, dereference typedefs, if we have it already parsed, we're good
- if (isinstance(typenode, pycparser.c_ast.TypeDecl) and
- isinstance(typenode.type, pycparser.c_ast.IdentifierType) and
- len(typenode.type.names) == 1 and
- ('typedef ' + typenode.type.names[0]) in self._declarations):
- tp, quals = self._declarations['typedef ' + typenode.type.names[0]]
- quals |= self._extract_quals(typenode)
- return tp, quals
- #
- if isinstance(typenode, pycparser.c_ast.ArrayDecl):
- # array type
- if typenode.dim is None:
- length = None
- else:
- length = self._parse_constant(
- typenode.dim, partial_length_ok=partial_length_ok)
- tp, quals = self._get_type_and_quals(typenode.type,
- partial_length_ok=partial_length_ok)
- return model.ArrayType(tp, length), quals
- #
- if isinstance(typenode, pycparser.c_ast.PtrDecl):
- # pointer type
- itemtype, itemquals = self._get_type_and_quals(typenode.type)
- tp = self._get_type_pointer(itemtype, itemquals, declname=name)
- quals = self._extract_quals(typenode)
- return tp, quals
- #
- if isinstance(typenode, pycparser.c_ast.TypeDecl):
- quals = self._extract_quals(typenode)
- type = typenode.type
- if isinstance(type, pycparser.c_ast.IdentifierType):
- # assume a primitive type. get it from .names, but reduce
- # synonyms to a single chosen combination
- names = list(type.names)
- if names != ['signed', 'char']: # keep this unmodified
- prefixes = {}
- while names:
- name = names[0]
- if name in ('short', 'long', 'signed', 'unsigned'):
- prefixes[name] = prefixes.get(name, 0) + 1
- del names[0]
- else:
- break
- # ignore the 'signed' prefix below, and reorder the others
- newnames = []
- for prefix in ('unsigned', 'short', 'long'):
- for i in range(prefixes.get(prefix, 0)):
- newnames.append(prefix)
- if not names:
- names = ['int'] # implicitly
- if names == ['int']: # but kill it if 'short' or 'long'
- if 'short' in prefixes or 'long' in prefixes:
- names = []
- names = newnames + names
- ident = ' '.join(names)
- if ident == 'void':
- return model.void_type, quals
- if ident == '__dotdotdot__':
- raise FFIError(':%d: bad usage of "..."' %
- typenode.coord.line)
- tp0, quals0 = resolve_common_type(self, ident)
- return tp0, (quals | quals0)
- #
- if isinstance(type, pycparser.c_ast.Struct):
- # 'struct foobar'
- tp = self._get_struct_union_enum_type('struct', type, name)
- return tp, quals
- #
- if isinstance(type, pycparser.c_ast.Union):
- # 'union foobar'
- tp = self._get_struct_union_enum_type('union', type, name)
- return tp, quals
- #
- if isinstance(type, pycparser.c_ast.Enum):
- # 'enum foobar'
- tp = self._get_struct_union_enum_type('enum', type, name)
- return tp, quals
- #
- if isinstance(typenode, pycparser.c_ast.FuncDecl):
- # a function type
- return self._parse_function_type(typenode, name), 0
- #
- # nested anonymous structs or unions end up here
- if isinstance(typenode, pycparser.c_ast.Struct):
- return self._get_struct_union_enum_type('struct', typenode, name,
- nested=True), 0
- if isinstance(typenode, pycparser.c_ast.Union):
- return self._get_struct_union_enum_type('union', typenode, name,
- nested=True), 0
- #
- raise FFIError(":%d: bad or unsupported type declaration" %
- typenode.coord.line)
-
- def _parse_function_type(self, typenode, funcname=None):
- params = list(getattr(typenode.args, 'params', []))
- for i, arg in enumerate(params):
- if not hasattr(arg, 'type'):
- raise CDefError("%s arg %d: unknown type '%s'"
- " (if you meant to use the old C syntax of giving"
- " untyped arguments, it is not supported)"
- % (funcname or 'in expression', i + 1,
- getattr(arg, 'name', '?')))
- ellipsis = (
- len(params) > 0 and
- isinstance(params[-1].type, pycparser.c_ast.TypeDecl) and
- isinstance(params[-1].type.type,
- pycparser.c_ast.IdentifierType) and
- params[-1].type.type.names == ['__dotdotdot__'])
- if ellipsis:
- params.pop()
- if not params:
- raise CDefError(
- "%s: a function with only '(...)' as argument"
- " is not correct C" % (funcname or 'in expression'))
- args = [self._as_func_arg(*self._get_type_and_quals(argdeclnode.type))
- for argdeclnode in params]
- if not ellipsis and args == [model.void_type]:
- args = []
- result, quals = self._get_type_and_quals(typenode.type)
-        # the 'quals' on the result type are ignored. HACK: we abuse them
- # to detect __stdcall functions: we textually replace "__stdcall"
- # with "volatile volatile const" above.
- abi = None
- if hasattr(typenode.type, 'quals'): # else, probable syntax error anyway
- if typenode.type.quals[-3:] == ['volatile', 'volatile', 'const']:
- abi = '__stdcall'
- return model.RawFunctionType(tuple(args), result, ellipsis, abi)
-
- def _as_func_arg(self, type, quals):
- if isinstance(type, model.ArrayType):
- return model.PointerType(type.item, quals)
- elif isinstance(type, model.RawFunctionType):
- return type.as_function_pointer()
- else:
- return type
-
- def _get_struct_union_enum_type(self, kind, type, name=None, nested=False):
- # First, a level of caching on the exact 'type' node of the AST.
- # This is obscure, but needed because pycparser "unrolls" declarations
- # such as "typedef struct { } foo_t, *foo_p" and we end up with
- # an AST that is not a tree, but a DAG, with the "type" node of the
- # two branches foo_t and foo_p of the trees being the same node.
- # It's a bit silly but detecting "DAG-ness" in the AST tree seems
- # to be the only way to distinguish this case from two independent
- # structs. See test_struct_with_two_usages.
- try:
- return self._structnode2type[type]
- except KeyError:
- pass
- #
- # Note that this must handle parsing "struct foo" any number of
- # times and always return the same StructType object. Additionally,
- # one of these times (not necessarily the first), the fields of
- # the struct can be specified with "struct foo { ...fields... }".
- # If no name is given, then we have to create a new anonymous struct
- # with no caching; in this case, the fields are either specified
- # right now or never.
- #
- force_name = name
- name = type.name
- #
- # get the type or create it if needed
- if name is None:
- # 'force_name' is used to guess a more readable name for
- # anonymous structs, for the common case "typedef struct { } foo".
- if force_name is not None:
- explicit_name = '$%s' % force_name
- else:
- self._anonymous_counter += 1
- explicit_name = '$%d' % self._anonymous_counter
- tp = None
- else:
- explicit_name = name
- key = '%s %s' % (kind, name)
- tp, _ = self._declarations.get(key, (None, None))
- #
- if tp is None:
- if kind == 'struct':
- tp = model.StructType(explicit_name, None, None, None)
- elif kind == 'union':
- tp = model.UnionType(explicit_name, None, None, None)
- elif kind == 'enum':
- if explicit_name == '__dotdotdot__':
- raise CDefError("Enums cannot be declared with ...")
- tp = self._build_enum_type(explicit_name, type.values)
- else:
- raise AssertionError("kind = %r" % (kind,))
- if name is not None:
- self._declare(key, tp)
- else:
- if kind == 'enum' and type.values is not None:
- raise NotImplementedError(
-                    "enum %s: the '{}' declaration should appear the first "
- "time the enum is mentioned, not later" % explicit_name)
- if not tp.forcename:
- tp.force_the_name(force_name)
- if tp.forcename and '$' in tp.name:
- self._declare('anonymous %s' % tp.forcename, tp)
- #
- self._structnode2type[type] = tp
- #
- # enums: done here
- if kind == 'enum':
- return tp
- #
- # is there a 'type.decls'? If yes, then this is the place in the
- # C sources that declare the fields. If no, then just return the
- # existing type, possibly still incomplete.
- if type.decls is None:
- return tp
- #
- if tp.fldnames is not None:
- raise CDefError("duplicate declaration of struct %s" % name)
- fldnames = []
- fldtypes = []
- fldbitsize = []
- fldquals = []
- for decl in type.decls:
- if (isinstance(decl.type, pycparser.c_ast.IdentifierType) and
- ''.join(decl.type.names) == '__dotdotdot__'):
- # XXX pycparser is inconsistent: 'names' should be a list
- # of strings, but is sometimes just one string. Use
- # str.join() as a way to cope with both.
- self._make_partial(tp, nested)
- continue
- if decl.bitsize is None:
- bitsize = -1
- else:
- bitsize = self._parse_constant(decl.bitsize)
- self._partial_length = False
- type, fqual = self._get_type_and_quals(decl.type,
- partial_length_ok=True)
- if self._partial_length:
- self._make_partial(tp, nested)
- if isinstance(type, model.StructType) and type.partial:
- self._make_partial(tp, nested)
- fldnames.append(decl.name or '')
- fldtypes.append(type)
- fldbitsize.append(bitsize)
- fldquals.append(fqual)
- tp.fldnames = tuple(fldnames)
- tp.fldtypes = tuple(fldtypes)
- tp.fldbitsize = tuple(fldbitsize)
- tp.fldquals = tuple(fldquals)
- if fldbitsize != [-1] * len(fldbitsize):
- if isinstance(tp, model.StructType) and tp.partial:
- raise NotImplementedError("%s: using both bitfields and '...;'"
- % (tp,))
- tp.packed = self._options.get('packed')
- if tp.completed: # must be re-completed: it is not opaque any more
- tp.completed = 0
- self._recomplete.append(tp)
- return tp
-
- def _make_partial(self, tp, nested):
- if not isinstance(tp, model.StructOrUnion):
- raise CDefError("%s cannot be partial" % (tp,))
- if not tp.has_c_name() and not nested:
- raise NotImplementedError("%s is partial but has no C name" %(tp,))
- tp.partial = True
-
- def _parse_constant(self, exprnode, partial_length_ok=False):
- # for now, limited to expressions that are an immediate number
- # or positive/negative number
- if isinstance(exprnode, pycparser.c_ast.Constant):
- s = exprnode.value
- if s.startswith('0'):
- if s.startswith('0x') or s.startswith('0X'):
- return int(s, 16)
- return int(s, 8)
- elif '1' <= s[0] <= '9':
- return int(s, 10)
- elif s[0] == "'" and s[-1] == "'" and (
- len(s) == 3 or (len(s) == 4 and s[1] == "\\")):
- return ord(s[-2])
- else:
- raise CDefError("invalid constant %r" % (s,))
- #
- if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and
- exprnode.op == '+'):
- return self._parse_constant(exprnode.expr)
- #
- if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and
- exprnode.op == '-'):
- return -self._parse_constant(exprnode.expr)
- # load previously defined int constant
- if (isinstance(exprnode, pycparser.c_ast.ID) and
- exprnode.name in self._int_constants):
- return self._int_constants[exprnode.name]
- #
- if (isinstance(exprnode, pycparser.c_ast.ID) and
- exprnode.name == '__dotdotdotarray__'):
- if partial_length_ok:
- self._partial_length = True
- return '...'
- raise FFIError(":%d: unsupported '[...]' here, cannot derive "
- "the actual array length in this context"
- % exprnode.coord.line)
- #
- if (isinstance(exprnode, pycparser.c_ast.BinaryOp) and
- exprnode.op == '+'):
- return (self._parse_constant(exprnode.left) +
- self._parse_constant(exprnode.right))
- #
- if (isinstance(exprnode, pycparser.c_ast.BinaryOp) and
- exprnode.op == '-'):
- return (self._parse_constant(exprnode.left) -
- self._parse_constant(exprnode.right))
- #
- raise FFIError(":%d: unsupported expression: expected a "
- "simple numeric constant" % exprnode.coord.line)
-
- def _build_enum_type(self, explicit_name, decls):
- if decls is not None:
- partial = False
- enumerators = []
- enumvalues = []
- nextenumvalue = 0
- for enum in decls.enumerators:
- if _r_enum_dotdotdot.match(enum.name):
- partial = True
- continue
- if enum.value is not None:
- nextenumvalue = self._parse_constant(enum.value)
- enumerators.append(enum.name)
- enumvalues.append(nextenumvalue)
- self._add_constants(enum.name, nextenumvalue)
- nextenumvalue += 1
- enumerators = tuple(enumerators)
- enumvalues = tuple(enumvalues)
- tp = model.EnumType(explicit_name, enumerators, enumvalues)
- tp.partial = partial
- else: # opaque enum
- tp = model.EnumType(explicit_name, (), ())
- return tp
-
- def include(self, other):
- for name, (tp, quals) in other._declarations.items():
- if name.startswith('anonymous $enum_$'):
- continue # fix for test_anonymous_enum_include
- kind = name.split(' ', 1)[0]
- if kind in ('struct', 'union', 'enum', 'anonymous', 'typedef'):
- self._declare(name, tp, included=True, quals=quals)
- for k, v in other._int_constants.items():
- self._add_constants(k, v)
-
- def _get_unknown_type(self, decl):
- typenames = decl.type.type.names
- if typenames == ['__dotdotdot__']:
- return model.unknown_type(decl.name)
-
- if typenames == ['__dotdotdotint__']:
- if self._uses_new_feature is None:
- self._uses_new_feature = "'typedef int... %s'" % decl.name
- return model.UnknownIntegerType(decl.name)
-
- if typenames == ['__dotdotdotfloat__']:
- # note: not for 'long double' so far
- if self._uses_new_feature is None:
- self._uses_new_feature = "'typedef float... %s'" % decl.name
- return model.UnknownFloatType(decl.name)
-
- raise FFIError(':%d: unsupported usage of "..." in typedef'
- % decl.coord.line)
-
- def _get_unknown_ptr_type(self, decl):
- if decl.type.type.type.names == ['__dotdotdot__']:
- return model.unknown_ptr_type(decl.name)
- raise FFIError(':%d: unsupported usage of "..." in typedef'
- % decl.coord.line)
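
Two of the textual rewrites performed by _preprocess() in the file removed above can be seen in isolation below; the regexes are copied from that file, and the sample cdef snippet is made up for illustration:

import re

_r_comment = re.compile(r"/\*.*?\*/|//([^\n\\]|\\.)*?$",
                        re.DOTALL | re.MULTILINE)
_r_partial_array = re.compile(r"\[\s*\.\.\.\s*\]")

csource = """
typedef struct { int x; /* packed */ } foo_t;   // trailing comment
int values[...];
"""
csource = _r_comment.sub(' ', csource)                           # blank comments
csource = _r_partial_array.sub('[__dotdotdotarray__]', csource)  # mark '[...]'
print(csource)   # comments are blanked, '[...]' becomes '[__dotdotdotarray__]'
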
diff --git a/functions/source/GitPullS3/cffi/error.py b/functions/source/GitPullS3/cffi/error.py
deleted file mode 100644
index ec19964..0000000
--- a/functions/source/GitPullS3/cffi/error.py
+++ /dev/null
@@ -1,23 +0,0 @@
-
-class FFIError(Exception):
- pass
-
-class CDefError(Exception):
- def __str__(self):
- try:
- current_decl = self.args[1]
- filename = current_decl.coord.file
- linenum = current_decl.coord.line
- prefix = '%s:%d: ' % (filename, linenum)
- except (AttributeError, TypeError, IndexError):
- prefix = ''
- return '%s%s' % (prefix, self.args[0])
-
-class VerificationError(Exception):
- """ An error raised when verification fails
- """
-
-class VerificationMissing(Exception):
- """ An error raised when incomplete structures are passed into
- cdef, but no verification has been done
- """
diff --git a/functions/source/GitPullS3/cffi/ffiplatform.py b/functions/source/GitPullS3/cffi/ffiplatform.py
deleted file mode 100644
index 8531346..0000000
--- a/functions/source/GitPullS3/cffi/ffiplatform.py
+++ /dev/null
@@ -1,127 +0,0 @@
-import sys, os
-from .error import VerificationError
-
-
-LIST_OF_FILE_NAMES = ['sources', 'include_dirs', 'library_dirs',
- 'extra_objects', 'depends']
-
-def get_extension(srcfilename, modname, sources=(), **kwds):
- _hack_at_distutils()
- from distutils.core import Extension
- allsources = [srcfilename]
- for src in sources:
- allsources.append(os.path.normpath(src))
- return Extension(name=modname, sources=allsources, **kwds)
-
-def compile(tmpdir, ext, compiler_verbose=0, debug=None):
- """Compile a C extension module using distutils."""
-
- _hack_at_distutils()
- saved_environ = os.environ.copy()
- try:
- outputfilename = _build(tmpdir, ext, compiler_verbose, debug)
- outputfilename = os.path.abspath(outputfilename)
- finally:
-        # workaround for a distutils bug where some env vars can
-        # become longer and longer every time distutils is used
- for key, value in saved_environ.items():
- if os.environ.get(key) != value:
- os.environ[key] = value
- return outputfilename
-
-def _build(tmpdir, ext, compiler_verbose=0, debug=None):
- # XXX compact but horrible :-(
- from distutils.core import Distribution
- import distutils.errors, distutils.log
- #
- dist = Distribution({'ext_modules': [ext]})
- dist.parse_config_files()
- options = dist.get_option_dict('build_ext')
- if debug is None:
- debug = sys.flags.debug
- options['debug'] = ('ffiplatform', debug)
- options['force'] = ('ffiplatform', True)
- options['build_lib'] = ('ffiplatform', tmpdir)
- options['build_temp'] = ('ffiplatform', tmpdir)
- #
- try:
- old_level = distutils.log.set_threshold(0) or 0
- try:
- distutils.log.set_verbosity(compiler_verbose)
- dist.run_command('build_ext')
- cmd_obj = dist.get_command_obj('build_ext')
- [soname] = cmd_obj.get_outputs()
- finally:
- distutils.log.set_threshold(old_level)
- except (distutils.errors.CompileError,
- distutils.errors.LinkError) as e:
- raise VerificationError('%s: %s' % (e.__class__.__name__, e))
- #
- return soname
-
-try:
- from os.path import samefile
-except ImportError:
- def samefile(f1, f2):
- return os.path.abspath(f1) == os.path.abspath(f2)
-
-def maybe_relative_path(path):
- if not os.path.isabs(path):
- return path # already relative
- dir = path
- names = []
- while True:
- prevdir = dir
- dir, name = os.path.split(prevdir)
- if dir == prevdir or not dir:
- return path # failed to make it relative
- names.append(name)
- try:
- if samefile(dir, os.curdir):
- names.reverse()
- return os.path.join(*names)
- except OSError:
- pass
-
-# ____________________________________________________________
-
-try:
- int_or_long = (int, long)
- import cStringIO
-except NameError:
- int_or_long = int # Python 3
- import io as cStringIO
-
-def _flatten(x, f):
- if isinstance(x, str):
- f.write('%ds%s' % (len(x), x))
- elif isinstance(x, dict):
- keys = sorted(x.keys())
- f.write('%dd' % len(keys))
- for key in keys:
- _flatten(key, f)
- _flatten(x[key], f)
- elif isinstance(x, (list, tuple)):
- f.write('%dl' % len(x))
- for value in x:
- _flatten(value, f)
- elif isinstance(x, int_or_long):
- f.write('%di' % (x,))
- else:
- raise TypeError(
- "the keywords to verify() contains unsupported object %r" % (x,))
-
-def flatten(x):
- f = cStringIO.StringIO()
- _flatten(x, f)
- return f.getvalue()
-
-def _hack_at_distutils():
- # Windows-only workaround for some configurations: see
- # https://bugs.python.org/issue23246 (Python 2.7 with
- # a specific MS compiler suite download)
- if sys.platform == "win32":
- try:
- import setuptools # for side-effects, patches distutils
- except ImportError:
- pass
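
Besides the distutils glue, the deleted ffiplatform.py ends with flatten(), which serialises verify()'s keyword arguments into a stable, type-tagged string (dict keys are sorted, so the result is independent of insertion order and can serve as part of a cache key). The same idea, sketched for Python 3 only under a hypothetical function name:

    import io

    def flatten_kwds(x, out=None):
        # stable serialisation: '<len>s' for str, '<n>d' for dict,
        # '<n>l' for list/tuple, '<value>i' for int; dict keys sorted
        f = out if out is not None else io.StringIO()
        if isinstance(x, str):
            f.write('%ds%s' % (len(x), x))
        elif isinstance(x, dict):
            keys = sorted(x)
            f.write('%dd' % len(keys))
            for key in keys:
                flatten_kwds(key, f)
                flatten_kwds(x[key], f)
        elif isinstance(x, (list, tuple)):
            f.write('%dl' % len(x))
            for value in x:
                flatten_kwds(value, f)
        elif isinstance(x, int):
            f.write('%di' % x)
        else:
            raise TypeError('unsupported object %r' % (x,))
        return f.getvalue()

    assert (flatten_kwds({'libraries': ['m'], 'debug': 1}) ==
            flatten_kwds({'debug': 1, 'libraries': ['m']}))
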
diff --git a/functions/source/GitPullS3/cffi/lock.py b/functions/source/GitPullS3/cffi/lock.py
deleted file mode 100644
index db91b71..0000000
--- a/functions/source/GitPullS3/cffi/lock.py
+++ /dev/null
@@ -1,30 +0,0 @@
-import sys
-
-if sys.version_info < (3,):
- try:
- from thread import allocate_lock
- except ImportError:
- from dummy_thread import allocate_lock
-else:
- try:
- from _thread import allocate_lock
- except ImportError:
- from _dummy_thread import allocate_lock
-
-
-##import sys
-##l1 = allocate_lock
-
-##class allocate_lock(object):
-## def __init__(self):
-## self._real = l1()
-## def __enter__(self):
-## for i in range(4, 0, -1):
-## print sys._getframe(i).f_code
-## print
-## return self._real.__enter__()
-## def __exit__(self, *args):
-## return self._real.__exit__(*args)
-## def acquire(self, f):
-## assert f is False
-## return self._real.acquire(f)
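
The deleted lock.py only selects an allocate_lock() implementation for the running interpreter (the commented-out wrapper was a debugging aid). On Python 3 the live branch reduces to the sketch below; the resulting lock is used as a context manager by global_cache() in model.py further down:

    from _thread import allocate_lock   # the Python 3 branch of the removed module

    global_lock = allocate_lock()
    with global_lock:                   # same pattern as global_cache() uses
        pass                            # critical section goes here
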
diff --git a/functions/source/GitPullS3/cffi/model.py b/functions/source/GitPullS3/cffi/model.py
deleted file mode 100644
index 5d1139d..0000000
--- a/functions/source/GitPullS3/cffi/model.py
+++ /dev/null
@@ -1,611 +0,0 @@
-import types
-import weakref
-
-from .lock import allocate_lock
-from .error import CDefError, VerificationError, VerificationMissing
-
-# type qualifiers
-Q_CONST = 0x01
-Q_RESTRICT = 0x02
-Q_VOLATILE = 0x04
-
-def qualify(quals, replace_with):
- if quals & Q_CONST:
- replace_with = ' const ' + replace_with.lstrip()
- if quals & Q_VOLATILE:
- replace_with = ' volatile ' + replace_with.lstrip()
- if quals & Q_RESTRICT:
- # It seems that __restrict is supported by gcc and msvc.
- # If you hit some different compiler, add a #define in
- # _cffi_include.h for it (and in its copies, documented there)
- replace_with = ' __restrict ' + replace_with.lstrip()
- return replace_with
-
-
-class BaseTypeByIdentity(object):
- is_array_type = False
- is_raw_function = False
-
- def get_c_name(self, replace_with='', context='a C file', quals=0):
- result = self.c_name_with_marker
- assert result.count('&') == 1
- # some logic duplication with ffi.getctype()... :-(
- replace_with = replace_with.strip()
- if replace_with:
- if replace_with.startswith('*') and '&[' in result:
- replace_with = '(%s)' % replace_with
- elif not replace_with[0] in '[(':
- replace_with = ' ' + replace_with
- replace_with = qualify(quals, replace_with)
- result = result.replace('&', replace_with)
- if '$' in result:
- raise VerificationError(
- "cannot generate '%s' in %s: unknown type name"
- % (self._get_c_name(), context))
- return result
-
- def _get_c_name(self):
- return self.c_name_with_marker.replace('&', '')
-
- def has_c_name(self):
- return '$' not in self._get_c_name()
-
- def is_integer_type(self):
- return False
-
- def get_cached_btype(self, ffi, finishlist, can_delay=False):
- try:
- BType = ffi._cached_btypes[self]
- except KeyError:
- BType = self.build_backend_type(ffi, finishlist)
- BType2 = ffi._cached_btypes.setdefault(self, BType)
- assert BType2 is BType
- return BType
-
- def __repr__(self):
- return '<%s>' % (self._get_c_name(),)
-
- def _get_items(self):
- return [(name, getattr(self, name)) for name in self._attrs_]
-
-
-class BaseType(BaseTypeByIdentity):
-
- def __eq__(self, other):
- return (self.__class__ == other.__class__ and
- self._get_items() == other._get_items())
-
- def __ne__(self, other):
- return not self == other
-
- def __hash__(self):
- return hash((self.__class__, tuple(self._get_items())))
-
-
-class VoidType(BaseType):
- _attrs_ = ()
-
- def __init__(self):
- self.c_name_with_marker = 'void&'
-
- def build_backend_type(self, ffi, finishlist):
- return global_cache(self, ffi, 'new_void_type')
-
-void_type = VoidType()
-
-
-class BasePrimitiveType(BaseType):
- def is_complex_type(self):
- return False
-
-
-class PrimitiveType(BasePrimitiveType):
- _attrs_ = ('name',)
-
- ALL_PRIMITIVE_TYPES = {
- 'char': 'c',
- 'short': 'i',
- 'int': 'i',
- 'long': 'i',
- 'long long': 'i',
- 'signed char': 'i',
- 'unsigned char': 'i',
- 'unsigned short': 'i',
- 'unsigned int': 'i',
- 'unsigned long': 'i',
- 'unsigned long long': 'i',
- 'float': 'f',
- 'double': 'f',
- 'long double': 'f',
- 'float _Complex': 'j',
- 'double _Complex': 'j',
- '_Bool': 'i',
- # the following types are not primitive in the C sense
- 'wchar_t': 'c',
- 'char16_t': 'c',
- 'char32_t': 'c',
- 'int8_t': 'i',
- 'uint8_t': 'i',
- 'int16_t': 'i',
- 'uint16_t': 'i',
- 'int32_t': 'i',
- 'uint32_t': 'i',
- 'int64_t': 'i',
- 'uint64_t': 'i',
- 'int_least8_t': 'i',
- 'uint_least8_t': 'i',
- 'int_least16_t': 'i',
- 'uint_least16_t': 'i',
- 'int_least32_t': 'i',
- 'uint_least32_t': 'i',
- 'int_least64_t': 'i',
- 'uint_least64_t': 'i',
- 'int_fast8_t': 'i',
- 'uint_fast8_t': 'i',
- 'int_fast16_t': 'i',
- 'uint_fast16_t': 'i',
- 'int_fast32_t': 'i',
- 'uint_fast32_t': 'i',
- 'int_fast64_t': 'i',
- 'uint_fast64_t': 'i',
- 'intptr_t': 'i',
- 'uintptr_t': 'i',
- 'intmax_t': 'i',
- 'uintmax_t': 'i',
- 'ptrdiff_t': 'i',
- 'size_t': 'i',
- 'ssize_t': 'i',
- }
-
- def __init__(self, name):
- assert name in self.ALL_PRIMITIVE_TYPES
- self.name = name
- self.c_name_with_marker = name + '&'
-
- def is_char_type(self):
- return self.ALL_PRIMITIVE_TYPES[self.name] == 'c'
- def is_integer_type(self):
- return self.ALL_PRIMITIVE_TYPES[self.name] == 'i'
- def is_float_type(self):
- return self.ALL_PRIMITIVE_TYPES[self.name] == 'f'
- def is_complex_type(self):
- return self.ALL_PRIMITIVE_TYPES[self.name] == 'j'
-
- def build_backend_type(self, ffi, finishlist):
- return global_cache(self, ffi, 'new_primitive_type', self.name)
-
-
-class UnknownIntegerType(BasePrimitiveType):
- _attrs_ = ('name',)
-
- def __init__(self, name):
- self.name = name
- self.c_name_with_marker = name + '&'
-
- def is_integer_type(self):
- return True
-
- def build_backend_type(self, ffi, finishlist):
- raise NotImplementedError("integer type '%s' can only be used after "
- "compilation" % self.name)
-
-class UnknownFloatType(BasePrimitiveType):
- _attrs_ = ('name', )
-
- def __init__(self, name):
- self.name = name
- self.c_name_with_marker = name + '&'
-
- def build_backend_type(self, ffi, finishlist):
- raise NotImplementedError("float type '%s' can only be used after "
- "compilation" % self.name)
-
-
-class BaseFunctionType(BaseType):
- _attrs_ = ('args', 'result', 'ellipsis', 'abi')
-
- def __init__(self, args, result, ellipsis, abi=None):
- self.args = args
- self.result = result
- self.ellipsis = ellipsis
- self.abi = abi
- #
- reprargs = [arg._get_c_name() for arg in self.args]
- if self.ellipsis:
- reprargs.append('...')
- reprargs = reprargs or ['void']
- replace_with = self._base_pattern % (', '.join(reprargs),)
- if abi is not None:
- replace_with = replace_with[:1] + abi + ' ' + replace_with[1:]
- self.c_name_with_marker = (
- self.result.c_name_with_marker.replace('&', replace_with))
-
-
-class RawFunctionType(BaseFunctionType):
- # Corresponds to a C type like 'int(int)', which is the C type of
- # a function, but not a pointer-to-function. The backend has no
- # notion of such a type; it's used temporarily by parsing.
- _base_pattern = '(&)(%s)'
- is_raw_function = True
-
- def build_backend_type(self, ffi, finishlist):
- raise CDefError("cannot render the type %r: it is a function "
- "type, not a pointer-to-function type" % (self,))
-
- def as_function_pointer(self):
- return FunctionPtrType(self.args, self.result, self.ellipsis, self.abi)
-
-
-class FunctionPtrType(BaseFunctionType):
- _base_pattern = '(*&)(%s)'
-
- def build_backend_type(self, ffi, finishlist):
- result = self.result.get_cached_btype(ffi, finishlist)
- args = []
- for tp in self.args:
- args.append(tp.get_cached_btype(ffi, finishlist))
- abi_args = ()
- if self.abi == "__stdcall":
- if not self.ellipsis: # __stdcall ignored for variadic funcs
- try:
- abi_args = (ffi._backend.FFI_STDCALL,)
- except AttributeError:
- pass
- return global_cache(self, ffi, 'new_function_type',
- tuple(args), result, self.ellipsis, *abi_args)
-
- def as_raw_function(self):
- return RawFunctionType(self.args, self.result, self.ellipsis, self.abi)
-
-
-class PointerType(BaseType):
- _attrs_ = ('totype', 'quals')
-
- def __init__(self, totype, quals=0):
- self.totype = totype
- self.quals = quals
- extra = qualify(quals, " *&")
- if totype.is_array_type:
- extra = "(%s)" % (extra.lstrip(),)
- self.c_name_with_marker = totype.c_name_with_marker.replace('&', extra)
-
- def build_backend_type(self, ffi, finishlist):
- BItem = self.totype.get_cached_btype(ffi, finishlist, can_delay=True)
- return global_cache(self, ffi, 'new_pointer_type', BItem)
-
-voidp_type = PointerType(void_type)
-
-def ConstPointerType(totype):
- return PointerType(totype, Q_CONST)
-
-const_voidp_type = ConstPointerType(void_type)
-
-
-class NamedPointerType(PointerType):
- _attrs_ = ('totype', 'name')
-
- def __init__(self, totype, name, quals=0):
- PointerType.__init__(self, totype, quals)
- self.name = name
- self.c_name_with_marker = name + '&'
-
-
-class ArrayType(BaseType):
- _attrs_ = ('item', 'length')
- is_array_type = True
-
- def __init__(self, item, length):
- self.item = item
- self.length = length
- #
- if length is None:
- brackets = '&[]'
- elif length == '...':
- brackets = '&[/*...*/]'
- else:
- brackets = '&[%s]' % length
- self.c_name_with_marker = (
- self.item.c_name_with_marker.replace('&', brackets))
-
- def resolve_length(self, newlength):
- return ArrayType(self.item, newlength)
-
- def build_backend_type(self, ffi, finishlist):
- if self.length == '...':
- raise CDefError("cannot render the type %r: unknown length" %
- (self,))
- self.item.get_cached_btype(ffi, finishlist) # force the item BType
- BPtrItem = PointerType(self.item).get_cached_btype(ffi, finishlist)
- return global_cache(self, ffi, 'new_array_type', BPtrItem, self.length)
-
-char_array_type = ArrayType(PrimitiveType('char'), None)
-
-
-class StructOrUnionOrEnum(BaseTypeByIdentity):
- _attrs_ = ('name',)
- forcename = None
-
- def build_c_name_with_marker(self):
- name = self.forcename or '%s %s' % (self.kind, self.name)
- self.c_name_with_marker = name + '&'
-
- def force_the_name(self, forcename):
- self.forcename = forcename
- self.build_c_name_with_marker()
-
- def get_official_name(self):
- assert self.c_name_with_marker.endswith('&')
- return self.c_name_with_marker[:-1]
-
-
-class StructOrUnion(StructOrUnionOrEnum):
- fixedlayout = None
- completed = 0
- partial = False
- packed = False
-
- def __init__(self, name, fldnames, fldtypes, fldbitsize, fldquals=None):
- self.name = name
- self.fldnames = fldnames
- self.fldtypes = fldtypes
- self.fldbitsize = fldbitsize
- self.fldquals = fldquals
- self.build_c_name_with_marker()
-
- def anonymous_struct_fields(self):
- if self.fldtypes is not None:
- for name, type in zip(self.fldnames, self.fldtypes):
- if name == '' and isinstance(type, StructOrUnion):
- yield type
-
- def enumfields(self, expand_anonymous_struct_union=True):
- fldquals = self.fldquals
- if fldquals is None:
- fldquals = (0,) * len(self.fldnames)
- for name, type, bitsize, quals in zip(self.fldnames, self.fldtypes,
- self.fldbitsize, fldquals):
- if (name == '' and isinstance(type, StructOrUnion)
- and expand_anonymous_struct_union):
- # nested anonymous struct/union
- for result in type.enumfields():
- yield result
- else:
- yield (name, type, bitsize, quals)
-
- def force_flatten(self):
- # force the struct or union to have a declaration that lists
- # directly all fields returned by enumfields(), flattening
- # nested anonymous structs/unions.
- names = []
- types = []
- bitsizes = []
- fldquals = []
- for name, type, bitsize, quals in self.enumfields():
- names.append(name)
- types.append(type)
- bitsizes.append(bitsize)
- fldquals.append(quals)
- self.fldnames = tuple(names)
- self.fldtypes = tuple(types)
- self.fldbitsize = tuple(bitsizes)
- self.fldquals = tuple(fldquals)
-
- def get_cached_btype(self, ffi, finishlist, can_delay=False):
- BType = StructOrUnionOrEnum.get_cached_btype(self, ffi, finishlist,
- can_delay)
- if not can_delay:
- self.finish_backend_type(ffi, finishlist)
- return BType
-
- def finish_backend_type(self, ffi, finishlist):
- if self.completed:
- if self.completed != 2:
- raise NotImplementedError("recursive structure declaration "
- "for '%s'" % (self.name,))
- return
- BType = ffi._cached_btypes[self]
- #
- self.completed = 1
- #
- if self.fldtypes is None:
- pass # not completing it: it's an opaque struct
- #
- elif self.fixedlayout is None:
- fldtypes = [tp.get_cached_btype(ffi, finishlist)
- for tp in self.fldtypes]
- lst = list(zip(self.fldnames, fldtypes, self.fldbitsize))
- sflags = 0
- if self.packed:
- sflags = 8 # SF_PACKED
- ffi._backend.complete_struct_or_union(BType, lst, self,
- -1, -1, sflags)
- #
- else:
- fldtypes = []
- fieldofs, fieldsize, totalsize, totalalignment = self.fixedlayout
- for i in range(len(self.fldnames)):
- fsize = fieldsize[i]
- ftype = self.fldtypes[i]
- #
- if isinstance(ftype, ArrayType) and ftype.length == '...':
- # fix the length to match the total size
- BItemType = ftype.item.get_cached_btype(ffi, finishlist)
- nlen, nrest = divmod(fsize, ffi.sizeof(BItemType))
- if nrest != 0:
- self._verification_error(
- "field '%s.%s' has a bogus size?" % (
- self.name, self.fldnames[i] or '{}'))
- ftype = ftype.resolve_length(nlen)
- self.fldtypes = (self.fldtypes[:i] + (ftype,) +
- self.fldtypes[i+1:])
- #
- BFieldType = ftype.get_cached_btype(ffi, finishlist)
- if isinstance(ftype, ArrayType) and ftype.length is None:
- assert fsize == 0
- else:
- bitemsize = ffi.sizeof(BFieldType)
- if bitemsize != fsize:
- self._verification_error(
- "field '%s.%s' is declared as %d bytes, but is "
- "really %d bytes" % (self.name,
- self.fldnames[i] or '{}',
- bitemsize, fsize))
- fldtypes.append(BFieldType)
- #
- lst = list(zip(self.fldnames, fldtypes, self.fldbitsize, fieldofs))
- ffi._backend.complete_struct_or_union(BType, lst, self,
- totalsize, totalalignment)
- self.completed = 2
-
- def _verification_error(self, msg):
- raise VerificationError(msg)
-
- def check_not_partial(self):
- if self.partial and self.fixedlayout is None:
- raise VerificationMissing(self._get_c_name())
-
- def build_backend_type(self, ffi, finishlist):
- self.check_not_partial()
- finishlist.append(self)
- #
- return global_cache(self, ffi, 'new_%s_type' % self.kind,
- self.get_official_name(), key=self)
-
-
-class StructType(StructOrUnion):
- kind = 'struct'
-
-
-class UnionType(StructOrUnion):
- kind = 'union'
-
-
-class EnumType(StructOrUnionOrEnum):
- kind = 'enum'
- partial = False
- partial_resolved = False
-
- def __init__(self, name, enumerators, enumvalues, baseinttype=None):
- self.name = name
- self.enumerators = enumerators
- self.enumvalues = enumvalues
- self.baseinttype = baseinttype
- self.build_c_name_with_marker()
-
- def force_the_name(self, forcename):
- StructOrUnionOrEnum.force_the_name(self, forcename)
- if self.forcename is None:
- name = self.get_official_name()
- self.forcename = '$' + name.replace(' ', '_')
-
- def check_not_partial(self):
- if self.partial and not self.partial_resolved:
- raise VerificationMissing(self._get_c_name())
-
- def build_backend_type(self, ffi, finishlist):
- self.check_not_partial()
- base_btype = self.build_baseinttype(ffi, finishlist)
- return global_cache(self, ffi, 'new_enum_type',
- self.get_official_name(),
- self.enumerators, self.enumvalues,
- base_btype, key=self)
-
- def build_baseinttype(self, ffi, finishlist):
- if self.baseinttype is not None:
- return self.baseinttype.get_cached_btype(ffi, finishlist)
- #
- if self.enumvalues:
- smallest_value = min(self.enumvalues)
- largest_value = max(self.enumvalues)
- else:
- import warnings
- try:
- # XXX! The goal is to ensure that the warnings.warn()
- # will not suppress the warning. We want to get it
- # several times if we reach this point several times.
- __warningregistry__.clear()
- except NameError:
- pass
- warnings.warn("%r has no values explicitly defined; "
- "guessing that it is equivalent to 'unsigned int'"
- % self._get_c_name())
- smallest_value = largest_value = 0
- if smallest_value < 0: # needs a signed type
- sign = 1
- candidate1 = PrimitiveType("int")
- candidate2 = PrimitiveType("long")
- else:
- sign = 0
- candidate1 = PrimitiveType("unsigned int")
- candidate2 = PrimitiveType("unsigned long")
- btype1 = candidate1.get_cached_btype(ffi, finishlist)
- btype2 = candidate2.get_cached_btype(ffi, finishlist)
- size1 = ffi.sizeof(btype1)
- size2 = ffi.sizeof(btype2)
- if (smallest_value >= ((-1) << (8*size1-1)) and
- largest_value < (1 << (8*size1-sign))):
- return btype1
- if (smallest_value >= ((-1) << (8*size2-1)) and
- largest_value < (1 << (8*size2-sign))):
- return btype2
- raise CDefError("%s values don't all fit into either 'long' "
- "or 'unsigned long'" % self._get_c_name())
-
-def unknown_type(name, structname=None):
- if structname is None:
- structname = '$%s' % name
- tp = StructType(structname, None, None, None)
- tp.force_the_name(name)
- tp.origin = "unknown_type"
- return tp
-
-def unknown_ptr_type(name, structname=None):
- if structname is None:
- structname = '$$%s' % name
- tp = StructType(structname, None, None, None)
- return NamedPointerType(tp, name)
-
-
-global_lock = allocate_lock()
-_typecache_cffi_backend = weakref.WeakValueDictionary()
-
-def get_typecache(backend):
- # returns _typecache_cffi_backend if backend is the _cffi_backend
- # module, or type(backend).__typecache if backend is an instance of
- # CTypesBackend (or some FakeBackend class during tests)
- if isinstance(backend, types.ModuleType):
- return _typecache_cffi_backend
- with global_lock:
- if not hasattr(type(backend), '__typecache'):
- type(backend).__typecache = weakref.WeakValueDictionary()
- return type(backend).__typecache
-
-def global_cache(srctype, ffi, funcname, *args, **kwds):
- key = kwds.pop('key', (funcname, args))
- assert not kwds
- try:
- return ffi._typecache[key]
- except KeyError:
- pass
- try:
- res = getattr(ffi._backend, funcname)(*args)
- except NotImplementedError as e:
- raise NotImplementedError("%s: %r: %s" % (funcname, srctype, e))
- # note that setdefault() on WeakValueDictionary is not atomic
- # and contains a rare bug (http://bugs.python.org/issue19542);
- # we have to use a lock and do it ourselves
- cache = ffi._typecache
- with global_lock:
- res1 = cache.get(key)
- if res1 is None:
- cache[key] = res
- return res
- else:
- return res1
-
-def pointer_cache(ffi, BType):
- return global_cache('?', ffi, 'new_pointer_type', BType)
-
-def attach_exception_info(e, name):
- if e.args and type(e.args[0]) is str:
- e.args = ('%s: %s' % (name, e.args[0]),) + e.args[1:]
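
The load-bearing trick in the deleted model.py is the '&' marker: every type stores its C name with a single '&' where a declarator would go, and get_c_name() splices a variable name into that slot, adding parentheses when a pointer meets an array. A reduced sketch of just that substitution, using hypothetical marker strings instead of the real type classes:

    def c_decl(c_name_with_marker, varname=''):
        # '&' marks the declarator position; '(...)' is needed for
        # pointer-to-array declarations such as 'int (*x)[10]'
        assert c_name_with_marker.count('&') == 1
        varname = varname.strip()
        if varname:
            if varname.startswith('*') and '&[' in c_name_with_marker:
                varname = '(%s)' % varname
            elif varname[0] not in '[(':
                varname = ' ' + varname
        return c_name_with_marker.replace('&', varname)

    assert c_decl('int&', 'x') == 'int x'
    assert c_decl('int *&', 'x') == 'int * x'
    assert c_decl('int&[10]', '*x') == 'int(*x)[10]'
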
diff --git a/functions/source/GitPullS3/cffi/parse_c_type.h b/functions/source/GitPullS3/cffi/parse_c_type.h
deleted file mode 100644
index 84e4ef8..0000000
--- a/functions/source/GitPullS3/cffi/parse_c_type.h
+++ /dev/null
@@ -1,181 +0,0 @@
-
-/* This part is from file 'cffi/parse_c_type.h'. It is copied at the
- beginning of C sources generated by CFFI's ffi.set_source(). */
-
-typedef void *_cffi_opcode_t;
-
-#define _CFFI_OP(opcode, arg) (_cffi_opcode_t)(opcode | (((uintptr_t)(arg)) << 8))
-#define _CFFI_GETOP(cffi_opcode) ((unsigned char)(uintptr_t)cffi_opcode)
-#define _CFFI_GETARG(cffi_opcode) (((intptr_t)cffi_opcode) >> 8)
-
-#define _CFFI_OP_PRIMITIVE 1
-#define _CFFI_OP_POINTER 3
-#define _CFFI_OP_ARRAY 5
-#define _CFFI_OP_OPEN_ARRAY 7
-#define _CFFI_OP_STRUCT_UNION 9
-#define _CFFI_OP_ENUM 11
-#define _CFFI_OP_FUNCTION 13
-#define _CFFI_OP_FUNCTION_END 15
-#define _CFFI_OP_NOOP 17
-#define _CFFI_OP_BITFIELD 19
-#define _CFFI_OP_TYPENAME 21
-#define _CFFI_OP_CPYTHON_BLTN_V 23 // varargs
-#define _CFFI_OP_CPYTHON_BLTN_N 25 // noargs
-#define _CFFI_OP_CPYTHON_BLTN_O 27 // O (i.e. a single arg)
-#define _CFFI_OP_CONSTANT 29
-#define _CFFI_OP_CONSTANT_INT 31
-#define _CFFI_OP_GLOBAL_VAR 33
-#define _CFFI_OP_DLOPEN_FUNC 35
-#define _CFFI_OP_DLOPEN_CONST 37
-#define _CFFI_OP_GLOBAL_VAR_F 39
-#define _CFFI_OP_EXTERN_PYTHON 41
-
-#define _CFFI_PRIM_VOID 0
-#define _CFFI_PRIM_BOOL 1
-#define _CFFI_PRIM_CHAR 2
-#define _CFFI_PRIM_SCHAR 3
-#define _CFFI_PRIM_UCHAR 4
-#define _CFFI_PRIM_SHORT 5
-#define _CFFI_PRIM_USHORT 6
-#define _CFFI_PRIM_INT 7
-#define _CFFI_PRIM_UINT 8
-#define _CFFI_PRIM_LONG 9
-#define _CFFI_PRIM_ULONG 10
-#define _CFFI_PRIM_LONGLONG 11
-#define _CFFI_PRIM_ULONGLONG 12
-#define _CFFI_PRIM_FLOAT 13
-#define _CFFI_PRIM_DOUBLE 14
-#define _CFFI_PRIM_LONGDOUBLE 15
-
-#define _CFFI_PRIM_WCHAR 16
-#define _CFFI_PRIM_INT8 17
-#define _CFFI_PRIM_UINT8 18
-#define _CFFI_PRIM_INT16 19
-#define _CFFI_PRIM_UINT16 20
-#define _CFFI_PRIM_INT32 21
-#define _CFFI_PRIM_UINT32 22
-#define _CFFI_PRIM_INT64 23
-#define _CFFI_PRIM_UINT64 24
-#define _CFFI_PRIM_INTPTR 25
-#define _CFFI_PRIM_UINTPTR 26
-#define _CFFI_PRIM_PTRDIFF 27
-#define _CFFI_PRIM_SIZE 28
-#define _CFFI_PRIM_SSIZE 29
-#define _CFFI_PRIM_INT_LEAST8 30
-#define _CFFI_PRIM_UINT_LEAST8 31
-#define _CFFI_PRIM_INT_LEAST16 32
-#define _CFFI_PRIM_UINT_LEAST16 33
-#define _CFFI_PRIM_INT_LEAST32 34
-#define _CFFI_PRIM_UINT_LEAST32 35
-#define _CFFI_PRIM_INT_LEAST64 36
-#define _CFFI_PRIM_UINT_LEAST64 37
-#define _CFFI_PRIM_INT_FAST8 38
-#define _CFFI_PRIM_UINT_FAST8 39
-#define _CFFI_PRIM_INT_FAST16 40
-#define _CFFI_PRIM_UINT_FAST16 41
-#define _CFFI_PRIM_INT_FAST32 42
-#define _CFFI_PRIM_UINT_FAST32 43
-#define _CFFI_PRIM_INT_FAST64 44
-#define _CFFI_PRIM_UINT_FAST64 45
-#define _CFFI_PRIM_INTMAX 46
-#define _CFFI_PRIM_UINTMAX 47
-#define _CFFI_PRIM_FLOATCOMPLEX 48
-#define _CFFI_PRIM_DOUBLECOMPLEX 49
-#define _CFFI_PRIM_CHAR16 50
-#define _CFFI_PRIM_CHAR32 51
-
-#define _CFFI__NUM_PRIM 52
-#define _CFFI__UNKNOWN_PRIM (-1)
-#define _CFFI__UNKNOWN_FLOAT_PRIM (-2)
-#define _CFFI__UNKNOWN_LONG_DOUBLE (-3)
-
-#define _CFFI__IO_FILE_STRUCT (-1)
-
-
-struct _cffi_global_s {
- const char *name;
- void *address;
- _cffi_opcode_t type_op;
- void *size_or_direct_fn; // OP_GLOBAL_VAR: size, or 0 if unknown
- // OP_CPYTHON_BLTN_*: addr of direct function
-};
-
-struct _cffi_getconst_s {
- unsigned long long value;
- const struct _cffi_type_context_s *ctx;
- int gindex;
-};
-
-struct _cffi_struct_union_s {
- const char *name;
- int type_index; // -> _cffi_types, on a OP_STRUCT_UNION
- int flags; // _CFFI_F_* flags below
- size_t size;
- int alignment;
- int first_field_index; // -> _cffi_fields array
- int num_fields;
-};
-#define _CFFI_F_UNION 0x01 // is a union, not a struct
-#define _CFFI_F_CHECK_FIELDS 0x02 // complain if fields are not in the
- // "standard layout" or if some are missing
-#define _CFFI_F_PACKED 0x04 // for CHECK_FIELDS, assume a packed struct
-#define _CFFI_F_EXTERNAL 0x08 // in some other ffi.include()
-#define _CFFI_F_OPAQUE 0x10 // opaque
-
-struct _cffi_field_s {
- const char *name;
- size_t field_offset;
- size_t field_size;
- _cffi_opcode_t field_type_op;
-};
-
-struct _cffi_enum_s {
- const char *name;
- int type_index; // -> _cffi_types, on a OP_ENUM
- int type_prim; // _CFFI_PRIM_xxx
- const char *enumerators; // comma-delimited string
-};
-
-struct _cffi_typename_s {
- const char *name;
- int type_index; /* if opaque, points to a possibly artificial
- OP_STRUCT which is itself opaque */
-};
-
-struct _cffi_type_context_s {
- _cffi_opcode_t *types;
- const struct _cffi_global_s *globals;
- const struct _cffi_field_s *fields;
- const struct _cffi_struct_union_s *struct_unions;
- const struct _cffi_enum_s *enums;
- const struct _cffi_typename_s *typenames;
- int num_globals;
- int num_struct_unions;
- int num_enums;
- int num_typenames;
- const char *const *includes;
- int num_types;
- int flags; /* future extension */
-};
-
-struct _cffi_parse_info_s {
- const struct _cffi_type_context_s *ctx;
- _cffi_opcode_t *output;
- unsigned int output_size;
- size_t error_location;
- const char *error_message;
-};
-
-struct _cffi_externpy_s {
- const char *name;
- size_t size_of_result;
- void *reserved1, *reserved2;
-};
-
-#ifdef _CFFI_INTERNAL
-static int parse_c_type(struct _cffi_parse_info_s *info, const char *input);
-static int search_in_globals(const struct _cffi_type_context_s *ctx,
- const char *search, size_t search_len);
-static int search_in_struct_unions(const struct _cffi_type_context_s *ctx,
- const char *search, size_t search_len);
-#endif
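
Each entry of _cffi_types in the deleted header is a single pointer-sized word: the low byte holds the opcode and the remaining bits hold its argument, usually an index into the same table. Rewritten in Python purely to illustrate the _CFFI_OP / _CFFI_GETOP / _CFFI_GETARG macros (the constant value is copied from the header above):

    _CFFI_OP_POINTER = 3                  # from the removed header

    def cffi_op(opcode, arg):
        # _CFFI_OP(opcode, arg): argument shifted into the high bits
        return opcode | (arg << 8)

    def cffi_getop(word):
        # _CFFI_GETOP: keep only the low byte
        return word & 0xFF

    def cffi_getarg(word):
        # _CFFI_GETARG: shift the argument back down
        return word >> 8

    word = cffi_op(_CFFI_OP_POINTER, 42)  # "pointer to the type at index 42"
    assert cffi_getop(word) == _CFFI_OP_POINTER
    assert cffi_getarg(word) == 42
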
diff --git a/functions/source/GitPullS3/cffi/recompiler.py b/functions/source/GitPullS3/cffi/recompiler.py
deleted file mode 100644
index f48b773..0000000
--- a/functions/source/GitPullS3/cffi/recompiler.py
+++ /dev/null
@@ -1,1560 +0,0 @@
-import os, sys, io
-from . import ffiplatform, model
-from .error import VerificationError
-from .cffi_opcode import *
-
-VERSION_BASE = 0x2601
-VERSION_EMBEDDED = 0x2701
-VERSION_CHAR16CHAR32 = 0x2801
-
-
-class GlobalExpr:
- def __init__(self, name, address, type_op, size=0, check_value=0):
- self.name = name
- self.address = address
- self.type_op = type_op
- self.size = size
- self.check_value = check_value
-
- def as_c_expr(self):
- return ' { "%s", (void *)%s, %s, (void *)%s },' % (
- self.name, self.address, self.type_op.as_c_expr(), self.size)
-
- def as_python_expr(self):
- return "b'%s%s',%d" % (self.type_op.as_python_bytes(), self.name,
- self.check_value)
-
-class FieldExpr:
- def __init__(self, name, field_offset, field_size, fbitsize, field_type_op):
- self.name = name
- self.field_offset = field_offset
- self.field_size = field_size
- self.fbitsize = fbitsize
- self.field_type_op = field_type_op
-
- def as_c_expr(self):
- spaces = " " * len(self.name)
- return (' { "%s", %s,\n' % (self.name, self.field_offset) +
- ' %s %s,\n' % (spaces, self.field_size) +
- ' %s %s },' % (spaces, self.field_type_op.as_c_expr()))
-
- def as_python_expr(self):
- raise NotImplementedError
-
- def as_field_python_expr(self):
- if self.field_type_op.op == OP_NOOP:
- size_expr = ''
- elif self.field_type_op.op == OP_BITFIELD:
- size_expr = format_four_bytes(self.fbitsize)
- else:
- raise NotImplementedError
- return "b'%s%s%s'" % (self.field_type_op.as_python_bytes(),
- size_expr,
- self.name)
-
-class StructUnionExpr:
- def __init__(self, name, type_index, flags, size, alignment, comment,
- first_field_index, c_fields):
- self.name = name
- self.type_index = type_index
- self.flags = flags
- self.size = size
- self.alignment = alignment
- self.comment = comment
- self.first_field_index = first_field_index
- self.c_fields = c_fields
-
- def as_c_expr(self):
- return (' { "%s", %d, %s,' % (self.name, self.type_index, self.flags)
- + '\n %s, %s, ' % (self.size, self.alignment)
- + '%d, %d ' % (self.first_field_index, len(self.c_fields))
- + ('/* %s */ ' % self.comment if self.comment else '')
- + '},')
-
- def as_python_expr(self):
- flags = eval(self.flags, G_FLAGS)
- fields_expr = [c_field.as_field_python_expr()
- for c_field in self.c_fields]
- return "(b'%s%s%s',%s)" % (
- format_four_bytes(self.type_index),
- format_four_bytes(flags),
- self.name,
- ','.join(fields_expr))
-
-class EnumExpr:
- def __init__(self, name, type_index, size, signed, allenums):
- self.name = name
- self.type_index = type_index
- self.size = size
- self.signed = signed
- self.allenums = allenums
-
- def as_c_expr(self):
- return (' { "%s", %d, _cffi_prim_int(%s, %s),\n'
- ' "%s" },' % (self.name, self.type_index,
- self.size, self.signed, self.allenums))
-
- def as_python_expr(self):
- prim_index = {
- (1, 0): PRIM_UINT8, (1, 1): PRIM_INT8,
- (2, 0): PRIM_UINT16, (2, 1): PRIM_INT16,
- (4, 0): PRIM_UINT32, (4, 1): PRIM_INT32,
- (8, 0): PRIM_UINT64, (8, 1): PRIM_INT64,
- }[self.size, self.signed]
- return "b'%s%s%s\\x00%s'" % (format_four_bytes(self.type_index),
- format_four_bytes(prim_index),
- self.name, self.allenums)
-
-class TypenameExpr:
- def __init__(self, name, type_index):
- self.name = name
- self.type_index = type_index
-
- def as_c_expr(self):
- return ' { "%s", %d },' % (self.name, self.type_index)
-
- def as_python_expr(self):
- return "b'%s%s'" % (format_four_bytes(self.type_index), self.name)
-
-
-# ____________________________________________________________
-
-
-class Recompiler:
- _num_externpy = 0
-
- def __init__(self, ffi, module_name, target_is_python=False):
- self.ffi = ffi
- self.module_name = module_name
- self.target_is_python = target_is_python
- self._version = VERSION_BASE
-
- def needs_version(self, ver):
- self._version = max(self._version, ver)
-
- def collect_type_table(self):
- self._typesdict = {}
- self._generate("collecttype")
- #
- all_decls = sorted(self._typesdict, key=str)
- #
- # prepare all FUNCTION bytecode sequences first
- self.cffi_types = []
- for tp in all_decls:
- if tp.is_raw_function:
- assert self._typesdict[tp] is None
- self._typesdict[tp] = len(self.cffi_types)
- self.cffi_types.append(tp) # placeholder
- for tp1 in tp.args:
- assert isinstance(tp1, (model.VoidType,
- model.BasePrimitiveType,
- model.PointerType,
- model.StructOrUnionOrEnum,
- model.FunctionPtrType))
- if self._typesdict[tp1] is None:
- self._typesdict[tp1] = len(self.cffi_types)
- self.cffi_types.append(tp1) # placeholder
- self.cffi_types.append('END') # placeholder
- #
- # prepare all OTHER bytecode sequences
- for tp in all_decls:
- if not tp.is_raw_function and self._typesdict[tp] is None:
- self._typesdict[tp] = len(self.cffi_types)
- self.cffi_types.append(tp) # placeholder
- if tp.is_array_type and tp.length is not None:
- self.cffi_types.append('LEN') # placeholder
- assert None not in self._typesdict.values()
- #
- # collect all structs and unions and enums
- self._struct_unions = {}
- self._enums = {}
- for tp in all_decls:
- if isinstance(tp, model.StructOrUnion):
- self._struct_unions[tp] = None
- elif isinstance(tp, model.EnumType):
- self._enums[tp] = None
- for i, tp in enumerate(sorted(self._struct_unions,
- key=lambda tp: tp.name)):
- self._struct_unions[tp] = i
- for i, tp in enumerate(sorted(self._enums,
- key=lambda tp: tp.name)):
- self._enums[tp] = i
- #
- # emit all bytecode sequences now
- for tp in all_decls:
- method = getattr(self, '_emit_bytecode_' + tp.__class__.__name__)
- method(tp, self._typesdict[tp])
- #
- # consistency check
- for op in self.cffi_types:
- assert isinstance(op, CffiOp)
- self.cffi_types = tuple(self.cffi_types) # don't change any more
-
- def _do_collect_type(self, tp):
- if not isinstance(tp, model.BaseTypeByIdentity):
- if isinstance(tp, tuple):
- for x in tp:
- self._do_collect_type(x)
- return
- if tp not in self._typesdict:
- self._typesdict[tp] = None
- if isinstance(tp, model.FunctionPtrType):
- self._do_collect_type(tp.as_raw_function())
- elif isinstance(tp, model.StructOrUnion):
- if tp.fldtypes is not None and (
- tp not in self.ffi._parser._included_declarations):
- for name1, tp1, _, _ in tp.enumfields():
- self._do_collect_type(self._field_type(tp, name1, tp1))
- else:
- for _, x in tp._get_items():
- self._do_collect_type(x)
-
- def _generate(self, step_name):
- lst = self.ffi._parser._declarations.items()
- for name, (tp, quals) in sorted(lst):
- kind, realname = name.split(' ', 1)
- try:
- method = getattr(self, '_generate_cpy_%s_%s' % (kind,
- step_name))
- except AttributeError:
- raise VerificationError(
- "not implemented in recompile(): %r" % name)
- try:
- self._current_quals = quals
- method(tp, realname)
- except Exception as e:
- model.attach_exception_info(e, name)
- raise
-
- # ----------
-
- ALL_STEPS = ["global", "field", "struct_union", "enum", "typename"]
-
- def collect_step_tables(self):
- # collect the declarations for '_cffi_globals', '_cffi_typenames', etc.
- self._lsts = {}
- for step_name in self.ALL_STEPS:
- self._lsts[step_name] = []
- self._seen_struct_unions = set()
- self._generate("ctx")
- self._add_missing_struct_unions()
- #
- for step_name in self.ALL_STEPS:
- lst = self._lsts[step_name]
- if step_name != "field":
- lst.sort(key=lambda entry: entry.name)
- self._lsts[step_name] = tuple(lst) # don't change any more
- #
- # check for a possible internal inconsistency: _cffi_struct_unions
- # should have been generated with exactly self._struct_unions
- lst = self._lsts["struct_union"]
- for tp, i in self._struct_unions.items():
- assert i < len(lst)
- assert lst[i].name == tp.name
- assert len(lst) == len(self._struct_unions)
- # same with enums
- lst = self._lsts["enum"]
- for tp, i in self._enums.items():
- assert i < len(lst)
- assert lst[i].name == tp.name
- assert len(lst) == len(self._enums)
-
- # ----------
-
- def _prnt(self, what=''):
- self._f.write(what + '\n')
-
- def write_source_to_f(self, f, preamble):
- if self.target_is_python:
- assert preamble is None
- self.write_py_source_to_f(f)
- else:
- assert preamble is not None
- self.write_c_source_to_f(f, preamble)
-
- def _rel_readlines(self, filename):
- g = open(os.path.join(os.path.dirname(__file__), filename), 'r')
- lines = g.readlines()
- g.close()
- return lines
-
- def write_c_source_to_f(self, f, preamble):
- self._f = f
- prnt = self._prnt
- if self.ffi._embedding is not None:
- prnt('#define _CFFI_USE_EMBEDDING')
- #
- # first the '#include' (actually done by inlining the file's content)
- lines = self._rel_readlines('_cffi_include.h')
- i = lines.index('#include "parse_c_type.h"\n')
- lines[i:i+1] = self._rel_readlines('parse_c_type.h')
- prnt(''.join(lines))
- #
- # if we have ffi._embedding != None, we give it here as a macro
- # and include an extra file
- base_module_name = self.module_name.split('.')[-1]
- if self.ffi._embedding is not None:
- prnt('#define _CFFI_MODULE_NAME "%s"' % (self.module_name,))
- prnt('static const char _CFFI_PYTHON_STARTUP_CODE[] = {')
- self._print_string_literal_in_array(self.ffi._embedding)
- prnt('0 };')
- prnt('#ifdef PYPY_VERSION')
- prnt('# define _CFFI_PYTHON_STARTUP_FUNC _cffi_pypyinit_%s' % (
- base_module_name,))
- prnt('#elif PY_MAJOR_VERSION >= 3')
- prnt('# define _CFFI_PYTHON_STARTUP_FUNC PyInit_%s' % (
- base_module_name,))
- prnt('#else')
- prnt('# define _CFFI_PYTHON_STARTUP_FUNC init%s' % (
- base_module_name,))
- prnt('#endif')
- lines = self._rel_readlines('_embedding.h')
- i = lines.index('#include "_cffi_errors.h"\n')
- lines[i:i+1] = self._rel_readlines('_cffi_errors.h')
- prnt(''.join(lines))
- self.needs_version(VERSION_EMBEDDED)
- #
- # then paste the C source given by the user, verbatim.
- prnt('/************************************************************/')
- prnt()
- prnt(preamble)
- prnt()
- prnt('/************************************************************/')
- prnt()
- #
- # the declaration of '_cffi_types'
- prnt('static void *_cffi_types[] = {')
- typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()])
- for i, op in enumerate(self.cffi_types):
- comment = ''
- if i in typeindex2type:
- comment = ' // ' + typeindex2type[i]._get_c_name()
- prnt('/* %2d */ %s,%s' % (i, op.as_c_expr(), comment))
- if not self.cffi_types:
- prnt(' 0')
- prnt('};')
- prnt()
- #
- # call generate_cpy_xxx_decl(), for every xxx found from
- # ffi._parser._declarations. This generates all the functions.
- self._seen_constants = set()
- self._generate("decl")
- #
- # the declaration of '_cffi_globals' and '_cffi_typenames'
- nums = {}
- for step_name in self.ALL_STEPS:
- lst = self._lsts[step_name]
- nums[step_name] = len(lst)
- if nums[step_name] > 0:
- prnt('static const struct _cffi_%s_s _cffi_%ss[] = {' % (
- step_name, step_name))
- for entry in lst:
- prnt(entry.as_c_expr())
- prnt('};')
- prnt()
- #
- # the declaration of '_cffi_includes'
- if self.ffi._included_ffis:
- prnt('static const char * const _cffi_includes[] = {')
- for ffi_to_include in self.ffi._included_ffis:
- try:
- included_module_name, included_source = (
- ffi_to_include._assigned_source[:2])
- except AttributeError:
- raise VerificationError(
- "ffi object %r includes %r, but the latter has not "
- "been prepared with set_source()" % (
- self.ffi, ffi_to_include,))
- if included_source is None:
- raise VerificationError(
- "not implemented yet: ffi.include() of a Python-based "
- "ffi inside a C-based ffi")
- prnt(' "%s",' % (included_module_name,))
- prnt(' NULL')
- prnt('};')
- prnt()
- #
- # the declaration of '_cffi_type_context'
- prnt('static const struct _cffi_type_context_s _cffi_type_context = {')
- prnt(' _cffi_types,')
- for step_name in self.ALL_STEPS:
- if nums[step_name] > 0:
- prnt(' _cffi_%ss,' % step_name)
- else:
- prnt(' NULL, /* no %ss */' % step_name)
- for step_name in self.ALL_STEPS:
- if step_name != "field":
- prnt(' %d, /* num_%ss */' % (nums[step_name], step_name))
- if self.ffi._included_ffis:
- prnt(' _cffi_includes,')
- else:
- prnt(' NULL, /* no includes */')
- prnt(' %d, /* num_types */' % (len(self.cffi_types),))
- flags = 0
- if self._num_externpy:
- flags |= 1 # set to mean that we use extern "Python"
- prnt(' %d, /* flags */' % flags)
- prnt('};')
- prnt()
- #
- # the init function
- prnt('#ifdef __GNUC__')
- prnt('# pragma GCC visibility push(default) /* for -fvisibility= */')
- prnt('#endif')
- prnt()
- prnt('#ifdef PYPY_VERSION')
- prnt('PyMODINIT_FUNC')
- prnt('_cffi_pypyinit_%s(const void *p[])' % (base_module_name,))
- prnt('{')
- if self._num_externpy:
- prnt(' if (((intptr_t)p[0]) >= 0x0A03) {')
- prnt(' _cffi_call_python_org = '
- '(void(*)(struct _cffi_externpy_s *, char *))p[1];')
- prnt(' }')
- prnt(' p[0] = (const void *)0x%x;' % self._version)
- prnt(' p[1] = &_cffi_type_context;')
- prnt('#if PY_MAJOR_VERSION >= 3')
- prnt(' return NULL;')
- prnt('#endif')
- prnt('}')
- # on Windows, distutils insists on putting init_cffi_xyz in
- # 'export_symbols', so instead of fighting it, just give up and
- # give it one
- prnt('# ifdef _MSC_VER')
- prnt(' PyMODINIT_FUNC')
- prnt('# if PY_MAJOR_VERSION >= 3')
- prnt(' PyInit_%s(void) { return NULL; }' % (base_module_name,))
- prnt('# else')
- prnt(' init%s(void) { }' % (base_module_name,))
- prnt('# endif')
- prnt('# endif')
- prnt('#elif PY_MAJOR_VERSION >= 3')
- prnt('PyMODINIT_FUNC')
- prnt('PyInit_%s(void)' % (base_module_name,))
- prnt('{')
- prnt(' return _cffi_init("%s", 0x%x, &_cffi_type_context);' % (
- self.module_name, self._version))
- prnt('}')
- prnt('#else')
- prnt('PyMODINIT_FUNC')
- prnt('init%s(void)' % (base_module_name,))
- prnt('{')
- prnt(' _cffi_init("%s", 0x%x, &_cffi_type_context);' % (
- self.module_name, self._version))
- prnt('}')
- prnt('#endif')
- prnt()
- prnt('#ifdef __GNUC__')
- prnt('# pragma GCC visibility pop')
- prnt('#endif')
- self._version = None
-
- def _to_py(self, x):
- if isinstance(x, str):
- return "b'%s'" % (x,)
- if isinstance(x, (list, tuple)):
- rep = [self._to_py(item) for item in x]
- if len(rep) == 1:
- rep.append('')
- return "(%s)" % (','.join(rep),)
- return x.as_python_expr() # Py2: unicode unexpected; Py3: bytes unexp.
-
- def write_py_source_to_f(self, f):
- self._f = f
- prnt = self._prnt
- #
- # header
- prnt("# auto-generated file")
- prnt("import _cffi_backend")
- #
- # the 'import' of the included ffis
- num_includes = len(self.ffi._included_ffis or ())
- for i in range(num_includes):
- ffi_to_include = self.ffi._included_ffis[i]
- try:
- included_module_name, included_source = (
- ffi_to_include._assigned_source[:2])
- except AttributeError:
- raise VerificationError(
- "ffi object %r includes %r, but the latter has not "
- "been prepared with set_source()" % (
- self.ffi, ffi_to_include,))
- if included_source is not None:
- raise VerificationError(
- "not implemented yet: ffi.include() of a C-based "
- "ffi inside a Python-based ffi")
- prnt('from %s import ffi as _ffi%d' % (included_module_name, i))
- prnt()
- prnt("ffi = _cffi_backend.FFI('%s'," % (self.module_name,))
- prnt(" _version = 0x%x," % (self._version,))
- self._version = None
- #
- # the '_types' keyword argument
- self.cffi_types = tuple(self.cffi_types) # don't change any more
- types_lst = [op.as_python_bytes() for op in self.cffi_types]
- prnt(' _types = %s,' % (self._to_py(''.join(types_lst)),))
- typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()])
- #
- # the keyword arguments from ALL_STEPS
- for step_name in self.ALL_STEPS:
- lst = self._lsts[step_name]
- if len(lst) > 0 and step_name != "field":
- prnt(' _%ss = %s,' % (step_name, self._to_py(lst)))
- #
- # the '_includes' keyword argument
- if num_includes > 0:
- prnt(' _includes = (%s,),' % (
- ', '.join(['_ffi%d' % i for i in range(num_includes)]),))
- #
- # the footer
- prnt(')')
-
- # ----------
-
- def _gettypenum(self, type):
- # a KeyError here is a bug. please report it! :-)
- return self._typesdict[type]
-
- def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode):
- extraarg = ''
- if isinstance(tp, model.BasePrimitiveType) and not tp.is_complex_type():
- if tp.is_integer_type() and tp.name != '_Bool':
- converter = '_cffi_to_c_int'
- extraarg = ', %s' % tp.name
- elif isinstance(tp, model.UnknownFloatType):
- # don't check with is_float_type(): it may be a 'long
-            # double' here, and _cffi_to_c_double would lose precision
- converter = '(%s)_cffi_to_c_double' % (tp.get_c_name(''),)
- else:
- cname = tp.get_c_name('')
- converter = '(%s)_cffi_to_c_%s' % (cname,
- tp.name.replace(' ', '_'))
- if cname in ('char16_t', 'char32_t'):
- self.needs_version(VERSION_CHAR16CHAR32)
- errvalue = '-1'
- #
- elif isinstance(tp, model.PointerType):
- self._convert_funcarg_to_c_ptr_or_array(tp, fromvar,
- tovar, errcode)
- return
- #
- elif (isinstance(tp, model.StructOrUnionOrEnum) or
- isinstance(tp, model.BasePrimitiveType)):
- # a struct (not a struct pointer) as a function argument;
- # or, a complex (the same code works)
- self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)'
- % (tovar, self._gettypenum(tp), fromvar))
- self._prnt(' %s;' % errcode)
- return
- #
- elif isinstance(tp, model.FunctionPtrType):
- converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('')
- extraarg = ', _cffi_type(%d)' % self._gettypenum(tp)
- errvalue = 'NULL'
- #
- else:
- raise NotImplementedError(tp)
- #
- self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg))
- self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % (
- tovar, tp.get_c_name(''), errvalue))
- self._prnt(' %s;' % errcode)
-
- def _extra_local_variables(self, tp, localvars):
- if isinstance(tp, model.PointerType):
- localvars.add('Py_ssize_t datasize')
-
- def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode):
- self._prnt(' datasize = _cffi_prepare_pointer_call_argument(')
- self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % (
- self._gettypenum(tp), fromvar, tovar))
- self._prnt(' if (datasize != 0) {')
- self._prnt(' if (datasize < 0)')
- self._prnt(' %s;' % errcode)
- self._prnt(' %s = (%s)alloca((size_t)datasize);' % (
- tovar, tp.get_c_name('')))
- self._prnt(' memset((void *)%s, 0, (size_t)datasize);' % (tovar,))
- self._prnt(' if (_cffi_convert_array_from_object('
- '(char *)%s, _cffi_type(%d), %s) < 0)' % (
- tovar, self._gettypenum(tp), fromvar))
- self._prnt(' %s;' % errcode)
- self._prnt(' }')
-
- def _convert_expr_from_c(self, tp, var, context):
- if isinstance(tp, model.BasePrimitiveType):
- if tp.is_integer_type() and tp.name != '_Bool':
- return '_cffi_from_c_int(%s, %s)' % (var, tp.name)
- elif isinstance(tp, model.UnknownFloatType):
- return '_cffi_from_c_double(%s)' % (var,)
- elif tp.name != 'long double' and not tp.is_complex_type():
- cname = tp.name.replace(' ', '_')
- if cname in ('char16_t', 'char32_t'):
- self.needs_version(VERSION_CHAR16CHAR32)
- return '_cffi_from_c_%s(%s)' % (cname, var)
- else:
- return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % (
- var, self._gettypenum(tp))
- elif isinstance(tp, (model.PointerType, model.FunctionPtrType)):
- return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % (
- var, self._gettypenum(tp))
- elif isinstance(tp, model.ArrayType):
- return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % (
- var, self._gettypenum(model.PointerType(tp.item)))
- elif isinstance(tp, model.StructOrUnion):
- if tp.fldnames is None:
- raise TypeError("'%s' is used as %s, but is opaque" % (
- tp._get_c_name(), context))
- return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % (
- var, self._gettypenum(tp))
- elif isinstance(tp, model.EnumType):
- return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % (
- var, self._gettypenum(tp))
- else:
- raise NotImplementedError(tp)
-
- # ----------
- # typedefs
-
- def _typedef_type(self, tp, name):
- return self._global_type(tp, "(*(%s *)0)" % (name,))
-
- def _generate_cpy_typedef_collecttype(self, tp, name):
- self._do_collect_type(self._typedef_type(tp, name))
-
- def _generate_cpy_typedef_decl(self, tp, name):
- pass
-
- def _typedef_ctx(self, tp, name):
- type_index = self._typesdict[tp]
- self._lsts["typename"].append(TypenameExpr(name, type_index))
-
- def _generate_cpy_typedef_ctx(self, tp, name):
- tp = self._typedef_type(tp, name)
- self._typedef_ctx(tp, name)
- if getattr(tp, "origin", None) == "unknown_type":
- self._struct_ctx(tp, tp.name, approxname=None)
- elif isinstance(tp, model.NamedPointerType):
- self._struct_ctx(tp.totype, tp.totype.name, approxname=tp.name,
- named_ptr=tp)
-
- # ----------
- # function declarations
-
- def _generate_cpy_function_collecttype(self, tp, name):
- self._do_collect_type(tp.as_raw_function())
- if tp.ellipsis and not self.target_is_python:
- self._do_collect_type(tp)
-
- def _generate_cpy_function_decl(self, tp, name):
- assert not self.target_is_python
- assert isinstance(tp, model.FunctionPtrType)
- if tp.ellipsis:
- # cannot support vararg functions better than this: check for its
- # exact type (including the fixed arguments), and build it as a
- # constant function pointer (no CPython wrapper)
- self._generate_cpy_constant_decl(tp, name)
- return
- prnt = self._prnt
- numargs = len(tp.args)
- if numargs == 0:
- argname = 'noarg'
- elif numargs == 1:
- argname = 'arg0'
- else:
- argname = 'args'
- #
- # ------------------------------
- # the 'd' version of the function, only for addressof(lib, 'func')
- arguments = []
- call_arguments = []
- context = 'argument of %s' % name
- for i, type in enumerate(tp.args):
- arguments.append(type.get_c_name(' x%d' % i, context))
- call_arguments.append('x%d' % i)
- repr_arguments = ', '.join(arguments)
- repr_arguments = repr_arguments or 'void'
- if tp.abi:
- abi = tp.abi + ' '
- else:
- abi = ''
- name_and_arguments = '%s_cffi_d_%s(%s)' % (abi, name, repr_arguments)
- prnt('static %s' % (tp.result.get_c_name(name_and_arguments),))
- prnt('{')
- call_arguments = ', '.join(call_arguments)
- result_code = 'return '
- if isinstance(tp.result, model.VoidType):
- result_code = ''
- prnt(' %s%s(%s);' % (result_code, name, call_arguments))
- prnt('}')
- #
- prnt('#ifndef PYPY_VERSION') # ------------------------------
- #
- prnt('static PyObject *')
- prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname))
- prnt('{')
- #
- context = 'argument of %s' % name
- for i, type in enumerate(tp.args):
- arg = type.get_c_name(' x%d' % i, context)
- prnt(' %s;' % arg)
- #
- localvars = set()
- for type in tp.args:
- self._extra_local_variables(type, localvars)
- for decl in localvars:
- prnt(' %s;' % (decl,))
- #
- if not isinstance(tp.result, model.VoidType):
- result_code = 'result = '
- context = 'result of %s' % name
- result_decl = ' %s;' % tp.result.get_c_name(' result', context)
- prnt(result_decl)
- else:
- result_decl = None
- result_code = ''
- #
- if len(tp.args) > 1:
- rng = range(len(tp.args))
- for i in rng:
- prnt(' PyObject *arg%d;' % i)
- prnt()
- prnt(' if (!PyArg_UnpackTuple(args, "%s", %d, %d, %s))' % (
- name, len(rng), len(rng),
- ', '.join(['&arg%d' % i for i in rng])))
- prnt(' return NULL;')
- prnt()
- #
- for i, type in enumerate(tp.args):
- self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i,
- 'return NULL')
- prnt()
- #
- prnt(' Py_BEGIN_ALLOW_THREADS')
- prnt(' _cffi_restore_errno();')
- call_arguments = ['x%d' % i for i in range(len(tp.args))]
- call_arguments = ', '.join(call_arguments)
- prnt(' { %s%s(%s); }' % (result_code, name, call_arguments))
- prnt(' _cffi_save_errno();')
- prnt(' Py_END_ALLOW_THREADS')
- prnt()
- #
- prnt(' (void)self; /* unused */')
- if numargs == 0:
- prnt(' (void)noarg; /* unused */')
- if result_code:
- prnt(' return %s;' %
- self._convert_expr_from_c(tp.result, 'result', 'result type'))
- else:
- prnt(' Py_INCREF(Py_None);')
- prnt(' return Py_None;')
- prnt('}')
- #
- prnt('#else') # ------------------------------
- #
- # the PyPy version: need to replace struct/union arguments with
- # pointers, and if the result is a struct/union, insert a first
- # arg that is a pointer to the result. We also do that for
- # complex args and return type.
- def need_indirection(type):
- return (isinstance(type, model.StructOrUnion) or
- (isinstance(type, model.PrimitiveType) and
- type.is_complex_type()))
- difference = False
- arguments = []
- call_arguments = []
- context = 'argument of %s' % name
- for i, type in enumerate(tp.args):
- indirection = ''
- if need_indirection(type):
- indirection = '*'
- difference = True
- arg = type.get_c_name(' %sx%d' % (indirection, i), context)
- arguments.append(arg)
- call_arguments.append('%sx%d' % (indirection, i))
- tp_result = tp.result
- if need_indirection(tp_result):
- context = 'result of %s' % name
- arg = tp_result.get_c_name(' *result', context)
- arguments.insert(0, arg)
- tp_result = model.void_type
- result_decl = None
- result_code = '*result = '
- difference = True
- if difference:
- repr_arguments = ', '.join(arguments)
- repr_arguments = repr_arguments or 'void'
- name_and_arguments = '%s_cffi_f_%s(%s)' % (abi, name,
- repr_arguments)
- prnt('static %s' % (tp_result.get_c_name(name_and_arguments),))
- prnt('{')
- if result_decl:
- prnt(result_decl)
- call_arguments = ', '.join(call_arguments)
- prnt(' { %s%s(%s); }' % (result_code, name, call_arguments))
- if result_decl:
- prnt(' return result;')
- prnt('}')
- else:
- prnt('# define _cffi_f_%s _cffi_d_%s' % (name, name))
- #
- prnt('#endif') # ------------------------------
- prnt()
-
- def _generate_cpy_function_ctx(self, tp, name):
- if tp.ellipsis and not self.target_is_python:
- self._generate_cpy_constant_ctx(tp, name)
- return
- type_index = self._typesdict[tp.as_raw_function()]
- numargs = len(tp.args)
- if self.target_is_python:
- meth_kind = OP_DLOPEN_FUNC
- elif numargs == 0:
- meth_kind = OP_CPYTHON_BLTN_N # 'METH_NOARGS'
- elif numargs == 1:
- meth_kind = OP_CPYTHON_BLTN_O # 'METH_O'
- else:
- meth_kind = OP_CPYTHON_BLTN_V # 'METH_VARARGS'
- self._lsts["global"].append(
- GlobalExpr(name, '_cffi_f_%s' % name,
- CffiOp(meth_kind, type_index),
- size='_cffi_d_%s' % name))
-
- # ----------
- # named structs or unions
-
- def _field_type(self, tp_struct, field_name, tp_field):
- if isinstance(tp_field, model.ArrayType):
- actual_length = tp_field.length
- if actual_length == '...':
- ptr_struct_name = tp_struct.get_c_name('*')
- actual_length = '_cffi_array_len(((%s)0)->%s)' % (
- ptr_struct_name, field_name)
- tp_item = self._field_type(tp_struct, '%s[0]' % field_name,
- tp_field.item)
- tp_field = model.ArrayType(tp_item, actual_length)
- return tp_field
-
- def _struct_collecttype(self, tp):
- self._do_collect_type(tp)
- if self.target_is_python:
- # also requires nested anon struct/unions in ABI mode, recursively
- for fldtype in tp.anonymous_struct_fields():
- self._struct_collecttype(fldtype)
-
- def _struct_decl(self, tp, cname, approxname):
- if tp.fldtypes is None:
- return
- prnt = self._prnt
- checkfuncname = '_cffi_checkfld_%s' % (approxname,)
- prnt('_CFFI_UNUSED_FN')
- prnt('static void %s(%s *p)' % (checkfuncname, cname))
- prnt('{')
- prnt(' /* only to generate compile-time warnings or errors */')
- prnt(' (void)p;')
- for fname, ftype, fbitsize, fqual in tp.enumfields():
- try:
- if ftype.is_integer_type() or fbitsize >= 0:
- # accept all integers, but complain on float or double
- prnt(" (void)((p->%s) | 0); /* check that '%s.%s' is "
- "an integer */" % (fname, cname, fname))
- continue
- # only accept exactly the type declared, except that '[]'
- # is interpreted as a '*' and so will match any array length.
- # (It would also match '*', but that's harder to detect...)
- while (isinstance(ftype, model.ArrayType)
- and (ftype.length is None or ftype.length == '...')):
- ftype = ftype.item
- fname = fname + '[0]'
- prnt(' { %s = &p->%s; (void)tmp; }' % (
- ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual),
- fname))
- except VerificationError as e:
- prnt(' /* %s */' % str(e)) # cannot verify it, ignore
- prnt('}')
- prnt('struct _cffi_align_%s { char x; %s y; };' % (approxname, cname))
- prnt()
-
- def _struct_ctx(self, tp, cname, approxname, named_ptr=None):
- type_index = self._typesdict[tp]
- reason_for_not_expanding = None
- flags = []
- if isinstance(tp, model.UnionType):
- flags.append("_CFFI_F_UNION")
- if tp.fldtypes is None:
- flags.append("_CFFI_F_OPAQUE")
- reason_for_not_expanding = "opaque"
- if (tp not in self.ffi._parser._included_declarations and
- (named_ptr is None or
- named_ptr not in self.ffi._parser._included_declarations)):
- if tp.fldtypes is None:
- pass # opaque
- elif tp.partial or any(tp.anonymous_struct_fields()):
- pass # field layout obtained silently from the C compiler
- else:
- flags.append("_CFFI_F_CHECK_FIELDS")
- if tp.packed:
- flags.append("_CFFI_F_PACKED")
- else:
- flags.append("_CFFI_F_EXTERNAL")
- reason_for_not_expanding = "external"
- flags = '|'.join(flags) or '0'
- c_fields = []
- if reason_for_not_expanding is None:
- expand_anonymous_struct_union = not self.target_is_python
- enumfields = list(tp.enumfields(expand_anonymous_struct_union))
- for fldname, fldtype, fbitsize, fqual in enumfields:
- fldtype = self._field_type(tp, fldname, fldtype)
- self._check_not_opaque(fldtype,
- "field '%s.%s'" % (tp.name, fldname))
- # cname is None for _add_missing_struct_unions() only
- op = OP_NOOP
- if fbitsize >= 0:
- op = OP_BITFIELD
- size = '%d /* bits */' % fbitsize
- elif cname is None or (
- isinstance(fldtype, model.ArrayType) and
- fldtype.length is None):
- size = '(size_t)-1'
- else:
- size = 'sizeof(((%s)0)->%s)' % (
- tp.get_c_name('*') if named_ptr is None
- else named_ptr.name,
- fldname)
- if cname is None or fbitsize >= 0:
- offset = '(size_t)-1'
- elif named_ptr is not None:
- offset = '((char *)&((%s)0)->%s) - (char *)0' % (
- named_ptr.name, fldname)
- else:
- offset = 'offsetof(%s, %s)' % (tp.get_c_name(''), fldname)
- c_fields.append(
- FieldExpr(fldname, offset, size, fbitsize,
- CffiOp(op, self._typesdict[fldtype])))
- first_field_index = len(self._lsts["field"])
- self._lsts["field"].extend(c_fields)
- #
- if cname is None: # unknown name, for _add_missing_struct_unions
- size = '(size_t)-2'
- align = -2
- comment = "unnamed"
- else:
- if named_ptr is not None:
- size = 'sizeof(*(%s)0)' % (named_ptr.name,)
- align = '-1 /* unknown alignment */'
- else:
- size = 'sizeof(%s)' % (cname,)
- align = 'offsetof(struct _cffi_align_%s, y)' % (approxname,)
- comment = None
- else:
- size = '(size_t)-1'
- align = -1
- first_field_index = -1
- comment = reason_for_not_expanding
- self._lsts["struct_union"].append(
- StructUnionExpr(tp.name, type_index, flags, size, align, comment,
- first_field_index, c_fields))
- self._seen_struct_unions.add(tp)
-
- def _check_not_opaque(self, tp, location):
- while isinstance(tp, model.ArrayType):
- tp = tp.item
- if isinstance(tp, model.StructOrUnion) and tp.fldtypes is None:
- raise TypeError(
- "%s is of an opaque type (not declared in cdef())" % location)
-
- def _add_missing_struct_unions(self):
- # not very nice, but some struct declarations might be missing
- # because they don't have any known C name. Check that they are
- # not partial (we can't complete or verify them!) and emit them
- # anonymously.
- lst = list(self._struct_unions.items())
- lst.sort(key=lambda tp_order: tp_order[1])
- for tp, order in lst:
- if tp not in self._seen_struct_unions:
- if tp.partial:
- raise NotImplementedError("internal inconsistency: %r is "
- "partial but was not seen at "
- "this point" % (tp,))
- if tp.name.startswith('$') and tp.name[1:].isdigit():
- approxname = tp.name[1:]
- elif tp.name == '_IO_FILE' and tp.forcename == 'FILE':
- approxname = 'FILE'
- self._typedef_ctx(tp, 'FILE')
- else:
- raise NotImplementedError("internal inconsistency: %r" %
- (tp,))
- self._struct_ctx(tp, None, approxname)
-
- def _generate_cpy_struct_collecttype(self, tp, name):
- self._struct_collecttype(tp)
- _generate_cpy_union_collecttype = _generate_cpy_struct_collecttype
-
- def _struct_names(self, tp):
- cname = tp.get_c_name('')
- if ' ' in cname:
- return cname, cname.replace(' ', '_')
- else:
- return cname, '_' + cname
-
- def _generate_cpy_struct_decl(self, tp, name):
- self._struct_decl(tp, *self._struct_names(tp))
- _generate_cpy_union_decl = _generate_cpy_struct_decl
-
- def _generate_cpy_struct_ctx(self, tp, name):
- self._struct_ctx(tp, *self._struct_names(tp))
- _generate_cpy_union_ctx = _generate_cpy_struct_ctx
-
- # ----------
- # 'anonymous' declarations. These are produced for anonymous structs
- # or unions; the 'name' is obtained by a typedef.
-
- def _generate_cpy_anonymous_collecttype(self, tp, name):
- if isinstance(tp, model.EnumType):
- self._generate_cpy_enum_collecttype(tp, name)
- else:
- self._struct_collecttype(tp)
-
- def _generate_cpy_anonymous_decl(self, tp, name):
- if isinstance(tp, model.EnumType):
- self._generate_cpy_enum_decl(tp)
- else:
- self._struct_decl(tp, name, 'typedef_' + name)
-
- def _generate_cpy_anonymous_ctx(self, tp, name):
- if isinstance(tp, model.EnumType):
- self._enum_ctx(tp, name)
- else:
- self._struct_ctx(tp, name, 'typedef_' + name)
-
- # ----------
- # constants, declared with "static const ..."
-
- def _generate_cpy_const(self, is_int, name, tp=None, category='const',
- check_value=None):
- if (category, name) in self._seen_constants:
- raise VerificationError(
- "duplicate declaration of %s '%s'" % (category, name))
- self._seen_constants.add((category, name))
- #
- prnt = self._prnt
- funcname = '_cffi_%s_%s' % (category, name)
- if is_int:
- prnt('static int %s(unsigned long long *o)' % funcname)
- prnt('{')
- prnt(' int n = (%s) <= 0;' % (name,))
- prnt(' *o = (unsigned long long)((%s) | 0);'
- ' /* check that %s is an integer */' % (name, name))
- if check_value is not None:
- if check_value > 0:
- check_value = '%dU' % (check_value,)
- prnt(' if (!_cffi_check_int(*o, n, %s))' % (check_value,))
- prnt(' n |= 2;')
- prnt(' return n;')
- prnt('}')
- else:
- assert check_value is None
- prnt('static void %s(char *o)' % funcname)
- prnt('{')
- prnt(' *(%s)o = %s;' % (tp.get_c_name('*'), name))
- prnt('}')
- prnt()
-
- def _generate_cpy_constant_collecttype(self, tp, name):
- is_int = tp.is_integer_type()
- if not is_int or self.target_is_python:
- self._do_collect_type(tp)
-
- def _generate_cpy_constant_decl(self, tp, name):
- is_int = tp.is_integer_type()
- self._generate_cpy_const(is_int, name, tp)
-
- def _generate_cpy_constant_ctx(self, tp, name):
- if not self.target_is_python and tp.is_integer_type():
- type_op = CffiOp(OP_CONSTANT_INT, -1)
- else:
- if self.target_is_python:
- const_kind = OP_DLOPEN_CONST
- else:
- const_kind = OP_CONSTANT
- type_index = self._typesdict[tp]
- type_op = CffiOp(const_kind, type_index)
- self._lsts["global"].append(
- GlobalExpr(name, '_cffi_const_%s' % name, type_op))
-
- # ----------
- # enums
-
- def _generate_cpy_enum_collecttype(self, tp, name):
- self._do_collect_type(tp)
-
- def _generate_cpy_enum_decl(self, tp, name=None):
- for enumerator in tp.enumerators:
- self._generate_cpy_const(True, enumerator)
-
- def _enum_ctx(self, tp, cname):
- type_index = self._typesdict[tp]
- type_op = CffiOp(OP_ENUM, -1)
- if self.target_is_python:
- tp.check_not_partial()
- for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
- self._lsts["global"].append(
- GlobalExpr(enumerator, '_cffi_const_%s' % enumerator, type_op,
- check_value=enumvalue))
- #
- if cname is not None and '$' not in cname and not self.target_is_python:
- size = "sizeof(%s)" % cname
- signed = "((%s)-1) <= 0" % cname
- else:
- basetp = tp.build_baseinttype(self.ffi, [])
- size = self.ffi.sizeof(basetp)
- signed = int(int(self.ffi.cast(basetp, -1)) < 0)
- allenums = ",".join(tp.enumerators)
- self._lsts["enum"].append(
- EnumExpr(tp.name, type_index, size, signed, allenums))
-
- def _generate_cpy_enum_ctx(self, tp, name):
- self._enum_ctx(tp, tp._get_c_name())
-
- # ----------
- # macros: for now only for integers
-
- def _generate_cpy_macro_collecttype(self, tp, name):
- pass
-
- def _generate_cpy_macro_decl(self, tp, name):
- if tp == '...':
- check_value = None
- else:
- check_value = tp # an integer
- self._generate_cpy_const(True, name, check_value=check_value)
-
- def _generate_cpy_macro_ctx(self, tp, name):
- if tp == '...':
- if self.target_is_python:
- raise VerificationError(
- "cannot use the syntax '...' in '#define %s ...' when "
- "using the ABI mode" % (name,))
- check_value = None
- else:
- check_value = tp # an integer
- type_op = CffiOp(OP_CONSTANT_INT, -1)
- self._lsts["global"].append(
- GlobalExpr(name, '_cffi_const_%s' % name, type_op,
- check_value=check_value))
-
- # ----------
- # global variables
-
- def _global_type(self, tp, global_name):
- if isinstance(tp, model.ArrayType):
- actual_length = tp.length
- if actual_length == '...':
- actual_length = '_cffi_array_len(%s)' % (global_name,)
- tp_item = self._global_type(tp.item, '%s[0]' % global_name)
- tp = model.ArrayType(tp_item, actual_length)
- return tp
-
- def _generate_cpy_variable_collecttype(self, tp, name):
- self._do_collect_type(self._global_type(tp, name))
-
- def _generate_cpy_variable_decl(self, tp, name):
- prnt = self._prnt
- tp = self._global_type(tp, name)
- if isinstance(tp, model.ArrayType) and tp.length is None:
- tp = tp.item
- ampersand = ''
- else:
- ampersand = '&'
- # This code assumes that casts from "tp *" to "void *" is a
- # no-op, i.e. a function that returns a "tp *" can be called
- # as if it returned a "void *". This should be generally true
- # on any modern machine. The only exception to that rule (on
- # uncommon architectures, and as far as I can tell) might be
- # if 'tp' were a function type, but that is not possible here.
- # (If 'tp' is a function _pointer_ type, then casts from "fn_t
- # **" to "void *" are again no-ops, as far as I can tell.)
- decl = '*_cffi_var_%s(void)' % (name,)
- prnt('static ' + tp.get_c_name(decl, quals=self._current_quals))
- prnt('{')
- prnt(' return %s(%s);' % (ampersand, name))
- prnt('}')
- prnt()
-
- def _generate_cpy_variable_ctx(self, tp, name):
- tp = self._global_type(tp, name)
- type_index = self._typesdict[tp]
- if self.target_is_python:
- op = OP_GLOBAL_VAR
- else:
- op = OP_GLOBAL_VAR_F
- self._lsts["global"].append(
- GlobalExpr(name, '_cffi_var_%s' % name, CffiOp(op, type_index)))
-
- # ----------
- # extern "Python"
-
- def _generate_cpy_extern_python_collecttype(self, tp, name):
- assert isinstance(tp, model.FunctionPtrType)
- self._do_collect_type(tp)
- _generate_cpy_dllexport_python_collecttype = \
- _generate_cpy_extern_python_plus_c_collecttype = \
- _generate_cpy_extern_python_collecttype
-
- def _extern_python_decl(self, tp, name, tag_and_space):
- prnt = self._prnt
- if isinstance(tp.result, model.VoidType):
- size_of_result = '0'
- else:
- context = 'result of %s' % name
- size_of_result = '(int)sizeof(%s)' % (
- tp.result.get_c_name('', context),)
- prnt('static struct _cffi_externpy_s _cffi_externpy__%s =' % name)
- prnt(' { "%s.%s", %s };' % (self.module_name, name, size_of_result))
- prnt()
- #
- arguments = []
- context = 'argument of %s' % name
- for i, type in enumerate(tp.args):
- arg = type.get_c_name(' a%d' % i, context)
- arguments.append(arg)
- #
- repr_arguments = ', '.join(arguments)
- repr_arguments = repr_arguments or 'void'
- name_and_arguments = '%s(%s)' % (name, repr_arguments)
- if tp.abi == "__stdcall":
- name_and_arguments = '_cffi_stdcall ' + name_and_arguments
- #
- def may_need_128_bits(tp):
- return (isinstance(tp, model.PrimitiveType) and
- tp.name == 'long double')
- #
- size_of_a = max(len(tp.args)*8, 8)
- if may_need_128_bits(tp.result):
- size_of_a = max(size_of_a, 16)
- if isinstance(tp.result, model.StructOrUnion):
- size_of_a = 'sizeof(%s) > %d ? sizeof(%s) : %d' % (
- tp.result.get_c_name(''), size_of_a,
- tp.result.get_c_name(''), size_of_a)
- prnt('%s%s' % (tag_and_space, tp.result.get_c_name(name_and_arguments)))
- prnt('{')
- prnt(' char a[%s];' % size_of_a)
- prnt(' char *p = a;')
- for i, type in enumerate(tp.args):
- arg = 'a%d' % i
- if (isinstance(type, model.StructOrUnion) or
- may_need_128_bits(type)):
- arg = '&' + arg
- type = model.PointerType(type)
- prnt(' *(%s)(p + %d) = %s;' % (type.get_c_name('*'), i*8, arg))
- prnt(' _cffi_call_python(&_cffi_externpy__%s, p);' % name)
- if not isinstance(tp.result, model.VoidType):
- prnt(' return *(%s)p;' % (tp.result.get_c_name('*'),))
- prnt('}')
- prnt()
- self._num_externpy += 1
-
- def _generate_cpy_extern_python_decl(self, tp, name):
- self._extern_python_decl(tp, name, 'static ')
-
- def _generate_cpy_dllexport_python_decl(self, tp, name):
- self._extern_python_decl(tp, name, 'CFFI_DLLEXPORT ')
-
- def _generate_cpy_extern_python_plus_c_decl(self, tp, name):
- self._extern_python_decl(tp, name, '')
-
- def _generate_cpy_extern_python_ctx(self, tp, name):
- if self.target_is_python:
- raise VerificationError(
- "cannot use 'extern \"Python\"' in the ABI mode")
- if tp.ellipsis:
- raise NotImplementedError("a vararg function is extern \"Python\"")
- type_index = self._typesdict[tp]
- type_op = CffiOp(OP_EXTERN_PYTHON, type_index)
- self._lsts["global"].append(
- GlobalExpr(name, '&_cffi_externpy__%s' % name, type_op, name))
-
- _generate_cpy_dllexport_python_ctx = \
- _generate_cpy_extern_python_plus_c_ctx = \
- _generate_cpy_extern_python_ctx
-
- def _print_string_literal_in_array(self, s):
- prnt = self._prnt
- prnt('// # NB. this is not a string because of a size limit in MSVC')
- for line in s.splitlines(True):
- prnt(('// ' + line).rstrip())
- printed_line = ''
- for c in line:
- if len(printed_line) >= 76:
- prnt(printed_line)
- printed_line = ''
- printed_line += '%d,' % (ord(c),)
- prnt(printed_line)
-
- # ----------
- # emitting the opcodes for individual types
-
- def _emit_bytecode_VoidType(self, tp, index):
- self.cffi_types[index] = CffiOp(OP_PRIMITIVE, PRIM_VOID)
-
- def _emit_bytecode_PrimitiveType(self, tp, index):
- prim_index = PRIMITIVE_TO_INDEX[tp.name]
- self.cffi_types[index] = CffiOp(OP_PRIMITIVE, prim_index)
-
- def _emit_bytecode_UnknownIntegerType(self, tp, index):
- s = ('_cffi_prim_int(sizeof(%s), (\n'
- ' ((%s)-1) | 0 /* check that %s is an integer type */\n'
- ' ) <= 0)' % (tp.name, tp.name, tp.name))
- self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s)
-
- def _emit_bytecode_UnknownFloatType(self, tp, index):
- s = ('_cffi_prim_float(sizeof(%s) *\n'
- ' (((%s)1) / 2) * 2 /* integer => 0, float => 1 */\n'
- ' )' % (tp.name, tp.name))
- self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s)
-
- def _emit_bytecode_RawFunctionType(self, tp, index):
- self.cffi_types[index] = CffiOp(OP_FUNCTION, self._typesdict[tp.result])
- index += 1
- for tp1 in tp.args:
- realindex = self._typesdict[tp1]
- if index != realindex:
- if isinstance(tp1, model.PrimitiveType):
- self._emit_bytecode_PrimitiveType(tp1, index)
- else:
- self.cffi_types[index] = CffiOp(OP_NOOP, realindex)
- index += 1
- flags = int(tp.ellipsis)
- if tp.abi is not None:
- if tp.abi == '__stdcall':
- flags |= 2
- else:
- raise NotImplementedError("abi=%r" % (tp.abi,))
- self.cffi_types[index] = CffiOp(OP_FUNCTION_END, flags)
-
- def _emit_bytecode_PointerType(self, tp, index):
- self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[tp.totype])
-
- _emit_bytecode_ConstPointerType = _emit_bytecode_PointerType
- _emit_bytecode_NamedPointerType = _emit_bytecode_PointerType
-
- def _emit_bytecode_FunctionPtrType(self, tp, index):
- raw = tp.as_raw_function()
- self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[raw])
-
- def _emit_bytecode_ArrayType(self, tp, index):
- item_index = self._typesdict[tp.item]
- if tp.length is None:
- self.cffi_types[index] = CffiOp(OP_OPEN_ARRAY, item_index)
- elif tp.length == '...':
- raise VerificationError(
- "type %s badly placed: the '...' array length can only be "
- "used on global arrays or on fields of structures" % (
- str(tp).replace('/*...*/', '...'),))
- else:
- assert self.cffi_types[index + 1] == 'LEN'
- self.cffi_types[index] = CffiOp(OP_ARRAY, item_index)
- self.cffi_types[index + 1] = CffiOp(None, str(tp.length))
-
- def _emit_bytecode_StructType(self, tp, index):
- struct_index = self._struct_unions[tp]
- self.cffi_types[index] = CffiOp(OP_STRUCT_UNION, struct_index)
- _emit_bytecode_UnionType = _emit_bytecode_StructType
-
- def _emit_bytecode_EnumType(self, tp, index):
- enum_index = self._enums[tp]
- self.cffi_types[index] = CffiOp(OP_ENUM, enum_index)
-
-
-if sys.version_info >= (3,):
- NativeIO = io.StringIO
-else:
- class NativeIO(io.BytesIO):
- def write(self, s):
- if isinstance(s, unicode):
- s = s.encode('ascii')
- super(NativeIO, self).write(s)
-
-def _make_c_or_py_source(ffi, module_name, preamble, target_file, verbose):
- if verbose:
- print("generating %s" % (target_file,))
- recompiler = Recompiler(ffi, module_name,
- target_is_python=(preamble is None))
- recompiler.collect_type_table()
- recompiler.collect_step_tables()
- f = NativeIO()
- recompiler.write_source_to_f(f, preamble)
- output = f.getvalue()
- try:
- with open(target_file, 'r') as f1:
- if f1.read(len(output) + 1) != output:
- raise IOError
- if verbose:
- print("(already up-to-date)")
- return False # already up-to-date
- except IOError:
- tmp_file = '%s.~%d' % (target_file, os.getpid())
- with open(tmp_file, 'w') as f1:
- f1.write(output)
- try:
- os.rename(tmp_file, target_file)
- except OSError:
- os.unlink(target_file)
- os.rename(tmp_file, target_file)
- return True
-
-def make_c_source(ffi, module_name, preamble, target_c_file, verbose=False):
- assert preamble is not None
- return _make_c_or_py_source(ffi, module_name, preamble, target_c_file,
- verbose)
-
-def make_py_source(ffi, module_name, target_py_file, verbose=False):
- return _make_c_or_py_source(ffi, module_name, None, target_py_file,
- verbose)
-
-def _modname_to_file(outputdir, modname, extension):
- parts = modname.split('.')
- try:
- os.makedirs(os.path.join(outputdir, *parts[:-1]))
- except OSError:
- pass
- parts[-1] += extension
- return os.path.join(outputdir, *parts), parts
-
-
-# Aaargh. Distutils is not tested at all for the purpose of compiling
-# DLLs that are not extension modules. Here are some hacks to work
-# around that, in the _patch_for_*() functions...
-
-def _patch_meth(patchlist, cls, name, new_meth):
- old = getattr(cls, name)
- patchlist.append((cls, name, old))
- setattr(cls, name, new_meth)
- return old
-
-def _unpatch_meths(patchlist):
- for cls, name, old_meth in reversed(patchlist):
- setattr(cls, name, old_meth)
-
-def _patch_for_embedding(patchlist):
- if sys.platform == 'win32':
- # we must not remove the manifest when building for embedding!
- from distutils.msvc9compiler import MSVCCompiler
- _patch_meth(patchlist, MSVCCompiler, '_remove_visual_c_ref',
- lambda self, manifest_file: manifest_file)
-
- if sys.platform == 'darwin':
- # we must not make a '-bundle', but a '-dynamiclib' instead
- from distutils.ccompiler import CCompiler
- def my_link_shared_object(self, *args, **kwds):
- if '-bundle' in self.linker_so:
- self.linker_so = list(self.linker_so)
- i = self.linker_so.index('-bundle')
- self.linker_so[i] = '-dynamiclib'
- return old_link_shared_object(self, *args, **kwds)
- old_link_shared_object = _patch_meth(patchlist, CCompiler,
- 'link_shared_object',
- my_link_shared_object)
-
-def _patch_for_target(patchlist, target):
- from distutils.command.build_ext import build_ext
- # if 'target' is different from '*', we need to patch some internal
- # method to just return this 'target' value, instead of having it
- # built from module_name
- if target.endswith('.*'):
- target = target[:-2]
- if sys.platform == 'win32':
- target += '.dll'
- elif sys.platform == 'darwin':
- target += '.dylib'
- else:
- target += '.so'
- _patch_meth(patchlist, build_ext, 'get_ext_filename',
- lambda self, ext_name: target)
-
-
-def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True,
- c_file=None, source_extension='.c', extradir=None,
- compiler_verbose=1, target=None, debug=None, **kwds):
- if not isinstance(module_name, str):
- module_name = module_name.encode('ascii')
- if ffi._windows_unicode:
- ffi._apply_windows_unicode(kwds)
- if preamble is not None:
- embedding = (ffi._embedding is not None)
- if embedding:
- ffi._apply_embedding_fix(kwds)
- if c_file is None:
- c_file, parts = _modname_to_file(tmpdir, module_name,
- source_extension)
- if extradir:
- parts = [extradir] + parts
- ext_c_file = os.path.join(*parts)
- else:
- ext_c_file = c_file
- #
- if target is None:
- if embedding:
- target = '%s.*' % module_name
- else:
- target = '*'
- #
- ext = ffiplatform.get_extension(ext_c_file, module_name, **kwds)
- updated = make_c_source(ffi, module_name, preamble, c_file,
- verbose=compiler_verbose)
- if call_c_compiler:
- patchlist = []
- cwd = os.getcwd()
- try:
- if embedding:
- _patch_for_embedding(patchlist)
- if target != '*':
- _patch_for_target(patchlist, target)
- if compiler_verbose:
- if tmpdir == '.':
- msg = 'the current directory is'
- else:
- msg = 'setting the current directory to'
- print('%s %r' % (msg, os.path.abspath(tmpdir)))
- os.chdir(tmpdir)
- outputfilename = ffiplatform.compile('.', ext,
- compiler_verbose, debug)
- finally:
- os.chdir(cwd)
- _unpatch_meths(patchlist)
- return outputfilename
- else:
- return ext, updated
- else:
- if c_file is None:
- c_file, _ = _modname_to_file(tmpdir, module_name, '.py')
- updated = make_py_source(ffi, module_name, c_file,
- verbose=compiler_verbose)
- if call_c_compiler:
- return c_file
- else:
- return None, updated
-
-def _verify(ffi, module_name, preamble, *args, **kwds):
- # FOR TESTS ONLY
- from testing.udir import udir
- import imp
- assert module_name not in sys.modules, "module name conflict: %r" % (
- module_name,)
- kwds.setdefault('tmpdir', str(udir))
- outputfilename = recompile(ffi, module_name, preamble, *args, **kwds)
- module = imp.load_dynamic(module_name, outputfilename)
- #
- # hack hack hack: copy all *bound methods* from module.ffi back to the
- # ffi instance. Then calls like ffi.new() will invoke module.ffi.new().
- for name in dir(module.ffi):
- if not name.startswith('_'):
- attr = getattr(module.ffi, name)
- if attr is not getattr(ffi, name, object()):
- setattr(ffi, name, attr)
- def typeof_disabled(*args, **kwds):
- raise NotImplementedError
- ffi._typeof = typeof_disabled
- for name in dir(ffi):
- if not name.startswith('_') and not hasattr(module.ffi, name):
- setattr(ffi, name, NotImplemented)
- return module.lib
diff --git a/functions/source/GitPullS3/cffi/setuptools_ext.py b/functions/source/GitPullS3/cffi/setuptools_ext.py
deleted file mode 100644
index 58fb8a3..0000000
--- a/functions/source/GitPullS3/cffi/setuptools_ext.py
+++ /dev/null
@@ -1,204 +0,0 @@
-import os
-import sys
-
-try:
- basestring
-except NameError:
- # Python 3.x
- basestring = str
-
-def error(msg):
- from distutils.errors import DistutilsSetupError
- raise DistutilsSetupError(msg)
-
-
-def execfile(filename, glob):
- # We use execfile() (here rewritten for Python 3) instead of
- # __import__() to load the build script. The problem with
- # a normal import is that in some packages, the intermediate
- # __init__.py files may already try to import the file that
- # we are generating.
- with open(filename) as f:
- src = f.read()
- src += '\n' # Python 2.6 compatibility
- code = compile(src, filename, 'exec')
- exec(code, glob, glob)
-
-
-def add_cffi_module(dist, mod_spec):
- from cffi.api import FFI
-
- if not isinstance(mod_spec, basestring):
- error("argument to 'cffi_modules=...' must be a str or a list of str,"
- " not %r" % (type(mod_spec).__name__,))
- mod_spec = str(mod_spec)
- try:
- build_file_name, ffi_var_name = mod_spec.split(':')
- except ValueError:
- error("%r must be of the form 'path/build.py:ffi_variable'" %
- (mod_spec,))
- if not os.path.exists(build_file_name):
- ext = ''
- rewritten = build_file_name.replace('.', '/') + '.py'
- if os.path.exists(rewritten):
- ext = ' (rewrite cffi_modules to [%r])' % (
- rewritten + ':' + ffi_var_name,)
- error("%r does not name an existing file%s" % (build_file_name, ext))
-
- mod_vars = {'__name__': '__cffi__', '__file__': build_file_name}
- execfile(build_file_name, mod_vars)
-
- try:
- ffi = mod_vars[ffi_var_name]
- except KeyError:
- error("%r: object %r not found in module" % (mod_spec,
- ffi_var_name))
- if not isinstance(ffi, FFI):
- ffi = ffi() # maybe it's a function instead of directly an ffi
- if not isinstance(ffi, FFI):
- error("%r is not an FFI instance (got %r)" % (mod_spec,
- type(ffi).__name__))
- if not hasattr(ffi, '_assigned_source'):
- error("%r: the set_source() method was not called" % (mod_spec,))
- module_name, source, source_extension, kwds = ffi._assigned_source
- if ffi._windows_unicode:
- kwds = kwds.copy()
- ffi._apply_windows_unicode(kwds)
-
- if source is None:
- _add_py_module(dist, ffi, module_name)
- else:
- _add_c_module(dist, ffi, module_name, source, source_extension, kwds)
-
-def _set_py_limited_api(Extension, kwds):
- """
- Add py_limited_api to kwds if setuptools >= 26 is in use.
- Do not alter the setting if it already exists.
- Setuptools takes care of ignoring the flag on Python 2 and PyPy.
-
- CPython itself should ignore the flag in a debugging version
- (by not listing .abi3.so in the extensions it supports), but
- it doesn't so far, creating troubles. That's why we check
- for "not hasattr(sys, 'gettotalrefcount')" (the 2.7 compatible equivalent
- of 'd' not in sys.abiflags). (http://bugs.python.org/issue28401)
-
- On Windows, it's better not to use py_limited_api until issue #355
- can be resolved (by having virtualenv copy PYTHON3.DLL). See also
- the start of _cffi_include.h.
- """
- if ('py_limited_api' not in kwds and not hasattr(sys, 'gettotalrefcount')
- and sys.platform != 'win32'):
- import setuptools
- try:
- setuptools_major_version = int(setuptools.__version__.partition('.')[0])
- if setuptools_major_version >= 26:
- kwds['py_limited_api'] = True
- except ValueError: # certain development versions of setuptools
- # If we don't know the version number of setuptools, we
- # try to set 'py_limited_api' anyway. At worst, we get a
- # warning.
- kwds['py_limited_api'] = True
- return kwds
-
-def _add_c_module(dist, ffi, module_name, source, source_extension, kwds):
- from distutils.core import Extension
- # We are a setuptools extension. Need this build_ext for py_limited_api.
- from setuptools.command.build_ext import build_ext
- from distutils.dir_util import mkpath
- from distutils import log
- from cffi import recompiler
-
- allsources = ['$PLACEHOLDER']
- allsources.extend(kwds.pop('sources', []))
- kwds = _set_py_limited_api(Extension, kwds)
- ext = Extension(name=module_name, sources=allsources, **kwds)
-
- def make_mod(tmpdir, pre_run=None):
- c_file = os.path.join(tmpdir, module_name + source_extension)
- log.info("generating cffi module %r" % c_file)
- mkpath(tmpdir)
- # a setuptools-only, API-only hook: called with the "ext" and "ffi"
- # arguments just before we turn the ffi into C code. To use it,
- # subclass the 'distutils.command.build_ext.build_ext' class and
- # add a method 'def pre_run(self, ext, ffi)'.
- if pre_run is not None:
- pre_run(ext, ffi)
- updated = recompiler.make_c_source(ffi, module_name, source, c_file)
- if not updated:
- log.info("already up-to-date")
- return c_file
-
- if dist.ext_modules is None:
- dist.ext_modules = []
- dist.ext_modules.append(ext)
-
- base_class = dist.cmdclass.get('build_ext', build_ext)
- class build_ext_make_mod(base_class):
- def run(self):
- if ext.sources[0] == '$PLACEHOLDER':
- pre_run = getattr(self, 'pre_run', None)
- ext.sources[0] = make_mod(self.build_temp, pre_run)
- base_class.run(self)
- dist.cmdclass['build_ext'] = build_ext_make_mod
- # NB. multiple runs here will create multiple 'build_ext_make_mod'
- # classes. Even in this case the 'build_ext' command should be
- # run once; but just in case, the logic above does nothing if
- # called again.
-
-
-def _add_py_module(dist, ffi, module_name):
- from distutils.dir_util import mkpath
- from setuptools.command.build_py import build_py
- from setuptools.command.build_ext import build_ext
- from distutils import log
- from cffi import recompiler
-
- def generate_mod(py_file):
- log.info("generating cffi module %r" % py_file)
- mkpath(os.path.dirname(py_file))
- updated = recompiler.make_py_source(ffi, module_name, py_file)
- if not updated:
- log.info("already up-to-date")
-
- base_class = dist.cmdclass.get('build_py', build_py)
- class build_py_make_mod(base_class):
- def run(self):
- base_class.run(self)
- module_path = module_name.split('.')
- module_path[-1] += '.py'
- generate_mod(os.path.join(self.build_lib, *module_path))
- dist.cmdclass['build_py'] = build_py_make_mod
-
- # distutils and setuptools have no notion I could find of a
- # generated python module. If we don't add module_name to
- # dist.py_modules, then things mostly work but there are some
- # combination of options (--root and --record) that will miss
- # the module. So we add it here, which gives a few apparently
- # harmless warnings about not finding the file outside the
- # build directory.
- if dist.py_modules is None:
- dist.py_modules = []
- dist.py_modules.append(module_name)
-
- # the following is only for "build_ext -i"
- base_class_2 = dist.cmdclass.get('build_ext', build_ext)
- class build_ext_make_mod(base_class_2):
- def run(self):
- base_class_2.run(self)
- if self.inplace:
- # from get_ext_fullpath() in distutils/command/build_ext.py
- module_path = module_name.split('.')
- package = '.'.join(module_path[:-1])
- build_py = self.get_finalized_command('build_py')
- package_dir = build_py.get_package_dir(package)
- file_name = module_path[-1] + '.py'
- generate_mod(os.path.join(package_dir, file_name))
- dist.cmdclass['build_ext'] = build_ext_make_mod
-
-def cffi_modules(dist, attr, value):
- assert attr == 'cffi_modules'
- if isinstance(value, basestring):
- value = [value]
-
- for cffi_module in value:
- add_cffi_module(dist, cffi_module)
diff --git a/functions/source/GitPullS3/cffi/vengine_cpy.py b/functions/source/GitPullS3/cffi/vengine_cpy.py
deleted file mode 100644
index 536f11f..0000000
--- a/functions/source/GitPullS3/cffi/vengine_cpy.py
+++ /dev/null
@@ -1,1015 +0,0 @@
-#
-# DEPRECATED: implementation for ffi.verify()
-#
-import sys, imp
-from . import model
-from .error import VerificationError
-
-
-class VCPythonEngine(object):
- _class_key = 'x'
- _gen_python_module = True
-
- def __init__(self, verifier):
- self.verifier = verifier
- self.ffi = verifier.ffi
- self._struct_pending_verification = {}
- self._types_of_builtin_functions = {}
-
- def patch_extension_kwds(self, kwds):
- pass
-
- def find_module(self, module_name, path, so_suffixes):
- try:
- f, filename, descr = imp.find_module(module_name, path)
- except ImportError:
- return None
- if f is not None:
- f.close()
- # Note that after a setuptools installation, there are both .py
- # and .so files with the same basename. The code here relies on
- # imp.find_module() locating the .so in priority.
- if descr[0] not in so_suffixes:
- return None
- return filename
-
- def collect_types(self):
- self._typesdict = {}
- self._generate("collecttype")
-
- def _prnt(self, what=''):
- self._f.write(what + '\n')
-
- def _gettypenum(self, type):
- # a KeyError here is a bug. please report it! :-)
- return self._typesdict[type]
-
- def _do_collect_type(self, tp):
- if ((not isinstance(tp, model.PrimitiveType)
- or tp.name == 'long double')
- and tp not in self._typesdict):
- num = len(self._typesdict)
- self._typesdict[tp] = num
-
- def write_source_to_f(self):
- self.collect_types()
- #
- # The new module will have a _cffi_setup() function that receives
- # objects from the ffi world, and that calls some setup code in
- # the module. This setup code is split in several independent
- # functions, e.g. one per constant. The functions are "chained"
- # by ending in a tail call to each other.
- #
- # This is further split in two chained lists, depending on if we
- # can do it at import-time or if we must wait for _cffi_setup() to
- # provide us with the <ctype> objects. This is needed because we
- # need the values of the enum constants in order to build the
- # <ctype 'enum'> that we may have to pass to _cffi_setup().
- #
- # The following two 'chained_list_constants' items contains
- # the head of these two chained lists, as a string that gives the
- # call to do, if any.
- self._chained_list_constants = ['((void)lib,0)', '((void)lib,0)']
- #
- prnt = self._prnt
- # first paste some standard set of lines that are mostly '#define'
- prnt(cffimod_header)
- prnt()
- # then paste the C source given by the user, verbatim.
- prnt(self.verifier.preamble)
- prnt()
- #
- # call generate_cpy_xxx_decl(), for every xxx found from
- # ffi._parser._declarations. This generates all the functions.
- self._generate("decl")
- #
- # implement the function _cffi_setup_custom() as calling the
- # head of the chained list.
- self._generate_setup_custom()
- prnt()
- #
- # produce the method table, including the entries for the
- # generated Python->C function wrappers, which are done
- # by generate_cpy_function_method().
- prnt('static PyMethodDef _cffi_methods[] = {')
- self._generate("method")
- prnt(' {"_cffi_setup", _cffi_setup, METH_VARARGS, NULL},')
- prnt(' {NULL, NULL, 0, NULL} /* Sentinel */')
- prnt('};')
- prnt()
- #
- # standard init.
- modname = self.verifier.get_module_name()
- constants = self._chained_list_constants[False]
- prnt('#if PY_MAJOR_VERSION >= 3')
- prnt()
- prnt('static struct PyModuleDef _cffi_module_def = {')
- prnt(' PyModuleDef_HEAD_INIT,')
- prnt(' "%s",' % modname)
- prnt(' NULL,')
- prnt(' -1,')
- prnt(' _cffi_methods,')
- prnt(' NULL, NULL, NULL, NULL')
- prnt('};')
- prnt()
- prnt('PyMODINIT_FUNC')
- prnt('PyInit_%s(void)' % modname)
- prnt('{')
- prnt(' PyObject *lib;')
- prnt(' lib = PyModule_Create(&_cffi_module_def);')
- prnt(' if (lib == NULL)')
- prnt(' return NULL;')
- prnt(' if (%s < 0 || _cffi_init() < 0) {' % (constants,))
- prnt(' Py_DECREF(lib);')
- prnt(' return NULL;')
- prnt(' }')
- prnt(' return lib;')
- prnt('}')
- prnt()
- prnt('#else')
- prnt()
- prnt('PyMODINIT_FUNC')
- prnt('init%s(void)' % modname)
- prnt('{')
- prnt(' PyObject *lib;')
- prnt(' lib = Py_InitModule("%s", _cffi_methods);' % modname)
- prnt(' if (lib == NULL)')
- prnt(' return;')
- prnt(' if (%s < 0 || _cffi_init() < 0)' % (constants,))
- prnt(' return;')
- prnt(' return;')
- prnt('}')
- prnt()
- prnt('#endif')
-
- def load_library(self, flags=None):
- # XXX review all usages of 'self' here!
- # import it as a new extension module
- imp.acquire_lock()
- try:
- if hasattr(sys, "getdlopenflags"):
- previous_flags = sys.getdlopenflags()
- try:
- if hasattr(sys, "setdlopenflags") and flags is not None:
- sys.setdlopenflags(flags)
- module = imp.load_dynamic(self.verifier.get_module_name(),
- self.verifier.modulefilename)
- except ImportError as e:
- error = "importing %r: %s" % (self.verifier.modulefilename, e)
- raise VerificationError(error)
- finally:
- if hasattr(sys, "setdlopenflags"):
- sys.setdlopenflags(previous_flags)
- finally:
- imp.release_lock()
- #
- # call loading_cpy_struct() to get the struct layout inferred by
- # the C compiler
- self._load(module, 'loading')
- #
- # the C code will need the objects. Collect them in
- # order in a list.
- revmapping = dict([(value, key)
- for (key, value) in self._typesdict.items()])
- lst = [revmapping[i] for i in range(len(revmapping))]
- lst = list(map(self.ffi._get_cached_btype, lst))
- #
- # build the FFILibrary class and instance and call _cffi_setup().
- # this will set up some fields like '_cffi_types', and only then
- # it will invoke the chained list of functions that will really
- # build (notably) the constant objects, as if they are
- # pointers, and store them as attributes on the 'library' object.
- class FFILibrary(object):
- _cffi_python_module = module
- _cffi_ffi = self.ffi
- _cffi_dir = []
- def __dir__(self):
- return FFILibrary._cffi_dir + list(self.__dict__)
- library = FFILibrary()
- if module._cffi_setup(lst, VerificationError, library):
- import warnings
- warnings.warn("reimporting %r might overwrite older definitions"
- % (self.verifier.get_module_name()))
- #
- # finally, call the loaded_cpy_xxx() functions. This will perform
- # the final adjustments, like copying the Python->C wrapper
- # functions from the module to the 'library' object, and setting
- # up the FFILibrary class with properties for the global C variables.
- self._load(module, 'loaded', library=library)
- module._cffi_original_ffi = self.ffi
- module._cffi_types_of_builtin_funcs = self._types_of_builtin_functions
- return library
-
- def _get_declarations(self):
- lst = [(key, tp) for (key, (tp, qual)) in
- self.ffi._parser._declarations.items()]
- lst.sort()
- return lst
-
- def _generate(self, step_name):
- for name, tp in self._get_declarations():
- kind, realname = name.split(' ', 1)
- try:
- method = getattr(self, '_generate_cpy_%s_%s' % (kind,
- step_name))
- except AttributeError:
- raise VerificationError(
- "not implemented in verify(): %r" % name)
- try:
- method(tp, realname)
- except Exception as e:
- model.attach_exception_info(e, name)
- raise
-
- def _load(self, module, step_name, **kwds):
- for name, tp in self._get_declarations():
- kind, realname = name.split(' ', 1)
- method = getattr(self, '_%s_cpy_%s' % (step_name, kind))
- try:
- method(tp, realname, module, **kwds)
- except Exception as e:
- model.attach_exception_info(e, name)
- raise
-
- def _generate_nothing(self, tp, name):
- pass
-
- def _loaded_noop(self, tp, name, module, **kwds):
- pass
-
- # ----------
-
- def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode):
- extraarg = ''
- if isinstance(tp, model.PrimitiveType):
- if tp.is_integer_type() and tp.name != '_Bool':
- converter = '_cffi_to_c_int'
- extraarg = ', %s' % tp.name
- else:
- converter = '(%s)_cffi_to_c_%s' % (tp.get_c_name(''),
- tp.name.replace(' ', '_'))
- errvalue = '-1'
- #
- elif isinstance(tp, model.PointerType):
- self._convert_funcarg_to_c_ptr_or_array(tp, fromvar,
- tovar, errcode)
- return
- #
- elif isinstance(tp, (model.StructOrUnion, model.EnumType)):
- # a struct (not a struct pointer) as a function argument
- self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)'
- % (tovar, self._gettypenum(tp), fromvar))
- self._prnt(' %s;' % errcode)
- return
- #
- elif isinstance(tp, model.FunctionPtrType):
- converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('')
- extraarg = ', _cffi_type(%d)' % self._gettypenum(tp)
- errvalue = 'NULL'
- #
- else:
- raise NotImplementedError(tp)
- #
- self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg))
- self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % (
- tovar, tp.get_c_name(''), errvalue))
- self._prnt(' %s;' % errcode)
-
- def _extra_local_variables(self, tp, localvars):
- if isinstance(tp, model.PointerType):
- localvars.add('Py_ssize_t datasize')
-
- def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode):
- self._prnt(' datasize = _cffi_prepare_pointer_call_argument(')
- self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % (
- self._gettypenum(tp), fromvar, tovar))
- self._prnt(' if (datasize != 0) {')
- self._prnt(' if (datasize < 0)')
- self._prnt(' %s;' % errcode)
- self._prnt(' %s = alloca((size_t)datasize);' % (tovar,))
- self._prnt(' memset((void *)%s, 0, (size_t)datasize);' % (tovar,))
- self._prnt(' if (_cffi_convert_array_from_object('
- '(char *)%s, _cffi_type(%d), %s) < 0)' % (
- tovar, self._gettypenum(tp), fromvar))
- self._prnt(' %s;' % errcode)
- self._prnt(' }')
-
- def _convert_expr_from_c(self, tp, var, context):
- if isinstance(tp, model.PrimitiveType):
- if tp.is_integer_type() and tp.name != '_Bool':
- return '_cffi_from_c_int(%s, %s)' % (var, tp.name)
- elif tp.name != 'long double':
- return '_cffi_from_c_%s(%s)' % (tp.name.replace(' ', '_'), var)
- else:
- return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % (
- var, self._gettypenum(tp))
- elif isinstance(tp, (model.PointerType, model.FunctionPtrType)):
- return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % (
- var, self._gettypenum(tp))
- elif isinstance(tp, model.ArrayType):
- return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % (
- var, self._gettypenum(model.PointerType(tp.item)))
- elif isinstance(tp, model.StructOrUnion):
- if tp.fldnames is None:
- raise TypeError("'%s' is used as %s, but is opaque" % (
- tp._get_c_name(), context))
- return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % (
- var, self._gettypenum(tp))
- elif isinstance(tp, model.EnumType):
- return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % (
- var, self._gettypenum(tp))
- else:
- raise NotImplementedError(tp)
-
- # ----------
- # typedefs: generates no code so far
-
- _generate_cpy_typedef_collecttype = _generate_nothing
- _generate_cpy_typedef_decl = _generate_nothing
- _generate_cpy_typedef_method = _generate_nothing
- _loading_cpy_typedef = _loaded_noop
- _loaded_cpy_typedef = _loaded_noop
-
- # ----------
- # function declarations
-
- def _generate_cpy_function_collecttype(self, tp, name):
- assert isinstance(tp, model.FunctionPtrType)
- if tp.ellipsis:
- self._do_collect_type(tp)
- else:
- # don't call _do_collect_type(tp) in this common case,
- # otherwise test_autofilled_struct_as_argument fails
- for type in tp.args:
- self._do_collect_type(type)
- self._do_collect_type(tp.result)
-
- def _generate_cpy_function_decl(self, tp, name):
- assert isinstance(tp, model.FunctionPtrType)
- if tp.ellipsis:
- # cannot support vararg functions better than this: check for its
- # exact type (including the fixed arguments), and build it as a
- # constant function pointer (no CPython wrapper)
- self._generate_cpy_const(False, name, tp)
- return
- prnt = self._prnt
- numargs = len(tp.args)
- if numargs == 0:
- argname = 'noarg'
- elif numargs == 1:
- argname = 'arg0'
- else:
- argname = 'args'
- prnt('static PyObject *')
- prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname))
- prnt('{')
- #
- context = 'argument of %s' % name
- for i, type in enumerate(tp.args):
- prnt(' %s;' % type.get_c_name(' x%d' % i, context))
- #
- localvars = set()
- for type in tp.args:
- self._extra_local_variables(type, localvars)
- for decl in localvars:
- prnt(' %s;' % (decl,))
- #
- if not isinstance(tp.result, model.VoidType):
- result_code = 'result = '
- context = 'result of %s' % name
- prnt(' %s;' % tp.result.get_c_name(' result', context))
- else:
- result_code = ''
- #
- if len(tp.args) > 1:
- rng = range(len(tp.args))
- for i in rng:
- prnt(' PyObject *arg%d;' % i)
- prnt()
- prnt(' if (!PyArg_ParseTuple(args, "%s:%s", %s))' % (
- 'O' * numargs, name, ', '.join(['&arg%d' % i for i in rng])))
- prnt(' return NULL;')
- prnt()
- #
- for i, type in enumerate(tp.args):
- self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i,
- 'return NULL')
- prnt()
- #
- prnt(' Py_BEGIN_ALLOW_THREADS')
- prnt(' _cffi_restore_errno();')
- prnt(' { %s%s(%s); }' % (
- result_code, name,
- ', '.join(['x%d' % i for i in range(len(tp.args))])))
- prnt(' _cffi_save_errno();')
- prnt(' Py_END_ALLOW_THREADS')
- prnt()
- #
- prnt(' (void)self; /* unused */')
- if numargs == 0:
- prnt(' (void)noarg; /* unused */')
- if result_code:
- prnt(' return %s;' %
- self._convert_expr_from_c(tp.result, 'result', 'result type'))
- else:
- prnt(' Py_INCREF(Py_None);')
- prnt(' return Py_None;')
- prnt('}')
- prnt()
-
- def _generate_cpy_function_method(self, tp, name):
- if tp.ellipsis:
- return
- numargs = len(tp.args)
- if numargs == 0:
- meth = 'METH_NOARGS'
- elif numargs == 1:
- meth = 'METH_O'
- else:
- meth = 'METH_VARARGS'
- self._prnt(' {"%s", _cffi_f_%s, %s, NULL},' % (name, name, meth))
-
- _loading_cpy_function = _loaded_noop
-
- def _loaded_cpy_function(self, tp, name, module, library):
- if tp.ellipsis:
- return
- func = getattr(module, name)
- setattr(library, name, func)
- self._types_of_builtin_functions[func] = tp
-
- # ----------
- # named structs
-
- _generate_cpy_struct_collecttype = _generate_nothing
- def _generate_cpy_struct_decl(self, tp, name):
- assert name == tp.name
- self._generate_struct_or_union_decl(tp, 'struct', name)
- def _generate_cpy_struct_method(self, tp, name):
- self._generate_struct_or_union_method(tp, 'struct', name)
- def _loading_cpy_struct(self, tp, name, module):
- self._loading_struct_or_union(tp, 'struct', name, module)
- def _loaded_cpy_struct(self, tp, name, module, **kwds):
- self._loaded_struct_or_union(tp)
-
- _generate_cpy_union_collecttype = _generate_nothing
- def _generate_cpy_union_decl(self, tp, name):
- assert name == tp.name
- self._generate_struct_or_union_decl(tp, 'union', name)
- def _generate_cpy_union_method(self, tp, name):
- self._generate_struct_or_union_method(tp, 'union', name)
- def _loading_cpy_union(self, tp, name, module):
- self._loading_struct_or_union(tp, 'union', name, module)
- def _loaded_cpy_union(self, tp, name, module, **kwds):
- self._loaded_struct_or_union(tp)
-
- def _generate_struct_or_union_decl(self, tp, prefix, name):
- if tp.fldnames is None:
- return # nothing to do with opaque structs
- checkfuncname = '_cffi_check_%s_%s' % (prefix, name)
- layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
- cname = ('%s %s' % (prefix, name)).strip()
- #
- prnt = self._prnt
- prnt('static void %s(%s *p)' % (checkfuncname, cname))
- prnt('{')
- prnt(' /* only to generate compile-time warnings or errors */')
- prnt(' (void)p;')
- for fname, ftype, fbitsize, fqual in tp.enumfields():
- if (isinstance(ftype, model.PrimitiveType)
- and ftype.is_integer_type()) or fbitsize >= 0:
- # accept all integers, but complain on float or double
- prnt(' (void)((p->%s) << 1);' % fname)
- else:
- # only accept exactly the type declared.
- try:
- prnt(' { %s = &p->%s; (void)tmp; }' % (
- ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual),
- fname))
- except VerificationError as e:
- prnt(' /* %s */' % str(e)) # cannot verify it, ignore
- prnt('}')
- prnt('static PyObject *')
- prnt('%s(PyObject *self, PyObject *noarg)' % (layoutfuncname,))
- prnt('{')
- prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname)
- prnt(' static Py_ssize_t nums[] = {')
- prnt(' sizeof(%s),' % cname)
- prnt(' offsetof(struct _cffi_aligncheck, y),')
- for fname, ftype, fbitsize, fqual in tp.enumfields():
- if fbitsize >= 0:
- continue # xxx ignore fbitsize for now
- prnt(' offsetof(%s, %s),' % (cname, fname))
- if isinstance(ftype, model.ArrayType) and ftype.length is None:
- prnt(' 0, /* %s */' % ftype._get_c_name())
- else:
- prnt(' sizeof(((%s *)0)->%s),' % (cname, fname))
- prnt(' -1')
- prnt(' };')
- prnt(' (void)self; /* unused */')
- prnt(' (void)noarg; /* unused */')
- prnt(' return _cffi_get_struct_layout(nums);')
- prnt(' /* the next line is not executed, but compiled */')
- prnt(' %s(0);' % (checkfuncname,))
- prnt('}')
- prnt()
-
- def _generate_struct_or_union_method(self, tp, prefix, name):
- if tp.fldnames is None:
- return # nothing to do with opaque structs
- layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
- self._prnt(' {"%s", %s, METH_NOARGS, NULL},' % (layoutfuncname,
- layoutfuncname))
-
- def _loading_struct_or_union(self, tp, prefix, name, module):
- if tp.fldnames is None:
- return # nothing to do with opaque structs
- layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
- #
- function = getattr(module, layoutfuncname)
- layout = function()
- if isinstance(tp, model.StructOrUnion) and tp.partial:
- # use the function()'s sizes and offsets to guide the
- # layout of the struct
- totalsize = layout[0]
- totalalignment = layout[1]
- fieldofs = layout[2::2]
- fieldsize = layout[3::2]
- tp.force_flatten()
- assert len(fieldofs) == len(fieldsize) == len(tp.fldnames)
- tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment
- else:
- cname = ('%s %s' % (prefix, name)).strip()
- self._struct_pending_verification[tp] = layout, cname
-
- def _loaded_struct_or_union(self, tp):
- if tp.fldnames is None:
- return # nothing to do with opaque structs
- self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered
-
- if tp in self._struct_pending_verification:
- # check that the layout sizes and offsets match the real ones
- def check(realvalue, expectedvalue, msg):
- if realvalue != expectedvalue:
- raise VerificationError(
- "%s (we have %d, but C compiler says %d)"
- % (msg, expectedvalue, realvalue))
- ffi = self.ffi
- BStruct = ffi._get_cached_btype(tp)
- layout, cname = self._struct_pending_verification.pop(tp)
- check(layout[0], ffi.sizeof(BStruct), "wrong total size")
- check(layout[1], ffi.alignof(BStruct), "wrong total alignment")
- i = 2
- for fname, ftype, fbitsize, fqual in tp.enumfields():
- if fbitsize >= 0:
- continue # xxx ignore fbitsize for now
- check(layout[i], ffi.offsetof(BStruct, fname),
- "wrong offset for field %r" % (fname,))
- if layout[i+1] != 0:
- BField = ffi._get_cached_btype(ftype)
- check(layout[i+1], ffi.sizeof(BField),
- "wrong size for field %r" % (fname,))
- i += 2
- assert i == len(layout)
-
- # ----------
- # 'anonymous' declarations. These are produced for anonymous structs
- # or unions; the 'name' is obtained by a typedef.
-
- _generate_cpy_anonymous_collecttype = _generate_nothing
-
- def _generate_cpy_anonymous_decl(self, tp, name):
- if isinstance(tp, model.EnumType):
- self._generate_cpy_enum_decl(tp, name, '')
- else:
- self._generate_struct_or_union_decl(tp, '', name)
-
- def _generate_cpy_anonymous_method(self, tp, name):
- if not isinstance(tp, model.EnumType):
- self._generate_struct_or_union_method(tp, '', name)
-
- def _loading_cpy_anonymous(self, tp, name, module):
- if isinstance(tp, model.EnumType):
- self._loading_cpy_enum(tp, name, module)
- else:
- self._loading_struct_or_union(tp, '', name, module)
-
- def _loaded_cpy_anonymous(self, tp, name, module, **kwds):
- if isinstance(tp, model.EnumType):
- self._loaded_cpy_enum(tp, name, module, **kwds)
- else:
- self._loaded_struct_or_union(tp)
-
- # ----------
- # constants, likely declared with '#define'
-
- def _generate_cpy_const(self, is_int, name, tp=None, category='const',
- vartp=None, delayed=True, size_too=False,
- check_value=None):
- prnt = self._prnt
- funcname = '_cffi_%s_%s' % (category, name)
- prnt('static int %s(PyObject *lib)' % funcname)
- prnt('{')
- prnt(' PyObject *o;')
- prnt(' int res;')
- if not is_int:
- prnt(' %s;' % (vartp or tp).get_c_name(' i', name))
- else:
- assert category == 'const'
- #
- if check_value is not None:
- self._check_int_constant_value(name, check_value)
- #
- if not is_int:
- if category == 'var':
- realexpr = '&' + name
- else:
- realexpr = name
- prnt(' i = (%s);' % (realexpr,))
- prnt(' o = %s;' % (self._convert_expr_from_c(tp, 'i',
- 'variable type'),))
- assert delayed
- else:
- prnt(' o = _cffi_from_c_int_const(%s);' % name)
- prnt(' if (o == NULL)')
- prnt(' return -1;')
- if size_too:
- prnt(' {')
- prnt(' PyObject *o1 = o;')
- prnt(' o = Py_BuildValue("On", o1, (Py_ssize_t)sizeof(%s));'
- % (name,))
- prnt(' Py_DECREF(o1);')
- prnt(' if (o == NULL)')
- prnt(' return -1;')
- prnt(' }')
- prnt(' res = PyObject_SetAttrString(lib, "%s", o);' % name)
- prnt(' Py_DECREF(o);')
- prnt(' if (res < 0)')
- prnt(' return -1;')
- prnt(' return %s;' % self._chained_list_constants[delayed])
- self._chained_list_constants[delayed] = funcname + '(lib)'
- prnt('}')
- prnt()
-
- def _generate_cpy_constant_collecttype(self, tp, name):
- is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type()
- if not is_int:
- self._do_collect_type(tp)
-
- def _generate_cpy_constant_decl(self, tp, name):
- is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type()
- self._generate_cpy_const(is_int, name, tp)
-
- _generate_cpy_constant_method = _generate_nothing
- _loading_cpy_constant = _loaded_noop
- _loaded_cpy_constant = _loaded_noop
-
- # ----------
- # enums
-
- def _check_int_constant_value(self, name, value, err_prefix=''):
- prnt = self._prnt
- if value <= 0:
- prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % (
- name, name, value))
- else:
- prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % (
- name, name, value))
- prnt(' char buf[64];')
- prnt(' if ((%s) <= 0)' % name)
- prnt(' snprintf(buf, 63, "%%ld", (long)(%s));' % name)
- prnt(' else')
- prnt(' snprintf(buf, 63, "%%lu", (unsigned long)(%s));' %
- name)
- prnt(' PyErr_Format(_cffi_VerificationError,')
- prnt(' "%s%s has the real value %s, not %s",')
- prnt(' "%s", "%s", buf, "%d");' % (
- err_prefix, name, value))
- prnt(' return -1;')
- prnt(' }')
-
- def _enum_funcname(self, prefix, name):
- # "$enum_$1" => "___D_enum____D_1"
- name = name.replace('$', '___D_')
- return '_cffi_e_%s_%s' % (prefix, name)
-
- def _generate_cpy_enum_decl(self, tp, name, prefix='enum'):
- if tp.partial:
- for enumerator in tp.enumerators:
- self._generate_cpy_const(True, enumerator, delayed=False)
- return
- #
- funcname = self._enum_funcname(prefix, name)
- prnt = self._prnt
- prnt('static int %s(PyObject *lib)' % funcname)
- prnt('{')
- for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
- self._check_int_constant_value(enumerator, enumvalue,
- "enum %s: " % name)
- prnt(' return %s;' % self._chained_list_constants[True])
- self._chained_list_constants[True] = funcname + '(lib)'
- prnt('}')
- prnt()
-
- _generate_cpy_enum_collecttype = _generate_nothing
- _generate_cpy_enum_method = _generate_nothing
-
- def _loading_cpy_enum(self, tp, name, module):
- if tp.partial:
- enumvalues = [getattr(module, enumerator)
- for enumerator in tp.enumerators]
- tp.enumvalues = tuple(enumvalues)
- tp.partial_resolved = True
-
- def _loaded_cpy_enum(self, tp, name, module, library):
- for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
- setattr(library, enumerator, enumvalue)
-
- # ----------
- # macros: for now only for integers
-
- def _generate_cpy_macro_decl(self, tp, name):
- if tp == '...':
- check_value = None
- else:
- check_value = tp # an integer
- self._generate_cpy_const(True, name, check_value=check_value)
-
- _generate_cpy_macro_collecttype = _generate_nothing
- _generate_cpy_macro_method = _generate_nothing
- _loading_cpy_macro = _loaded_noop
- _loaded_cpy_macro = _loaded_noop
-
- # ----------
- # global variables
-
- def _generate_cpy_variable_collecttype(self, tp, name):
- if isinstance(tp, model.ArrayType):
- tp_ptr = model.PointerType(tp.item)
- else:
- tp_ptr = model.PointerType(tp)
- self._do_collect_type(tp_ptr)
-
- def _generate_cpy_variable_decl(self, tp, name):
- if isinstance(tp, model.ArrayType):
- tp_ptr = model.PointerType(tp.item)
- self._generate_cpy_const(False, name, tp, vartp=tp_ptr,
- size_too = (tp.length == '...'))
- else:
- tp_ptr = model.PointerType(tp)
- self._generate_cpy_const(False, name, tp_ptr, category='var')
-
- _generate_cpy_variable_method = _generate_nothing
- _loading_cpy_variable = _loaded_noop
-
- def _loaded_cpy_variable(self, tp, name, module, library):
- value = getattr(library, name)
- if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the
- # sense that "a=..." is forbidden
- if tp.length == '...':
- assert isinstance(value, tuple)
- (value, size) = value
- BItemType = self.ffi._get_cached_btype(tp.item)
- length, rest = divmod(size, self.ffi.sizeof(BItemType))
- if rest != 0:
- raise VerificationError(
- "bad size: %r does not seem to be an array of %s" %
- (name, tp.item))
- tp = tp.resolve_length(length)
- # 'value' is a which we have to replace with
- # a if the N is actually known
- if tp.length is not None:
- BArray = self.ffi._get_cached_btype(tp)
- value = self.ffi.cast(BArray, value)
- setattr(library, name, value)
- return
- # remove ptr= from the library instance, and replace
- # it by a property on the class, which reads/writes into ptr[0].
- ptr = value
- delattr(library, name)
- def getter(library):
- return ptr[0]
- def setter(library, value):
- ptr[0] = value
- setattr(type(library), name, property(getter, setter))
- type(library)._cffi_dir.append(name)
-
- # ----------
-
- def _generate_setup_custom(self):
- prnt = self._prnt
- prnt('static int _cffi_setup_custom(PyObject *lib)')
- prnt('{')
- prnt(' return %s;' % self._chained_list_constants[True])
- prnt('}')
-
-cffimod_header = r'''
-#include <Python.h>
-#include <stddef.h>
-
-/* this block of #ifs should be kept exactly identical between
- c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py
- and cffi/_cffi_include.h */
-#if defined(_MSC_VER)
-# include <malloc.h> /* for alloca() */
-# if _MSC_VER < 1600 /* MSVC < 2010 */
- typedef __int8 int8_t;
- typedef __int16 int16_t;
- typedef __int32 int32_t;
- typedef __int64 int64_t;
- typedef unsigned __int8 uint8_t;
- typedef unsigned __int16 uint16_t;
- typedef unsigned __int32 uint32_t;
- typedef unsigned __int64 uint64_t;
- typedef __int8 int_least8_t;
- typedef __int16 int_least16_t;
- typedef __int32 int_least32_t;
- typedef __int64 int_least64_t;
- typedef unsigned __int8 uint_least8_t;
- typedef unsigned __int16 uint_least16_t;
- typedef unsigned __int32 uint_least32_t;
- typedef unsigned __int64 uint_least64_t;
- typedef __int8 int_fast8_t;
- typedef __int16 int_fast16_t;
- typedef __int32 int_fast32_t;
- typedef __int64 int_fast64_t;
- typedef unsigned __int8 uint_fast8_t;
- typedef unsigned __int16 uint_fast16_t;
- typedef unsigned __int32 uint_fast32_t;
- typedef unsigned __int64 uint_fast64_t;
- typedef __int64 intmax_t;
- typedef unsigned __int64 uintmax_t;
-# else
-# include <stdint.h>
-# endif
-# if _MSC_VER < 1800 /* MSVC < 2013 */
-# ifndef __cplusplus
- typedef unsigned char _Bool;
-# endif
-# endif
-#else
-# include <stdint.h>
-# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux)
-# include <alloca.h>
-# endif
-#endif
-
-#if PY_MAJOR_VERSION < 3
-# undef PyCapsule_CheckExact
-# undef PyCapsule_GetPointer
-# define PyCapsule_CheckExact(capsule) (PyCObject_Check(capsule))
-# define PyCapsule_GetPointer(capsule, name) \
- (PyCObject_AsVoidPtr(capsule))
-#endif
-
-#if PY_MAJOR_VERSION >= 3
-# define PyInt_FromLong PyLong_FromLong
-#endif
-
-#define _cffi_from_c_double PyFloat_FromDouble
-#define _cffi_from_c_float PyFloat_FromDouble
-#define _cffi_from_c_long PyInt_FromLong
-#define _cffi_from_c_ulong PyLong_FromUnsignedLong
-#define _cffi_from_c_longlong PyLong_FromLongLong
-#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong
-#define _cffi_from_c__Bool PyBool_FromLong
-
-#define _cffi_to_c_double PyFloat_AsDouble
-#define _cffi_to_c_float PyFloat_AsDouble
-
-#define _cffi_from_c_int_const(x) \
- (((x) > 0) ? \
- ((unsigned long long)(x) <= (unsigned long long)LONG_MAX) ? \
- PyInt_FromLong((long)(x)) : \
- PyLong_FromUnsignedLongLong((unsigned long long)(x)) : \
- ((long long)(x) >= (long long)LONG_MIN) ? \
- PyInt_FromLong((long)(x)) : \
- PyLong_FromLongLong((long long)(x)))
-
-#define _cffi_from_c_int(x, type) \
- (((type)-1) > 0 ? /* unsigned */ \
- (sizeof(type) < sizeof(long) ? \
- PyInt_FromLong((long)x) : \
- sizeof(type) == sizeof(long) ? \
- PyLong_FromUnsignedLong((unsigned long)x) : \
- PyLong_FromUnsignedLongLong((unsigned long long)x)) : \
- (sizeof(type) <= sizeof(long) ? \
- PyInt_FromLong((long)x) : \
- PyLong_FromLongLong((long long)x)))
-
-#define _cffi_to_c_int(o, type) \
- ((type)( \
- sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \
- : (type)_cffi_to_c_i8(o)) : \
- sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \
- : (type)_cffi_to_c_i16(o)) : \
- sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \
- : (type)_cffi_to_c_i32(o)) : \
- sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o) \
- : (type)_cffi_to_c_i64(o)) : \
- (Py_FatalError("unsupported size for type " #type), (type)0)))
-
-#define _cffi_to_c_i8 \
- ((int(*)(PyObject *))_cffi_exports[1])
-#define _cffi_to_c_u8 \
- ((int(*)(PyObject *))_cffi_exports[2])
-#define _cffi_to_c_i16 \
- ((int(*)(PyObject *))_cffi_exports[3])
-#define _cffi_to_c_u16 \
- ((int(*)(PyObject *))_cffi_exports[4])
-#define _cffi_to_c_i32 \
- ((int(*)(PyObject *))_cffi_exports[5])
-#define _cffi_to_c_u32 \
- ((unsigned int(*)(PyObject *))_cffi_exports[6])
-#define _cffi_to_c_i64 \
- ((long long(*)(PyObject *))_cffi_exports[7])
-#define _cffi_to_c_u64 \
- ((unsigned long long(*)(PyObject *))_cffi_exports[8])
-#define _cffi_to_c_char \
- ((int(*)(PyObject *))_cffi_exports[9])
-#define _cffi_from_c_pointer \
- ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[10])
-#define _cffi_to_c_pointer \
- ((char *(*)(PyObject *, CTypeDescrObject *))_cffi_exports[11])
-#define _cffi_get_struct_layout \
- ((PyObject *(*)(Py_ssize_t[]))_cffi_exports[12])
-#define _cffi_restore_errno \
- ((void(*)(void))_cffi_exports[13])
-#define _cffi_save_errno \
- ((void(*)(void))_cffi_exports[14])
-#define _cffi_from_c_char \
- ((PyObject *(*)(char))_cffi_exports[15])
-#define _cffi_from_c_deref \
- ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[16])
-#define _cffi_to_c \
- ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[17])
-#define _cffi_from_c_struct \
- ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[18])
-#define _cffi_to_c_wchar_t \
- ((wchar_t(*)(PyObject *))_cffi_exports[19])
-#define _cffi_from_c_wchar_t \
- ((PyObject *(*)(wchar_t))_cffi_exports[20])
-#define _cffi_to_c_long_double \
- ((long double(*)(PyObject *))_cffi_exports[21])
-#define _cffi_to_c__Bool \
- ((_Bool(*)(PyObject *))_cffi_exports[22])
-#define _cffi_prepare_pointer_call_argument \
- ((Py_ssize_t(*)(CTypeDescrObject *, PyObject *, char **))_cffi_exports[23])
-#define _cffi_convert_array_from_object \
- ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[24])
-#define _CFFI_NUM_EXPORTS 25
-
-typedef struct _ctypedescr CTypeDescrObject;
-
-static void *_cffi_exports[_CFFI_NUM_EXPORTS];
-static PyObject *_cffi_types, *_cffi_VerificationError;
-
-static int _cffi_setup_custom(PyObject *lib); /* forward */
-
-static PyObject *_cffi_setup(PyObject *self, PyObject *args)
-{
- PyObject *library;
- int was_alive = (_cffi_types != NULL);
- (void)self; /* unused */
- if (!PyArg_ParseTuple(args, "OOO", &_cffi_types, &_cffi_VerificationError,
- &library))
- return NULL;
- Py_INCREF(_cffi_types);
- Py_INCREF(_cffi_VerificationError);
- if (_cffi_setup_custom(library) < 0)
- return NULL;
- return PyBool_FromLong(was_alive);
-}
-
-static int _cffi_init(void)
-{
- PyObject *module, *c_api_object = NULL;
-
- module = PyImport_ImportModule("_cffi_backend");
- if (module == NULL)
- goto failure;
-
- c_api_object = PyObject_GetAttrString(module, "_C_API");
- if (c_api_object == NULL)
- goto failure;
- if (!PyCapsule_CheckExact(c_api_object)) {
- PyErr_SetNone(PyExc_ImportError);
- goto failure;
- }
- memcpy(_cffi_exports, PyCapsule_GetPointer(c_api_object, "cffi"),
- _CFFI_NUM_EXPORTS * sizeof(void *));
-
- Py_DECREF(module);
- Py_DECREF(c_api_object);
- return 0;
-
- failure:
- Py_XDECREF(module);
- Py_XDECREF(c_api_object);
- return -1;
-}
-
-#define _cffi_type(num) ((CTypeDescrObject *)PyList_GET_ITEM(_cffi_types, num))
-
-/**********/
-'''
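For context (not part of the patch): the files deleted here implemented cffi's long-deprecated ffi.verify() path. A minimal sketch of the public API that exercised these engines, assuming the real cffi package and a working C compiler, is:

    # Illustrative only: exercises the deprecated verify() engine.
    import cffi

    ffi = cffi.FFI()
    ffi.cdef("double sqrt(double x);")                      # declaration to expose
    lib = ffi.verify("#include <math.h>", libraries=["m"])  # builds a tiny extension
    assert lib.sqrt(9.0) == 3.0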
diff --git a/functions/source/GitPullS3/cffi/vengine_gen.py b/functions/source/GitPullS3/cffi/vengine_gen.py
deleted file mode 100644
index a64ff64..0000000
--- a/functions/source/GitPullS3/cffi/vengine_gen.py
+++ /dev/null
@@ -1,675 +0,0 @@
-#
-# DEPRECATED: implementation for ffi.verify()
-#
-import sys, os
-import types
-
-from . import model
-from .error import VerificationError
-
-
-class VGenericEngine(object):
- _class_key = 'g'
- _gen_python_module = False
-
- def __init__(self, verifier):
- self.verifier = verifier
- self.ffi = verifier.ffi
- self.export_symbols = []
- self._struct_pending_verification = {}
-
- def patch_extension_kwds(self, kwds):
- # add 'export_symbols' to the dictionary. Note that we add the
- # list before filling it. When we fill it, it will thus also show
- # up in kwds['export_symbols'].
- kwds.setdefault('export_symbols', self.export_symbols)
-
- def find_module(self, module_name, path, so_suffixes):
- for so_suffix in so_suffixes:
- basename = module_name + so_suffix
- if path is None:
- path = sys.path
- for dirname in path:
- filename = os.path.join(dirname, basename)
- if os.path.isfile(filename):
- return filename
-
- def collect_types(self):
- pass # not needed in the generic engine
-
- def _prnt(self, what=''):
- self._f.write(what + '\n')
-
- def write_source_to_f(self):
- prnt = self._prnt
- # first paste some standard set of lines that are mostly '#include'
- prnt(cffimod_header)
- # then paste the C source given by the user, verbatim.
- prnt(self.verifier.preamble)
- #
- # call generate_gen_xxx_decl(), for every xxx found from
- # ffi._parser._declarations. This generates all the functions.
- self._generate('decl')
- #
- # on Windows, distutils insists on putting init_cffi_xyz in
- # 'export_symbols', so instead of fighting it, just give up and
- # give it one
- if sys.platform == 'win32':
- if sys.version_info >= (3,):
- prefix = 'PyInit_'
- else:
- prefix = 'init'
- modname = self.verifier.get_module_name()
- prnt("void %s%s(void) { }\n" % (prefix, modname))
-
- def load_library(self, flags=0):
- # import it with the CFFI backend
- backend = self.ffi._backend
- # needs to make a path that contains '/', on Posix
- filename = os.path.join(os.curdir, self.verifier.modulefilename)
- module = backend.load_library(filename, flags)
- #
- # call loading_gen_struct() to get the struct layout inferred by
- # the C compiler
- self._load(module, 'loading')
-
- # build the FFILibrary class and instance, this is a module subclass
- # because modules are expected to have usually-constant-attributes and
- # in PyPy this means the JIT is able to treat attributes as constant,
- # which we want.
- class FFILibrary(types.ModuleType):
- _cffi_generic_module = module
- _cffi_ffi = self.ffi
- _cffi_dir = []
- def __dir__(self):
- return FFILibrary._cffi_dir
- library = FFILibrary("")
- #
- # finally, call the loaded_gen_xxx() functions. This will set
- # up the 'library' object.
- self._load(module, 'loaded', library=library)
- return library
-
- def _get_declarations(self):
- lst = [(key, tp) for (key, (tp, qual)) in
- self.ffi._parser._declarations.items()]
- lst.sort()
- return lst
-
- def _generate(self, step_name):
- for name, tp in self._get_declarations():
- kind, realname = name.split(' ', 1)
- try:
- method = getattr(self, '_generate_gen_%s_%s' % (kind,
- step_name))
- except AttributeError:
- raise VerificationError(
- "not implemented in verify(): %r" % name)
- try:
- method(tp, realname)
- except Exception as e:
- model.attach_exception_info(e, name)
- raise
-
- def _load(self, module, step_name, **kwds):
- for name, tp in self._get_declarations():
- kind, realname = name.split(' ', 1)
- method = getattr(self, '_%s_gen_%s' % (step_name, kind))
- try:
- method(tp, realname, module, **kwds)
- except Exception as e:
- model.attach_exception_info(e, name)
- raise
-
- def _generate_nothing(self, tp, name):
- pass
-
- def _loaded_noop(self, tp, name, module, **kwds):
- pass
-
- # ----------
- # typedefs: generates no code so far
-
- _generate_gen_typedef_decl = _generate_nothing
- _loading_gen_typedef = _loaded_noop
- _loaded_gen_typedef = _loaded_noop
-
- # ----------
- # function declarations
-
- def _generate_gen_function_decl(self, tp, name):
- assert isinstance(tp, model.FunctionPtrType)
- if tp.ellipsis:
- # cannot support vararg functions better than this: check for its
- # exact type (including the fixed arguments), and build it as a
- # constant function pointer (no _cffi_f_%s wrapper)
- self._generate_gen_const(False, name, tp)
- return
- prnt = self._prnt
- numargs = len(tp.args)
- argnames = []
- for i, type in enumerate(tp.args):
- indirection = ''
- if isinstance(type, model.StructOrUnion):
- indirection = '*'
- argnames.append('%sx%d' % (indirection, i))
- context = 'argument of %s' % name
- arglist = [type.get_c_name(' %s' % arg, context)
- for type, arg in zip(tp.args, argnames)]
- tpresult = tp.result
- if isinstance(tpresult, model.StructOrUnion):
- arglist.insert(0, tpresult.get_c_name(' *r', context))
- tpresult = model.void_type
- arglist = ', '.join(arglist) or 'void'
- wrappername = '_cffi_f_%s' % name
- self.export_symbols.append(wrappername)
- if tp.abi:
- abi = tp.abi + ' '
- else:
- abi = ''
- funcdecl = ' %s%s(%s)' % (abi, wrappername, arglist)
- context = 'result of %s' % name
- prnt(tpresult.get_c_name(funcdecl, context))
- prnt('{')
- #
- if isinstance(tp.result, model.StructOrUnion):
- result_code = '*r = '
- elif not isinstance(tp.result, model.VoidType):
- result_code = 'return '
- else:
- result_code = ''
- prnt(' %s%s(%s);' % (result_code, name, ', '.join(argnames)))
- prnt('}')
- prnt()
-
- _loading_gen_function = _loaded_noop
-
- def _loaded_gen_function(self, tp, name, module, library):
- assert isinstance(tp, model.FunctionPtrType)
- if tp.ellipsis:
- newfunction = self._load_constant(False, tp, name, module)
- else:
- indirections = []
- base_tp = tp
- if (any(isinstance(typ, model.StructOrUnion) for typ in tp.args)
- or isinstance(tp.result, model.StructOrUnion)):
- indirect_args = []
- for i, typ in enumerate(tp.args):
- if isinstance(typ, model.StructOrUnion):
- typ = model.PointerType(typ)
- indirections.append((i, typ))
- indirect_args.append(typ)
- indirect_result = tp.result
- if isinstance(indirect_result, model.StructOrUnion):
- if indirect_result.fldtypes is None:
- raise TypeError("'%s' is used as result type, "
- "but is opaque" % (
- indirect_result._get_c_name(),))
- indirect_result = model.PointerType(indirect_result)
- indirect_args.insert(0, indirect_result)
- indirections.insert(0, ("result", indirect_result))
- indirect_result = model.void_type
- tp = model.FunctionPtrType(tuple(indirect_args),
- indirect_result, tp.ellipsis)
- BFunc = self.ffi._get_cached_btype(tp)
- wrappername = '_cffi_f_%s' % name
- newfunction = module.load_function(BFunc, wrappername)
- for i, typ in indirections:
- newfunction = self._make_struct_wrapper(newfunction, i, typ,
- base_tp)
- setattr(library, name, newfunction)
- type(library)._cffi_dir.append(name)
-
- def _make_struct_wrapper(self, oldfunc, i, tp, base_tp):
- backend = self.ffi._backend
- BType = self.ffi._get_cached_btype(tp)
- if i == "result":
- ffi = self.ffi
- def newfunc(*args):
- res = ffi.new(BType)
- oldfunc(res, *args)
- return res[0]
- else:
- def newfunc(*args):
- args = args[:i] + (backend.newp(BType, args[i]),) + args[i+1:]
- return oldfunc(*args)
- newfunc._cffi_base_type = base_tp
- return newfunc
-
- # ----------
- # named structs
-
- def _generate_gen_struct_decl(self, tp, name):
- assert name == tp.name
- self._generate_struct_or_union_decl(tp, 'struct', name)
-
- def _loading_gen_struct(self, tp, name, module):
- self._loading_struct_or_union(tp, 'struct', name, module)
-
- def _loaded_gen_struct(self, tp, name, module, **kwds):
- self._loaded_struct_or_union(tp)
-
- def _generate_gen_union_decl(self, tp, name):
- assert name == tp.name
- self._generate_struct_or_union_decl(tp, 'union', name)
-
- def _loading_gen_union(self, tp, name, module):
- self._loading_struct_or_union(tp, 'union', name, module)
-
- def _loaded_gen_union(self, tp, name, module, **kwds):
- self._loaded_struct_or_union(tp)
-
- def _generate_struct_or_union_decl(self, tp, prefix, name):
- if tp.fldnames is None:
- return # nothing to do with opaque structs
- checkfuncname = '_cffi_check_%s_%s' % (prefix, name)
- layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
- cname = ('%s %s' % (prefix, name)).strip()
- #
- prnt = self._prnt
- prnt('static void %s(%s *p)' % (checkfuncname, cname))
- prnt('{')
- prnt(' /* only to generate compile-time warnings or errors */')
- prnt(' (void)p;')
- for fname, ftype, fbitsize, fqual in tp.enumfields():
- if (isinstance(ftype, model.PrimitiveType)
- and ftype.is_integer_type()) or fbitsize >= 0:
- # accept all integers, but complain on float or double
- prnt(' (void)((p->%s) << 1);' % fname)
- else:
- # only accept exactly the type declared.
- try:
- prnt(' { %s = &p->%s; (void)tmp; }' % (
- ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual),
- fname))
- except VerificationError as e:
- prnt(' /* %s */' % str(e)) # cannot verify it, ignore
- prnt('}')
- self.export_symbols.append(layoutfuncname)
- prnt('intptr_t %s(intptr_t i)' % (layoutfuncname,))
- prnt('{')
- prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname)
- prnt(' static intptr_t nums[] = {')
- prnt(' sizeof(%s),' % cname)
- prnt(' offsetof(struct _cffi_aligncheck, y),')
- for fname, ftype, fbitsize, fqual in tp.enumfields():
- if fbitsize >= 0:
- continue # xxx ignore fbitsize for now
- prnt(' offsetof(%s, %s),' % (cname, fname))
- if isinstance(ftype, model.ArrayType) and ftype.length is None:
- prnt(' 0, /* %s */' % ftype._get_c_name())
- else:
- prnt(' sizeof(((%s *)0)->%s),' % (cname, fname))
- prnt(' -1')
- prnt(' };')
- prnt(' return nums[i];')
- prnt(' /* the next line is not executed, but compiled */')
- prnt(' %s(0);' % (checkfuncname,))
- prnt('}')
- prnt()
-
- def _loading_struct_or_union(self, tp, prefix, name, module):
- if tp.fldnames is None:
- return # nothing to do with opaque structs
- layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
- #
- BFunc = self.ffi._typeof_locked("intptr_t(*)(intptr_t)")[0]
- function = module.load_function(BFunc, layoutfuncname)
- layout = []
- num = 0
- while True:
- x = function(num)
- if x < 0: break
- layout.append(x)
- num += 1
- if isinstance(tp, model.StructOrUnion) and tp.partial:
- # use the function()'s sizes and offsets to guide the
- # layout of the struct
- totalsize = layout[0]
- totalalignment = layout[1]
- fieldofs = layout[2::2]
- fieldsize = layout[3::2]
- tp.force_flatten()
- assert len(fieldofs) == len(fieldsize) == len(tp.fldnames)
- tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment
- else:
- cname = ('%s %s' % (prefix, name)).strip()
- self._struct_pending_verification[tp] = layout, cname
-
- def _loaded_struct_or_union(self, tp):
- if tp.fldnames is None:
- return # nothing to do with opaque structs
- self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered
-
- if tp in self._struct_pending_verification:
- # check that the layout sizes and offsets match the real ones
- def check(realvalue, expectedvalue, msg):
- if realvalue != expectedvalue:
- raise VerificationError(
- "%s (we have %d, but C compiler says %d)"
- % (msg, expectedvalue, realvalue))
- ffi = self.ffi
- BStruct = ffi._get_cached_btype(tp)
- layout, cname = self._struct_pending_verification.pop(tp)
- check(layout[0], ffi.sizeof(BStruct), "wrong total size")
- check(layout[1], ffi.alignof(BStruct), "wrong total alignment")
- i = 2
- for fname, ftype, fbitsize, fqual in tp.enumfields():
- if fbitsize >= 0:
- continue # xxx ignore fbitsize for now
- check(layout[i], ffi.offsetof(BStruct, fname),
- "wrong offset for field %r" % (fname,))
- if layout[i+1] != 0:
- BField = ffi._get_cached_btype(ftype)
- check(layout[i+1], ffi.sizeof(BField),
- "wrong size for field %r" % (fname,))
- i += 2
- assert i == len(layout)
-
- # ----------
- # 'anonymous' declarations. These are produced for anonymous structs
- # or unions; the 'name' is obtained by a typedef.
-
- def _generate_gen_anonymous_decl(self, tp, name):
- if isinstance(tp, model.EnumType):
- self._generate_gen_enum_decl(tp, name, '')
- else:
- self._generate_struct_or_union_decl(tp, '', name)
-
- def _loading_gen_anonymous(self, tp, name, module):
- if isinstance(tp, model.EnumType):
- self._loading_gen_enum(tp, name, module, '')
- else:
- self._loading_struct_or_union(tp, '', name, module)
-
- def _loaded_gen_anonymous(self, tp, name, module, **kwds):
- if isinstance(tp, model.EnumType):
- self._loaded_gen_enum(tp, name, module, **kwds)
- else:
- self._loaded_struct_or_union(tp)
-
- # ----------
- # constants, likely declared with '#define'
-
- def _generate_gen_const(self, is_int, name, tp=None, category='const',
- check_value=None):
- prnt = self._prnt
- funcname = '_cffi_%s_%s' % (category, name)
- self.export_symbols.append(funcname)
- if check_value is not None:
- assert is_int
- assert category == 'const'
- prnt('int %s(char *out_error)' % funcname)
- prnt('{')
- self._check_int_constant_value(name, check_value)
- prnt(' return 0;')
- prnt('}')
- elif is_int:
- assert category == 'const'
- prnt('int %s(long long *out_value)' % funcname)
- prnt('{')
- prnt(' *out_value = (long long)(%s);' % (name,))
- prnt(' return (%s) <= 0;' % (name,))
- prnt('}')
- else:
- assert tp is not None
- assert check_value is None
- if category == 'var':
- ampersand = '&'
- else:
- ampersand = ''
- extra = ''
- if category == 'const' and isinstance(tp, model.StructOrUnion):
- extra = 'const *'
- ampersand = '&'
- prnt(tp.get_c_name(' %s%s(void)' % (extra, funcname), name))
- prnt('{')
- prnt(' return (%s%s);' % (ampersand, name))
- prnt('}')
- prnt()
-
- def _generate_gen_constant_decl(self, tp, name):
- is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type()
- self._generate_gen_const(is_int, name, tp)
-
- _loading_gen_constant = _loaded_noop
-
- def _load_constant(self, is_int, tp, name, module, check_value=None):
- funcname = '_cffi_const_%s' % name
- if check_value is not None:
- assert is_int
- self._load_known_int_constant(module, funcname)
- value = check_value
- elif is_int:
- BType = self.ffi._typeof_locked("long long*")[0]
- BFunc = self.ffi._typeof_locked("int(*)(long long*)")[0]
- function = module.load_function(BFunc, funcname)
- p = self.ffi.new(BType)
- negative = function(p)
- value = int(p[0])
- if value < 0 and not negative:
- BLongLong = self.ffi._typeof_locked("long long")[0]
- value += (1 << (8*self.ffi.sizeof(BLongLong)))
- else:
- assert check_value is None
- fntypeextra = '(*)(void)'
- if isinstance(tp, model.StructOrUnion):
- fntypeextra = '*' + fntypeextra
- BFunc = self.ffi._typeof_locked(tp.get_c_name(fntypeextra, name))[0]
- function = module.load_function(BFunc, funcname)
- value = function()
- if isinstance(tp, model.StructOrUnion):
- value = value[0]
- return value
-
- def _loaded_gen_constant(self, tp, name, module, library):
- is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type()
- value = self._load_constant(is_int, tp, name, module)
- setattr(library, name, value)
- type(library)._cffi_dir.append(name)
-
- # ----------
- # enums
-
- def _check_int_constant_value(self, name, value):
- prnt = self._prnt
- if value <= 0:
- prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % (
- name, name, value))
- else:
- prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % (
- name, name, value))
- prnt(' char buf[64];')
- prnt(' if ((%s) <= 0)' % name)
- prnt(' sprintf(buf, "%%ld", (long)(%s));' % name)
- prnt(' else')
- prnt(' sprintf(buf, "%%lu", (unsigned long)(%s));' %
- name)
- prnt(' sprintf(out_error, "%s has the real value %s, not %s",')
- prnt(' "%s", buf, "%d");' % (name[:100], value))
- prnt(' return -1;')
- prnt(' }')
-
- def _load_known_int_constant(self, module, funcname):
- BType = self.ffi._typeof_locked("char[]")[0]
- BFunc = self.ffi._typeof_locked("int(*)(char*)")[0]
- function = module.load_function(BFunc, funcname)
- p = self.ffi.new(BType, 256)
- if function(p) < 0:
- error = self.ffi.string(p)
- if sys.version_info >= (3,):
- error = str(error, 'utf-8')
- raise VerificationError(error)
-
- def _enum_funcname(self, prefix, name):
- # "$enum_$1" => "___D_enum____D_1"
- name = name.replace('$', '___D_')
- return '_cffi_e_%s_%s' % (prefix, name)
-
- def _generate_gen_enum_decl(self, tp, name, prefix='enum'):
- if tp.partial:
- for enumerator in tp.enumerators:
- self._generate_gen_const(True, enumerator)
- return
- #
- funcname = self._enum_funcname(prefix, name)
- self.export_symbols.append(funcname)
- prnt = self._prnt
- prnt('int %s(char *out_error)' % funcname)
- prnt('{')
- for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
- self._check_int_constant_value(enumerator, enumvalue)
- prnt(' return 0;')
- prnt('}')
- prnt()
-
- def _loading_gen_enum(self, tp, name, module, prefix='enum'):
- if tp.partial:
- enumvalues = [self._load_constant(True, tp, enumerator, module)
- for enumerator in tp.enumerators]
- tp.enumvalues = tuple(enumvalues)
- tp.partial_resolved = True
- else:
- funcname = self._enum_funcname(prefix, name)
- self._load_known_int_constant(module, funcname)
-
- def _loaded_gen_enum(self, tp, name, module, library):
- for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
- setattr(library, enumerator, enumvalue)
- type(library)._cffi_dir.append(enumerator)
-
- # ----------
- # macros: for now only for integers
-
- def _generate_gen_macro_decl(self, tp, name):
- if tp == '...':
- check_value = None
- else:
- check_value = tp # an integer
- self._generate_gen_const(True, name, check_value=check_value)
-
- _loading_gen_macro = _loaded_noop
-
- def _loaded_gen_macro(self, tp, name, module, library):
- if tp == '...':
- check_value = None
- else:
- check_value = tp # an integer
- value = self._load_constant(True, tp, name, module,
- check_value=check_value)
- setattr(library, name, value)
- type(library)._cffi_dir.append(name)
-
- # ----------
- # global variables
-
- def _generate_gen_variable_decl(self, tp, name):
- if isinstance(tp, model.ArrayType):
- if tp.length == '...':
- prnt = self._prnt
- funcname = '_cffi_sizeof_%s' % (name,)
- self.export_symbols.append(funcname)
- prnt("size_t %s(void)" % funcname)
- prnt("{")
- prnt(" return sizeof(%s);" % (name,))
- prnt("}")
- tp_ptr = model.PointerType(tp.item)
- self._generate_gen_const(False, name, tp_ptr)
- else:
- tp_ptr = model.PointerType(tp)
- self._generate_gen_const(False, name, tp_ptr, category='var')
-
- _loading_gen_variable = _loaded_noop
-
- def _loaded_gen_variable(self, tp, name, module, library):
- if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the
- # sense that "a=..." is forbidden
- if tp.length == '...':
- funcname = '_cffi_sizeof_%s' % (name,)
- BFunc = self.ffi._typeof_locked('size_t(*)(void)')[0]
- function = module.load_function(BFunc, funcname)
- size = function()
- BItemType = self.ffi._get_cached_btype(tp.item)
- length, rest = divmod(size, self.ffi.sizeof(BItemType))
- if rest != 0:
- raise VerificationError(
- "bad size: %r does not seem to be an array of %s" %
- (name, tp.item))
- tp = tp.resolve_length(length)
- tp_ptr = model.PointerType(tp.item)
- value = self._load_constant(False, tp_ptr, name, module)
- # 'value' is a <cdata 'type *'> which we have to replace with
- # a <cdata 'type[N]'> if the N is actually known
- if tp.length is not None:
- BArray = self.ffi._get_cached_btype(tp)
- value = self.ffi.cast(BArray, value)
- setattr(library, name, value)
- type(library)._cffi_dir.append(name)
- return
- # remove ptr= from the library instance, and replace
- # it by a property on the class, which reads/writes into ptr[0].
- funcname = '_cffi_var_%s' % name
- BFunc = self.ffi._typeof_locked(tp.get_c_name('*(*)(void)', name))[0]
- function = module.load_function(BFunc, funcname)
- ptr = function()
- def getter(library):
- return ptr[0]
- def setter(library, value):
- ptr[0] = value
- setattr(type(library), name, property(getter, setter))
- type(library)._cffi_dir.append(name)
-
-cffimod_header = r'''
-#include <stdio.h>
-#include <stddef.h>
-#include <stdarg.h>
-#include <errno.h>
-#include <sys/types.h> /* XXX for ssize_t on some platforms */
-
-/* this block of #ifs should be kept exactly identical between
- c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py
- and cffi/_cffi_include.h */
-#if defined(_MSC_VER)
-# include <malloc.h> /* for alloca() */
-# if _MSC_VER < 1600 /* MSVC < 2010 */
- typedef __int8 int8_t;
- typedef __int16 int16_t;
- typedef __int32 int32_t;
- typedef __int64 int64_t;
- typedef unsigned __int8 uint8_t;
- typedef unsigned __int16 uint16_t;
- typedef unsigned __int32 uint32_t;
- typedef unsigned __int64 uint64_t;
- typedef __int8 int_least8_t;
- typedef __int16 int_least16_t;
- typedef __int32 int_least32_t;
- typedef __int64 int_least64_t;
- typedef unsigned __int8 uint_least8_t;
- typedef unsigned __int16 uint_least16_t;
- typedef unsigned __int32 uint_least32_t;
- typedef unsigned __int64 uint_least64_t;
- typedef __int8 int_fast8_t;
- typedef __int16 int_fast16_t;
- typedef __int32 int_fast32_t;
- typedef __int64 int_fast64_t;
- typedef unsigned __int8 uint_fast8_t;
- typedef unsigned __int16 uint_fast16_t;
- typedef unsigned __int32 uint_fast32_t;
- typedef unsigned __int64 uint_fast64_t;
- typedef __int64 intmax_t;
- typedef unsigned __int64 uintmax_t;
-# else
-# include <stdint.h>
-# endif
-# if _MSC_VER < 1800 /* MSVC < 2013 */
-# ifndef __cplusplus
- typedef unsigned char _Bool;
-# endif
-# endif
-#else
-# include <stdint.h>
-# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux)
-# include <alloca.h>
-# endif
-#endif
-'''
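Aside (illustrative, not applied by this patch): the generic engine removed above probes struct layouts through a generated _cffi_layout_<struct> C function that returns sizeof, an alignment probe, then (offset, size) pairs terminated by -1. A sketch of the decoding done in _loading_struct_or_union, with made-up numbers standing in for the C function's return values:

    # 'raw' mimics successive return values of a hypothetical layout function.
    raw = [16, 8, 0, 4, 8, 8, -1]   # sizeof, align probe, (offset, size) pairs, -1

    layout = []
    for x in raw:
        if x < 0:
            break                    # -1 terminates the stream
        layout.append(x)

    totalsize, totalalignment = layout[0], layout[1]
    fieldofs = layout[2::2]          # member offsets
    fieldsize = layout[3::2]         # member sizes
    print(totalsize, totalalignment, list(zip(fieldofs, fieldsize)))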
diff --git a/functions/source/GitPullS3/cffi/verifier.py b/functions/source/GitPullS3/cffi/verifier.py
deleted file mode 100644
index 59b78c2..0000000
--- a/functions/source/GitPullS3/cffi/verifier.py
+++ /dev/null
@@ -1,306 +0,0 @@
-#
-# DEPRECATED: implementation for ffi.verify()
-#
-import sys, os, binascii, shutil, io
-from . import __version_verifier_modules__
-from . import ffiplatform
-from .error import VerificationError
-
-if sys.version_info >= (3, 3):
- import importlib.machinery
- def _extension_suffixes():
- return importlib.machinery.EXTENSION_SUFFIXES[:]
-else:
- import imp
- def _extension_suffixes():
- return [suffix for suffix, _, type in imp.get_suffixes()
- if type == imp.C_EXTENSION]
-
-
-if sys.version_info >= (3,):
- NativeIO = io.StringIO
-else:
- class NativeIO(io.BytesIO):
- def write(self, s):
- if isinstance(s, unicode):
- s = s.encode('ascii')
- super(NativeIO, self).write(s)
-
-
-class Verifier(object):
-
- def __init__(self, ffi, preamble, tmpdir=None, modulename=None,
- ext_package=None, tag='', force_generic_engine=False,
- source_extension='.c', flags=None, relative_to=None, **kwds):
- if ffi._parser._uses_new_feature:
- raise VerificationError(
- "feature not supported with ffi.verify(), but only "
- "with ffi.set_source(): %s" % (ffi._parser._uses_new_feature,))
- self.ffi = ffi
- self.preamble = preamble
- if not modulename:
- flattened_kwds = ffiplatform.flatten(kwds)
- vengine_class = _locate_engine_class(ffi, force_generic_engine)
- self._vengine = vengine_class(self)
- self._vengine.patch_extension_kwds(kwds)
- self.flags = flags
- self.kwds = self.make_relative_to(kwds, relative_to)
- #
- if modulename:
- if tag:
- raise TypeError("can't specify both 'modulename' and 'tag'")
- else:
- key = '\x00'.join([sys.version[:3], __version_verifier_modules__,
- preamble, flattened_kwds] +
- ffi._cdefsources)
- if sys.version_info >= (3,):
- key = key.encode('utf-8')
- k1 = hex(binascii.crc32(key[0::2]) & 0xffffffff)
- k1 = k1.lstrip('0x').rstrip('L')
- k2 = hex(binascii.crc32(key[1::2]) & 0xffffffff)
- k2 = k2.lstrip('0').rstrip('L')
- modulename = '_cffi_%s_%s%s%s' % (tag, self._vengine._class_key,
- k1, k2)
- suffix = _get_so_suffixes()[0]
- self.tmpdir = tmpdir or _caller_dir_pycache()
- self.sourcefilename = os.path.join(self.tmpdir, modulename + source_extension)
- self.modulefilename = os.path.join(self.tmpdir, modulename + suffix)
- self.ext_package = ext_package
- self._has_source = False
- self._has_module = False
-
- def write_source(self, file=None):
- """Write the C source code. It is produced in 'self.sourcefilename',
- which can be tweaked beforehand."""
- with self.ffi._lock:
- if self._has_source and file is None:
- raise VerificationError(
- "source code already written")
- self._write_source(file)
-
- def compile_module(self):
- """Write the C source code (if not done already) and compile it.
- This produces a dynamic link library in 'self.modulefilename'."""
- with self.ffi._lock:
- if self._has_module:
- raise VerificationError("module already compiled")
- if not self._has_source:
- self._write_source()
- self._compile_module()
-
- def load_library(self):
- """Get a C module from this Verifier instance.
- Returns an instance of a FFILibrary class that behaves like the
- objects returned by ffi.dlopen(), but that delegates all
- operations to the C module. If necessary, the C code is written
- and compiled first.
- """
- with self.ffi._lock:
- if not self._has_module:
- self._locate_module()
- if not self._has_module:
- if not self._has_source:
- self._write_source()
- self._compile_module()
- return self._load_library()
-
- def get_module_name(self):
- basename = os.path.basename(self.modulefilename)
- # kill both the .so extension and the other .'s, as introduced
- # by Python 3: 'basename.cpython-33m.so'
- basename = basename.split('.', 1)[0]
- # and the _d added in Python 2 debug builds --- but try to be
- # conservative and not kill a legitimate _d
- if basename.endswith('_d') and hasattr(sys, 'gettotalrefcount'):
- basename = basename[:-2]
- return basename
-
- def get_extension(self):
- ffiplatform._hack_at_distutils() # backward compatibility hack
- if not self._has_source:
- with self.ffi._lock:
- if not self._has_source:
- self._write_source()
- sourcename = ffiplatform.maybe_relative_path(self.sourcefilename)
- modname = self.get_module_name()
- return ffiplatform.get_extension(sourcename, modname, **self.kwds)
-
- def generates_python_module(self):
- return self._vengine._gen_python_module
-
- def make_relative_to(self, kwds, relative_to):
- if relative_to and os.path.dirname(relative_to):
- dirname = os.path.dirname(relative_to)
- kwds = kwds.copy()
- for key in ffiplatform.LIST_OF_FILE_NAMES:
- if key in kwds:
- lst = kwds[key]
- if not isinstance(lst, (list, tuple)):
- raise TypeError("keyword '%s' should be a list or tuple"
- % (key,))
- lst = [os.path.join(dirname, fn) for fn in lst]
- kwds[key] = lst
- return kwds
-
- # ----------
-
- def _locate_module(self):
- if not os.path.isfile(self.modulefilename):
- if self.ext_package:
- try:
- pkg = __import__(self.ext_package, None, None, ['__doc__'])
- except ImportError:
- return # cannot import the package itself, give up
- # (e.g. it might be called differently before installation)
- path = pkg.__path__
- else:
- path = None
- filename = self._vengine.find_module(self.get_module_name(), path,
- _get_so_suffixes())
- if filename is None:
- return
- self.modulefilename = filename
- self._vengine.collect_types()
- self._has_module = True
-
- def _write_source_to(self, file):
- self._vengine._f = file
- try:
- self._vengine.write_source_to_f()
- finally:
- del self._vengine._f
-
- def _write_source(self, file=None):
- if file is not None:
- self._write_source_to(file)
- else:
- # Write our source file to an in memory file.
- f = NativeIO()
- self._write_source_to(f)
- source_data = f.getvalue()
-
- # Determine if this matches the current file
- if os.path.exists(self.sourcefilename):
- with open(self.sourcefilename, "r") as fp:
- needs_written = not (fp.read() == source_data)
- else:
- needs_written = True
-
- # Actually write the file out if it doesn't match
- if needs_written:
- _ensure_dir(self.sourcefilename)
- with open(self.sourcefilename, "w") as fp:
- fp.write(source_data)
-
- # Set this flag
- self._has_source = True
-
- def _compile_module(self):
- # compile this C source
- tmpdir = os.path.dirname(self.sourcefilename)
- outputfilename = ffiplatform.compile(tmpdir, self.get_extension())
- try:
- same = ffiplatform.samefile(outputfilename, self.modulefilename)
- except OSError:
- same = False
- if not same:
- _ensure_dir(self.modulefilename)
- shutil.move(outputfilename, self.modulefilename)
- self._has_module = True
-
- def _load_library(self):
- assert self._has_module
- if self.flags is not None:
- return self._vengine.load_library(self.flags)
- else:
- return self._vengine.load_library()
-
-# ____________________________________________________________
-
-_FORCE_GENERIC_ENGINE = False # for tests
-
-def _locate_engine_class(ffi, force_generic_engine):
- if _FORCE_GENERIC_ENGINE:
- force_generic_engine = True
- if not force_generic_engine:
- if '__pypy__' in sys.builtin_module_names:
- force_generic_engine = True
- else:
- try:
- import _cffi_backend
- except ImportError:
- _cffi_backend = '?'
- if ffi._backend is not _cffi_backend:
- force_generic_engine = True
- if force_generic_engine:
- from . import vengine_gen
- return vengine_gen.VGenericEngine
- else:
- from . import vengine_cpy
- return vengine_cpy.VCPythonEngine
-
-# ____________________________________________________________
-
-_TMPDIR = None
-
-def _caller_dir_pycache():
- if _TMPDIR:
- return _TMPDIR
- result = os.environ.get('CFFI_TMPDIR')
- if result:
- return result
- filename = sys._getframe(2).f_code.co_filename
- return os.path.abspath(os.path.join(os.path.dirname(filename),
- '__pycache__'))
-
-def set_tmpdir(dirname):
- """Set the temporary directory to use instead of __pycache__."""
- global _TMPDIR
- _TMPDIR = dirname
-
-def cleanup_tmpdir(tmpdir=None, keep_so=False):
- """Clean up the temporary directory by removing all files in it
- called `_cffi_*.{c,so}` as well as the `build` subdirectory."""
- tmpdir = tmpdir or _caller_dir_pycache()
- try:
- filelist = os.listdir(tmpdir)
- except OSError:
- return
- if keep_so:
- suffix = '.c' # only remove .c files
- else:
- suffix = _get_so_suffixes()[0].lower()
- for fn in filelist:
- if fn.lower().startswith('_cffi_') and (
- fn.lower().endswith(suffix) or fn.lower().endswith('.c')):
- try:
- os.unlink(os.path.join(tmpdir, fn))
- except OSError:
- pass
- clean_dir = [os.path.join(tmpdir, 'build')]
- for dir in clean_dir:
- try:
- for fn in os.listdir(dir):
- fn = os.path.join(dir, fn)
- if os.path.isdir(fn):
- clean_dir.append(fn)
- else:
- os.unlink(fn)
- except OSError:
- pass
-
-def _get_so_suffixes():
- suffixes = _extension_suffixes()
- if not suffixes:
- # bah, no C_EXTENSION available. Occurs on pypy without cpyext
- if sys.platform == 'win32':
- suffixes = [".pyd"]
- else:
- suffixes = [".so"]
-
- return suffixes
-
-def _ensure_dir(filename):
- dirname = os.path.dirname(filename)
- if dirname and not os.path.isdir(dirname):
- os.makedirs(dirname)
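Aside (illustrative): the removed Verifier cached generated _cffi_*.c and .so files under a __pycache__ directory next to the caller. Assuming the installed cffi package still ships cffi.verifier with the same helpers, that cache can be redirected and cleaned like this:

    # Sketch only; the path is a placeholder.
    from cffi import verifier

    verifier.set_tmpdir('/tmp/cffi-cache')    # use this instead of __pycache__
    # ... ffi.verify(...) calls would build into /tmp/cffi-cache ...
    verifier.cleanup_tmpdir(keep_so=True)     # remove _cffi_*.c files, keep the .so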
diff --git a/functions/source/GitPullS3/lambda_function.py b/functions/source/GitPullS3/lambda_function.py
index 2d0e68b..8ebae82 100644
--- a/functions/source/GitPullS3/lambda_function.py
+++ b/functions/source/GitPullS3/lambda_function.py
@@ -1,16 +1,15 @@
-# Copyright 2016 Amazon Web Services, Inc. or its affiliates. All Rights Reserved.
+# Copyright 2020 Amazon Web Services, Inc. or its affiliates. All Rights Reserved.
# This file is licensed to you under the AWS Customer Agreement (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at http://aws.amazon.com/agreement/ .
# This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, express or implied.
# See the License for the specific language governing permissions and limitations under the License.
-from pygit2 import Keypair, discover_repository, Repository, clone_repository, RemoteCallbacks
from boto3 import client
import os
+import time
import stat
import shutil
-from zipfile import ZipFile
from ipaddress import ip_network, ip_address
import logging
import hmac
@@ -36,105 +35,26 @@
kms = client('kms')
-def write_key(filename, contents):
- logger.info('Writing keys to /tmp/...')
- mode = stat.S_IRUSR | stat.S_IWUSR
- umask_original = os.umask(0)
- try:
- handle = os.fdopen(os.open(filename, os.O_WRONLY | os.O_CREAT, mode), 'w')
- finally:
- os.umask(umask_original)
- handle.write(contents + '\n')
- handle.close()
-
-
-def get_keys(keybucket, pubkey, update=False):
- if not os.path.isfile('/tmp/id_rsa') or not os.path.isfile('/tmp/id_rsa.pub') or update:
- logger.info('Keys not found on Lambda container, fetching from S3...')
- enckey = s3.get_object(Bucket=keybucket, Key=key)['Body'].read()
- privkey = kms.decrypt(CiphertextBlob=enckey)['Plaintext']
- write_key('/tmp/id_rsa', privkey)
- write_key('/tmp/id_rsa.pub', pubkey)
- return Keypair('git', '/tmp/id_rsa.pub', '/tmp/id_rsa', '')
-
-
-def init_remote(repo, name, url):
- remote = repo.remotes.create(name, url, '+refs/*:refs/*')
- return remote
-
-
-def create_repo(repo_path, remote_url, creds):
- if os.path.exists(repo_path):
- logger.info('Cleaning up repo path...')
- shutil.rmtree(repo_path)
- repo = clone_repository(remote_url, repo_path, callbacks=creds)
-
- return repo
-
-
-def pull_repo(repo, branch_name, remote_url, creds):
- remote_exists = False
- for r in repo.remotes:
- if r.url == remote_url:
- remote_exists = True
- remote = r
- if not remote_exists:
- remote = repo.create_remote('origin', remote_url)
- logger.info('Fetching and merging changes from %s branch %s', remote_url, branch_name)
- remote.fetch(callbacks=creds)
- if(branch_name.startswith('tags/')):
- ref = 'refs/' + branch_name
- else:
- ref = 'refs/remotes/origin/' + branch_name
- remote_branch_id = repo.lookup_reference(ref).target
- repo.checkout_tree(repo.get(remote_branch_id))
- # branch_ref = repo.lookup_reference('refs/heads/' + branch_name)
- # branch_ref.set_target(remote_branch_id)
- repo.head.set_target(remote_branch_id)
- return repo
-
-
-def zip_repo(repo_path, repo_name):
- logger.info('Creating zipfile...')
- zf = ZipFile('/tmp/'+repo_name.replace('/', '_')+'.zip', 'w')
- for dirname, subdirs, files in os.walk(repo_path):
- if exclude_git:
- try:
- subdirs.remove('.git')
- except ValueError:
- pass
- zdirname = dirname[len(repo_path)+1:]
- zf.write(dirname, zdirname)
- for filename in files:
- zf.write(os.path.join(dirname, filename), os.path.join(zdirname, filename))
- zf.close()
- return '/tmp/'+repo_name.replace('/', '_')+'.zip'
-
-
-def push_s3(filename, repo_name, branch_name, outputbucket):
- s3key = '%s/%s/%s' % (repo_name, branch_name, filename.replace('/tmp/', ''))
- logger.info('pushing zip to s3://%s/%s' % (outputbucket, s3key))
- data = open(filename, 'rb')
- s3.put_object(Bucket=outputbucket, Body=data, Key=s3key)
- logger.info('Completed S3 upload...')
-
-
def lambda_handler(event, context):
+ print(event)
keybucket = event['context']['key-bucket']
outputbucket = event['context']['output-bucket']
pubkey = event['context']['public-key']
# Source IP ranges to allow requests from, if the IP is in one of these the request will not be checked for an api key
ipranges = []
- for i in event['context']['allowed-ips'].split(','):
- ipranges.append(ip_network(u'%s' % i))
+ if event['context']['allowed-ips']:
+ for i in event['context']['allowed-ips'].split(','):
+ ipranges.append(ip_network(u'%s' % i))
# APIKeys, it is recommended to use a different API key for each repo that uses this function
apikeys = event['context']['api-secrets'].split(',')
ip = ip_address(event['context']['source-ip'])
secure = False
- for net in ipranges:
- if ip in net:
- secure = True
+ if ipranges:
+ for net in ipranges:
+ if ip in net:
+ secure = True
if 'X-Git-Token' in event['params']['header'].keys():
+ print(event['params']['header']['X-Git-Token'])
if event['params']['header']['X-Git-Token'] in apikeys:
secure = True
if 'X-Gitlab-Token' in event['params']['header'].keys():
@@ -143,10 +63,10 @@ def lambda_handler(event, context):
if 'X-Hub-Signature' in event['params']['header'].keys():
for k in apikeys:
if 'use-sha256' in event['context']:
- k1 = hmac.new(str(k), str(event['context']['raw-body']), hashlib.sha256).hexdigest()
+ k1 = hmac.new(str(k).encode('utf-8'), str(event['context']['raw-body']).encode('utf-8'), hashlib.sha256).hexdigest()
k2 = str(event['params']['header']['X-Hub-Signature'].replace('sha256=', ''))
else:
- k1 = hmac.new(str(k), str(event['context']['raw-body']), hashlib.sha1).hexdigest()
+ k1 = hmac.new(str(k).encode('utf-8'), str(event['context']['raw-body']).encode('utf-8'), hashlib.sha1).hexdigest()
k2 = str(event['params']['header']['X-Hub-Signature'].replace('sha1=', ''))
if k1 == k2:
secure = True
@@ -190,14 +110,8 @@ def lambda_handler(event, context):
try:
# Bitbucket server
branch_name = event['body-json']['push']['changes'][0]['new']['name']
- if(event['body-json']['push']['changes'][0]['new']['type'] == 'tag'):
- branch_name = 'tags/'+event['body-json']['push']['changes'][0]['new']['name']
except:
- # Bitbucket Server v6.6.1
- try:
- branch_name = event['body-json']['changes'][0]['ref']['displayId']
- except:
- branch_name = 'master'
+ branch_name = 'master'
try:
# GitLab
remote_url = event['body-json']['project']['git_ssh_url']
@@ -222,22 +136,81 @@ def lambda_handler(event, context):
ssh_index = i
remote_url = event['body-json']['pullRequest']['fromRef']['repository']['links']['clone'][ssh_index]['href']
- repo_path = '/tmp/%s' % repo_name
- creds = RemoteCallbacks(credentials=get_keys(keybucket, pubkey), )
try:
- repository_path = discover_repository(repo_path)
- repo = Repository(repository_path)
- logger.info('found existing repo, using that...')
- except Exception:
- logger.info('creating new repo for %s in %s' % (remote_url, repo_path))
- repo = create_repo(repo_path, remote_url, creds)
- pull_repo(repo, branch_name, remote_url, creds)
- zipfile = zip_repo(repo_path, repo_name)
- push_s3(zipfile, repo_name, branch_name, outputbucket)
- if cleanup:
- logger.info('Cleanup Lambda container...')
- shutil.rmtree(repo_path)
- os.remove(zipfile)
- os.remove('/tmp/id_rsa')
- os.remove('/tmp/id_rsa.pub')
- return 'Successfully updated %s' % repo_name
+ codebuild_client = client(service_name='codebuild')
+ new_build = codebuild_client.start_build(projectName=os.getenv('GitPullCodeBuild'),
+ environmentVariablesOverride=[
+ {
+ 'name': 'GitUrl',
+ 'value': remote_url,
+ 'type': 'PLAINTEXT'
+ },
+ {
+ 'name': 'Branch',
+ 'value': branch_name,
+ 'type': 'PLAINTEXT'
+ },
+ {
+ 'name': 'KeyBucket',
+ 'value': keybucket,
+ 'type': 'PLAINTEXT'
+ },
+ {
+ 'name': 'KeyObject',
+ 'value': key,
+ 'type': 'PLAINTEXT'
+ },
+
+ {
+ 'name': 'outputbucket',
+ 'value': outputbucket,
+ 'type': 'PLAINTEXT'
+ },
+ {
+ 'name': 'outputbucketkey',
+ 'value': '%s' % (repo_name.replace('/', '_')) + '.zip',
+ 'type': 'PLAINTEXT'
+ },
+ {
+ 'name': 'outputbucketpath',
+ 'value': '%s/%s/' % (repo_name, branch_name),
+ 'type': 'PLAINTEXT'
+ },
+ {
+ 'name': 'exclude_git',
+ 'value': '%s' % (exclude_git),
+ 'type': 'PLAINTEXT'
+ }
+
+ ])
+ buildId = new_build['build']['id']
+ logger.info('CodeBuild Build Id is %s' % (buildId))
+ buildStatus = 'NOT_KNOWN'
+ counter = 0
+ while (counter < 60 and buildStatus != 'SUCCEEDED'): # capped this, so it just fails if it takes too long
+ logger.info("Waiting for Codebuild to complete")
+ time.sleep(5)
+ logger.info(counter)
+ counter = counter + 1
+ theBuild = codebuild_client.batch_get_builds(ids=[buildId])
+ print(theBuild)
+ buildStatus = theBuild['builds'][0]['buildStatus']
+ logger.info('CodeBuild Build Status is %s' % (buildStatus))
+ if buildStatus == 'SUCCEEDED':
+ EnvVariables = theBuild['builds'][0]['exportedEnvironmentVariables']
+ commit_id = [env for env in EnvVariables if env['name'] == 'GIT_COMMIT_ID'][0]['value']
+ commit_message = [env for env in EnvVariables if env['name'] == 'GIT_COMMIT_MSG'][0]['value']
+ current_revision = {
+ 'revision': "Git Commit Id:" + commit_id,
+ 'changeIdentifier': 'GitLab',
+ 'revisionSummary': "Git Commit Message:" + commit_message
+ }
+ outputVariables = {
+ 'commit_id': "Git Commit Id:" + commit_id,
+ 'commit_message': "Git Commit Message:" + commit_message
+ }
+ break
+ elif buildStatus == 'FAILED' or buildStatus == 'FAULT' or buildStatus == 'STOPPED' or buildStatus == 'TIMED_OUT':
+ break
+ except Exception as e:
+ logger.info("Error in Function: %s" % (e))
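Reviewer note: the rewritten handler above only validates the webhook and delegates the clone/zip work to CodeBuild. A hedged sketch of a locally built test event whose X-Hub-Signature satisfies the HMAC check (bucket names and the source IP are placeholders; a real invocation also needs the repository fields the handler extracts from the webhook payload):

    import hashlib
    import hmac
    import json

    secret = 'example-api-secret'                       # one of the api-secrets values
    body = json.dumps({'ref': 'refs/heads/master'})     # stand-in webhook payload
    signature = 'sha1=' + hmac.new(secret.encode('utf-8'),
                                   body.encode('utf-8'),
                                   hashlib.sha1).hexdigest()

    event = {
        'body-json': json.loads(body),
        'params': {'header': {'X-Hub-Signature': signature}},
        'context': {
            'key-bucket': 'example-key-bucket',         # placeholder
            'output-bucket': 'example-output-bucket',   # placeholder
            'public-key': 'example-public-key',
            'allowed-ips': '',                          # empty: rely on the signature check
            'api-secrets': secret,
            'source-ip': '203.0.113.10',
            'raw-body': body,
        },
    }
    # lambda_handler(event, None) would pass the 'secure' check and then try to
    # start the CodeBuild project named in the GitPullCodeBuild environment variable.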
diff --git a/functions/source/GitPullS3/libgit2.so.26 b/functions/source/GitPullS3/libgit2.so.26
deleted file mode 100755
index d238185..0000000
Binary files a/functions/source/GitPullS3/libgit2.so.26 and /dev/null differ
diff --git a/functions/source/GitPullS3/libgit2.tar.gz b/functions/source/GitPullS3/libgit2.tar.gz
deleted file mode 100644
index a558f3a..0000000
Binary files a/functions/source/GitPullS3/libgit2.tar.gz and /dev/null differ
diff --git a/functions/source/GitPullS3/pycparser/__init__.py b/functions/source/GitPullS3/pycparser/__init__.py
deleted file mode 100644
index e089166..0000000
--- a/functions/source/GitPullS3/pycparser/__init__.py
+++ /dev/null
@@ -1,93 +0,0 @@
-#-----------------------------------------------------------------
-# pycparser: __init__.py
-#
-# This package file exports some convenience functions for
-# interacting with pycparser
-#
-# Eli Bendersky [http://eli.thegreenplace.net]
-# License: BSD
-#-----------------------------------------------------------------
-__all__ = ['c_lexer', 'c_parser', 'c_ast']
-__version__ = '2.18'
-
-from subprocess import Popen, PIPE
-from .c_parser import CParser
-
-
-def preprocess_file(filename, cpp_path='cpp', cpp_args=''):
- """ Preprocess a file using cpp.
-
- filename:
- Name of the file you want to preprocess.
-
- cpp_path:
- cpp_args:
- Refer to the documentation of parse_file for the meaning of these
- arguments.
-
- When successful, returns the preprocessed file's contents.
- Errors from cpp will be printed out.
- """
- path_list = [cpp_path]
- if isinstance(cpp_args, list):
- path_list += cpp_args
- elif cpp_args != '':
- path_list += [cpp_args]
- path_list += [filename]
-
- try:
- # Note the use of universal_newlines to treat all newlines
- # as \n for Python's purpose
- #
- pipe = Popen( path_list,
- stdout=PIPE,
- universal_newlines=True)
- text = pipe.communicate()[0]
- except OSError as e:
- raise RuntimeError("Unable to invoke 'cpp'. " +
- 'Make sure its path was passed correctly\n' +
- ('Original error: %s' % e))
-
- return text
-
-
-def parse_file(filename, use_cpp=False, cpp_path='cpp', cpp_args='',
- parser=None):
- """ Parse a C file using pycparser.
-
- filename:
- Name of the file you want to parse.
-
- use_cpp:
- Set to True if you want to execute the C pre-processor
- on the file prior to parsing it.
-
- cpp_path:
- If use_cpp is True, this is the path to 'cpp' on your
- system. If no path is provided, it attempts to just
- execute 'cpp', so it must be in your PATH.
-
- cpp_args:
- If use_cpp is True, set this to the command line arguments strings
- to cpp. Be careful with quotes - it's best to pass a raw string
- (r'') here. For example:
- r'-I../utils/fake_libc_include'
- If several arguments are required, pass a list of strings.
-
- parser:
- Optional parser object to be used instead of the default CParser
-
- When successful, an AST is returned. ParseError can be
- thrown if the file doesn't parse successfully.
-
- Errors from cpp will be printed out.
- """
- if use_cpp:
- text = preprocess_file(filename, cpp_path, cpp_args)
- else:
- with open(filename, 'rU') as f:
- text = f.read()
-
- if parser is None:
- parser = CParser()
- return parser.parse(text, filename)
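Aside (illustrative): only this vendored copy of pycparser is deleted; the public API documented in the removed __init__.py is still available from the installed package, e.g.:

    from pycparser import c_parser

    src = "int add(int a, int b) { return a + b; }"
    ast = c_parser.CParser().parse(src, filename='<demo>')
    ast.show()    # pretty-prints the FuncDef / ParamList / Return nodes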
diff --git a/functions/source/GitPullS3/pycparser/_ast_gen.py b/functions/source/GitPullS3/pycparser/_ast_gen.py
deleted file mode 100644
index 669c303..0000000
--- a/functions/source/GitPullS3/pycparser/_ast_gen.py
+++ /dev/null
@@ -1,278 +0,0 @@
-#-----------------------------------------------------------------
-# _ast_gen.py
-#
-# Generates the AST Node classes from a specification given in
-# a configuration file
-#
-# The design of this module was inspired by astgen.py from the
-# Python 2.5 code-base.
-#
-# Eli Bendersky [http://eli.thegreenplace.net]
-# License: BSD
-#-----------------------------------------------------------------
-import pprint
-from string import Template
-
-
-class ASTCodeGenerator(object):
- def __init__(self, cfg_filename='_c_ast.cfg'):
- """ Initialize the code generator from a configuration
- file.
- """
- self.cfg_filename = cfg_filename
- self.node_cfg = [NodeCfg(name, contents)
- for (name, contents) in self.parse_cfgfile(cfg_filename)]
-
- def generate(self, file=None):
- """ Generates the code into file, an open file buffer.
- """
- src = Template(_PROLOGUE_COMMENT).substitute(
- cfg_filename=self.cfg_filename)
-
- src += _PROLOGUE_CODE
- for node_cfg in self.node_cfg:
- src += node_cfg.generate_source() + '\n\n'
-
- file.write(src)
-
- def parse_cfgfile(self, filename):
- """ Parse the configuration file and yield pairs of
- (name, contents) for each node.
- """
- with open(filename, "r") as f:
- for line in f:
- line = line.strip()
- if not line or line.startswith('#'):
- continue
- colon_i = line.find(':')
- lbracket_i = line.find('[')
- rbracket_i = line.find(']')
- if colon_i < 1 or lbracket_i <= colon_i or rbracket_i <= lbracket_i:
- raise RuntimeError("Invalid line in %s:\n%s\n" % (filename, line))
-
- name = line[:colon_i]
- val = line[lbracket_i + 1:rbracket_i]
- vallist = [v.strip() for v in val.split(',')] if val else []
- yield name, vallist
-
-
-class NodeCfg(object):
- """ Node configuration.
-
- name: node name
- contents: a list of contents - attributes and child nodes
- See comment at the top of the configuration file for details.
- """
- def __init__(self, name, contents):
- self.name = name
- self.all_entries = []
- self.attr = []
- self.child = []
- self.seq_child = []
-
- for entry in contents:
- clean_entry = entry.rstrip('*')
- self.all_entries.append(clean_entry)
-
- if entry.endswith('**'):
- self.seq_child.append(clean_entry)
- elif entry.endswith('*'):
- self.child.append(clean_entry)
- else:
- self.attr.append(entry)
-
- def generate_source(self):
- src = self._gen_init()
- src += '\n' + self._gen_children()
- src += '\n' + self._gen_attr_names()
- return src
-
- def _gen_init(self):
- src = "class %s(Node):\n" % self.name
-
- if self.all_entries:
- args = ', '.join(self.all_entries)
- slots = ', '.join("'{0}'".format(e) for e in self.all_entries)
- slots += ", 'coord', '__weakref__'"
- arglist = '(self, %s, coord=None)' % args
- else:
- slots = "'coord', '__weakref__'"
- arglist = '(self, coord=None)'
-
- src += " __slots__ = (%s)\n" % slots
- src += " def __init__%s:\n" % arglist
-
- for name in self.all_entries + ['coord']:
- src += " self.%s = %s\n" % (name, name)
-
- return src
-
- def _gen_children(self):
- src = ' def children(self):\n'
-
- if self.all_entries:
- src += ' nodelist = []\n'
-
- for child in self.child:
- src += (
- ' if self.%(child)s is not None:' +
- ' nodelist.append(("%(child)s", self.%(child)s))\n') % (
- dict(child=child))
-
- for seq_child in self.seq_child:
- src += (
- ' for i, child in enumerate(self.%(child)s or []):\n'
- ' nodelist.append(("%(child)s[%%d]" %% i, child))\n') % (
- dict(child=seq_child))
-
- src += ' return tuple(nodelist)\n'
- else:
- src += ' return ()\n'
-
- return src
-
- def _gen_attr_names(self):
- src = " attr_names = (" + ''.join("%r, " % nm for nm in self.attr) + ')'
- return src
-
-
-_PROLOGUE_COMMENT = \
-r'''#-----------------------------------------------------------------
-# ** ATTENTION **
-# This code was automatically generated from the file:
-# $cfg_filename
-#
-# Do not modify it directly. Modify the configuration file and
-# run the generator again.
-# ** ** *** ** **
-#
-# pycparser: c_ast.py
-#
-# AST Node classes.
-#
-# Eli Bendersky [http://eli.thegreenplace.net]
-# License: BSD
-#-----------------------------------------------------------------
-
-'''
-
-_PROLOGUE_CODE = r'''
-import sys
-
-
-class Node(object):
- __slots__ = ()
- """ Abstract base class for AST nodes.
- """
- def children(self):
- """ A sequence of all children that are Nodes
- """
- pass
-
- def show(self, buf=sys.stdout, offset=0, attrnames=False, nodenames=False, showcoord=False, _my_node_name=None):
- """ Pretty print the Node and all its attributes and
- children (recursively) to a buffer.
-
- buf:
- Open IO buffer into which the Node is printed.
-
- offset:
- Initial offset (amount of leading spaces)
-
- attrnames:
- True if you want to see the attribute names in
- name=value pairs. False to only see the values.
-
- nodenames:
- True if you want to see the actual node names
- within their parents.
-
- showcoord:
- Do you want the coordinates of each Node to be
- displayed.
- """
- lead = ' ' * offset
- if nodenames and _my_node_name is not None:
- buf.write(lead + self.__class__.__name__+ ' <' + _my_node_name + '>: ')
- else:
- buf.write(lead + self.__class__.__name__+ ': ')
-
- if self.attr_names:
- if attrnames:
- nvlist = [(n, getattr(self,n)) for n in self.attr_names]
- attrstr = ', '.join('%s=%s' % nv for nv in nvlist)
- else:
- vlist = [getattr(self, n) for n in self.attr_names]
- attrstr = ', '.join('%s' % v for v in vlist)
- buf.write(attrstr)
-
- if showcoord:
- buf.write(' (at %s)' % self.coord)
- buf.write('\n')
-
- for (child_name, child) in self.children():
- child.show(
- buf,
- offset=offset + 2,
- attrnames=attrnames,
- nodenames=nodenames,
- showcoord=showcoord,
- _my_node_name=child_name)
-
-
-class NodeVisitor(object):
- """ A base NodeVisitor class for visiting c_ast nodes.
- Subclass it and define your own visit_XXX methods, where
- XXX is the class name you want to visit with these
- methods.
-
- For example:
-
- class ConstantVisitor(NodeVisitor):
- def __init__(self):
- self.values = []
-
- def visit_Constant(self, node):
- self.values.append(node.value)
-
- Creates a list of values of all the constant nodes
- encountered below the given node. To use it:
-
- cv = ConstantVisitor()
- cv.visit(node)
-
- Notes:
-
- * generic_visit() will be called for AST nodes for which
- no visit_XXX method was defined.
- * The children of nodes for which a visit_XXX was
- defined will not be visited - if you need this, call
- generic_visit() on the node.
- You can use:
- NodeVisitor.generic_visit(self, node)
- * Modeled after Python's own AST visiting facilities
- (the ast module of Python 3.0)
- """
- def visit(self, node):
- """ Visit a node.
- """
- method = 'visit_' + node.__class__.__name__
- visitor = getattr(self, method, self.generic_visit)
- return visitor(node)
-
- def generic_visit(self, node):
- """ Called if no explicit visitor function exists for a
- node. Implements preorder visiting of the node.
- """
- for c_name, c in node.children():
- self.visit(c)
-
-
-'''
-
-
-if __name__ == "__main__":
- import sys
- ast_gen = ASTCodeGenerator('_c_ast.cfg')
- ast_gen.generate(open('c_ast.py', 'w'))
-
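# A minimal sketch of how the generator deleted above is meant to be driven,
# mirroring its __main__ block: after editing the _c_ast.cfg specification,
# regenerate c_ast.py. Assumes it is run from inside the pycparser package
# directory (as _build_tables.py below does); paths are illustrative.
from _ast_gen import ASTCodeGenerator

ast_gen = ASTCodeGenerator('_c_ast.cfg')
with open('c_ast.py', 'w') as generated:
    ast_gen.generate(generated)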
diff --git a/functions/source/GitPullS3/pycparser/_build_tables.py b/functions/source/GitPullS3/pycparser/_build_tables.py
deleted file mode 100644
index a8a9dcf..0000000
--- a/functions/source/GitPullS3/pycparser/_build_tables.py
+++ /dev/null
@@ -1,33 +0,0 @@
-#-----------------------------------------------------------------
-# pycparser: _build_tables.py
-#
-# A dummy for generating the lexing/parsing tables and
-# compiling them into .pyc for faster execution in optimized mode.
-# Also generates AST code from the configuration file.
-# Should be called from the pycparser directory.
-#
-# Eli Bendersky [http://eli.thegreenplace.net]
-# License: BSD
-#-----------------------------------------------------------------
-
-# Generate c_ast.py
-from _ast_gen import ASTCodeGenerator
-ast_gen = ASTCodeGenerator('_c_ast.cfg')
-ast_gen.generate(open('c_ast.py', 'w'))
-
-import sys
-sys.path[0:0] = ['.', '..']
-from pycparser import c_parser
-
-# Generates the tables
-#
-c_parser.CParser(
- lex_optimize=True,
- yacc_debug=False,
- yacc_optimize=True)
-
-# Load to compile into .pyc
-#
-import lextab
-import yacctab
-import c_ast
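# A hedged sketch of what the deleted _build_tables.py achieves: constructing an
# optimized CParser causes PLY to emit the lextab/yacctab table modules, so
# later runs can skip table generation. The flags mirror the script above and
# the CParser signature in the deleted c_parser.py; assumes pycparser is
# importable as a regular package.
from pycparser import c_parser

parser = c_parser.CParser(
    lex_optimize=True,
    yacc_debug=False,
    yacc_optimize=True)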
diff --git a/functions/source/GitPullS3/pycparser/_c_ast.cfg b/functions/source/GitPullS3/pycparser/_c_ast.cfg
deleted file mode 100644
index 7dfcd0c..0000000
--- a/functions/source/GitPullS3/pycparser/_c_ast.cfg
+++ /dev/null
@@ -1,191 +0,0 @@
-#-----------------------------------------------------------------
-# pycparser: _c_ast.cfg
-#
-# Defines the AST Node classes used in pycparser.
-#
-# Each entry is a Node sub-class name, listing the attributes
-# and child nodes of the class:
-# * - a child node
-# ** - a sequence of child nodes
-# - an attribute
-#
-# Eli Bendersky [http://eli.thegreenplace.net]
-# License: BSD
-#-----------------------------------------------------------------
-
-# ArrayDecl is a nested declaration of an array with the given type.
-# dim: the dimension (for example, constant 42)
-# dim_quals: list of dimension qualifiers, to support C99's allowing 'const'
-# and 'static' within the array dimension in function declarations.
-ArrayDecl: [type*, dim*, dim_quals]
-
-ArrayRef: [name*, subscript*]
-
-# op: =, +=, /= etc.
-#
-Assignment: [op, lvalue*, rvalue*]
-
-BinaryOp: [op, left*, right*]
-
-Break: []
-
-Case: [expr*, stmts**]
-
-Cast: [to_type*, expr*]
-
-# Compound statement in C99 is a list of block items (declarations or
-# statements).
-#
-Compound: [block_items**]
-
-# Compound literal (anonymous aggregate) for C99.
-# (type-name) {initializer_list}
-# type: the typename
-# init: InitList for the initializer list
-#
-CompoundLiteral: [type*, init*]
-
-# type: int, char, float, etc. see CLexer for constant token types
-#
-Constant: [type, value]
-
-Continue: []
-
-# name: the variable being declared
-# quals: list of qualifiers (const, volatile)
-# funcspec: list function specifiers (i.e. inline in C99)
-# storage: list of storage specifiers (extern, register, etc.)
-# type: declaration type (probably nested with all the modifiers)
-# init: initialization value, or None
-# bitsize: bit field size, or None
-#
-Decl: [name, quals, storage, funcspec, type*, init*, bitsize*]
-
-DeclList: [decls**]
-
-Default: [stmts**]
-
-DoWhile: [cond*, stmt*]
-
-# Represents the ellipsis (...) parameter in a function
-# declaration
-#
-EllipsisParam: []
-
-# An empty statement (a semicolon ';' on its own)
-#
-EmptyStatement: []
-
-# Enumeration type specifier
-# name: an optional ID
-# values: an EnumeratorList
-#
-Enum: [name, values*]
-
-# A name/value pair for enumeration values
-#
-Enumerator: [name, value*]
-
-# A list of enumerators
-#
-EnumeratorList: [enumerators**]
-
-# A list of expressions separated by the comma operator.
-#
-ExprList: [exprs**]
-
-# This is the top of the AST, representing a single C file (a
-# translation unit in K&R jargon). It contains a list of
-# "external-declaration"s, which is either declarations (Decl),
-# Typedef or function definitions (FuncDef).
-#
-FileAST: [ext**]
-
-# for (init; cond; next) stmt
-#
-For: [init*, cond*, next*, stmt*]
-
-# name: Id
-# args: ExprList
-#
-FuncCall: [name*, args*]
-
-# type (args)
-#
-FuncDecl: [args*, type*]
-
-# Function definition: a declarator for the function name and
-# a body, which is a compound statement.
-# There's an optional list of parameter declarations for old
-# K&R-style definitions
-#
-FuncDef: [decl*, param_decls**, body*]
-
-Goto: [name]
-
-ID: [name]
-
-# Holder for types that are a simple identifier (e.g. the built
-# ins void, char etc. and typedef-defined types)
-#
-IdentifierType: [names]
-
-If: [cond*, iftrue*, iffalse*]
-
-# An initialization list used for compound literals.
-#
-InitList: [exprs**]
-
-Label: [name, stmt*]
-
-# A named initializer for C99.
-# The name of a NamedInitializer is a sequence of Nodes, because
-# names can be hierarchical and contain constant expressions.
-#
-NamedInitializer: [name**, expr*]
-
-# a list of comma separated function parameter declarations
-#
-ParamList: [params**]
-
-PtrDecl: [quals, type*]
-
-Return: [expr*]
-
-# name: struct tag name
-# decls: declaration of members
-#
-Struct: [name, decls**]
-
-# type: . or ->
-# name.field or name->field
-#
-StructRef: [name*, type, field*]
-
-Switch: [cond*, stmt*]
-
-# cond ? iftrue : iffalse
-#
-TernaryOp: [cond*, iftrue*, iffalse*]
-
-# A base type declaration
-#
-TypeDecl: [declname, quals, type*]
-
-# A typedef declaration.
-# Very similar to Decl, but without some attributes
-#
-Typedef: [name, quals, storage, type*]
-
-Typename: [name, quals, type*]
-
-UnaryOp: [op, expr*]
-
-# name: union tag name
-# decls: declaration of members
-#
-Union: [name, decls**]
-
-While: [cond*, stmt*]
-
-Pragma: [string]
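# Illustration of the notation documented at the top of the deleted _c_ast.cfg:
# a plain name is an attribute, '*' marks a single child node, and '**' marks a
# sequence of children. The entry
#     Assignment: [op, lvalue*, rvalue*]
# is expanded by _ast_gen.py into roughly the class below (Node is the base
# class from c_ast.py).
from pycparser.c_ast import Node

class Assignment(Node):
    __slots__ = ('op', 'lvalue', 'rvalue', 'coord', '__weakref__')

    def __init__(self, op, lvalue, rvalue, coord=None):
        self.op = op          # plain attribute (no star)
        self.lvalue = lvalue  # single child node ('*')
        self.rvalue = rvalue  # single child node ('*')
        self.coord = coord

    def children(self):
        nodelist = []
        if self.lvalue is not None: nodelist.append(("lvalue", self.lvalue))
        if self.rvalue is not None: nodelist.append(("rvalue", self.rvalue))
        return tuple(nodelist)

    attr_names = ('op', )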
diff --git a/functions/source/GitPullS3/pycparser/ast_transforms.py b/functions/source/GitPullS3/pycparser/ast_transforms.py
deleted file mode 100644
index 623821d..0000000
--- a/functions/source/GitPullS3/pycparser/ast_transforms.py
+++ /dev/null
@@ -1,105 +0,0 @@
-#------------------------------------------------------------------------------
-# pycparser: ast_transforms.py
-#
-# Some utilities used by the parser to create a friendlier AST.
-#
-# Eli Bendersky [http://eli.thegreenplace.net]
-# License: BSD
-#------------------------------------------------------------------------------
-
-from . import c_ast
-
-
-def fix_switch_cases(switch_node):
- """ The 'case' statements in a 'switch' come out of parsing with one
- child node, so subsequent statements are just tucked to the parent
- Compound. Additionally, consecutive (fall-through) case statements
- come out messy. This is a peculiarity of the C grammar. The following:
-
- switch (myvar) {
- case 10:
- k = 10;
- p = k + 1;
- return 10;
- case 20:
- case 30:
- return 20;
- default:
- break;
- }
-
- Creates this tree (pseudo-dump):
-
- Switch
- ID: myvar
- Compound:
- Case 10:
- k = 10
- p = k + 1
- return 10
- Case 20:
- Case 30:
- return 20
- Default:
- break
-
- The goal of this transform is to fix this mess, turning it into the
- following:
-
- Switch
- ID: myvar
- Compound:
- Case 10:
- k = 10
- p = k + 1
- return 10
- Case 20:
- Case 30:
- return 20
- Default:
- break
-
- A fixed AST node is returned. The argument may be modified.
- """
- assert isinstance(switch_node, c_ast.Switch)
- if not isinstance(switch_node.stmt, c_ast.Compound):
- return switch_node
-
- # The new Compound child for the Switch, which will collect children in the
- # correct order
- new_compound = c_ast.Compound([], switch_node.stmt.coord)
-
- # The last Case/Default node
- last_case = None
-
- # Goes over the children of the Compound below the Switch, adding them
- # either directly below new_compound or below the last Case as appropriate
- for child in switch_node.stmt.block_items:
- if isinstance(child, (c_ast.Case, c_ast.Default)):
- # If it's a Case/Default:
- # 1. Add it to the Compound and mark as "last case"
- # 2. If its immediate child is also a Case or Default, promote it
- # to a sibling.
- new_compound.block_items.append(child)
- _extract_nested_case(child, new_compound.block_items)
- last_case = new_compound.block_items[-1]
- else:
- # Other statements are added as children to the last case, if it
- # exists.
- if last_case is None:
- new_compound.block_items.append(child)
- else:
- last_case.stmts.append(child)
-
- switch_node.stmt = new_compound
- return switch_node
-
-
-def _extract_nested_case(case_node, stmts_list):
- """ Recursively extract consecutive Case statements that are made nested
- by the parser and add them to the stmts_list.
- """
- if isinstance(case_node.stmts[0], (c_ast.Case, c_ast.Default)):
- stmts_list.append(case_node.stmts.pop())
- _extract_nested_case(stmts_list[-1], stmts_list)
-
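# A usage sketch for the transform documented above: CParser applies
# fix_switch_cases() while parsing, so the statements of each case end up
# attached to their Case/Default node instead of dangling in the Compound.
# Assumes the pycparser package is importable; the C source is illustrative.
from pycparser import c_parser

SWITCH_SRC = """
int f(int myvar) {
    switch (myvar) {
        case 10: return 10;
        case 20:
        case 30: return 20;
        default: break;
    }
}
"""

ast = c_parser.CParser().parse(SWITCH_SRC, filename='<example>')
ast.show(nodenames=True)  # Case/Default nodes now own their statements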
diff --git a/functions/source/GitPullS3/pycparser/c_ast.py b/functions/source/GitPullS3/pycparser/c_ast.py
deleted file mode 100644
index 5e81648..0000000
--- a/functions/source/GitPullS3/pycparser/c_ast.py
+++ /dev/null
@@ -1,809 +0,0 @@
-#-----------------------------------------------------------------
-# ** ATTENTION **
-# This code was automatically generated from the file:
-# _c_ast.cfg
-#
-# Do not modify it directly. Modify the configuration file and
-# run the generator again.
-# ** ** *** ** **
-#
-# pycparser: c_ast.py
-#
-# AST Node classes.
-#
-# Eli Bendersky [http://eli.thegreenplace.net]
-# License: BSD
-#-----------------------------------------------------------------
-
-
-import sys
-
-
-class Node(object):
- __slots__ = ()
- """ Abstract base class for AST nodes.
- """
- def children(self):
- """ A sequence of all children that are Nodes
- """
- pass
-
- def show(self, buf=sys.stdout, offset=0, attrnames=False, nodenames=False, showcoord=False, _my_node_name=None):
- """ Pretty print the Node and all its attributes and
- children (recursively) to a buffer.
-
- buf:
- Open IO buffer into which the Node is printed.
-
- offset:
- Initial offset (amount of leading spaces)
-
- attrnames:
- True if you want to see the attribute names in
- name=value pairs. False to only see the values.
-
- nodenames:
- True if you want to see the actual node names
- within their parents.
-
- showcoord:
- Do you want the coordinates of each Node to be
- displayed.
- """
- lead = ' ' * offset
- if nodenames and _my_node_name is not None:
- buf.write(lead + self.__class__.__name__+ ' <' + _my_node_name + '>: ')
- else:
- buf.write(lead + self.__class__.__name__+ ': ')
-
- if self.attr_names:
- if attrnames:
- nvlist = [(n, getattr(self,n)) for n in self.attr_names]
- attrstr = ', '.join('%s=%s' % nv for nv in nvlist)
- else:
- vlist = [getattr(self, n) for n in self.attr_names]
- attrstr = ', '.join('%s' % v for v in vlist)
- buf.write(attrstr)
-
- if showcoord:
- buf.write(' (at %s)' % self.coord)
- buf.write('\n')
-
- for (child_name, child) in self.children():
- child.show(
- buf,
- offset=offset + 2,
- attrnames=attrnames,
- nodenames=nodenames,
- showcoord=showcoord,
- _my_node_name=child_name)
-
-
-class NodeVisitor(object):
- """ A base NodeVisitor class for visiting c_ast nodes.
- Subclass it and define your own visit_XXX methods, where
- XXX is the class name you want to visit with these
- methods.
-
- For example:
-
- class ConstantVisitor(NodeVisitor):
- def __init__(self):
- self.values = []
-
- def visit_Constant(self, node):
- self.values.append(node.value)
-
- Creates a list of values of all the constant nodes
- encountered below the given node. To use it:
-
- cv = ConstantVisitor()
- cv.visit(node)
-
- Notes:
-
- * generic_visit() will be called for AST nodes for which
- no visit_XXX method was defined.
- * The children of nodes for which a visit_XXX was
- defined will not be visited - if you need this, call
- generic_visit() on the node.
- You can use:
- NodeVisitor.generic_visit(self, node)
- * Modeled after Python's own AST visiting facilities
- (the ast module of Python 3.0)
- """
- def visit(self, node):
- """ Visit a node.
- """
- method = 'visit_' + node.__class__.__name__
- visitor = getattr(self, method, self.generic_visit)
- return visitor(node)
-
- def generic_visit(self, node):
- """ Called if no explicit visitor function exists for a
- node. Implements preorder visiting of the node.
- """
- for c_name, c in node.children():
- self.visit(c)
-
-
-class ArrayDecl(Node):
- __slots__ = ('type', 'dim', 'dim_quals', 'coord', '__weakref__')
- def __init__(self, type, dim, dim_quals, coord=None):
- self.type = type
- self.dim = dim
- self.dim_quals = dim_quals
- self.coord = coord
-
- def children(self):
- nodelist = []
- if self.type is not None: nodelist.append(("type", self.type))
- if self.dim is not None: nodelist.append(("dim", self.dim))
- return tuple(nodelist)
-
- attr_names = ('dim_quals', )
-
-class ArrayRef(Node):
- __slots__ = ('name', 'subscript', 'coord', '__weakref__')
- def __init__(self, name, subscript, coord=None):
- self.name = name
- self.subscript = subscript
- self.coord = coord
-
- def children(self):
- nodelist = []
- if self.name is not None: nodelist.append(("name", self.name))
- if self.subscript is not None: nodelist.append(("subscript", self.subscript))
- return tuple(nodelist)
-
- attr_names = ()
-
-class Assignment(Node):
- __slots__ = ('op', 'lvalue', 'rvalue', 'coord', '__weakref__')
- def __init__(self, op, lvalue, rvalue, coord=None):
- self.op = op
- self.lvalue = lvalue
- self.rvalue = rvalue
- self.coord = coord
-
- def children(self):
- nodelist = []
- if self.lvalue is not None: nodelist.append(("lvalue", self.lvalue))
- if self.rvalue is not None: nodelist.append(("rvalue", self.rvalue))
- return tuple(nodelist)
-
- attr_names = ('op', )
-
-class BinaryOp(Node):
- __slots__ = ('op', 'left', 'right', 'coord', '__weakref__')
- def __init__(self, op, left, right, coord=None):
- self.op = op
- self.left = left
- self.right = right
- self.coord = coord
-
- def children(self):
- nodelist = []
- if self.left is not None: nodelist.append(("left", self.left))
- if self.right is not None: nodelist.append(("right", self.right))
- return tuple(nodelist)
-
- attr_names = ('op', )
-
-class Break(Node):
- __slots__ = ('coord', '__weakref__')
- def __init__(self, coord=None):
- self.coord = coord
-
- def children(self):
- return ()
-
- attr_names = ()
-
-class Case(Node):
- __slots__ = ('expr', 'stmts', 'coord', '__weakref__')
- def __init__(self, expr, stmts, coord=None):
- self.expr = expr
- self.stmts = stmts
- self.coord = coord
-
- def children(self):
- nodelist = []
- if self.expr is not None: nodelist.append(("expr", self.expr))
- for i, child in enumerate(self.stmts or []):
- nodelist.append(("stmts[%d]" % i, child))
- return tuple(nodelist)
-
- attr_names = ()
-
-class Cast(Node):
- __slots__ = ('to_type', 'expr', 'coord', '__weakref__')
- def __init__(self, to_type, expr, coord=None):
- self.to_type = to_type
- self.expr = expr
- self.coord = coord
-
- def children(self):
- nodelist = []
- if self.to_type is not None: nodelist.append(("to_type", self.to_type))
- if self.expr is not None: nodelist.append(("expr", self.expr))
- return tuple(nodelist)
-
- attr_names = ()
-
-class Compound(Node):
- __slots__ = ('block_items', 'coord', '__weakref__')
- def __init__(self, block_items, coord=None):
- self.block_items = block_items
- self.coord = coord
-
- def children(self):
- nodelist = []
- for i, child in enumerate(self.block_items or []):
- nodelist.append(("block_items[%d]" % i, child))
- return tuple(nodelist)
-
- attr_names = ()
-
-class CompoundLiteral(Node):
- __slots__ = ('type', 'init', 'coord', '__weakref__')
- def __init__(self, type, init, coord=None):
- self.type = type
- self.init = init
- self.coord = coord
-
- def children(self):
- nodelist = []
- if self.type is not None: nodelist.append(("type", self.type))
- if self.init is not None: nodelist.append(("init", self.init))
- return tuple(nodelist)
-
- attr_names = ()
-
-class Constant(Node):
- __slots__ = ('type', 'value', 'coord', '__weakref__')
- def __init__(self, type, value, coord=None):
- self.type = type
- self.value = value
- self.coord = coord
-
- def children(self):
- nodelist = []
- return tuple(nodelist)
-
- attr_names = ('type', 'value', )
-
-class Continue(Node):
- __slots__ = ('coord', '__weakref__')
- def __init__(self, coord=None):
- self.coord = coord
-
- def children(self):
- return ()
-
- attr_names = ()
-
-class Decl(Node):
- __slots__ = ('name', 'quals', 'storage', 'funcspec', 'type', 'init', 'bitsize', 'coord', '__weakref__')
- def __init__(self, name, quals, storage, funcspec, type, init, bitsize, coord=None):
- self.name = name
- self.quals = quals
- self.storage = storage
- self.funcspec = funcspec
- self.type = type
- self.init = init
- self.bitsize = bitsize
- self.coord = coord
-
- def children(self):
- nodelist = []
- if self.type is not None: nodelist.append(("type", self.type))
- if self.init is not None: nodelist.append(("init", self.init))
- if self.bitsize is not None: nodelist.append(("bitsize", self.bitsize))
- return tuple(nodelist)
-
- attr_names = ('name', 'quals', 'storage', 'funcspec', )
-
-class DeclList(Node):
- __slots__ = ('decls', 'coord', '__weakref__')
- def __init__(self, decls, coord=None):
- self.decls = decls
- self.coord = coord
-
- def children(self):
- nodelist = []
- for i, child in enumerate(self.decls or []):
- nodelist.append(("decls[%d]" % i, child))
- return tuple(nodelist)
-
- attr_names = ()
-
-class Default(Node):
- __slots__ = ('stmts', 'coord', '__weakref__')
- def __init__(self, stmts, coord=None):
- self.stmts = stmts
- self.coord = coord
-
- def children(self):
- nodelist = []
- for i, child in enumerate(self.stmts or []):
- nodelist.append(("stmts[%d]" % i, child))
- return tuple(nodelist)
-
- attr_names = ()
-
-class DoWhile(Node):
- __slots__ = ('cond', 'stmt', 'coord', '__weakref__')
- def __init__(self, cond, stmt, coord=None):
- self.cond = cond
- self.stmt = stmt
- self.coord = coord
-
- def children(self):
- nodelist = []
- if self.cond is not None: nodelist.append(("cond", self.cond))
- if self.stmt is not None: nodelist.append(("stmt", self.stmt))
- return tuple(nodelist)
-
- attr_names = ()
-
-class EllipsisParam(Node):
- __slots__ = ('coord', '__weakref__')
- def __init__(self, coord=None):
- self.coord = coord
-
- def children(self):
- return ()
-
- attr_names = ()
-
-class EmptyStatement(Node):
- __slots__ = ('coord', '__weakref__')
- def __init__(self, coord=None):
- self.coord = coord
-
- def children(self):
- return ()
-
- attr_names = ()
-
-class Enum(Node):
- __slots__ = ('name', 'values', 'coord', '__weakref__')
- def __init__(self, name, values, coord=None):
- self.name = name
- self.values = values
- self.coord = coord
-
- def children(self):
- nodelist = []
- if self.values is not None: nodelist.append(("values", self.values))
- return tuple(nodelist)
-
- attr_names = ('name', )
-
-class Enumerator(Node):
- __slots__ = ('name', 'value', 'coord', '__weakref__')
- def __init__(self, name, value, coord=None):
- self.name = name
- self.value = value
- self.coord = coord
-
- def children(self):
- nodelist = []
- if self.value is not None: nodelist.append(("value", self.value))
- return tuple(nodelist)
-
- attr_names = ('name', )
-
-class EnumeratorList(Node):
- __slots__ = ('enumerators', 'coord', '__weakref__')
- def __init__(self, enumerators, coord=None):
- self.enumerators = enumerators
- self.coord = coord
-
- def children(self):
- nodelist = []
- for i, child in enumerate(self.enumerators or []):
- nodelist.append(("enumerators[%d]" % i, child))
- return tuple(nodelist)
-
- attr_names = ()
-
-class ExprList(Node):
- __slots__ = ('exprs', 'coord', '__weakref__')
- def __init__(self, exprs, coord=None):
- self.exprs = exprs
- self.coord = coord
-
- def children(self):
- nodelist = []
- for i, child in enumerate(self.exprs or []):
- nodelist.append(("exprs[%d]" % i, child))
- return tuple(nodelist)
-
- attr_names = ()
-
-class FileAST(Node):
- __slots__ = ('ext', 'coord', '__weakref__')
- def __init__(self, ext, coord=None):
- self.ext = ext
- self.coord = coord
-
- def children(self):
- nodelist = []
- for i, child in enumerate(self.ext or []):
- nodelist.append(("ext[%d]" % i, child))
- return tuple(nodelist)
-
- attr_names = ()
-
-class For(Node):
- __slots__ = ('init', 'cond', 'next', 'stmt', 'coord', '__weakref__')
- def __init__(self, init, cond, next, stmt, coord=None):
- self.init = init
- self.cond = cond
- self.next = next
- self.stmt = stmt
- self.coord = coord
-
- def children(self):
- nodelist = []
- if self.init is not None: nodelist.append(("init", self.init))
- if self.cond is not None: nodelist.append(("cond", self.cond))
- if self.next is not None: nodelist.append(("next", self.next))
- if self.stmt is not None: nodelist.append(("stmt", self.stmt))
- return tuple(nodelist)
-
- attr_names = ()
-
-class FuncCall(Node):
- __slots__ = ('name', 'args', 'coord', '__weakref__')
- def __init__(self, name, args, coord=None):
- self.name = name
- self.args = args
- self.coord = coord
-
- def children(self):
- nodelist = []
- if self.name is not None: nodelist.append(("name", self.name))
- if self.args is not None: nodelist.append(("args", self.args))
- return tuple(nodelist)
-
- attr_names = ()
-
-class FuncDecl(Node):
- __slots__ = ('args', 'type', 'coord', '__weakref__')
- def __init__(self, args, type, coord=None):
- self.args = args
- self.type = type
- self.coord = coord
-
- def children(self):
- nodelist = []
- if self.args is not None: nodelist.append(("args", self.args))
- if self.type is not None: nodelist.append(("type", self.type))
- return tuple(nodelist)
-
- attr_names = ()
-
-class FuncDef(Node):
- __slots__ = ('decl', 'param_decls', 'body', 'coord', '__weakref__')
- def __init__(self, decl, param_decls, body, coord=None):
- self.decl = decl
- self.param_decls = param_decls
- self.body = body
- self.coord = coord
-
- def children(self):
- nodelist = []
- if self.decl is not None: nodelist.append(("decl", self.decl))
- if self.body is not None: nodelist.append(("body", self.body))
- for i, child in enumerate(self.param_decls or []):
- nodelist.append(("param_decls[%d]" % i, child))
- return tuple(nodelist)
-
- attr_names = ()
-
-class Goto(Node):
- __slots__ = ('name', 'coord', '__weakref__')
- def __init__(self, name, coord=None):
- self.name = name
- self.coord = coord
-
- def children(self):
- nodelist = []
- return tuple(nodelist)
-
- attr_names = ('name', )
-
-class ID(Node):
- __slots__ = ('name', 'coord', '__weakref__')
- def __init__(self, name, coord=None):
- self.name = name
- self.coord = coord
-
- def children(self):
- nodelist = []
- return tuple(nodelist)
-
- attr_names = ('name', )
-
-class IdentifierType(Node):
- __slots__ = ('names', 'coord', '__weakref__')
- def __init__(self, names, coord=None):
- self.names = names
- self.coord = coord
-
- def children(self):
- nodelist = []
- return tuple(nodelist)
-
- attr_names = ('names', )
-
-class If(Node):
- __slots__ = ('cond', 'iftrue', 'iffalse', 'coord', '__weakref__')
- def __init__(self, cond, iftrue, iffalse, coord=None):
- self.cond = cond
- self.iftrue = iftrue
- self.iffalse = iffalse
- self.coord = coord
-
- def children(self):
- nodelist = []
- if self.cond is not None: nodelist.append(("cond", self.cond))
- if self.iftrue is not None: nodelist.append(("iftrue", self.iftrue))
- if self.iffalse is not None: nodelist.append(("iffalse", self.iffalse))
- return tuple(nodelist)
-
- attr_names = ()
-
-class InitList(Node):
- __slots__ = ('exprs', 'coord', '__weakref__')
- def __init__(self, exprs, coord=None):
- self.exprs = exprs
- self.coord = coord
-
- def children(self):
- nodelist = []
- for i, child in enumerate(self.exprs or []):
- nodelist.append(("exprs[%d]" % i, child))
- return tuple(nodelist)
-
- attr_names = ()
-
-class Label(Node):
- __slots__ = ('name', 'stmt', 'coord', '__weakref__')
- def __init__(self, name, stmt, coord=None):
- self.name = name
- self.stmt = stmt
- self.coord = coord
-
- def children(self):
- nodelist = []
- if self.stmt is not None: nodelist.append(("stmt", self.stmt))
- return tuple(nodelist)
-
- attr_names = ('name', )
-
-class NamedInitializer(Node):
- __slots__ = ('name', 'expr', 'coord', '__weakref__')
- def __init__(self, name, expr, coord=None):
- self.name = name
- self.expr = expr
- self.coord = coord
-
- def children(self):
- nodelist = []
- if self.expr is not None: nodelist.append(("expr", self.expr))
- for i, child in enumerate(self.name or []):
- nodelist.append(("name[%d]" % i, child))
- return tuple(nodelist)
-
- attr_names = ()
-
-class ParamList(Node):
- __slots__ = ('params', 'coord', '__weakref__')
- def __init__(self, params, coord=None):
- self.params = params
- self.coord = coord
-
- def children(self):
- nodelist = []
- for i, child in enumerate(self.params or []):
- nodelist.append(("params[%d]" % i, child))
- return tuple(nodelist)
-
- attr_names = ()
-
-class PtrDecl(Node):
- __slots__ = ('quals', 'type', 'coord', '__weakref__')
- def __init__(self, quals, type, coord=None):
- self.quals = quals
- self.type = type
- self.coord = coord
-
- def children(self):
- nodelist = []
- if self.type is not None: nodelist.append(("type", self.type))
- return tuple(nodelist)
-
- attr_names = ('quals', )
-
-class Return(Node):
- __slots__ = ('expr', 'coord', '__weakref__')
- def __init__(self, expr, coord=None):
- self.expr = expr
- self.coord = coord
-
- def children(self):
- nodelist = []
- if self.expr is not None: nodelist.append(("expr", self.expr))
- return tuple(nodelist)
-
- attr_names = ()
-
-class Struct(Node):
- __slots__ = ('name', 'decls', 'coord', '__weakref__')
- def __init__(self, name, decls, coord=None):
- self.name = name
- self.decls = decls
- self.coord = coord
-
- def children(self):
- nodelist = []
- for i, child in enumerate(self.decls or []):
- nodelist.append(("decls[%d]" % i, child))
- return tuple(nodelist)
-
- attr_names = ('name', )
-
-class StructRef(Node):
- __slots__ = ('name', 'type', 'field', 'coord', '__weakref__')
- def __init__(self, name, type, field, coord=None):
- self.name = name
- self.type = type
- self.field = field
- self.coord = coord
-
- def children(self):
- nodelist = []
- if self.name is not None: nodelist.append(("name", self.name))
- if self.field is not None: nodelist.append(("field", self.field))
- return tuple(nodelist)
-
- attr_names = ('type', )
-
-class Switch(Node):
- __slots__ = ('cond', 'stmt', 'coord', '__weakref__')
- def __init__(self, cond, stmt, coord=None):
- self.cond = cond
- self.stmt = stmt
- self.coord = coord
-
- def children(self):
- nodelist = []
- if self.cond is not None: nodelist.append(("cond", self.cond))
- if self.stmt is not None: nodelist.append(("stmt", self.stmt))
- return tuple(nodelist)
-
- attr_names = ()
-
-class TernaryOp(Node):
- __slots__ = ('cond', 'iftrue', 'iffalse', 'coord', '__weakref__')
- def __init__(self, cond, iftrue, iffalse, coord=None):
- self.cond = cond
- self.iftrue = iftrue
- self.iffalse = iffalse
- self.coord = coord
-
- def children(self):
- nodelist = []
- if self.cond is not None: nodelist.append(("cond", self.cond))
- if self.iftrue is not None: nodelist.append(("iftrue", self.iftrue))
- if self.iffalse is not None: nodelist.append(("iffalse", self.iffalse))
- return tuple(nodelist)
-
- attr_names = ()
-
-class TypeDecl(Node):
- __slots__ = ('declname', 'quals', 'type', 'coord', '__weakref__')
- def __init__(self, declname, quals, type, coord=None):
- self.declname = declname
- self.quals = quals
- self.type = type
- self.coord = coord
-
- def children(self):
- nodelist = []
- if self.type is not None: nodelist.append(("type", self.type))
- return tuple(nodelist)
-
- attr_names = ('declname', 'quals', )
-
-class Typedef(Node):
- __slots__ = ('name', 'quals', 'storage', 'type', 'coord', '__weakref__')
- def __init__(self, name, quals, storage, type, coord=None):
- self.name = name
- self.quals = quals
- self.storage = storage
- self.type = type
- self.coord = coord
-
- def children(self):
- nodelist = []
- if self.type is not None: nodelist.append(("type", self.type))
- return tuple(nodelist)
-
- attr_names = ('name', 'quals', 'storage', )
-
-class Typename(Node):
- __slots__ = ('name', 'quals', 'type', 'coord', '__weakref__')
- def __init__(self, name, quals, type, coord=None):
- self.name = name
- self.quals = quals
- self.type = type
- self.coord = coord
-
- def children(self):
- nodelist = []
- if self.type is not None: nodelist.append(("type", self.type))
- return tuple(nodelist)
-
- attr_names = ('name', 'quals', )
-
-class UnaryOp(Node):
- __slots__ = ('op', 'expr', 'coord', '__weakref__')
- def __init__(self, op, expr, coord=None):
- self.op = op
- self.expr = expr
- self.coord = coord
-
- def children(self):
- nodelist = []
- if self.expr is not None: nodelist.append(("expr", self.expr))
- return tuple(nodelist)
-
- attr_names = ('op', )
-
-class Union(Node):
- __slots__ = ('name', 'decls', 'coord', '__weakref__')
- def __init__(self, name, decls, coord=None):
- self.name = name
- self.decls = decls
- self.coord = coord
-
- def children(self):
- nodelist = []
- for i, child in enumerate(self.decls or []):
- nodelist.append(("decls[%d]" % i, child))
- return tuple(nodelist)
-
- attr_names = ('name', )
-
-class While(Node):
- __slots__ = ('cond', 'stmt', 'coord', '__weakref__')
- def __init__(self, cond, stmt, coord=None):
- self.cond = cond
- self.stmt = stmt
- self.coord = coord
-
- def children(self):
- nodelist = []
- if self.cond is not None: nodelist.append(("cond", self.cond))
- if self.stmt is not None: nodelist.append(("stmt", self.stmt))
- return tuple(nodelist)
-
- attr_names = ()
-
-class Pragma(Node):
- __slots__ = ('string', 'coord', '__weakref__')
- def __init__(self, string, coord=None):
- self.string = string
- self.coord = coord
-
- def children(self):
- nodelist = []
- return tuple(nodelist)
-
- attr_names = ('string', )
-
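# A runnable version of the ConstantVisitor example from the NodeVisitor
# docstring above, assuming the pycparser package is importable.
from pycparser import c_ast, c_parser

class ConstantVisitor(c_ast.NodeVisitor):
    def __init__(self):
        self.values = []

    def visit_Constant(self, node):
        self.values.append(node.value)

ast = c_parser.CParser().parse("int x = 1 + 2 * 3;")
cv = ConstantVisitor()
cv.visit(ast)
print(cv.values)  # ['1', '2', '3']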
diff --git a/functions/source/GitPullS3/pycparser/c_generator.py b/functions/source/GitPullS3/pycparser/c_generator.py
deleted file mode 100644
index 73e7f1b..0000000
--- a/functions/source/GitPullS3/pycparser/c_generator.py
+++ /dev/null
@@ -1,411 +0,0 @@
-#------------------------------------------------------------------------------
-# pycparser: c_generator.py
-#
-# C code generator from pycparser AST nodes.
-#
-# Eli Bendersky [http://eli.thegreenplace.net]
-# License: BSD
-#------------------------------------------------------------------------------
-from . import c_ast
-
-
-class CGenerator(object):
- """ Uses the same visitor pattern as c_ast.NodeVisitor, but modified to
- return a value from each visit method, using string accumulation in
- generic_visit.
- """
- def __init__(self):
- # Statements start with indentation of self.indent_level spaces, using
- # the _make_indent method
- #
- self.indent_level = 0
-
- def _make_indent(self):
- return ' ' * self.indent_level
-
- def visit(self, node):
- method = 'visit_' + node.__class__.__name__
- return getattr(self, method, self.generic_visit)(node)
-
- def generic_visit(self, node):
- #~ print('generic:', type(node))
- if node is None:
- return ''
- else:
- return ''.join(self.visit(c) for c_name, c in node.children())
-
- def visit_Constant(self, n):
- return n.value
-
- def visit_ID(self, n):
- return n.name
-
- def visit_Pragma(self, n):
- ret = '#pragma'
- if n.string:
- ret += ' ' + n.string
- return ret
-
- def visit_ArrayRef(self, n):
- arrref = self._parenthesize_unless_simple(n.name)
- return arrref + '[' + self.visit(n.subscript) + ']'
-
- def visit_StructRef(self, n):
- sref = self._parenthesize_unless_simple(n.name)
- return sref + n.type + self.visit(n.field)
-
- def visit_FuncCall(self, n):
- fref = self._parenthesize_unless_simple(n.name)
- return fref + '(' + self.visit(n.args) + ')'
-
- def visit_UnaryOp(self, n):
- operand = self._parenthesize_unless_simple(n.expr)
- if n.op == 'p++':
- return '%s++' % operand
- elif n.op == 'p--':
- return '%s--' % operand
- elif n.op == 'sizeof':
- # Always parenthesize the argument of sizeof since it can be
- # a name.
- return 'sizeof(%s)' % self.visit(n.expr)
- else:
- return '%s%s' % (n.op, operand)
-
- def visit_BinaryOp(self, n):
- lval_str = self._parenthesize_if(n.left,
- lambda d: not self._is_simple_node(d))
- rval_str = self._parenthesize_if(n.right,
- lambda d: not self._is_simple_node(d))
- return '%s %s %s' % (lval_str, n.op, rval_str)
-
- def visit_Assignment(self, n):
- rval_str = self._parenthesize_if(
- n.rvalue,
- lambda n: isinstance(n, c_ast.Assignment))
- return '%s %s %s' % (self.visit(n.lvalue), n.op, rval_str)
-
- def visit_IdentifierType(self, n):
- return ' '.join(n.names)
-
- def _visit_expr(self, n):
- if isinstance(n, c_ast.InitList):
- return '{' + self.visit(n) + '}'
- elif isinstance(n, c_ast.ExprList):
- return '(' + self.visit(n) + ')'
- else:
- return self.visit(n)
-
- def visit_Decl(self, n, no_type=False):
- # no_type is used when a Decl is part of a DeclList, where the type is
- # explicitly only for the first declaration in a list.
- #
- s = n.name if no_type else self._generate_decl(n)
- if n.bitsize: s += ' : ' + self.visit(n.bitsize)
- if n.init:
- s += ' = ' + self._visit_expr(n.init)
- return s
-
- def visit_DeclList(self, n):
- s = self.visit(n.decls[0])
- if len(n.decls) > 1:
- s += ', ' + ', '.join(self.visit_Decl(decl, no_type=True)
- for decl in n.decls[1:])
- return s
-
- def visit_Typedef(self, n):
- s = ''
- if n.storage: s += ' '.join(n.storage) + ' '
- s += self._generate_type(n.type)
- return s
-
- def visit_Cast(self, n):
- s = '(' + self._generate_type(n.to_type) + ')'
- return s + ' ' + self._parenthesize_unless_simple(n.expr)
-
- def visit_ExprList(self, n):
- visited_subexprs = []
- for expr in n.exprs:
- visited_subexprs.append(self._visit_expr(expr))
- return ', '.join(visited_subexprs)
-
- def visit_InitList(self, n):
- visited_subexprs = []
- for expr in n.exprs:
- visited_subexprs.append(self._visit_expr(expr))
- return ', '.join(visited_subexprs)
-
- def visit_Enum(self, n):
- s = 'enum'
- if n.name: s += ' ' + n.name
- if n.values:
- s += ' {'
- for i, enumerator in enumerate(n.values.enumerators):
- s += enumerator.name
- if enumerator.value:
- s += ' = ' + self.visit(enumerator.value)
- if i != len(n.values.enumerators) - 1:
- s += ', '
- s += '}'
- return s
-
- def visit_FuncDef(self, n):
- decl = self.visit(n.decl)
- self.indent_level = 0
- body = self.visit(n.body)
- if n.param_decls:
- knrdecls = ';\n'.join(self.visit(p) for p in n.param_decls)
- return decl + '\n' + knrdecls + ';\n' + body + '\n'
- else:
- return decl + '\n' + body + '\n'
-
- def visit_FileAST(self, n):
- s = ''
- for ext in n.ext:
- if isinstance(ext, c_ast.FuncDef):
- s += self.visit(ext)
- elif isinstance(ext, c_ast.Pragma):
- s += self.visit(ext) + '\n'
- else:
- s += self.visit(ext) + ';\n'
- return s
-
- def visit_Compound(self, n):
- s = self._make_indent() + '{\n'
- self.indent_level += 2
- if n.block_items:
- s += ''.join(self._generate_stmt(stmt) for stmt in n.block_items)
- self.indent_level -= 2
- s += self._make_indent() + '}\n'
- return s
-
- def visit_CompoundLiteral(self, n):
- return '(' + self.visit(n.type) + '){' + self.visit(n.init) + '}'
-
-
- def visit_EmptyStatement(self, n):
- return ';'
-
- def visit_ParamList(self, n):
- return ', '.join(self.visit(param) for param in n.params)
-
- def visit_Return(self, n):
- s = 'return'
- if n.expr: s += ' ' + self.visit(n.expr)
- return s + ';'
-
- def visit_Break(self, n):
- return 'break;'
-
- def visit_Continue(self, n):
- return 'continue;'
-
- def visit_TernaryOp(self, n):
- s = '(' + self._visit_expr(n.cond) + ') ? '
- s += '(' + self._visit_expr(n.iftrue) + ') : '
- s += '(' + self._visit_expr(n.iffalse) + ')'
- return s
-
- def visit_If(self, n):
- s = 'if ('
- if n.cond: s += self.visit(n.cond)
- s += ')\n'
- s += self._generate_stmt(n.iftrue, add_indent=True)
- if n.iffalse:
- s += self._make_indent() + 'else\n'
- s += self._generate_stmt(n.iffalse, add_indent=True)
- return s
-
- def visit_For(self, n):
- s = 'for ('
- if n.init: s += self.visit(n.init)
- s += ';'
- if n.cond: s += ' ' + self.visit(n.cond)
- s += ';'
- if n.next: s += ' ' + self.visit(n.next)
- s += ')\n'
- s += self._generate_stmt(n.stmt, add_indent=True)
- return s
-
- def visit_While(self, n):
- s = 'while ('
- if n.cond: s += self.visit(n.cond)
- s += ')\n'
- s += self._generate_stmt(n.stmt, add_indent=True)
- return s
-
- def visit_DoWhile(self, n):
- s = 'do\n'
- s += self._generate_stmt(n.stmt, add_indent=True)
- s += self._make_indent() + 'while ('
- if n.cond: s += self.visit(n.cond)
- s += ');'
- return s
-
- def visit_Switch(self, n):
- s = 'switch (' + self.visit(n.cond) + ')\n'
- s += self._generate_stmt(n.stmt, add_indent=True)
- return s
-
- def visit_Case(self, n):
- s = 'case ' + self.visit(n.expr) + ':\n'
- for stmt in n.stmts:
- s += self._generate_stmt(stmt, add_indent=True)
- return s
-
- def visit_Default(self, n):
- s = 'default:\n'
- for stmt in n.stmts:
- s += self._generate_stmt(stmt, add_indent=True)
- return s
-
- def visit_Label(self, n):
- return n.name + ':\n' + self._generate_stmt(n.stmt)
-
- def visit_Goto(self, n):
- return 'goto ' + n.name + ';'
-
- def visit_EllipsisParam(self, n):
- return '...'
-
- def visit_Struct(self, n):
- return self._generate_struct_union(n, 'struct')
-
- def visit_Typename(self, n):
- return self._generate_type(n.type)
-
- def visit_Union(self, n):
- return self._generate_struct_union(n, 'union')
-
- def visit_NamedInitializer(self, n):
- s = ''
- for name in n.name:
- if isinstance(name, c_ast.ID):
- s += '.' + name.name
- elif isinstance(name, c_ast.Constant):
- s += '[' + name.value + ']'
- s += ' = ' + self._visit_expr(n.expr)
- return s
-
- def visit_FuncDecl(self, n):
- return self._generate_type(n)
-
- def _generate_struct_union(self, n, name):
- """ Generates code for structs and unions. name should be either
- 'struct' or union.
- """
- s = name + ' ' + (n.name or '')
- if n.decls:
- s += '\n'
- s += self._make_indent()
- self.indent_level += 2
- s += '{\n'
- for decl in n.decls:
- s += self._generate_stmt(decl)
- self.indent_level -= 2
- s += self._make_indent() + '}'
- return s
-
- def _generate_stmt(self, n, add_indent=False):
- """ Generation from a statement node. This method exists as a wrapper
- for individual visit_* methods to handle different treatment of
- some statements in this context.
- """
- typ = type(n)
- if add_indent: self.indent_level += 2
- indent = self._make_indent()
- if add_indent: self.indent_level -= 2
-
- if typ in (
- c_ast.Decl, c_ast.Assignment, c_ast.Cast, c_ast.UnaryOp,
- c_ast.BinaryOp, c_ast.TernaryOp, c_ast.FuncCall, c_ast.ArrayRef,
- c_ast.StructRef, c_ast.Constant, c_ast.ID, c_ast.Typedef,
- c_ast.ExprList):
- # These can also appear in an expression context so no semicolon
- # is added to them automatically
- #
- return indent + self.visit(n) + ';\n'
- elif typ in (c_ast.Compound,):
- # No extra indentation required before the opening brace of a
- # compound - because it consists of multiple lines it has to
- # compute its own indentation.
- #
- return self.visit(n)
- else:
- return indent + self.visit(n) + '\n'
-
- def _generate_decl(self, n):
- """ Generation from a Decl node.
- """
- s = ''
- if n.funcspec: s = ' '.join(n.funcspec) + ' '
- if n.storage: s += ' '.join(n.storage) + ' '
- s += self._generate_type(n.type)
- return s
-
- def _generate_type(self, n, modifiers=[]):
- """ Recursive generation from a type node. n is the type node.
- modifiers collects the PtrDecl, ArrayDecl and FuncDecl modifiers
- encountered on the way down to a TypeDecl, to allow proper
- generation from it.
- """
- typ = type(n)
- #~ print(n, modifiers)
-
- if typ == c_ast.TypeDecl:
- s = ''
- if n.quals: s += ' '.join(n.quals) + ' '
- s += self.visit(n.type)
-
- nstr = n.declname if n.declname else ''
- # Resolve modifiers.
- # Wrap in parens to distinguish pointer to array and pointer to
- # function syntax.
- #
- for i, modifier in enumerate(modifiers):
- if isinstance(modifier, c_ast.ArrayDecl):
- if (i != 0 and isinstance(modifiers[i - 1], c_ast.PtrDecl)):
- nstr = '(' + nstr + ')'
- nstr += '[' + self.visit(modifier.dim) + ']'
- elif isinstance(modifier, c_ast.FuncDecl):
- if (i != 0 and isinstance(modifiers[i - 1], c_ast.PtrDecl)):
- nstr = '(' + nstr + ')'
- nstr += '(' + self.visit(modifier.args) + ')'
- elif isinstance(modifier, c_ast.PtrDecl):
- if modifier.quals:
- nstr = '* %s %s' % (' '.join(modifier.quals), nstr)
- else:
- nstr = '*' + nstr
- if nstr: s += ' ' + nstr
- return s
- elif typ == c_ast.Decl:
- return self._generate_decl(n.type)
- elif typ == c_ast.Typename:
- return self._generate_type(n.type)
- elif typ == c_ast.IdentifierType:
- return ' '.join(n.names) + ' '
- elif typ in (c_ast.ArrayDecl, c_ast.PtrDecl, c_ast.FuncDecl):
- return self._generate_type(n.type, modifiers + [n])
- else:
- return self.visit(n)
-
- def _parenthesize_if(self, n, condition):
- """ Visits 'n' and returns its string representation, parenthesized
- if the condition function applied to the node returns True.
- """
- s = self._visit_expr(n)
- if condition(n):
- return '(' + s + ')'
- else:
- return s
-
- def _parenthesize_unless_simple(self, n):
- """ Common use case for _parenthesize_if
- """
- return self._parenthesize_if(n, lambda d: not self._is_simple_node(d))
-
- def _is_simple_node(self, n):
- """ Returns True for nodes that are "simple" - i.e. nodes that always
- have higher precedence than operators.
- """
- return isinstance(n,( c_ast.Constant, c_ast.ID, c_ast.ArrayRef,
- c_ast.StructRef, c_ast.FuncCall))
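# A round-trip sketch for the generator deleted above: parse C source into an
# AST, then emit C text back from it with CGenerator. Assumes the pycparser
# package is importable; the input source is illustrative.
from pycparser import c_generator, c_parser

ast = c_parser.CParser().parse("int add(int a, int b) { return a + b; }")
print(c_generator.CGenerator().visit(ast))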
diff --git a/functions/source/GitPullS3/pycparser/c_lexer.py b/functions/source/GitPullS3/pycparser/c_lexer.py
deleted file mode 100644
index d9941c1..0000000
--- a/functions/source/GitPullS3/pycparser/c_lexer.py
+++ /dev/null
@@ -1,485 +0,0 @@
-#------------------------------------------------------------------------------
-# pycparser: c_lexer.py
-#
-# CLexer class: lexer for the C language
-#
-# Eli Bendersky [http://eli.thegreenplace.net]
-# License: BSD
-#------------------------------------------------------------------------------
-import re
-import sys
-
-from .ply import lex
-from .ply.lex import TOKEN
-
-
-class CLexer(object):
- """ A lexer for the C language. After building it, set the
- input text with input(), and call token() to get new
- tokens.
-
- The public attribute filename can be set to an initial
-        filename, but the lexer will update it upon #line
- directives.
- """
- def __init__(self, error_func, on_lbrace_func, on_rbrace_func,
- type_lookup_func):
- """ Create a new Lexer.
-
- error_func:
- An error function. Will be called with an error
- message, line and column as arguments, in case of
- an error during lexing.
-
- on_lbrace_func, on_rbrace_func:
- Called when an LBRACE or RBRACE is encountered
- (likely to push/pop type_lookup_func's scope)
-
- type_lookup_func:
- A type lookup function. Given a string, it must
- return True IFF this string is a name of a type
- that was defined with a typedef earlier.
- """
- self.error_func = error_func
- self.on_lbrace_func = on_lbrace_func
- self.on_rbrace_func = on_rbrace_func
- self.type_lookup_func = type_lookup_func
- self.filename = ''
-
- # Keeps track of the last token returned from self.token()
- self.last_token = None
-
-        # Allow either "# line" or "# <num>" to support GCC's
- # cpp output
- #
- self.line_pattern = re.compile(r'([ \t]*line\W)|([ \t]*\d+)')
- self.pragma_pattern = re.compile(r'[ \t]*pragma\W')
-
- def build(self, **kwargs):
- """ Builds the lexer from the specification. Must be
- called after the lexer object is created.
-
- This method exists separately, because the PLY
- manual warns against calling lex.lex inside
- __init__
- """
- self.lexer = lex.lex(object=self, **kwargs)
-
- def reset_lineno(self):
- """ Resets the internal line number counter of the lexer.
- """
- self.lexer.lineno = 1
-
- def input(self, text):
- self.lexer.input(text)
-
- def token(self):
- self.last_token = self.lexer.token()
- return self.last_token
-
- def find_tok_column(self, token):
- """ Find the column of the token in its line.
- """
- last_cr = self.lexer.lexdata.rfind('\n', 0, token.lexpos)
- return token.lexpos - last_cr
-
- ######################-- PRIVATE --######################
-
- ##
- ## Internal auxiliary methods
- ##
- def _error(self, msg, token):
- location = self._make_tok_location(token)
- self.error_func(msg, location[0], location[1])
- self.lexer.skip(1)
-
- def _make_tok_location(self, token):
- return (token.lineno, self.find_tok_column(token))
-
- ##
- ## Reserved keywords
- ##
- keywords = (
- '_BOOL', '_COMPLEX', 'AUTO', 'BREAK', 'CASE', 'CHAR', 'CONST',
- 'CONTINUE', 'DEFAULT', 'DO', 'DOUBLE', 'ELSE', 'ENUM', 'EXTERN',
- 'FLOAT', 'FOR', 'GOTO', 'IF', 'INLINE', 'INT', 'LONG',
- 'REGISTER', 'OFFSETOF',
- 'RESTRICT', 'RETURN', 'SHORT', 'SIGNED', 'SIZEOF', 'STATIC', 'STRUCT',
- 'SWITCH', 'TYPEDEF', 'UNION', 'UNSIGNED', 'VOID',
- 'VOLATILE', 'WHILE', '__INT128',
- )
-
- keyword_map = {}
- for keyword in keywords:
- if keyword == '_BOOL':
- keyword_map['_Bool'] = keyword
- elif keyword == '_COMPLEX':
- keyword_map['_Complex'] = keyword
- else:
- keyword_map[keyword.lower()] = keyword
-
- ##
- ## All the tokens recognized by the lexer
- ##
- tokens = keywords + (
- # Identifiers
- 'ID',
-
- # Type identifiers (identifiers previously defined as
- # types with typedef)
- 'TYPEID',
-
- # constants
- 'INT_CONST_DEC', 'INT_CONST_OCT', 'INT_CONST_HEX', 'INT_CONST_BIN',
- 'FLOAT_CONST', 'HEX_FLOAT_CONST',
- 'CHAR_CONST',
- 'WCHAR_CONST',
-
- # String literals
- 'STRING_LITERAL',
- 'WSTRING_LITERAL',
-
- # Operators
- 'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'MOD',
- 'OR', 'AND', 'NOT', 'XOR', 'LSHIFT', 'RSHIFT',
- 'LOR', 'LAND', 'LNOT',
- 'LT', 'LE', 'GT', 'GE', 'EQ', 'NE',
-
- # Assignment
- 'EQUALS', 'TIMESEQUAL', 'DIVEQUAL', 'MODEQUAL',
- 'PLUSEQUAL', 'MINUSEQUAL',
- 'LSHIFTEQUAL','RSHIFTEQUAL', 'ANDEQUAL', 'XOREQUAL',
- 'OREQUAL',
-
- # Increment/decrement
- 'PLUSPLUS', 'MINUSMINUS',
-
- # Structure dereference (->)
- 'ARROW',
-
- # Conditional operator (?)
- 'CONDOP',
-
- # Delimeters
- 'LPAREN', 'RPAREN', # ( )
- 'LBRACKET', 'RBRACKET', # [ ]
- 'LBRACE', 'RBRACE', # { }
- 'COMMA', 'PERIOD', # . ,
- 'SEMI', 'COLON', # ; :
-
- # Ellipsis (...)
- 'ELLIPSIS',
-
- # pre-processor
- 'PPHASH', # '#'
- 'PPPRAGMA', # 'pragma'
- 'PPPRAGMASTR',
- )
-
- ##
- ## Regexes for use in tokens
- ##
- ##
-
- # valid C identifiers (K&R2: A.2.3), plus '$' (supported by some compilers)
- identifier = r'[a-zA-Z_$][0-9a-zA-Z_$]*'
-
- hex_prefix = '0[xX]'
- hex_digits = '[0-9a-fA-F]+'
- bin_prefix = '0[bB]'
- bin_digits = '[01]+'
-
- # integer constants (K&R2: A.2.5.1)
- integer_suffix_opt = r'(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?'
- decimal_constant = '(0'+integer_suffix_opt+')|([1-9][0-9]*'+integer_suffix_opt+')'
- octal_constant = '0[0-7]*'+integer_suffix_opt
- hex_constant = hex_prefix+hex_digits+integer_suffix_opt
- bin_constant = bin_prefix+bin_digits+integer_suffix_opt
-
- bad_octal_constant = '0[0-7]*[89]'
-
- # character constants (K&R2: A.2.5.2)
- # Note: a-zA-Z and '.-~^_!=&;,' are allowed as escape chars to support #line
- # directives with Windows paths as filenames (..\..\dir\file)
- # For the same reason, decimal_escape allows all digit sequences. We want to
- # parse all correct code, even if it means to sometimes parse incorrect
- # code.
- #
- simple_escape = r"""([a-zA-Z._~!=&\^\-\\?'"])"""
- decimal_escape = r"""(\d+)"""
- hex_escape = r"""(x[0-9a-fA-F]+)"""
- bad_escape = r"""([\\][^a-zA-Z._~^!=&\^\-\\?'"x0-7])"""
-
- escape_sequence = r"""(\\("""+simple_escape+'|'+decimal_escape+'|'+hex_escape+'))'
- cconst_char = r"""([^'\\\n]|"""+escape_sequence+')'
- char_const = "'"+cconst_char+"'"
- wchar_const = 'L'+char_const
- unmatched_quote = "('"+cconst_char+"*\\n)|('"+cconst_char+"*$)"
- bad_char_const = r"""('"""+cconst_char+"""[^'\n]+')|('')|('"""+bad_escape+r"""[^'\n]*')"""
-
- # string literals (K&R2: A.2.6)
- string_char = r"""([^"\\\n]|"""+escape_sequence+')'
- string_literal = '"'+string_char+'*"'
- wstring_literal = 'L'+string_literal
- bad_string_literal = '"'+string_char+'*?'+bad_escape+string_char+'*"'
-
- # floating constants (K&R2: A.2.5.3)
- exponent_part = r"""([eE][-+]?[0-9]+)"""
- fractional_constant = r"""([0-9]*\.[0-9]+)|([0-9]+\.)"""
- floating_constant = '(((('+fractional_constant+')'+exponent_part+'?)|([0-9]+'+exponent_part+'))[FfLl]?)'
- binary_exponent_part = r'''([pP][+-]?[0-9]+)'''
- hex_fractional_constant = '((('+hex_digits+r""")?\."""+hex_digits+')|('+hex_digits+r"""\.))"""
- hex_floating_constant = '('+hex_prefix+'('+hex_digits+'|'+hex_fractional_constant+')'+binary_exponent_part+'[FfLl]?)'
-
- ##
- ## Lexer states: used for preprocessor \n-terminated directives
- ##
- states = (
- # ppline: preprocessor line directives
- #
- ('ppline', 'exclusive'),
-
- # pppragma: pragma
- #
- ('pppragma', 'exclusive'),
- )
-
- def t_PPHASH(self, t):
- r'[ \t]*\#'
- if self.line_pattern.match(t.lexer.lexdata, pos=t.lexer.lexpos):
- t.lexer.begin('ppline')
- self.pp_line = self.pp_filename = None
- elif self.pragma_pattern.match(t.lexer.lexdata, pos=t.lexer.lexpos):
- t.lexer.begin('pppragma')
- else:
- t.type = 'PPHASH'
- return t
-
- ##
- ## Rules for the ppline state
- ##
- @TOKEN(string_literal)
- def t_ppline_FILENAME(self, t):
- if self.pp_line is None:
- self._error('filename before line number in #line', t)
- else:
- self.pp_filename = t.value.lstrip('"').rstrip('"')
-
- @TOKEN(decimal_constant)
- def t_ppline_LINE_NUMBER(self, t):
- if self.pp_line is None:
- self.pp_line = t.value
- else:
- # Ignore: GCC's cpp sometimes inserts a numeric flag
- # after the file name
- pass
-
- def t_ppline_NEWLINE(self, t):
- r'\n'
- if self.pp_line is None:
- self._error('line number missing in #line', t)
- else:
- self.lexer.lineno = int(self.pp_line)
-
- if self.pp_filename is not None:
- self.filename = self.pp_filename
-
- t.lexer.begin('INITIAL')
-
- def t_ppline_PPLINE(self, t):
- r'line'
- pass
-
- t_ppline_ignore = ' \t'
-
- def t_ppline_error(self, t):
- self._error('invalid #line directive', t)
-
- ##
- ## Rules for the pppragma state
- ##
- def t_pppragma_NEWLINE(self, t):
- r'\n'
- t.lexer.lineno += 1
- t.lexer.begin('INITIAL')
-
- def t_pppragma_PPPRAGMA(self, t):
- r'pragma'
- return t
-
- t_pppragma_ignore = ' \t'
-
- def t_pppragma_STR(self, t):
- '.+'
- t.type = 'PPPRAGMASTR'
- return t
-
- def t_pppragma_error(self, t):
- self._error('invalid #pragma directive', t)
-
- ##
- ## Rules for the normal state
- ##
- t_ignore = ' \t'
-
- # Newlines
- def t_NEWLINE(self, t):
- r'\n+'
- t.lexer.lineno += t.value.count("\n")
-
- # Operators
- t_PLUS = r'\+'
- t_MINUS = r'-'
- t_TIMES = r'\*'
- t_DIVIDE = r'/'
- t_MOD = r'%'
- t_OR = r'\|'
- t_AND = r'&'
- t_NOT = r'~'
- t_XOR = r'\^'
- t_LSHIFT = r'<<'
- t_RSHIFT = r'>>'
- t_LOR = r'\|\|'
- t_LAND = r'&&'
- t_LNOT = r'!'
- t_LT = r'<'
- t_GT = r'>'
- t_LE = r'<='
- t_GE = r'>='
- t_EQ = r'=='
- t_NE = r'!='
-
- # Assignment operators
- t_EQUALS = r'='
- t_TIMESEQUAL = r'\*='
- t_DIVEQUAL = r'/='
- t_MODEQUAL = r'%='
- t_PLUSEQUAL = r'\+='
- t_MINUSEQUAL = r'-='
- t_LSHIFTEQUAL = r'<<='
- t_RSHIFTEQUAL = r'>>='
- t_ANDEQUAL = r'&='
- t_OREQUAL = r'\|='
- t_XOREQUAL = r'\^='
-
- # Increment/decrement
- t_PLUSPLUS = r'\+\+'
- t_MINUSMINUS = r'--'
-
- # ->
- t_ARROW = r'->'
-
- # ?
- t_CONDOP = r'\?'
-
- # Delimeters
- t_LPAREN = r'\('
- t_RPAREN = r'\)'
- t_LBRACKET = r'\['
- t_RBRACKET = r'\]'
- t_COMMA = r','
- t_PERIOD = r'\.'
- t_SEMI = r';'
- t_COLON = r':'
- t_ELLIPSIS = r'\.\.\.'
-
- # Scope delimiters
- # To see why on_lbrace_func is needed, consider:
- # typedef char TT;
- # void foo(int TT) { TT = 10; }
- # TT x = 5;
- # Outside the function, TT is a typedef, but inside (starting and ending
- # with the braces) it's a parameter. The trouble begins with yacc's
- # lookahead token. If we open a new scope in brace_open, then TT has
- # already been read and incorrectly interpreted as TYPEID. So, we need
- # to open and close scopes from within the lexer.
- # Similar for the TT immediately outside the end of the function.
- #
- @TOKEN(r'\{')
- def t_LBRACE(self, t):
- self.on_lbrace_func()
- return t
- @TOKEN(r'\}')
- def t_RBRACE(self, t):
- self.on_rbrace_func()
- return t
-
- t_STRING_LITERAL = string_literal
-
- # The following floating and integer constants are defined as
- # functions to impose a strict order (otherwise, decimal
- # is placed before the others because its regex is longer,
- # and this is bad)
- #
- @TOKEN(floating_constant)
- def t_FLOAT_CONST(self, t):
- return t
-
- @TOKEN(hex_floating_constant)
- def t_HEX_FLOAT_CONST(self, t):
- return t
-
- @TOKEN(hex_constant)
- def t_INT_CONST_HEX(self, t):
- return t
-
- @TOKEN(bin_constant)
- def t_INT_CONST_BIN(self, t):
- return t
-
- @TOKEN(bad_octal_constant)
- def t_BAD_CONST_OCT(self, t):
- msg = "Invalid octal constant"
- self._error(msg, t)
-
- @TOKEN(octal_constant)
- def t_INT_CONST_OCT(self, t):
- return t
-
- @TOKEN(decimal_constant)
- def t_INT_CONST_DEC(self, t):
- return t
-
- # Must come before bad_char_const, to prevent it from
- # catching valid char constants as invalid
- #
- @TOKEN(char_const)
- def t_CHAR_CONST(self, t):
- return t
-
- @TOKEN(wchar_const)
- def t_WCHAR_CONST(self, t):
- return t
-
- @TOKEN(unmatched_quote)
- def t_UNMATCHED_QUOTE(self, t):
- msg = "Unmatched '"
- self._error(msg, t)
-
- @TOKEN(bad_char_const)
- def t_BAD_CHAR_CONST(self, t):
- msg = "Invalid char constant %s" % t.value
- self._error(msg, t)
-
- @TOKEN(wstring_literal)
- def t_WSTRING_LITERAL(self, t):
- return t
-
- # unmatched string literals are caught by the preprocessor
-
- @TOKEN(bad_string_literal)
- def t_BAD_STRING_LITERAL(self, t):
- msg = "String contains invalid escape code"
- self._error(msg, t)
-
- @TOKEN(identifier)
- def t_ID(self, t):
- t.type = self.keyword_map.get(t.value, "ID")
- if t.type == 'ID' and self.type_lookup_func(t.value):
- t.type = "TYPEID"
- return t
-
- def t_error(self, t):
- msg = 'Illegal character %s' % repr(t.value[0])
- self._error(msg, t)
-
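# A minimal sketch of driving the lexer deleted above on its own; in normal use
# CParser supplies these callbacks itself, so the no-op versions here are purely
# illustrative. Assumes the pycparser package is importable.
from pycparser.c_lexer import CLexer

def error_func(msg, line, column):
    raise RuntimeError('%s at %d:%d' % (msg, line, column))

clex = CLexer(error_func=error_func,
              on_lbrace_func=lambda: None,
              on_rbrace_func=lambda: None,
              type_lookup_func=lambda name: False)
clex.build()   # lex.lex() must run after construction, per the docstring above
clex.input("int x = 42;")
tok = clex.token()
while tok is not None:
    print(tok.type, tok.value)  # INT, ID, EQUALS, INT_CONST_DEC, SEMI
    tok = clex.token()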
diff --git a/functions/source/GitPullS3/pycparser/c_parser.py b/functions/source/GitPullS3/pycparser/c_parser.py
deleted file mode 100644
index f84d6bc..0000000
--- a/functions/source/GitPullS3/pycparser/c_parser.py
+++ /dev/null
@@ -1,1782 +0,0 @@
-#------------------------------------------------------------------------------
-# pycparser: c_parser.py
-#
-# CParser class: Parser and AST builder for the C language
-#
-# Eli Bendersky [http://eli.thegreenplace.net]
-# License: BSD
-#------------------------------------------------------------------------------
-import re
-
-from .ply import yacc
-
-from . import c_ast
-from .c_lexer import CLexer
-from .plyparser import PLYParser, Coord, ParseError, parameterized, template
-from .ast_transforms import fix_switch_cases
-
-
-@template
-class CParser(PLYParser):
- def __init__(
- self,
- lex_optimize=True,
- lexer=CLexer,
- lextab='pycparser.lextab',
- yacc_optimize=True,
- yacctab='pycparser.yacctab',
- yacc_debug=False,
- taboutputdir=''):
- """ Create a new CParser.
-
- Some arguments for controlling the debug/optimization
- level of the parser are provided. The defaults are
- tuned for release/performance mode.
- The simple rules for using them are:
- *) When tweaking CParser/CLexer, set these to False
- *) When releasing a stable parser, set to True
-
- lex_optimize:
- Set to False when you're modifying the lexer.
- Otherwise, changes in the lexer won't be used, if
- some lextab.py file exists.
- When releasing with a stable lexer, set to True
- to save the re-generation of the lexer table on
- each run.
-
- lexer:
- Set this parameter to define the lexer to use if
- you're not using the default CLexer.
-
- lextab:
- Points to the lex table that's used for optimized
- mode. Only if you're modifying the lexer and want
- some tests to avoid re-generating the table, make
- this point to a local lex table file (that's been
- earlier generated with lex_optimize=True)
-
- yacc_optimize:
- Set to False when you're modifying the parser.
- Otherwise, changes in the parser won't be used, if
- some parsetab.py file exists.
- When releasing with a stable parser, set to True
- to save the re-generation of the parser table on
- each run.
-
- yacctab:
- Points to the yacc table that's used for optimized
- mode. Only if you're modifying the parser, make
- this point to a local yacc table file
-
- yacc_debug:
- Generate a parser.out file that explains how yacc
- built the parsing table from the grammar.
-
- taboutputdir:
- Set this parameter to control the location of generated
- lextab and yacctab files.
- """
- self.clex = lexer(
- error_func=self._lex_error_func,
- on_lbrace_func=self._lex_on_lbrace_func,
- on_rbrace_func=self._lex_on_rbrace_func,
- type_lookup_func=self._lex_type_lookup_func)
-
- self.clex.build(
- optimize=lex_optimize,
- lextab=lextab,
- outputdir=taboutputdir)
- self.tokens = self.clex.tokens
-
- rules_with_opt = [
- 'abstract_declarator',
- 'assignment_expression',
- 'declaration_list',
- 'declaration_specifiers_no_type',
- 'designation',
- 'expression',
- 'identifier_list',
- 'init_declarator_list',
- 'id_init_declarator_list',
- 'initializer_list',
- 'parameter_type_list',
- 'block_item_list',
- 'type_qualifier_list',
- 'struct_declarator_list'
- ]
-
- for rule in rules_with_opt:
- self._create_opt_rule(rule)
-
- self.cparser = yacc.yacc(
- module=self,
- start='translation_unit_or_empty',
- debug=yacc_debug,
- optimize=yacc_optimize,
- tabmodule=yacctab,
- outputdir=taboutputdir)
-
- # Stack of scopes for keeping track of symbols. _scope_stack[-1] is
- # the current (topmost) scope. Each scope is a dictionary that
- # specifies whether a name is a type. If _scope_stack[n][name] is
- # True, 'name' is currently a type in the scope. If it's False,
- # 'name' is used in the scope but not as a type (for instance, if we
-        # saw: int name;)
- # If 'name' is not a key in _scope_stack[n] then 'name' was not defined
- # in this scope at all.
- self._scope_stack = [dict()]
-
- # Keeps track of the last token given to yacc (the lookahead token)
- self._last_yielded_token = None
-
- def parse(self, text, filename='', debuglevel=0):
- """ Parses C code and returns an AST.
-
- text:
- A string containing the C source code
-
- filename:
- Name of the file being parsed (for meaningful
- error messages)
-
- debuglevel:
- Debug level to yacc
- """
- self.clex.filename = filename
- self.clex.reset_lineno()
- self._scope_stack = [dict()]
- self._last_yielded_token = None
- return self.cparser.parse(
- input=text,
- lexer=self.clex,
- debug=debuglevel)
-
- ######################-- PRIVATE --######################
-
- def _push_scope(self):
- self._scope_stack.append(dict())
-
- def _pop_scope(self):
- assert len(self._scope_stack) > 1
- self._scope_stack.pop()
-
- def _add_typedef_name(self, name, coord):
- """ Add a new typedef name (ie a TYPEID) to the current scope
- """
- if not self._scope_stack[-1].get(name, True):
- self._parse_error(
- "Typedef %r previously declared as non-typedef "
- "in this scope" % name, coord)
- self._scope_stack[-1][name] = True
-
- def _add_identifier(self, name, coord):
- """ Add a new object, function, or enum member name (ie an ID) to the
- current scope
- """
- if self._scope_stack[-1].get(name, False):
- self._parse_error(
- "Non-typedef %r previously declared as typedef "
- "in this scope" % name, coord)
- self._scope_stack[-1][name] = False
-
- def _is_type_in_scope(self, name):
- """ Is *name* a typedef-name in the current scope?
- """
- for scope in reversed(self._scope_stack):
- # If name is an identifier in this scope it shadows typedefs in
- # higher scopes.
- in_scope = scope.get(name)
- if in_scope is not None: return in_scope
- return False
-
- def _lex_error_func(self, msg, line, column):
- self._parse_error(msg, self._coord(line, column))
-
- def _lex_on_lbrace_func(self):
- self._push_scope()
-
- def _lex_on_rbrace_func(self):
- self._pop_scope()
-
- def _lex_type_lookup_func(self, name):
- """ Looks up types that were previously defined with
- typedef.
- Passed to the lexer for recognizing identifiers that
- are types.
- """
- is_type = self._is_type_in_scope(name)
- return is_type
-
- def _get_yacc_lookahead_token(self):
- """ We need access to yacc's lookahead token in certain cases.
- This is the last token yacc requested from the lexer, so we
- ask the lexer.
- """
- return self.clex.last_token
-
- # To understand what's going on here, read sections A.8.5 and
- # A.8.6 of K&R2 very carefully.
- #
- # A C type consists of a basic type declaration, with a list
- # of modifiers. For example:
- #
- # int *c[5];
- #
- # The basic declaration here is 'int c', and the pointer and
- # the array are the modifiers.
- #
- # Basic declarations are represented by TypeDecl (from module c_ast) and the
- # modifiers are FuncDecl, PtrDecl and ArrayDecl.
- #
- # The standard states that whenever a new modifier is parsed, it should be
- # added to the end of the list of modifiers. For example:
- #
- # K&R2 A.8.6.2: Array Declarators
- #
- # In a declaration T D where D has the form
- # D1 [constant-expression-opt]
- # and the type of the identifier in the declaration T D1 is
- # "type-modifier T", the type of the
- # identifier of D is "type-modifier array of T"
- #
- # This is what this method does. The declarator it receives
- # can be a list of declarators ending with TypeDecl. It
- # tacks the modifier to the end of this list, just before
- # the TypeDecl.
- #
- # Additionally, the modifier may be a list itself. This is
- # useful for pointers, that can come as a chain from the rule
- # p_pointer. In this case, the whole modifier list is spliced
- # into the new location.
- def _type_modify_decl(self, decl, modifier):
- """ Tacks a type modifier on a declarator, and returns
- the modified declarator.
-
- Note: the declarator and modifier may be modified
- """
- #~ print '****'
- #~ decl.show(offset=3)
- #~ modifier.show(offset=3)
- #~ print '****'
-
- modifier_head = modifier
- modifier_tail = modifier
-
- # The modifier may be a nested list. Reach its tail.
- #
- while modifier_tail.type:
- modifier_tail = modifier_tail.type
-
- # If the decl is a basic type, just tack the modifier onto
- # it
- #
- if isinstance(decl, c_ast.TypeDecl):
- modifier_tail.type = decl
- return modifier
- else:
- # Otherwise, the decl is a list of modifiers. Reach
- # its tail and splice the modifier onto the tail,
- # pointing to the underlying basic type.
- #
- decl_tail = decl
-
- while not isinstance(decl_tail.type, c_ast.TypeDecl):
- decl_tail = decl_tail.type
-
- modifier_tail.type = decl_tail.type
- decl_tail.type = modifier_head
- return decl
-
- # Due to the order in which declarators are constructed,
- # they have to be fixed in order to look like a normal AST.
- #
- # When a declaration arrives from syntax construction, it has
- # these problems:
- # * The innermost TypeDecl has no type (because the basic
- # type is only known at the uppermost declaration level)
- # * The declaration has no variable name, since that is saved
- # in the innermost TypeDecl
- # * The typename of the declaration is a list of type
- # specifiers, and not a node. Here, basic identifier types
- # should be separated from more complex types like enums
- # and structs.
- #
- # This method fixes these problems.
- #
- def _fix_decl_name_type(self, decl, typename):
- """ Fixes a declaration. Modifies decl.
- """
- # Reach the underlying basic type
- #
- type = decl
- while not isinstance(type, c_ast.TypeDecl):
- type = type.type
-
- decl.name = type.declname
- type.quals = decl.quals
-
- # The typename is a list of types. If any type in this
- # list isn't an IdentifierType, it must be the only
- # type in the list (it's illegal to declare "int enum ..")
- # If all the types are basic, they're collected in the
- # IdentifierType holder.
- #
- for tn in typename:
- if not isinstance(tn, c_ast.IdentifierType):
- if len(typename) > 1:
- self._parse_error(
- "Invalid multiple types specified", tn.coord)
- else:
- type.type = tn
- return decl
-
- if not typename:
- # Functions default to returning int
- #
- if not isinstance(decl.type, c_ast.FuncDecl):
- self._parse_error(
- "Missing type in declaration", decl.coord)
- type.type = c_ast.IdentifierType(
- ['int'],
- coord=decl.coord)
- else:
- # At this point, we know that typename is a list of IdentifierType
- # nodes. Concatenate all the names into a single list.
- #
- type.type = c_ast.IdentifierType(
- [name for id in typename for name in id.names],
- coord=typename[0].coord)
- return decl
-
- def _add_declaration_specifier(self, declspec, newspec, kind, append=False):
- """ Declaration specifiers are represented by a dictionary
- with the entries:
- * qual: a list of type qualifiers
- * storage: a list of storage type qualifiers
- * type: a list of type specifiers
- * function: a list of function specifiers
-
- This method is given a declaration specifier, and a
- new specifier of a given kind.
- If `append` is True, the new specifier is added to the end of
- the specifiers list, otherwise it's added at the beginning.
- Returns the declaration specifier, with the new
- specifier incorporated.
- """
- spec = declspec or dict(qual=[], storage=[], type=[], function=[])
-
- if append:
- spec[kind].append(newspec)
- else:
- spec[kind].insert(0, newspec)
-
- return spec
-
- def _build_declarations(self, spec, decls, typedef_namespace=False):
- """ Builds a list of declarations all sharing the given specifiers.
- If typedef_namespace is true, each declared name is added
- to the "typedef namespace", which also includes objects,
- functions, and enum constants.
- """
- is_typedef = 'typedef' in spec['storage']
- declarations = []
-
- # Bit-fields are allowed to be unnamed.
- #
- if decls[0].get('bitsize') is not None:
- pass
-
- # When redeclaring typedef names as identifiers in inner scopes, a
- # problem can occur where the identifier gets grouped into
- # spec['type'], leaving decl as None. This can only occur for the
- # first declarator.
- #
- elif decls[0]['decl'] is None:
- if len(spec['type']) < 2 or len(spec['type'][-1].names) != 1 or \
- not self._is_type_in_scope(spec['type'][-1].names[0]):
- coord = '?'
- for t in spec['type']:
- if hasattr(t, 'coord'):
- coord = t.coord
- break
- self._parse_error('Invalid declaration', coord)
-
- # Make this look as if it came from "direct_declarator:ID"
- decls[0]['decl'] = c_ast.TypeDecl(
- declname=spec['type'][-1].names[0],
- type=None,
- quals=None,
- coord=spec['type'][-1].coord)
- # Remove the "new" type's name from the end of spec['type']
- del spec['type'][-1]
-
- # A similar problem can occur where the declaration ends up looking
- # like an abstract declarator. Give it a name if this is the case.
- #
- elif not isinstance(decls[0]['decl'],
- (c_ast.Struct, c_ast.Union, c_ast.IdentifierType)):
- decls_0_tail = decls[0]['decl']
- while not isinstance(decls_0_tail, c_ast.TypeDecl):
- decls_0_tail = decls_0_tail.type
- if decls_0_tail.declname is None:
- decls_0_tail.declname = spec['type'][-1].names[0]
- del spec['type'][-1]
-
- for decl in decls:
- assert decl['decl'] is not None
- if is_typedef:
- declaration = c_ast.Typedef(
- name=None,
- quals=spec['qual'],
- storage=spec['storage'],
- type=decl['decl'],
- coord=decl['decl'].coord)
- else:
- declaration = c_ast.Decl(
- name=None,
- quals=spec['qual'],
- storage=spec['storage'],
- funcspec=spec['function'],
- type=decl['decl'],
- init=decl.get('init'),
- bitsize=decl.get('bitsize'),
- coord=decl['decl'].coord)
-
- if isinstance(declaration.type,
- (c_ast.Struct, c_ast.Union, c_ast.IdentifierType)):
- fixed_decl = declaration
- else:
- fixed_decl = self._fix_decl_name_type(declaration, spec['type'])
-
- # Add the type name defined by typedef to a
- # symbol table (for usage in the lexer)
- #
- if typedef_namespace:
- if is_typedef:
- self._add_typedef_name(fixed_decl.name, fixed_decl.coord)
- else:
- self._add_identifier(fixed_decl.name, fixed_decl.coord)
-
- declarations.append(fixed_decl)
-
- return declarations
-
- def _build_function_definition(self, spec, decl, param_decls, body):
- """ Builds a function definition.
- """
- assert 'typedef' not in spec['storage']
-
- declaration = self._build_declarations(
- spec=spec,
- decls=[dict(decl=decl, init=None)],
- typedef_namespace=True)[0]
-
- return c_ast.FuncDef(
- decl=declaration,
- param_decls=param_decls,
- body=body,
- coord=decl.coord)
-
- def _select_struct_union_class(self, token):
- """ Given a token (either STRUCT or UNION), selects the
- appropriate AST class.
- """
- if token == 'struct':
- return c_ast.Struct
- else:
- return c_ast.Union
-
- ##
- ## Precedence and associativity of operators
- ##
- precedence = (
- ('left', 'LOR'),
- ('left', 'LAND'),
- ('left', 'OR'),
- ('left', 'XOR'),
- ('left', 'AND'),
- ('left', 'EQ', 'NE'),
- ('left', 'GT', 'GE', 'LT', 'LE'),
- ('left', 'RSHIFT', 'LSHIFT'),
- ('left', 'PLUS', 'MINUS'),
- ('left', 'TIMES', 'DIVIDE', 'MOD')
- )
-
- ##
- ## Grammar productions
- ## Implementation of the BNF defined in K&R2 A.13
- ##
-
- # Wrapper around a translation unit, to allow for empty input.
- # Not strictly part of the C99 Grammar, but useful in practice.
- #
- def p_translation_unit_or_empty(self, p):
- """ translation_unit_or_empty : translation_unit
- | empty
- """
- if p[1] is None:
- p[0] = c_ast.FileAST([])
- else:
- p[0] = c_ast.FileAST(p[1])
-
- def p_translation_unit_1(self, p):
- """ translation_unit : external_declaration
- """
- # Note: external_declaration is already a list
- #
- p[0] = p[1]
-
- def p_translation_unit_2(self, p):
- """ translation_unit : translation_unit external_declaration
- """
- if p[2] is not None:
- p[1].extend(p[2])
- p[0] = p[1]
-
- # Declarations always come as lists (because they can be
- # several in one line), so we wrap the function definition
- # into a list as well, to make the return value of
-    # external_declaration homogeneous.
- #
- def p_external_declaration_1(self, p):
- """ external_declaration : function_definition
- """
- p[0] = [p[1]]
-
- def p_external_declaration_2(self, p):
- """ external_declaration : declaration
- """
- p[0] = p[1]
-
- def p_external_declaration_3(self, p):
- """ external_declaration : pp_directive
- | pppragma_directive
- """
- p[0] = [p[1]]
-
- def p_external_declaration_4(self, p):
- """ external_declaration : SEMI
- """
- p[0] = None
-
- def p_pp_directive(self, p):
- """ pp_directive : PPHASH
- """
- self._parse_error('Directives not supported yet',
- self._token_coord(p, 1))
-
- def p_pppragma_directive(self, p):
- """ pppragma_directive : PPPRAGMA
- | PPPRAGMA PPPRAGMASTR
- """
- if len(p) == 3:
- p[0] = c_ast.Pragma(p[2], self._token_coord(p, 2))
- else:
- p[0] = c_ast.Pragma("", self._token_coord(p, 1))
-
- # In function definitions, the declarator can be followed by
-    # a declaration list, for old "K&R style" function definitions.
- #
- def p_function_definition_1(self, p):
- """ function_definition : id_declarator declaration_list_opt compound_statement
- """
- # no declaration specifiers - 'int' becomes the default type
- spec = dict(
- qual=[],
- storage=[],
- type=[c_ast.IdentifierType(['int'],
- coord=self._token_coord(p, 1))],
- function=[])
-
- p[0] = self._build_function_definition(
- spec=spec,
- decl=p[1],
- param_decls=p[2],
- body=p[3])
-
- def p_function_definition_2(self, p):
- """ function_definition : declaration_specifiers id_declarator declaration_list_opt compound_statement
- """
- spec = p[1]
-
- p[0] = self._build_function_definition(
- spec=spec,
- decl=p[2],
- param_decls=p[3],
- body=p[4])
-
- def p_statement(self, p):
- """ statement : labeled_statement
- | expression_statement
- | compound_statement
- | selection_statement
- | iteration_statement
- | jump_statement
- | pppragma_directive
- """
- p[0] = p[1]
-
- # In C, declarations can come several in a line:
- # int x, *px, romulo = 5;
- #
- # However, for the AST, we will split them to separate Decl
- # nodes.
- #
- # This rule splits its declarations and always returns a list
- # of Decl nodes, even if it's one element long.
- #
- def p_decl_body(self, p):
- """ decl_body : declaration_specifiers init_declarator_list_opt
- | declaration_specifiers_no_type id_init_declarator_list_opt
- """
- spec = p[1]
-
- # p[2] (init_declarator_list_opt) is either a list or None
- #
- if p[2] is None:
- # By the standard, you must have at least one declarator unless
- # declaring a structure tag, a union tag, or the members of an
- # enumeration.
- #
- ty = spec['type']
- s_u_or_e = (c_ast.Struct, c_ast.Union, c_ast.Enum)
- if len(ty) == 1 and isinstance(ty[0], s_u_or_e):
- decls = [c_ast.Decl(
- name=None,
- quals=spec['qual'],
- storage=spec['storage'],
- funcspec=spec['function'],
- type=ty[0],
- init=None,
- bitsize=None,
- coord=ty[0].coord)]
-
- # However, this case can also occur on redeclared identifiers in
- # an inner scope. The trouble is that the redeclared type's name
- # gets grouped into declaration_specifiers; _build_declarations
- # compensates for this.
- #
- else:
- decls = self._build_declarations(
- spec=spec,
- decls=[dict(decl=None, init=None)],
- typedef_namespace=True)
-
- else:
- decls = self._build_declarations(
- spec=spec,
- decls=p[2],
- typedef_namespace=True)
-
- p[0] = decls
-
- # The declaration has been split to a decl_body sub-rule and
- # SEMI, because having them in a single rule created a problem
- # for defining typedefs.
- #
- # If a typedef line was directly followed by a line using the
- # type defined with the typedef, the type would not be
- # recognized. This is because to reduce the declaration rule,
- # the parser's lookahead asked for the token after SEMI, which
- # was the type from the next line, and the lexer had no chance
- # to see the updated type symbol table.
- #
- # Splitting solves this problem, because after seeing SEMI,
- # the parser reduces decl_body, which actually adds the new
- # type into the table to be seen by the lexer before the next
- # line is reached.
- def p_declaration(self, p):
- """ declaration : decl_body SEMI
- """
- p[0] = p[1]
-
- # Since each declaration is a list of declarations, this
- # rule will combine all the declarations and return a single
- # list
- #
- def p_declaration_list(self, p):
- """ declaration_list : declaration
- | declaration_list declaration
- """
- p[0] = p[1] if len(p) == 2 else p[1] + p[2]
-
- # To know when declaration-specifiers end and declarators begin,
- # we require declaration-specifiers to have at least one
- # type-specifier, and disallow typedef-names after we've seen any
- # type-specifier. These are both required by the spec.
- #
- def p_declaration_specifiers_no_type_1(self, p):
- """ declaration_specifiers_no_type : type_qualifier declaration_specifiers_no_type_opt
- """
- p[0] = self._add_declaration_specifier(p[2], p[1], 'qual')
-
- def p_declaration_specifiers_no_type_2(self, p):
- """ declaration_specifiers_no_type : storage_class_specifier declaration_specifiers_no_type_opt
- """
- p[0] = self._add_declaration_specifier(p[2], p[1], 'storage')
-
- def p_declaration_specifiers_no_type_3(self, p):
- """ declaration_specifiers_no_type : function_specifier declaration_specifiers_no_type_opt
- """
- p[0] = self._add_declaration_specifier(p[2], p[1], 'function')
-
-
- def p_declaration_specifiers_1(self, p):
- """ declaration_specifiers : declaration_specifiers type_qualifier
- """
- p[0] = self._add_declaration_specifier(p[1], p[2], 'qual', append=True)
-
- def p_declaration_specifiers_2(self, p):
- """ declaration_specifiers : declaration_specifiers storage_class_specifier
- """
- p[0] = self._add_declaration_specifier(p[1], p[2], 'storage', append=True)
-
- def p_declaration_specifiers_3(self, p):
- """ declaration_specifiers : declaration_specifiers function_specifier
- """
- p[0] = self._add_declaration_specifier(p[1], p[2], 'function', append=True)
-
- def p_declaration_specifiers_4(self, p):
- """ declaration_specifiers : declaration_specifiers type_specifier_no_typeid
- """
- p[0] = self._add_declaration_specifier(p[1], p[2], 'type', append=True)
-
- def p_declaration_specifiers_5(self, p):
- """ declaration_specifiers : type_specifier
- """
- p[0] = self._add_declaration_specifier(None, p[1], 'type')
-
- def p_declaration_specifiers_6(self, p):
- """ declaration_specifiers : declaration_specifiers_no_type type_specifier
- """
- p[0] = self._add_declaration_specifier(p[1], p[2], 'type', append=True)
-
-
- def p_storage_class_specifier(self, p):
- """ storage_class_specifier : AUTO
- | REGISTER
- | STATIC
- | EXTERN
- | TYPEDEF
- """
- p[0] = p[1]
-
- def p_function_specifier(self, p):
- """ function_specifier : INLINE
- """
- p[0] = p[1]
-
- def p_type_specifier_no_typeid(self, p):
- """ type_specifier_no_typeid : VOID
- | _BOOL
- | CHAR
- | SHORT
- | INT
- | LONG
- | FLOAT
- | DOUBLE
- | _COMPLEX
- | SIGNED
- | UNSIGNED
- | __INT128
- """
- p[0] = c_ast.IdentifierType([p[1]], coord=self._token_coord(p, 1))
-
- def p_type_specifier(self, p):
- """ type_specifier : typedef_name
- | enum_specifier
- | struct_or_union_specifier
- | type_specifier_no_typeid
- """
- p[0] = p[1]
-
- def p_type_qualifier(self, p):
- """ type_qualifier : CONST
- | RESTRICT
- | VOLATILE
- """
- p[0] = p[1]
-
- def p_init_declarator_list(self, p):
- """ init_declarator_list : init_declarator
- | init_declarator_list COMMA init_declarator
- """
- p[0] = p[1] + [p[3]] if len(p) == 4 else [p[1]]
-
- # Returns a {decl= : init=} dictionary
- # If there's no initializer, uses None
- #
- def p_init_declarator(self, p):
- """ init_declarator : declarator
- | declarator EQUALS initializer
- """
- p[0] = dict(decl=p[1], init=(p[3] if len(p) > 2 else None))
-
- def p_id_init_declarator_list(self, p):
- """ id_init_declarator_list : id_init_declarator
- | id_init_declarator_list COMMA init_declarator
- """
- p[0] = p[1] + [p[3]] if len(p) == 4 else [p[1]]
-
- def p_id_init_declarator(self, p):
- """ id_init_declarator : id_declarator
- | id_declarator EQUALS initializer
- """
- p[0] = dict(decl=p[1], init=(p[3] if len(p) > 2 else None))
-
- # Require at least one type specifier in a specifier-qualifier-list
- #
- def p_specifier_qualifier_list_1(self, p):
- """ specifier_qualifier_list : specifier_qualifier_list type_specifier_no_typeid
- """
- p[0] = self._add_declaration_specifier(p[1], p[2], 'type', append=True)
-
- def p_specifier_qualifier_list_2(self, p):
- """ specifier_qualifier_list : specifier_qualifier_list type_qualifier
- """
- p[0] = self._add_declaration_specifier(p[1], p[2], 'qual', append=True)
-
- def p_specifier_qualifier_list_3(self, p):
- """ specifier_qualifier_list : type_specifier
- """
- p[0] = self._add_declaration_specifier(None, p[1], 'type')
-
- def p_specifier_qualifier_list_4(self, p):
- """ specifier_qualifier_list : type_qualifier_list type_specifier
- """
- spec = dict(qual=p[1], storage=[], type=[], function=[])
- p[0] = self._add_declaration_specifier(spec, p[2], 'type', append=True)
-
- # TYPEID is allowed here (and in other struct/enum related tag names), because
- # struct/enum tags reside in their own namespace and can be named the same as types
- #
- def p_struct_or_union_specifier_1(self, p):
- """ struct_or_union_specifier : struct_or_union ID
- | struct_or_union TYPEID
- """
- klass = self._select_struct_union_class(p[1])
- p[0] = klass(
- name=p[2],
- decls=None,
- coord=self._token_coord(p, 2))
-
- def p_struct_or_union_specifier_2(self, p):
- """ struct_or_union_specifier : struct_or_union brace_open struct_declaration_list brace_close
- """
- klass = self._select_struct_union_class(p[1])
- p[0] = klass(
- name=None,
- decls=p[3],
- coord=self._token_coord(p, 2))
-
- def p_struct_or_union_specifier_3(self, p):
- """ struct_or_union_specifier : struct_or_union ID brace_open struct_declaration_list brace_close
- | struct_or_union TYPEID brace_open struct_declaration_list brace_close
- """
- klass = self._select_struct_union_class(p[1])
- p[0] = klass(
- name=p[2],
- decls=p[4],
- coord=self._token_coord(p, 2))
-
- def p_struct_or_union(self, p):
- """ struct_or_union : STRUCT
- | UNION
- """
- p[0] = p[1]
-
- # Combine all declarations into a single list
- #
- def p_struct_declaration_list(self, p):
- """ struct_declaration_list : struct_declaration
- | struct_declaration_list struct_declaration
- """
- if len(p) == 2:
- p[0] = p[1] or []
- else:
- p[0] = p[1] + (p[2] or [])
-
- def p_struct_declaration_1(self, p):
- """ struct_declaration : specifier_qualifier_list struct_declarator_list_opt SEMI
- """
- spec = p[1]
- assert 'typedef' not in spec['storage']
-
- if p[2] is not None:
- decls = self._build_declarations(
- spec=spec,
- decls=p[2])
-
- elif len(spec['type']) == 1:
- # Anonymous struct/union, gcc extension, C1x feature.
- # Although the standard only allows structs/unions here, I see no
- # reason to disallow other types since some compilers have typedefs
- # here, and pycparser isn't about rejecting all invalid code.
- #
- node = spec['type'][0]
- if isinstance(node, c_ast.Node):
- decl_type = node
- else:
- decl_type = c_ast.IdentifierType(node)
-
- decls = self._build_declarations(
- spec=spec,
- decls=[dict(decl=decl_type)])
-
- else:
- # Structure/union members can have the same names as typedefs.
- # The trouble is that the member's name gets grouped into
- # specifier_qualifier_list; _build_declarations compensates.
- #
- decls = self._build_declarations(
- spec=spec,
- decls=[dict(decl=None, init=None)])
-
- p[0] = decls
-
- def p_struct_declaration_2(self, p):
- """ struct_declaration : SEMI
- """
- p[0] = None
-
- def p_struct_declarator_list(self, p):
- """ struct_declarator_list : struct_declarator
- | struct_declarator_list COMMA struct_declarator
- """
- p[0] = p[1] + [p[3]] if len(p) == 4 else [p[1]]
-
- # struct_declarator passes up a dict with the keys: decl (for
- # the underlying declarator) and bitsize (for the bitsize)
- #
- def p_struct_declarator_1(self, p):
- """ struct_declarator : declarator
- """
- p[0] = {'decl': p[1], 'bitsize': None}
-
- def p_struct_declarator_2(self, p):
- """ struct_declarator : declarator COLON constant_expression
- | COLON constant_expression
- """
- if len(p) > 3:
- p[0] = {'decl': p[1], 'bitsize': p[3]}
- else:
- p[0] = {'decl': c_ast.TypeDecl(None, None, None), 'bitsize': p[2]}
-
- def p_enum_specifier_1(self, p):
- """ enum_specifier : ENUM ID
- | ENUM TYPEID
- """
- p[0] = c_ast.Enum(p[2], None, self._token_coord(p, 1))
-
- def p_enum_specifier_2(self, p):
- """ enum_specifier : ENUM brace_open enumerator_list brace_close
- """
- p[0] = c_ast.Enum(None, p[3], self._token_coord(p, 1))
-
- def p_enum_specifier_3(self, p):
- """ enum_specifier : ENUM ID brace_open enumerator_list brace_close
- | ENUM TYPEID brace_open enumerator_list brace_close
- """
- p[0] = c_ast.Enum(p[2], p[4], self._token_coord(p, 1))
-
- def p_enumerator_list(self, p):
- """ enumerator_list : enumerator
- | enumerator_list COMMA
- | enumerator_list COMMA enumerator
- """
- if len(p) == 2:
- p[0] = c_ast.EnumeratorList([p[1]], p[1].coord)
- elif len(p) == 3:
- p[0] = p[1]
- else:
- p[1].enumerators.append(p[3])
- p[0] = p[1]
-
- def p_enumerator(self, p):
- """ enumerator : ID
- | ID EQUALS constant_expression
- """
- if len(p) == 2:
- enumerator = c_ast.Enumerator(
- p[1], None,
- self._token_coord(p, 1))
- else:
- enumerator = c_ast.Enumerator(
- p[1], p[3],
- self._token_coord(p, 1))
- self._add_identifier(enumerator.name, enumerator.coord)
-
- p[0] = enumerator
-
- def p_declarator(self, p):
- """ declarator : id_declarator
- | typeid_declarator
- """
- p[0] = p[1]
-
- @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
- def p_xxx_declarator_1(self, p):
- """ xxx_declarator : direct_xxx_declarator
- """
- p[0] = p[1]
-
- @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
- def p_xxx_declarator_2(self, p):
- """ xxx_declarator : pointer direct_xxx_declarator
- """
- p[0] = self._type_modify_decl(p[2], p[1])
-
- @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
- def p_direct_xxx_declarator_1(self, p):
- """ direct_xxx_declarator : yyy
- """
- p[0] = c_ast.TypeDecl(
- declname=p[1],
- type=None,
- quals=None,
- coord=self._token_coord(p, 1))
-
- @parameterized(('id', 'ID'), ('typeid', 'TYPEID'))
- def p_direct_xxx_declarator_2(self, p):
- """ direct_xxx_declarator : LPAREN xxx_declarator RPAREN
- """
- p[0] = p[2]
-
- @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
- def p_direct_xxx_declarator_3(self, p):
- """ direct_xxx_declarator : direct_xxx_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET
- """
- quals = (p[3] if len(p) > 5 else []) or []
- # Accept dimension qualifiers
- # Per C99 6.7.5.3 p7
- arr = c_ast.ArrayDecl(
- type=None,
- dim=p[4] if len(p) > 5 else p[3],
- dim_quals=quals,
- coord=p[1].coord)
-
- p[0] = self._type_modify_decl(decl=p[1], modifier=arr)
-
- @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
- def p_direct_xxx_declarator_4(self, p):
- """ direct_xxx_declarator : direct_xxx_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET
- | direct_xxx_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET
- """
- # Using slice notation for PLY objects doesn't work in Python 3 for the
- # version of PLY embedded with pycparser; see PLY Google Code issue 30.
- # Work around that here by listing the two elements separately.
- listed_quals = [item if isinstance(item, list) else [item]
- for item in [p[3],p[4]]]
- dim_quals = [qual for sublist in listed_quals for qual in sublist
- if qual is not None]
- arr = c_ast.ArrayDecl(
- type=None,
- dim=p[5],
- dim_quals=dim_quals,
- coord=p[1].coord)
-
- p[0] = self._type_modify_decl(decl=p[1], modifier=arr)
-
- # Special for VLAs
- #
- @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
- def p_direct_xxx_declarator_5(self, p):
- """ direct_xxx_declarator : direct_xxx_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET
- """
- arr = c_ast.ArrayDecl(
- type=None,
- dim=c_ast.ID(p[4], self._token_coord(p, 4)),
- dim_quals=p[3] if p[3] != None else [],
- coord=p[1].coord)
-
- p[0] = self._type_modify_decl(decl=p[1], modifier=arr)
-
- @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
- def p_direct_xxx_declarator_6(self, p):
- """ direct_xxx_declarator : direct_xxx_declarator LPAREN parameter_type_list RPAREN
- | direct_xxx_declarator LPAREN identifier_list_opt RPAREN
- """
- func = c_ast.FuncDecl(
- args=p[3],
- type=None,
- coord=p[1].coord)
-
- # To see why _get_yacc_lookahead_token is needed, consider:
- # typedef char TT;
- # void foo(int TT) { TT = 10; }
- # Outside the function, TT is a typedef, but inside (starting and
- # ending with the braces) it's a parameter. The trouble begins with
- # yacc's lookahead token. We don't know if we're declaring or
- # defining a function until we see LBRACE, but if we wait for yacc to
- # trigger a rule on that token, then TT will have already been read
- # and incorrectly interpreted as TYPEID. We need to add the
- # parameters to the scope the moment the lexer sees LBRACE.
- #
- if self._get_yacc_lookahead_token().type == "LBRACE":
- if func.args is not None:
- for param in func.args.params:
- if isinstance(param, c_ast.EllipsisParam): break
- self._add_identifier(param.name, param.coord)
-
- p[0] = self._type_modify_decl(decl=p[1], modifier=func)
-
- def p_pointer(self, p):
- """ pointer : TIMES type_qualifier_list_opt
- | TIMES type_qualifier_list_opt pointer
- """
- coord = self._token_coord(p, 1)
- # Pointer decls nest from inside out. This is important when different
- # levels have different qualifiers. For example:
- #
- # char * const * p;
- #
- # Means "pointer to const pointer to char"
- #
- # While:
- #
- # char ** const p;
- #
- # Means "const pointer to pointer to char"
- #
- # So when we construct PtrDecl nestings, the leftmost pointer goes in
- # as the most nested type.
- nested_type = c_ast.PtrDecl(quals=p[2] or [], type=None, coord=coord)
- if len(p) > 3:
- tail_type = p[3]
- while tail_type.type is not None:
- tail_type = tail_type.type
- tail_type.type = nested_type
- p[0] = p[3]
- else:
- p[0] = nested_type
-
- def p_type_qualifier_list(self, p):
- """ type_qualifier_list : type_qualifier
- | type_qualifier_list type_qualifier
- """
- p[0] = [p[1]] if len(p) == 2 else p[1] + [p[2]]
-
- def p_parameter_type_list(self, p):
- """ parameter_type_list : parameter_list
- | parameter_list COMMA ELLIPSIS
- """
- if len(p) > 2:
- p[1].params.append(c_ast.EllipsisParam(self._token_coord(p, 3)))
-
- p[0] = p[1]
-
- def p_parameter_list(self, p):
- """ parameter_list : parameter_declaration
- | parameter_list COMMA parameter_declaration
- """
- if len(p) == 2: # single parameter
- p[0] = c_ast.ParamList([p[1]], p[1].coord)
- else:
- p[1].params.append(p[3])
- p[0] = p[1]
-
- # From ISO/IEC 9899:TC2, 6.7.5.3.11:
- # "If, in a parameter declaration, an identifier can be treated either
- # as a typedef name or as a parameter name, it shall be taken as a
- # typedef name."
- #
- # Inside a parameter declaration, once we've reduced declaration specifiers,
- # if we shift in an LPAREN and see a TYPEID, it could be either an abstract
- # declarator or a declarator nested inside parens. This rule tells us to
- # always treat it as an abstract declarator. Therefore, we only accept
- # `id_declarator`s and `typeid_noparen_declarator`s.
- def p_parameter_declaration_1(self, p):
- """ parameter_declaration : declaration_specifiers id_declarator
- | declaration_specifiers typeid_noparen_declarator
- """
- spec = p[1]
- if not spec['type']:
- spec['type'] = [c_ast.IdentifierType(['int'],
- coord=self._token_coord(p, 1))]
- p[0] = self._build_declarations(
- spec=spec,
- decls=[dict(decl=p[2])])[0]
-
- def p_parameter_declaration_2(self, p):
- """ parameter_declaration : declaration_specifiers abstract_declarator_opt
- """
- spec = p[1]
- if not spec['type']:
- spec['type'] = [c_ast.IdentifierType(['int'],
- coord=self._token_coord(p, 1))]
-
- # Parameters can have the same names as typedefs. The trouble is that
- # the parameter's name gets grouped into declaration_specifiers, making
- # it look like an old-style declaration; compensate.
- #
- if len(spec['type']) > 1 and len(spec['type'][-1].names) == 1 and \
- self._is_type_in_scope(spec['type'][-1].names[0]):
- decl = self._build_declarations(
- spec=spec,
- decls=[dict(decl=p[2], init=None)])[0]
-
- # This truly is an old-style parameter declaration
- #
- else:
- decl = c_ast.Typename(
- name='',
- quals=spec['qual'],
- type=p[2] or c_ast.TypeDecl(None, None, None),
- coord=self._token_coord(p, 2))
- typename = spec['type']
- decl = self._fix_decl_name_type(decl, typename)
-
- p[0] = decl
-
- def p_identifier_list(self, p):
- """ identifier_list : identifier
- | identifier_list COMMA identifier
- """
- if len(p) == 2: # single parameter
- p[0] = c_ast.ParamList([p[1]], p[1].coord)
- else:
- p[1].params.append(p[3])
- p[0] = p[1]
-
- def p_initializer_1(self, p):
- """ initializer : assignment_expression
- """
- p[0] = p[1]
-
- def p_initializer_2(self, p):
- """ initializer : brace_open initializer_list_opt brace_close
- | brace_open initializer_list COMMA brace_close
- """
- if p[2] is None:
- p[0] = c_ast.InitList([], self._token_coord(p, 1))
- else:
- p[0] = p[2]
-
- def p_initializer_list(self, p):
- """ initializer_list : designation_opt initializer
- | initializer_list COMMA designation_opt initializer
- """
- if len(p) == 3: # single initializer
- init = p[2] if p[1] is None else c_ast.NamedInitializer(p[1], p[2])
- p[0] = c_ast.InitList([init], p[2].coord)
- else:
- init = p[4] if p[3] is None else c_ast.NamedInitializer(p[3], p[4])
- p[1].exprs.append(init)
- p[0] = p[1]
-
- def p_designation(self, p):
- """ designation : designator_list EQUALS
- """
- p[0] = p[1]
-
- # Designators are represented as a list of nodes, in the order in which
- # they're written in the code.
- #
- def p_designator_list(self, p):
- """ designator_list : designator
- | designator_list designator
- """
- p[0] = [p[1]] if len(p) == 2 else p[1] + [p[2]]
-
- def p_designator(self, p):
- """ designator : LBRACKET constant_expression RBRACKET
- | PERIOD identifier
- """
- p[0] = p[2]
-
- def p_type_name(self, p):
- """ type_name : specifier_qualifier_list abstract_declarator_opt
- """
- typename = c_ast.Typename(
- name='',
- quals=p[1]['qual'],
- type=p[2] or c_ast.TypeDecl(None, None, None),
- coord=self._token_coord(p, 2))
-
- p[0] = self._fix_decl_name_type(typename, p[1]['type'])
-
- def p_abstract_declarator_1(self, p):
- """ abstract_declarator : pointer
- """
- dummytype = c_ast.TypeDecl(None, None, None)
- p[0] = self._type_modify_decl(
- decl=dummytype,
- modifier=p[1])
-
- def p_abstract_declarator_2(self, p):
- """ abstract_declarator : pointer direct_abstract_declarator
- """
- p[0] = self._type_modify_decl(p[2], p[1])
-
- def p_abstract_declarator_3(self, p):
- """ abstract_declarator : direct_abstract_declarator
- """
- p[0] = p[1]
-
- # Creating and using direct_abstract_declarator_opt here
- # instead of listing both direct_abstract_declarator and the
- # lack of it in the beginning of _1 and _2 caused two
- # shift/reduce errors.
- #
- def p_direct_abstract_declarator_1(self, p):
- """ direct_abstract_declarator : LPAREN abstract_declarator RPAREN """
- p[0] = p[2]
-
- def p_direct_abstract_declarator_2(self, p):
- """ direct_abstract_declarator : direct_abstract_declarator LBRACKET assignment_expression_opt RBRACKET
- """
- arr = c_ast.ArrayDecl(
- type=None,
- dim=p[3],
- dim_quals=[],
- coord=p[1].coord)
-
- p[0] = self._type_modify_decl(decl=p[1], modifier=arr)
-
- def p_direct_abstract_declarator_3(self, p):
- """ direct_abstract_declarator : LBRACKET assignment_expression_opt RBRACKET
- """
- p[0] = c_ast.ArrayDecl(
- type=c_ast.TypeDecl(None, None, None),
- dim=p[2],
- dim_quals=[],
- coord=self._token_coord(p, 1))
-
- def p_direct_abstract_declarator_4(self, p):
- """ direct_abstract_declarator : direct_abstract_declarator LBRACKET TIMES RBRACKET
- """
- arr = c_ast.ArrayDecl(
- type=None,
- dim=c_ast.ID(p[3], self._token_coord(p, 3)),
- dim_quals=[],
- coord=p[1].coord)
-
- p[0] = self._type_modify_decl(decl=p[1], modifier=arr)
-
- def p_direct_abstract_declarator_5(self, p):
- """ direct_abstract_declarator : LBRACKET TIMES RBRACKET
- """
- p[0] = c_ast.ArrayDecl(
- type=c_ast.TypeDecl(None, None, None),
- dim=c_ast.ID(p[3], self._token_coord(p, 3)),
- dim_quals=[],
- coord=self._token_coord(p, 1))
-
- def p_direct_abstract_declarator_6(self, p):
- """ direct_abstract_declarator : direct_abstract_declarator LPAREN parameter_type_list_opt RPAREN
- """
- func = c_ast.FuncDecl(
- args=p[3],
- type=None,
- coord=p[1].coord)
-
- p[0] = self._type_modify_decl(decl=p[1], modifier=func)
-
- def p_direct_abstract_declarator_7(self, p):
- """ direct_abstract_declarator : LPAREN parameter_type_list_opt RPAREN
- """
- p[0] = c_ast.FuncDecl(
- args=p[2],
- type=c_ast.TypeDecl(None, None, None),
- coord=self._token_coord(p, 1))
-
- # declaration is a list, statement isn't. To make it consistent, block_item
- # will always be a list
- #
- def p_block_item(self, p):
- """ block_item : declaration
- | statement
- """
- p[0] = p[1] if isinstance(p[1], list) else [p[1]]
-
- # Since we made block_item a list, this just combines lists
- #
- def p_block_item_list(self, p):
- """ block_item_list : block_item
- | block_item_list block_item
- """
- # Empty block items (plain ';') produce [None], so ignore them
- p[0] = p[1] if (len(p) == 2 or p[2] == [None]) else p[1] + p[2]
-
- def p_compound_statement_1(self, p):
- """ compound_statement : brace_open block_item_list_opt brace_close """
- p[0] = c_ast.Compound(
- block_items=p[2],
- coord=self._token_coord(p, 1))
-
- def p_labeled_statement_1(self, p):
- """ labeled_statement : ID COLON statement """
- p[0] = c_ast.Label(p[1], p[3], self._token_coord(p, 1))
-
- def p_labeled_statement_2(self, p):
- """ labeled_statement : CASE constant_expression COLON statement """
- p[0] = c_ast.Case(p[2], [p[4]], self._token_coord(p, 1))
-
- def p_labeled_statement_3(self, p):
- """ labeled_statement : DEFAULT COLON statement """
- p[0] = c_ast.Default([p[3]], self._token_coord(p, 1))
-
- def p_selection_statement_1(self, p):
- """ selection_statement : IF LPAREN expression RPAREN statement """
- p[0] = c_ast.If(p[3], p[5], None, self._token_coord(p, 1))
-
- def p_selection_statement_2(self, p):
- """ selection_statement : IF LPAREN expression RPAREN statement ELSE statement """
- p[0] = c_ast.If(p[3], p[5], p[7], self._token_coord(p, 1))
-
- def p_selection_statement_3(self, p):
- """ selection_statement : SWITCH LPAREN expression RPAREN statement """
- p[0] = fix_switch_cases(
- c_ast.Switch(p[3], p[5], self._token_coord(p, 1)))
-
- def p_iteration_statement_1(self, p):
- """ iteration_statement : WHILE LPAREN expression RPAREN statement """
- p[0] = c_ast.While(p[3], p[5], self._token_coord(p, 1))
-
- def p_iteration_statement_2(self, p):
- """ iteration_statement : DO statement WHILE LPAREN expression RPAREN SEMI """
- p[0] = c_ast.DoWhile(p[5], p[2], self._token_coord(p, 1))
-
- def p_iteration_statement_3(self, p):
- """ iteration_statement : FOR LPAREN expression_opt SEMI expression_opt SEMI expression_opt RPAREN statement """
- p[0] = c_ast.For(p[3], p[5], p[7], p[9], self._token_coord(p, 1))
-
- def p_iteration_statement_4(self, p):
- """ iteration_statement : FOR LPAREN declaration expression_opt SEMI expression_opt RPAREN statement """
- p[0] = c_ast.For(c_ast.DeclList(p[3], self._token_coord(p, 1)),
- p[4], p[6], p[8], self._token_coord(p, 1))
-
- def p_jump_statement_1(self, p):
- """ jump_statement : GOTO ID SEMI """
- p[0] = c_ast.Goto(p[2], self._token_coord(p, 1))
-
- def p_jump_statement_2(self, p):
- """ jump_statement : BREAK SEMI """
- p[0] = c_ast.Break(self._token_coord(p, 1))
-
- def p_jump_statement_3(self, p):
- """ jump_statement : CONTINUE SEMI """
- p[0] = c_ast.Continue(self._token_coord(p, 1))
-
- def p_jump_statement_4(self, p):
- """ jump_statement : RETURN expression SEMI
- | RETURN SEMI
- """
- p[0] = c_ast.Return(p[2] if len(p) == 4 else None, self._token_coord(p, 1))
-
- def p_expression_statement(self, p):
- """ expression_statement : expression_opt SEMI """
- if p[1] is None:
- p[0] = c_ast.EmptyStatement(self._token_coord(p, 2))
- else:
- p[0] = p[1]
-
- def p_expression(self, p):
- """ expression : assignment_expression
- | expression COMMA assignment_expression
- """
- if len(p) == 2:
- p[0] = p[1]
- else:
- if not isinstance(p[1], c_ast.ExprList):
- p[1] = c_ast.ExprList([p[1]], p[1].coord)
-
- p[1].exprs.append(p[3])
- p[0] = p[1]
-
- def p_typedef_name(self, p):
- """ typedef_name : TYPEID """
- p[0] = c_ast.IdentifierType([p[1]], coord=self._token_coord(p, 1))
-
- def p_assignment_expression(self, p):
- """ assignment_expression : conditional_expression
- | unary_expression assignment_operator assignment_expression
- """
- if len(p) == 2:
- p[0] = p[1]
- else:
- p[0] = c_ast.Assignment(p[2], p[1], p[3], p[1].coord)
-
- # K&R2 defines these as many separate rules, to encode
-    # precedence and associativity. Why work hard? I'll just use
-    # the built-in precedence/associativity specification feature
- # of PLY. (see precedence declaration above)
- #
- def p_assignment_operator(self, p):
- """ assignment_operator : EQUALS
- | XOREQUAL
- | TIMESEQUAL
- | DIVEQUAL
- | MODEQUAL
- | PLUSEQUAL
- | MINUSEQUAL
- | LSHIFTEQUAL
- | RSHIFTEQUAL
- | ANDEQUAL
- | OREQUAL
- """
- p[0] = p[1]
-
- def p_constant_expression(self, p):
- """ constant_expression : conditional_expression """
- p[0] = p[1]
-
- def p_conditional_expression(self, p):
- """ conditional_expression : binary_expression
- | binary_expression CONDOP expression COLON conditional_expression
- """
- if len(p) == 2:
- p[0] = p[1]
- else:
- p[0] = c_ast.TernaryOp(p[1], p[3], p[5], p[1].coord)
-
- def p_binary_expression(self, p):
- """ binary_expression : cast_expression
- | binary_expression TIMES binary_expression
- | binary_expression DIVIDE binary_expression
- | binary_expression MOD binary_expression
- | binary_expression PLUS binary_expression
- | binary_expression MINUS binary_expression
- | binary_expression RSHIFT binary_expression
- | binary_expression LSHIFT binary_expression
- | binary_expression LT binary_expression
- | binary_expression LE binary_expression
- | binary_expression GE binary_expression
- | binary_expression GT binary_expression
- | binary_expression EQ binary_expression
- | binary_expression NE binary_expression
- | binary_expression AND binary_expression
- | binary_expression OR binary_expression
- | binary_expression XOR binary_expression
- | binary_expression LAND binary_expression
- | binary_expression LOR binary_expression
- """
- if len(p) == 2:
- p[0] = p[1]
- else:
- p[0] = c_ast.BinaryOp(p[2], p[1], p[3], p[1].coord)
-
- def p_cast_expression_1(self, p):
- """ cast_expression : unary_expression """
- p[0] = p[1]
-
- def p_cast_expression_2(self, p):
- """ cast_expression : LPAREN type_name RPAREN cast_expression """
- p[0] = c_ast.Cast(p[2], p[4], self._token_coord(p, 1))
-
- def p_unary_expression_1(self, p):
- """ unary_expression : postfix_expression """
- p[0] = p[1]
-
- def p_unary_expression_2(self, p):
- """ unary_expression : PLUSPLUS unary_expression
- | MINUSMINUS unary_expression
- | unary_operator cast_expression
- """
- p[0] = c_ast.UnaryOp(p[1], p[2], p[2].coord)
-
- def p_unary_expression_3(self, p):
- """ unary_expression : SIZEOF unary_expression
- | SIZEOF LPAREN type_name RPAREN
- """
- p[0] = c_ast.UnaryOp(
- p[1],
- p[2] if len(p) == 3 else p[3],
- self._token_coord(p, 1))
-
- def p_unary_operator(self, p):
- """ unary_operator : AND
- | TIMES
- | PLUS
- | MINUS
- | NOT
- | LNOT
- """
- p[0] = p[1]
-
- def p_postfix_expression_1(self, p):
- """ postfix_expression : primary_expression """
- p[0] = p[1]
-
- def p_postfix_expression_2(self, p):
- """ postfix_expression : postfix_expression LBRACKET expression RBRACKET """
- p[0] = c_ast.ArrayRef(p[1], p[3], p[1].coord)
-
- def p_postfix_expression_3(self, p):
- """ postfix_expression : postfix_expression LPAREN argument_expression_list RPAREN
- | postfix_expression LPAREN RPAREN
- """
- p[0] = c_ast.FuncCall(p[1], p[3] if len(p) == 5 else None, p[1].coord)
-
- def p_postfix_expression_4(self, p):
- """ postfix_expression : postfix_expression PERIOD ID
- | postfix_expression PERIOD TYPEID
- | postfix_expression ARROW ID
- | postfix_expression ARROW TYPEID
- """
- field = c_ast.ID(p[3], self._token_coord(p, 3))
- p[0] = c_ast.StructRef(p[1], p[2], field, p[1].coord)
-
- def p_postfix_expression_5(self, p):
- """ postfix_expression : postfix_expression PLUSPLUS
- | postfix_expression MINUSMINUS
- """
- p[0] = c_ast.UnaryOp('p' + p[2], p[1], p[1].coord)
-
- def p_postfix_expression_6(self, p):
- """ postfix_expression : LPAREN type_name RPAREN brace_open initializer_list brace_close
- | LPAREN type_name RPAREN brace_open initializer_list COMMA brace_close
- """
- p[0] = c_ast.CompoundLiteral(p[2], p[5])
-
- def p_primary_expression_1(self, p):
- """ primary_expression : identifier """
- p[0] = p[1]
-
- def p_primary_expression_2(self, p):
- """ primary_expression : constant """
- p[0] = p[1]
-
- def p_primary_expression_3(self, p):
- """ primary_expression : unified_string_literal
- | unified_wstring_literal
- """
- p[0] = p[1]
-
- def p_primary_expression_4(self, p):
- """ primary_expression : LPAREN expression RPAREN """
- p[0] = p[2]
-
- def p_primary_expression_5(self, p):
- """ primary_expression : OFFSETOF LPAREN type_name COMMA offsetof_member_designator RPAREN
- """
- coord = self._token_coord(p, 1)
- p[0] = c_ast.FuncCall(c_ast.ID(p[1], coord),
- c_ast.ExprList([p[3], p[5]], coord),
- coord)
-
- def p_offsetof_member_designator(self, p):
- """ offsetof_member_designator : identifier
- | offsetof_member_designator PERIOD identifier
- | offsetof_member_designator LBRACKET expression RBRACKET
- """
- if len(p) == 2:
- p[0] = p[1]
- elif len(p) == 4:
- field = c_ast.ID(p[3], self._token_coord(p, 3))
- p[0] = c_ast.StructRef(p[1], p[2], field, p[1].coord)
- elif len(p) == 5:
- p[0] = c_ast.ArrayRef(p[1], p[3], p[1].coord)
- else:
- raise NotImplementedError("Unexpected parsing state. len(p): %u" % len(p))
-
- def p_argument_expression_list(self, p):
- """ argument_expression_list : assignment_expression
- | argument_expression_list COMMA assignment_expression
- """
- if len(p) == 2: # single expr
- p[0] = c_ast.ExprList([p[1]], p[1].coord)
- else:
- p[1].exprs.append(p[3])
- p[0] = p[1]
-
- def p_identifier(self, p):
- """ identifier : ID """
- p[0] = c_ast.ID(p[1], self._token_coord(p, 1))
-
- def p_constant_1(self, p):
- """ constant : INT_CONST_DEC
- | INT_CONST_OCT
- | INT_CONST_HEX
- | INT_CONST_BIN
- """
- p[0] = c_ast.Constant(
- 'int', p[1], self._token_coord(p, 1))
-
- def p_constant_2(self, p):
- """ constant : FLOAT_CONST
- | HEX_FLOAT_CONST
- """
- p[0] = c_ast.Constant(
- 'float', p[1], self._token_coord(p, 1))
-
- def p_constant_3(self, p):
- """ constant : CHAR_CONST
- | WCHAR_CONST
- """
- p[0] = c_ast.Constant(
- 'char', p[1], self._token_coord(p, 1))
-
- # The "unified" string and wstring literal rules are for supporting
- # concatenation of adjacent string literals.
- # I.e. "hello " "world" is seen by the C compiler as a single string literal
- # with the value "hello world"
- #
- def p_unified_string_literal(self, p):
- """ unified_string_literal : STRING_LITERAL
- | unified_string_literal STRING_LITERAL
- """
- if len(p) == 2: # single literal
- p[0] = c_ast.Constant(
- 'string', p[1], self._token_coord(p, 1))
- else:
- p[1].value = p[1].value[:-1] + p[2][1:]
- p[0] = p[1]
-
- def p_unified_wstring_literal(self, p):
- """ unified_wstring_literal : WSTRING_LITERAL
- | unified_wstring_literal WSTRING_LITERAL
- """
- if len(p) == 2: # single literal
- p[0] = c_ast.Constant(
- 'string', p[1], self._token_coord(p, 1))
- else:
- p[1].value = p[1].value.rstrip()[:-1] + p[2][2:]
- p[0] = p[1]
-
- def p_brace_open(self, p):
- """ brace_open : LBRACE
- """
- p[0] = p[1]
- p.set_lineno(0, p.lineno(1))
-
- def p_brace_close(self, p):
- """ brace_close : RBRACE
- """
- p[0] = p[1]
- p.set_lineno(0, p.lineno(1))
-
- def p_empty(self, p):
- 'empty : '
- p[0] = None
-
- def p_error(self, p):
- # If error recovery is added here in the future, make sure
- # _get_yacc_lookahead_token still works!
- #
- if p:
- self._parse_error(
- 'before: %s' % p.value,
- self._coord(lineno=p.lineno,
- column=self.clex.find_tok_column(p)))
- else:
- self._parse_error('At end of input', self.clex.filename)
-
-
-#------------------------------------------------------------------------------
-if __name__ == "__main__":
- import pprint
- import time, sys
-
- #t1 = time.time()
- #parser = CParser(lex_optimize=True, yacc_debug=True, yacc_optimize=False)
- #sys.write(time.time() - t1)
-
- #buf = '''
- #int (*k)(int);
- #'''
-
- ## set debuglevel to 2 for debugging
- #t = parser.parse(buf, 'x.c', debuglevel=0)
- #t.show(showcoord=True)
-
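For context on the parser file deleted above: a minimal sketch of how this CParser was typically driven, mirroring the commented-out __main__ block at the end of the file. It assumes the upstream pycparser package is installed in place of the vendored copy being removed, and it exercises two behaviours documented in the deleted comments: the declarator modifier chain (int *c[5]; becomes an ArrayDecl wrapping a PtrDecl wrapping a TypeDecl) and the scope stack that lets a typedef name be lexed as TYPEID on the following line.

from pycparser.c_parser import CParser

parser = CParser()
ast = parser.parse(
    "typedef char TT;\n"   # adds TT to the typedef namespace (scope stack)
    "TT *names;\n"         # TT is now lexed as TYPEID rather than ID
    "int *c[5];\n",        # array-of-pointers declarator chain
    filename="x.c",
)
ast.show(showcoord=True)  # dump the FileAST with source coordinates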
diff --git a/functions/source/GitPullS3/pycparser/lextab.py b/functions/source/GitPullS3/pycparser/lextab.py
deleted file mode 100644
index eb3ae07..0000000
--- a/functions/source/GitPullS3/pycparser/lextab.py
+++ /dev/null
@@ -1,10 +0,0 @@
-# lextab.py. This file automatically created by PLY (version 3.10). Don't edit!
-_tabversion = '3.10'
-_lextokens = set(('VOID', 'LBRACKET', 'WCHAR_CONST', 'FLOAT_CONST', 'MINUS', 'RPAREN', 'LONG', 'PLUS', 'ELLIPSIS', 'GT', 'GOTO', 'ENUM', 'PERIOD', 'GE', 'INT_CONST_DEC', 'ARROW', '__INT128', 'HEX_FLOAT_CONST', 'DOUBLE', 'MINUSEQUAL', 'INT_CONST_OCT', 'TIMESEQUAL', 'OR', 'SHORT', 'RETURN', 'RSHIFTEQUAL', 'RESTRICT', 'STATIC', 'SIZEOF', 'UNSIGNED', 'UNION', 'COLON', 'WSTRING_LITERAL', 'DIVIDE', 'FOR', 'PLUSPLUS', 'EQUALS', 'ELSE', 'INLINE', 'EQ', 'AND', 'TYPEID', 'LBRACE', 'PPHASH', 'INT', 'SIGNED', 'CONTINUE', 'NOT', 'OREQUAL', 'MOD', 'RSHIFT', 'DEFAULT', 'CHAR', 'WHILE', 'DIVEQUAL', 'EXTERN', 'CASE', 'LAND', 'REGISTER', 'MODEQUAL', 'NE', 'SWITCH', 'INT_CONST_HEX', '_COMPLEX', 'PPPRAGMASTR', 'PLUSEQUAL', 'STRUCT', 'CONDOP', 'BREAK', 'VOLATILE', 'PPPRAGMA', 'ANDEQUAL', 'INT_CONST_BIN', 'DO', 'LNOT', 'CONST', 'LOR', 'CHAR_CONST', 'LSHIFT', 'RBRACE', '_BOOL', 'LE', 'SEMI', 'LT', 'COMMA', 'OFFSETOF', 'TYPEDEF', 'XOR', 'AUTO', 'TIMES', 'LPAREN', 'MINUSMINUS', 'ID', 'IF', 'STRING_LITERAL', 'FLOAT', 'XOREQUAL', 'LSHIFTEQUAL', 'RBRACKET'))
-_lexreflags = 64
-_lexliterals = ''
-_lexstateinfo = {'ppline': 'exclusive', 'pppragma': 'exclusive', 'INITIAL': 'inclusive'}
-_lexstatere = {'ppline': [('(?P"([^"\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*")|(?P(0(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|([1-9][0-9]*(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?))|(?P\\n)|(?Pline)', [None, ('t_ppline_FILENAME', 'FILENAME'), None, None, None, None, None, None, ('t_ppline_LINE_NUMBER', 'LINE_NUMBER'), None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ('t_ppline_NEWLINE', 'NEWLINE'), ('t_ppline_PPLINE', 'PPLINE')])], 'pppragma': [('(?P\\n)|(?Ppragma)|(?P.+)', [None, ('t_pppragma_NEWLINE', 'NEWLINE'), ('t_pppragma_PPPRAGMA', 'PPPRAGMA'), ('t_pppragma_STR', 'STR')])], 'INITIAL': [('(?P[ \\t]*\\#)|(?P\\n+)|(?P\\{)|(?P\\})|(?P((((([0-9]*\\.[0-9]+)|([0-9]+\\.))([eE][-+]?[0-9]+)?)|([0-9]+([eE][-+]?[0-9]+)))[FfLl]?))|(?P(0[xX]([0-9a-fA-F]+|((([0-9a-fA-F]+)?\\.[0-9a-fA-F]+)|([0-9a-fA-F]+\\.)))([pP][+-]?[0-9]+)[FfLl]?))|(?P0[xX][0-9a-fA-F]+(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)', [None, ('t_PPHASH', 'PPHASH'), ('t_NEWLINE', 'NEWLINE'), ('t_LBRACE', 'LBRACE'), ('t_RBRACE', 'RBRACE'), ('t_FLOAT_CONST', 'FLOAT_CONST'), None, None, None, None, None, None, None, None, None, ('t_HEX_FLOAT_CONST', 'HEX_FLOAT_CONST'), None, None, None, None, None, None, None, ('t_INT_CONST_HEX', 'INT_CONST_HEX')]), ('(?P0[bB][01]+(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|(?P0[0-7]*[89])|(?P0[0-7]*(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|(?P(0(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|([1-9][0-9]*(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?))|(?P\'([^\'\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))\')|(?PL\'([^\'\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))\')|(?P(\'([^\'\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*\\n)|(\'([^\'\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*$))|(?P(\'([^\'\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))[^\'\n]+\')|(\'\')|(\'([\\\\][^a-zA-Z._~^!=&\\^\\-\\\\?\'"x0-7])[^\'\\n]*\'))', [None, ('t_INT_CONST_BIN', 'INT_CONST_BIN'), None, None, None, None, None, None, None, ('t_BAD_CONST_OCT', 'BAD_CONST_OCT'), ('t_INT_CONST_OCT', 'INT_CONST_OCT'), None, None, None, None, None, None, None, ('t_INT_CONST_DEC', 'INT_CONST_DEC'), None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ('t_CHAR_CONST', 'CHAR_CONST'), None, None, None, None, None, None, ('t_WCHAR_CONST', 'WCHAR_CONST'), None, None, None, None, None, None, ('t_UNMATCHED_QUOTE', 'UNMATCHED_QUOTE'), None, None, None, None, None, None, None, None, None, None, None, None, None, None, ('t_BAD_CHAR_CONST', 'BAD_CHAR_CONST')]), ('(?PL"([^"\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*")|(?P"([^"\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*?([\\\\][^a-zA-Z._~^!=&\\^\\-\\\\?\'"x0-7])([^"\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*")|(?P[a-zA-Z_$][0-9a-zA-Z_$]*)|(?P"([^"\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*")|(?P\\.\\.\\.)|(?P\\+\\+)|(?P\\|\\|)|(?P\\^=)|(?P\\|=)|(?P<<=)|(?P>>=)|(?P\\+=)|(?P\\*=)|(?P\\+)|(?P%=)|(?P/=)', [None, ('t_WSTRING_LITERAL', 'WSTRING_LITERAL'), None, None, None, None, None, None, ('t_BAD_STRING_LITERAL', 'BAD_STRING_LITERAL'), None, None, None, None, None, None, 
None, None, None, None, None, None, None, ('t_ID', 'ID'), (None, 'STRING_LITERAL'), None, None, None, None, None, None, (None, 'ELLIPSIS'), (None, 'PLUSPLUS'), (None, 'LOR'), (None, 'XOREQUAL'), (None, 'OREQUAL'), (None, 'LSHIFTEQUAL'), (None, 'RSHIFTEQUAL'), (None, 'PLUSEQUAL'), (None, 'TIMESEQUAL'), (None, 'PLUS'), (None, 'MODEQUAL'), (None, 'DIVEQUAL')]), ('(?P\\])|(?P\\?)|(?P\\^)|(?P<<)|(?P<=)|(?P\\()|(?P->)|(?P==)|(?P!=)|(?P--)|(?P\\|)|(?P\\*)|(?P\\[)|(?P>=)|(?P\\))|(?P&&)|(?P>>)|(?P-=)|(?P\\.)|(?P&=)|(?P=)|(?P<)|(?P,)|(?P/)|(?P&)|(?P%)|(?P;)|(?P-)|(?P>)|(?P:)|(?P~)|(?P!)', [None, (None, 'RBRACKET'), (None, 'CONDOP'), (None, 'XOR'), (None, 'LSHIFT'), (None, 'LE'), (None, 'LPAREN'), (None, 'ARROW'), (None, 'EQ'), (None, 'NE'), (None, 'MINUSMINUS'), (None, 'OR'), (None, 'TIMES'), (None, 'LBRACKET'), (None, 'GE'), (None, 'RPAREN'), (None, 'LAND'), (None, 'RSHIFT'), (None, 'MINUSEQUAL'), (None, 'PERIOD'), (None, 'ANDEQUAL'), (None, 'EQUALS'), (None, 'LT'), (None, 'COMMA'), (None, 'DIVIDE'), (None, 'AND'), (None, 'MOD'), (None, 'SEMI'), (None, 'MINUS'), (None, 'GT'), (None, 'COLON'), (None, 'NOT'), (None, 'LNOT')])]}
-_lexstateignore = {'ppline': ' \t', 'pppragma': ' \t', 'INITIAL': ' \t'}
-_lexstateerrorf = {'ppline': 't_ppline_error', 'pppragma': 't_pppragma_error', 'INITIAL': 't_error'}
-_lexstateeoff = {}
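
The _lexstatere/_lexstateignore/_lexstateerrorf/_lexstateeoff tables above are the cached output of PLY's optimized mode, so deleting the generated file is safe: PLY can rebuild and rewrite it when the cached module is missing or stale. A minimal sketch of how such a lextab.py is produced, assuming a hypothetical rules module named clex that defines the usual tokens list, t_* rules, and t_error (none of these names come from this repository):

    # Regenerating a lextab.py like the one deleted above (sketch; `clex` is a
    # hypothetical module providing `tokens`, the t_* rules, and t_error).
    import ply.lex as lex
    import clex

    # With optimize enabled and a lextab name, lex() caches the precomputed
    # tables (_lexstatere, _lexstateignore, ...) by writing them out via
    # writetab() whenever the cached module is missing or has a stale version.
    lexer = lex.lex(module=clex, optimize=1, lextab='lextab')

    lexer.input('int x = 42;')
    for tok in lexer:
        print(tok.type, tok.value)
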
diff --git a/functions/source/GitPullS3/pycparser/ply/__init__.py b/functions/source/GitPullS3/pycparser/ply/__init__.py
deleted file mode 100644
index 6e53cdd..0000000
--- a/functions/source/GitPullS3/pycparser/ply/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-# PLY package
-# Author: David Beazley (dave@dabeaz.com)
-
-__version__ = '3.9'
-__all__ = ['lex','yacc']
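
This __init__.py only re-exports the lex and yacc submodules, so dropping the vendored copy does not change how PLY is used. For reference, a minimal lexer built against the lex module that appears later in this diff might look like the sketch below (the token names are illustrative, not taken from this repository):

    import ply.lex as lex

    tokens = ('NUMBER', 'PLUS')

    t_PLUS = r'\+'
    t_ignore = ' \t'              # ignored characters, mirroring _lexstateignore

    def t_NUMBER(t):
        r'\d+'
        t.value = int(t.value)
        return t

    def t_error(t):
        print("Illegal character %r" % t.value[0])
        t.lexer.skip(1)

    lexer = lex.lex()             # collects the t_* rules from this module
    lexer.input('1 + 2')
    print([(tok.type, tok.value) for tok in lexer])
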
diff --git a/functions/source/GitPullS3/pycparser/ply/cpp.py b/functions/source/GitPullS3/pycparser/ply/cpp.py
deleted file mode 100644
index 8ff0be1..0000000
--- a/functions/source/GitPullS3/pycparser/ply/cpp.py
+++ /dev/null
@@ -1,907 +0,0 @@
-# -----------------------------------------------------------------------------
-# cpp.py
-#
-# Author: David Beazley (http://www.dabeaz.com)
-# Copyright (C) 2017
-# All rights reserved
-#
-# This module implements an ANSI-C style lexical preprocessor for PLY.
-# -----------------------------------------------------------------------------
-from __future__ import generators
-
-import sys
-
-# Some Python 3 compatibility shims
-if sys.version_info.major < 3:
- STRING_TYPES = (str, unicode)
-else:
- STRING_TYPES = str
- xrange = range
-
-# -----------------------------------------------------------------------------
-# Default preprocessor lexer definitions. These tokens are enough to get
-# a basic preprocessor working. Other modules may import these if they want
-# -----------------------------------------------------------------------------
-
-tokens = (
- 'CPP_ID','CPP_INTEGER', 'CPP_FLOAT', 'CPP_STRING', 'CPP_CHAR', 'CPP_WS', 'CPP_COMMENT1', 'CPP_COMMENT2', 'CPP_POUND','CPP_DPOUND'
-)
-
-literals = "+-*/%|&~^<>=!?()[]{}.,;:\\\'\""
-
-# Whitespace
-def t_CPP_WS(t):
- r'\s+'
- t.lexer.lineno += t.value.count("\n")
- return t
-
-t_CPP_POUND = r'\#'
-t_CPP_DPOUND = r'\#\#'
-
-# Identifier
-t_CPP_ID = r'[A-Za-z_][\w_]*'
-
-# Integer literal
-def CPP_INTEGER(t):
- r'(((((0x)|(0X))[0-9a-fA-F]+)|(\d+))([uU][lL]|[lL][uU]|[uU]|[lL])?)'
- return t
-
-t_CPP_INTEGER = CPP_INTEGER
-
-# Floating literal
-t_CPP_FLOAT = r'((\d+)(\.\d+)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?'
-
-# String literal
-def t_CPP_STRING(t):
- r'\"([^\\\n]|(\\(.|\n)))*?\"'
- t.lexer.lineno += t.value.count("\n")
- return t
-
-# Character constant 'c' or L'c'
-def t_CPP_CHAR(t):
- r'(L)?\'([^\\\n]|(\\(.|\n)))*?\''
- t.lexer.lineno += t.value.count("\n")
- return t
-
-# Comment
-def t_CPP_COMMENT1(t):
- r'(/\*(.|\n)*?\*/)'
- ncr = t.value.count("\n")
- t.lexer.lineno += ncr
- # replace with one space or a number of '\n'
- t.type = 'CPP_WS'; t.value = '\n' * ncr if ncr else ' '
- return t
-
-# Line comment
-def t_CPP_COMMENT2(t):
- r'(//.*?(\n|$))'
- # replace with '\n'
- t.type = 'CPP_WS'; t.value = '\n'
- return t
-
-def t_error(t):
- t.type = t.value[0]
- t.value = t.value[0]
- t.lexer.skip(1)
- return t
-
-import re
-import copy
-import time
-import os.path
-
-# -----------------------------------------------------------------------------
-# trigraph()
-#
-# Given an input string, this function replaces all trigraph sequences.
-# The following mapping is used:
-#
-# ??= #
-# ??/ \
-# ??' ^
-# ??( [
-# ??) ]
-# ??! |
-# ??< {
-# ??> }
-# ??- ~
-# -----------------------------------------------------------------------------
-
-_trigraph_pat = re.compile(r'''\?\?[=/\'\(\)\!<>\-]''')
-_trigraph_rep = {
- '=':'#',
- '/':'\\',
- "'":'^',
- '(':'[',
- ')':']',
- '!':'|',
- '<':'{',
- '>':'}',
- '-':'~'
-}
-
-def trigraph(input):
- return _trigraph_pat.sub(lambda g: _trigraph_rep[g.group()[-1]],input)
-
-# ------------------------------------------------------------------
-# Macro object
-#
-# This object holds information about preprocessor macros
-#
-# .name - Macro name (string)
-# .value - Macro value (a list of tokens)
-# .arglist - List of argument names
-# .variadic - Boolean indicating whether or not the macro is variadic
-# .vararg - Name of the variadic parameter
-#
-# When a macro is created, the macro replacement token sequence is
-# pre-scanned and used to create patch lists that are later used
-# during macro expansion
-# ------------------------------------------------------------------
-
-class Macro(object):
- def __init__(self,name,value,arglist=None,variadic=False):
- self.name = name
- self.value = value
- self.arglist = arglist
- self.variadic = variadic
- if variadic:
- self.vararg = arglist[-1]
- self.source = None
-
-# ------------------------------------------------------------------
-# Preprocessor object
-#
-# Object representing a preprocessor. Contains macro definitions,
-# include directories, and other information
-# ------------------------------------------------------------------
-
-class Preprocessor(object):
- def __init__(self,lexer=None):
- if lexer is None:
- lexer = lex.lexer
- self.lexer = lexer
- self.macros = { }
- self.path = []
- self.temp_path = []
-
- # Probe the lexer for selected tokens
- self.lexprobe()
-
- tm = time.localtime()
- self.define("__DATE__ \"%s\"" % time.strftime("%b %d %Y",tm))
- self.define("__TIME__ \"%s\"" % time.strftime("%H:%M:%S",tm))
- self.parser = None
-
- # -----------------------------------------------------------------------------
- # tokenize()
- #
- # Utility function. Given a string of text, tokenize into a list of tokens
- # -----------------------------------------------------------------------------
-
- def tokenize(self,text):
- tokens = []
- self.lexer.input(text)
- while True:
- tok = self.lexer.token()
- if not tok: break
- tokens.append(tok)
- return tokens
-
- # ---------------------------------------------------------------------
- # error()
- #
- # Report a preprocessor error/warning of some kind
- # ----------------------------------------------------------------------
-
- def error(self,file,line,msg):
- print("%s:%d %s" % (file,line,msg))
-
- # ----------------------------------------------------------------------
- # lexprobe()
- #
- # This method probes the preprocessor lexer object to discover
- # the token types of symbols that are important to the preprocessor.
- # If this works right, the preprocessor will simply "work"
- # with any suitable lexer regardless of how tokens have been named.
- # ----------------------------------------------------------------------
-
- def lexprobe(self):
-
- # Determine the token type for identifiers
- self.lexer.input("identifier")
- tok = self.lexer.token()
- if not tok or tok.value != "identifier":
- print("Couldn't determine identifier type")
- else:
- self.t_ID = tok.type
-
- # Determine the token type for integers
- self.lexer.input("12345")
- tok = self.lexer.token()
- if not tok or int(tok.value) != 12345:
- print("Couldn't determine integer type")
- else:
- self.t_INTEGER = tok.type
- self.t_INTEGER_TYPE = type(tok.value)
-
- # Determine the token type for strings enclosed in double quotes
- self.lexer.input("\"filename\"")
- tok = self.lexer.token()
- if not tok or tok.value != "\"filename\"":
- print("Couldn't determine string type")
- else:
- self.t_STRING = tok.type
-
- # Determine the token type for whitespace--if any
- self.lexer.input(" ")
- tok = self.lexer.token()
- if not tok or tok.value != " ":
- self.t_SPACE = None
- else:
- self.t_SPACE = tok.type
-
- # Determine the token type for newlines
- self.lexer.input("\n")
- tok = self.lexer.token()
- if not tok or tok.value != "\n":
- self.t_NEWLINE = None
- print("Couldn't determine token for newlines")
- else:
- self.t_NEWLINE = tok.type
-
- self.t_WS = (self.t_SPACE, self.t_NEWLINE)
-
- # Check for other characters used by the preprocessor
- chars = [ '<','>','#','##','\\','(',')',',','.']
- for c in chars:
- self.lexer.input(c)
- tok = self.lexer.token()
- if not tok or tok.value != c:
- print("Unable to lex '%s' required for preprocessor" % c)
-
- # ----------------------------------------------------------------------
- # add_path()
- #
- # Adds a search path to the preprocessor.
- # ----------------------------------------------------------------------
-
- def add_path(self,path):
- self.path.append(path)
-
- # ----------------------------------------------------------------------
- # group_lines()
- #
- # Given an input string, this function splits it into lines. Trailing whitespace
- # is removed. Any line ending with \ is grouped with the next line. This
- # function forms the lowest level of the preprocessor---grouping text into
- # a line-by-line format.
- # ----------------------------------------------------------------------
-
- def group_lines(self,input):
- lex = self.lexer.clone()
- lines = [x.rstrip() for x in input.splitlines()]
- for i in xrange(len(lines)):
- j = i+1
- while lines[i].endswith('\\') and (j < len(lines)):
- lines[i] = lines[i][:-1]+lines[j]
- lines[j] = ""
- j += 1
-
- input = "\n".join(lines)
- lex.input(input)
- lex.lineno = 1
-
- current_line = []
- while True:
- tok = lex.token()
- if not tok:
- break
- current_line.append(tok)
- if tok.type in self.t_WS and '\n' in tok.value:
- yield current_line
- current_line = []
-
- if current_line:
- yield current_line
-
- # ----------------------------------------------------------------------
- # tokenstrip()
- #
- # Remove leading/trailing whitespace tokens from a token list
- # ----------------------------------------------------------------------
-
- def tokenstrip(self,tokens):
- i = 0
- while i < len(tokens) and tokens[i].type in self.t_WS:
- i += 1
- del tokens[:i]
- i = len(tokens)-1
- while i >= 0 and tokens[i].type in self.t_WS:
- i -= 1
- del tokens[i+1:]
- return tokens
-
-
- # ----------------------------------------------------------------------
- # collect_args()
- #
- # Collects comma-separated arguments from a list of tokens. The arguments
- # must be enclosed in parentheses. Returns a tuple (tokencount,args,positions)
- # where tokencount is the number of tokens consumed, args is a list of arguments,
- # and positions is a list of integers containing the starting index of each
- # argument. Each argument is represented by a list of tokens.
- #
- # When collecting arguments, leading and trailing whitespace is removed
- # from each argument.
- #
- # This function properly handles nested parentheses and commas---these do not
- # define new arguments.
- # ----------------------------------------------------------------------
-
- def collect_args(self,tokenlist):
- args = []
- positions = []
- current_arg = []
- nesting = 1
- tokenlen = len(tokenlist)
-
- # Search for the opening '('.
- i = 0
- while (i < tokenlen) and (tokenlist[i].type in self.t_WS):
- i += 1
-
- if (i < tokenlen) and (tokenlist[i].value == '('):
- positions.append(i+1)
- else:
- self.error(self.source,tokenlist[0].lineno,"Missing '(' in macro arguments")
- return 0, [], []
-
- i += 1
-
- while i < tokenlen:
- t = tokenlist[i]
- if t.value == '(':
- current_arg.append(t)
- nesting += 1
- elif t.value == ')':
- nesting -= 1
- if nesting == 0:
- if current_arg:
- args.append(self.tokenstrip(current_arg))
- positions.append(i)
- return i+1,args,positions
- current_arg.append(t)
- elif t.value == ',' and nesting == 1:
- args.append(self.tokenstrip(current_arg))
- positions.append(i+1)
- current_arg = []
- else:
- current_arg.append(t)
- i += 1
-
- # Missing end argument
- self.error(self.source,tokenlist[-1].lineno,"Missing ')' in macro arguments")
- return 0, [],[]
-
- # ----------------------------------------------------------------------
- # macro_prescan()
- #
- # Examine the macro value (token sequence) and identify patch points
- # This is used to speed up macro expansion later on---we'll know
- # right away where to apply patches to the value to form the expansion
- # ----------------------------------------------------------------------
-
- def macro_prescan(self,macro):
- macro.patch = [] # Standard macro arguments
- macro.str_patch = [] # String conversion expansion
- macro.var_comma_patch = [] # Variadic macro comma patch
- i = 0
- while i < len(macro.value):
- if macro.value[i].type == self.t_ID and macro.value[i].value in macro.arglist:
- argnum = macro.arglist.index(macro.value[i].value)
- # Conversion of argument to a string
- if i > 0 and macro.value[i-1].value == '#':
- macro.value[i] = copy.copy(macro.value[i])
- macro.value[i].type = self.t_STRING
- del macro.value[i-1]
- macro.str_patch.append((argnum,i-1))
- continue
- # Concatenation
- elif (i > 0 and macro.value[i-1].value == '##'):
- macro.patch.append(('c',argnum,i-1))
- del macro.value[i-1]
- continue
- elif ((i+1) < len(macro.value) and macro.value[i+1].value == '##'):
- macro.patch.append(('c',argnum,i))
- i += 1
- continue
- # Standard expansion
- else:
- macro.patch.append(('e',argnum,i))
- elif macro.value[i].value == '##':
- if macro.variadic and (i > 0) and (macro.value[i-1].value == ',') and \
- ((i+1) < len(macro.value)) and (macro.value[i+1].type == self.t_ID) and \
- (macro.value[i+1].value == macro.vararg):
- macro.var_comma_patch.append(i-1)
- i += 1
- macro.patch.sort(key=lambda x: x[2],reverse=True)
-
- # ----------------------------------------------------------------------
- # macro_expand_args()
- #
- # Given a Macro and list of arguments (each a token list), this method
- # returns an expanded version of a macro. The return value is a token sequence
- # representing the replacement macro tokens
- # ----------------------------------------------------------------------
-
- def macro_expand_args(self,macro,args):
- # Make a copy of the macro token sequence
- rep = [copy.copy(_x) for _x in macro.value]
-
- # Make string expansion patches. These do not alter the length of the replacement sequence
-
- str_expansion = {}
- for argnum, i in macro.str_patch:
- if argnum not in str_expansion:
- str_expansion[argnum] = ('"%s"' % "".join([x.value for x in args[argnum]])).replace("\\","\\\\")
- rep[i] = copy.copy(rep[i])
- rep[i].value = str_expansion[argnum]
-
- # Make the variadic macro comma patch. If the variadic macro argument is empty, we get rid of the comma that precedes it
- comma_patch = False
- if macro.variadic and not args[-1]:
- for i in macro.var_comma_patch:
- rep[i] = None
- comma_patch = True
-
- # Make all other patches. The order of these matters. It is assumed that the patch list
- # has been sorted in reverse order of patch location since replacements will cause the
- # size of the replacement sequence to expand from the patch point.
-
- expanded = { }
- for ptype, argnum, i in macro.patch:
- # Concatenation. Argument is left unexpanded
- if ptype == 'c':
- rep[i:i+1] = args[argnum]
- # Normal expansion. Argument is macro expanded first
- elif ptype == 'e':
- if argnum not in expanded:
- expanded[argnum] = self.expand_macros(args[argnum])
- rep[i:i+1] = expanded[argnum]
-
- # Get rid of removed comma if necessary
- if comma_patch:
- rep = [_i for _i in rep if _i]
-
- return rep
-
-
- # ----------------------------------------------------------------------
- # expand_macros()
- #
- # Given a list of tokens, this function performs macro expansion.
- # The expanded argument is a dictionary that contains macros already
- # expanded. This is used to prevent infinite recursion.
- # ----------------------------------------------------------------------
-
- def expand_macros(self,tokens,expanded=None):
- if expanded is None:
- expanded = {}
- i = 0
- while i < len(tokens):
- t = tokens[i]
- if t.type == self.t_ID:
- if t.value in self.macros and t.value not in expanded:
- # Yes, we found a macro match
- expanded[t.value] = True
-
- m = self.macros[t.value]
- if not m.arglist:
- # A simple macro
- ex = self.expand_macros([copy.copy(_x) for _x in m.value],expanded)
- for e in ex:
- e.lineno = t.lineno
- tokens[i:i+1] = ex
- i += len(ex)
- else:
- # A macro with arguments
- j = i + 1
- while j < len(tokens) and tokens[j].type in self.t_WS:
- j += 1
- if tokens[j].value == '(':
- tokcount,args,positions = self.collect_args(tokens[j:])
- if not m.variadic and len(args) != len(m.arglist):
- self.error(self.source,t.lineno,"Macro %s requires %d arguments" % (t.value,len(m.arglist)))
- i = j + tokcount
- elif m.variadic and len(args) < len(m.arglist)-1:
- if len(m.arglist) > 2:
- self.error(self.source,t.lineno,"Macro %s must have at least %d arguments" % (t.value, len(m.arglist)-1))
- else:
- self.error(self.source,t.lineno,"Macro %s must have at least %d argument" % (t.value, len(m.arglist)-1))
- i = j + tokcount
- else:
- if m.variadic:
- if len(args) == len(m.arglist)-1:
- args.append([])
- else:
- args[len(m.arglist)-1] = tokens[j+positions[len(m.arglist)-1]:j+tokcount-1]
- del args[len(m.arglist):]
-
- # Get macro replacement text
- rep = self.macro_expand_args(m,args)
- rep = self.expand_macros(rep,expanded)
- for r in rep:
- r.lineno = t.lineno
- tokens[i:j+tokcount] = rep
- i += len(rep)
- del expanded[t.value]
- continue
- elif t.value == '__LINE__':
- t.type = self.t_INTEGER
- t.value = self.t_INTEGER_TYPE(t.lineno)
-
- i += 1
- return tokens
-
- # ----------------------------------------------------------------------
- # evalexpr()
- #
- # Evaluate an expression token sequence for the purposes of evaluating
- # integral expressions.
- # ----------------------------------------------------------------------
-
- def evalexpr(self,tokens):
- # tokens = tokenize(line)
- # Search for defined macros
- i = 0
- while i < len(tokens):
- if tokens[i].type == self.t_ID and tokens[i].value == 'defined':
- j = i + 1
- needparen = False
- result = "0L"
- while j < len(tokens):
- if tokens[j].type in self.t_WS:
- j += 1
- continue
- elif tokens[j].type == self.t_ID:
- if tokens[j].value in self.macros:
- result = "1L"
- else:
- result = "0L"
- if not needparen: break
- elif tokens[j].value == '(':
- needparen = True
- elif tokens[j].value == ')':
- break
- else:
- self.error(self.source,tokens[i].lineno,"Malformed defined()")
- j += 1
- tokens[i].type = self.t_INTEGER
- tokens[i].value = self.t_INTEGER_TYPE(result)
- del tokens[i+1:j+1]
- i += 1
- tokens = self.expand_macros(tokens)
- for i,t in enumerate(tokens):
- if t.type == self.t_ID:
- tokens[i] = copy.copy(t)
- tokens[i].type = self.t_INTEGER
- tokens[i].value = self.t_INTEGER_TYPE("0L")
- elif t.type == self.t_INTEGER:
- tokens[i] = copy.copy(t)
- # Strip off any trailing suffixes
- tokens[i].value = str(tokens[i].value)
- while tokens[i].value[-1] not in "0123456789abcdefABCDEF":
- tokens[i].value = tokens[i].value[:-1]
-
- expr = "".join([str(x.value) for x in tokens])
- expr = expr.replace("&&"," and ")
- expr = expr.replace("||"," or ")
- expr = expr.replace("!"," not ")
- try:
- result = eval(expr)
- except Exception:
- self.error(self.source,tokens[0].lineno,"Couldn't evaluate expression")
- result = 0
- return result
-
- # ----------------------------------------------------------------------
- # parsegen()
- #
- # Parse an input string.
- # ----------------------------------------------------------------------
- def parsegen(self,input,source=None):
-
- # Replace trigraph sequences
- t = trigraph(input)
- lines = self.group_lines(t)
-
- if not source:
- source = ""
-
- self.define("__FILE__ \"%s\"" % source)
-
- self.source = source
- chunk = []
- enable = True
- iftrigger = False
- ifstack = []
-
- for x in lines:
- for i,tok in enumerate(x):
- if tok.type not in self.t_WS: break
- if tok.value == '#':
- # Preprocessor directive
-
- # insert necessary whitespace instead of eaten tokens
- for tok in x:
- if tok.type in self.t_WS and '\n' in tok.value:
- chunk.append(tok)
-
- dirtokens = self.tokenstrip(x[i+1:])
- if dirtokens:
- name = dirtokens[0].value
- args = self.tokenstrip(dirtokens[1:])
- else:
- name = ""
- args = []
-
- if name == 'define':
- if enable:
- for tok in self.expand_macros(chunk):
- yield tok
- chunk = []
- self.define(args)
- elif name == 'include':
- if enable:
- for tok in self.expand_macros(chunk):
- yield tok
- chunk = []
- oldfile = self.macros['__FILE__']
- for tok in self.include(args):
- yield tok
- self.macros['__FILE__'] = oldfile
- self.source = source
- elif name == 'undef':
- if enable:
- for tok in self.expand_macros(chunk):
- yield tok
- chunk = []
- self.undef(args)
- elif name == 'ifdef':
- ifstack.append((enable,iftrigger))
- if enable:
- if not args[0].value in self.macros:
- enable = False
- iftrigger = False
- else:
- iftrigger = True
- elif name == 'ifndef':
- ifstack.append((enable,iftrigger))
- if enable:
- if args[0].value in self.macros:
- enable = False
- iftrigger = False
- else:
- iftrigger = True
- elif name == 'if':
- ifstack.append((enable,iftrigger))
- if enable:
- result = self.evalexpr(args)
- if not result:
- enable = False
- iftrigger = False
- else:
- iftrigger = True
- elif name == 'elif':
- if ifstack:
- if ifstack[-1][0]: # We only pay attention if outer "if" allows this
- if enable: # If already true, we flip enable False
- enable = False
- elif not iftrigger: # If False, but not triggered yet, we'll check expression
- result = self.evalexpr(args)
- if result:
- enable = True
- iftrigger = True
- else:
- self.error(self.source,dirtokens[0].lineno,"Misplaced #elif")
-
- elif name == 'else':
- if ifstack:
- if ifstack[-1][0]:
- if enable:
- enable = False
- elif not iftrigger:
- enable = True
- iftrigger = True
- else:
- self.error(self.source,dirtokens[0].lineno,"Misplaced #else")
-
- elif name == 'endif':
- if ifstack:
- enable,iftrigger = ifstack.pop()
- else:
- self.error(self.source,dirtokens[0].lineno,"Misplaced #endif")
- else:
- # Unknown preprocessor directive
- pass
-
- else:
- # Normal text
- if enable:
- chunk.extend(x)
-
- for tok in self.expand_macros(chunk):
- yield tok
- chunk = []
-
- # ----------------------------------------------------------------------
- # include()
- #
- # Implementation of file-inclusion
- # ----------------------------------------------------------------------
-
- def include(self,tokens):
- # Try to extract the filename and then process an include file
- if not tokens:
- return
- if tokens:
- if tokens[0].value != '<' and tokens[0].type != self.t_STRING:
- tokens = self.expand_macros(tokens)
-
- if tokens[0].value == '<':
- # Include <...>
- i = 1
- while i < len(tokens):
- if tokens[i].value == '>':
- break
- i += 1
- else:
- print("Malformed #include <...>")
- return
- filename = "".join([x.value for x in tokens[1:i]])
- path = self.path + [""] + self.temp_path
- elif tokens[0].type == self.t_STRING:
- filename = tokens[0].value[1:-1]
- path = self.temp_path + [""] + self.path
- else:
- print("Malformed #include statement")
- return
- for p in path:
- iname = os.path.join(p,filename)
- try:
- data = open(iname,"r").read()
- dname = os.path.dirname(iname)
- if dname:
- self.temp_path.insert(0,dname)
- for tok in self.parsegen(data,filename):
- yield tok
- if dname:
- del self.temp_path[0]
- break
- except IOError:
- pass
- else:
- print("Couldn't find '%s'" % filename)
-
- # ----------------------------------------------------------------------
- # define()
- #
- # Define a new macro
- # ----------------------------------------------------------------------
-
- def define(self,tokens):
- if isinstance(tokens,STRING_TYPES):
- tokens = self.tokenize(tokens)
-
- linetok = tokens
- try:
- name = linetok[0]
- if len(linetok) > 1:
- mtype = linetok[1]
- else:
- mtype = None
- if not mtype:
- m = Macro(name.value,[])
- self.macros[name.value] = m
- elif mtype.type in self.t_WS:
- # A normal macro
- m = Macro(name.value,self.tokenstrip(linetok[2:]))
- self.macros[name.value] = m
- elif mtype.value == '(':
- # A macro with arguments
- tokcount, args, positions = self.collect_args(linetok[1:])
- variadic = False
- for a in args:
- if variadic:
- print("No more arguments may follow a variadic argument")
- break
- astr = "".join([str(_i.value) for _i in a])
- if astr == "...":
- variadic = True
- a[0].type = self.t_ID
- a[0].value = '__VA_ARGS__'
- variadic = True
- del a[1:]
- continue
- elif astr[-3:] == "..." and a[0].type == self.t_ID:
- variadic = True
- del a[1:]
- # If, for some reason, "." is part of the identifier, strip off the name for the purposes
- # of macro expansion
- if a[0].value[-3:] == '...':
- a[0].value = a[0].value[:-3]
- continue
- if len(a) > 1 or a[0].type != self.t_ID:
- print("Invalid macro argument")
- break
- else:
- mvalue = self.tokenstrip(linetok[1+tokcount:])
- i = 0
- while i < len(mvalue):
- if i+1 < len(mvalue):
- if mvalue[i].type in self.t_WS and mvalue[i+1].value == '##':
- del mvalue[i]
- continue
- elif mvalue[i].value == '##' and mvalue[i+1].type in self.t_WS:
- del mvalue[i+1]
- i += 1
- m = Macro(name.value,mvalue,[x[0].value for x in args],variadic)
- self.macro_prescan(m)
- self.macros[name.value] = m
- else:
- print("Bad macro definition")
- except LookupError:
- print("Bad macro definition")
-
- # ----------------------------------------------------------------------
- # undef()
- #
- # Undefine a macro
- # ----------------------------------------------------------------------
-
- def undef(self,tokens):
- id = tokens[0].value
- try:
- del self.macros[id]
- except LookupError:
- pass
-
- # ----------------------------------------------------------------------
- # parse()
- #
- # Parse input text.
- # ----------------------------------------------------------------------
- def parse(self,input,source=None,ignore={}):
- self.ignore = ignore
- self.parser = self.parsegen(input,source)
-
- # ----------------------------------------------------------------------
- # token()
- #
- # Method to return individual tokens
- # ----------------------------------------------------------------------
- def token(self):
- try:
- while True:
- tok = next(self.parser)
- if tok.type not in self.ignore: return tok
- except StopIteration:
- self.parser = None
- return None
-
-if __name__ == '__main__':
- import ply.lex as lex
- lexer = lex.lex()
-
- # Run a preprocessor
- import sys
- f = open(sys.argv[1])
- input = f.read()
-
- p = Preprocessor(lexer)
- p.parse(input,sys.argv[1])
- while True:
- tok = p.token()
- if not tok: break
- print(p.source, tok)
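
The __main__ block above already shows cpp.py's intended command-line use; driven as a library, the Preprocessor would be exercised along these lines (a hedged sketch, assuming the module is importable as ply.cpp; this is not code from this repository):

    import ply.lex as lex
    import ply.cpp as cpp

    # Build a lexer from the CPP_* token rules defined at the top of cpp.py.
    lexer = lex.lex(module=cpp)
    p = cpp.Preprocessor(lexer)

    # define() accepts the same string form the class itself uses for __DATE__/__TIME__.
    p.define('SQUARE(x) ((x)*(x))')
    p.parse('int y = SQUARE(3);', source='<string>')

    pieces = []
    while True:
        tok = p.token()
        if not tok:
            break
        pieces.append(str(tok.value))
    print(''.join(pieces))          # roughly: int y = ((3)*(3));
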
diff --git a/functions/source/GitPullS3/pycparser/ply/ctokens.py b/functions/source/GitPullS3/pycparser/ply/ctokens.py
deleted file mode 100644
index f6f6952..0000000
--- a/functions/source/GitPullS3/pycparser/ply/ctokens.py
+++ /dev/null
@@ -1,133 +0,0 @@
-# ----------------------------------------------------------------------
-# ctokens.py
-#
-# Token specifications for symbols in ANSI C and C++. This file is
-# meant to be used as a library in other tokenizers.
-# ----------------------------------------------------------------------
-
-# Reserved words
-
-tokens = [
- # Literals (identifier, integer constant, float constant, string constant, char const)
- 'ID', 'TYPEID', 'INTEGER', 'FLOAT', 'STRING', 'CHARACTER',
-
- # Operators (+,-,*,/,%,|,&,~,^,<<,>>, ||, &&, !, <, <=, >, >=, ==, !=)
- 'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'MODULO',
- 'OR', 'AND', 'NOT', 'XOR', 'LSHIFT', 'RSHIFT',
- 'LOR', 'LAND', 'LNOT',
- 'LT', 'LE', 'GT', 'GE', 'EQ', 'NE',
-
- # Assignment (=, *=, /=, %=, +=, -=, <<=, >>=, &=, ^=, |=)
- 'EQUALS', 'TIMESEQUAL', 'DIVEQUAL', 'MODEQUAL', 'PLUSEQUAL', 'MINUSEQUAL',
- 'LSHIFTEQUAL','RSHIFTEQUAL', 'ANDEQUAL', 'XOREQUAL', 'OREQUAL',
-
- # Increment/decrement (++,--)
- 'INCREMENT', 'DECREMENT',
-
- # Structure dereference (->)
- 'ARROW',
-
- # Ternary operator (?)
- 'TERNARY',
-
- # Delimiters ( ) [ ] { } , . ; :
- 'LPAREN', 'RPAREN',
- 'LBRACKET', 'RBRACKET',
- 'LBRACE', 'RBRACE',
- 'COMMA', 'PERIOD', 'SEMI', 'COLON',
-
- # Ellipsis (...)
- 'ELLIPSIS',
-]
-
-# Operators
-t_PLUS = r'\+'
-t_MINUS = r'-'
-t_TIMES = r'\*'
-t_DIVIDE = r'/'
-t_MODULO = r'%'
-t_OR = r'\|'
-t_AND = r'&'
-t_NOT = r'~'
-t_XOR = r'\^'
-t_LSHIFT = r'<<'
-t_RSHIFT = r'>>'
-t_LOR = r'\|\|'
-t_LAND = r'&&'
-t_LNOT = r'!'
-t_LT = r'<'
-t_GT = r'>'
-t_LE = r'<='
-t_GE = r'>='
-t_EQ = r'=='
-t_NE = r'!='
-
-# Assignment operators
-
-t_EQUALS = r'='
-t_TIMESEQUAL = r'\*='
-t_DIVEQUAL = r'/='
-t_MODEQUAL = r'%='
-t_PLUSEQUAL = r'\+='
-t_MINUSEQUAL = r'-='
-t_LSHIFTEQUAL = r'<<='
-t_RSHIFTEQUAL = r'>>='
-t_ANDEQUAL = r'&='
-t_OREQUAL = r'\|='
-t_XOREQUAL = r'\^='
-
-# Increment/decrement
-t_INCREMENT = r'\+\+'
-t_DECREMENT = r'--'
-
-# ->
-t_ARROW = r'->'
-
-# ?
-t_TERNARY = r'\?'
-
-# Delimiters
-t_LPAREN = r'\('
-t_RPAREN = r'\)'
-t_LBRACKET = r'\['
-t_RBRACKET = r'\]'
-t_LBRACE = r'\{'
-t_RBRACE = r'\}'
-t_COMMA = r','
-t_PERIOD = r'\.'
-t_SEMI = r';'
-t_COLON = r':'
-t_ELLIPSIS = r'\.\.\.'
-
-# Identifiers
-t_ID = r'[A-Za-z_][A-Za-z0-9_]*'
-
-# Integer literal
-t_INTEGER = r'\d+([uU]|[lL]|[uU][lL]|[lL][uU])?'
-
-# Floating literal
-t_FLOAT = r'((\d+)(\.\d+)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?'
-
-# String literal
-t_STRING = r'\"([^\\\n]|(\\.))*?\"'
-
-# Character constant 'c' or L'c'
-t_CHARACTER = r'(L)?\'([^\\\n]|(\\.))*?\''
-
-# Comment (C-Style)
-def t_COMMENT(t):
- r'/\*(.|\n)*?\*/'
- t.lexer.lineno += t.value.count('\n')
- return t
-
-# Comment (C++-Style)
-def t_CPPCOMMENT(t):
- r'//.*\n'
- t.lexer.lineno += 1
- return t
-
-
-
-
-
-
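
ctokens.py describes itself as a library of shared token specifications rather than a standalone lexer. Reused from another tokenizer module it would look roughly like this (a sketch, assuming the file imports as ply.ctokens; the t_ignore addition is illustrative, and a real consumer would also supply its own t_error rule):

    import ply.lex as lex
    from ply.ctokens import *    # brings in `tokens` and the shared t_* rules

    t_ignore = ' \t'             # the consumer adds its own ignore/error handling

    lexer = lex.lex()
    lexer.input('a += b->c;')
    print([tok.type for tok in lexer])
    # ['ID', 'PLUSEQUAL', 'ID', 'ARROW', 'ID', 'SEMI']
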
diff --git a/functions/source/GitPullS3/pycparser/ply/lex.py b/functions/source/GitPullS3/pycparser/ply/lex.py
deleted file mode 100644
index 4bdd76c..0000000
--- a/functions/source/GitPullS3/pycparser/ply/lex.py
+++ /dev/null
@@ -1,1099 +0,0 @@
-# -----------------------------------------------------------------------------
-# ply: lex.py
-#
-# Copyright (C) 2001-2017
-# David M. Beazley (Dabeaz LLC)
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright notice,
-# this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above copyright notice,
-# this list of conditions and the following disclaimer in the documentation
-# and/or other materials provided with the distribution.
-# * Neither the name of the David Beazley or Dabeaz LLC may be used to
-# endorse or promote products derived from this software without
-# specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-# -----------------------------------------------------------------------------
-
-__version__ = '3.10'
-__tabversion__ = '3.10'
-
-import re
-import sys
-import types
-import copy
-import os
-import inspect
-
-# This tuple contains known string types
-try:
- # Python 2.6
- StringTypes = (types.StringType, types.UnicodeType)
-except AttributeError:
- # Python 3.0
- StringTypes = (str, bytes)
-
-# This regular expression is used to match valid token names
-_is_identifier = re.compile(r'^[a-zA-Z0-9_]+$')
-
-# Exception thrown when invalid token encountered and no default error
-# handler is defined.
-class LexError(Exception):
- def __init__(self, message, s):
- self.args = (message,)
- self.text = s
-
-
-# Token class. This class is used to represent the tokens produced.
-class LexToken(object):
- def __str__(self):
- return 'LexToken(%s,%r,%d,%d)' % (self.type, self.value, self.lineno, self.lexpos)
-
- def __repr__(self):
- return str(self)
-
-
-# This object is a stand-in for a logging object created by the
-# logging module.
-
-class PlyLogger(object):
- def __init__(self, f):
- self.f = f
-
- def critical(self, msg, *args, **kwargs):
- self.f.write((msg % args) + '\n')
-
- def warning(self, msg, *args, **kwargs):
- self.f.write('WARNING: ' + (msg % args) + '\n')
-
- def error(self, msg, *args, **kwargs):
- self.f.write('ERROR: ' + (msg % args) + '\n')
-
- info = critical
- debug = critical
-
-
-# Null logger is used when no output is generated. Does nothing.
-class NullLogger(object):
- def __getattribute__(self, name):
- return self
-
- def __call__(self, *args, **kwargs):
- return self
-
-
-# -----------------------------------------------------------------------------
-# === Lexing Engine ===
-#
-# The following Lexer class implements the lexer runtime. There are only
-# a few public methods and attributes:
-#
-# input() - Store a new string in the lexer
-# token() - Get the next token
-# clone() - Clone the lexer
-#
-# lineno - Current line number
-# lexpos - Current position in the input string
-# -----------------------------------------------------------------------------
-
-class Lexer:
- def __init__(self):
- self.lexre = None # Master regular expression. This is a list of
- # tuples (re, findex) where re is a compiled
- # regular expression and findex is a list
- # mapping regex group numbers to rules
- self.lexretext = None # Current regular expression strings
- self.lexstatere = {} # Dictionary mapping lexer states to master regexs
- self.lexstateretext = {} # Dictionary mapping lexer states to regex strings
- self.lexstaterenames = {} # Dictionary mapping lexer states to symbol names
- self.lexstate = 'INITIAL' # Current lexer state
- self.lexstatestack = [] # Stack of lexer states
- self.lexstateinfo = None # State information
- self.lexstateignore = {} # Dictionary of ignored characters for each state
- self.lexstateerrorf = {} # Dictionary of error functions for each state
- self.lexstateeoff = {} # Dictionary of eof functions for each state
- self.lexreflags = 0 # Optional re compile flags
- self.lexdata = None # Actual input data (as a string)
- self.lexpos = 0 # Current position in input text
- self.lexlen = 0 # Length of the input text
- self.lexerrorf = None # Error rule (if any)
- self.lexeoff = None # EOF rule (if any)
- self.lextokens = None # List of valid tokens
- self.lexignore = '' # Ignored characters
- self.lexliterals = '' # Literal characters that can be passed through
- self.lexmodule = None # Module
- self.lineno = 1 # Current line number
- self.lexoptimize = False # Optimized mode
-
- def clone(self, object=None):
- c = copy.copy(self)
-
- # If the object parameter has been supplied, it means we are attaching the
- # lexer to a new object. In this case, we have to rebind all methods in
- # the lexstatere and lexstateerrorf tables.
-
- if object:
- newtab = {}
- for key, ritem in self.lexstatere.items():
- newre = []
- for cre, findex in ritem:
- newfindex = []
- for f in findex:
- if not f or not f[0]:
- newfindex.append(f)
- continue
- newfindex.append((getattr(object, f[0].__name__), f[1]))
- newre.append((cre, newfindex))
- newtab[key] = newre
- c.lexstatere = newtab
- c.lexstateerrorf = {}
- for key, ef in self.lexstateerrorf.items():
- c.lexstateerrorf[key] = getattr(object, ef.__name__)
- c.lexmodule = object
- return c
-
- # ------------------------------------------------------------
- # writetab() - Write lexer information to a table file
- # ------------------------------------------------------------
- def writetab(self, lextab, outputdir=''):
- if isinstance(lextab, types.ModuleType):
- raise IOError("Won't overwrite existing lextab module")
- basetabmodule = lextab.split('.')[-1]
- filename = os.path.join(outputdir, basetabmodule) + '.py'
- with open(filename, 'w') as tf:
- tf.write('# %s.py. This file automatically created by PLY (version %s). Don\'t edit!\n' % (basetabmodule, __version__))
- tf.write('_tabversion = %s\n' % repr(__tabversion__))
- tf.write('_lextokens = set(%s)\n' % repr(tuple(self.lextokens)))
- tf.write('_lexreflags = %s\n' % repr(self.lexreflags))
- tf.write('_lexliterals = %s\n' % repr(self.lexliterals))
- tf.write('_lexstateinfo = %s\n' % repr(self.lexstateinfo))
-
- # Rewrite the lexstatere table, replacing function objects with function names
- tabre = {}
- for statename, lre in self.lexstatere.items():
- titem = []
- for (pat, func), retext, renames in zip(lre, self.lexstateretext[statename], self.lexstaterenames[statename]):
- titem.append((retext, _funcs_to_names(func, renames)))
- tabre[statename] = titem
-
- tf.write('_lexstatere = %s\n' % repr(tabre))
- tf.write('_lexstateignore = %s\n' % repr(self.lexstateignore))
-
- taberr = {}
- for statename, ef in self.lexstateerrorf.items():
- taberr[statename] = ef.__name__ if ef else None
- tf.write('_lexstateerrorf = %s\n' % repr(taberr))
-
- tabeof = {}
- for statename, ef in self.lexstateeoff.items():
- tabeof[statename] = ef.__name__ if ef else None
- tf.write('_lexstateeoff = %s\n' % repr(tabeof))
-
- # ------------------------------------------------------------
- # readtab() - Read lexer information from a tab file
- # ------------------------------------------------------------
- def readtab(self, tabfile, fdict):
- if isinstance(tabfile, types.ModuleType):
- lextab = tabfile
- else:
- exec('import %s' % tabfile)
- lextab = sys.modules[tabfile]
-
- if getattr(lextab, '_tabversion', '0.0') != __tabversion__:
- raise ImportError('Inconsistent PLY version')
-
- self.lextokens = lextab._lextokens
- self.lexreflags = lextab._lexreflags
- self.lexliterals = lextab._lexliterals
- self.lextokens_all = self.lextokens | set(self.lexliterals)
- self.lexstateinfo = lextab._lexstateinfo
- self.lexstateignore = lextab._lexstateignore
- self.lexstatere = {}
- self.lexstateretext = {}
- for statename, lre in lextab._lexstatere.items():
- titem = []
- txtitem = []
- for pat, func_name in lre:
- titem.append((re.compile(pat, lextab._lexreflags), _names_to_funcs(func_name, fdict)))
-
- self.lexstatere[statename] = titem
- self.lexstateretext[statename] = txtitem
-
- self.lexstateerrorf = {}
- for statename, ef in lextab._lexstateerrorf.items():
- self.lexstateerrorf[statename] = fdict[ef]
-
- self.lexstateeoff = {}
- for statename, ef in lextab._lexstateeoff.items():
- self.lexstateeoff[statename] = fdict[ef]
-
- self.begin('INITIAL')
-
- # ------------------------------------------------------------
- # input() - Push a new string into the lexer
- # ------------------------------------------------------------
- def input(self, s):
- # Pull off the first character to see if s looks like a string
- c = s[:1]
- if not isinstance(c, StringTypes):
- raise ValueError('Expected a string')
- self.lexdata = s
- self.lexpos = 0
- self.lexlen = len(s)
-
- # ------------------------------------------------------------
- # begin() - Changes the lexing state
- # ------------------------------------------------------------
- def begin(self, state):
- if state not in self.lexstatere:
- raise ValueError('Undefined state')
- self.lexre = self.lexstatere[state]
- self.lexretext = self.lexstateretext[state]
- self.lexignore = self.lexstateignore.get(state, '')
- self.lexerrorf = self.lexstateerrorf.get(state, None)
- self.lexeoff = self.lexstateeoff.get(state, None)
- self.lexstate = state
-
- # ------------------------------------------------------------
- # push_state() - Changes the lexing state and saves old on stack
- # ------------------------------------------------------------
- def push_state(self, state):
- self.lexstatestack.append(self.lexstate)
- self.begin(state)
-
- # ------------------------------------------------------------
- # pop_state() - Restores the previous state
- # ------------------------------------------------------------
- def pop_state(self):
- self.begin(self.lexstatestack.pop())
-
- # ------------------------------------------------------------
- # current_state() - Returns the current lexing state
- # ------------------------------------------------------------
- def current_state(self):
- return self.lexstate
-
- # ------------------------------------------------------------
- # skip() - Skip ahead n characters
- # ------------------------------------------------------------
- def skip(self, n):
- self.lexpos += n
-
- # ------------------------------------------------------------
- # token() - Return the next token from the Lexer
- #
- # Note: This function has been carefully implemented to be as fast
- # as possible. Don't make changes unless you really know what
- # you are doing
- # ------------------------------------------------------------
- def token(self):
- # Make local copies of frequently referenced attributes
- lexpos = self.lexpos
- lexlen = self.lexlen
- lexignore = self.lexignore
- lexdata = self.lexdata
-
- while lexpos < lexlen:
- # This code provides some short-circuit code for whitespace, tabs, and other ignored characters
- if lexdata[lexpos] in lexignore:
- lexpos += 1
- continue
-
- # Look for a regular expression match
- for lexre, lexindexfunc in self.lexre:
- m = lexre.match(lexdata, lexpos)
- if not m:
- continue
-
- # Create a token for return
- tok = LexToken()
- tok.value = m.group()
- tok.lineno = self.lineno
- tok.lexpos = lexpos
-
- i = m.lastindex
- func, tok.type = lexindexfunc[i]
-
- if not func:
- # If no token type was set, it's an ignored token
- if tok.type:
- self.lexpos = m.end()
- return tok
- else:
- lexpos = m.end()
- break
-
- lexpos = m.end()
-
- # If token is processed by a function, call it
-
- tok.lexer = self # Set additional attributes useful in token rules
- self.lexmatch = m
- self.lexpos = lexpos
-
- newtok = func(tok)
-
- # Every function must return a token. If it returns nothing, we just move to the next token
- if not newtok:
- lexpos = self.lexpos # This is here in case user has updated lexpos.
- lexignore = self.lexignore # This is here in case there was a state change
- break
-
- # Verify type of the token. If not in the token map, raise an error
- if not self.lexoptimize:
- if newtok.type not in self.lextokens_all:
- raise LexError("%s:%d: Rule '%s' returned an unknown token type '%s'" % (
- func.__code__.co_filename, func.__code__.co_firstlineno,
- func.__name__, newtok.type), lexdata[lexpos:])
-
- return newtok
- else:
- # No match, see if in literals
- if lexdata[lexpos] in self.lexliterals:
- tok = LexToken()
- tok.value = lexdata[lexpos]
- tok.lineno = self.lineno
- tok.type = tok.value
- tok.lexpos = lexpos
- self.lexpos = lexpos + 1
- return tok
-
- # No match. Call t_error() if defined.
- if self.lexerrorf:
- tok = LexToken()
- tok.value = self.lexdata[lexpos:]
- tok.lineno = self.lineno
- tok.type = 'error'
- tok.lexer = self
- tok.lexpos = lexpos
- self.lexpos = lexpos
- newtok = self.lexerrorf(tok)
- if lexpos == self.lexpos:
- # Error method didn't change text position at all. This is an error.
- raise LexError("Scanning error. Illegal character '%s'" % (lexdata[lexpos]), lexdata[lexpos:])
- lexpos = self.lexpos
- if not newtok:
- continue
- return newtok
-
- self.lexpos = lexpos
- raise LexError("Illegal character '%s' at index %d" % (lexdata[lexpos], lexpos), lexdata[lexpos:])
-
- if self.lexeoff:
- tok = LexToken()
- tok.type = 'eof'
- tok.value = ''
- tok.lineno = self.lineno
- tok.lexpos = lexpos
- tok.lexer = self
- self.lexpos = lexpos
- newtok = self.lexeoff(tok)
- return newtok
-
- self.lexpos = lexpos + 1
- if self.lexdata is None:
- raise RuntimeError('No input string given with input()')
- return None
-
- # Iterator interface
- def __iter__(self):
- return self
-
- def next(self):
- t = self.token()
- if t is None:
- raise StopIteration
- return t
-
- __next__ = next
-
-# -----------------------------------------------------------------------------
-# === Lex Builder ===
-#
-# The functions and classes below are used to collect lexing information
-# and build a Lexer object from it.
-# -----------------------------------------------------------------------------
-
-# -----------------------------------------------------------------------------
-# _get_regex(func)
-#
-# Returns the regular expression assigned to a function either as a doc string
-# or as a .regex attribute attached by the @TOKEN decorator.
-# -----------------------------------------------------------------------------
-def _get_regex(func):
- return getattr(func, 'regex', func.__doc__)
-
-# -----------------------------------------------------------------------------
-# get_caller_module_dict()
-#
-# This function returns a dictionary containing all of the symbols defined within
-# a caller further down the call stack. This is used to get the environment
-# associated with the lex() call if none was provided.
-# -----------------------------------------------------------------------------
-def get_caller_module_dict(levels):
- f = sys._getframe(levels)
- ldict = f.f_globals.copy()
- if f.f_globals != f.f_locals:
- ldict.update(f.f_locals)
- return ldict
-
-# -----------------------------------------------------------------------------
-# _funcs_to_names()
-#
-# Given a list of regular expression functions, this converts it to a list
-# suitable for output to a table file
-# -----------------------------------------------------------------------------
-def _funcs_to_names(funclist, namelist):
- result = []
- for f, name in zip(funclist, namelist):
- if f and f[0]:
- result.append((name, f[1]))
- else:
- result.append(f)
- return result
-
-# -----------------------------------------------------------------------------
-# _names_to_funcs()
-#
-# Given a list of regular expression function names, this converts it back to
-# functions.
-# -----------------------------------------------------------------------------
-def _names_to_funcs(namelist, fdict):
- result = []
- for n in namelist:
- if n and n[0]:
- result.append((fdict[n[0]], n[1]))
- else:
- result.append(n)
- return result
-
-# -----------------------------------------------------------------------------
-# _form_master_re()
-#
-# This function takes a list of all of the regex components and attempts to
-# form the master regular expression. Given limitations in the Python re
-# module, it may be necessary to break the master regex into separate expressions.
-# -----------------------------------------------------------------------------
-def _form_master_re(relist, reflags, ldict, toknames):
- if not relist:
- return []
- regex = '|'.join(relist)
- try:
- lexre = re.compile(regex, reflags)
-
- # Build the index to function map for the matching engine
- lexindexfunc = [None] * (max(lexre.groupindex.values()) + 1)
- lexindexnames = lexindexfunc[:]
-
- for f, i in lexre.groupindex.items():
- handle = ldict.get(f, None)
- if type(handle) in (types.FunctionType, types.MethodType):
- lexindexfunc[i] = (handle, toknames[f])
- lexindexnames[i] = f
- elif handle is not None:
- lexindexnames[i] = f
- if f.find('ignore_') > 0:
- lexindexfunc[i] = (None, None)
- else:
- lexindexfunc[i] = (None, toknames[f])
-
- return [(lexre, lexindexfunc)], [regex], [lexindexnames]
- except Exception:
- m = int(len(relist)/2)
- if m == 0:
- m = 1
- llist, lre, lnames = _form_master_re(relist[:m], reflags, ldict, toknames)
- rlist, rre, rnames = _form_master_re(relist[m:], reflags, ldict, toknames)
- return (llist+rlist), (lre+rre), (lnames+rnames)
-
-# -----------------------------------------------------------------------------
-# def _statetoken(s,names)
-#
-# Given a declaration name s of the form "t_" and a dictionary whose keys are
-# state names, this function returns a tuple (states,tokenname) where states
-# is a tuple of state names and tokenname is the name of the token. For example,
-# calling this with s = "t_foo_bar_SPAM" might return (('foo','bar'),'SPAM')
-# -----------------------------------------------------------------------------
-def _statetoken(s, names):
- nonstate = 1
- parts = s.split('_')
- for i, part in enumerate(parts[1:], 1):
- if part not in names and part != 'ANY':
- break
-
- if i > 1:
- states = tuple(parts[1:i])
- else:
- states = ('INITIAL',)
-
- if 'ANY' in states:
- states = tuple(names)
-
- tokenname = '_'.join(parts[i:])
- return (states, tokenname)
-
-
-# -----------------------------------------------------------------------------
-# LexerReflect()
-#
-# This class represents information needed to build a lexer as extracted from a
-# user's input file.
-# -----------------------------------------------------------------------------
-class LexerReflect(object):
- def __init__(self, ldict, log=None, reflags=0):
- self.ldict = ldict
- self.error_func = None
- self.tokens = []
- self.reflags = reflags
- self.stateinfo = {'INITIAL': 'inclusive'}
- self.modules = set()
- self.error = False
- self.log = PlyLogger(sys.stderr) if log is None else log
-
- # Get all of the basic information
- def get_all(self):
- self.get_tokens()
- self.get_literals()
- self.get_states()
- self.get_rules()
-
- # Validate all of the information
- def validate_all(self):
- self.validate_tokens()
- self.validate_literals()
- self.validate_rules()
- return self.error
-
- # Get the tokens map
- def get_tokens(self):
- tokens = self.ldict.get('tokens', None)
- if not tokens:
- self.log.error('No token list is defined')
- self.error = True
- return
-
- if not isinstance(tokens, (list, tuple)):
- self.log.error('tokens must be a list or tuple')
- self.error = True
- return
-
- if not tokens:
- self.log.error('tokens is empty')
- self.error = True
- return
-
- self.tokens = tokens
-
- # Validate the tokens
- def validate_tokens(self):
- terminals = {}
- for n in self.tokens:
- if not _is_identifier.match(n):
- self.log.error("Bad token name '%s'", n)
- self.error = True
- if n in terminals:
- self.log.warning("Token '%s' multiply defined", n)
- terminals[n] = 1
-
- # Get the literals specifier
- def get_literals(self):
- self.literals = self.ldict.get('literals', '')
- if not self.literals:
- self.literals = ''
-
- # Validate literals
- def validate_literals(self):
- try:
- for c in self.literals:
- if not isinstance(c, StringTypes) or len(c) > 1:
- self.log.error('Invalid literal %s. Must be a single character', repr(c))
- self.error = True
-
- except TypeError:
- self.log.error('Invalid literals specification. literals must be a sequence of characters')
- self.error = True
-
- def get_states(self):
- self.states = self.ldict.get('states', None)
- # Build statemap
- if self.states:
- if not isinstance(self.states, (tuple, list)):
- self.log.error('states must be defined as a tuple or list')
- self.error = True
- else:
- for s in self.states:
- if not isinstance(s, tuple) or len(s) != 2:
- self.log.error("Invalid state specifier %s. Must be a tuple (statename,'exclusive|inclusive')", repr(s))
- self.error = True
- continue
- name, statetype = s
- if not isinstance(name, StringTypes):
- self.log.error('State name %s must be a string', repr(name))
- self.error = True
- continue
- if not (statetype == 'inclusive' or statetype == 'exclusive'):
- self.log.error("State type for state %s must be 'inclusive' or 'exclusive'", name)
- self.error = True
- continue
- if name in self.stateinfo:
- self.log.error("State '%s' already defined", name)
- self.error = True
- continue
- self.stateinfo[name] = statetype
-
- # Get all of the symbols with a t_ prefix and sort them into various
- # categories (functions, strings, error functions, and ignore characters)
-
- def get_rules(self):
- tsymbols = [f for f in self.ldict if f[:2] == 't_']
-
- # Now build up a list of functions and a list of strings
- self.toknames = {} # Mapping of symbols to token names
- self.funcsym = {} # Symbols defined as functions
- self.strsym = {} # Symbols defined as strings
- self.ignore = {} # Ignore strings by state
- self.errorf = {} # Error functions by state
- self.eoff = {} # EOF functions by state
-
- for s in self.stateinfo:
- self.funcsym[s] = []
- self.strsym[s] = []
-
- if len(tsymbols) == 0:
- self.log.error('No rules of the form t_rulename are defined')
- self.error = True
- return
-
- for f in tsymbols:
- t = self.ldict[f]
- states, tokname = _statetoken(f, self.stateinfo)
- self.toknames[f] = tokname
-
- if hasattr(t, '__call__'):
- if tokname == 'error':
- for s in states:
- self.errorf[s] = t
- elif tokname == 'eof':
- for s in states:
- self.eoff[s] = t
- elif tokname == 'ignore':
- line = t.__code__.co_firstlineno
- file = t.__code__.co_filename
- self.log.error("%s:%d: Rule '%s' must be defined as a string", file, line, t.__name__)
- self.error = True
- else:
- for s in states:
- self.funcsym[s].append((f, t))
- elif isinstance(t, StringTypes):
- if tokname == 'ignore':
- for s in states:
- self.ignore[s] = t
- if '\\' in t:
- self.log.warning("%s contains a literal backslash '\\'", f)
-
- elif tokname == 'error':
- self.log.error("Rule '%s' must be defined as a function", f)
- self.error = True
- else:
- for s in states:
- self.strsym[s].append((f, t))
- else:
- self.log.error('%s not defined as a function or string', f)
- self.error = True
-
- # Sort the functions by line number
- for f in self.funcsym.values():
- f.sort(key=lambda x: x[1].__code__.co_firstlineno)
-
- # Sort the strings by regular expression length
- for s in self.strsym.values():
- s.sort(key=lambda x: len(x[1]), reverse=True)
-
- # Validate all of the t_rules collected
- def validate_rules(self):
- for state in self.stateinfo:
- # Validate all rules defined by functions
-
- for fname, f in self.funcsym[state]:
- line = f.__code__.co_firstlineno
- file = f.__code__.co_filename
- module = inspect.getmodule(f)
- self.modules.add(module)
-
- tokname = self.toknames[fname]
- if isinstance(f, types.MethodType):
- reqargs = 2
- else:
- reqargs = 1
- nargs = f.__code__.co_argcount
- if nargs > reqargs:
- self.log.error("%s:%d: Rule '%s' has too many arguments", file, line, f.__name__)
- self.error = True
- continue
-
- if nargs < reqargs:
- self.log.error("%s:%d: Rule '%s' requires an argument", file, line, f.__name__)
- self.error = True
- continue
-
- if not _get_regex(f):
- self.log.error("%s:%d: No regular expression defined for rule '%s'", file, line, f.__name__)
- self.error = True
- continue
-
- try:
- c = re.compile('(?P<%s>%s)' % (fname, _get_regex(f)), self.reflags)
- if c.match(''):
- self.log.error("%s:%d: Regular expression for rule '%s' matches empty string", file, line, f.__name__)
- self.error = True
- except re.error as e:
- self.log.error("%s:%d: Invalid regular expression for rule '%s'. %s", file, line, f.__name__, e)
- if '#' in _get_regex(f):
- self.log.error("%s:%d. Make sure '#' in rule '%s' is escaped with '\\#'", file, line, f.__name__)
- self.error = True
-
- # Validate all rules defined by strings
- for name, r in self.strsym[state]:
- tokname = self.toknames[name]
- if tokname == 'error':
- self.log.error("Rule '%s' must be defined as a function", name)
- self.error = True
- continue
-
- if tokname not in self.tokens and tokname.find('ignore_') < 0:
- self.log.error("Rule '%s' defined for an unspecified token %s", name, tokname)
- self.error = True
- continue
-
- try:
- c = re.compile('(?P<%s>%s)' % (name, r), self.reflags)
- if (c.match('')):
- self.log.error("Regular expression for rule '%s' matches empty string", name)
- self.error = True
- except re.error as e:
- self.log.error("Invalid regular expression for rule '%s'. %s", name, e)
- if '#' in r:
- self.log.error("Make sure '#' in rule '%s' is escaped with '\\#'", name)
- self.error = True
-
- if not self.funcsym[state] and not self.strsym[state]:
- self.log.error("No rules defined for state '%s'", state)
- self.error = True
-
- # Validate the error function
- efunc = self.errorf.get(state, None)
- if efunc:
- f = efunc
- line = f.__code__.co_firstlineno
- file = f.__code__.co_filename
- module = inspect.getmodule(f)
- self.modules.add(module)
-
- if isinstance(f, types.MethodType):
- reqargs = 2
- else:
- reqargs = 1
- nargs = f.__code__.co_argcount
- if nargs > reqargs:
- self.log.error("%s:%d: Rule '%s' has too many arguments", file, line, f.__name__)
- self.error = True
-
- if nargs < reqargs:
- self.log.error("%s:%d: Rule '%s' requires an argument", file, line, f.__name__)
- self.error = True
-
- for module in self.modules:
- self.validate_module(module)
-
- # -----------------------------------------------------------------------------
- # validate_module()
- #
- # This checks to see if there are duplicated t_rulename() functions or strings
- # in the parser input file. This is done using a simple regular expression
- # match on each line in the source code of the given module.
- # -----------------------------------------------------------------------------
-
- def validate_module(self, module):
- try:
- lines, linen = inspect.getsourcelines(module)
- except IOError:
- return
-
- fre = re.compile(r'\s*def\s+(t_[a-zA-Z_0-9]*)\(')
- sre = re.compile(r'\s*(t_[a-zA-Z_0-9]*)\s*=')
-
- counthash = {}
- linen += 1
- for line in lines:
- m = fre.match(line)
- if not m:
- m = sre.match(line)
- if m:
- name = m.group(1)
- prev = counthash.get(name)
- if not prev:
- counthash[name] = linen
- else:
- filename = inspect.getsourcefile(module)
- self.log.error('%s:%d: Rule %s redefined. Previously defined on line %d', filename, linen, name, prev)
- self.error = True
- linen += 1
-
-# -----------------------------------------------------------------------------
-# lex(module)
-#
-# Build all of the regular expression rules from definitions in the supplied module
-# -----------------------------------------------------------------------------
-def lex(module=None, object=None, debug=False, optimize=False, lextab='lextab',
- reflags=int(re.VERBOSE), nowarn=False, outputdir=None, debuglog=None, errorlog=None):
-
- if lextab is None:
- lextab = 'lextab'
-
- global lexer
-
- ldict = None
- stateinfo = {'INITIAL': 'inclusive'}
- lexobj = Lexer()
- lexobj.lexoptimize = optimize
- global token, input
-
- if errorlog is None:
- errorlog = PlyLogger(sys.stderr)
-
- if debug:
- if debuglog is None:
- debuglog = PlyLogger(sys.stderr)
-
- # Get the module dictionary used for the lexer
- if object:
- module = object
-
-    # Get the symbol dictionary from the module
- if module:
- _items = [(k, getattr(module, k)) for k in dir(module)]
- ldict = dict(_items)
- # If no __file__ attribute is available, try to obtain it from the __module__ instead
- if '__file__' not in ldict:
- ldict['__file__'] = sys.modules[ldict['__module__']].__file__
- else:
- ldict = get_caller_module_dict(2)
-
-    # Determine if the module is part of a package or not.
-    # If so, fix the lextab setting so that tables load correctly
- pkg = ldict.get('__package__')
- if pkg and isinstance(lextab, str):
- if '.' not in lextab:
- lextab = pkg + '.' + lextab
-
-    # Collect lexer information from the dictionary
- linfo = LexerReflect(ldict, log=errorlog, reflags=reflags)
- linfo.get_all()
- if not optimize:
- if linfo.validate_all():
- raise SyntaxError("Can't build lexer")
-
- if optimize and lextab:
- try:
- lexobj.readtab(lextab, ldict)
- token = lexobj.token
- input = lexobj.input
- lexer = lexobj
- return lexobj
-
- except ImportError:
- pass
-
- # Dump some basic debugging information
- if debug:
- debuglog.info('lex: tokens = %r', linfo.tokens)
- debuglog.info('lex: literals = %r', linfo.literals)
- debuglog.info('lex: states = %r', linfo.stateinfo)
-
- # Build a dictionary of valid token names
- lexobj.lextokens = set()
- for n in linfo.tokens:
- lexobj.lextokens.add(n)
-
- # Get literals specification
- if isinstance(linfo.literals, (list, tuple)):
- lexobj.lexliterals = type(linfo.literals[0])().join(linfo.literals)
- else:
- lexobj.lexliterals = linfo.literals
-
- lexobj.lextokens_all = lexobj.lextokens | set(lexobj.lexliterals)
-
- # Get the stateinfo dictionary
- stateinfo = linfo.stateinfo
-
- regexs = {}
- # Build the master regular expressions
- for state in stateinfo:
- regex_list = []
-
- # Add rules defined by functions first
- for fname, f in linfo.funcsym[state]:
- line = f.__code__.co_firstlineno
- file = f.__code__.co_filename
- regex_list.append('(?P<%s>%s)' % (fname, _get_regex(f)))
- if debug:
- debuglog.info("lex: Adding rule %s -> '%s' (state '%s')", fname, _get_regex(f), state)
-
- # Now add all of the simple rules
- for name, r in linfo.strsym[state]:
- regex_list.append('(?P<%s>%s)' % (name, r))
- if debug:
- debuglog.info("lex: Adding rule %s -> '%s' (state '%s')", name, r, state)
-
- regexs[state] = regex_list
-
- # Build the master regular expressions
-
- if debug:
- debuglog.info('lex: ==== MASTER REGEXS FOLLOW ====')
-
- for state in regexs:
- lexre, re_text, re_names = _form_master_re(regexs[state], reflags, ldict, linfo.toknames)
- lexobj.lexstatere[state] = lexre
- lexobj.lexstateretext[state] = re_text
- lexobj.lexstaterenames[state] = re_names
- if debug:
- for i, text in enumerate(re_text):
- debuglog.info("lex: state '%s' : regex[%d] = '%s'", state, i, text)
-
- # For inclusive states, we need to add the regular expressions from the INITIAL state
- for state, stype in stateinfo.items():
- if state != 'INITIAL' and stype == 'inclusive':
- lexobj.lexstatere[state].extend(lexobj.lexstatere['INITIAL'])
- lexobj.lexstateretext[state].extend(lexobj.lexstateretext['INITIAL'])
- lexobj.lexstaterenames[state].extend(lexobj.lexstaterenames['INITIAL'])
-
- lexobj.lexstateinfo = stateinfo
- lexobj.lexre = lexobj.lexstatere['INITIAL']
- lexobj.lexretext = lexobj.lexstateretext['INITIAL']
- lexobj.lexreflags = reflags
-
- # Set up ignore variables
- lexobj.lexstateignore = linfo.ignore
- lexobj.lexignore = lexobj.lexstateignore.get('INITIAL', '')
-
- # Set up error functions
- lexobj.lexstateerrorf = linfo.errorf
- lexobj.lexerrorf = linfo.errorf.get('INITIAL', None)
- if not lexobj.lexerrorf:
- errorlog.warning('No t_error rule is defined')
-
- # Set up eof functions
- lexobj.lexstateeoff = linfo.eoff
- lexobj.lexeoff = linfo.eoff.get('INITIAL', None)
-
- # Check state information for ignore and error rules
- for s, stype in stateinfo.items():
- if stype == 'exclusive':
- if s not in linfo.errorf:
- errorlog.warning("No error rule is defined for exclusive state '%s'", s)
- if s not in linfo.ignore and lexobj.lexignore:
- errorlog.warning("No ignore rule is defined for exclusive state '%s'", s)
- elif stype == 'inclusive':
- if s not in linfo.errorf:
- linfo.errorf[s] = linfo.errorf.get('INITIAL', None)
- if s not in linfo.ignore:
- linfo.ignore[s] = linfo.ignore.get('INITIAL', '')
-
- # Create global versions of the token() and input() functions
- token = lexobj.token
- input = lexobj.input
- lexer = lexobj
-
- # If in optimize mode, we write the lextab
- if lextab and optimize:
- if outputdir is None:
- # If no output directory is set, the location of the output files
- # is determined according to the following rules:
- # - If lextab specifies a package, files go into that package directory
- # - Otherwise, files go in the same directory as the specifying module
- if isinstance(lextab, types.ModuleType):
- srcfile = lextab.__file__
- else:
- if '.' not in lextab:
- srcfile = ldict['__file__']
- else:
- parts = lextab.split('.')
- pkgname = '.'.join(parts[:-1])
- exec('import %s' % pkgname)
- srcfile = getattr(sys.modules[pkgname], '__file__', '')
- outputdir = os.path.dirname(srcfile)
- try:
- lexobj.writetab(lextab, outputdir)
- except IOError as e:
- errorlog.warning("Couldn't write lextab module %r. %s" % (lextab, e))
-
- return lexobj
-
-# -----------------------------------------------------------------------------
-# runmain()
-#
-# This runs the lexer as a main program
-# -----------------------------------------------------------------------------
-
-def runmain(lexer=None, data=None):
- if not data:
- try:
- filename = sys.argv[1]
- f = open(filename)
- data = f.read()
- f.close()
- except IndexError:
- sys.stdout.write('Reading from standard input (type EOF to end):\n')
- data = sys.stdin.read()
-
- if lexer:
- _input = lexer.input
- else:
- _input = input
- _input(data)
- if lexer:
- _token = lexer.token
- else:
- _token = token
-
- while True:
- tok = _token()
- if not tok:
- break
- sys.stdout.write('(%s,%r,%d,%d)\n' % (tok.type, tok.value, tok.lineno, tok.lexpos))
-
-# -----------------------------------------------------------------------------
-# @TOKEN(regex)
-#
-# This decorator function can be used to set the regex expression on a function
-# when its docstring might need to be set in an alternative way
-# -----------------------------------------------------------------------------
-
-def TOKEN(r):
- def set_regex(f):
- if hasattr(r, '__call__'):
- f.regex = _get_regex(r)
- else:
- f.regex = r
- return f
- return set_regex
-
-# Alternative spelling of the TOKEN decorator
-Token = TOKEN
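For context on the lexer API being deleted here, a minimal sketch of how a module-level specification typically uses @TOKEN together with lex() and the token loop; the token names, regexes, and the `ply.lex` import path are illustrative assumptions, not taken from this repository:

```python
from ply import lex          # assumes the PLY package is importable as `ply`
from ply.lex import TOKEN

tokens = ('NUMBER', 'PLUS')  # hypothetical token names

digits = r'\d+'

@TOKEN(digits)               # sets t_NUMBER's regex without using its docstring
def t_NUMBER(t):
    t.value = int(t.value)
    return t

t_PLUS = r'\+'
t_ignore = ' \t'

def t_error(t):
    print('Illegal character %r' % t.value[0])
    t.lexer.skip(1)

lexer = lex.lex()            # builds the master regex from this module's t_ rules
lexer.input('1 + 2')
for tok in iter(lexer.token, None):
    print(tok.type, tok.value)

# lex.runmain(lexer, data='1 + 2') would print the same tokens as tuples.
```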
diff --git a/functions/source/GitPullS3/pycparser/ply/yacc.py b/functions/source/GitPullS3/pycparser/ply/yacc.py
deleted file mode 100644
index 03bd86e..0000000
--- a/functions/source/GitPullS3/pycparser/ply/yacc.py
+++ /dev/null
@@ -1,3494 +0,0 @@
-# -----------------------------------------------------------------------------
-# ply: yacc.py
-#
-# Copyright (C) 2001-2017
-# David M. Beazley (Dabeaz LLC)
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright notice,
-# this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above copyright notice,
-# this list of conditions and the following disclaimer in the documentation
-# and/or other materials provided with the distribution.
-# * Neither the name of the David Beazley or Dabeaz LLC may be used to
-# endorse or promote products derived from this software without
-# specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-# -----------------------------------------------------------------------------
-#
-# This implements an LR parser that is constructed from grammar rules defined
-# as Python functions. The grammar is specified by supplying the BNF inside
-# Python documentation strings. The inspiration for this technique was borrowed
-# from John Aycock's Spark parsing system. PLY might be viewed as a cross between
-# Spark and the GNU bison utility.
-#
-# The current implementation is only somewhat object-oriented. The
-# LR parser itself is defined in terms of an object (which allows multiple
-# parsers to co-exist). However, most of the variables used during table
-# construction are defined in terms of global variables. Users shouldn't
-# notice unless they are trying to define multiple parsers at the same
-# time using threads (in which case they should have their head examined).
-#
-# This implementation supports both SLR and LALR(1) parsing. LALR(1)
-# support was originally implemented by Elias Ioup (ezioup@alumni.uchicago.edu),
-# using the algorithm found in Aho, Sethi, and Ullman "Compilers: Principles,
-# Techniques, and Tools" (The Dragon Book). LALR(1) has since been replaced
-# by the more efficient DeRemer and Pennello algorithm.
-#
-# :::::::: WARNING :::::::
-#
-# Construction of LR parsing tables is fairly complicated and expensive.
-# To make this module run fast, a *LOT* of work has been put into
-# optimization---often at the expensive of readability and what might
-# consider to be good Python "coding style." Modify the code at your
-# own risk!
-# ----------------------------------------------------------------------------
-
-import re
-import types
-import sys
-import os.path
-import inspect
-import base64
-import warnings
-
-__version__ = '3.10'
-__tabversion__ = '3.10'
-
-#-----------------------------------------------------------------------------
-# === User configurable parameters ===
-#
-# Change these to modify the default behavior of yacc (if you wish)
-#-----------------------------------------------------------------------------
-
-yaccdebug = True # Debugging mode. If set, yacc generates a
-                               # 'parser.out' file in the current directory
-
-debug_file = 'parser.out' # Default name of the debugging file
-tab_module = 'parsetab' # Default name of the table module
-default_lr = 'LALR' # Default LR table generation method
-
-error_count = 3 # Number of symbols that must be shifted to leave recovery mode
-
-yaccdevel = False # Set to True if developing yacc. This turns off optimized
- # implementations of certain functions.
-
-resultlimit = 40 # Size limit of results when running in debug mode.
-
-pickle_protocol = 0 # Protocol to use when writing pickle files
-
-# String type-checking compatibility
-if sys.version_info[0] < 3:
- string_types = basestring
-else:
- string_types = str
-
-MAXINT = sys.maxsize
-
-# This object is a stand-in for a logging object created by the
-# logging module. PLY will use this by default to create things
-# such as the parser.out file. If a user wants more detailed
-# information, they can create their own logging object and pass
-# it into PLY.
-
-class PlyLogger(object):
- def __init__(self, f):
- self.f = f
-
- def debug(self, msg, *args, **kwargs):
- self.f.write((msg % args) + '\n')
-
- info = debug
-
- def warning(self, msg, *args, **kwargs):
- self.f.write('WARNING: ' + (msg % args) + '\n')
-
- def error(self, msg, *args, **kwargs):
- self.f.write('ERROR: ' + (msg % args) + '\n')
-
- critical = debug
-
-# Null logger is used when no output is generated. Does nothing.
-class NullLogger(object):
- def __getattribute__(self, name):
- return self
-
- def __call__(self, *args, **kwargs):
- return self
-
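A brief sketch of how these logger objects are used in practice; the file name and the idea of passing them via the debuglog/errorlog keyword arguments (as in the lex() signature earlier in this diff) are assumptions for illustration:

```python
import sys

# Route warnings and errors to stderr, detailed debug output to a file.
errlog = PlyLogger(sys.stderr)
dbglog = PlyLogger(open('parser_debug.out', 'w'))   # hypothetical file name

errlog.warning('token %r defined but not used', 'UNUSED')
dbglog.info('state %d: %s', 3, 'expr -> expr . PLUS term')

# A NullLogger silently swallows every call chained off of it.
NullLogger().info('this goes nowhere')
```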
-# Exception raised for yacc-related errors
-class YaccError(Exception):
- pass
-
-# Format the result message that the parser produces when running in debug mode.
-def format_result(r):
- repr_str = repr(r)
- if '\n' in repr_str:
- repr_str = repr(repr_str)
- if len(repr_str) > resultlimit:
- repr_str = repr_str[:resultlimit] + ' ...'
- result = '<%s @ 0x%x> (%s)' % (type(r).__name__, id(r), repr_str)
- return result
-
-# Format stack entries when the parser is running in debug mode
-def format_stack_entry(r):
- repr_str = repr(r)
- if '\n' in repr_str:
- repr_str = repr(repr_str)
- if len(repr_str) < 16:
- return repr_str
- else:
- return '<%s @ 0x%x>' % (type(r).__name__, id(r))
-
-# Panic mode error recovery support. This feature is being reworked--much of the
-# code here is to offer a deprecation/backwards compatible transition
-
-_errok = None
-_token = None
-_restart = None
-_warnmsg = '''PLY: Don't use global functions errok(), token(), and restart() in p_error().
-Instead, invoke the methods on the associated parser instance:
-
- def p_error(p):
- ...
- # Use parser.errok(), parser.token(), parser.restart()
- ...
-
- parser = yacc.yacc()
-'''
-
-def errok():
- warnings.warn(_warnmsg)
- return _errok()
-
-def restart():
- warnings.warn(_warnmsg)
- return _restart()
-
-def token():
- warnings.warn(_warnmsg)
- return _token()
-
-# Utility function to call the p_error() function with some deprecation hacks
-def call_errorfunc(errorfunc, token, parser):
- global _errok, _token, _restart
- _errok = parser.errok
- _token = parser.token
- _restart = parser.restart
- r = errorfunc(token)
- try:
- del _errok, _token, _restart
- except NameError:
- pass
- return r
-
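Following the warning text above, a minimal sketch of an error handler that uses the parser instance's methods rather than the deprecated module-level functions; it is shown in isolation, so the SEMI token, the surrounding grammar module, and the yacc.yacc() call are assumptions:

```python
def p_error(p):
    if p is None:
        print('Unexpected end of input')
        return
    print('Syntax error at token %s (value %r)' % (p.type, p.value))
    # Panic-mode recovery: discard tokens up to a statement separator,
    # then tell the parser the error has been handled.
    while True:
        tok = parser.token()        # bound method, not the global token()
        if not tok or tok.type == 'SEMI':
            break
    parser.errok()

parser = yacc.yacc()
```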
-#-----------------------------------------------------------------------------
-# === LR Parsing Engine ===
-#
-# The following classes are used for the LR parser itself. These are not
-# used during table construction and are independent of the actual LR
-# table generation algorithm
-#-----------------------------------------------------------------------------
-
-# This class is used to hold non-terminal grammar symbols during parsing.
-# It normally has the following attributes set:
-# .type = Grammar symbol type
-# .value = Symbol value
-# .lineno = Starting line number
-# .endlineno = Ending line number (optional, set automatically)
-# .lexpos = Starting lex position
-# .endlexpos = Ending lex position (optional, set automatically)
-
-class YaccSymbol:
- def __str__(self):
- return self.type
-
- def __repr__(self):
- return str(self)
-
-# This class is a wrapper around the objects actually passed to each
-# grammar rule. Index lookup and assignment actually assign the
-# .value attribute of the underlying YaccSymbol object.
-# The lineno() method returns the line number of a given
-# item (or 0 if not defined). The linespan() method returns
-# a tuple of (startline,endline) representing the range of lines
-# for a symbol. The lexspan() method returns a tuple (lexpos,endlexpos)
-# representing the range of positional information for a symbol.
-
-class YaccProduction:
- def __init__(self, s, stack=None):
- self.slice = s
- self.stack = stack
- self.lexer = None
- self.parser = None
-
- def __getitem__(self, n):
- if isinstance(n, slice):
- return [s.value for s in self.slice[n]]
- elif n >= 0:
- return self.slice[n].value
- else:
- return self.stack[n].value
-
- def __setitem__(self, n, v):
- self.slice[n].value = v
-
- def __getslice__(self, i, j):
- return [s.value for s in self.slice[i:j]]
-
- def __len__(self):
- return len(self.slice)
-
- def lineno(self, n):
- return getattr(self.slice[n], 'lineno', 0)
-
- def set_lineno(self, n, lineno):
- self.slice[n].lineno = lineno
-
- def linespan(self, n):
- startline = getattr(self.slice[n], 'lineno', 0)
- endline = getattr(self.slice[n], 'endlineno', startline)
- return startline, endline
-
- def lexpos(self, n):
- return getattr(self.slice[n], 'lexpos', 0)
-
- def lexspan(self, n):
- startpos = getattr(self.slice[n], 'lexpos', 0)
- endpos = getattr(self.slice[n], 'endlexpos', startpos)
- return startpos, endpos
-
- def error(self):
- raise SyntaxError
-
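A sketch of how a grammar rule's action function typically uses this wrapper; the rule, token names, and values are hypothetical, and p stands for the YaccProduction instance the parser passes in:

```python
def p_expression_plus(p):
    'expression : expression PLUS term'
    # p[1] and p[3] read the .value of the matched symbols; assigning to
    # p[0] sets the value of the resulting 'expression' symbol.
    p[0] = p[1] + p[3]

    # Positional helpers described above:
    line = p.lineno(1)       # starting line of the left operand
    lines = p.linespan(1)    # (startline, endline) for that symbol
    span = p.lexspan(3)      # (lexpos, endlexpos) for the right operand
```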
-# -----------------------------------------------------------------------------
-# == LRParser ==
-#
-# The LR Parsing engine.
-# -----------------------------------------------------------------------------
-
-class LRParser:
- def __init__(self, lrtab, errorf):
- self.productions = lrtab.lr_productions
- self.action = lrtab.lr_action
- self.goto = lrtab.lr_goto
- self.errorfunc = errorf
- self.set_defaulted_states()
- self.errorok = True
-
- def errok(self):
- self.errorok = True
-
- def restart(self):
- del self.statestack[:]
- del self.symstack[:]
- sym = YaccSymbol()
- sym.type = '$end'
- self.symstack.append(sym)
- self.statestack.append(0)
-
- # Defaulted state support.
- # This method identifies parser states where there is only one possible reduction action.
-    # For such states, the parser can choose to make a rule reduction without consuming
- # the next look-ahead token. This delayed invocation of the tokenizer can be useful in
- # certain kinds of advanced parsing situations where the lexer and parser interact with
- # each other or change states (i.e., manipulation of scope, lexer states, etc.).
- #
- # See: http://www.gnu.org/software/bison/manual/html_node/Default-Reductions.html#Default-Reductions
- def set_defaulted_states(self):
- self.defaulted_states = {}
- for state, actions in self.action.items():
- rules = list(actions.values())
- if len(rules) == 1 and rules[0] < 0:
- self.defaulted_states[state] = rules[0]
-
- def disable_defaulted_states(self):
- self.defaulted_states = {}
-
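To make the rule above concrete, a small standalone illustration of how set_defaulted_states() classifies states; the action-table fragment is invented (positive entries are shifts, negative entries are reductions by rule -t):

```python
# Hypothetical slice of an LR action table: state -> {lookahead: action}
action = {
    5: {'SEMI': -4},                # exactly one possible action, and it is a reduce
    6: {'PLUS': 10, 'MINUS': 11},   # several possible actions: never defaulted
}

defaulted_states = {}
for state, acts in action.items():
    rules = list(acts.values())
    if len(rules) == 1 and rules[0] < 0:
        defaulted_states[state] = rules[0]

print(defaulted_states)   # {5: -4}
```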
- def parse(self, input=None, lexer=None, debug=False, tracking=False, tokenfunc=None):
- if debug or yaccdevel:
- if isinstance(debug, int):
- debug = PlyLogger(sys.stderr)
- return self.parsedebug(input, lexer, debug, tracking, tokenfunc)
- elif tracking:
- return self.parseopt(input, lexer, debug, tracking, tokenfunc)
- else:
- return self.parseopt_notrack(input, lexer, debug, tracking, tokenfunc)
-
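In terms of usage, the flag combinations above select an engine roughly as follows; parser and data are assumed to come from the usual yacc.yacc() setup and are not defined in this snippet:

```python
result = parser.parse(data)                    # fastest path: parseopt_notrack()
result = parser.parse(data, tracking=True)     # keeps line/position spans: parseopt()
result = parser.parse(data, debug=True)        # full trace: parsedebug()

# debug may also be a logger; a bool/int is promoted to PlyLogger(sys.stderr).
result = parser.parse(data, debug=PlyLogger(open('trace.out', 'w')))
```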
-
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
- # parsedebug().
- #
- # This is the debugging enabled version of parse(). All changes made to the
- # parsing engine should be made here. Optimized versions of this function
- # are automatically created by the ply/ygen.py script. This script cuts out
- # sections enclosed in markers such as this:
- #
- # #--! DEBUG
- # statements
- # #--! DEBUG
- #
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-
- def parsedebug(self, input=None, lexer=None, debug=False, tracking=False, tokenfunc=None):
- #--! parsedebug-start
- lookahead = None # Current lookahead symbol
- lookaheadstack = [] # Stack of lookahead symbols
- actions = self.action # Local reference to action table (to avoid lookup on self.)
- goto = self.goto # Local reference to goto table (to avoid lookup on self.)
- prod = self.productions # Local reference to production list (to avoid lookup on self.)
- defaulted_states = self.defaulted_states # Local reference to defaulted states
- pslice = YaccProduction(None) # Production object passed to grammar rules
- errorcount = 0 # Used during error recovery
-
- #--! DEBUG
- debug.info('PLY: PARSE DEBUG START')
- #--! DEBUG
-
- # If no lexer was given, we will try to use the lex module
- if not lexer:
- from . import lex
- lexer = lex.lexer
-
- # Set up the lexer and parser objects on pslice
- pslice.lexer = lexer
- pslice.parser = self
-
- # If input was supplied, pass to lexer
- if input is not None:
- lexer.input(input)
-
- if tokenfunc is None:
- # Tokenize function
- get_token = lexer.token
- else:
- get_token = tokenfunc
-
- # Set the parser() token method (sometimes used in error recovery)
- self.token = get_token
-
- # Set up the state and symbol stacks
-
- statestack = [] # Stack of parsing states
- self.statestack = statestack
- symstack = [] # Stack of grammar symbols
- self.symstack = symstack
-
- pslice.stack = symstack # Put in the production
- errtoken = None # Err token
-
- # The start state is assumed to be (0,$end)
-
- statestack.append(0)
- sym = YaccSymbol()
- sym.type = '$end'
- symstack.append(sym)
- state = 0
- while True:
- # Get the next symbol on the input. If a lookahead symbol
- # is already set, we just use that. Otherwise, we'll pull
- # the next token off of the lookaheadstack or from the lexer
-
- #--! DEBUG
- debug.debug('')
- debug.debug('State : %s', state)
- #--! DEBUG
-
- if state not in defaulted_states:
- if not lookahead:
- if not lookaheadstack:
- lookahead = get_token() # Get the next token
- else:
- lookahead = lookaheadstack.pop()
- if not lookahead:
- lookahead = YaccSymbol()
- lookahead.type = '$end'
-
- # Check the action table
- ltype = lookahead.type
- t = actions[state].get(ltype)
- else:
- t = defaulted_states[state]
- #--! DEBUG
- debug.debug('Defaulted state %s: Reduce using %d', state, -t)
- #--! DEBUG
-
- #--! DEBUG
- debug.debug('Stack : %s',
- ('%s . %s' % (' '.join([xx.type for xx in symstack][1:]), str(lookahead))).lstrip())
- #--! DEBUG
-
- if t is not None:
- if t > 0:
- # shift a symbol on the stack
- statestack.append(t)
- state = t
-
- #--! DEBUG
- debug.debug('Action : Shift and goto state %s', t)
- #--! DEBUG
-
- symstack.append(lookahead)
- lookahead = None
-
- # Decrease error count on successful shift
- if errorcount:
- errorcount -= 1
- continue
-
- if t < 0:
- # reduce a symbol on the stack, emit a production
- p = prod[-t]
- pname = p.name
- plen = p.len
-
- # Get production function
- sym = YaccSymbol()
- sym.type = pname # Production name
- sym.value = None
-
- #--! DEBUG
- if plen:
- debug.info('Action : Reduce rule [%s] with %s and goto state %d', p.str,
- '['+','.join([format_stack_entry(_v.value) for _v in symstack[-plen:]])+']',
- goto[statestack[-1-plen]][pname])
- else:
- debug.info('Action : Reduce rule [%s] with %s and goto state %d', p.str, [],
- goto[statestack[-1]][pname])
-
- #--! DEBUG
-
- if plen:
- targ = symstack[-plen-1:]
- targ[0] = sym
-
- #--! TRACKING
- if tracking:
- t1 = targ[1]
- sym.lineno = t1.lineno
- sym.lexpos = t1.lexpos
- t1 = targ[-1]
- sym.endlineno = getattr(t1, 'endlineno', t1.lineno)
- sym.endlexpos = getattr(t1, 'endlexpos', t1.lexpos)
- #--! TRACKING
-
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
- # The code enclosed in this section is duplicated
- # below as a performance optimization. Make sure
- # changes get made in both locations.
-
- pslice.slice = targ
-
- try:
- # Call the grammar rule with our special slice object
- del symstack[-plen:]
- self.state = state
- p.callable(pslice)
- del statestack[-plen:]
- #--! DEBUG
- debug.info('Result : %s', format_result(pslice[0]))
- #--! DEBUG
- symstack.append(sym)
- state = goto[statestack[-1]][pname]
- statestack.append(state)
- except SyntaxError:
- # If an error was set. Enter error recovery state
- lookaheadstack.append(lookahead) # Save the current lookahead token
- symstack.extend(targ[1:-1]) # Put the production slice back on the stack
- statestack.pop() # Pop back one state (before the reduce)
- state = statestack[-1]
- sym.type = 'error'
- sym.value = 'error'
- lookahead = sym
- errorcount = error_count
- self.errorok = False
-
- continue
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-
- else:
-
- #--! TRACKING
- if tracking:
- sym.lineno = lexer.lineno
- sym.lexpos = lexer.lexpos
- #--! TRACKING
-
- targ = [sym]
-
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
- # The code enclosed in this section is duplicated
- # above as a performance optimization. Make sure
- # changes get made in both locations.
-
- pslice.slice = targ
-
- try:
- # Call the grammar rule with our special slice object
- self.state = state
- p.callable(pslice)
- #--! DEBUG
- debug.info('Result : %s', format_result(pslice[0]))
- #--! DEBUG
- symstack.append(sym)
- state = goto[statestack[-1]][pname]
- statestack.append(state)
- except SyntaxError:
- # If an error was set. Enter error recovery state
- lookaheadstack.append(lookahead) # Save the current lookahead token
- statestack.pop() # Pop back one state (before the reduce)
- state = statestack[-1]
- sym.type = 'error'
- sym.value = 'error'
- lookahead = sym
- errorcount = error_count
- self.errorok = False
-
- continue
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-
- if t == 0:
- n = symstack[-1]
- result = getattr(n, 'value', None)
- #--! DEBUG
- debug.info('Done : Returning %s', format_result(result))
- debug.info('PLY: PARSE DEBUG END')
- #--! DEBUG
- return result
-
- if t is None:
-
- #--! DEBUG
- debug.error('Error : %s',
- ('%s . %s' % (' '.join([xx.type for xx in symstack][1:]), str(lookahead))).lstrip())
- #--! DEBUG
-
- # We have some kind of parsing error here. To handle
- # this, we are going to push the current token onto
- # the tokenstack and replace it with an 'error' token.
- # If there are any synchronization rules, they may
- # catch it.
- #
-                # In addition to pushing the error token, we call
- # the user defined p_error() function if this is the
- # first syntax error. This function is only called if
- # errorcount == 0.
- if errorcount == 0 or self.errorok:
- errorcount = error_count
- self.errorok = False
- errtoken = lookahead
- if errtoken.type == '$end':
- errtoken = None # End of file!
- if self.errorfunc:
- if errtoken and not hasattr(errtoken, 'lexer'):
- errtoken.lexer = lexer
- self.state = state
- tok = call_errorfunc(self.errorfunc, errtoken, self)
- if self.errorok:
- # User must have done some kind of panic
- # mode recovery on their own. The
- # returned token is the next lookahead
- lookahead = tok
- errtoken = None
- continue
- else:
- if errtoken:
- if hasattr(errtoken, 'lineno'):
- lineno = lookahead.lineno
- else:
- lineno = 0
- if lineno:
- sys.stderr.write('yacc: Syntax error at line %d, token=%s\n' % (lineno, errtoken.type))
- else:
- sys.stderr.write('yacc: Syntax error, token=%s' % errtoken.type)
- else:
- sys.stderr.write('yacc: Parse error in input. EOF\n')
- return
-
- else:
- errorcount = error_count
-
- # case 1: the statestack only has 1 entry on it. If we're in this state, the
- # entire parse has been rolled back and we're completely hosed. The token is
- # discarded and we just keep going.
-
- if len(statestack) <= 1 and lookahead.type != '$end':
- lookahead = None
- errtoken = None
- state = 0
- # Nuke the pushback stack
- del lookaheadstack[:]
- continue
-
- # case 2: the statestack has a couple of entries on it, but we're
- # at the end of the file. nuke the top entry and generate an error token
-
- # Start nuking entries on the stack
- if lookahead.type == '$end':
- # Whoa. We're really hosed here. Bail out
- return
-
- if lookahead.type != 'error':
- sym = symstack[-1]
- if sym.type == 'error':
- # Hmmm. Error is on top of stack, we'll just nuke input
- # symbol and continue
- #--! TRACKING
- if tracking:
- sym.endlineno = getattr(lookahead, 'lineno', sym.lineno)
- sym.endlexpos = getattr(lookahead, 'lexpos', sym.lexpos)
- #--! TRACKING
- lookahead = None
- continue
-
- # Create the error symbol for the first time and make it the new lookahead symbol
- t = YaccSymbol()
- t.type = 'error'
-
- if hasattr(lookahead, 'lineno'):
- t.lineno = t.endlineno = lookahead.lineno
- if hasattr(lookahead, 'lexpos'):
- t.lexpos = t.endlexpos = lookahead.lexpos
- t.value = lookahead
- lookaheadstack.append(lookahead)
- lookahead = t
- else:
- sym = symstack.pop()
- #--! TRACKING
- if tracking:
- lookahead.lineno = sym.lineno
- lookahead.lexpos = sym.lexpos
- #--! TRACKING
- statestack.pop()
- state = statestack[-1]
-
- continue
-
- # Call an error function here
- raise RuntimeError('yacc: internal parser error!!!\n')
-
- #--! parsedebug-end
-
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
- # parseopt().
- #
- # Optimized version of parse() method. DO NOT EDIT THIS CODE DIRECTLY!
- # This code is automatically generated by the ply/ygen.py script. Make
- # changes to the parsedebug() method instead.
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-
- def parseopt(self, input=None, lexer=None, debug=False, tracking=False, tokenfunc=None):
- #--! parseopt-start
- lookahead = None # Current lookahead symbol
- lookaheadstack = [] # Stack of lookahead symbols
- actions = self.action # Local reference to action table (to avoid lookup on self.)
- goto = self.goto # Local reference to goto table (to avoid lookup on self.)
- prod = self.productions # Local reference to production list (to avoid lookup on self.)
- defaulted_states = self.defaulted_states # Local reference to defaulted states
- pslice = YaccProduction(None) # Production object passed to grammar rules
- errorcount = 0 # Used during error recovery
-
-
- # If no lexer was given, we will try to use the lex module
- if not lexer:
- from . import lex
- lexer = lex.lexer
-
- # Set up the lexer and parser objects on pslice
- pslice.lexer = lexer
- pslice.parser = self
-
- # If input was supplied, pass to lexer
- if input is not None:
- lexer.input(input)
-
- if tokenfunc is None:
- # Tokenize function
- get_token = lexer.token
- else:
- get_token = tokenfunc
-
- # Set the parser() token method (sometimes used in error recovery)
- self.token = get_token
-
- # Set up the state and symbol stacks
-
- statestack = [] # Stack of parsing states
- self.statestack = statestack
- symstack = [] # Stack of grammar symbols
- self.symstack = symstack
-
- pslice.stack = symstack # Put in the production
- errtoken = None # Err token
-
- # The start state is assumed to be (0,$end)
-
- statestack.append(0)
- sym = YaccSymbol()
- sym.type = '$end'
- symstack.append(sym)
- state = 0
- while True:
- # Get the next symbol on the input. If a lookahead symbol
- # is already set, we just use that. Otherwise, we'll pull
- # the next token off of the lookaheadstack or from the lexer
-
-
- if state not in defaulted_states:
- if not lookahead:
- if not lookaheadstack:
- lookahead = get_token() # Get the next token
- else:
- lookahead = lookaheadstack.pop()
- if not lookahead:
- lookahead = YaccSymbol()
- lookahead.type = '$end'
-
- # Check the action table
- ltype = lookahead.type
- t = actions[state].get(ltype)
- else:
- t = defaulted_states[state]
-
-
- if t is not None:
- if t > 0:
- # shift a symbol on the stack
- statestack.append(t)
- state = t
-
-
- symstack.append(lookahead)
- lookahead = None
-
- # Decrease error count on successful shift
- if errorcount:
- errorcount -= 1
- continue
-
- if t < 0:
- # reduce a symbol on the stack, emit a production
- p = prod[-t]
- pname = p.name
- plen = p.len
-
- # Get production function
- sym = YaccSymbol()
- sym.type = pname # Production name
- sym.value = None
-
-
- if plen:
- targ = symstack[-plen-1:]
- targ[0] = sym
-
- #--! TRACKING
- if tracking:
- t1 = targ[1]
- sym.lineno = t1.lineno
- sym.lexpos = t1.lexpos
- t1 = targ[-1]
- sym.endlineno = getattr(t1, 'endlineno', t1.lineno)
- sym.endlexpos = getattr(t1, 'endlexpos', t1.lexpos)
- #--! TRACKING
-
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
- # The code enclosed in this section is duplicated
- # below as a performance optimization. Make sure
- # changes get made in both locations.
-
- pslice.slice = targ
-
- try:
- # Call the grammar rule with our special slice object
- del symstack[-plen:]
- self.state = state
- p.callable(pslice)
- del statestack[-plen:]
- symstack.append(sym)
- state = goto[statestack[-1]][pname]
- statestack.append(state)
- except SyntaxError:
- # If an error was set. Enter error recovery state
- lookaheadstack.append(lookahead) # Save the current lookahead token
- symstack.extend(targ[1:-1]) # Put the production slice back on the stack
- statestack.pop() # Pop back one state (before the reduce)
- state = statestack[-1]
- sym.type = 'error'
- sym.value = 'error'
- lookahead = sym
- errorcount = error_count
- self.errorok = False
-
- continue
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-
- else:
-
- #--! TRACKING
- if tracking:
- sym.lineno = lexer.lineno
- sym.lexpos = lexer.lexpos
- #--! TRACKING
-
- targ = [sym]
-
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
- # The code enclosed in this section is duplicated
- # above as a performance optimization. Make sure
- # changes get made in both locations.
-
- pslice.slice = targ
-
- try:
- # Call the grammar rule with our special slice object
- self.state = state
- p.callable(pslice)
- symstack.append(sym)
- state = goto[statestack[-1]][pname]
- statestack.append(state)
- except SyntaxError:
- # If an error was set. Enter error recovery state
- lookaheadstack.append(lookahead) # Save the current lookahead token
- statestack.pop() # Pop back one state (before the reduce)
- state = statestack[-1]
- sym.type = 'error'
- sym.value = 'error'
- lookahead = sym
- errorcount = error_count
- self.errorok = False
-
- continue
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-
- if t == 0:
- n = symstack[-1]
- result = getattr(n, 'value', None)
- return result
-
- if t is None:
-
-
- # We have some kind of parsing error here. To handle
- # this, we are going to push the current token onto
- # the tokenstack and replace it with an 'error' token.
- # If there are any synchronization rules, they may
- # catch it.
- #
-                # In addition to pushing the error token, we call
- # the user defined p_error() function if this is the
- # first syntax error. This function is only called if
- # errorcount == 0.
- if errorcount == 0 or self.errorok:
- errorcount = error_count
- self.errorok = False
- errtoken = lookahead
- if errtoken.type == '$end':
- errtoken = None # End of file!
- if self.errorfunc:
- if errtoken and not hasattr(errtoken, 'lexer'):
- errtoken.lexer = lexer
- self.state = state
- tok = call_errorfunc(self.errorfunc, errtoken, self)
- if self.errorok:
- # User must have done some kind of panic
- # mode recovery on their own. The
- # returned token is the next lookahead
- lookahead = tok
- errtoken = None
- continue
- else:
- if errtoken:
- if hasattr(errtoken, 'lineno'):
- lineno = lookahead.lineno
- else:
- lineno = 0
- if lineno:
- sys.stderr.write('yacc: Syntax error at line %d, token=%s\n' % (lineno, errtoken.type))
- else:
- sys.stderr.write('yacc: Syntax error, token=%s' % errtoken.type)
- else:
- sys.stderr.write('yacc: Parse error in input. EOF\n')
- return
-
- else:
- errorcount = error_count
-
- # case 1: the statestack only has 1 entry on it. If we're in this state, the
- # entire parse has been rolled back and we're completely hosed. The token is
- # discarded and we just keep going.
-
- if len(statestack) <= 1 and lookahead.type != '$end':
- lookahead = None
- errtoken = None
- state = 0
- # Nuke the pushback stack
- del lookaheadstack[:]
- continue
-
- # case 2: the statestack has a couple of entries on it, but we're
- # at the end of the file. nuke the top entry and generate an error token
-
- # Start nuking entries on the stack
- if lookahead.type == '$end':
- # Whoa. We're really hosed here. Bail out
- return
-
- if lookahead.type != 'error':
- sym = symstack[-1]
- if sym.type == 'error':
- # Hmmm. Error is on top of stack, we'll just nuke input
- # symbol and continue
- #--! TRACKING
- if tracking:
- sym.endlineno = getattr(lookahead, 'lineno', sym.lineno)
- sym.endlexpos = getattr(lookahead, 'lexpos', sym.lexpos)
- #--! TRACKING
- lookahead = None
- continue
-
- # Create the error symbol for the first time and make it the new lookahead symbol
- t = YaccSymbol()
- t.type = 'error'
-
- if hasattr(lookahead, 'lineno'):
- t.lineno = t.endlineno = lookahead.lineno
- if hasattr(lookahead, 'lexpos'):
- t.lexpos = t.endlexpos = lookahead.lexpos
- t.value = lookahead
- lookaheadstack.append(lookahead)
- lookahead = t
- else:
- sym = symstack.pop()
- #--! TRACKING
- if tracking:
- lookahead.lineno = sym.lineno
- lookahead.lexpos = sym.lexpos
- #--! TRACKING
- statestack.pop()
- state = statestack[-1]
-
- continue
-
- # Call an error function here
- raise RuntimeError('yacc: internal parser error!!!\n')
-
- #--! parseopt-end
-
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
- # parseopt_notrack().
- #
- # Optimized version of parseopt() with line number tracking removed.
- # DO NOT EDIT THIS CODE DIRECTLY. This code is automatically generated
- # by the ply/ygen.py script. Make changes to the parsedebug() method instead.
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-
- def parseopt_notrack(self, input=None, lexer=None, debug=False, tracking=False, tokenfunc=None):
- #--! parseopt-notrack-start
- lookahead = None # Current lookahead symbol
- lookaheadstack = [] # Stack of lookahead symbols
- actions = self.action # Local reference to action table (to avoid lookup on self.)
- goto = self.goto # Local reference to goto table (to avoid lookup on self.)
- prod = self.productions # Local reference to production list (to avoid lookup on self.)
- defaulted_states = self.defaulted_states # Local reference to defaulted states
- pslice = YaccProduction(None) # Production object passed to grammar rules
- errorcount = 0 # Used during error recovery
-
-
- # If no lexer was given, we will try to use the lex module
- if not lexer:
- from . import lex
- lexer = lex.lexer
-
- # Set up the lexer and parser objects on pslice
- pslice.lexer = lexer
- pslice.parser = self
-
- # If input was supplied, pass to lexer
- if input is not None:
- lexer.input(input)
-
- if tokenfunc is None:
- # Tokenize function
- get_token = lexer.token
- else:
- get_token = tokenfunc
-
- # Set the parser() token method (sometimes used in error recovery)
- self.token = get_token
-
- # Set up the state and symbol stacks
-
- statestack = [] # Stack of parsing states
- self.statestack = statestack
- symstack = [] # Stack of grammar symbols
- self.symstack = symstack
-
- pslice.stack = symstack # Put in the production
- errtoken = None # Err token
-
- # The start state is assumed to be (0,$end)
-
- statestack.append(0)
- sym = YaccSymbol()
- sym.type = '$end'
- symstack.append(sym)
- state = 0
- while True:
- # Get the next symbol on the input. If a lookahead symbol
- # is already set, we just use that. Otherwise, we'll pull
- # the next token off of the lookaheadstack or from the lexer
-
-
- if state not in defaulted_states:
- if not lookahead:
- if not lookaheadstack:
- lookahead = get_token() # Get the next token
- else:
- lookahead = lookaheadstack.pop()
- if not lookahead:
- lookahead = YaccSymbol()
- lookahead.type = '$end'
-
- # Check the action table
- ltype = lookahead.type
- t = actions[state].get(ltype)
- else:
- t = defaulted_states[state]
-
-
- if t is not None:
- if t > 0:
- # shift a symbol on the stack
- statestack.append(t)
- state = t
-
-
- symstack.append(lookahead)
- lookahead = None
-
- # Decrease error count on successful shift
- if errorcount:
- errorcount -= 1
- continue
-
- if t < 0:
- # reduce a symbol on the stack, emit a production
- p = prod[-t]
- pname = p.name
- plen = p.len
-
- # Get production function
- sym = YaccSymbol()
- sym.type = pname # Production name
- sym.value = None
-
-
- if plen:
- targ = symstack[-plen-1:]
- targ[0] = sym
-
-
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
- # The code enclosed in this section is duplicated
- # below as a performance optimization. Make sure
- # changes get made in both locations.
-
- pslice.slice = targ
-
- try:
- # Call the grammar rule with our special slice object
- del symstack[-plen:]
- self.state = state
- p.callable(pslice)
- del statestack[-plen:]
- symstack.append(sym)
- state = goto[statestack[-1]][pname]
- statestack.append(state)
- except SyntaxError:
- # If an error was set. Enter error recovery state
- lookaheadstack.append(lookahead) # Save the current lookahead token
- symstack.extend(targ[1:-1]) # Put the production slice back on the stack
- statestack.pop() # Pop back one state (before the reduce)
- state = statestack[-1]
- sym.type = 'error'
- sym.value = 'error'
- lookahead = sym
- errorcount = error_count
- self.errorok = False
-
- continue
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-
- else:
-
-
- targ = [sym]
-
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
- # The code enclosed in this section is duplicated
- # above as a performance optimization. Make sure
- # changes get made in both locations.
-
- pslice.slice = targ
-
- try:
- # Call the grammar rule with our special slice object
- self.state = state
- p.callable(pslice)
- symstack.append(sym)
- state = goto[statestack[-1]][pname]
- statestack.append(state)
- except SyntaxError:
- # If an error was set. Enter error recovery state
- lookaheadstack.append(lookahead) # Save the current lookahead token
- statestack.pop() # Pop back one state (before the reduce)
- state = statestack[-1]
- sym.type = 'error'
- sym.value = 'error'
- lookahead = sym
- errorcount = error_count
- self.errorok = False
-
- continue
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-
- if t == 0:
- n = symstack[-1]
- result = getattr(n, 'value', None)
- return result
-
- if t is None:
-
-
- # We have some kind of parsing error here. To handle
- # this, we are going to push the current token onto
- # the tokenstack and replace it with an 'error' token.
- # If there are any synchronization rules, they may
- # catch it.
- #
-                # In addition to pushing the error token, we call
- # the user defined p_error() function if this is the
- # first syntax error. This function is only called if
- # errorcount == 0.
- if errorcount == 0 or self.errorok:
- errorcount = error_count
- self.errorok = False
- errtoken = lookahead
- if errtoken.type == '$end':
- errtoken = None # End of file!
- if self.errorfunc:
- if errtoken and not hasattr(errtoken, 'lexer'):
- errtoken.lexer = lexer
- self.state = state
- tok = call_errorfunc(self.errorfunc, errtoken, self)
- if self.errorok:
- # User must have done some kind of panic
- # mode recovery on their own. The
- # returned token is the next lookahead
- lookahead = tok
- errtoken = None
- continue
- else:
- if errtoken:
- if hasattr(errtoken, 'lineno'):
- lineno = lookahead.lineno
- else:
- lineno = 0
- if lineno:
- sys.stderr.write('yacc: Syntax error at line %d, token=%s\n' % (lineno, errtoken.type))
- else:
- sys.stderr.write('yacc: Syntax error, token=%s' % errtoken.type)
- else:
- sys.stderr.write('yacc: Parse error in input. EOF\n')
- return
-
- else:
- errorcount = error_count
-
- # case 1: the statestack only has 1 entry on it. If we're in this state, the
- # entire parse has been rolled back and we're completely hosed. The token is
- # discarded and we just keep going.
-
- if len(statestack) <= 1 and lookahead.type != '$end':
- lookahead = None
- errtoken = None
- state = 0
- # Nuke the pushback stack
- del lookaheadstack[:]
- continue
-
- # case 2: the statestack has a couple of entries on it, but we're
- # at the end of the file. nuke the top entry and generate an error token
-
- # Start nuking entries on the stack
- if lookahead.type == '$end':
- # Whoa. We're really hosed here. Bail out
- return
-
- if lookahead.type != 'error':
- sym = symstack[-1]
- if sym.type == 'error':
- # Hmmm. Error is on top of stack, we'll just nuke input
- # symbol and continue
- lookahead = None
- continue
-
- # Create the error symbol for the first time and make it the new lookahead symbol
- t = YaccSymbol()
- t.type = 'error'
-
- if hasattr(lookahead, 'lineno'):
- t.lineno = t.endlineno = lookahead.lineno
- if hasattr(lookahead, 'lexpos'):
- t.lexpos = t.endlexpos = lookahead.lexpos
- t.value = lookahead
- lookaheadstack.append(lookahead)
- lookahead = t
- else:
- sym = symstack.pop()
- statestack.pop()
- state = statestack[-1]
-
- continue
-
- # Call an error function here
- raise RuntimeError('yacc: internal parser error!!!\n')
-
- #--! parseopt-notrack-end
-
-# -----------------------------------------------------------------------------
-# === Grammar Representation ===
-#
-# The following functions, classes, and variables are used to represent and
-# manipulate the rules that make up a grammar.
-# -----------------------------------------------------------------------------
-
-# regex matching identifiers
-_is_identifier = re.compile(r'^[a-zA-Z0-9_-]+$')
-
-# -----------------------------------------------------------------------------
-# class Production:
-#
-# This class stores the raw information about a single production or grammar rule.
-# A grammar rule refers to a specification such as this:
-#
-# expr : expr PLUS term
-#
-# Here are the basic attributes defined on all productions
-#
-# name - Name of the production. For example 'expr'
-# prod - A list of symbols on the right side ['expr','PLUS','term']
-# prec - Production precedence level
-# number - Production number.
-# func - Function that executes on reduce
-# file - File where production function is defined
-# lineno - Line number where production function is defined
-#
-# The following attributes are also defined or computed internally.
-#
-# len - Length of the production (number of symbols on right hand side)
-# usyms - Set of unique symbols found in the production
-# -----------------------------------------------------------------------------
-
-class Production(object):
- reduced = 0
- def __init__(self, number, name, prod, precedence=('right', 0), func=None, file='', line=0):
- self.name = name
- self.prod = tuple(prod)
- self.number = number
- self.func = func
- self.callable = None
- self.file = file
- self.line = line
- self.prec = precedence
-
- # Internal settings used during table construction
-
- self.len = len(self.prod) # Length of the production
-
- # Create a list of unique production symbols used in the production
- self.usyms = []
- for s in self.prod:
- if s not in self.usyms:
- self.usyms.append(s)
-
- # List of all LR items for the production
- self.lr_items = []
- self.lr_next = None
-
- # Create a string representation
- if self.prod:
- self.str = '%s -> %s' % (self.name, ' '.join(self.prod))
- else:
- self.str = '%s -> ' % self.name
-
- def __str__(self):
- return self.str
-
- def __repr__(self):
- return 'Production(' + str(self) + ')'
-
- def __len__(self):
- return len(self.prod)
-
- def __nonzero__(self):
- return 1
-
- def __getitem__(self, index):
- return self.prod[index]
-
- # Return the nth lr_item from the production (or None if at the end)
- def lr_item(self, n):
- if n > len(self.prod):
- return None
- p = LRItem(self, n)
- # Precompute the list of productions immediately following.
- try:
- p.lr_after = Prodnames[p.prod[n+1]]
- except (IndexError, KeyError):
- p.lr_after = []
- try:
- p.lr_before = p.prod[n-1]
- except IndexError:
- p.lr_before = None
- return p
-
- # Bind the production function name to a callable
- def bind(self, pdict):
- if self.func:
- self.callable = pdict[self.func]
-
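A tiny sketch of constructing a Production directly with the attributes documented above (the rule is invented; inside PLY these objects are normally created by Grammar.add_production()):

```python
p = Production(1, 'expr', ['expr', 'PLUS', 'term'])
print(p)          # expr -> expr PLUS term
print(p.len)      # 3
print(p.usyms)    # ['expr', 'PLUS', 'term']
print(p.prec)     # ('right', 0), the default precedence
```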
-# This class serves as a minimal standin for Production objects when
-# reading table data from files. It only contains information
-# actually used by the LR parsing engine, plus some additional
-# debugging information.
-class MiniProduction(object):
- def __init__(self, str, name, len, func, file, line):
- self.name = name
- self.len = len
- self.func = func
- self.callable = None
- self.file = file
- self.line = line
- self.str = str
-
- def __str__(self):
- return self.str
-
- def __repr__(self):
- return 'MiniProduction(%s)' % self.str
-
- # Bind the production function name to a callable
- def bind(self, pdict):
- if self.func:
- self.callable = pdict[self.func]
-
-
-# -----------------------------------------------------------------------------
-# class LRItem
-#
-# This class represents a specific stage of parsing a production rule. For
-# example:
-#
-# expr : expr . PLUS term
-#
-# In the above, the "." represents the current location of the parse. Here are the
-# basic attributes:
-#
-# name - Name of the production. For example 'expr'
-# prod - A list of symbols on the right side ['expr','.', 'PLUS','term']
-# number - Production number.
-#
-# lr_next     - Next LR item. Example, if we are ' expr -> expr . PLUS term'
-# then lr_next refers to 'expr -> expr PLUS . term'
-# lr_index - LR item index (location of the ".") in the prod list.
-# lookaheads - LALR lookahead symbols for this item
-# len - Length of the production (number of symbols on right hand side)
-# lr_after - List of all productions that immediately follow
-# lr_before - Grammar symbol immediately before
-# -----------------------------------------------------------------------------
-
-class LRItem(object):
- def __init__(self, p, n):
- self.name = p.name
- self.prod = list(p.prod)
- self.number = p.number
- self.lr_index = n
- self.lookaheads = {}
- self.prod.insert(n, '.')
- self.prod = tuple(self.prod)
- self.len = len(self.prod)
- self.usyms = p.usyms
-
- def __str__(self):
- if self.prod:
- s = '%s -> %s' % (self.name, ' '.join(self.prod))
- else:
- s = '%s -> ' % self.name
- return s
-
- def __repr__(self):
- return 'LRItem(' + str(self) + ')'
-
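A short illustration of the dot notation described above, using only the classes defined in this module (the production itself is invented):

```python
p = Production(1, 'expr', ['expr', 'PLUS', 'term'])
item = LRItem(p, 1)        # place the dot after the first symbol
print(item)                # expr -> expr . PLUS term
print(item.lr_index)       # 1
```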
-# -----------------------------------------------------------------------------
-# rightmost_terminal()
-#
-# Return the rightmost terminal from a list of symbols. Used in add_production()
-# -----------------------------------------------------------------------------
-def rightmost_terminal(symbols, terminals):
- i = len(symbols) - 1
- while i >= 0:
- if symbols[i] in terminals:
- return symbols[i]
- i -= 1
- return None
-
-# -----------------------------------------------------------------------------
-# === GRAMMAR CLASS ===
-#
-# The following class represents the contents of the specified grammar along
-# with various computed properties such as first sets, follow sets, LR items, etc.
-# This data is used for critical parts of the table generation process later.
-# -----------------------------------------------------------------------------
-
-class GrammarError(YaccError):
- pass
-
-class Grammar(object):
- def __init__(self, terminals):
- self.Productions = [None] # A list of all of the productions. The first
- # entry is always reserved for the purpose of
- # building an augmented grammar
-
- self.Prodnames = {} # A dictionary mapping the names of nonterminals to a list of all
- # productions of that nonterminal.
-
- self.Prodmap = {} # A dictionary that is only used to detect duplicate
- # productions.
-
- self.Terminals = {} # A dictionary mapping the names of terminal symbols to a
- # list of the rules where they are used.
-
- for term in terminals:
- self.Terminals[term] = []
-
- self.Terminals['error'] = []
-
- self.Nonterminals = {} # A dictionary mapping names of nonterminals to a list
- # of rule numbers where they are used.
-
- self.First = {} # A dictionary of precomputed FIRST(x) symbols
-
- self.Follow = {} # A dictionary of precomputed FOLLOW(x) symbols
-
- self.Precedence = {} # Precedence rules for each terminal. Contains tuples of the
- # form ('right',level) or ('nonassoc', level) or ('left',level)
-
-        self.UsedPrecedence = set() # Precedence rules that were actually used by the grammar.
- # This is only used to provide error checking and to generate
- # a warning about unused precedence rules.
-
- self.Start = None # Starting symbol for the grammar
-
-
- def __len__(self):
- return len(self.Productions)
-
- def __getitem__(self, index):
- return self.Productions[index]
-
- # -----------------------------------------------------------------------------
- # set_precedence()
- #
- # Sets the precedence for a given terminal. assoc is the associativity such as
- # 'left','right', or 'nonassoc'. level is a numeric level.
- #
- # -----------------------------------------------------------------------------
-
- def set_precedence(self, term, assoc, level):
- assert self.Productions == [None], 'Must call set_precedence() before add_production()'
- if term in self.Precedence:
- raise GrammarError('Precedence already specified for terminal %r' % term)
- if assoc not in ['left', 'right', 'nonassoc']:
- raise GrammarError("Associativity must be one of 'left','right', or 'nonassoc'")
- self.Precedence[term] = (assoc, level)
-
- # -----------------------------------------------------------------------------
- # add_production()
- #
- # Given an action function, this function assembles a production rule and
- # computes its precedence level.
- #
- # The production rule is supplied as a list of symbols. For example,
- # a rule such as 'expr : expr PLUS term' has a production name of 'expr' and
- # symbols ['expr','PLUS','term'].
- #
- # Precedence is determined by the precedence of the right-most non-terminal
- # or the precedence of a terminal specified by %prec.
- #
- # A variety of error checks are performed to make sure production symbols
- # are valid and that %prec is used correctly.
- # -----------------------------------------------------------------------------
-
- def add_production(self, prodname, syms, func=None, file='', line=0):
-
- if prodname in self.Terminals:
- raise GrammarError('%s:%d: Illegal rule name %r. Already defined as a token' % (file, line, prodname))
- if prodname == 'error':
- raise GrammarError('%s:%d: Illegal rule name %r. error is a reserved word' % (file, line, prodname))
- if not _is_identifier.match(prodname):
- raise GrammarError('%s:%d: Illegal rule name %r' % (file, line, prodname))
-
- # Look for literal tokens
- for n, s in enumerate(syms):
- if s[0] in "'\"":
- try:
- c = eval(s)
- if (len(c) > 1):
- raise GrammarError('%s:%d: Literal token %s in rule %r may only be a single character' %
- (file, line, s, prodname))
- if c not in self.Terminals:
- self.Terminals[c] = []
- syms[n] = c
- continue
- except SyntaxError:
- pass
- if not _is_identifier.match(s) and s != '%prec':
- raise GrammarError('%s:%d: Illegal name %r in rule %r' % (file, line, s, prodname))
-
- # Determine the precedence level
- if '%prec' in syms:
- if syms[-1] == '%prec':
- raise GrammarError('%s:%d: Syntax error. Nothing follows %%prec' % (file, line))
- if syms[-2] != '%prec':
- raise GrammarError('%s:%d: Syntax error. %%prec can only appear at the end of a grammar rule' %
- (file, line))
- precname = syms[-1]
- prodprec = self.Precedence.get(precname)
- if not prodprec:
- raise GrammarError('%s:%d: Nothing known about the precedence of %r' % (file, line, precname))
- else:
- self.UsedPrecedence.add(precname)
- del syms[-2:] # Drop %prec from the rule
- else:
- # If no %prec, precedence is determined by the rightmost terminal symbol
- precname = rightmost_terminal(syms, self.Terminals)
- prodprec = self.Precedence.get(precname, ('right', 0))
-
- # See if the rule is already in the rulemap
- map = '%s -> %s' % (prodname, syms)
- if map in self.Prodmap:
- m = self.Prodmap[map]
- raise GrammarError('%s:%d: Duplicate rule %s. ' % (file, line, m) +
- 'Previous definition at %s:%d' % (m.file, m.line))
-
- # From this point on, everything is valid. Create a new Production instance
- pnumber = len(self.Productions)
- if prodname not in self.Nonterminals:
- self.Nonterminals[prodname] = []
-
- # Add the production number to Terminals and Nonterminals
- for t in syms:
- if t in self.Terminals:
- self.Terminals[t].append(pnumber)
- else:
- if t not in self.Nonterminals:
- self.Nonterminals[t] = []
- self.Nonterminals[t].append(pnumber)
-
- # Create a production and add it to the list of productions
- p = Production(pnumber, prodname, syms, prodprec, func, file, line)
- self.Productions.append(p)
- self.Prodmap[map] = p
-
- # Add to the global productions list
- try:
- self.Prodnames[prodname].append(p)
- except KeyError:
- self.Prodnames[prodname] = [p]
-
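For illustration, the calls below mirror what the reflection machinery later in this file does when it feeds rules into add_production(); the Grammar instance g, the action-function names, and the file/line values are hypothetical, so treat this as a sketch of the call pattern only.

g.add_production('expr', ['expr', 'PLUS', 'expr'], 'p_expr_binop', 'calc.py', 12)
# With %prec, the trailing ['%prec', 'UMINUS'] pair is stripped and the rule is
# recorded using the precedence previously registered for UMINUS.
g.add_production('expr', ['MINUS', 'expr', '%prec', 'UMINUS'], 'p_expr_uminus', 'calc.py', 18)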
- # -----------------------------------------------------------------------------
- # set_start()
- #
- # Sets the starting symbol and creates the augmented grammar. Production
- # rule 0 is S' -> start where start is the start symbol.
- # -----------------------------------------------------------------------------
-
- def set_start(self, start=None):
- if not start:
- start = self.Productions[1].name
- if start not in self.Nonterminals:
- raise GrammarError('start symbol %s undefined' % start)
- self.Productions[0] = Production(0, "S'", [start])
- self.Nonterminals[start].append(0)
- self.Start = start
-
- # -----------------------------------------------------------------------------
- # find_unreachable()
- #
- # Find all of the nonterminal symbols that can't be reached from the starting
- # symbol. Returns a list of nonterminals that can't be reached.
- # -----------------------------------------------------------------------------
-
- def find_unreachable(self):
-
- # Mark all symbols that are reachable from a symbol s
- def mark_reachable_from(s):
- if s in reachable:
- return
- reachable.add(s)
- for p in self.Prodnames.get(s, []):
- for r in p.prod:
- mark_reachable_from(r)
-
- reachable = set()
- mark_reachable_from(self.Productions[0].prod[0])
- return [s for s in self.Nonterminals if s not in reachable]
-
- # -----------------------------------------------------------------------------
- # infinite_cycles()
- #
- # This function looks at the various parsing rules and tries to detect
- # infinite recursion cycles (grammar rules where there is no possible way
- # to derive a string of only terminals).
- # -----------------------------------------------------------------------------
-
- def infinite_cycles(self):
- terminates = {}
-
- # Terminals:
- for t in self.Terminals:
- terminates[t] = True
-
- terminates['$end'] = True
-
- # Nonterminals:
-
- # Initialize to false:
- for n in self.Nonterminals:
- terminates[n] = False
-
- # Then propagate termination until no change:
- while True:
- some_change = False
- for (n, pl) in self.Prodnames.items():
- # Nonterminal n terminates iff any of its productions terminates.
- for p in pl:
- # Production p terminates iff all of its rhs symbols terminate.
- for s in p.prod:
- if not terminates[s]:
- # The symbol s does not terminate,
- # so production p does not terminate.
- p_terminates = False
- break
- else:
- # didn't break from the loop,
- # so every symbol s terminates
- # so production p terminates.
- p_terminates = True
-
- if p_terminates:
- # symbol n terminates!
- if not terminates[n]:
- terminates[n] = True
- some_change = True
- # Don't need to consider any more productions for this n.
- break
-
- if not some_change:
- break
-
- infinite = []
- for (s, term) in terminates.items():
- if not term:
- if s not in self.Prodnames and s not in self.Terminals and s != 'error':
- # s is used-but-not-defined, and we've already warned of that,
- # so it would be overkill to say that it's also non-terminating.
- pass
- else:
- infinite.append(s)
-
- return infinite
-
- # -----------------------------------------------------------------------------
- # undefined_symbols()
- #
-    # Find all symbols that were used in the grammar, but not defined as tokens or
-    # grammar rules. Returns a list of tuples (sym, prod) where sym is the symbol
-    # and prod is the production where the symbol was used.
- # -----------------------------------------------------------------------------
- def undefined_symbols(self):
- result = []
- for p in self.Productions:
- if not p:
- continue
-
- for s in p.prod:
- if s not in self.Prodnames and s not in self.Terminals and s != 'error':
- result.append((s, p))
- return result
-
- # -----------------------------------------------------------------------------
- # unused_terminals()
- #
-    # Find all terminals that were defined, but not used by the grammar. Returns
-    # a list of all such symbols.
- # -----------------------------------------------------------------------------
- def unused_terminals(self):
- unused_tok = []
- for s, v in self.Terminals.items():
- if s != 'error' and not v:
- unused_tok.append(s)
-
- return unused_tok
-
- # ------------------------------------------------------------------------------
- # unused_rules()
- #
-    # Find all grammar rules that were defined, but not used (possibly because they
-    # are unreachable). Returns a list of productions.
- # ------------------------------------------------------------------------------
-
- def unused_rules(self):
- unused_prod = []
- for s, v in self.Nonterminals.items():
- if not v:
- p = self.Prodnames[s][0]
- unused_prod.append(p)
- return unused_prod
-
- # -----------------------------------------------------------------------------
- # unused_precedence()
- #
- # Returns a list of tuples (term,precedence) corresponding to precedence
- # rules that were never used by the grammar. term is the name of the terminal
- # on which precedence was applied and precedence is a string such as 'left' or
- # 'right' corresponding to the type of precedence.
- # -----------------------------------------------------------------------------
-
- def unused_precedence(self):
- unused = []
- for termname in self.Precedence:
- if not (termname in self.Terminals or termname in self.UsedPrecedence):
- unused.append((termname, self.Precedence[termname][0]))
-
- return unused
-
- # -------------------------------------------------------------------------
- # _first()
- #
- # Compute the value of FIRST1(beta) where beta is a tuple of symbols.
- #
-    # During execution of compute_first(), the result may be incomplete.
- # Afterward (e.g., when called from compute_follow()), it will be complete.
- # -------------------------------------------------------------------------
- def _first(self, beta):
-
- # We are computing First(x1,x2,x3,...,xn)
- result = []
- for x in beta:
- x_produces_empty = False
-
-        # Add all the non-empty symbols of First[x] to the result.
- for f in self.First[x]:
- if f == '':
- x_produces_empty = True
- else:
- if f not in result:
- result.append(f)
-
- if x_produces_empty:
- # We have to consider the next x in beta,
- # i.e. stay in the loop.
- pass
- else:
- # We don't have to consider any further symbols in beta.
- break
- else:
- # There was no 'break' from the loop,
- # so x_produces_empty was true for all x in beta,
- # so beta produces empty as well.
- result.append('')
-
- return result
-
- # -------------------------------------------------------------------------
- # compute_first()
- #
- # Compute the value of FIRST1(X) for all symbols
- # -------------------------------------------------------------------------
- def compute_first(self):
- if self.First:
- return self.First
-
- # Terminals:
- for t in self.Terminals:
- self.First[t] = [t]
-
- self.First['$end'] = ['$end']
-
- # Nonterminals:
-
- # Initialize to the empty set:
- for n in self.Nonterminals:
- self.First[n] = []
-
- # Then propagate symbols until no change:
- while True:
- some_change = False
- for n in self.Nonterminals:
- for p in self.Prodnames[n]:
- for f in self._first(p.prod):
- if f not in self.First[n]:
- self.First[n].append(f)
- some_change = True
- if not some_change:
- break
-
- return self.First
-
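As a worked example (the grammar is illustrative; '' marks the empty string, matching the convention used by _first() above): for the rules E -> T Etail, Etail -> PLUS T Etail | <empty>, T -> ID, compute_first() converges to the sets sketched below.

# Expected contents of self.First for the toy grammar (list order may differ):
expected_first = {
    'ID': ['ID'], 'PLUS': ['PLUS'], '$end': ['$end'],   # terminals map to themselves
    'T': ['ID'],
    'Etail': ['PLUS', ''],    # '' records that Etail can derive the empty string
    'E': ['ID'],
}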
- # ---------------------------------------------------------------------
- # compute_follow()
- #
- # Computes all of the follow sets for every non-terminal symbol. The
- # follow set is the set of all symbols that might follow a given
- # non-terminal. See the Dragon book, 2nd Ed. p. 189.
- # ---------------------------------------------------------------------
- def compute_follow(self, start=None):
- # If already computed, return the result
- if self.Follow:
- return self.Follow
-
- # If first sets not computed yet, do that first.
- if not self.First:
- self.compute_first()
-
- # Add '$end' to the follow list of the start symbol
- for k in self.Nonterminals:
- self.Follow[k] = []
-
- if not start:
- start = self.Productions[1].name
-
- self.Follow[start] = ['$end']
-
- while True:
- didadd = False
- for p in self.Productions[1:]:
- # Here is the production set
- for i, B in enumerate(p.prod):
- if B in self.Nonterminals:
- # Okay. We got a non-terminal in a production
- fst = self._first(p.prod[i+1:])
- hasempty = False
- for f in fst:
- if f != '' and f not in self.Follow[B]:
- self.Follow[B].append(f)
- didadd = True
- if f == '':
- hasempty = True
- if hasempty or i == (len(p.prod)-1):
- # Add elements of follow(a) to follow(b)
- for f in self.Follow[p.name]:
- if f not in self.Follow[B]:
- self.Follow[B].append(f)
- didadd = True
- if not didadd:
- break
- return self.Follow
-
-
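Continuing the same illustrative grammar with E as the start symbol, compute_follow() settles on the sets below.

expected_follow = {
    'E': ['$end'],
    'Etail': ['$end'],          # Etail only ever appears at the end of a rule
    'T': ['PLUS', '$end'],      # PLUS from FIRST(Etail); $end because Etail can be empty
}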
- # -----------------------------------------------------------------------------
- # build_lritems()
- #
- # This function walks the list of productions and builds a complete set of the
- # LR items. The LR items are stored in two ways: First, they are uniquely
- # numbered and placed in the list _lritems. Second, a linked list of LR items
- # is built for each production. For example:
- #
- # E -> E PLUS E
- #
- # Creates the list
- #
- # [E -> . E PLUS E, E -> E . PLUS E, E -> E PLUS . E, E -> E PLUS E . ]
- # -----------------------------------------------------------------------------
-
- def build_lritems(self):
- for p in self.Productions:
- lastlri = p
- i = 0
- lr_items = []
- while True:
- if i > len(p):
- lri = None
- else:
- lri = LRItem(p, i)
- # Precompute the list of productions immediately following
- try:
- lri.lr_after = self.Prodnames[lri.prod[i+1]]
- except (IndexError, KeyError):
- lri.lr_after = []
- try:
- lri.lr_before = lri.prod[i-1]
- except IndexError:
- lri.lr_before = None
-
- lastlri.lr_next = lri
- if not lri:
- break
- lr_items.append(lri)
- lastlri = lri
- i += 1
- p.lr_items = lr_items
-
-# -----------------------------------------------------------------------------
-# == Class LRTable ==
-#
-# This class represents a basic table of LR parsing information.
-# Methods for generating the tables are not defined here. They are defined
-# in the derived class LRGeneratedTable.
-# -----------------------------------------------------------------------------
-
-class VersionError(YaccError):
- pass
-
-class LRTable(object):
- def __init__(self):
- self.lr_action = None
- self.lr_goto = None
- self.lr_productions = None
- self.lr_method = None
-
- def read_table(self, module):
- if isinstance(module, types.ModuleType):
- parsetab = module
- else:
- exec('import %s' % module)
- parsetab = sys.modules[module]
-
- if parsetab._tabversion != __tabversion__:
- raise VersionError('yacc table file version is out of date')
-
- self.lr_action = parsetab._lr_action
- self.lr_goto = parsetab._lr_goto
-
- self.lr_productions = []
- for p in parsetab._lr_productions:
- self.lr_productions.append(MiniProduction(*p))
-
- self.lr_method = parsetab._lr_method
- return parsetab._lr_signature
-
- def read_pickle(self, filename):
- try:
- import cPickle as pickle
- except ImportError:
- import pickle
-
- if not os.path.exists(filename):
- raise ImportError
-
- in_f = open(filename, 'rb')
-
- tabversion = pickle.load(in_f)
- if tabversion != __tabversion__:
- raise VersionError('yacc table file version is out of date')
- self.lr_method = pickle.load(in_f)
- signature = pickle.load(in_f)
- self.lr_action = pickle.load(in_f)
- self.lr_goto = pickle.load(in_f)
- productions = pickle.load(in_f)
-
- self.lr_productions = []
- for p in productions:
- self.lr_productions.append(MiniProduction(*p))
-
- in_f.close()
- return signature
-
- # Bind all production function names to callable objects in pdict
- def bind_callables(self, pdict):
- for p in self.lr_productions:
- p.bind(pdict)
-
-
-# -----------------------------------------------------------------------------
-# === LR Generator ===
-#
-# The following classes and functions are used to generate LR parsing tables on
-# a grammar.
-# -----------------------------------------------------------------------------
-
-# -----------------------------------------------------------------------------
-# digraph()
-# traverse()
-#
-# The following two functions are used to compute set valued functions
-# of the form:
-#
-# F(x) = F'(x) U U{F(y) | x R y}
-#
-# This is used to compute the values of Read() sets as well as FOLLOW sets
-# in LALR(1) generation.
-#
-# Inputs: X - An input set
-# R - A relation
-# FP - Set-valued function
-# ------------------------------------------------------------------------------
-
-def digraph(X, R, FP):
- N = {}
- for x in X:
- N[x] = 0
- stack = []
- F = {}
- for x in X:
- if N[x] == 0:
- traverse(x, N, stack, F, X, R, FP)
- return F
-
-def traverse(x, N, stack, F, X, R, FP):
- stack.append(x)
- d = len(stack)
- N[x] = d
- F[x] = FP(x) # F(X) <- F'(x)
-
- rel = R(x) # Get y's related to x
- for y in rel:
- if N[y] == 0:
- traverse(y, N, stack, F, X, R, FP)
- N[x] = min(N[x], N[y])
- for a in F.get(y, []):
- if a not in F[x]:
- F[x].append(a)
- if N[x] == d:
- N[stack[-1]] = MAXINT
- F[stack[-1]] = F[x]
- element = stack.pop()
- while element != x:
- N[stack[-1]] = MAXINT
- F[stack[-1]] = F[x]
- element = stack.pop()
-
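A small, self-contained illustration of digraph()/traverse(); the node names, relation, and base sets are made up. With F'(a) = {A}, F'(b) = {B}, F'(c) = {C} and the relation a R b, b R c, each node ends up with its own base set plus the sets of everything reachable from it.

X = ['a', 'b', 'c']                        # input set
edges = {'a': ['b'], 'b': ['c'], 'c': []}  # the relation: x R y for each y in edges[x]
base = {'a': ['A'], 'b': ['B'], 'c': ['C']}

R = lambda x: edges[x]
FP = lambda x: list(base[x])               # F'(x), copied because digraph extends it in place

F = digraph(X, R, FP)
# F == {'a': ['A', 'B', 'C'], 'b': ['B', 'C'], 'c': ['C']}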
-class LALRError(YaccError):
- pass
-
-# -----------------------------------------------------------------------------
-# == LRGeneratedTable ==
-#
-# This class implements the LR table generation algorithm. There are no
-# public methods except for write()
-# -----------------------------------------------------------------------------
-
-class LRGeneratedTable(LRTable):
- def __init__(self, grammar, method='LALR', log=None):
- if method not in ['SLR', 'LALR']:
- raise LALRError('Unsupported method %s' % method)
-
- self.grammar = grammar
- self.lr_method = method
-
- # Set up the logger
- if not log:
- log = NullLogger()
- self.log = log
-
- # Internal attributes
- self.lr_action = {} # Action table
- self.lr_goto = {} # Goto table
- self.lr_productions = grammar.Productions # Copy of grammar Production array
- self.lr_goto_cache = {} # Cache of computed gotos
- self.lr0_cidhash = {} # Cache of closures
-
- self._add_count = 0 # Internal counter used to detect cycles
-
-        # Diagnostic information filled in by the table generator
- self.sr_conflict = 0
- self.rr_conflict = 0
- self.conflicts = [] # List of conflicts
-
- self.sr_conflicts = []
- self.rr_conflicts = []
-
- # Build the tables
- self.grammar.build_lritems()
- self.grammar.compute_first()
- self.grammar.compute_follow()
- self.lr_parse_table()
-
- # Compute the LR(0) closure operation on I, where I is a set of LR(0) items.
-
- def lr0_closure(self, I):
- self._add_count += 1
-
- # Add everything in I to J
- J = I[:]
- didadd = True
- while didadd:
- didadd = False
- for j in J:
- for x in j.lr_after:
- if getattr(x, 'lr0_added', 0) == self._add_count:
- continue
- # Add B --> .G to J
- J.append(x.lr_next)
- x.lr0_added = self._add_count
- didadd = True
-
- return J
-
- # Compute the LR(0) goto function goto(I,X) where I is a set
- # of LR(0) items and X is a grammar symbol. This function is written
- # in a way that guarantees uniqueness of the generated goto sets
- # (i.e. the same goto set will never be returned as two different Python
- # objects). With uniqueness, we can later do fast set comparisons using
- # id(obj) instead of element-wise comparison.
-
- def lr0_goto(self, I, x):
- # First we look for a previously cached entry
- g = self.lr_goto_cache.get((id(I), x))
- if g:
- return g
-
- # Now we generate the goto set in a way that guarantees uniqueness
- # of the result
-
- s = self.lr_goto_cache.get(x)
- if not s:
- s = {}
- self.lr_goto_cache[x] = s
-
- gs = []
- for p in I:
- n = p.lr_next
- if n and n.lr_before == x:
- s1 = s.get(id(n))
- if not s1:
- s1 = {}
- s[id(n)] = s1
- gs.append(n)
- s = s1
- g = s.get('$end')
- if not g:
- if gs:
- g = self.lr0_closure(gs)
- s['$end'] = g
- else:
- s['$end'] = gs
- self.lr_goto_cache[(id(I), x)] = g
- return g
-
- # Compute the LR(0) sets of item function
- def lr0_items(self):
- C = [self.lr0_closure([self.grammar.Productions[0].lr_next])]
- i = 0
- for I in C:
- self.lr0_cidhash[id(I)] = i
- i += 1
-
- # Loop over the items in C and each grammar symbols
- i = 0
- while i < len(C):
- I = C[i]
- i += 1
-
- # Collect all of the symbols that could possibly be in the goto(I,X) sets
- asyms = {}
- for ii in I:
- for s in ii.usyms:
- asyms[s] = None
-
- for x in asyms:
- g = self.lr0_goto(I, x)
- if not g or id(g) in self.lr0_cidhash:
- continue
- self.lr0_cidhash[id(g)] = len(C)
- C.append(g)
-
- return C
-
- # -----------------------------------------------------------------------------
- # ==== LALR(1) Parsing ====
- #
- # LALR(1) parsing is almost exactly the same as SLR except that instead of
- # relying upon Follow() sets when performing reductions, a more selective
- # lookahead set that incorporates the state of the LR(0) machine is utilized.
- # Thus, we mainly just have to focus on calculating the lookahead sets.
- #
-    # The method used here is due to DeRemer and Pennello (1982).
- #
-    # DeRemer, F. L., and T. J. Pennello: "Efficient Computation of LALR(1)
- # Lookahead Sets", ACM Transactions on Programming Languages and Systems,
- # Vol. 4, No. 4, Oct. 1982, pp. 615-649
- #
- # Further details can also be found in:
- #
- # J. Tremblay and P. Sorenson, "The Theory and Practice of Compiler Writing",
- # McGraw-Hill Book Company, (1985).
- #
- # -----------------------------------------------------------------------------
-
- # -----------------------------------------------------------------------------
- # compute_nullable_nonterminals()
- #
-    # Creates a set containing all of the non-terminals that can derive an
-    # empty production.
- # -----------------------------------------------------------------------------
-
- def compute_nullable_nonterminals(self):
- nullable = set()
- num_nullable = 0
- while True:
- for p in self.grammar.Productions[1:]:
- if p.len == 0:
- nullable.add(p.name)
- continue
- for t in p.prod:
- if t not in nullable:
- break
- else:
- nullable.add(p.name)
- if len(nullable) == num_nullable:
- break
- num_nullable = len(nullable)
- return nullable
-
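For example (rule names are illustrative), for the toy rules below the fixed-point loop returns {'opt_args', 'arglist'}: opt_args is directly empty, arglist is nullable because its entire right-hand side (just opt_args) is nullable, and args never is, since each of its productions contains the terminal ID.

#   opt_args : <empty>
#   arglist  : opt_args
#   args     : ID COMMA args
#            | ID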
- # -----------------------------------------------------------------------------
-    # find_nonterminal_transitions(C)
-    #
-    # Given a set of LR(0) items, this function finds all of the non-terminal
- # transitions. These are transitions in which a dot appears immediately before
- # a non-terminal. Returns a list of tuples of the form (state,N) where state
- # is the state number and N is the nonterminal symbol.
- #
- # The input C is the set of LR(0) items.
- # -----------------------------------------------------------------------------
-
- def find_nonterminal_transitions(self, C):
- trans = []
- for stateno, state in enumerate(C):
- for p in state:
- if p.lr_index < p.len - 1:
- t = (stateno, p.prod[p.lr_index+1])
- if t[1] in self.grammar.Nonterminals:
- if t not in trans:
- trans.append(t)
- return trans
-
- # -----------------------------------------------------------------------------
- # dr_relation()
- #
- # Computes the DR(p,A) relationships for non-terminal transitions. The input
- # is a tuple (state,N) where state is a number and N is a nonterminal symbol.
- #
- # Returns a list of terminals.
- # -----------------------------------------------------------------------------
-
- def dr_relation(self, C, trans, nullable):
- dr_set = {}
- state, N = trans
- terms = []
-
- g = self.lr0_goto(C[state], N)
- for p in g:
- if p.lr_index < p.len - 1:
- a = p.prod[p.lr_index+1]
- if a in self.grammar.Terminals:
- if a not in terms:
- terms.append(a)
-
- # This extra bit is to handle the start state
- if state == 0 and N == self.grammar.Productions[0].prod[0]:
- terms.append('$end')
-
- return terms
-
- # -----------------------------------------------------------------------------
- # reads_relation()
- #
- # Computes the READS() relation (p,A) READS (t,C).
- # -----------------------------------------------------------------------------
-
- def reads_relation(self, C, trans, empty):
- # Look for empty transitions
- rel = []
- state, N = trans
-
- g = self.lr0_goto(C[state], N)
- j = self.lr0_cidhash.get(id(g), -1)
- for p in g:
- if p.lr_index < p.len - 1:
- a = p.prod[p.lr_index + 1]
- if a in empty:
- rel.append((j, a))
-
- return rel
-
- # -----------------------------------------------------------------------------
- # compute_lookback_includes()
- #
- # Determines the lookback and includes relations
- #
- # LOOKBACK:
- #
- # This relation is determined by running the LR(0) state machine forward.
- # For example, starting with a production "N : . A B C", we run it forward
- # to obtain "N : A B C ." We then build a relationship between this final
- # state and the starting state. These relationships are stored in a dictionary
- # lookdict.
- #
- # INCLUDES:
- #
- # Computes the INCLUDE() relation (p,A) INCLUDES (p',B).
- #
- # This relation is used to determine non-terminal transitions that occur
- # inside of other non-terminal transition states. (p,A) INCLUDES (p', B)
- # if the following holds:
- #
- # B -> LAT, where T -> epsilon and p' -L-> p
- #
- # L is essentially a prefix (which may be empty), T is a suffix that must be
- # able to derive an empty string. State p' must lead to state p with the string L.
- #
- # -----------------------------------------------------------------------------
-
- def compute_lookback_includes(self, C, trans, nullable):
- lookdict = {} # Dictionary of lookback relations
- includedict = {} # Dictionary of include relations
-
- # Make a dictionary of non-terminal transitions
- dtrans = {}
- for t in trans:
- dtrans[t] = 1
-
- # Loop over all transitions and compute lookbacks and includes
- for state, N in trans:
- lookb = []
- includes = []
- for p in C[state]:
- if p.name != N:
- continue
-
- # Okay, we have a name match. We now follow the production all the way
- # through the state machine until we get the . on the right hand side
-
- lr_index = p.lr_index
- j = state
- while lr_index < p.len - 1:
- lr_index = lr_index + 1
- t = p.prod[lr_index]
-
- # Check to see if this symbol and state are a non-terminal transition
- if (j, t) in dtrans:
- # Yes. Okay, there is some chance that this is an includes relation
- # the only way to know for certain is whether the rest of the
- # production derives empty
-
- li = lr_index + 1
- while li < p.len:
- if p.prod[li] in self.grammar.Terminals:
- break # No forget it
- if p.prod[li] not in nullable:
- break
- li = li + 1
- else:
- # Appears to be a relation between (j,t) and (state,N)
- includes.append((j, t))
-
- g = self.lr0_goto(C[j], t) # Go to next set
- j = self.lr0_cidhash.get(id(g), -1) # Go to next state
-
- # When we get here, j is the final state, now we have to locate the production
- for r in C[j]:
- if r.name != p.name:
- continue
- if r.len != p.len:
- continue
- i = 0
-                    # This loop is comparing a production ". A B C" with "A B C ."
- while i < r.lr_index:
- if r.prod[i] != p.prod[i+1]:
- break
- i = i + 1
- else:
- lookb.append((j, r))
- for i in includes:
- if i not in includedict:
- includedict[i] = []
- includedict[i].append((state, N))
- lookdict[(state, N)] = lookb
-
- return lookdict, includedict
-
- # -----------------------------------------------------------------------------
- # compute_read_sets()
- #
- # Given a set of LR(0) items, this function computes the read sets.
- #
- # Inputs: C = Set of LR(0) items
- # ntrans = Set of nonterminal transitions
-    # nullable = Set of nullable non-terminals
- #
- # Returns a set containing the read sets
- # -----------------------------------------------------------------------------
-
- def compute_read_sets(self, C, ntrans, nullable):
- FP = lambda x: self.dr_relation(C, x, nullable)
- R = lambda x: self.reads_relation(C, x, nullable)
- F = digraph(ntrans, R, FP)
- return F
-
- # -----------------------------------------------------------------------------
- # compute_follow_sets()
- #
- # Given a set of LR(0) items, a set of non-terminal transitions, a readset,
- # and an include set, this function computes the follow sets
- #
- # Follow(p,A) = Read(p,A) U U {Follow(p',B) | (p,A) INCLUDES (p',B)}
- #
- # Inputs:
- # ntrans = Set of nonterminal transitions
- # readsets = Readset (previously computed)
- # inclsets = Include sets (previously computed)
- #
- # Returns a set containing the follow sets
- # -----------------------------------------------------------------------------
-
- def compute_follow_sets(self, ntrans, readsets, inclsets):
- FP = lambda x: readsets[x]
- R = lambda x: inclsets.get(x, [])
- F = digraph(ntrans, R, FP)
- return F
-
- # -----------------------------------------------------------------------------
- # add_lookaheads()
- #
- # Attaches the lookahead symbols to grammar rules.
- #
- # Inputs: lookbacks - Set of lookback relations
- # followset - Computed follow set
- #
- # This function directly attaches the lookaheads to productions contained
- # in the lookbacks set
- # -----------------------------------------------------------------------------
-
- def add_lookaheads(self, lookbacks, followset):
- for trans, lb in lookbacks.items():
- # Loop over productions in lookback
- for state, p in lb:
- if state not in p.lookaheads:
- p.lookaheads[state] = []
- f = followset.get(trans, [])
- for a in f:
- if a not in p.lookaheads[state]:
- p.lookaheads[state].append(a)
-
- # -----------------------------------------------------------------------------
- # add_lalr_lookaheads()
- #
- # This function does all of the work of adding lookahead information for use
- # with LALR parsing
- # -----------------------------------------------------------------------------
-
- def add_lalr_lookaheads(self, C):
- # Determine all of the nullable nonterminals
- nullable = self.compute_nullable_nonterminals()
-
- # Find all non-terminal transitions
- trans = self.find_nonterminal_transitions(C)
-
- # Compute read sets
- readsets = self.compute_read_sets(C, trans, nullable)
-
- # Compute lookback/includes relations
- lookd, included = self.compute_lookback_includes(C, trans, nullable)
-
- # Compute LALR FOLLOW sets
- followsets = self.compute_follow_sets(trans, readsets, included)
-
- # Add all of the lookaheads
- self.add_lookaheads(lookd, followsets)
-
- # -----------------------------------------------------------------------------
- # lr_parse_table()
- #
- # This function constructs the parse tables for SLR or LALR
- # -----------------------------------------------------------------------------
- def lr_parse_table(self):
- Productions = self.grammar.Productions
- Precedence = self.grammar.Precedence
- goto = self.lr_goto # Goto array
- action = self.lr_action # Action array
- log = self.log # Logger for output
-
- actionp = {} # Action production array (temporary)
-
- log.info('Parsing method: %s', self.lr_method)
-
- # Step 1: Construct C = { I0, I1, ... IN}, collection of LR(0) items
- # This determines the number of states
-
- C = self.lr0_items()
-
- if self.lr_method == 'LALR':
- self.add_lalr_lookaheads(C)
-
- # Build the parser table, state by state
- st = 0
- for I in C:
- # Loop over each production in I
- actlist = [] # List of actions
- st_action = {}
- st_actionp = {}
- st_goto = {}
- log.info('')
- log.info('state %d', st)
- log.info('')
- for p in I:
- log.info(' (%d) %s', p.number, p)
- log.info('')
-
- for p in I:
- if p.len == p.lr_index + 1:
- if p.name == "S'":
- # Start symbol. Accept!
- st_action['$end'] = 0
- st_actionp['$end'] = p
- else:
- # We are at the end of a production. Reduce!
- if self.lr_method == 'LALR':
- laheads = p.lookaheads[st]
- else:
- laheads = self.grammar.Follow[p.name]
- for a in laheads:
- actlist.append((a, p, 'reduce using rule %d (%s)' % (p.number, p)))
- r = st_action.get(a)
- if r is not None:
- # Whoa. Have a shift/reduce or reduce/reduce conflict
- if r > 0:
- # Need to decide on shift or reduce here
- # By default we favor shifting. Need to add
- # some precedence rules here.
-
- # Shift precedence comes from the token
- sprec, slevel = Precedence.get(a, ('right', 0))
-
- # Reduce precedence comes from rule being reduced (p)
- rprec, rlevel = Productions[p.number].prec
-
- if (slevel < rlevel) or ((slevel == rlevel) and (rprec == 'left')):
- # We really need to reduce here.
- st_action[a] = -p.number
- st_actionp[a] = p
- if not slevel and not rlevel:
- log.info(' ! shift/reduce conflict for %s resolved as reduce', a)
- self.sr_conflicts.append((st, a, 'reduce'))
- Productions[p.number].reduced += 1
- elif (slevel == rlevel) and (rprec == 'nonassoc'):
- st_action[a] = None
- else:
- # Hmmm. Guess we'll keep the shift
- if not rlevel:
- log.info(' ! shift/reduce conflict for %s resolved as shift', a)
- self.sr_conflicts.append((st, a, 'shift'))
- elif r < 0:
- # Reduce/reduce conflict. In this case, we favor the rule
- # that was defined first in the grammar file
- oldp = Productions[-r]
- pp = Productions[p.number]
- if oldp.line > pp.line:
- st_action[a] = -p.number
- st_actionp[a] = p
- chosenp, rejectp = pp, oldp
- Productions[p.number].reduced += 1
- Productions[oldp.number].reduced -= 1
- else:
- chosenp, rejectp = oldp, pp
- self.rr_conflicts.append((st, chosenp, rejectp))
- log.info(' ! reduce/reduce conflict for %s resolved using rule %d (%s)',
- a, st_actionp[a].number, st_actionp[a])
- else:
- raise LALRError('Unknown conflict in state %d' % st)
- else:
- st_action[a] = -p.number
- st_actionp[a] = p
- Productions[p.number].reduced += 1
- else:
- i = p.lr_index
- a = p.prod[i+1] # Get symbol right after the "."
- if a in self.grammar.Terminals:
- g = self.lr0_goto(I, a)
- j = self.lr0_cidhash.get(id(g), -1)
- if j >= 0:
- # We are in a shift state
- actlist.append((a, p, 'shift and go to state %d' % j))
- r = st_action.get(a)
- if r is not None:
- # Whoa have a shift/reduce or shift/shift conflict
- if r > 0:
- if r != j:
- raise LALRError('Shift/shift conflict in state %d' % st)
- elif r < 0:
- # Do a precedence check.
- # - if precedence of reduce rule is higher, we reduce.
- # - if precedence of reduce is same and left assoc, we reduce.
- # - otherwise we shift
-
- # Shift precedence comes from the token
- sprec, slevel = Precedence.get(a, ('right', 0))
-
- # Reduce precedence comes from the rule that could have been reduced
- rprec, rlevel = Productions[st_actionp[a].number].prec
-
- if (slevel > rlevel) or ((slevel == rlevel) and (rprec == 'right')):
- # We decide to shift here... highest precedence to shift
- Productions[st_actionp[a].number].reduced -= 1
- st_action[a] = j
- st_actionp[a] = p
- if not rlevel:
- log.info(' ! shift/reduce conflict for %s resolved as shift', a)
- self.sr_conflicts.append((st, a, 'shift'))
- elif (slevel == rlevel) and (rprec == 'nonassoc'):
- st_action[a] = None
- else:
- # Hmmm. Guess we'll keep the reduce
- if not slevel and not rlevel:
- log.info(' ! shift/reduce conflict for %s resolved as reduce', a)
- self.sr_conflicts.append((st, a, 'reduce'))
-
- else:
- raise LALRError('Unknown conflict in state %d' % st)
- else:
- st_action[a] = j
- st_actionp[a] = p
-
- # Print the actions associated with each terminal
- _actprint = {}
- for a, p, m in actlist:
- if a in st_action:
- if p is st_actionp[a]:
- log.info(' %-15s %s', a, m)
- _actprint[(a, m)] = 1
- log.info('')
- # Print the actions that were not used. (debugging)
- not_used = 0
- for a, p, m in actlist:
- if a in st_action:
- if p is not st_actionp[a]:
- if not (a, m) in _actprint:
- log.debug(' ! %-15s [ %s ]', a, m)
- not_used = 1
- _actprint[(a, m)] = 1
- if not_used:
- log.debug('')
-
- # Construct the goto table for this state
-
- nkeys = {}
- for ii in I:
- for s in ii.usyms:
- if s in self.grammar.Nonterminals:
- nkeys[s] = None
- for n in nkeys:
- g = self.lr0_goto(I, n)
- j = self.lr0_cidhash.get(id(g), -1)
- if j >= 0:
- st_goto[n] = j
- log.info(' %-30s shift and go to state %d', n, j)
-
- action[st] = st_action
- actionp[st] = st_actionp
- goto[st] = st_goto
- st += 1
-
- # -----------------------------------------------------------------------------
-    # write_table()
-    #
-    # This function writes the LR parsing tables to a Python module file
- # -----------------------------------------------------------------------------
-
- def write_table(self, tabmodule, outputdir='', signature=''):
- if isinstance(tabmodule, types.ModuleType):
- raise IOError("Won't overwrite existing tabmodule")
-
- basemodulename = tabmodule.split('.')[-1]
- filename = os.path.join(outputdir, basemodulename) + '.py'
- try:
- f = open(filename, 'w')
-
- f.write('''
-# %s
-# This file is automatically generated. Do not edit.
-_tabversion = %r
-
-_lr_method = %r
-
-_lr_signature = %r
- ''' % (os.path.basename(filename), __tabversion__, self.lr_method, signature))
-
- # Change smaller to 0 to go back to original tables
- smaller = 1
-
- # Factor out names to try and make smaller
- if smaller:
- items = {}
-
- for s, nd in self.lr_action.items():
- for name, v in nd.items():
- i = items.get(name)
- if not i:
- i = ([], [])
- items[name] = i
- i[0].append(s)
- i[1].append(v)
-
- f.write('\n_lr_action_items = {')
- for k, v in items.items():
- f.write('%r:([' % k)
- for i in v[0]:
- f.write('%r,' % i)
- f.write('],[')
- for i in v[1]:
- f.write('%r,' % i)
-
- f.write(']),')
- f.write('}\n')
-
- f.write('''
-_lr_action = {}
-for _k, _v in _lr_action_items.items():
- for _x,_y in zip(_v[0],_v[1]):
- if not _x in _lr_action: _lr_action[_x] = {}
- _lr_action[_x][_k] = _y
-del _lr_action_items
-''')
-
- else:
- f.write('\n_lr_action = { ')
- for k, v in self.lr_action.items():
- f.write('(%r,%r):%r,' % (k[0], k[1], v))
- f.write('}\n')
-
- if smaller:
- # Factor out names to try and make smaller
- items = {}
-
- for s, nd in self.lr_goto.items():
- for name, v in nd.items():
- i = items.get(name)
- if not i:
- i = ([], [])
- items[name] = i
- i[0].append(s)
- i[1].append(v)
-
- f.write('\n_lr_goto_items = {')
- for k, v in items.items():
- f.write('%r:([' % k)
- for i in v[0]:
- f.write('%r,' % i)
- f.write('],[')
- for i in v[1]:
- f.write('%r,' % i)
-
- f.write(']),')
- f.write('}\n')
-
- f.write('''
-_lr_goto = {}
-for _k, _v in _lr_goto_items.items():
- for _x, _y in zip(_v[0], _v[1]):
- if not _x in _lr_goto: _lr_goto[_x] = {}
- _lr_goto[_x][_k] = _y
-del _lr_goto_items
-''')
- else:
- f.write('\n_lr_goto = { ')
- for k, v in self.lr_goto.items():
- f.write('(%r,%r):%r,' % (k[0], k[1], v))
- f.write('}\n')
-
- # Write production table
- f.write('_lr_productions = [\n')
- for p in self.lr_productions:
- if p.func:
- f.write(' (%r,%r,%d,%r,%r,%d),\n' % (p.str, p.name, p.len,
- p.func, os.path.basename(p.file), p.line))
- else:
- f.write(' (%r,%r,%d,None,None,None),\n' % (str(p), p.name, p.len))
- f.write(']\n')
- f.close()
-
- except IOError as e:
- raise
-
-
- # -----------------------------------------------------------------------------
- # pickle_table()
- #
-    # This function pickles the LR parsing tables to the supplied file name
- # -----------------------------------------------------------------------------
-
- def pickle_table(self, filename, signature=''):
- try:
- import cPickle as pickle
- except ImportError:
- import pickle
- with open(filename, 'wb') as outf:
- pickle.dump(__tabversion__, outf, pickle_protocol)
- pickle.dump(self.lr_method, outf, pickle_protocol)
- pickle.dump(signature, outf, pickle_protocol)
- pickle.dump(self.lr_action, outf, pickle_protocol)
- pickle.dump(self.lr_goto, outf, pickle_protocol)
-
- outp = []
- for p in self.lr_productions:
- if p.func:
- outp.append((p.str, p.name, p.len, p.func, os.path.basename(p.file), p.line))
- else:
- outp.append((str(p), p.name, p.len, None, None, None))
- pickle.dump(outp, outf, pickle_protocol)
-
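The pickle written above is a flat sequence of top-level objects, loaded back in the same order by read_pickle() earlier in this file; a minimal sketch of reading one by hand (the file name is hypothetical):

import pickle

with open('parser.pickle', 'rb') as f:
    tabversion  = pickle.load(f)    # must equal __tabversion__
    lr_method   = pickle.load(f)    # 'LALR' or 'SLR'
    signature   = pickle.load(f)    # grammar signature used for staleness checks
    lr_action   = pickle.load(f)
    lr_goto     = pickle.load(f)
    productions = pickle.load(f)    # list of MiniProduction argument tuples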
-# -----------------------------------------------------------------------------
-# === INTROSPECTION ===
-#
-# The following functions and classes are used to implement the PLY
-# introspection features followed by the yacc() function itself.
-# -----------------------------------------------------------------------------
-
-# -----------------------------------------------------------------------------
-# get_caller_module_dict()
-#
-# This function returns a dictionary containing all of the symbols defined within
-# a caller further down the call stack. This is used to get the environment
-# associated with the yacc() call if none was provided.
-# -----------------------------------------------------------------------------
-
-def get_caller_module_dict(levels):
- f = sys._getframe(levels)
- ldict = f.f_globals.copy()
- if f.f_globals != f.f_locals:
- ldict.update(f.f_locals)
- return ldict
-
-# -----------------------------------------------------------------------------
-# parse_grammar()
-#
-# This takes a raw grammar rule string and parses it into production data
-# -----------------------------------------------------------------------------
-def parse_grammar(doc, file, line):
- grammar = []
- # Split the doc string into lines
- pstrings = doc.splitlines()
- lastp = None
- dline = line
- for ps in pstrings:
- dline += 1
- p = ps.split()
- if not p:
- continue
- try:
- if p[0] == '|':
- # This is a continuation of a previous rule
- if not lastp:
- raise SyntaxError("%s:%d: Misplaced '|'" % (file, dline))
- prodname = lastp
- syms = p[1:]
- else:
- prodname = p[0]
- lastp = prodname
- syms = p[2:]
- assign = p[1]
- if assign != ':' and assign != '::=':
- raise SyntaxError("%s:%d: Syntax error. Expected ':'" % (file, dline))
-
- grammar.append((file, dline, prodname, syms))
- except SyntaxError:
- raise
- except Exception:
- raise SyntaxError('%s:%d: Syntax error in rule %r' % (file, dline, ps.strip()))
-
- return grammar
-
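To make the docstring convention concrete, here is a sketch of what parse_grammar() returns for a typical two-alternative rule; the file name and line numbers are illustrative.

doc = '''expression : expression PLUS term
                    | term'''
# parse_grammar(doc, 'calc.py', 41) yields one tuple per alternative:
#   [('calc.py', 42, 'expression', ['expression', 'PLUS', 'term']),
#    ('calc.py', 43, 'expression', ['term'])]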
-# -----------------------------------------------------------------------------
-# ParserReflect()
-#
-# This class represents information extracted for building a parser including
-# start symbol, error function, tokens, precedence list, action functions,
-# etc.
-# -----------------------------------------------------------------------------
-class ParserReflect(object):
- def __init__(self, pdict, log=None):
- self.pdict = pdict
- self.start = None
- self.error_func = None
- self.tokens = None
- self.modules = set()
- self.grammar = []
- self.error = False
-
- if log is None:
- self.log = PlyLogger(sys.stderr)
- else:
- self.log = log
-
- # Get all of the basic information
- def get_all(self):
- self.get_start()
- self.get_error_func()
- self.get_tokens()
- self.get_precedence()
- self.get_pfunctions()
-
- # Validate all of the information
- def validate_all(self):
- self.validate_start()
- self.validate_error_func()
- self.validate_tokens()
- self.validate_precedence()
- self.validate_pfunctions()
- self.validate_modules()
- return self.error
-
- # Compute a signature over the grammar
- def signature(self):
- parts = []
- try:
- if self.start:
- parts.append(self.start)
- if self.prec:
- parts.append(''.join([''.join(p) for p in self.prec]))
- if self.tokens:
- parts.append(' '.join(self.tokens))
- for f in self.pfuncs:
- if f[3]:
- parts.append(f[3])
- except (TypeError, ValueError):
- pass
- return ''.join(parts)
-
- # -----------------------------------------------------------------------------
- # validate_modules()
- #
- # This method checks to see if there are duplicated p_rulename() functions
- # in the parser module file. Without this function, it is really easy for
- # users to make mistakes by cutting and pasting code fragments (and it's a real
- # bugger to try and figure out why the resulting parser doesn't work). Therefore,
- # we just do a little regular expression pattern matching of def statements
- # to try and detect duplicates.
- # -----------------------------------------------------------------------------
-
- def validate_modules(self):
- # Match def p_funcname(
- fre = re.compile(r'\s*def\s+(p_[a-zA-Z_0-9]*)\(')
-
- for module in self.modules:
- try:
- lines, linen = inspect.getsourcelines(module)
- except IOError:
- continue
-
- counthash = {}
- for linen, line in enumerate(lines):
- linen += 1
- m = fre.match(line)
- if m:
- name = m.group(1)
- prev = counthash.get(name)
- if not prev:
- counthash[name] = linen
- else:
- filename = inspect.getsourcefile(module)
- self.log.warning('%s:%d: Function %s redefined. Previously defined on line %d',
- filename, linen, name, prev)
-
- # Get the start symbol
- def get_start(self):
- self.start = self.pdict.get('start')
-
- # Validate the start symbol
- def validate_start(self):
- if self.start is not None:
- if not isinstance(self.start, string_types):
- self.log.error("'start' must be a string")
-
- # Look for error handler
- def get_error_func(self):
- self.error_func = self.pdict.get('p_error')
-
- # Validate the error function
- def validate_error_func(self):
- if self.error_func:
- if isinstance(self.error_func, types.FunctionType):
- ismethod = 0
- elif isinstance(self.error_func, types.MethodType):
- ismethod = 1
- else:
- self.log.error("'p_error' defined, but is not a function or method")
- self.error = True
- return
-
- eline = self.error_func.__code__.co_firstlineno
- efile = self.error_func.__code__.co_filename
- module = inspect.getmodule(self.error_func)
- self.modules.add(module)
-
- argcount = self.error_func.__code__.co_argcount - ismethod
- if argcount != 1:
- self.log.error('%s:%d: p_error() requires 1 argument', efile, eline)
- self.error = True
-
- # Get the tokens map
- def get_tokens(self):
- tokens = self.pdict.get('tokens')
- if not tokens:
- self.log.error('No token list is defined')
- self.error = True
- return
-
- if not isinstance(tokens, (list, tuple)):
- self.log.error('tokens must be a list or tuple')
- self.error = True
- return
-
- if not tokens:
- self.log.error('tokens is empty')
- self.error = True
- return
-
- self.tokens = tokens
-
- # Validate the tokens
- def validate_tokens(self):
- # Validate the tokens.
- if 'error' in self.tokens:
- self.log.error("Illegal token name 'error'. Is a reserved word")
- self.error = True
- return
-
- terminals = set()
- for n in self.tokens:
- if n in terminals:
- self.log.warning('Token %r multiply defined', n)
- terminals.add(n)
-
- # Get the precedence map (if any)
- def get_precedence(self):
- self.prec = self.pdict.get('precedence')
-
- # Validate and parse the precedence map
- def validate_precedence(self):
- preclist = []
- if self.prec:
- if not isinstance(self.prec, (list, tuple)):
- self.log.error('precedence must be a list or tuple')
- self.error = True
- return
- for level, p in enumerate(self.prec):
- if not isinstance(p, (list, tuple)):
- self.log.error('Bad precedence table')
- self.error = True
- return
-
- if len(p) < 2:
- self.log.error('Malformed precedence entry %s. Must be (assoc, term, ..., term)', p)
- self.error = True
- return
- assoc = p[0]
- if not isinstance(assoc, string_types):
- self.log.error('precedence associativity must be a string')
- self.error = True
- return
- for term in p[1:]:
- if not isinstance(term, string_types):
- self.log.error('precedence items must be strings')
- self.error = True
- return
- preclist.append((term, assoc, level+1))
- self.preclist = preclist
-
- # Get all p_functions from the grammar
- def get_pfunctions(self):
- p_functions = []
- for name, item in self.pdict.items():
- if not name.startswith('p_') or name == 'p_error':
- continue
- if isinstance(item, (types.FunctionType, types.MethodType)):
- line = getattr(item, 'co_firstlineno', item.__code__.co_firstlineno)
- module = inspect.getmodule(item)
- p_functions.append((line, module, name, item.__doc__))
-
- # Sort all of the actions by line number; make sure to stringify
- # modules to make them sortable, since `line` may not uniquely sort all
- # p functions
- p_functions.sort(key=lambda p_function: (
- p_function[0],
- str(p_function[1]),
- p_function[2],
- p_function[3]))
- self.pfuncs = p_functions
-
- # Validate all of the p_functions
- def validate_pfunctions(self):
- grammar = []
- # Check for non-empty symbols
- if len(self.pfuncs) == 0:
- self.log.error('no rules of the form p_rulename are defined')
- self.error = True
- return
-
- for line, module, name, doc in self.pfuncs:
- file = inspect.getsourcefile(module)
- func = self.pdict[name]
- if isinstance(func, types.MethodType):
- reqargs = 2
- else:
- reqargs = 1
- if func.__code__.co_argcount > reqargs:
- self.log.error('%s:%d: Rule %r has too many arguments', file, line, func.__name__)
- self.error = True
- elif func.__code__.co_argcount < reqargs:
- self.log.error('%s:%d: Rule %r requires an argument', file, line, func.__name__)
- self.error = True
- elif not func.__doc__:
- self.log.warning('%s:%d: No documentation string specified in function %r (ignored)',
- file, line, func.__name__)
- else:
- try:
- parsed_g = parse_grammar(doc, file, line)
- for g in parsed_g:
- grammar.append((name, g))
- except SyntaxError as e:
- self.log.error(str(e))
- self.error = True
-
- # Looks like a valid grammar rule
- # Mark the file in which defined.
- self.modules.add(module)
-
- # Secondary validation step that looks for p_ definitions that are not functions
- # or functions that look like they might be grammar rules.
-
- for n, v in self.pdict.items():
- if n.startswith('p_') and isinstance(v, (types.FunctionType, types.MethodType)):
- continue
- if n.startswith('t_'):
- continue
- if n.startswith('p_') and n != 'p_error':
- self.log.warning('%r not defined as a function', n)
- if ((isinstance(v, types.FunctionType) and v.__code__.co_argcount == 1) or
- (isinstance(v, types.MethodType) and v.__func__.__code__.co_argcount == 2)):
- if v.__doc__:
- try:
- doc = v.__doc__.split(' ')
- if doc[1] == ':':
- self.log.warning('%s:%d: Possible grammar rule %r defined without p_ prefix',
- v.__code__.co_filename, v.__code__.co_firstlineno, n)
- except IndexError:
- pass
-
- self.grammar = grammar
-
-# -----------------------------------------------------------------------------
-# yacc(module)
-#
-# Build a parser
-# -----------------------------------------------------------------------------
-
-def yacc(method='LALR', debug=yaccdebug, module=None, tabmodule=tab_module, start=None,
- check_recursion=True, optimize=False, write_tables=True, debugfile=debug_file,
- outputdir=None, debuglog=None, errorlog=None, picklefile=None):
-
- if tabmodule is None:
- tabmodule = tab_module
-
- # Reference to the parsing method of the last built parser
- global parse
-
- # If pickling is enabled, table files are not created
- if picklefile:
- write_tables = 0
-
- if errorlog is None:
- errorlog = PlyLogger(sys.stderr)
-
- # Get the module dictionary used for the parser
- if module:
- _items = [(k, getattr(module, k)) for k in dir(module)]
- pdict = dict(_items)
- # If no __file__ attribute is available, try to obtain it from the __module__ instead
- if '__file__' not in pdict:
- pdict['__file__'] = sys.modules[pdict['__module__']].__file__
- else:
- pdict = get_caller_module_dict(2)
-
- if outputdir is None:
- # If no output directory is set, the location of the output files
- # is determined according to the following rules:
- # - If tabmodule specifies a package, files go into that package directory
- # - Otherwise, files go in the same directory as the specifying module
- if isinstance(tabmodule, types.ModuleType):
- srcfile = tabmodule.__file__
- else:
- if '.' not in tabmodule:
- srcfile = pdict['__file__']
- else:
- parts = tabmodule.split('.')
- pkgname = '.'.join(parts[:-1])
- exec('import %s' % pkgname)
- srcfile = getattr(sys.modules[pkgname], '__file__', '')
- outputdir = os.path.dirname(srcfile)
-
-    # Determine if the module is part of a package or not.
- # If so, fix the tabmodule setting so that tables load correctly
- pkg = pdict.get('__package__')
- if pkg and isinstance(tabmodule, str):
- if '.' not in tabmodule:
- tabmodule = pkg + '.' + tabmodule
-
-
-
- # Set start symbol if it's specified directly using an argument
- if start is not None:
- pdict['start'] = start
-
- # Collect parser information from the dictionary
- pinfo = ParserReflect(pdict, log=errorlog)
- pinfo.get_all()
-
- if pinfo.error:
- raise YaccError('Unable to build parser')
-
- # Check signature against table files (if any)
- signature = pinfo.signature()
-
- # Read the tables
- try:
- lr = LRTable()
- if picklefile:
- read_signature = lr.read_pickle(picklefile)
- else:
- read_signature = lr.read_table(tabmodule)
- if optimize or (read_signature == signature):
- try:
- lr.bind_callables(pinfo.pdict)
- parser = LRParser(lr, pinfo.error_func)
- parse = parser.parse
- return parser
- except Exception as e:
- errorlog.warning('There was a problem loading the table file: %r', e)
- except VersionError as e:
- errorlog.warning(str(e))
- except ImportError:
- pass
-
- if debuglog is None:
- if debug:
- try:
- debuglog = PlyLogger(open(os.path.join(outputdir, debugfile), 'w'))
- except IOError as e:
- errorlog.warning("Couldn't open %r. %s" % (debugfile, e))
- debuglog = NullLogger()
- else:
- debuglog = NullLogger()
-
- debuglog.info('Created by PLY version %s (http://www.dabeaz.com/ply)', __version__)
-
- errors = False
-
- # Validate the parser information
- if pinfo.validate_all():
- raise YaccError('Unable to build parser')
-
- if not pinfo.error_func:
- errorlog.warning('no p_error() function is defined')
-
- # Create a grammar object
- grammar = Grammar(pinfo.tokens)
-
- # Set precedence level for terminals
- for term, assoc, level in pinfo.preclist:
- try:
- grammar.set_precedence(term, assoc, level)
- except GrammarError as e:
- errorlog.warning('%s', e)
-
- # Add productions to the grammar
- for funcname, gram in pinfo.grammar:
- file, line, prodname, syms = gram
- try:
- grammar.add_production(prodname, syms, funcname, file, line)
- except GrammarError as e:
- errorlog.error('%s', e)
- errors = True
-
- # Set the grammar start symbols
- try:
- if start is None:
- grammar.set_start(pinfo.start)
- else:
- grammar.set_start(start)
- except GrammarError as e:
- errorlog.error(str(e))
- errors = True
-
- if errors:
- raise YaccError('Unable to build parser')
-
- # Verify the grammar structure
- undefined_symbols = grammar.undefined_symbols()
- for sym, prod in undefined_symbols:
- errorlog.error('%s:%d: Symbol %r used, but not defined as a token or a rule', prod.file, prod.line, sym)
- errors = True
-
- unused_terminals = grammar.unused_terminals()
- if unused_terminals:
- debuglog.info('')
- debuglog.info('Unused terminals:')
- debuglog.info('')
- for term in unused_terminals:
- errorlog.warning('Token %r defined, but not used', term)
- debuglog.info(' %s', term)
-
- # Print out all productions to the debug log
- if debug:
- debuglog.info('')
- debuglog.info('Grammar')
- debuglog.info('')
- for n, p in enumerate(grammar.Productions):
- debuglog.info('Rule %-5d %s', n, p)
-
- # Find unused non-terminals
- unused_rules = grammar.unused_rules()
- for prod in unused_rules:
- errorlog.warning('%s:%d: Rule %r defined, but not used', prod.file, prod.line, prod.name)
-
- if len(unused_terminals) == 1:
- errorlog.warning('There is 1 unused token')
- if len(unused_terminals) > 1:
- errorlog.warning('There are %d unused tokens', len(unused_terminals))
-
- if len(unused_rules) == 1:
- errorlog.warning('There is 1 unused rule')
- if len(unused_rules) > 1:
- errorlog.warning('There are %d unused rules', len(unused_rules))
-
- if debug:
- debuglog.info('')
- debuglog.info('Terminals, with rules where they appear')
- debuglog.info('')
- terms = list(grammar.Terminals)
- terms.sort()
- for term in terms:
- debuglog.info('%-20s : %s', term, ' '.join([str(s) for s in grammar.Terminals[term]]))
-
- debuglog.info('')
- debuglog.info('Nonterminals, with rules where they appear')
- debuglog.info('')
- nonterms = list(grammar.Nonterminals)
- nonterms.sort()
- for nonterm in nonterms:
- debuglog.info('%-20s : %s', nonterm, ' '.join([str(s) for s in grammar.Nonterminals[nonterm]]))
- debuglog.info('')
-
- if check_recursion:
- unreachable = grammar.find_unreachable()
- for u in unreachable:
- errorlog.warning('Symbol %r is unreachable', u)
-
- infinite = grammar.infinite_cycles()
- for inf in infinite:
- errorlog.error('Infinite recursion detected for symbol %r', inf)
- errors = True
-
- unused_prec = grammar.unused_precedence()
- for term, assoc in unused_prec:
- errorlog.error('Precedence rule %r defined for unknown symbol %r', assoc, term)
- errors = True
-
- if errors:
- raise YaccError('Unable to build parser')
-
- # Run the LRGeneratedTable on the grammar
- if debug:
- errorlog.debug('Generating %s tables', method)
-
- lr = LRGeneratedTable(grammar, method, debuglog)
-
- if debug:
- num_sr = len(lr.sr_conflicts)
-
- # Report shift/reduce and reduce/reduce conflicts
- if num_sr == 1:
- errorlog.warning('1 shift/reduce conflict')
- elif num_sr > 1:
- errorlog.warning('%d shift/reduce conflicts', num_sr)
-
- num_rr = len(lr.rr_conflicts)
- if num_rr == 1:
- errorlog.warning('1 reduce/reduce conflict')
- elif num_rr > 1:
- errorlog.warning('%d reduce/reduce conflicts', num_rr)
-
- # Write out conflicts to the output file
- if debug and (lr.sr_conflicts or lr.rr_conflicts):
- debuglog.warning('')
- debuglog.warning('Conflicts:')
- debuglog.warning('')
-
- for state, tok, resolution in lr.sr_conflicts:
- debuglog.warning('shift/reduce conflict for %s in state %d resolved as %s', tok, state, resolution)
-
- already_reported = set()
- for state, rule, rejected in lr.rr_conflicts:
- if (state, id(rule), id(rejected)) in already_reported:
- continue
- debuglog.warning('reduce/reduce conflict in state %d resolved using rule (%s)', state, rule)
- debuglog.warning('rejected rule (%s) in state %d', rejected, state)
- errorlog.warning('reduce/reduce conflict in state %d resolved using rule (%s)', state, rule)
- errorlog.warning('rejected rule (%s) in state %d', rejected, state)
- already_reported.add((state, id(rule), id(rejected)))
-
- warned_never = []
- for state, rule, rejected in lr.rr_conflicts:
- if not rejected.reduced and (rejected not in warned_never):
- debuglog.warning('Rule (%s) is never reduced', rejected)
- errorlog.warning('Rule (%s) is never reduced', rejected)
- warned_never.append(rejected)
-
- # Write the table file if requested
- if write_tables:
- try:
- lr.write_table(tabmodule, outputdir, signature)
- except IOError as e:
- errorlog.warning("Couldn't create %r. %s" % (tabmodule, e))
-
- # Write a pickled version of the tables
- if picklefile:
- try:
- lr.pickle_table(picklefile, signature)
- except IOError as e:
- errorlog.warning("Couldn't create %r. %s" % (picklefile, e))
-
- # Build the parser
- lr.bind_callables(pinfo.pdict)
- parser = LRParser(lr, pinfo.error_func)
-
- parse = parser.parse
- return parser
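For context, this is a minimal sketch of how the public yacc() entry point above is normally driven. The calclex lexer module, its token values, and the toy grammar are hypothetical, and in this vendored tree the import would go through pycparser.ply rather than a standalone ply package.

import operator
import ply.yacc as yacc                 # here: from pycparser.ply import yacc
from calclex import tokens, lexer      # hypothetical lexer module exposing `tokens` and a lexer

precedence = (
    ('left', 'PLUS', 'MINUS'),
    ('left', 'TIMES', 'DIVIDE'),
)

def p_expression_binop(p):
    '''expression : expression PLUS expression
                  | expression MINUS expression
                  | expression TIMES expression
                  | expression DIVIDE expression'''
    # assumes the lexer stores the operator character as the token value
    ops = {'+': operator.add, '-': operator.sub,
           '*': operator.mul, '/': operator.truediv}
    p[0] = ops[p[2]](p[1], p[3])

def p_expression_number(p):
    'expression : NUMBER'
    p[0] = p[1]

def p_error(p):
    print('Syntax error at %r' % (p.value if p else 'EOF'))

parser = yacc.yacc()                    # builds or reloads the LALR tables via the code above
# result = parser.parse('2 * 3 + 4', lexer=lexer)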
diff --git a/functions/source/GitPullS3/pycparser/ply/ygen.py b/functions/source/GitPullS3/pycparser/ply/ygen.py
deleted file mode 100644
index acf5ca1..0000000
--- a/functions/source/GitPullS3/pycparser/ply/ygen.py
+++ /dev/null
@@ -1,74 +0,0 @@
-# ply: ygen.py
-#
-# This is a support program that auto-generates different versions of the YACC parsing
-# function with different features removed for performance reasons.
-#
-# Users should edit the method LRParser.parsedebug() in yacc.py. The source code
-# for that method is then used to create the other methods. See the comments in
-# yacc.py for further details.
-
-import os.path
-import shutil
-
-def get_source_range(lines, tag):
- srclines = enumerate(lines)
- start_tag = '#--! %s-start' % tag
- end_tag = '#--! %s-end' % tag
-
- for start_index, line in srclines:
- if line.strip().startswith(start_tag):
- break
-
- for end_index, line in srclines:
- if line.strip().endswith(end_tag):
- break
-
- return (start_index + 1, end_index)
-
-def filter_section(lines, tag):
- filtered_lines = []
- include = True
- tag_text = '#--! %s' % tag
- for line in lines:
- if line.strip().startswith(tag_text):
- include = not include
- elif include:
- filtered_lines.append(line)
- return filtered_lines
-
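A small illustration of the tag filtering above; the source lines are made up but follow the '#--! <tag>' marker convention that yacc.py uses.

src = [
    "    #--! DEBUG\n",
    "    debuglog.info('state %d', state)\n",
    "    #--! DEBUG\n",
    "    result = do_reduce()\n",
]
# filter_section(src, 'DEBUG') toggles inclusion at each marker line and drops
# the marked region, returning ["    result = do_reduce()\n"]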
-def main():
- dirname = os.path.dirname(__file__)
- shutil.copy2(os.path.join(dirname, 'yacc.py'), os.path.join(dirname, 'yacc.py.bak'))
- with open(os.path.join(dirname, 'yacc.py'), 'r') as f:
- lines = f.readlines()
-
- parse_start, parse_end = get_source_range(lines, 'parsedebug')
- parseopt_start, parseopt_end = get_source_range(lines, 'parseopt')
- parseopt_notrack_start, parseopt_notrack_end = get_source_range(lines, 'parseopt-notrack')
-
- # Get the original source
- orig_lines = lines[parse_start:parse_end]
-
- # Filter the DEBUG sections out
- parseopt_lines = filter_section(orig_lines, 'DEBUG')
-
- # Filter the TRACKING sections out
- parseopt_notrack_lines = filter_section(parseopt_lines, 'TRACKING')
-
- # Replace the parser source sections with updated versions
- lines[parseopt_notrack_start:parseopt_notrack_end] = parseopt_notrack_lines
- lines[parseopt_start:parseopt_end] = parseopt_lines
-
- lines = [line.rstrip()+'\n' for line in lines]
- with open(os.path.join(dirname, 'yacc.py'), 'w') as f:
- f.writelines(lines)
-
- print('Updated yacc.py')
-
-if __name__ == '__main__':
- main()
-
-
-
-
-
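The deleted ygen.py above regenerates the stripped-down parse methods by locating `#--! <tag>-start`/`#--! <tag>-end` ranges and then dropping the `#--! DEBUG` and `#--! TRACKING` sections. The snippet below is a self-contained sketch of that filtering: the function body mirrors the deleted filter_section(), while the sample source lines are invented purely for illustration.

```python
# Illustrative only: mirrors the deleted filter_section(); the sample "source"
# lines are invented to show how "#--! DEBUG" marker lines toggle the filter.
def filter_section(lines, tag):
    filtered_lines = []
    include = True
    tag_text = '#--! %s' % tag
    for line in lines:
        if line.strip().startswith(tag_text):
            include = not include   # every marker line flips inclusion
        elif include:
            filtered_lines.append(line)
    return filtered_lines

sample = [
    'result = compute()',
    '#--! DEBUG-start',
    "print('debugging:', result)",
    '#--! DEBUG-end',
    'return result',
]
print(filter_section(sample, 'DEBUG'))
# -> ['result = compute()', 'return result']
```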
diff --git a/functions/source/GitPullS3/pycparser/plyparser.py b/functions/source/GitPullS3/pycparser/plyparser.py
deleted file mode 100644
index af91922..0000000
--- a/functions/source/GitPullS3/pycparser/plyparser.py
+++ /dev/null
@@ -1,116 +0,0 @@
-#-----------------------------------------------------------------
-# plyparser.py
-#
-# PLYParser class and other utilities for simplifying programming
-# parsers with PLY
-#
-# Eli Bendersky [http://eli.thegreenplace.net]
-# License: BSD
-#-----------------------------------------------------------------
-
-
-class Coord(object):
- """ Coordinates of a syntactic element. Consists of:
- - File name
- - Line number
- - (optional) column number, for the Lexer
- """
- __slots__ = ('file', 'line', 'column', '__weakref__')
- def __init__(self, file, line, column=None):
- self.file = file
- self.line = line
- self.column = column
-
- def __str__(self):
- str = "%s:%s" % (self.file, self.line)
- if self.column: str += ":%s" % self.column
- return str
-
-
-class ParseError(Exception): pass
-
-
-class PLYParser(object):
- def _create_opt_rule(self, rulename):
- """ Given a rule name, creates an optional ply.yacc rule
- for it. The name of the optional rule is
-            <rulename>_opt
- """
- optname = rulename + '_opt'
-
- def optrule(self, p):
- p[0] = p[1]
-
- optrule.__doc__ = '%s : empty\n| %s' % (optname, rulename)
- optrule.__name__ = 'p_%s' % optname
- setattr(self.__class__, optrule.__name__, optrule)
-
- def _coord(self, lineno, column=None):
- return Coord(
- file=self.clex.filename,
- line=lineno,
- column=column)
-
- def _token_coord(self, p, token_idx):
-        """ Returns the coordinates for the YaccProduction object 'p' indexed
- with 'token_idx'. The coordinate includes the 'lineno' and
- 'column'. Both follow the lex semantic, starting from 1.
- """
- last_cr = p.lexer.lexer.lexdata.rfind('\n', 0, p.lexpos(token_idx))
- if last_cr < 0:
- last_cr = -1
- column = (p.lexpos(token_idx) - (last_cr))
- return self._coord(p.lineno(token_idx), column)
-
- def _parse_error(self, msg, coord):
- raise ParseError("%s: %s" % (coord, msg))
-
-
-def parameterized(*params):
- """ Decorator to create parameterized rules.
-
- Parameterized rule methods must be named starting with 'p_' and contain
- 'xxx', and their docstrings may contain 'xxx' and 'yyy'. These will be
- replaced by the given parameter tuples. For example, ``p_xxx_rule()`` with
- docstring 'xxx_rule : yyy' when decorated with
- ``@parameterized(('id', 'ID'))`` produces ``p_id_rule()`` with the docstring
- 'id_rule : ID'. Using multiple tuples produces multiple rules.
- """
- def decorate(rule_func):
- rule_func._params = params
- return rule_func
- return decorate
-
-
-def template(cls):
- """ Class decorator to generate rules from parameterized rule templates.
-
- See `parameterized` for more information on parameterized rules.
- """
- for attr_name in dir(cls):
- if attr_name.startswith('p_'):
- method = getattr(cls, attr_name)
- if hasattr(method, '_params'):
- delattr(cls, attr_name) # Remove template method
- _create_param_rules(cls, method)
- return cls
-
-
-def _create_param_rules(cls, func):
- """ Create ply.yacc rules based on a parameterized rule function
-
- Generates new methods (one per each pair of parameters) based on the
- template rule function `func`, and attaches them to `cls`. The rule
- function's parameters must be accessible via its `_params` attribute.
- """
- for xxx, yyy in func._params:
- # Use the template method's body for each new method
- def param_rule(self, p):
- func(self, p)
-
- # Substitute in the params for the grammar rule and function name
- param_rule.__doc__ = func.__doc__.replace('xxx', xxx).replace('yyy', yyy)
- param_rule.__name__ = func.__name__.replace('xxx', xxx)
-
- # Attach the new method to the class
- setattr(cls, param_rule.__name__, param_rule)
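To make the parameterized-rule machinery in the deleted plyparser.py above concrete, the following standalone sketch shows what `@parameterized` plus `@template` expand to. The decorator and helper definitions simply mirror the deleted code so the example runs without the module; the `Demo` class and its token names are invented for illustration.

```python
# Illustrative only: the decorators below mirror the deleted plyparser.py
# definitions so this snippet runs on its own; Demo is a made-up class.
def parameterized(*params):
    """ Mark a p_xxx_... template method with the parameter tuples to expand. """
    def decorate(rule_func):
        rule_func._params = params
        return rule_func
    return decorate

def _create_param_rules(cls, func):
    """ Attach one concrete rule method per (xxx, yyy) pair. """
    for xxx, yyy in func._params:
        def param_rule(self, p):
            func(self, p)
        param_rule.__doc__ = func.__doc__.replace('xxx', xxx).replace('yyy', yyy)
        param_rule.__name__ = func.__name__.replace('xxx', xxx)
        setattr(cls, param_rule.__name__, param_rule)

def template(cls):
    """ Expand every parameterized template method found on cls. """
    for attr_name in dir(cls):
        if attr_name.startswith('p_'):
            method = getattr(cls, attr_name)
            if hasattr(method, '_params'):
                delattr(cls, attr_name)      # remove the template method
                _create_param_rules(cls, method)
    return cls

@template
class Demo(object):
    @parameterized(('id', 'ID'), ('typeid', 'TYPEID'))
    def p_xxx_rule(self, p):
        'xxx_rule : yyy'
        p[0] = p[1]

print(sorted(name for name in dir(Demo) if name.startswith('p_')))
# -> ['p_id_rule', 'p_typeid_rule']
print(Demo.p_id_rule.__doc__)
# -> id_rule : ID
```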
diff --git a/functions/source/GitPullS3/pycparser/yacctab.py b/functions/source/GitPullS3/pycparser/yacctab.py
deleted file mode 100644
index a244322..0000000
--- a/functions/source/GitPullS3/pycparser/yacctab.py
+++ /dev/null
@@ -1,332 +0,0 @@
-
-# yacctab.py
-# This file is automatically generated. Do not edit.
-_tabversion = '3.10'
-
-_lr_method = 'LALR'
-
-_lr_signature = 'translation_unit_or_emptyleftLORleftLANDleftORleftXORleftANDleftEQNEleftGTGELTLEleftRSHIFTLSHIFTleftPLUSMINUSleftTIMESDIVIDEMOD_BOOL _COMPLEX AUTO BREAK CASE CHAR CONST CONTINUE DEFAULT DO DOUBLE ELSE ENUM EXTERN FLOAT FOR GOTO IF INLINE INT LONG REGISTER OFFSETOF RESTRICT RETURN SHORT SIGNED SIZEOF STATIC STRUCT SWITCH TYPEDEF UNION UNSIGNED VOID VOLATILE WHILE __INT128 ID TYPEID INT_CONST_DEC INT_CONST_OCT INT_CONST_HEX INT_CONST_BIN FLOAT_CONST HEX_FLOAT_CONST CHAR_CONST WCHAR_CONST STRING_LITERAL WSTRING_LITERAL PLUS MINUS TIMES DIVIDE MOD OR AND NOT XOR LSHIFT RSHIFT LOR LAND LNOT LT LE GT GE EQ NE EQUALS TIMESEQUAL DIVEQUAL MODEQUAL PLUSEQUAL MINUSEQUAL LSHIFTEQUAL RSHIFTEQUAL ANDEQUAL XOREQUAL OREQUAL PLUSPLUS MINUSMINUS ARROW CONDOP LPAREN RPAREN LBRACKET RBRACKET LBRACE RBRACE COMMA PERIOD SEMI COLON ELLIPSIS PPHASH PPPRAGMA PPPRAGMASTRabstract_declarator_opt : empty\n| abstract_declaratorassignment_expression_opt : empty\n| assignment_expressionblock_item_list_opt : empty\n| block_item_listdeclaration_list_opt : empty\n| declaration_listdeclaration_specifiers_no_type_opt : empty\n| declaration_specifiers_no_typedesignation_opt : empty\n| designationexpression_opt : empty\n| expressionid_init_declarator_list_opt : empty\n| id_init_declarator_listidentifier_list_opt : empty\n| identifier_listinit_declarator_list_opt : empty\n| init_declarator_listinitializer_list_opt : empty\n| initializer_listparameter_type_list_opt : empty\n| parameter_type_liststruct_declarator_list_opt : empty\n| struct_declarator_listtype_qualifier_list_opt : empty\n| type_qualifier_list direct_id_declarator : ID\n direct_id_declarator : LPAREN id_declarator RPAREN\n direct_id_declarator : direct_id_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET\n direct_id_declarator : direct_id_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET\n | direct_id_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET\n direct_id_declarator : direct_id_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET\n direct_id_declarator : direct_id_declarator LPAREN parameter_type_list RPAREN\n | direct_id_declarator LPAREN identifier_list_opt RPAREN\n direct_typeid_declarator : TYPEID\n direct_typeid_declarator : LPAREN typeid_declarator RPAREN\n direct_typeid_declarator : direct_typeid_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET\n direct_typeid_declarator : direct_typeid_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET\n | direct_typeid_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET\n direct_typeid_declarator : direct_typeid_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET\n direct_typeid_declarator : direct_typeid_declarator LPAREN parameter_type_list RPAREN\n | direct_typeid_declarator LPAREN identifier_list_opt RPAREN\n direct_typeid_noparen_declarator : TYPEID\n direct_typeid_noparen_declarator : direct_typeid_noparen_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET\n direct_typeid_noparen_declarator : direct_typeid_noparen_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET\n | direct_typeid_noparen_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET\n direct_typeid_noparen_declarator : direct_typeid_noparen_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET\n direct_typeid_noparen_declarator : direct_typeid_noparen_declarator 
LPAREN parameter_type_list RPAREN\n | direct_typeid_noparen_declarator LPAREN identifier_list_opt RPAREN\n id_declarator : direct_id_declarator\n id_declarator : pointer direct_id_declarator\n typeid_declarator : direct_typeid_declarator\n typeid_declarator : pointer direct_typeid_declarator\n typeid_noparen_declarator : direct_typeid_noparen_declarator\n typeid_noparen_declarator : pointer direct_typeid_noparen_declarator\n translation_unit_or_empty : translation_unit\n | empty\n translation_unit : external_declaration\n translation_unit : translation_unit external_declaration\n external_declaration : function_definition\n external_declaration : declaration\n external_declaration : pp_directive\n | pppragma_directive\n external_declaration : SEMI\n pp_directive : PPHASH\n pppragma_directive : PPPRAGMA\n | PPPRAGMA PPPRAGMASTR\n function_definition : id_declarator declaration_list_opt compound_statement\n function_definition : declaration_specifiers id_declarator declaration_list_opt compound_statement\n statement : labeled_statement\n | expression_statement\n | compound_statement\n | selection_statement\n | iteration_statement\n | jump_statement\n | pppragma_directive\n decl_body : declaration_specifiers init_declarator_list_opt\n | declaration_specifiers_no_type id_init_declarator_list_opt\n declaration : decl_body SEMI\n declaration_list : declaration\n | declaration_list declaration\n declaration_specifiers_no_type : type_qualifier declaration_specifiers_no_type_opt\n declaration_specifiers_no_type : storage_class_specifier declaration_specifiers_no_type_opt\n declaration_specifiers_no_type : function_specifier declaration_specifiers_no_type_opt\n declaration_specifiers : declaration_specifiers type_qualifier\n declaration_specifiers : declaration_specifiers storage_class_specifier\n declaration_specifiers : declaration_specifiers function_specifier\n declaration_specifiers : declaration_specifiers type_specifier_no_typeid\n declaration_specifiers : type_specifier\n declaration_specifiers : declaration_specifiers_no_type type_specifier\n storage_class_specifier : AUTO\n | REGISTER\n | STATIC\n | EXTERN\n | TYPEDEF\n function_specifier : INLINE\n type_specifier_no_typeid : VOID\n | _BOOL\n | CHAR\n | SHORT\n | INT\n | LONG\n | FLOAT\n | DOUBLE\n | _COMPLEX\n | SIGNED\n | UNSIGNED\n | __INT128\n type_specifier : typedef_name\n | enum_specifier\n | struct_or_union_specifier\n | type_specifier_no_typeid\n type_qualifier : CONST\n | RESTRICT\n | VOLATILE\n init_declarator_list : init_declarator\n | init_declarator_list COMMA init_declarator\n init_declarator : declarator\n | declarator EQUALS initializer\n id_init_declarator_list : id_init_declarator\n | id_init_declarator_list COMMA init_declarator\n id_init_declarator : id_declarator\n | id_declarator EQUALS initializer\n specifier_qualifier_list : specifier_qualifier_list type_specifier_no_typeid\n specifier_qualifier_list : specifier_qualifier_list type_qualifier\n specifier_qualifier_list : type_specifier\n specifier_qualifier_list : type_qualifier_list type_specifier\n struct_or_union_specifier : struct_or_union ID\n | struct_or_union TYPEID\n struct_or_union_specifier : struct_or_union brace_open struct_declaration_list brace_close\n struct_or_union_specifier : struct_or_union ID brace_open struct_declaration_list brace_close\n | struct_or_union TYPEID brace_open struct_declaration_list brace_close\n struct_or_union : STRUCT\n | UNION\n struct_declaration_list : struct_declaration\n | struct_declaration_list struct_declaration\n 
struct_declaration : specifier_qualifier_list struct_declarator_list_opt SEMI\n struct_declaration : SEMI\n struct_declarator_list : struct_declarator\n | struct_declarator_list COMMA struct_declarator\n struct_declarator : declarator\n struct_declarator : declarator COLON constant_expression\n | COLON constant_expression\n enum_specifier : ENUM ID\n | ENUM TYPEID\n enum_specifier : ENUM brace_open enumerator_list brace_close\n enum_specifier : ENUM ID brace_open enumerator_list brace_close\n | ENUM TYPEID brace_open enumerator_list brace_close\n enumerator_list : enumerator\n | enumerator_list COMMA\n | enumerator_list COMMA enumerator\n enumerator : ID\n | ID EQUALS constant_expression\n declarator : id_declarator\n | typeid_declarator\n pointer : TIMES type_qualifier_list_opt\n | TIMES type_qualifier_list_opt pointer\n type_qualifier_list : type_qualifier\n | type_qualifier_list type_qualifier\n parameter_type_list : parameter_list\n | parameter_list COMMA ELLIPSIS\n parameter_list : parameter_declaration\n | parameter_list COMMA parameter_declaration\n parameter_declaration : declaration_specifiers id_declarator\n | declaration_specifiers typeid_noparen_declarator\n parameter_declaration : declaration_specifiers abstract_declarator_opt\n identifier_list : identifier\n | identifier_list COMMA identifier\n initializer : assignment_expression\n initializer : brace_open initializer_list_opt brace_close\n | brace_open initializer_list COMMA brace_close\n initializer_list : designation_opt initializer\n | initializer_list COMMA designation_opt initializer\n designation : designator_list EQUALS\n designator_list : designator\n | designator_list designator\n designator : LBRACKET constant_expression RBRACKET\n | PERIOD identifier\n type_name : specifier_qualifier_list abstract_declarator_opt\n abstract_declarator : pointer\n abstract_declarator : pointer direct_abstract_declarator\n abstract_declarator : direct_abstract_declarator\n direct_abstract_declarator : LPAREN abstract_declarator RPAREN direct_abstract_declarator : direct_abstract_declarator LBRACKET assignment_expression_opt RBRACKET\n direct_abstract_declarator : LBRACKET assignment_expression_opt RBRACKET\n direct_abstract_declarator : direct_abstract_declarator LBRACKET TIMES RBRACKET\n direct_abstract_declarator : LBRACKET TIMES RBRACKET\n direct_abstract_declarator : direct_abstract_declarator LPAREN parameter_type_list_opt RPAREN\n direct_abstract_declarator : LPAREN parameter_type_list_opt RPAREN\n block_item : declaration\n | statement\n block_item_list : block_item\n | block_item_list block_item\n compound_statement : brace_open block_item_list_opt brace_close labeled_statement : ID COLON statement labeled_statement : CASE constant_expression COLON statement labeled_statement : DEFAULT COLON statement selection_statement : IF LPAREN expression RPAREN statement selection_statement : IF LPAREN expression RPAREN statement ELSE statement selection_statement : SWITCH LPAREN expression RPAREN statement iteration_statement : WHILE LPAREN expression RPAREN statement iteration_statement : DO statement WHILE LPAREN expression RPAREN SEMI iteration_statement : FOR LPAREN expression_opt SEMI expression_opt SEMI expression_opt RPAREN statement iteration_statement : FOR LPAREN declaration expression_opt SEMI expression_opt RPAREN statement jump_statement : GOTO ID SEMI jump_statement : BREAK SEMI jump_statement : CONTINUE SEMI jump_statement : RETURN expression SEMI\n | RETURN SEMI\n expression_statement : expression_opt SEMI expression : 
assignment_expression\n | expression COMMA assignment_expression\n typedef_name : TYPEID assignment_expression : conditional_expression\n | unary_expression assignment_operator assignment_expression\n assignment_operator : EQUALS\n | XOREQUAL\n | TIMESEQUAL\n | DIVEQUAL\n | MODEQUAL\n | PLUSEQUAL\n | MINUSEQUAL\n | LSHIFTEQUAL\n | RSHIFTEQUAL\n | ANDEQUAL\n | OREQUAL\n constant_expression : conditional_expression conditional_expression : binary_expression\n | binary_expression CONDOP expression COLON conditional_expression\n binary_expression : cast_expression\n | binary_expression TIMES binary_expression\n | binary_expression DIVIDE binary_expression\n | binary_expression MOD binary_expression\n | binary_expression PLUS binary_expression\n | binary_expression MINUS binary_expression\n | binary_expression RSHIFT binary_expression\n | binary_expression LSHIFT binary_expression\n | binary_expression LT binary_expression\n | binary_expression LE binary_expression\n | binary_expression GE binary_expression\n | binary_expression GT binary_expression\n | binary_expression EQ binary_expression\n | binary_expression NE binary_expression\n | binary_expression AND binary_expression\n | binary_expression OR binary_expression\n | binary_expression XOR binary_expression\n | binary_expression LAND binary_expression\n | binary_expression LOR binary_expression\n cast_expression : unary_expression cast_expression : LPAREN type_name RPAREN cast_expression unary_expression : postfix_expression unary_expression : PLUSPLUS unary_expression\n | MINUSMINUS unary_expression\n | unary_operator cast_expression\n unary_expression : SIZEOF unary_expression\n | SIZEOF LPAREN type_name RPAREN\n unary_operator : AND\n | TIMES\n | PLUS\n | MINUS\n | NOT\n | LNOT\n postfix_expression : primary_expression postfix_expression : postfix_expression LBRACKET expression RBRACKET postfix_expression : postfix_expression LPAREN argument_expression_list RPAREN\n | postfix_expression LPAREN RPAREN\n postfix_expression : postfix_expression PERIOD ID\n | postfix_expression PERIOD TYPEID\n | postfix_expression ARROW ID\n | postfix_expression ARROW TYPEID\n postfix_expression : postfix_expression PLUSPLUS\n | postfix_expression MINUSMINUS\n postfix_expression : LPAREN type_name RPAREN brace_open initializer_list brace_close\n | LPAREN type_name RPAREN brace_open initializer_list COMMA brace_close\n primary_expression : identifier primary_expression : constant primary_expression : unified_string_literal\n | unified_wstring_literal\n primary_expression : LPAREN expression RPAREN primary_expression : OFFSETOF LPAREN type_name COMMA offsetof_member_designator RPAREN\n offsetof_member_designator : identifier\n | offsetof_member_designator PERIOD identifier\n | offsetof_member_designator LBRACKET expression RBRACKET\n argument_expression_list : assignment_expression\n | argument_expression_list COMMA assignment_expression\n identifier : ID constant : INT_CONST_DEC\n | INT_CONST_OCT\n | INT_CONST_HEX\n | INT_CONST_BIN\n constant : FLOAT_CONST\n | HEX_FLOAT_CONST\n constant : CHAR_CONST\n | WCHAR_CONST\n unified_string_literal : STRING_LITERAL\n | unified_string_literal STRING_LITERAL\n unified_wstring_literal : WSTRING_LITERAL\n | unified_wstring_literal WSTRING_LITERAL\n brace_open : LBRACE\n brace_close : RBRACE\n empty : '
-
-_lr_action_items = {'VOID':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,136,142,170,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,228,229,230,231,232,233,234,240,245,254,273,282,283,284,287,289,292,323,327,332,333,335,336,342,345,347,350,351,354,356,357,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[6,-303,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,6,-94,-109,-104,-65,-93,-110,6,-215,-107,-111,6,-63,-116,6,-29,-105,-62,-101,-67,-112,-106,-303,-108,-303,-103,-117,-68,-98,-85,-10,-9,6,-53,6,-82,6,6,-61,-131,-301,-130,6,-147,-146,-160,-88,-90,6,-87,-89,-92,-81,-84,-86,-69,-30,6,6,-70,6,-83,6,6,-128,-140,-137,6,6,6,-161,6,6,-36,-35,6,6,-73,-76,-72,-74,6,-78,-193,-192,-77,-194,-75,6,6,-129,-132,-138,-302,-126,-127,-148,-71,6,-31,6,6,6,-34,6,6,6,-212,-211,6,-209,-195,-208,-196,-134,-133,-139,-150,-149,6,-33,-32,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'LBRACKET':([2,3,5,6,7,8,9,10,11,15,16,19,20,21,24,25,29,30,31,32,35,37,39,41,44,45,48,50,51,54,61,69,70,71,73,74,76,77,78,79,80,83,85,88,91,92,96,105,113,115,125,136,137,140,147,150,151,152,153,154,155,158,159,167,169,172,174,175,177,178,184,185,186,189,190,193,196,226,230,231,233,234,240,245,247,256,272,275,276,278,282,289,292,315,320,321,350,351,356,357,364,365,368,373,377,378,379,380,383,388,391,392,412,413,414,415,421,422,440,441,445,447,449,452,453,459,465,466,467,468,469,477,478,479,484,485,488,489,499,502,503,504,505,510,512,517,],[-102,-115,-113,-99,-97,59,-95,-114,-96,-100,-91,-94,-109,-104,-93,-110,-215,-107,-303,-111,-116,-29,-105,-101,-112,-106,-108,-103,-117,-98,59,-131,-301,-130,-147,-146,-28,-158,-160,-27,-88,-90,141,-37,-87,-89,-92,-30,195,-288,-128,-161,-159,141,-292,-280,-295,-299,-296,-293,-278,-279,280,-291,-265,-297,-289,-277,-294,-290,-36,-35,195,195,322,-45,326,-288,-129,-132,-302,-126,-127,-148,-38,370,-300,-298,-274,-273,-31,-34,195,195,322,326,-134,-133,-150,-149,-44,-43,-177,370,-272,-271,-270,-269,-268,-281,195,195,-33,-32,-191,-185,-187,-189,-39,-42,-180,370,-178,-266,-267,370,-51,-50,-186,-188,-190,-41,-40,-179,501,-283,-46,-49,-282,370,-275,-48,-47,-284,-276,-285,]),'WCHAR_CONST':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,136,141,144,145,148,156,157,163,164,166,168,170,171,173,181,182,183,195,199,200,203,204,205,211,212,214,215,216,217,219,221,222,225,233,235,246,249,250,251,256,260,261,262,263,264,265,266,267,268,269,270,271,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,355,361,362,363,366,370,372,374,389,416,417,418,424,426,427,429,431,433,436,447,450,454,456,459,460,462,463,464,470,472,473,474,475,476,480,481,493,494,495,497,501,502,506,509,514,515,516,518,519,520,],[-115,-116,-117,-68,-303,-301,-28,-160,-27,-81,-69,153,-28,-303,153,-161,-303,153,153,-264,153,-262,153,-261,153,-260,153,153,-259,-263,153,153,153,-73,-76,-72,153,-74,153,153,-78,-193,-192,-77,-194,153,-75,-260,-302,153,153,153,-28,-303,-303,-221,-224,-222,-218,-219,-223,-225,153,-227,-228,-220,-226,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,-303,-260,153,-212,-211,153,-209,153,153,153,-195,153,-208,-196,153,153,153,-260,153,153,-12,153,153,-11,153,153,-28,-303,-260,-207,-210,153,-199,153,-
197,-303,-176,153,153,-303,153,-260,153,153,153,153,-198,153,153,153,153,-11,153,-203,-202,-200,153,-303,153,153,153,-204,-201,153,-206,-205,]),'FLOAT_CONST':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,136,141,144,145,148,156,157,163,164,166,168,170,171,173,181,182,183,195,199,200,203,204,205,211,212,214,215,216,217,219,221,222,225,233,235,246,249,250,251,256,260,261,262,263,264,265,266,267,268,269,270,271,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,355,361,362,363,366,370,372,374,389,416,417,418,424,426,427,429,431,433,436,447,450,454,456,459,460,462,463,464,470,472,473,474,475,476,480,481,493,494,495,497,501,502,506,509,514,515,516,518,519,520,],[-115,-116,-117,-68,-303,-301,-28,-160,-27,-81,-69,154,-28,-303,154,-161,-303,154,154,-264,154,-262,154,-261,154,-260,154,154,-259,-263,154,154,154,-73,-76,-72,154,-74,154,154,-78,-193,-192,-77,-194,154,-75,-260,-302,154,154,154,-28,-303,-303,-221,-224,-222,-218,-219,-223,-225,154,-227,-228,-220,-226,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,-303,-260,154,-212,-211,154,-209,154,154,154,-195,154,-208,-196,154,154,154,-260,154,154,-12,154,154,-11,154,154,-28,-303,-260,-207,-210,154,-199,154,-197,-303,-176,154,154,-303,154,-260,154,154,154,154,-198,154,154,154,154,-11,154,-203,-202,-200,154,-303,154,154,154,-204,-201,154,-206,-205,]),'MINUS':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,115,121,136,141,144,145,147,148,149,150,151,152,153,154,155,156,157,158,159,161,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,181,182,183,195,199,200,203,204,205,211,212,214,215,216,217,219,221,222,225,226,233,235,246,249,250,251,256,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,278,280,281,284,285,286,287,288,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,355,361,362,363,366,370,372,374,377,378,379,380,383,388,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,416,417,418,424,426,427,429,431,433,436,447,450,451,452,453,454,456,458,459,460,462,463,464,470,472,473,474,475,476,480,481,493,494,495,497,499,501,502,503,506,509,512,514,515,516,518,519,520,],[-115,-116,-117,-68,-303,-301,-28,-160,-27,-81,-69,157,-28,-303,-288,157,-161,-303,157,157,-292,-264,-251,-280,-295,-299,-296,-293,-278,157,-262,-279,-253,-232,157,-261,157,-291,-260,-265,157,157,-297,-259,-289,-277,297,-294,-290,-263,157,157,157,-73,-76,-72,157,-74,157,157,-78,-193,-192,-77,-194,157,-75,-260,-288,-302,157,157,157,-28,-303,-303,-221,-224,-222,-218,-219,-223,-225,157,-227,-228,-220,-226,-300,157,-257,-298,-274,-273,157,157,157,-251,-256,157,-254,-255,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,-303,-260,157,-212,-211,157,-209,157,157,157,-195,157,-208,-196,157,157,157,-260,157,157,-12,157,157,-11,-272,-271,-270,-269,-268,-281,157,297,297,297,-237,297,297,297,-236,297,297,-234,-233,297,297,297,297,297,-235,157,-28,-303,-260,-207,-210,157,-199,157,-197,-303,-176,-258,-266,-267,157,157,-252,-303,157,-260,157,157,157,157,-198,157,157,157,157,-11,157,-203,-202,-200,-282,157,-303,-275,157,157,-276,157,-204,-201,157,-206,-205,]),'RPAREN':([2,3,5,6,7,8,9,10,11,15,16,19,20,21,24,25,29,30,31,32,35,37,39,41,44,45,48,50,51,54,58,60,61,69,71,73,74,76,77,78,79,80,83,85,88,91,92,96,105,109,110,111,112,113,114,115,116,118,125,136,137,138,140,142,147,149,150,
151,152,153,154,155,158,159,161,167,169,172,174,175,176,177,178,180,184,185,186,187,188,189,190,191,192,193,194,196,208,224,230,231,233,234,240,245,247,252,253,272,274,275,276,278,281,282,285,286,288,289,290,291,292,293,315,316,317,318,319,320,321,323,327,328,329,330,343,350,351,356,357,364,365,375,376,377,378,379,380,382,383,384,386,387,388,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,412,413,414,415,419,420,421,422,425,430,432,434,437,440,441,451,452,453,458,465,466,467,468,469,477,478,483,484,485,487,488,489,493,496,499,503,504,505,506,507,510,512,513,517,],[-102,-115,-113,-99,-97,-52,-95,-114,-96,-100,-91,-94,-109,-104,-93,-110,-215,-107,-303,-111,-116,-29,-105,-101,-112,-106,-108,-103,-117,-98,105,-303,-53,-131,-130,-147,-146,-28,-158,-160,-27,-88,-90,-54,-37,-87,-89,-92,-30,184,-17,185,-164,-303,-18,-288,-162,-169,-128,-161,-159,247,-55,-303,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,-232,-291,-265,-297,-289,-277,-230,-294,-290,-216,-36,-35,-303,-168,-2,-182,-56,-166,-1,-45,-167,-184,-14,-213,-129,-132,-302,-126,-127,-148,-38,364,365,-300,-257,-298,-274,-273,383,-31,-251,-256,-254,-34,388,389,-303,-255,-182,-23,-24,414,415,-57,-183,-303,-303,-170,-163,-165,-13,-134,-133,-150,-149,-44,-43,-217,451,-272,-271,-270,-269,-286,-268,453,456,457,-281,-181,-182,-303,-238,-250,-239,-237,-241,-245,-240,-236,-243,-248,-234,-233,-242,-249,-244,-246,-247,-235,-33,-32,-191,-185,465,466,-187,-189,469,-214,472,474,476,-39,-42,-258,-266,-267,-252,-51,-50,-186,-188,-190,-41,-40,-287,499,-283,-231,-46,-49,-303,508,-282,-275,-48,-47,-303,514,-284,-276,518,-285,]),'LONG':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,136,142,170,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,228,229,230,231,232,233,234,240,245,254,273,282,283,284,287,289,292,323,327,332,333,335,336,342,345,347,350,351,354,356,357,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[21,-303,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,21,-94,-109,-104,-65,-93,-110,21,-215,-107,-111,21,-63,-116,21,-29,-105,-62,-101,-67,-112,-106,-303,-108,-303,-103,-117,-68,-98,-85,-10,-9,21,-53,21,-82,21,21,-61,-131,-301,-130,21,-147,-146,-160,-88,-90,21,-87,-89,-92,-81,-84,-86,-69,-30,21,21,-70,21,-83,21,21,-128,-140,-137,21,21,21,-161,21,21,-36,-35,21,21,-73,-76,-72,-74,21,-78,-193,-192,-77,-194,-75,21,21,-129,-132,-138,-302,-126,-127,-148,-71,21,-31,21,21,21,-34,21,21,21,-212,-211,21,-209,-195,-208,-196,-134,-133,-139,-150,-149,21,-33,-32,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'PLUS':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,115,121,136,141,144,145,147,148,149,150,151,152,153,154,155,156,157,158,159,161,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,181,182,183,195,199,200,203,204,205,211,212,214,215,216,217,219,221,222,225,226,233,235,246,249,250,251,256,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,278,280,281,284,285,286,287,288,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,355,361,362,363,366,370,372,374,377,378,379,380,383,388,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,416,417,418,424,426,427,429,431,433,436,447,450,451,452,453,454,456,458,459,460,462,463,
464,470,472,473,474,475,476,480,481,493,494,495,497,499,501,502,503,506,509,512,514,515,516,518,519,520,],[-115,-116,-117,-68,-303,-301,-28,-160,-27,-81,-69,164,-28,-303,-288,164,-161,-303,164,164,-292,-264,-251,-280,-295,-299,-296,-293,-278,164,-262,-279,-253,-232,164,-261,164,-291,-260,-265,164,164,-297,-259,-289,-277,301,-294,-290,-263,164,164,164,-73,-76,-72,164,-74,164,164,-78,-193,-192,-77,-194,164,-75,-260,-288,-302,164,164,164,-28,-303,-303,-221,-224,-222,-218,-219,-223,-225,164,-227,-228,-220,-226,-300,164,-257,-298,-274,-273,164,164,164,-251,-256,164,-254,-255,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,-303,-260,164,-212,-211,164,-209,164,164,164,-195,164,-208,-196,164,164,164,-260,164,164,-12,164,164,-11,-272,-271,-270,-269,-268,-281,164,301,301,301,-237,301,301,301,-236,301,301,-234,-233,301,301,301,301,301,-235,164,-28,-303,-260,-207,-210,164,-199,164,-197,-303,-176,-258,-266,-267,164,164,-252,-303,164,-260,164,164,164,164,-198,164,164,164,164,-11,164,-203,-202,-200,-282,164,-303,-275,164,164,-276,164,-204,-201,164,-206,-205,]),'ELLIPSIS':([198,],[329,]),'GT':([115,147,149,150,151,152,153,154,155,158,159,161,167,169,172,174,175,176,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,451,452,453,458,499,503,512,],[-288,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,-232,-291,-265,-297,-289,-277,302,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-238,302,-239,-237,-241,302,-240,-236,-243,302,-234,-233,-242,302,302,302,302,-235,-258,-266,-267,-252,-282,-275,-276,]),'GOTO':([53,70,101,104,121,199,200,203,205,212,214,215,216,217,219,221,222,233,332,333,336,338,342,345,347,348,426,427,431,433,436,472,473,474,476,494,495,497,509,514,515,516,518,519,520,],[-68,-301,-81,-69,201,-73,-76,-72,-74,201,-78,-193,-192,-77,-194,201,-75,-302,-212,-211,-209,201,-195,-208,-196,201,-207,-210,-199,201,-197,201,-198,201,201,-203,-202,-200,201,201,-204,-201,201,-206,-205,]),'ENUM':([0,1,3,7,8,9,11,12,14,17,18,19,23,24,26,34,35,36,37,40,42,47,49,51,53,54,55,56,57,60,61,64,65,67,68,70,72,78,87,101,102,103,104,105,117,120,121,122,123,124,126,127,128,129,136,142,170,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,228,229,232,233,254,273,282,283,284,287,289,323,327,332,333,335,336,342,345,347,354,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[28,-303,-115,-97,-52,-95,-96,-64,-60,-66,28,-94,-65,-93,28,-63,-116,28,-29,-62,-67,-303,-303,-117,-68,-98,-85,-10,-9,28,-53,-82,28,28,-61,-301,28,-160,28,-81,-84,-86,-69,-30,28,-70,28,-83,28,28,-140,-137,28,28,-161,28,28,-36,-35,28,28,-73,-76,-72,-74,28,-78,-193,-192,-77,-194,-75,28,28,-138,-302,-71,28,-31,28,28,28,-34,28,28,-212,-211,28,-209,-195,-208,-196,-139,28,-33,-32,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'PERIOD':([70,115,147,150,151,152,153,154,155,158,159,167,169,172,174,175,177,178,226,233,256,272,275,276,278,368,373,377,378,379,380,383,388,445,447,449,452,453,459,479,484,485,499,502,503,510,512,517,],[-301,-288,-292,-280,-295,-299,-296,-293,-278,-279,279,-291,-265,-297,-289,-277,-294,-290,-288,-302,369,-300,-298,-274,-273,-177,369,-272,-271,-270,-269,-268,-281,-180,369,-178,-266,-267,369,-179,500,-283,-282,369,-275,-284,-276,-285,]),'GE':([115,147,149,150,151,152,153,154,155,158,159,161,167,169,172,174,175,176,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,393,394,395,396,397,398,399,400,401,402,403,4
04,405,406,407,408,410,411,451,452,453,458,499,503,512,],[-288,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,-232,-291,-265,-297,-289,-277,306,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-238,306,-239,-237,-241,306,-240,-236,-243,306,-234,-233,-242,306,306,306,306,-235,-258,-266,-267,-252,-282,-275,-276,]),'INT_CONST_DEC':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,136,141,144,145,148,156,157,163,164,166,168,170,171,173,181,182,183,195,199,200,203,204,205,211,212,214,215,216,217,219,221,222,225,233,235,246,249,250,251,256,260,261,262,263,264,265,266,267,268,269,270,271,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,355,361,362,363,366,370,372,374,389,416,417,418,424,426,427,429,431,433,436,447,450,454,456,459,460,462,463,464,470,472,473,474,475,476,480,481,493,494,495,497,501,502,506,509,514,515,516,518,519,520,],[-115,-116,-117,-68,-303,-301,-28,-160,-27,-81,-69,174,-28,-303,174,-161,-303,174,174,-264,174,-262,174,-261,174,-260,174,174,-259,-263,174,174,174,-73,-76,-72,174,-74,174,174,-78,-193,-192,-77,-194,174,-75,-260,-302,174,174,174,-28,-303,-303,-221,-224,-222,-218,-219,-223,-225,174,-227,-228,-220,-226,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,-303,-260,174,-212,-211,174,-209,174,174,174,-195,174,-208,-196,174,174,174,-260,174,174,-12,174,174,-11,174,174,-28,-303,-260,-207,-210,174,-199,174,-197,-303,-176,174,174,-303,174,-260,174,174,174,174,-198,174,174,174,174,-11,174,-203,-202,-200,174,-303,174,174,174,-204,-201,174,-206,-205,]),'ARROW':([115,147,150,151,152,153,154,155,158,159,167,169,172,174,175,177,178,226,233,272,275,276,278,377,378,379,380,383,388,452,453,499,503,512,],[-288,-292,-280,-295,-299,-296,-293,-278,-279,277,-291,-265,-297,-289,-277,-294,-290,-288,-302,-300,-298,-274,-273,-272,-271,-270,-269,-268,-281,-266,-267,-282,-275,-276,]),'CHAR':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,136,142,170,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,228,229,230,231,232,233,234,240,245,254,273,282,283,284,287,289,292,323,327,332,333,335,336,342,345,347,350,351,354,356,357,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[41,-303,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,41,-94,-109,-104,-65,-93,-110,41,-215,-107,-111,41,-63,-116,41,-29,-105,-62,-101,-67,-112,-106,-303,-108,-303,-103,-117,-68,-98,-85,-10,-9,41,-53,41,-82,41,41,-61,-131,-301,-130,41,-147,-146,-160,-88,-90,41,-87,-89,-92,-81,-84,-86,-69,-30,41,41,-70,41,-83,41,41,-128,-140,-137,41,41,41,-161,41,41,-36,-35,41,41,-73,-76,-72,-74,41,-78,-193,-192,-77,-194,-75,41,41,-129,-132,-138,-302,-126,-127,-148,-71,41,-31,41,41,41,-34,41,41,41,-212,-211,41,-209,-195,-208,-196,-134,-133,-139,-150,-149,41,-33,-32,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'HEX_FLOAT_CONST':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,136,141,144,145,148,156,157,163,164,166,168,170,171,173,181,182,183,195,199,200,203,204,205,211,212,214,215,216,217,219,221,222,225,233,235,246,249,250,251,256,260,261,262,263,264,265,266,267,268,269,270,271,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312
,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,355,361,362,363,366,370,372,374,389,416,417,418,424,426,427,429,431,433,436,447,450,454,456,459,460,462,463,464,470,472,473,474,475,476,480,481,493,494,495,497,501,502,506,509,514,515,516,518,519,520,],[-115,-116,-117,-68,-303,-301,-28,-160,-27,-81,-69,177,-28,-303,177,-161,-303,177,177,-264,177,-262,177,-261,177,-260,177,177,-259,-263,177,177,177,-73,-76,-72,177,-74,177,177,-78,-193,-192,-77,-194,177,-75,-260,-302,177,177,177,-28,-303,-303,-221,-224,-222,-218,-219,-223,-225,177,-227,-228,-220,-226,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,-303,-260,177,-212,-211,177,-209,177,177,177,-195,177,-208,-196,177,177,177,-260,177,177,-12,177,177,-11,177,177,-28,-303,-260,-207,-210,177,-199,177,-197,-303,-176,177,177,-303,177,-260,177,177,177,177,-198,177,177,177,177,-11,177,-203,-202,-200,177,-303,177,177,177,-204,-201,177,-206,-205,]),'DOUBLE':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,136,142,170,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,228,229,230,231,232,233,234,240,245,254,273,282,283,284,287,289,292,323,327,332,333,335,336,342,345,347,350,351,354,356,357,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[45,-303,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,45,-94,-109,-104,-65,-93,-110,45,-215,-107,-111,45,-63,-116,45,-29,-105,-62,-101,-67,-112,-106,-303,-108,-303,-103,-117,-68,-98,-85,-10,-9,45,-53,45,-82,45,45,-61,-131,-301,-130,45,-147,-146,-160,-88,-90,45,-87,-89,-92,-81,-84,-86,-69,-30,45,45,-70,45,-83,45,45,-128,-140,-137,45,45,45,-161,45,45,-36,-35,45,45,-73,-76,-72,-74,45,-78,-193,-192,-77,-194,-75,45,45,-129,-132,-138,-302,-126,-127,-148,-71,45,-31,45,45,45,-34,45,45,45,-212,-211,45,-209,-195,-208,-196,-134,-133,-139,-150,-149,45,-33,-32,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'MINUSEQUAL':([115,147,149,150,151,152,153,154,155,158,159,167,169,172,174,175,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,451,452,453,458,499,503,512,],[-288,-292,261,-280,-295,-299,-296,-293,-278,-279,-253,-291,-265,-297,-289,-277,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-258,-266,-267,-252,-282,-275,-276,]),'INT_CONST_OCT':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,136,141,144,145,148,156,157,163,164,166,168,170,171,173,181,182,183,195,199,200,203,204,205,211,212,214,215,216,217,219,221,222,225,233,235,246,249,250,251,256,260,261,262,263,264,265,266,267,268,269,270,271,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,355,361,362,363,366,370,372,374,389,416,417,418,424,426,427,429,431,433,436,447,450,454,456,459,460,462,463,464,470,472,473,474,475,476,480,481,493,494,495,497,501,502,506,509,514,515,516,518,519,520,],[-115,-116,-117,-68,-303,-301,-28,-160,-27,-81,-69,178,-28,-303,178,-161,-303,178,178,-264,178,-262,178,-261,178,-260,178,178,-259,-263,178,178,178,-73,-76,-72,178,-74,178,178,-78,-193,-192,-77,-194,178,-75,-260,-302,178,178,178,-28,-303,-303,-221,-224,-222,-218,-219,-223,-225,178,-227,-228,-220,-226,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,
178,178,-303,-260,178,-212,-211,178,-209,178,178,178,-195,178,-208,-196,178,178,178,-260,178,178,-12,178,178,-11,178,178,-28,-303,-260,-207,-210,178,-199,178,-197,-303,-176,178,178,-303,178,-260,178,178,178,178,-198,178,178,178,178,-11,178,-203,-202,-200,178,-303,178,178,178,-204,-201,178,-206,-205,]),'TIMESEQUAL':([115,147,149,150,151,152,153,154,155,158,159,167,169,172,174,175,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,451,452,453,458,499,503,512,],[-288,-292,270,-280,-295,-299,-296,-293,-278,-279,-253,-291,-265,-297,-289,-277,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-258,-266,-267,-252,-282,-275,-276,]),'OR':([115,147,149,150,151,152,153,154,155,158,159,161,167,169,172,174,175,176,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,451,452,453,458,499,503,512,],[-288,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,-232,-291,-265,-297,-289,-277,311,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-238,311,-239,-237,-241,-245,-240,-236,-243,-248,-234,-233,-242,311,-244,-246,-247,-235,-258,-266,-267,-252,-282,-275,-276,]),'SHORT':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,136,142,170,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,228,229,230,231,232,233,234,240,245,254,273,282,283,284,287,289,292,323,327,332,333,335,336,342,345,347,350,351,354,356,357,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[2,-303,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,2,-94,-109,-104,-65,-93,-110,2,-215,-107,-111,2,-63,-116,2,-29,-105,-62,-101,-67,-112,-106,-303,-108,-303,-103,-117,-68,-98,-85,-10,-9,2,-53,2,-82,2,2,-61,-131,-301,-130,2,-147,-146,-160,-88,-90,2,-87,-89,-92,-81,-84,-86,-69,-30,2,2,-70,2,-83,2,2,-128,-140,-137,2,2,2,-161,2,2,-36,-35,2,2,-73,-76,-72,-74,2,-78,-193,-192,-77,-194,-75,2,2,-129,-132,-138,-302,-126,-127,-148,-71,2,-31,2,2,2,-34,2,2,2,-212,-211,2,-209,-195,-208,-196,-134,-133,-139,-150,-149,2,-33,-32,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'RETURN':([53,70,101,104,121,199,200,203,205,212,214,215,216,217,219,221,222,233,332,333,336,338,342,345,347,348,426,427,431,433,436,472,473,474,476,494,495,497,509,514,515,516,518,519,520,],[-68,-301,-81,-69,204,-73,-76,-72,-74,204,-78,-193,-192,-77,-194,204,-75,-302,-212,-211,-209,204,-195,-208,-196,204,-207,-210,-199,204,-197,204,-198,204,204,-203,-202,-200,204,204,-204,-201,204,-206,-205,]),'RSHIFTEQUAL':([115,147,149,150,151,152,153,154,155,158,159,167,169,172,174,175,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,451,452,453,458,499,503,512,],[-288,-292,271,-280,-295,-299,-296,-293,-278,-279,-253,-291,-265,-297,-289,-277,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-258,-266,-267,-252,-282,-275,-276,]),'RESTRICT':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,31,32,33,34,35,37,39,40,41,42,44,45,47,48,49,50,51,53,54,59,60,61,63,64,67,68,69,70,71,72,73,74,76,78,80,83,87,91,92,96,101,104,105,107,108,113,120,121,122,123,124,125,126,127,128,129,130,136,141,142,170,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,228,229,23
0,231,232,233,234,240,245,250,251,254,273,282,283,284,287,289,292,322,323,327,332,333,335,336,342,345,347,350,351,354,356,357,392,412,413,417,418,426,427,431,436,473,494,495,497,515,516,519,520,],[35,35,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,35,-94,-109,-104,-65,-93,-110,35,-215,-107,35,-111,35,-63,-116,-29,-105,-62,-101,-67,-112,-106,35,-108,35,-103,-117,-68,-98,35,35,-53,35,-82,35,-61,-131,-301,-130,35,-147,-146,35,-160,-88,-90,35,-87,-89,-92,-81,-69,-30,35,35,35,-70,35,-83,35,35,-128,-140,-137,35,35,35,-161,35,35,35,-36,-35,35,35,-73,-76,-72,-74,35,-78,-193,-192,-77,-194,-75,35,35,-129,-132,-138,-302,-126,-127,-148,35,35,-71,35,-31,35,35,35,-34,35,35,35,35,-212,-211,35,-209,-195,-208,-196,-134,-133,-139,-150,-149,35,-33,-32,35,35,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'STATIC':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,37,39,40,41,42,44,45,47,48,49,50,51,53,54,59,60,61,63,64,67,68,69,70,71,73,74,78,80,83,87,91,92,96,101,104,105,107,113,120,121,122,136,141,142,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,231,233,245,250,254,282,289,322,323,327,332,333,335,336,342,345,347,350,351,356,357,392,412,413,417,426,427,431,436,473,494,495,497,515,516,519,520,],[9,9,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,9,-94,-109,-104,-65,-93,-110,9,-215,-107,-111,9,-63,-116,-29,-105,-62,-101,-67,-112,-106,9,-108,9,-103,-117,-68,-98,108,9,-53,9,-82,9,-61,-131,-301,-130,-147,-146,-160,-88,-90,9,-87,-89,-92,-81,-69,-30,182,9,-70,9,-83,-161,251,9,-36,-35,9,9,-73,-76,-72,-74,9,-78,-193,-192,-77,-194,-75,-132,-302,-148,362,-71,-31,-34,418,9,9,-212,-211,9,-209,-195,-208,-196,-134,-133,-150,-149,9,-33,-32,463,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'SIZEOF':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,136,141,144,145,148,156,157,163,164,166,168,170,171,173,181,182,183,195,199,200,203,204,205,211,212,214,215,216,217,219,221,222,225,233,235,246,249,250,251,256,260,261,262,263,264,265,266,267,268,269,270,271,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,355,361,362,363,366,370,372,374,389,416,417,418,424,426,427,429,431,433,436,447,450,454,456,459,460,462,463,464,470,472,473,474,475,476,480,481,493,494,495,497,501,502,506,509,514,515,516,518,519,520,],[-115,-116,-117,-68,-303,-301,-28,-160,-27,-81,-69,156,-28,-303,156,-161,-303,156,156,-264,156,-262,156,-261,156,-260,156,156,-259,-263,156,156,156,-73,-76,-72,156,-74,156,156,-78,-193,-192,-77,-194,156,-75,-260,-302,156,156,156,-28,-303,-303,-221,-224,-222,-218,-219,-223,-225,156,-227,-228,-220,-226,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,-303,-260,156,-212,-211,156,-209,156,156,156,-195,156,-208,-196,156,156,156,-260,156,156,-12,156,156,-11,156,156,-28,-303,-260,-207,-210,156,-199,156,-197,-303,-176,156,156,-303,156,-260,156,156,156,156,-198,156,156,156,156,-11,156,-203,-202,-200,156,-303,156,156,156,-204,-201,156,-206,-205,]),'UNSIGNED':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,136,142,170,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,228,229,230,231,232,233,234,240,245,254,273,282,283,284,287,289,292,323,327,332,333,33
5,336,342,345,347,350,351,354,356,357,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[20,-303,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,20,-94,-109,-104,-65,-93,-110,20,-215,-107,-111,20,-63,-116,20,-29,-105,-62,-101,-67,-112,-106,-303,-108,-303,-103,-117,-68,-98,-85,-10,-9,20,-53,20,-82,20,20,-61,-131,-301,-130,20,-147,-146,-160,-88,-90,20,-87,-89,-92,-81,-84,-86,-69,-30,20,20,-70,20,-83,20,20,-128,-140,-137,20,20,20,-161,20,20,-36,-35,20,20,-73,-76,-72,-74,20,-78,-193,-192,-77,-194,-75,20,20,-129,-132,-138,-302,-126,-127,-148,-71,20,-31,20,20,20,-34,20,20,20,-212,-211,20,-209,-195,-208,-196,-134,-133,-139,-150,-149,20,-33,-32,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'UNION':([0,1,3,7,8,9,11,12,14,17,18,19,23,24,26,34,35,36,37,40,42,47,49,51,53,54,55,56,57,60,61,64,65,67,68,70,72,78,87,101,102,103,104,105,117,120,121,122,123,124,126,127,128,129,136,142,170,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,228,229,232,233,254,273,282,283,284,287,289,323,327,332,333,335,336,342,345,347,354,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[22,-303,-115,-97,-52,-95,-96,-64,-60,-66,22,-94,-65,-93,22,-63,-116,22,-29,-62,-67,-303,-303,-117,-68,-98,-85,-10,-9,22,-53,-82,22,22,-61,-301,22,-160,22,-81,-84,-86,-69,-30,22,-70,22,-83,22,22,-140,-137,22,22,-161,22,22,-36,-35,22,22,-73,-76,-72,-74,22,-78,-193,-192,-77,-194,-75,22,22,-138,-302,-71,22,-31,22,22,22,-34,22,22,-212,-211,22,-209,-195,-208,-196,-139,22,-33,-32,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'COLON':([2,3,5,6,8,10,15,20,21,25,29,30,32,35,37,39,41,44,45,48,50,51,61,69,71,73,74,85,86,88,105,115,119,125,130,140,147,149,150,151,152,153,154,155,158,159,161,167,169,172,174,175,176,177,178,180,184,185,209,224,226,230,231,233,234,240,241,245,247,272,274,275,276,278,282,285,286,288,289,293,340,341,350,351,353,356,357,364,365,375,377,378,379,380,383,388,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,430,440,441,451,452,453,458,477,478,487,499,503,512,],[-102,-115,-113,-99,-52,-114,-100,-109,-104,-110,-215,-107,-111,-116,-29,-105,-101,-112,-106,-108,-103,-117,-53,-131,-130,-147,-146,-54,-157,-37,-30,-288,-156,-128,235,-55,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,-232,-291,-265,-297,-289,-277,-230,-294,-290,-216,-36,-35,338,-213,348,-129,-132,-302,-126,-127,355,-148,-38,-300,-257,-298,-274,-273,-31,-251,-256,-254,-34,-255,433,-229,-134,-133,235,-150,-149,-44,-43,-217,-272,-271,-270,-269,-268,-281,-238,-250,-239,-237,-241,-245,-240,-236,-243,-248,-234,-233,-242,-249,-244,-246,460,-247,-235,-33,-32,-214,-39,-42,-258,-266,-267,-252,-41,-40,-231,-282,-275,-276,]),'$end':([0,12,14,17,23,26,34,40,42,43,52,53,68,101,104,120,233,254,347,],[-303,-64,-60,-66,-65,-58,-63,-62,-67,0,-59,-68,-61,-81,-69,-70,-302,-71,-196,]),'WSTRING_LITERAL':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,136,141,144,145,148,150,152,156,157,163,164,166,168,170,171,173,181,182,183,195,199,200,203,204,205,211,212,214,215,216,217,219,221,222,225,233,235,246,249,250,251,256,260,261,262,263,264,265,266,267,268,269,270,271,272,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,355,361,362,363,366,370,372,374,389,416,417,418,424,426,427,429,431,433,436,447,450,454,456,459,460,462,463,464,470,472,473,474,475,476,480,481,493,494,495,497,501,502,506,509,514,515,516,518,519,520,],[-115,-116,-117,-68,-303,-301,-28,-160,-27,-81,-69,152
,-28,-303,152,-161,-303,152,152,-264,272,-299,152,-262,152,-261,152,-260,152,152,-259,-263,152,152,152,-73,-76,-72,152,-74,152,152,-78,-193,-192,-77,-194,152,-75,-260,-302,152,152,152,-28,-303,-303,-221,-224,-222,-218,-219,-223,-225,152,-227,-228,-220,-226,-300,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,-303,-260,152,-212,-211,152,-209,152,152,152,-195,152,-208,-196,152,152,152,-260,152,152,-12,152,152,-11,152,152,-28,-303,-260,-207,-210,152,-199,152,-197,-303,-176,152,152,-303,152,-260,152,152,152,152,-198,152,152,152,152,-11,152,-203,-202,-200,152,-303,152,152,152,-204,-201,152,-206,-205,]),'DIVIDE':([115,147,149,150,151,152,153,154,155,158,159,161,167,169,172,174,175,176,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,451,452,453,458,499,503,512,],[-288,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,-232,-291,-265,-297,-289,-277,304,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,304,304,304,304,304,304,304,304,304,304,-234,-233,304,304,304,304,304,-235,-258,-266,-267,-252,-282,-275,-276,]),'FOR':([53,70,101,104,121,199,200,203,205,212,214,215,216,217,219,221,222,233,332,333,336,338,342,345,347,348,426,427,431,433,436,472,473,474,476,494,495,497,509,514,515,516,518,519,520,],[-68,-301,-81,-69,206,-73,-76,-72,-74,206,-78,-193,-192,-77,-194,206,-75,-302,-212,-211,-209,206,-195,-208,-196,206,-207,-210,-199,206,-197,206,-198,206,206,-203,-202,-200,206,206,-204,-201,206,-206,-205,]),'PLUSPLUS':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,115,121,136,141,144,145,147,148,150,151,152,153,154,155,156,157,158,159,163,164,166,167,168,169,170,171,172,173,174,175,177,178,181,182,183,195,199,200,203,204,205,211,212,214,215,216,217,219,221,222,225,226,233,235,246,249,250,251,256,260,261,262,263,264,265,266,267,268,269,270,271,272,273,275,276,278,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,355,361,362,363,366,370,372,374,377,378,379,380,383,388,389,416,417,418,424,426,427,429,431,433,436,447,450,452,453,454,456,459,460,462,463,464,470,472,473,474,475,476,480,481,493,494,495,497,499,501,502,503,506,509,512,514,515,516,518,519,520,],[-115,-116,-117,-68,-303,-301,-28,-160,-27,-81,-69,166,-28,-303,-288,166,-161,-303,166,166,-292,-264,-280,-295,-299,-296,-293,-278,166,-262,-279,278,166,-261,166,-291,-260,-265,166,166,-297,-259,-289,-277,-294,-290,-263,166,166,166,-73,-76,-72,166,-74,166,166,-78,-193,-192,-77,-194,166,-75,-260,-288,-302,166,166,166,-28,-303,-303,-221,-224,-222,-218,-219,-223,-225,166,-227,-228,-220,-226,-300,166,-298,-274,-273,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,-303,-260,166,-212,-211,166,-209,166,166,166,-195,166,-208,-196,166,166,166,-260,166,166,-12,166,166,-11,-272,-271,-270,-269,-268,-281,166,166,-28,-303,-260,-207,-210,166,-199,166,-197,-303,-176,-266,-267,166,166,-303,166,-260,166,166,166,166,-198,166,166,166,166,-11,166,-203,-202,-200,-282,166,-303,-275,166,166,-276,166,-204,-201,166,-206,-205,]),'EQUALS':([8,37,61,85,86,87,88,89,97,105,115,119,135,140,147,149,150,151,152,153,154,155,158,159,167,169,172,174,175,177,178,184,185,226,233,247,272,274,275,276,278,282,285,286,288,289,293,364,365,368,373,377,378,379,380,383,388,412,413,440,441,445,449,451,452,453,458,477,478,479,499,503,512,],[-52,-29,-53,-54,-157,-156,-37,
144,145,-30,-288,-156,246,-55,-292,263,-280,-295,-299,-296,-293,-278,-279,-253,-291,-265,-297,-289,-277,-294,-290,-36,-35,-288,-302,-38,-300,-257,-298,-274,-273,-31,-251,-256,-254,-34,-255,-44,-43,-177,450,-272,-271,-270,-269,-268,-281,-33,-32,-39,-42,-180,-178,-258,-266,-267,-252,-41,-40,-179,-282,-275,-276,]),'ELSE':([53,104,199,200,203,205,214,217,222,233,332,333,336,345,347,426,427,431,436,473,494,495,497,515,516,519,520,],[-68,-69,-73,-76,-72,-74,-78,-77,-75,-302,-212,-211,-209,-208,-196,-207,-210,-199,-197,-198,-203,-202,509,-204,-201,-206,-205,]),'ANDEQUAL':([115,147,149,150,151,152,153,154,155,158,159,167,169,172,174,175,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,451,452,453,458,499,503,512,],[-288,-292,268,-280,-295,-299,-296,-293,-278,-279,-253,-291,-265,-297,-289,-277,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-258,-266,-267,-252,-282,-275,-276,]),'EQ':([115,147,149,150,151,152,153,154,155,158,159,161,167,169,172,174,175,176,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,451,452,453,458,499,503,512,],[-288,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,-232,-291,-265,-297,-289,-277,308,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-238,308,-239,-237,-241,-245,-240,-236,-243,308,-234,-233,-242,308,-244,308,308,-235,-258,-266,-267,-252,-282,-275,-276,]),'AND':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,115,121,136,141,144,145,147,148,149,150,151,152,153,154,155,156,157,158,159,161,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,181,182,183,195,199,200,203,204,205,211,212,214,215,216,217,219,221,222,225,226,233,235,246,249,250,251,256,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,278,280,281,284,285,286,287,288,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,355,361,362,363,366,370,372,374,377,378,379,380,383,388,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,416,417,418,424,426,427,429,431,433,436,447,450,451,452,453,454,456,458,459,460,462,463,464,470,472,473,474,475,476,480,481,493,494,495,497,499,501,502,503,506,509,512,514,515,516,518,519,520,],[-115,-116,-117,-68,-303,-301,-28,-160,-27,-81,-69,173,-28,-303,-288,173,-161,-303,173,173,-292,-264,-251,-280,-295,-299,-296,-293,-278,173,-262,-279,-253,-232,173,-261,173,-291,-260,-265,173,173,-297,-259,-289,-277,309,-294,-290,-263,173,173,173,-73,-76,-72,173,-74,173,173,-78,-193,-192,-77,-194,173,-75,-260,-288,-302,173,173,173,-28,-303,-303,-221,-224,-222,-218,-219,-223,-225,173,-227,-228,-220,-226,-300,173,-257,-298,-274,-273,173,173,173,-251,-256,173,-254,-255,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,-303,-260,173,-212,-211,173,-209,173,173,173,-195,173,-208,-196,173,173,173,-260,173,173,-12,173,173,-11,-272,-271,-270,-269,-268,-281,173,-238,309,-239,-237,-241,-245,-240,-236,-243,309,-234,-233,-242,309,-244,-246,309,-235,173,-28,-303,-260,-207,-210,173,-199,173,-197,-303,-176,-258,-266,-267,173,173,-252,-303,173,-260,173,173,173,173,-198,173,173,173,173,-11,173,-203,-202,-200,-282,173,-303,-275,173,173,-276,173,-204,-201,173,-206,-205,]),'TYPEID':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63
,64,65,67,68,69,70,71,72,73,74,76,77,78,79,80,81,83,84,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,136,137,139,142,146,170,184,185,186,189,198,199,200,203,205,212,214,215,216,217,219,222,228,229,230,231,232,233,234,240,245,254,273,277,279,282,283,284,287,289,323,327,332,333,335,336,342,345,347,350,351,353,354,356,357,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[29,-303,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,29,-94,-109,-104,-136,-65,-93,-110,29,69,73,-215,-107,-303,-111,88,-63,-116,29,-29,-135,-105,-62,-101,-67,-112,-106,-303,-108,-303,-103,-117,-68,-98,-85,-10,-9,29,-53,88,-82,29,29,-61,-131,-301,-130,29,-147,-146,-28,-158,-160,-27,-88,88,-90,88,29,-87,-89,-92,-81,-84,-86,-69,-30,193,29,-70,29,-83,29,29,-128,-140,-137,29,29,88,-161,-159,88,29,88,29,-36,-35,29,193,29,-73,-76,-72,-74,29,-78,-193,-192,-77,-194,-75,29,29,-129,-132,-138,-302,-126,-127,-148,-71,29,377,379,-31,29,29,29,-34,29,29,-212,-211,29,-209,-195,-208,-196,-134,-133,88,-139,-150,-149,29,-33,-32,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'LBRACE':([8,18,22,27,28,37,38,53,61,62,64,66,67,69,70,71,73,74,87,101,104,105,121,122,143,144,145,184,185,199,200,203,205,212,214,215,216,217,219,221,222,233,256,282,289,332,333,336,338,342,345,347,348,366,372,374,389,412,413,426,427,431,433,436,447,450,451,456,457,459,472,473,474,476,480,481,494,495,497,502,509,514,515,516,518,519,520,],[-52,-303,-136,70,70,-29,-135,-68,-53,-7,-82,70,-8,70,-301,70,70,70,-303,-81,-69,-30,70,-83,70,70,70,-36,-35,-73,-76,-72,-74,70,-78,-193,-192,-77,-194,70,-75,-302,-303,-31,-34,-212,-211,-209,70,-195,-208,-196,70,-12,70,-11,70,-33,-32,-207,-210,-199,70,-197,-303,-176,70,70,70,-303,70,-198,70,70,70,-11,-203,-202,-200,-303,70,70,-204,-201,70,-206,-205,]),'PPHASH':([0,12,14,17,23,26,34,40,42,53,68,101,104,120,233,254,347,],[42,-64,-60,-66,-65,42,-63,-62,-67,-68,-61,-81,-69,-70,-302,-71,-196,]),'INT':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,136,142,170,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,228,229,230,231,232,233,234,240,245,254,273,282,283,284,287,289,292,323,327,332,333,335,336,342,345,347,350,351,354,356,357,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[50,-303,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,50,-94,-109,-104,-65,-93,-110,50,-215,-107,-111,50,-63,-116,50,-29,-105,-62,-101,-67,-112,-106,-303,-108,-303,-103,-117,-68,-98,-85,-10,-9,50,-53,50,-82,50,50,-61,-131,-301,-130,50,-147,-146,-160,-88,-90,50,-87,-89,-92,-81,-84,-86,-69,-30,50,50,-70,50,-83,50,50,-128,-140,-137,50,50,50,-161,50,50,-36,-35,50,50,-73,-76,-72,-74,50,-78,-193,-192,-77,-194,-75,50,50,-129,-132,-138,-302,-126,-127,-148,-71,50,-31,50,50,50,-34,50,50,50,-212,-211,50,-209,-195,-208,-196,-134,-133,-139,-150,-149,50,-33,-32,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'SIGNED':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,136,142,170,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,228,229,230,231,232,233,234,240,245,254,273,282,283,284,287,289,292,323,327,332,333,3
35,336,342,345,347,350,351,354,356,357,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[48,-303,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,48,-94,-109,-104,-65,-93,-110,48,-215,-107,-111,48,-63,-116,48,-29,-105,-62,-101,-67,-112,-106,-303,-108,-303,-103,-117,-68,-98,-85,-10,-9,48,-53,48,-82,48,48,-61,-131,-301,-130,48,-147,-146,-160,-88,-90,48,-87,-89,-92,-81,-84,-86,-69,-30,48,48,-70,48,-83,48,48,-128,-140,-137,48,48,48,-161,48,48,-36,-35,48,48,-73,-76,-72,-74,48,-78,-193,-192,-77,-194,-75,48,48,-129,-132,-138,-302,-126,-127,-148,-71,48,-31,48,48,48,-34,48,48,48,-212,-211,48,-209,-195,-208,-196,-134,-133,-139,-150,-149,48,-33,-32,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'CONTINUE':([53,70,101,104,121,199,200,203,205,212,214,215,216,217,219,221,222,233,332,333,336,338,342,345,347,348,426,427,431,433,436,472,473,474,476,494,495,497,509,514,515,516,518,519,520,],[-68,-301,-81,-69,207,-73,-76,-72,-74,207,-78,-193,-192,-77,-194,207,-75,-302,-212,-211,-209,207,-195,-208,-196,207,-207,-210,-199,207,-197,207,-198,207,207,-203,-202,-200,207,207,-204,-201,207,-206,-205,]),'NOT':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,136,141,144,145,148,156,157,163,164,166,168,170,171,173,181,182,183,195,199,200,203,204,205,211,212,214,215,216,217,219,221,222,225,233,235,246,249,250,251,256,260,261,262,263,264,265,266,267,268,269,270,271,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,355,361,362,363,366,370,372,374,389,416,417,418,424,426,427,429,431,433,436,447,450,454,456,459,460,462,463,464,470,472,473,474,475,476,480,481,493,494,495,497,501,502,506,509,514,515,516,518,519,520,],[-115,-116,-117,-68,-303,-301,-28,-160,-27,-81,-69,181,-28,-303,181,-161,-303,181,181,-264,181,-262,181,-261,181,-260,181,181,-259,-263,181,181,181,-73,-76,-72,181,-74,181,181,-78,-193,-192,-77,-194,181,-75,-260,-302,181,181,181,-28,-303,-303,-221,-224,-222,-218,-219,-223,-225,181,-227,-228,-220,-226,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,-303,-260,181,-212,-211,181,-209,181,181,181,-195,181,-208,-196,181,181,181,-260,181,181,-12,181,181,-11,181,181,-28,-303,-260,-207,-210,181,-199,181,-197,-303,-176,181,181,-303,181,-260,181,181,181,181,-198,181,181,181,181,-11,181,-203,-202,-200,181,-303,181,181,181,-204,-201,181,-206,-205,]),'OREQUAL':([115,147,149,150,151,152,153,154,155,158,159,167,169,172,174,175,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,451,452,453,458,499,503,512,],[-288,-292,269,-280,-295,-299,-296,-293,-278,-279,-253,-291,-265,-297,-289,-277,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-258,-266,-267,-252,-282,-275,-276,]),'MOD':([115,147,149,150,151,152,153,154,155,158,159,161,167,169,172,174,175,176,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,451,452,453,458,499,503,512,],[-288,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,-232,-291,-265,-297,-289,-277,312,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,312,312,312,312,312,312,312,312,312,312,-234,-233,312,312,312,312,312,-235,-258,-266,-267,-252,-282,-275,-276,]),'RSHIFT':([115,147,149,150,151,152,153,154,155,158,159,161,167,169,172,174,175,176,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,38
0,383,388,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,451,452,453,458,499,503,512,],[-288,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,-232,-291,-265,-297,-289,-277,294,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-238,294,-239,-237,294,294,294,-236,294,294,-234,-233,294,294,294,294,294,-235,-258,-266,-267,-252,-282,-275,-276,]),'DEFAULT':([53,70,101,104,121,199,200,203,205,212,214,215,216,217,219,221,222,233,332,333,336,338,342,345,347,348,426,427,431,433,436,472,473,474,476,494,495,497,509,514,515,516,518,519,520,],[-68,-301,-81,-69,209,-73,-76,-72,-74,209,-78,-193,-192,-77,-194,209,-75,-302,-212,-211,-209,209,-195,-208,-196,209,-207,-210,-199,209,-197,209,-198,209,209,-203,-202,-200,209,209,-204,-201,209,-206,-205,]),'__INT128':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,136,142,170,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,228,229,230,231,232,233,234,240,245,254,273,282,283,284,287,289,292,323,327,332,333,335,336,342,345,347,350,351,354,356,357,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[25,-303,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,25,-94,-109,-104,-65,-93,-110,25,-215,-107,-111,25,-63,-116,25,-29,-105,-62,-101,-67,-112,-106,-303,-108,-303,-103,-117,-68,-98,-85,-10,-9,25,-53,25,-82,25,25,-61,-131,-301,-130,25,-147,-146,-160,-88,-90,25,-87,-89,-92,-81,-84,-86,-69,-30,25,25,-70,25,-83,25,25,-128,-140,-137,25,25,25,-161,25,25,-36,-35,25,25,-73,-76,-72,-74,25,-78,-193,-192,-77,-194,-75,25,25,-129,-132,-138,-302,-126,-127,-148,-71,25,-31,25,25,25,-34,25,25,25,-212,-211,25,-209,-195,-208,-196,-134,-133,-139,-150,-149,25,-33,-32,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'WHILE':([53,70,101,104,121,199,200,203,205,212,214,215,216,217,219,221,222,233,332,333,336,338,342,345,346,347,348,426,427,431,433,436,472,473,474,476,494,495,497,509,514,515,516,518,519,520,],[-68,-301,-81,-69,210,-73,-76,-72,-74,210,-78,-193,-192,-77,-194,210,-75,-302,-212,-211,-209,210,-195,-208,435,-196,210,-207,-210,-199,210,-197,210,-198,210,210,-203,-202,-200,210,210,-204,-201,210,-206,-205,]),'DIVEQUAL':([115,147,149,150,151,152,153,154,155,158,159,167,169,172,174,175,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,451,452,453,458,499,503,512,],[-288,-292,260,-280,-295,-299,-296,-293,-278,-279,-253,-291,-265,-297,-289,-277,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-258,-266,-267,-252,-282,-275,-276,]),'EXTERN':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,37,39,40,41,42,44,45,47,48,49,50,51,53,54,60,61,63,64,67,68,69,70,71,73,74,80,83,87,91,92,96,101,104,105,113,120,121,122,142,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,231,233,245,254,282,289,323,327,332,333,335,336,342,345,347,350,351,356,357,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[11,11,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,11,-94,-109,-104,-65,-93,-110,11,-215,-107,-111,11,-63,-116,-29,-105,-62,-101,-67,-112,-106,11,-108,11,-103,-117,-68,-98,11,-53,11,-82,11,-61,-131,-301,-130,-147,-146,-88,-90,11,-87,-89,-92,-81,-69,-30,11,-70,11,-83,11,-36,-35,11,11,-73,-76,-72,-74,11,-78,-193,-192,-77,-194,-75,-132,-30
2,-148,-71,-31,-34,11,11,-212,-211,11,-209,-195,-208,-196,-134,-133,-150,-149,11,-33,-32,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'CASE':([53,70,101,104,121,199,200,203,205,212,214,215,216,217,219,221,222,233,332,333,336,338,342,345,347,348,426,427,431,433,436,472,473,474,476,494,495,497,509,514,515,516,518,519,520,],[-68,-301,-81,-69,211,-73,-76,-72,-74,211,-78,-193,-192,-77,-194,211,-75,-302,-212,-211,-209,211,-195,-208,-196,211,-207,-210,-199,211,-197,211,-198,211,211,-203,-202,-200,211,211,-204,-201,211,-206,-205,]),'LAND':([115,147,149,150,151,152,153,154,155,158,159,161,167,169,172,174,175,176,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,451,452,453,458,499,503,512,],[-288,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,-232,-291,-265,-297,-289,-277,307,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-238,307,-239,-237,-241,-245,-240,-236,-243,-248,-234,-233,-242,-249,-244,-246,-247,-235,-258,-266,-267,-252,-282,-275,-276,]),'REGISTER':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,37,39,40,41,42,44,45,47,48,49,50,51,53,54,60,61,63,64,67,68,69,70,71,73,74,80,83,87,91,92,96,101,104,105,113,120,121,122,142,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,231,233,245,254,282,289,323,327,332,333,335,336,342,345,347,350,351,356,357,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[19,19,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,19,-94,-109,-104,-65,-93,-110,19,-215,-107,-111,19,-63,-116,-29,-105,-62,-101,-67,-112,-106,19,-108,19,-103,-117,-68,-98,19,-53,19,-82,19,-61,-131,-301,-130,-147,-146,-88,-90,19,-87,-89,-92,-81,-69,-30,19,-70,19,-83,19,-36,-35,19,19,-73,-76,-72,-74,19,-78,-193,-192,-77,-194,-75,-132,-302,-148,-71,-31,-34,19,19,-212,-211,19,-209,-195,-208,-196,-134,-133,-150,-149,19,-33,-32,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'MODEQUAL':([115,147,149,150,151,152,153,154,155,158,159,167,169,172,174,175,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,451,452,453,458,499,503,512,],[-288,-292,262,-280,-295,-299,-296,-293,-278,-279,-253,-291,-265,-297,-289,-277,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-258,-266,-267,-252,-282,-275,-276,]),'NE':([115,147,149,150,151,152,153,154,155,158,159,161,167,169,172,174,175,176,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,451,452,453,458,499,503,512,],[-288,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,-232,-291,-265,-297,-289,-277,299,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-238,299,-239,-237,-241,-245,-240,-236,-243,299,-234,-233,-242,299,-244,299,299,-235,-258,-266,-267,-252,-282,-275,-276,]),'SWITCH':([53,70,101,104,121,199,200,203,205,212,214,215,216,217,219,221,222,233,332,333,336,338,342,345,347,348,426,427,431,433,436,472,473,474,476,494,495,497,509,514,515,516,518,519,520,],[-68,-301,-81,-69,213,-73,-76,-72,-74,213,-78,-193,-192,-77,-194,213,-75,-302,-212,-211,-209,213,-195,-208,-196,213,-207,-210,-199,213,-197,213,-198,213,213,-203,-202,-200,213,213,-204,-201,213,-206,-205,]),'INT_CONST_HEX':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,136,141,144,145,148,156,157,163,164,166,168,170,171,173,181,182,183,195,199,200,203,2
04,205,211,212,214,215,216,217,219,221,222,225,233,235,246,249,250,251,256,260,261,262,263,264,265,266,267,268,269,270,271,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,355,361,362,363,366,370,372,374,389,416,417,418,424,426,427,429,431,433,436,447,450,454,456,459,460,462,463,464,470,472,473,474,475,476,480,481,493,494,495,497,501,502,506,509,514,515,516,518,519,520,],[-115,-116,-117,-68,-303,-301,-28,-160,-27,-81,-69,167,-28,-303,167,-161,-303,167,167,-264,167,-262,167,-261,167,-260,167,167,-259,-263,167,167,167,-73,-76,-72,167,-74,167,167,-78,-193,-192,-77,-194,167,-75,-260,-302,167,167,167,-28,-303,-303,-221,-224,-222,-218,-219,-223,-225,167,-227,-228,-220,-226,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,-303,-260,167,-212,-211,167,-209,167,167,167,-195,167,-208,-196,167,167,167,-260,167,167,-12,167,167,-11,167,167,-28,-303,-260,-207,-210,167,-199,167,-197,-303,-176,167,167,-303,167,-260,167,167,167,167,-198,167,167,167,167,-11,167,-203,-202,-200,167,-303,167,167,167,-204,-201,167,-206,-205,]),'_COMPLEX':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,136,142,170,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,228,229,230,231,232,233,234,240,245,254,273,282,283,284,287,289,292,323,327,332,333,335,336,342,345,347,350,351,354,356,357,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[30,-303,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,30,-94,-109,-104,-65,-93,-110,30,-215,-107,-111,30,-63,-116,30,-29,-105,-62,-101,-67,-112,-106,-303,-108,-303,-103,-117,-68,-98,-85,-10,-9,30,-53,30,-82,30,30,-61,-131,-301,-130,30,-147,-146,-160,-88,-90,30,-87,-89,-92,-81,-84,-86,-69,-30,30,30,-70,30,-83,30,30,-128,-140,-137,30,30,30,-161,30,30,-36,-35,30,30,-73,-76,-72,-74,30,-78,-193,-192,-77,-194,-75,30,30,-129,-132,-138,-302,-126,-127,-148,-71,30,-31,30,30,30,-34,30,30,30,-212,-211,30,-209,-195,-208,-196,-134,-133,-139,-150,-149,30,-33,-32,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'PPPRAGMASTR':([53,],[104,]),'PLUSEQUAL':([115,147,149,150,151,152,153,154,155,158,159,167,169,172,174,175,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,451,452,453,458,499,503,512,],[-288,-292,265,-280,-295,-299,-296,-293,-278,-279,-253,-291,-265,-297,-289,-277,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-258,-266,-267,-252,-282,-275,-276,]),'STRUCT':([0,1,3,7,8,9,11,12,14,17,18,19,23,24,26,34,35,36,37,40,42,47,49,51,53,54,55,56,57,60,61,64,65,67,68,70,72,78,87,101,102,103,104,105,117,120,121,122,123,124,126,127,128,129,136,142,170,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,228,229,232,233,254,273,282,283,284,287,289,323,327,332,333,335,336,342,345,347,354,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[38,-303,-115,-97,-52,-95,-96,-64,-60,-66,38,-94,-65,-93,38,-63,-116,38,-29,-62,-67,-303,-303,-117,-68,-98,-85,-10,-9,38,-53,-82,38,38,-61,-301,38,-160,38,-81,-84,-86,-69,-30,38,-70,38,-83,38,38,-140,-137,38,38,-161,38,38,-36,-35,38,38,-73,-76,-72,-74,38,-78,-193,-192,-77,-194,-75,38,38,-138,-302,-71,38,-31,38,38,38,-34,38,38,-212,-211,38,-209,-195,-208,-196,-139,38,-33,-32,-20
7,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'CONDOP':([115,147,149,150,151,152,153,154,155,158,159,161,167,169,172,174,175,176,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,451,452,453,458,499,503,512,],[-288,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,-232,-291,-265,-297,-289,-277,310,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-238,-250,-239,-237,-241,-245,-240,-236,-243,-248,-234,-233,-242,-249,-244,-246,-247,-235,-258,-266,-267,-252,-282,-275,-276,]),'BREAK':([53,70,101,104,121,199,200,203,205,212,214,215,216,217,219,221,222,233,332,333,336,338,342,345,347,348,426,427,431,433,436,472,473,474,476,494,495,497,509,514,515,516,518,519,520,],[-68,-301,-81,-69,218,-73,-76,-72,-74,218,-78,-193,-192,-77,-194,218,-75,-302,-212,-211,-209,218,-195,-208,-196,218,-207,-210,-199,218,-197,218,-198,218,218,-203,-202,-200,218,218,-204,-201,218,-206,-205,]),'VOLATILE':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,31,32,33,34,35,37,39,40,41,42,44,45,47,48,49,50,51,53,54,59,60,61,63,64,67,68,69,70,71,72,73,74,76,78,80,83,87,91,92,96,101,104,105,107,108,113,120,121,122,123,124,125,126,127,128,129,130,136,141,142,170,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,228,229,230,231,232,233,234,240,245,250,251,254,273,282,283,284,287,289,292,322,323,327,332,333,335,336,342,345,347,350,351,354,356,357,392,412,413,417,418,426,427,431,436,473,494,495,497,515,516,519,520,],[51,51,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,51,-94,-109,-104,-65,-93,-110,51,-215,-107,51,-111,51,-63,-116,-29,-105,-62,-101,-67,-112,-106,51,-108,51,-103,-117,-68,-98,51,51,-53,51,-82,51,-61,-131,-301,-130,51,-147,-146,51,-160,-88,-90,51,-87,-89,-92,-81,-69,-30,51,51,51,-70,51,-83,51,51,-128,-140,-137,51,51,51,-161,51,51,51,-36,-35,51,51,-73,-76,-72,-74,51,-78,-193,-192,-77,-194,-75,51,51,-129,-132,-138,-302,-126,-127,-148,51,51,-71,51,-31,51,51,51,-34,51,51,51,51,-212,-211,51,-209,-195,-208,-196,-134,-133,-139,-150,-149,51,-33,-32,51,51,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'PPPRAGMA':([0,12,14,17,23,26,34,40,42,53,68,70,101,104,120,121,199,200,203,205,212,214,215,216,217,219,221,222,233,254,332,333,336,338,342,345,347,348,426,427,431,433,436,472,473,474,476,494,495,497,509,514,515,516,518,519,520,],[53,-64,-60,-66,-65,53,-63,-62,-67,-68,-61,-301,-81,-69,-70,53,-73,-76,-72,-74,53,-78,-193,-192,-77,-194,53,-75,-302,-71,-212,-211,-209,53,-195,-208,-196,53,-207,-210,-199,53,-197,53,-198,53,53,-203,-202,-200,53,53,-204,-201,53,-206,-205,]),'INLINE':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,37,39,40,41,42,44,45,47,48,49,50,51,53,54,60,61,63,64,67,68,69,70,71,73,74,80,83,87,91,92,96,101,104,105,113,120,121,122,142,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,231,233,245,254,282,289,323,327,332,333,335,336,342,345,347,350,351,356,357,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[54,54,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,54,-94,-109,-104,-65,-93,-110,54,-215,-107,-111,54,-63,-116,-29,-105,-62,-101,-67,-112,-106,54,-108,54,-103,-117,-68,-98,54,-53,54,-82,54,-61,-131,-301,-130,-147,-146,-88,-90,54,-87,-89,-92,-81,-69,-30,54,-70,54,-83,54,-36,-35,54,54,-73,-76,-72,-74,54,-78,-193,-192,-77,-194,-75,-132,-302,-148,-71,-31,-34,54,54,-212,-211,54,-209,-195,-208,-196,-134,-133,-150,-149,54,-33,-32,-207,-210,
-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'INT_CONST_BIN':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,136,141,144,145,148,156,157,163,164,166,168,170,171,173,181,182,183,195,199,200,203,204,205,211,212,214,215,216,217,219,221,222,225,233,235,246,249,250,251,256,260,261,262,263,264,265,266,267,268,269,270,271,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,355,361,362,363,366,370,372,374,389,416,417,418,424,426,427,429,431,433,436,447,450,454,456,459,460,462,463,464,470,472,473,474,475,476,480,481,493,494,495,497,501,502,506,509,514,515,516,518,519,520,],[-115,-116,-117,-68,-303,-301,-28,-160,-27,-81,-69,147,-28,-303,147,-161,-303,147,147,-264,147,-262,147,-261,147,-260,147,147,-259,-263,147,147,147,-73,-76,-72,147,-74,147,147,-78,-193,-192,-77,-194,147,-75,-260,-302,147,147,147,-28,-303,-303,-221,-224,-222,-218,-219,-223,-225,147,-227,-228,-220,-226,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,-303,-260,147,-212,-211,147,-209,147,147,147,-195,147,-208,-196,147,147,147,-260,147,147,-12,147,147,-11,147,147,-28,-303,-260,-207,-210,147,-199,147,-197,-303,-176,147,147,-303,147,-260,147,147,147,147,-198,147,147,147,147,-11,147,-203,-202,-200,147,-303,147,147,147,-204,-201,147,-206,-205,]),'DO':([53,70,101,104,121,199,200,203,205,212,214,215,216,217,219,221,222,233,332,333,336,338,342,345,347,348,426,427,431,433,436,472,473,474,476,494,495,497,509,514,515,516,518,519,520,],[-68,-301,-81,-69,221,-73,-76,-72,-74,221,-78,-193,-192,-77,-194,221,-75,-302,-212,-211,-209,221,-195,-208,-196,221,-207,-210,-199,221,-197,221,-198,221,221,-203,-202,-200,221,221,-204,-201,221,-206,-205,]),'LNOT':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,136,141,144,145,148,156,157,163,164,166,168,170,171,173,181,182,183,195,199,200,203,204,205,211,212,214,215,216,217,219,221,222,225,233,235,246,249,250,251,256,260,261,262,263,264,265,266,267,268,269,270,271,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,355,361,362,363,366,370,372,374,389,416,417,418,424,426,427,429,431,433,436,447,450,454,456,459,460,462,463,464,470,472,473,474,475,476,480,481,493,494,495,497,501,502,506,509,514,515,516,518,519,520,],[-115,-116,-117,-68,-303,-301,-28,-160,-27,-81,-69,148,-28,-303,148,-161,-303,148,148,-264,148,-262,148,-261,148,-260,148,148,-259,-263,148,148,148,-73,-76,-72,148,-74,148,148,-78,-193,-192,-77,-194,148,-75,-260,-302,148,148,148,-28,-303,-303,-221,-224,-222,-218,-219,-223,-225,148,-227,-228,-220,-226,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,-303,-260,148,-212,-211,148,-209,148,148,148,-195,148,-208,-196,148,148,148,-260,148,148,-12,148,148,-11,148,148,-28,-303,-260,-207,-210,148,-199,148,-197,-303,-176,148,148,-303,148,-260,148,148,148,148,-198,148,148,148,148,-11,148,-203,-202,-200,148,-303,148,148,148,-204,-201,148,-206,-205,]),'CONST':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,31,32,33,34,35,37,39,40,41,42,44,45,47,48,49,50,51,53,54,59,60,61,63,64,67,68,69,70,71,72,73,74,76,78,80,83,87,91,92,96,101,104,105,107,108,113,120,121,122,123,124,125,126,127,128,129,130,136,141,142,170,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,228,229,230,231,232,233,234,240,245,250,251,254,273,282,283,284,287,289,292,322,323,327,332,333,335,336,342,345,
347,350,351,354,356,357,392,412,413,417,418,426,427,431,436,473,494,495,497,515,516,519,520,],[3,3,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,3,-94,-109,-104,-65,-93,-110,3,-215,-107,3,-111,3,-63,-116,-29,-105,-62,-101,-67,-112,-106,3,-108,3,-103,-117,-68,-98,3,3,-53,3,-82,3,-61,-131,-301,-130,3,-147,-146,3,-160,-88,-90,3,-87,-89,-92,-81,-69,-30,3,3,3,-70,3,-83,3,3,-128,-140,-137,3,3,3,-161,3,3,3,-36,-35,3,3,-73,-76,-72,-74,3,-78,-193,-192,-77,-194,-75,3,3,-129,-132,-138,-302,-126,-127,-148,3,3,-71,3,-31,3,3,3,-34,3,3,3,3,-212,-211,3,-209,-195,-208,-196,-134,-133,-139,-150,-149,3,-33,-32,3,3,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'LOR':([115,147,149,150,151,152,153,154,155,158,159,161,167,169,172,174,175,176,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,451,452,453,458,499,503,512,],[-288,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,-232,-291,-265,-297,-289,-277,295,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-238,-250,-239,-237,-241,-245,-240,-236,-243,-248,-234,-233,-242,-249,-244,-246,-247,-235,-258,-266,-267,-252,-282,-275,-276,]),'CHAR_CONST':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,136,141,144,145,148,156,157,163,164,166,168,170,171,173,181,182,183,195,199,200,203,204,205,211,212,214,215,216,217,219,221,222,225,233,235,246,249,250,251,256,260,261,262,263,264,265,266,267,268,269,270,271,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,355,361,362,363,366,370,372,374,389,416,417,418,424,426,427,429,431,433,436,447,450,454,456,459,460,462,463,464,470,472,473,474,475,476,480,481,493,494,495,497,501,502,506,509,514,515,516,518,519,520,],[-115,-116,-117,-68,-303,-301,-28,-160,-27,-81,-69,151,-28,-303,151,-161,-303,151,151,-264,151,-262,151,-261,151,-260,151,151,-259,-263,151,151,151,-73,-76,-72,151,-74,151,151,-78,-193,-192,-77,-194,151,-75,-260,-302,151,151,151,-28,-303,-303,-221,-224,-222,-218,-219,-223,-225,151,-227,-228,-220,-226,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,-303,-260,151,-212,-211,151,-209,151,151,151,-195,151,-208,-196,151,151,151,-260,151,151,-12,151,151,-11,151,151,-28,-303,-260,-207,-210,151,-199,151,-197,-303,-176,151,151,-303,151,-260,151,151,151,151,-198,151,151,151,151,-11,151,-203,-202,-200,151,-303,151,151,151,-204,-201,151,-206,-205,]),'LSHIFT':([115,147,149,150,151,152,153,154,155,158,159,161,167,169,172,174,175,176,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,451,452,453,458,499,503,512,],[-288,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,-232,-291,-265,-297,-289,-277,296,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-238,296,-239,-237,296,296,296,-236,296,296,-234,-233,296,296,296,296,296,-235,-258,-266,-267,-252,-282,-275,-276,]),'RBRACE':([53,70,101,104,115,121,126,127,129,133,134,135,147,149,150,151,152,153,154,155,158,159,161,167,169,172,174,175,176,177,178,180,199,200,203,205,212,214,215,216,217,219,220,222,223,228,229,232,233,242,243,244,256,257,272,274,275,276,278,285,286,288,293,332,333,336,341,342,345,347,354,358,359,367,371,374,375,377,378,379,380,383,388,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,
426,427,431,436,444,447,448,451,452,453,458,473,482,486,487,494,495,497,498,499,502,503,512,515,516,519,520,],[-68,-301,-81,-69,-288,-303,-140,-137,233,-151,233,-154,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,-232,-291,-265,-297,-289,-277,-230,-294,-290,-216,-73,-76,-72,-74,-6,-78,-193,-192,-77,-194,-5,-75,233,233,233,-138,-302,233,233,-152,-303,-171,-300,-257,-298,-274,-273,-251,-256,-254,-255,-212,-211,-209,-229,-195,-208,-196,-139,-153,-155,233,-22,-21,-217,-272,-271,-270,-269,-268,-281,-238,-250,-239,-237,-241,-245,-240,-236,-243,-248,-234,-233,-242,-249,-244,-246,-247,-235,-207,-210,-199,-197,-172,233,-174,-258,-266,-267,-252,-198,-173,233,-231,-203,-202,-200,-175,-282,233,-275,-276,-204,-201,-206,-205,]),'_BOOL':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,136,142,170,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,228,229,230,231,232,233,234,240,245,254,273,282,283,284,287,289,292,323,327,332,333,335,336,342,345,347,350,351,354,356,357,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[15,-303,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,15,-94,-109,-104,-65,-93,-110,15,-215,-107,-111,15,-63,-116,15,-29,-105,-62,-101,-67,-112,-106,-303,-108,-303,-103,-117,-68,-98,-85,-10,-9,15,-53,15,-82,15,15,-61,-131,-301,-130,15,-147,-146,-160,-88,-90,15,-87,-89,-92,-81,-84,-86,-69,-30,15,15,-70,15,-83,15,15,-128,-140,-137,15,15,15,-161,15,15,-36,-35,15,15,-73,-76,-72,-74,15,-78,-193,-192,-77,-194,-75,15,15,-129,-132,-138,-302,-126,-127,-148,-71,15,-31,15,15,15,-34,15,15,15,-212,-211,15,-209,-195,-208,-196,-134,-133,-139,-150,-149,15,-33,-32,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'LE':([115,147,149,150,151,152,153,154,155,158,159,161,167,169,172,174,175,176,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,451,452,453,458,499,503,512,],[-288,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,-232,-291,-265,-297,-289,-277,298,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-238,298,-239,-237,-241,298,-240,-236,-243,298,-234,-233,-242,298,298,298,298,-235,-258,-266,-267,-252,-282,-275,-276,]),'SEMI':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,46,47,48,49,50,51,53,54,55,56,57,61,63,65,68,69,70,71,72,73,74,80,82,83,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,115,119,120,121,123,124,125,126,127,129,130,140,147,149,150,151,152,153,154,155,158,159,161,167,169,172,174,175,176,177,178,180,184,185,199,200,202,203,204,205,207,208,212,214,215,216,217,218,219,220,221,222,224,226,228,229,230,231,232,233,234,236,237,238,239,240,241,245,247,248,254,255,257,258,259,272,274,275,276,278,282,285,286,288,289,293,331,332,333,334,335,336,338,341,342,343,345,347,348,350,351,352,354,356,357,364,365,375,377,378,379,380,383,388,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,412,413,426,427,428,429,430,431,433,436,438,439,440,441,444,451,452,453,458,470,471,472,473,474,476,477,478,482,487,492,494,495,497,499,503,508,509,512,514,515,516,518,519,520,],[17,-303,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,-94,-109,-104,-65,-93,-110,17,-215,-107,-111,-303,-63,-116,-303,-29,-
105,-62,-101,-67,-112,-106,101,-303,-108,-303,-103,-117,-68,-98,-85,-10,-9,-53,-303,-303,-61,-131,-301,-130,126,-147,-146,-88,-20,-90,-54,-157,-156,-37,-120,-79,-87,-89,-19,-118,-122,-92,-124,-16,-80,-15,-81,-84,-86,-69,-30,-288,-156,-70,-303,126,126,-128,-140,-137,126,-303,-55,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,-232,-291,-265,-297,-289,-277,-230,-294,-290,-216,-36,-35,-73,-76,332,-72,333,-74,336,-14,-303,-78,-193,-192,-77,345,-194,-13,-303,-75,-213,-288,126,126,-129,-132,-138,-302,-126,-26,-25,354,-141,-127,-143,-148,-38,-119,-71,-121,-171,-125,-123,-300,-257,-298,-274,-273,-31,-251,-256,-254,-34,-255,426,-212,-211,427,-303,-209,-303,-229,-195,-13,-208,-196,-303,-134,-133,-145,-139,-150,-149,-44,-43,-217,-272,-271,-270,-269,-268,-281,-238,-250,-239,-237,-241,-245,-240,-236,-243,-248,-234,-233,-242,-249,-244,-246,-247,-235,-33,-32,-207,-210,470,-303,-214,-199,-303,-197,-142,-144,-39,-42,-172,-258,-266,-267,-252,-303,493,-303,-198,-303,-303,-41,-40,-173,-231,506,-203,-202,-200,-282,-275,515,-303,-276,-303,-204,-201,-303,-206,-205,]),'LT':([115,147,149,150,151,152,153,154,155,158,159,161,167,169,172,174,175,176,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,451,452,453,458,499,503,512,],[-288,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,-232,-291,-265,-297,-289,-277,300,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-238,300,-239,-237,-241,300,-240,-236,-243,300,-234,-233,-242,300,300,300,300,-235,-258,-266,-267,-252,-282,-275,-276,]),'COMMA':([2,3,5,6,7,8,9,10,11,15,16,19,20,21,24,25,29,30,31,32,35,37,39,41,44,45,48,50,51,54,61,69,71,73,74,76,77,78,79,80,82,83,85,86,87,88,89,91,92,94,95,96,97,98,105,112,113,114,115,116,118,119,125,133,134,135,136,137,140,147,149,150,151,152,153,154,155,158,159,161,167,169,172,174,175,176,177,178,180,184,185,187,188,189,190,191,192,193,194,196,208,224,226,230,231,233,234,236,239,240,241,242,243,244,245,247,248,255,257,258,259,272,274,275,276,278,282,285,286,288,289,290,292,293,320,321,328,330,334,341,350,351,352,356,357,358,359,364,365,371,375,377,378,379,380,381,382,383,384,385,388,390,391,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,421,422,430,432,434,437,438,439,440,441,444,448,451,452,453,458,465,466,467,468,469,477,478,482,483,486,487,488,489,496,498,499,503,504,505,511,512,],[-102,-115,-113,-99,-97,-52,-95,-114,-96,-100,-91,-94,-109,-104,-93,-110,-215,-107,-303,-111,-116,-29,-105,-101,-112,-106,-108,-103,-117,-98,-53,-131,-130,-147,-146,-28,-158,-160,-27,-88,139,-90,-54,-157,-156,-37,-120,-87,-89,-118,-122,-92,-124,146,-30,-164,-303,197,-288,198,-169,-156,-128,-151,244,-154,-161,-159,-55,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,-232,-291,-265,-297,-289,-277,-230,-294,-290,-216,-36,-35,-168,-2,-182,-56,-166,-1,-45,-167,-184,337,-213,-288,-129,-132,-302,-126,353,-141,-127,-143,244,244,-152,-148,-38,-119,-121,-171,-125,-123,-300,-257,-298,-274,-273,-31,-251,-256,-254,-34,337,-303,-255,-57,-183,-170,-165,337,-229,-134,-133,-145,-150,-149,-153,-155,-44,-43,447,-217,-272,-271,-270,-269,337,-286,-268,454,455,-281,-181,-182,-238,-250,-239,-237,-241,-245,-240,-236,-243,-248,-234,-233,-242,-249,-244,-246,337,-247,-235,-33,-32,-191,-185,-187,-189,-214,337,337,337,-142,-144,-39,-42,-172,-174,-258,-266,-267,-252,-51,-50,-186,-188,-190,-41,-40,-173,-287,502,-231,-46,-49,337,-175,-282,-275,-48,-47,337,-276,]),'OFFSETOF':([3,35,51,53,59,70,76,78,7
9,101,104,106,107,108,121,136,141,144,145,148,156,157,163,164,166,168,170,171,173,181,182,183,195,199,200,203,204,205,211,212,214,215,216,217,219,221,222,225,233,235,246,249,250,251,256,260,261,262,263,264,265,266,267,268,269,270,271,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,355,361,362,363,366,370,372,374,389,416,417,418,424,426,427,429,431,433,436,447,450,454,456,459,460,462,463,464,470,472,473,474,475,476,480,481,493,494,495,497,501,502,506,509,514,515,516,518,519,520,],[-115,-116,-117,-68,-303,-301,-28,-160,-27,-81,-69,162,-28,-303,162,-161,-303,162,162,-264,162,-262,162,-261,162,-260,162,162,-259,-263,162,162,162,-73,-76,-72,162,-74,162,162,-78,-193,-192,-77,-194,162,-75,-260,-302,162,162,162,-28,-303,-303,-221,-224,-222,-218,-219,-223,-225,162,-227,-228,-220,-226,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,-303,-260,162,-212,-211,162,-209,162,162,162,-195,162,-208,-196,162,162,162,-260,162,162,-12,162,162,-11,162,162,-28,-303,-260,-207,-210,162,-199,162,-197,-303,-176,162,162,-303,162,-260,162,162,162,162,-198,162,162,162,162,-11,162,-203,-202,-200,162,-303,162,162,162,-204,-201,162,-206,-205,]),'TYPEDEF':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,37,39,40,41,42,44,45,47,48,49,50,51,53,54,60,61,63,64,67,68,69,70,71,73,74,80,83,87,91,92,96,101,104,105,113,120,121,122,142,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,231,233,245,254,282,289,323,327,332,333,335,336,342,345,347,350,351,356,357,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[7,7,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,7,-94,-109,-104,-65,-93,-110,7,-215,-107,-111,7,-63,-116,-29,-105,-62,-101,-67,-112,-106,7,-108,7,-103,-117,-68,-98,7,-53,7,-82,7,-61,-131,-301,-130,-147,-146,-88,-90,7,-87,-89,-92,-81,-69,-30,7,-70,7,-83,7,-36,-35,7,7,-73,-76,-72,-74,7,-78,-193,-192,-77,-194,-75,-132,-302,-148,-71,-31,-34,7,7,-212,-211,7,-209,-195,-208,-196,-134,-133,-150,-149,7,-33,-32,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'XOR':([115,147,149,150,151,152,153,154,155,158,159,161,167,169,172,174,175,176,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,451,452,453,458,499,503,512,],[-288,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,-232,-291,-265,-297,-289,-277,303,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-238,303,-239,-237,-241,-245,-240,-236,-243,-248,-234,-233,-242,303,-244,-246,303,-235,-258,-266,-267,-252,-282,-275,-276,]),'AUTO':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,37,39,40,41,42,44,45,47,48,49,50,51,53,54,60,61,63,64,67,68,69,70,71,73,74,80,83,87,91,92,96,101,104,105,113,120,121,122,142,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,231,233,245,254,282,289,323,327,332,333,335,336,342,345,347,350,351,356,357,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[24,24,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,24,-94,-109,-104,-65,-93,-110,24,-215,-107,-111,24,-63,-116,-29,-105,-62,-101,-67,-112,-106,24,-108,24,-103,-117,-68,-98,24,-53,24,-82,24,-61,-131,-301,-130,-147,-146,-88,-90,24,-87,-89,-92,-81,-69,-30,24,-70,24,-83,24,-36,-35,24,24,-73,-76,-72,-74,24,-78,-193,-192,-77,-194,-75,-132,-302,-148,-71,-31,-34,24,24,-21
2,-211,24,-209,-195,-208,-196,-134,-133,-150,-149,24,-33,-32,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'TIMES':([0,1,2,3,4,5,6,7,9,10,11,12,14,15,16,17,19,20,21,23,24,25,26,29,30,31,32,33,34,35,36,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,59,63,65,68,69,70,71,73,74,76,77,78,79,80,81,83,91,92,96,101,102,103,104,106,107,108,113,115,120,121,125,130,136,139,141,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,161,163,164,166,167,168,169,170,171,172,173,174,175,176,177,178,181,182,183,186,195,199,200,203,204,205,211,212,214,215,216,217,219,221,222,225,226,230,231,233,234,235,240,245,246,249,250,251,254,256,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,278,280,281,284,285,286,287,288,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,350,351,353,355,356,357,361,362,363,366,370,372,374,377,378,379,380,383,388,389,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,416,417,418,424,426,427,429,431,433,436,447,450,451,452,453,454,456,458,459,460,462,463,464,470,472,473,474,475,476,480,481,493,494,495,497,499,501,502,503,506,509,512,514,515,516,518,519,520,],[31,-303,-102,-115,31,-113,-99,-97,-95,-114,-96,-64,-60,-100,-91,-66,-94,-109,-104,-65,-93,-110,31,-215,-107,-303,-111,31,-63,-116,31,-105,-62,-101,-67,-112,-106,-303,-108,-303,-103,-117,-68,-98,-85,-10,-9,-303,31,31,-61,-131,-301,-130,-147,-146,-28,31,-160,-27,-88,31,-90,-87,-89,-92,-81,-84,-86,-69,168,-28,-303,31,-288,-70,225,-128,31,-161,31,-303,225,225,31,-292,-264,-251,-280,-295,-299,-296,-293,-278,225,-262,-279,-253,-232,225,-261,225,-291,-260,-265,225,225,-297,-259,-289,-277,305,-294,-290,-263,225,225,31,325,-73,-76,-72,225,-74,225,225,-78,-193,-192,-77,-194,225,-75,-260,-288,-129,-132,-302,-126,225,-127,-148,225,361,-28,-303,-71,-303,-221,-224,-222,-218,-219,-223,-225,225,-227,-228,-220,-226,-300,225,-257,-298,-274,-273,225,225,225,-251,-256,225,-254,31,-255,225,225,225,225,225,225,225,225,225,225,225,225,225,225,225,225,225,225,225,-303,-260,424,-212,-211,225,-209,225,225,225,-195,225,-208,-196,225,225,-134,-133,31,225,-150,-149,-260,225,225,-12,225,225,-11,-272,-271,-270,-269,-268,-281,225,31,305,305,305,305,305,305,305,305,305,305,-234,-233,305,305,305,305,305,-235,462,-28,-303,-260,-207,-210,225,-199,225,-197,-303,-176,-258,-266,-267,225,225,-252,-303,225,-260,225,225,225,225,-198,225,225,225,225,-11,225,-203,-202,-200,-282,225,-303,-275,225,225,-276,225,-204,-201,225,-206,-205,]),'LPAREN':([0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,19,20,21,23,24,25,26,29,30,31,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,59,61,63,65,68,69,70,71,73,74,76,77,78,79,80,81,83,84,85,88,91,92,96,101,102,103,104,105,106,107,108,113,115,120,121,125,130,136,137,139,140,141,144,145,146,147,148,150,151,152,153,154,155,156,157,158,159,162,163,164,166,167,168,169,170,171,172,173,174,175,177,178,181,182,183,184,185,186,189,190,193,195,196,199,200,203,204,205,206,210,211,212,213,214,215,216,217,219,221,222,225,226,227,230,231,233,234,235,240,245,246,247,249,250,251,254,256,260,261,262,263,264,265,266,267,268,269,270,271,272,273,275,276,278,280,281,282,284,287,289,292,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,315,320,321,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,350,351,353,355,356,357,361,362,363,364,365,366,370,372,374,377,378,379,380,383,388,389,391,392,412,413,414,415,416,417,418,421,422,424,426,427,429,
431,433,435,436,440,441,447,450,452,453,454,456,459,460,462,463,464,465,466,467,468,469,470,472,473,474,475,476,477,478,480,481,488,489,493,494,495,497,499,501,502,503,504,505,506,509,512,514,515,516,518,519,520,],[4,-303,-102,-115,4,-113,-99,-97,60,-95,-114,-96,-64,4,-60,-100,-91,-66,-94,-109,-104,-65,-93,-110,4,-215,-107,-303,-111,81,-63,-116,4,-29,-105,-62,-101,-67,-112,-106,-303,-108,-303,-103,-117,-68,-98,-85,-10,-9,-303,60,81,4,-61,-131,-301,-130,-147,-146,-28,-158,-160,-27,-88,81,-90,81,142,-37,-87,-89,-92,-81,-84,-86,-69,-30,170,-28,-303,186,-288,-70,170,-128,81,-161,-159,81,142,-303,170,170,81,-292,-264,-280,-295,-299,-296,-293,-278,273,-262,-279,281,283,284,-261,287,-291,-260,-265,170,287,-297,-259,-289,-277,-294,-290,-263,170,170,-36,-35,186,186,323,-45,170,327,-73,-76,-72,170,-74,335,339,284,170,344,-78,-193,-192,-77,-194,170,-75,-260,-288,349,-129,-132,-302,-126,284,-127,-148,284,-38,170,-28,-303,-71,-303,-221,-224,-222,-218,-219,-223,-225,170,-227,-228,-220,-226,-300,170,-298,-274,-273,170,170,-31,170,170,-34,392,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,284,170,284,284,186,323,327,-303,-260,170,-212,-211,170,-209,170,170,170,-195,170,-208,-196,170,170,-134,-133,81,284,-150,-149,-260,170,170,-44,-43,-12,284,170,-11,-272,-271,-270,-269,-268,-281,284,392,392,-33,-32,-191,-185,170,-28,-303,-187,-189,-260,-207,-210,170,-199,170,475,-197,-39,-42,-303,-176,-266,-267,170,284,-303,284,-260,170,170,-51,-50,-186,-188,-190,170,170,-198,170,170,170,-41,-40,170,-11,-46,-49,170,-203,-202,-200,-282,170,-303,-275,-48,-47,170,170,-276,170,-204,-201,170,-206,-205,]),'MINUSMINUS':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,115,121,136,141,144,145,147,148,150,151,152,153,154,155,156,157,158,159,163,164,166,167,168,169,170,171,172,173,174,175,177,178,181,182,183,195,199,200,203,204,205,211,212,214,215,216,217,219,221,222,225,226,233,235,246,249,250,251,256,260,261,262,263,264,265,266,267,268,269,270,271,272,273,275,276,278,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,355,361,362,363,366,370,372,374,377,378,379,380,383,388,389,416,417,418,424,426,427,429,431,433,436,447,450,452,453,454,456,459,460,462,463,464,470,472,473,474,475,476,480,481,493,494,495,497,499,501,502,503,506,509,512,514,515,516,518,519,520,],[-115,-116,-117,-68,-303,-301,-28,-160,-27,-81,-69,171,-28,-303,-288,171,-161,-303,171,171,-292,-264,-280,-295,-299,-296,-293,-278,171,-262,-279,276,171,-261,171,-291,-260,-265,171,171,-297,-259,-289,-277,-294,-290,-263,171,171,171,-73,-76,-72,171,-74,171,171,-78,-193,-192,-77,-194,171,-75,-260,-288,-302,171,171,171,-28,-303,-303,-221,-224,-222,-218,-219,-223,-225,171,-227,-228,-220,-226,-300,171,-298,-274,-273,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,-303,-260,171,-212,-211,171,-209,171,171,171,-195,171,-208,-196,171,171,171,-260,171,171,-12,171,171,-11,-272,-271,-270,-269,-268,-281,171,171,-28,-303,-260,-207,-210,171,-199,171,-197,-303,-176,-266,-267,171,171,-303,171,-260,171,171,171,171,-198,171,171,171,171,-11,171,-203,-202,-200,-282,171,-303,-275,171,171,-276,171,-204,-201,171,-206,-205,]),'ID':([0,1,2,3,4,5,6,7,9,10,11,12,13,14,15,16,17,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,38,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,59,60,63,65,68,69,70,71,73,74,75,76,77,78,79,80,81,83,84,91,92,96,101,102,103,104,106,107,108,113,120,121,125,130,131,132,136,137,139,141,142,144,145,146,148,156,157,163,
164,166,168,170,171,173,181,182,183,186,189,195,197,199,200,201,203,204,205,211,212,214,215,216,217,219,221,222,225,230,231,233,234,235,240,244,245,246,249,250,251,254,256,260,261,262,263,264,265,266,267,268,269,270,271,273,277,279,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,315,322,323,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,350,351,353,355,356,357,361,362,363,366,369,370,372,374,389,416,417,418,424,426,427,429,431,433,436,447,450,454,455,456,459,460,462,463,464,470,472,473,474,475,476,480,481,493,494,495,497,500,501,502,506,509,514,515,516,518,519,520,],[37,-303,-102,-115,37,-113,-99,-97,-95,-114,-96,-64,37,-60,-100,-91,-66,-94,-109,-104,-136,-65,-93,-110,37,71,74,-215,-107,-303,-111,37,-63,-116,37,-135,-105,-62,-101,-67,-112,-106,-303,-108,-303,-103,-117,-68,-98,-85,-10,-9,-303,115,37,37,-61,-131,-301,-130,-147,-146,135,-28,-158,-160,-27,-88,37,-90,37,-87,-89,-92,-81,-84,-86,-69,115,-28,-303,37,-70,226,-128,37,135,135,-161,-159,37,-303,115,115,115,37,-264,115,-262,115,-261,115,-260,115,115,-259,-263,115,115,37,37,115,115,-73,-76,331,-72,115,-74,115,226,-78,-193,-192,-77,-194,226,-75,-260,-129,-132,-302,-126,115,-127,135,-148,115,115,-28,-303,-71,-303,-221,-224,-222,-218,-219,-223,-225,115,-227,-228,-220,-226,115,378,380,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,37,-303,115,-260,115,-212,-211,115,-209,115,226,115,-195,115,-208,-196,226,115,-134,-133,37,115,-150,-149,-260,115,115,-12,115,115,115,-11,115,115,-28,-303,-260,-207,-210,115,-199,226,-197,-303,-176,115,115,115,-303,115,-260,115,115,115,226,-198,226,115,226,115,-11,115,-203,-202,-200,115,115,-303,115,226,226,-204,-201,226,-206,-205,]),'IF':([53,70,101,104,121,199,200,203,205,212,214,215,216,217,219,221,222,233,332,333,336,338,342,345,347,348,426,427,431,433,436,472,473,474,476,494,495,497,509,514,515,516,518,519,520,],[-68,-301,-81,-69,227,-73,-76,-72,-74,227,-78,-193,-192,-77,-194,227,-75,-302,-212,-211,-209,227,-195,-208,-196,227,-207,-210,-199,227,-197,227,-198,227,227,-203,-202,-200,227,227,-204,-201,227,-206,-205,]),'STRING_LITERAL':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,136,141,144,145,148,156,157,158,163,164,166,168,170,171,172,173,181,182,183,195,199,200,203,204,205,211,212,214,215,216,217,219,221,222,225,233,235,246,249,250,251,256,260,261,262,263,264,265,266,267,268,269,270,271,273,275,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,322,325,326,332,333,335,336,337,338,339,342,344,345,347,348,349,355,361,362,363,366,370,372,374,389,416,417,418,424,426,427,429,431,433,436,447,450,454,456,459,460,462,463,464,470,472,473,474,475,476,480,481,493,494,495,497,501,502,506,509,514,515,516,518,519,520,],[-115,-116,-117,-68,-303,-301,-28,-160,-27,-81,-69,172,-28,-303,172,-161,-303,172,172,-264,172,-262,275,172,-261,172,-260,172,172,-297,-259,-263,172,172,172,-73,-76,-72,172,-74,172,172,-78,-193,-192,-77,-194,172,-75,-260,-302,172,172,172,-28,-303,-303,-221,-224,-222,-218,-219,-223,-225,172,-227,-228,-220,-226,172,-298,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,-303,-260,172,-212,-211,172,-209,172,172,172,-195,172,-208,-196,172,172,172,-260,172,172,-12,172,172,-11,172,172,-28,-303,-260,-207,-210,172,-199,172,-197,-303,-176,172,172,-303,172,-260,172,172,172,172,-198,172,172,172,172,-11,172,-203,-202,-200,172,-303,172,172,172,-204,-201,172,-206,-205,]),'FLOAT':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23
,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,136,142,170,184,185,186,198,199,200,203,205,212,214,215,216,217,219,222,228,229,230,231,232,233,234,240,245,254,273,282,283,284,287,289,292,323,327,332,333,335,336,342,345,347,350,351,354,356,357,392,412,413,426,427,431,436,473,494,495,497,515,516,519,520,],[39,-303,-102,-115,-113,-99,-97,-52,-95,-114,-96,-64,-60,-100,-91,-66,39,-94,-109,-104,-65,-93,-110,39,-215,-107,-111,39,-63,-116,39,-29,-105,-62,-101,-67,-112,-106,-303,-108,-303,-103,-117,-68,-98,-85,-10,-9,39,-53,39,-82,39,39,-61,-131,-301,-130,39,-147,-146,-160,-88,-90,39,-87,-89,-92,-81,-84,-86,-69,-30,39,39,-70,39,-83,39,39,-128,-140,-137,39,39,39,-161,39,39,-36,-35,39,39,-73,-76,-72,-74,39,-78,-193,-192,-77,-194,-75,39,39,-129,-132,-138,-302,-126,-127,-148,-71,39,-31,39,39,39,-34,39,39,39,-212,-211,39,-209,-195,-208,-196,-134,-133,-139,-150,-149,39,-33,-32,-207,-210,-199,-197,-198,-203,-202,-200,-204,-201,-206,-205,]),'XOREQUAL':([115,147,149,150,151,152,153,154,155,158,159,167,169,172,174,175,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,451,452,453,458,499,503,512,],[-288,-292,264,-280,-295,-299,-296,-293,-278,-279,-253,-291,-265,-297,-289,-277,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-258,-266,-267,-252,-282,-275,-276,]),'LSHIFTEQUAL':([115,147,149,150,151,152,153,154,155,158,159,167,169,172,174,175,177,178,226,233,272,274,275,276,278,285,286,288,293,377,378,379,380,383,388,451,452,453,458,499,503,512,],[-288,-292,266,-280,-295,-299,-296,-293,-278,-279,-253,-291,-265,-297,-289,-277,-294,-290,-288,-302,-300,-257,-298,-274,-273,-251,-256,-254,-255,-272,-271,-270,-269,-268,-281,-258,-266,-267,-252,-282,-275,-276,]),'RBRACKET':([3,35,51,59,78,79,106,107,115,136,141,147,149,150,151,152,153,154,155,158,159,160,161,165,167,168,169,172,174,175,176,177,178,179,180,195,224,233,249,250,272,274,275,276,278,285,286,288,293,313,314,322,324,325,326,341,360,361,375,377,378,379,380,381,383,388,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,410,411,416,417,423,424,430,442,443,446,451,452,453,458,461,462,487,490,491,499,503,511,512,],[-115,-116,-117,-303,-160,-27,-303,-28,-288,-161,-303,-292,-251,-280,-295,-299,-296,-293,-278,-279,-253,282,-232,-4,-291,289,-265,-297,-289,-277,-230,-294,-290,-3,-216,-303,-213,-302,-303,-28,-300,-257,-298,-274,-273,-251,-256,-254,-255,412,413,-303,421,422,-303,-229,440,441,-217,-272,-271,-270,-269,452,-268,-281,-238,-250,-239,-237,-241,-245,-240,-236,-243,-248,-234,-233,-242,-249,-244,-246,-247,-235,-303,-28,467,468,-214,477,478,479,-258,-266,-267,-252,488,489,-231,504,505,-282,-275,517,-276,]),}
-
-_lr_action = {}
-for _k, _v in _lr_action_items.items():
- for _x,_y in zip(_v[0],_v[1]):
- if not _x in _lr_action: _lr_action[_x] = {}
- _lr_action[_x][_k] = _y
-del _lr_action_items
-
-_lr_goto_items = {'expression_statement':([121,212,221,338,348,433,472,474,476,509,514,518,],[199,199,199,199,199,199,199,199,199,199,199,199,]),'struct_or_union_specifier':([0,18,26,36,60,65,67,72,87,117,121,123,124,128,129,142,170,186,198,212,228,229,273,283,284,287,323,327,335,392,],[5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,]),'init_declarator_list':([33,63,],[82,82,]),'init_declarator_list_opt':([33,63,],[90,90,]),'iteration_statement':([121,212,221,338,348,433,472,474,476,509,514,518,],[200,200,200,200,200,200,200,200,200,200,200,200,]),'unified_string_literal':([106,121,144,145,156,163,166,170,171,182,183,195,204,211,212,221,235,246,249,267,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,326,335,337,338,339,344,348,349,355,362,363,370,372,389,416,429,433,454,456,460,463,464,470,472,474,475,476,480,493,501,506,509,514,518,],[158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,]),'assignment_expression_opt':([106,195,249,326,416,],[160,324,360,423,461,]),'brace_open':([27,28,66,69,71,73,74,121,143,144,145,212,221,338,348,372,389,433,451,456,457,472,474,476,480,509,514,518,],[72,75,121,123,124,131,132,121,121,256,256,121,121,121,121,256,459,121,459,459,459,121,121,121,256,121,121,121,]),'enumerator':([75,131,132,244,],[133,133,133,358,]),'typeid_noparen_declarator':([113,],[194,]),'type_qualifier_list_opt':([31,59,108,141,251,322,418,],[77,106,183,249,363,416,464,]),'declaration_specifiers_no_type_opt':([1,47,49,],[55,102,103,]),'expression_opt':([121,212,221,335,338,348,429,433,470,472,474,476,493,506,509,514,518,],[202,202,202,428,202,202,471,202,492,202,202,202,507,513,202,202,202,]),'designation':([256,447,459,502,],[366,366,366,366,]),'parameter_list':([60,142,186,323,327,392,],[116,116,116,116,116,116,]),'labeled_statement':([121,212,221,338,348,433,472,474,476,509,514,518,],[203,203,203,203,203,203,203,203,203,203,203,203,]),'abstract_declarator':([113,186,292,392,],[188,319,188,319,]),'translation_unit':([0,],[26,]),'init_declarator':([33,63,139,146,],[94,94,248,259,]),'direct_abstract_declarator':([113,186,189,292,315,391,392,],[196,196,321,196,321,321,196,]),'designator_list':([256,447,459,502,],[373,373,373,373,]),'identifier':([60,106,121,142,144,145,156,163,166,170,171,182,183,195,197,204,211,212,221,235,246,249,267,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,323,326,335,337,338,339,344,348,349,355,362,363,369,370,372,389,416,429,433,454,455,456,460,463,464,470,472,474,475,476,480,493,500,501,506,509,514,518,],[118,175,175,118,175,175,175,175,175,175,175,175,175,175,328,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,118,175,175,175,175,175,175,175,175,175,175,175,445,175,175,175,175,175,175,175,485,175,175,175,175,175,175,175,175,175,175,175,510,175,175,175,175,175,]),'offsetof_member_designator':([455,],[484,]),'unary_expression':([106,121,144,145,156,163,166,170,171,182,183,195,204,211,212,221,235,246,249,267,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,326,335,337,338,339,344,348,349,355,362,363,370,372,389,416,429,433,454,456,460,463,464,470,472,474,475,476,480,493,501
,506,509,514,518,],[149,149,149,149,274,285,288,149,293,149,149,149,149,285,149,149,285,285,149,149,149,149,149,149,149,285,285,285,285,285,285,285,285,285,285,285,285,285,285,285,285,149,285,285,149,149,149,149,149,149,149,149,285,149,149,285,149,285,149,149,149,149,285,285,149,149,149,149,149,149,149,149,149,149,149,149,149,149,]),'abstract_declarator_opt':([113,292,],[187,390,]),'initializer':([144,145,372,480,],[255,258,448,498,]),'direct_id_declarator':([0,4,13,26,33,36,63,65,81,84,113,130,139,146,186,189,315,353,],[8,8,61,8,8,8,8,8,8,61,8,8,8,8,8,61,61,8,]),'struct_declaration_list':([72,123,124,],[129,228,229,]),'pp_directive':([0,26,],[12,12,]),'declaration_list':([18,87,],[67,67,]),'id_init_declarator':([36,65,],[95,95,]),'type_specifier':([0,18,26,36,60,65,67,72,87,117,121,123,124,128,129,142,170,186,198,212,228,229,273,283,284,287,323,327,335,392,],[16,16,16,96,16,96,16,125,16,96,16,125,125,230,125,16,125,16,16,16,125,125,125,125,125,125,16,16,16,16,]),'compound_statement':([66,121,143,212,221,338,348,433,472,474,476,509,514,518,],[120,205,254,205,205,205,205,205,205,205,205,205,205,205,]),'pointer':([0,4,26,33,36,63,65,77,81,113,130,139,146,186,292,353,392,],[13,13,13,84,13,84,13,137,84,189,84,84,84,315,391,84,391,]),'typeid_declarator':([33,63,81,130,139,146,353,],[86,86,138,86,86,86,86,]),'id_init_declarator_list':([36,65,],[98,98,]),'declarator':([33,63,130,139,146,353,],[89,89,241,89,89,241,]),'argument_expression_list':([281,],[384,]),'struct_declarator_list_opt':([130,],[238,]),'typedef_name':([0,18,26,36,60,65,67,72,87,117,121,123,124,128,129,142,170,186,198,212,228,229,273,283,284,287,323,327,335,392,],[32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,]),'parameter_type_list_opt':([186,327,392,],[318,425,318,]),'struct_declarator':([130,353,],[239,438,]),'type_qualifier':([0,1,18,26,31,33,47,49,59,60,63,67,72,76,87,107,108,113,121,123,124,128,129,130,141,142,170,186,198,212,228,229,250,251,273,283,284,287,292,322,323,327,335,392,417,418,],[47,47,47,47,78,91,47,47,78,47,91,47,78,136,47,136,78,91,47,78,78,136,78,240,78,47,78,47,47,47,78,78,136,78,78,78,78,78,240,78,47,47,47,47,136,78,]),'assignment_operator':([149,],[267,]),'expression':([121,170,204,212,221,273,280,284,287,310,335,338,339,344,348,349,429,433,470,472,474,475,476,493,501,506,509,514,518,],[208,290,334,208,208,290,381,290,290,409,208,208,432,434,208,437,208,208,208,208,208,496,208,208,511,208,208,208,208,]),'storage_class_specifier':([0,1,18,26,33,47,49,60,63,67,87,113,121,142,186,198,212,323,327,335,392,],[1,1,1,1,80,1,1,1,80,1,1,80,1,1,1,1,1,1,1,1,1,]),'unified_wstring_literal':([106,121,144,145,156,163,166,170,171,182,183,195,204,211,212,221,235,246,249,267,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,326,335,337,338,339,344,348,349,355,362,363,370,372,389,416,429,433,454,456,460,463,464,470,472,474,475,476,480,493,501,506,509,514,518,],[150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,]),'translation_unit_or_empty':([0,],[43,]),'initializer_list_opt':([256,],[367,]),'brace_close':([129,134,223,228,229,242,243,367,447,486,502,],[231,245,347,350,351,356,357,444,482,503,512,]),'direct_typeid_declarator':([33,63,81,84,130,139,146,353,],[85,85,85,140,85,85,85,8
5,]),'external_declaration':([0,26,],[14,68,]),'type_name':([170,273,283,284,287,],[291,376,385,386,387,]),'block_item_list':([121,],[212,]),'pppragma_directive':([0,26,121,212,221,338,348,433,472,474,476,509,514,518,],[23,23,214,214,214,214,214,214,214,214,214,214,214,214,]),'statement':([121,212,221,338,348,433,472,474,476,509,514,518,],[215,215,346,431,436,473,494,495,497,516,519,520,]),'cast_expression':([106,121,144,145,163,170,182,183,195,204,211,212,221,235,246,249,267,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,326,335,337,338,339,344,348,349,355,362,363,370,372,389,416,429,433,454,456,460,463,464,470,472,474,475,476,480,493,501,506,509,514,518,],[161,161,161,161,286,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,458,161,161,161,161,458,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,]),'struct_declarator_list':([130,],[236,]),'empty':([0,1,18,31,33,36,47,49,59,60,63,65,87,106,108,113,121,130,141,142,186,195,212,221,249,251,256,292,322,323,326,327,335,338,348,392,416,418,429,433,447,459,470,472,474,476,493,502,506,509,514,518,],[52,57,62,79,93,100,57,57,79,110,93,100,62,179,79,192,220,237,79,110,316,179,343,343,179,79,374,192,79,110,179,316,343,343,343,316,179,79,343,343,481,481,343,343,343,343,343,481,343,343,343,343,]),'parameter_declaration':([60,142,186,198,323,327,392,],[112,112,112,330,112,112,112,]),'primary_expression':([106,121,144,145,156,163,166,170,171,182,183,195,204,211,212,221,235,246,249,267,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,326,335,337,338,339,344,348,349,355,362,363,370,372,389,416,429,433,454,456,460,463,464,470,472,474,475,476,480,493,501,506,509,514,518,],[169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,]),'declaration':([0,18,26,67,87,121,212,335,],[34,64,34,122,64,216,216,429,]),'declaration_specifiers_no_type':([0,1,18,26,47,49,60,67,87,121,142,186,198,212,323,327,335,392,],[36,56,65,36,56,56,117,65,65,65,117,117,117,65,117,117,65,117,]),'jump_statement':([121,212,221,338,348,433,472,474,476,509,514,518,],[217,217,217,217,217,217,217,217,217,217,217,217,]),'enumerator_list':([75,131,132,],[134,242,243,]),'block_item':([121,212,],[219,342,]),'constant_expression':([211,235,246,355,370,],[340,352,359,439,446,]),'identifier_list_opt':([60,142,323,],[109,252,419,]),'constant':([106,121,144,145,156,163,166,170,171,182,183,195,204,211,212,221,235,246,249,267,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,326,335,337,338,339,344,348,349,355,362,363,370,372,389,416,429,433,454,456,460,463,464,470,472,474,475,476,480,493,501,506,509,514,518,],[155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,]),'type_specifier_no_typeid':([0,18,26,33,36,60,63,65,67,72,87,113,117,121,123,124,128,129,130,142,170,186,198,212,228,229,273,283,284,287,292,
323,327,335,392,],[10,10,10,83,10,10,83,10,10,10,10,83,10,10,10,10,10,10,234,10,10,10,10,10,10,10,10,10,10,10,234,10,10,10,10,]),'struct_declaration':([72,123,124,129,228,229,],[127,127,127,232,232,232,]),'direct_typeid_noparen_declarator':([113,189,],[190,320,]),'id_declarator':([0,4,26,33,36,63,65,81,113,130,139,146,186,353,],[18,58,18,87,97,119,97,58,191,119,119,119,58,119,]),'selection_statement':([121,212,221,338,348,433,472,474,476,509,514,518,],[222,222,222,222,222,222,222,222,222,222,222,222,]),'postfix_expression':([106,121,144,145,156,163,166,170,171,182,183,195,204,211,212,221,235,246,249,267,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,326,335,337,338,339,344,348,349,355,362,363,370,372,389,416,429,433,454,456,460,463,464,470,472,474,475,476,480,493,501,506,509,514,518,],[159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,]),'initializer_list':([256,459,],[371,486,]),'unary_operator':([106,121,144,145,156,163,166,170,171,182,183,195,204,211,212,221,235,246,249,267,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,326,335,337,338,339,344,348,349,355,362,363,370,372,389,416,429,433,454,456,460,463,464,470,472,474,475,476,480,493,501,506,509,514,518,],[163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,]),'struct_or_union':([0,18,26,36,60,65,67,72,87,117,121,123,124,128,129,142,170,186,198,212,228,229,273,283,284,287,323,327,335,392,],[27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,]),'block_item_list_opt':([121,],[223,]),'assignment_expression':([106,121,144,145,170,182,183,195,204,212,221,249,267,273,280,281,284,287,310,326,335,337,338,339,344,348,349,362,363,372,416,429,433,454,463,464,470,472,474,475,476,480,493,501,506,509,514,518,],[165,224,257,257,224,313,314,165,224,224,224,165,375,224,224,382,224,224,224,165,224,430,224,224,224,224,224,442,443,257,165,224,224,483,490,491,224,224,224,224,224,257,224,224,224,224,224,224,]),'designation_opt':([256,447,459,502,],[372,480,372,480,]),'parameter_type_list':([60,142,186,323,327,392,],[111,253,317,420,317,317,]),'type_qualifier_list':([31,59,72,108,123,124,129,141,170,228,229,251,273,283,284,287,322,418,],[76,107,128,76,128,128,128,250,128,128,128,76,128,128,128,128,417,76,]),'designator':([256,373,447,459,502,],[368,449,368,368,368,]),'id_init_declarator_list_opt':([36,65,],[99,99,]),'declaration_specifiers':([0,18,26,60,67,87,121,142,186,198,212,323,327,335,392,],[33,63,33,113,63,63,63,113,113,113,63,113,113,63,113,]),'identifier_list':([60,142,323,],[114,114,114,]),'declaration_list_opt':([18,87,],[66,143,]),'function_definition':([0,26,],[40,40,]),'binary_expression':([106,121,144,145,170,182,183,195,204,211,212,221,235,246,249,267,273,280,281,284,287,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,326,335,337,338,339,344,348,349,355,362,363,370,372,416,429,433,454,460,463,464,470,472,474,475,476,480,493,501,506,509,514,518,],[176,176,176,176,176,176,
176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,176,410,411,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,]),'enum_specifier':([0,18,26,36,60,65,67,72,87,117,121,123,124,128,129,142,170,186,198,212,228,229,273,283,284,287,323,327,335,392,],[44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,]),'decl_body':([0,18,26,67,87,121,212,335,],[46,46,46,46,46,46,46,46,]),'function_specifier':([0,1,18,26,33,47,49,60,63,67,87,113,121,142,186,198,212,323,327,335,392,],[49,49,49,49,92,49,49,49,92,49,49,92,49,49,49,49,49,49,49,49,49,]),'specifier_qualifier_list':([72,123,124,129,170,228,229,273,283,284,287,],[130,130,130,130,292,130,130,292,292,292,292,]),'conditional_expression':([106,121,144,145,170,182,183,195,204,211,212,221,235,246,249,267,273,280,281,284,287,310,326,335,337,338,339,344,348,349,355,362,363,370,372,416,429,433,454,460,463,464,470,472,474,475,476,480,493,501,506,509,514,518,],[180,180,180,180,180,180,180,180,180,341,180,180,341,341,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,341,180,180,341,180,180,180,180,180,487,180,180,180,180,180,180,180,180,180,180,180,180,180,180,]),}
-
-_lr_goto = {}
-for _k, _v in _lr_goto_items.items():
- for _x, _y in zip(_v[0], _v[1]):
- if not _x in _lr_goto: _lr_goto[_x] = {}
- _lr_goto[_x][_k] = _y
-del _lr_goto_items
-_lr_productions = [
- ("S' -> translation_unit_or_empty","S'",1,None,None,None),
- ('abstract_declarator_opt -> empty','abstract_declarator_opt',1,'p_abstract_declarator_opt','plyparser.py',42),
- ('abstract_declarator_opt -> abstract_declarator','abstract_declarator_opt',1,'p_abstract_declarator_opt','plyparser.py',43),
- ('assignment_expression_opt -> empty','assignment_expression_opt',1,'p_assignment_expression_opt','plyparser.py',42),
- ('assignment_expression_opt -> assignment_expression','assignment_expression_opt',1,'p_assignment_expression_opt','plyparser.py',43),
- ('block_item_list_opt -> empty','block_item_list_opt',1,'p_block_item_list_opt','plyparser.py',42),
- ('block_item_list_opt -> block_item_list','block_item_list_opt',1,'p_block_item_list_opt','plyparser.py',43),
- ('declaration_list_opt -> empty','declaration_list_opt',1,'p_declaration_list_opt','plyparser.py',42),
- ('declaration_list_opt -> declaration_list','declaration_list_opt',1,'p_declaration_list_opt','plyparser.py',43),
- ('declaration_specifiers_no_type_opt -> empty','declaration_specifiers_no_type_opt',1,'p_declaration_specifiers_no_type_opt','plyparser.py',42),
- ('declaration_specifiers_no_type_opt -> declaration_specifiers_no_type','declaration_specifiers_no_type_opt',1,'p_declaration_specifiers_no_type_opt','plyparser.py',43),
- ('designation_opt -> empty','designation_opt',1,'p_designation_opt','plyparser.py',42),
- ('designation_opt -> designation','designation_opt',1,'p_designation_opt','plyparser.py',43),
- ('expression_opt -> empty','expression_opt',1,'p_expression_opt','plyparser.py',42),
- ('expression_opt -> expression','expression_opt',1,'p_expression_opt','plyparser.py',43),
- ('id_init_declarator_list_opt -> empty','id_init_declarator_list_opt',1,'p_id_init_declarator_list_opt','plyparser.py',42),
- ('id_init_declarator_list_opt -> id_init_declarator_list','id_init_declarator_list_opt',1,'p_id_init_declarator_list_opt','plyparser.py',43),
- ('identifier_list_opt -> empty','identifier_list_opt',1,'p_identifier_list_opt','plyparser.py',42),
- ('identifier_list_opt -> identifier_list','identifier_list_opt',1,'p_identifier_list_opt','plyparser.py',43),
- ('init_declarator_list_opt -> empty','init_declarator_list_opt',1,'p_init_declarator_list_opt','plyparser.py',42),
- ('init_declarator_list_opt -> init_declarator_list','init_declarator_list_opt',1,'p_init_declarator_list_opt','plyparser.py',43),
- ('initializer_list_opt -> empty','initializer_list_opt',1,'p_initializer_list_opt','plyparser.py',42),
- ('initializer_list_opt -> initializer_list','initializer_list_opt',1,'p_initializer_list_opt','plyparser.py',43),
- ('parameter_type_list_opt -> empty','parameter_type_list_opt',1,'p_parameter_type_list_opt','plyparser.py',42),
- ('parameter_type_list_opt -> parameter_type_list','parameter_type_list_opt',1,'p_parameter_type_list_opt','plyparser.py',43),
- ('struct_declarator_list_opt -> empty','struct_declarator_list_opt',1,'p_struct_declarator_list_opt','plyparser.py',42),
- ('struct_declarator_list_opt -> struct_declarator_list','struct_declarator_list_opt',1,'p_struct_declarator_list_opt','plyparser.py',43),
- ('type_qualifier_list_opt -> empty','type_qualifier_list_opt',1,'p_type_qualifier_list_opt','plyparser.py',42),
- ('type_qualifier_list_opt -> type_qualifier_list','type_qualifier_list_opt',1,'p_type_qualifier_list_opt','plyparser.py',43),
- ('direct_id_declarator -> ID','direct_id_declarator',1,'p_direct_id_declarator_1','plyparser.py',109),
- ('direct_id_declarator -> LPAREN id_declarator RPAREN','direct_id_declarator',3,'p_direct_id_declarator_2','plyparser.py',109),
- ('direct_id_declarator -> direct_id_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET','direct_id_declarator',5,'p_direct_id_declarator_3','plyparser.py',109),
- ('direct_id_declarator -> direct_id_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET','direct_id_declarator',6,'p_direct_id_declarator_4','plyparser.py',109),
- ('direct_id_declarator -> direct_id_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET','direct_id_declarator',6,'p_direct_id_declarator_4','plyparser.py',110),
- ('direct_id_declarator -> direct_id_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET','direct_id_declarator',5,'p_direct_id_declarator_5','plyparser.py',109),
- ('direct_id_declarator -> direct_id_declarator LPAREN parameter_type_list RPAREN','direct_id_declarator',4,'p_direct_id_declarator_6','plyparser.py',109),
- ('direct_id_declarator -> direct_id_declarator LPAREN identifier_list_opt RPAREN','direct_id_declarator',4,'p_direct_id_declarator_6','plyparser.py',110),
- ('direct_typeid_declarator -> TYPEID','direct_typeid_declarator',1,'p_direct_typeid_declarator_1','plyparser.py',109),
- ('direct_typeid_declarator -> LPAREN typeid_declarator RPAREN','direct_typeid_declarator',3,'p_direct_typeid_declarator_2','plyparser.py',109),
- ('direct_typeid_declarator -> direct_typeid_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET','direct_typeid_declarator',5,'p_direct_typeid_declarator_3','plyparser.py',109),
- ('direct_typeid_declarator -> direct_typeid_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET','direct_typeid_declarator',6,'p_direct_typeid_declarator_4','plyparser.py',109),
- ('direct_typeid_declarator -> direct_typeid_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET','direct_typeid_declarator',6,'p_direct_typeid_declarator_4','plyparser.py',110),
- ('direct_typeid_declarator -> direct_typeid_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET','direct_typeid_declarator',5,'p_direct_typeid_declarator_5','plyparser.py',109),
- ('direct_typeid_declarator -> direct_typeid_declarator LPAREN parameter_type_list RPAREN','direct_typeid_declarator',4,'p_direct_typeid_declarator_6','plyparser.py',109),
- ('direct_typeid_declarator -> direct_typeid_declarator LPAREN identifier_list_opt RPAREN','direct_typeid_declarator',4,'p_direct_typeid_declarator_6','plyparser.py',110),
- ('direct_typeid_noparen_declarator -> TYPEID','direct_typeid_noparen_declarator',1,'p_direct_typeid_noparen_declarator_1','plyparser.py',109),
- ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET','direct_typeid_noparen_declarator',5,'p_direct_typeid_noparen_declarator_3','plyparser.py',109),
- ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET','direct_typeid_noparen_declarator',6,'p_direct_typeid_noparen_declarator_4','plyparser.py',109),
- ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET','direct_typeid_noparen_declarator',6,'p_direct_typeid_noparen_declarator_4','plyparser.py',110),
- ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET','direct_typeid_noparen_declarator',5,'p_direct_typeid_noparen_declarator_5','plyparser.py',109),
- ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LPAREN parameter_type_list RPAREN','direct_typeid_noparen_declarator',4,'p_direct_typeid_noparen_declarator_6','plyparser.py',109),
- ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LPAREN identifier_list_opt RPAREN','direct_typeid_noparen_declarator',4,'p_direct_typeid_noparen_declarator_6','plyparser.py',110),
- ('id_declarator -> direct_id_declarator','id_declarator',1,'p_id_declarator_1','plyparser.py',109),
- ('id_declarator -> pointer direct_id_declarator','id_declarator',2,'p_id_declarator_2','plyparser.py',109),
- ('typeid_declarator -> direct_typeid_declarator','typeid_declarator',1,'p_typeid_declarator_1','plyparser.py',109),
- ('typeid_declarator -> pointer direct_typeid_declarator','typeid_declarator',2,'p_typeid_declarator_2','plyparser.py',109),
- ('typeid_noparen_declarator -> direct_typeid_noparen_declarator','typeid_noparen_declarator',1,'p_typeid_noparen_declarator_1','plyparser.py',109),
- ('typeid_noparen_declarator -> pointer direct_typeid_noparen_declarator','typeid_noparen_declarator',2,'p_typeid_noparen_declarator_2','plyparser.py',109),
- ('translation_unit_or_empty -> translation_unit','translation_unit_or_empty',1,'p_translation_unit_or_empty','c_parser.py',514),
- ('translation_unit_or_empty -> empty','translation_unit_or_empty',1,'p_translation_unit_or_empty','c_parser.py',515),
- ('translation_unit -> external_declaration','translation_unit',1,'p_translation_unit_1','c_parser.py',523),
- ('translation_unit -> translation_unit external_declaration','translation_unit',2,'p_translation_unit_2','c_parser.py',530),
- ('external_declaration -> function_definition','external_declaration',1,'p_external_declaration_1','c_parser.py',542),
- ('external_declaration -> declaration','external_declaration',1,'p_external_declaration_2','c_parser.py',547),
- ('external_declaration -> pp_directive','external_declaration',1,'p_external_declaration_3','c_parser.py',552),
- ('external_declaration -> pppragma_directive','external_declaration',1,'p_external_declaration_3','c_parser.py',553),
- ('external_declaration -> SEMI','external_declaration',1,'p_external_declaration_4','c_parser.py',558),
- ('pp_directive -> PPHASH','pp_directive',1,'p_pp_directive','c_parser.py',563),
- ('pppragma_directive -> PPPRAGMA','pppragma_directive',1,'p_pppragma_directive','c_parser.py',569),
- ('pppragma_directive -> PPPRAGMA PPPRAGMASTR','pppragma_directive',2,'p_pppragma_directive','c_parser.py',570),
- ('function_definition -> id_declarator declaration_list_opt compound_statement','function_definition',3,'p_function_definition_1','c_parser.py',581),
- ('function_definition -> declaration_specifiers id_declarator declaration_list_opt compound_statement','function_definition',4,'p_function_definition_2','c_parser.py',598),
- ('statement -> labeled_statement','statement',1,'p_statement','c_parser.py',609),
- ('statement -> expression_statement','statement',1,'p_statement','c_parser.py',610),
- ('statement -> compound_statement','statement',1,'p_statement','c_parser.py',611),
- ('statement -> selection_statement','statement',1,'p_statement','c_parser.py',612),
- ('statement -> iteration_statement','statement',1,'p_statement','c_parser.py',613),
- ('statement -> jump_statement','statement',1,'p_statement','c_parser.py',614),
- ('statement -> pppragma_directive','statement',1,'p_statement','c_parser.py',615),
- ('decl_body -> declaration_specifiers init_declarator_list_opt','decl_body',2,'p_decl_body','c_parser.py',629),
- ('decl_body -> declaration_specifiers_no_type id_init_declarator_list_opt','decl_body',2,'p_decl_body','c_parser.py',630),
- ('declaration -> decl_body SEMI','declaration',2,'p_declaration','c_parser.py',689),
- ('declaration_list -> declaration','declaration_list',1,'p_declaration_list','c_parser.py',698),
- ('declaration_list -> declaration_list declaration','declaration_list',2,'p_declaration_list','c_parser.py',699),
- ('declaration_specifiers_no_type -> type_qualifier declaration_specifiers_no_type_opt','declaration_specifiers_no_type',2,'p_declaration_specifiers_no_type_1','c_parser.py',709),
- ('declaration_specifiers_no_type -> storage_class_specifier declaration_specifiers_no_type_opt','declaration_specifiers_no_type',2,'p_declaration_specifiers_no_type_2','c_parser.py',714),
- ('declaration_specifiers_no_type -> function_specifier declaration_specifiers_no_type_opt','declaration_specifiers_no_type',2,'p_declaration_specifiers_no_type_3','c_parser.py',719),
- ('declaration_specifiers -> declaration_specifiers type_qualifier','declaration_specifiers',2,'p_declaration_specifiers_1','c_parser.py',725),
- ('declaration_specifiers -> declaration_specifiers storage_class_specifier','declaration_specifiers',2,'p_declaration_specifiers_2','c_parser.py',730),
- ('declaration_specifiers -> declaration_specifiers function_specifier','declaration_specifiers',2,'p_declaration_specifiers_3','c_parser.py',735),
- ('declaration_specifiers -> declaration_specifiers type_specifier_no_typeid','declaration_specifiers',2,'p_declaration_specifiers_4','c_parser.py',740),
- ('declaration_specifiers -> type_specifier','declaration_specifiers',1,'p_declaration_specifiers_5','c_parser.py',745),
- ('declaration_specifiers -> declaration_specifiers_no_type type_specifier','declaration_specifiers',2,'p_declaration_specifiers_6','c_parser.py',750),
- ('storage_class_specifier -> AUTO','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',756),
- ('storage_class_specifier -> REGISTER','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',757),
- ('storage_class_specifier -> STATIC','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',758),
- ('storage_class_specifier -> EXTERN','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',759),
- ('storage_class_specifier -> TYPEDEF','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',760),
- ('function_specifier -> INLINE','function_specifier',1,'p_function_specifier','c_parser.py',765),
- ('type_specifier_no_typeid -> VOID','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',770),
- ('type_specifier_no_typeid -> _BOOL','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',771),
- ('type_specifier_no_typeid -> CHAR','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',772),
- ('type_specifier_no_typeid -> SHORT','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',773),
- ('type_specifier_no_typeid -> INT','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',774),
- ('type_specifier_no_typeid -> LONG','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',775),
- ('type_specifier_no_typeid -> FLOAT','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',776),
- ('type_specifier_no_typeid -> DOUBLE','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',777),
- ('type_specifier_no_typeid -> _COMPLEX','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',778),
- ('type_specifier_no_typeid -> SIGNED','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',779),
- ('type_specifier_no_typeid -> UNSIGNED','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',780),
- ('type_specifier_no_typeid -> __INT128','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',781),
- ('type_specifier -> typedef_name','type_specifier',1,'p_type_specifier','c_parser.py',786),
- ('type_specifier -> enum_specifier','type_specifier',1,'p_type_specifier','c_parser.py',787),
- ('type_specifier -> struct_or_union_specifier','type_specifier',1,'p_type_specifier','c_parser.py',788),
- ('type_specifier -> type_specifier_no_typeid','type_specifier',1,'p_type_specifier','c_parser.py',789),
- ('type_qualifier -> CONST','type_qualifier',1,'p_type_qualifier','c_parser.py',794),
- ('type_qualifier -> RESTRICT','type_qualifier',1,'p_type_qualifier','c_parser.py',795),
- ('type_qualifier -> VOLATILE','type_qualifier',1,'p_type_qualifier','c_parser.py',796),
- ('init_declarator_list -> init_declarator','init_declarator_list',1,'p_init_declarator_list','c_parser.py',801),
- ('init_declarator_list -> init_declarator_list COMMA init_declarator','init_declarator_list',3,'p_init_declarator_list','c_parser.py',802),
- ('init_declarator -> declarator','init_declarator',1,'p_init_declarator','c_parser.py',810),
- ('init_declarator -> declarator EQUALS initializer','init_declarator',3,'p_init_declarator','c_parser.py',811),
- ('id_init_declarator_list -> id_init_declarator','id_init_declarator_list',1,'p_id_init_declarator_list','c_parser.py',816),
- ('id_init_declarator_list -> id_init_declarator_list COMMA init_declarator','id_init_declarator_list',3,'p_id_init_declarator_list','c_parser.py',817),
- ('id_init_declarator -> id_declarator','id_init_declarator',1,'p_id_init_declarator','c_parser.py',822),
- ('id_init_declarator -> id_declarator EQUALS initializer','id_init_declarator',3,'p_id_init_declarator','c_parser.py',823),
- ('specifier_qualifier_list -> specifier_qualifier_list type_specifier_no_typeid','specifier_qualifier_list',2,'p_specifier_qualifier_list_1','c_parser.py',830),
- ('specifier_qualifier_list -> specifier_qualifier_list type_qualifier','specifier_qualifier_list',2,'p_specifier_qualifier_list_2','c_parser.py',835),
- ('specifier_qualifier_list -> type_specifier','specifier_qualifier_list',1,'p_specifier_qualifier_list_3','c_parser.py',840),
- ('specifier_qualifier_list -> type_qualifier_list type_specifier','specifier_qualifier_list',2,'p_specifier_qualifier_list_4','c_parser.py',845),
- ('struct_or_union_specifier -> struct_or_union ID','struct_or_union_specifier',2,'p_struct_or_union_specifier_1','c_parser.py',854),
- ('struct_or_union_specifier -> struct_or_union TYPEID','struct_or_union_specifier',2,'p_struct_or_union_specifier_1','c_parser.py',855),
- ('struct_or_union_specifier -> struct_or_union brace_open struct_declaration_list brace_close','struct_or_union_specifier',4,'p_struct_or_union_specifier_2','c_parser.py',864),
- ('struct_or_union_specifier -> struct_or_union ID brace_open struct_declaration_list brace_close','struct_or_union_specifier',5,'p_struct_or_union_specifier_3','c_parser.py',873),
- ('struct_or_union_specifier -> struct_or_union TYPEID brace_open struct_declaration_list brace_close','struct_or_union_specifier',5,'p_struct_or_union_specifier_3','c_parser.py',874),
- ('struct_or_union -> STRUCT','struct_or_union',1,'p_struct_or_union','c_parser.py',883),
- ('struct_or_union -> UNION','struct_or_union',1,'p_struct_or_union','c_parser.py',884),
- ('struct_declaration_list -> struct_declaration','struct_declaration_list',1,'p_struct_declaration_list','c_parser.py',891),
- ('struct_declaration_list -> struct_declaration_list struct_declaration','struct_declaration_list',2,'p_struct_declaration_list','c_parser.py',892),
- ('struct_declaration -> specifier_qualifier_list struct_declarator_list_opt SEMI','struct_declaration',3,'p_struct_declaration_1','c_parser.py',900),
- ('struct_declaration -> SEMI','struct_declaration',1,'p_struct_declaration_2','c_parser.py',938),
- ('struct_declarator_list -> struct_declarator','struct_declarator_list',1,'p_struct_declarator_list','c_parser.py',943),
- ('struct_declarator_list -> struct_declarator_list COMMA struct_declarator','struct_declarator_list',3,'p_struct_declarator_list','c_parser.py',944),
- ('struct_declarator -> declarator','struct_declarator',1,'p_struct_declarator_1','c_parser.py',952),
- ('struct_declarator -> declarator COLON constant_expression','struct_declarator',3,'p_struct_declarator_2','c_parser.py',957),
- ('struct_declarator -> COLON constant_expression','struct_declarator',2,'p_struct_declarator_2','c_parser.py',958),
- ('enum_specifier -> ENUM ID','enum_specifier',2,'p_enum_specifier_1','c_parser.py',966),
- ('enum_specifier -> ENUM TYPEID','enum_specifier',2,'p_enum_specifier_1','c_parser.py',967),
- ('enum_specifier -> ENUM brace_open enumerator_list brace_close','enum_specifier',4,'p_enum_specifier_2','c_parser.py',972),
- ('enum_specifier -> ENUM ID brace_open enumerator_list brace_close','enum_specifier',5,'p_enum_specifier_3','c_parser.py',977),
- ('enum_specifier -> ENUM TYPEID brace_open enumerator_list brace_close','enum_specifier',5,'p_enum_specifier_3','c_parser.py',978),
- ('enumerator_list -> enumerator','enumerator_list',1,'p_enumerator_list','c_parser.py',983),
- ('enumerator_list -> enumerator_list COMMA','enumerator_list',2,'p_enumerator_list','c_parser.py',984),
- ('enumerator_list -> enumerator_list COMMA enumerator','enumerator_list',3,'p_enumerator_list','c_parser.py',985),
- ('enumerator -> ID','enumerator',1,'p_enumerator','c_parser.py',996),
- ('enumerator -> ID EQUALS constant_expression','enumerator',3,'p_enumerator','c_parser.py',997),
- ('declarator -> id_declarator','declarator',1,'p_declarator','c_parser.py',1012),
- ('declarator -> typeid_declarator','declarator',1,'p_declarator','c_parser.py',1013),
- ('pointer -> TIMES type_qualifier_list_opt','pointer',2,'p_pointer','c_parser.py',1124),
- ('pointer -> TIMES type_qualifier_list_opt pointer','pointer',3,'p_pointer','c_parser.py',1125),
- ('type_qualifier_list -> type_qualifier','type_qualifier_list',1,'p_type_qualifier_list','c_parser.py',1154),
- ('type_qualifier_list -> type_qualifier_list type_qualifier','type_qualifier_list',2,'p_type_qualifier_list','c_parser.py',1155),
- ('parameter_type_list -> parameter_list','parameter_type_list',1,'p_parameter_type_list','c_parser.py',1160),
- ('parameter_type_list -> parameter_list COMMA ELLIPSIS','parameter_type_list',3,'p_parameter_type_list','c_parser.py',1161),
- ('parameter_list -> parameter_declaration','parameter_list',1,'p_parameter_list','c_parser.py',1169),
- ('parameter_list -> parameter_list COMMA parameter_declaration','parameter_list',3,'p_parameter_list','c_parser.py',1170),
- ('parameter_declaration -> declaration_specifiers id_declarator','parameter_declaration',2,'p_parameter_declaration_1','c_parser.py',1189),
- ('parameter_declaration -> declaration_specifiers typeid_noparen_declarator','parameter_declaration',2,'p_parameter_declaration_1','c_parser.py',1190),
- ('parameter_declaration -> declaration_specifiers abstract_declarator_opt','parameter_declaration',2,'p_parameter_declaration_2','c_parser.py',1201),
- ('identifier_list -> identifier','identifier_list',1,'p_identifier_list','c_parser.py',1232),
- ('identifier_list -> identifier_list COMMA identifier','identifier_list',3,'p_identifier_list','c_parser.py',1233),
- ('initializer -> assignment_expression','initializer',1,'p_initializer_1','c_parser.py',1242),
- ('initializer -> brace_open initializer_list_opt brace_close','initializer',3,'p_initializer_2','c_parser.py',1247),
- ('initializer -> brace_open initializer_list COMMA brace_close','initializer',4,'p_initializer_2','c_parser.py',1248),
- ('initializer_list -> designation_opt initializer','initializer_list',2,'p_initializer_list','c_parser.py',1256),
- ('initializer_list -> initializer_list COMMA designation_opt initializer','initializer_list',4,'p_initializer_list','c_parser.py',1257),
- ('designation -> designator_list EQUALS','designation',2,'p_designation','c_parser.py',1268),
- ('designator_list -> designator','designator_list',1,'p_designator_list','c_parser.py',1276),
- ('designator_list -> designator_list designator','designator_list',2,'p_designator_list','c_parser.py',1277),
- ('designator -> LBRACKET constant_expression RBRACKET','designator',3,'p_designator','c_parser.py',1282),
- ('designator -> PERIOD identifier','designator',2,'p_designator','c_parser.py',1283),
- ('type_name -> specifier_qualifier_list abstract_declarator_opt','type_name',2,'p_type_name','c_parser.py',1288),
- ('abstract_declarator -> pointer','abstract_declarator',1,'p_abstract_declarator_1','c_parser.py',1299),
- ('abstract_declarator -> pointer direct_abstract_declarator','abstract_declarator',2,'p_abstract_declarator_2','c_parser.py',1307),
- ('abstract_declarator -> direct_abstract_declarator','abstract_declarator',1,'p_abstract_declarator_3','c_parser.py',1312),
- ('direct_abstract_declarator -> LPAREN abstract_declarator RPAREN','direct_abstract_declarator',3,'p_direct_abstract_declarator_1','c_parser.py',1322),
- ('direct_abstract_declarator -> direct_abstract_declarator LBRACKET assignment_expression_opt RBRACKET','direct_abstract_declarator',4,'p_direct_abstract_declarator_2','c_parser.py',1326),
- ('direct_abstract_declarator -> LBRACKET assignment_expression_opt RBRACKET','direct_abstract_declarator',3,'p_direct_abstract_declarator_3','c_parser.py',1337),
- ('direct_abstract_declarator -> direct_abstract_declarator LBRACKET TIMES RBRACKET','direct_abstract_declarator',4,'p_direct_abstract_declarator_4','c_parser.py',1346),
- ('direct_abstract_declarator -> LBRACKET TIMES RBRACKET','direct_abstract_declarator',3,'p_direct_abstract_declarator_5','c_parser.py',1357),
- ('direct_abstract_declarator -> direct_abstract_declarator LPAREN parameter_type_list_opt RPAREN','direct_abstract_declarator',4,'p_direct_abstract_declarator_6','c_parser.py',1366),
- ('direct_abstract_declarator -> LPAREN parameter_type_list_opt RPAREN','direct_abstract_declarator',3,'p_direct_abstract_declarator_7','c_parser.py',1376),
- ('block_item -> declaration','block_item',1,'p_block_item','c_parser.py',1387),
- ('block_item -> statement','block_item',1,'p_block_item','c_parser.py',1388),
- ('block_item_list -> block_item','block_item_list',1,'p_block_item_list','c_parser.py',1395),
- ('block_item_list -> block_item_list block_item','block_item_list',2,'p_block_item_list','c_parser.py',1396),
- ('compound_statement -> brace_open block_item_list_opt brace_close','compound_statement',3,'p_compound_statement_1','c_parser.py',1402),
- ('labeled_statement -> ID COLON statement','labeled_statement',3,'p_labeled_statement_1','c_parser.py',1408),
- ('labeled_statement -> CASE constant_expression COLON statement','labeled_statement',4,'p_labeled_statement_2','c_parser.py',1412),
- ('labeled_statement -> DEFAULT COLON statement','labeled_statement',3,'p_labeled_statement_3','c_parser.py',1416),
- ('selection_statement -> IF LPAREN expression RPAREN statement','selection_statement',5,'p_selection_statement_1','c_parser.py',1420),
- ('selection_statement -> IF LPAREN expression RPAREN statement ELSE statement','selection_statement',7,'p_selection_statement_2','c_parser.py',1424),
- ('selection_statement -> SWITCH LPAREN expression RPAREN statement','selection_statement',5,'p_selection_statement_3','c_parser.py',1428),
- ('iteration_statement -> WHILE LPAREN expression RPAREN statement','iteration_statement',5,'p_iteration_statement_1','c_parser.py',1433),
- ('iteration_statement -> DO statement WHILE LPAREN expression RPAREN SEMI','iteration_statement',7,'p_iteration_statement_2','c_parser.py',1437),
- ('iteration_statement -> FOR LPAREN expression_opt SEMI expression_opt SEMI expression_opt RPAREN statement','iteration_statement',9,'p_iteration_statement_3','c_parser.py',1441),
- ('iteration_statement -> FOR LPAREN declaration expression_opt SEMI expression_opt RPAREN statement','iteration_statement',8,'p_iteration_statement_4','c_parser.py',1445),
- ('jump_statement -> GOTO ID SEMI','jump_statement',3,'p_jump_statement_1','c_parser.py',1450),
- ('jump_statement -> BREAK SEMI','jump_statement',2,'p_jump_statement_2','c_parser.py',1454),
- ('jump_statement -> CONTINUE SEMI','jump_statement',2,'p_jump_statement_3','c_parser.py',1458),
- ('jump_statement -> RETURN expression SEMI','jump_statement',3,'p_jump_statement_4','c_parser.py',1462),
- ('jump_statement -> RETURN SEMI','jump_statement',2,'p_jump_statement_4','c_parser.py',1463),
- ('expression_statement -> expression_opt SEMI','expression_statement',2,'p_expression_statement','c_parser.py',1468),
- ('expression -> assignment_expression','expression',1,'p_expression','c_parser.py',1475),
- ('expression -> expression COMMA assignment_expression','expression',3,'p_expression','c_parser.py',1476),
- ('typedef_name -> TYPEID','typedef_name',1,'p_typedef_name','c_parser.py',1488),
- ('assignment_expression -> conditional_expression','assignment_expression',1,'p_assignment_expression','c_parser.py',1492),
- ('assignment_expression -> unary_expression assignment_operator assignment_expression','assignment_expression',3,'p_assignment_expression','c_parser.py',1493),
- ('assignment_operator -> EQUALS','assignment_operator',1,'p_assignment_operator','c_parser.py',1506),
- ('assignment_operator -> XOREQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1507),
- ('assignment_operator -> TIMESEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1508),
- ('assignment_operator -> DIVEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1509),
- ('assignment_operator -> MODEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1510),
- ('assignment_operator -> PLUSEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1511),
- ('assignment_operator -> MINUSEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1512),
- ('assignment_operator -> LSHIFTEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1513),
- ('assignment_operator -> RSHIFTEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1514),
- ('assignment_operator -> ANDEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1515),
- ('assignment_operator -> OREQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1516),
- ('constant_expression -> conditional_expression','constant_expression',1,'p_constant_expression','c_parser.py',1521),
- ('conditional_expression -> binary_expression','conditional_expression',1,'p_conditional_expression','c_parser.py',1525),
- ('conditional_expression -> binary_expression CONDOP expression COLON conditional_expression','conditional_expression',5,'p_conditional_expression','c_parser.py',1526),
- ('binary_expression -> cast_expression','binary_expression',1,'p_binary_expression','c_parser.py',1534),
- ('binary_expression -> binary_expression TIMES binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1535),
- ('binary_expression -> binary_expression DIVIDE binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1536),
- ('binary_expression -> binary_expression MOD binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1537),
- ('binary_expression -> binary_expression PLUS binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1538),
- ('binary_expression -> binary_expression MINUS binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1539),
- ('binary_expression -> binary_expression RSHIFT binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1540),
- ('binary_expression -> binary_expression LSHIFT binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1541),
- ('binary_expression -> binary_expression LT binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1542),
- ('binary_expression -> binary_expression LE binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1543),
- ('binary_expression -> binary_expression GE binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1544),
- ('binary_expression -> binary_expression GT binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1545),
- ('binary_expression -> binary_expression EQ binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1546),
- ('binary_expression -> binary_expression NE binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1547),
- ('binary_expression -> binary_expression AND binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1548),
- ('binary_expression -> binary_expression OR binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1549),
- ('binary_expression -> binary_expression XOR binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1550),
- ('binary_expression -> binary_expression LAND binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1551),
- ('binary_expression -> binary_expression LOR binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1552),
- ('cast_expression -> unary_expression','cast_expression',1,'p_cast_expression_1','c_parser.py',1560),
- ('cast_expression -> LPAREN type_name RPAREN cast_expression','cast_expression',4,'p_cast_expression_2','c_parser.py',1564),
- ('unary_expression -> postfix_expression','unary_expression',1,'p_unary_expression_1','c_parser.py',1568),
- ('unary_expression -> PLUSPLUS unary_expression','unary_expression',2,'p_unary_expression_2','c_parser.py',1572),
- ('unary_expression -> MINUSMINUS unary_expression','unary_expression',2,'p_unary_expression_2','c_parser.py',1573),
- ('unary_expression -> unary_operator cast_expression','unary_expression',2,'p_unary_expression_2','c_parser.py',1574),
- ('unary_expression -> SIZEOF unary_expression','unary_expression',2,'p_unary_expression_3','c_parser.py',1579),
- ('unary_expression -> SIZEOF LPAREN type_name RPAREN','unary_expression',4,'p_unary_expression_3','c_parser.py',1580),
- ('unary_operator -> AND','unary_operator',1,'p_unary_operator','c_parser.py',1588),
- ('unary_operator -> TIMES','unary_operator',1,'p_unary_operator','c_parser.py',1589),
- ('unary_operator -> PLUS','unary_operator',1,'p_unary_operator','c_parser.py',1590),
- ('unary_operator -> MINUS','unary_operator',1,'p_unary_operator','c_parser.py',1591),
- ('unary_operator -> NOT','unary_operator',1,'p_unary_operator','c_parser.py',1592),
- ('unary_operator -> LNOT','unary_operator',1,'p_unary_operator','c_parser.py',1593),
- ('postfix_expression -> primary_expression','postfix_expression',1,'p_postfix_expression_1','c_parser.py',1598),
- ('postfix_expression -> postfix_expression LBRACKET expression RBRACKET','postfix_expression',4,'p_postfix_expression_2','c_parser.py',1602),
- ('postfix_expression -> postfix_expression LPAREN argument_expression_list RPAREN','postfix_expression',4,'p_postfix_expression_3','c_parser.py',1606),
- ('postfix_expression -> postfix_expression LPAREN RPAREN','postfix_expression',3,'p_postfix_expression_3','c_parser.py',1607),
- ('postfix_expression -> postfix_expression PERIOD ID','postfix_expression',3,'p_postfix_expression_4','c_parser.py',1612),
- ('postfix_expression -> postfix_expression PERIOD TYPEID','postfix_expression',3,'p_postfix_expression_4','c_parser.py',1613),
- ('postfix_expression -> postfix_expression ARROW ID','postfix_expression',3,'p_postfix_expression_4','c_parser.py',1614),
- ('postfix_expression -> postfix_expression ARROW TYPEID','postfix_expression',3,'p_postfix_expression_4','c_parser.py',1615),
- ('postfix_expression -> postfix_expression PLUSPLUS','postfix_expression',2,'p_postfix_expression_5','c_parser.py',1621),
- ('postfix_expression -> postfix_expression MINUSMINUS','postfix_expression',2,'p_postfix_expression_5','c_parser.py',1622),
- ('postfix_expression -> LPAREN type_name RPAREN brace_open initializer_list brace_close','postfix_expression',6,'p_postfix_expression_6','c_parser.py',1627),
- ('postfix_expression -> LPAREN type_name RPAREN brace_open initializer_list COMMA brace_close','postfix_expression',7,'p_postfix_expression_6','c_parser.py',1628),
- ('primary_expression -> identifier','primary_expression',1,'p_primary_expression_1','c_parser.py',1633),
- ('primary_expression -> constant','primary_expression',1,'p_primary_expression_2','c_parser.py',1637),
- ('primary_expression -> unified_string_literal','primary_expression',1,'p_primary_expression_3','c_parser.py',1641),
- ('primary_expression -> unified_wstring_literal','primary_expression',1,'p_primary_expression_3','c_parser.py',1642),
- ('primary_expression -> LPAREN expression RPAREN','primary_expression',3,'p_primary_expression_4','c_parser.py',1647),
- ('primary_expression -> OFFSETOF LPAREN type_name COMMA offsetof_member_designator RPAREN','primary_expression',6,'p_primary_expression_5','c_parser.py',1651),
- ('offsetof_member_designator -> identifier','offsetof_member_designator',1,'p_offsetof_member_designator','c_parser.py',1659),
- ('offsetof_member_designator -> offsetof_member_designator PERIOD identifier','offsetof_member_designator',3,'p_offsetof_member_designator','c_parser.py',1660),
- ('offsetof_member_designator -> offsetof_member_designator LBRACKET expression RBRACKET','offsetof_member_designator',4,'p_offsetof_member_designator','c_parser.py',1661),
- ('argument_expression_list -> assignment_expression','argument_expression_list',1,'p_argument_expression_list','c_parser.py',1674),
- ('argument_expression_list -> argument_expression_list COMMA assignment_expression','argument_expression_list',3,'p_argument_expression_list','c_parser.py',1675),
- ('identifier -> ID','identifier',1,'p_identifier','c_parser.py',1684),
- ('constant -> INT_CONST_DEC','constant',1,'p_constant_1','c_parser.py',1688),
- ('constant -> INT_CONST_OCT','constant',1,'p_constant_1','c_parser.py',1689),
- ('constant -> INT_CONST_HEX','constant',1,'p_constant_1','c_parser.py',1690),
- ('constant -> INT_CONST_BIN','constant',1,'p_constant_1','c_parser.py',1691),
- ('constant -> FLOAT_CONST','constant',1,'p_constant_2','c_parser.py',1697),
- ('constant -> HEX_FLOAT_CONST','constant',1,'p_constant_2','c_parser.py',1698),
- ('constant -> CHAR_CONST','constant',1,'p_constant_3','c_parser.py',1704),
- ('constant -> WCHAR_CONST','constant',1,'p_constant_3','c_parser.py',1705),
- ('unified_string_literal -> STRING_LITERAL','unified_string_literal',1,'p_unified_string_literal','c_parser.py',1716),
- ('unified_string_literal -> unified_string_literal STRING_LITERAL','unified_string_literal',2,'p_unified_string_literal','c_parser.py',1717),
- ('unified_wstring_literal -> WSTRING_LITERAL','unified_wstring_literal',1,'p_unified_wstring_literal','c_parser.py',1727),
- ('unified_wstring_literal -> unified_wstring_literal WSTRING_LITERAL','unified_wstring_literal',2,'p_unified_wstring_literal','c_parser.py',1728),
- ('brace_open -> LBRACE','brace_open',1,'p_brace_open','c_parser.py',1738),
- ('brace_close -> RBRACE','brace_close',1,'p_brace_close','c_parser.py',1744),
- ('empty ->