+# endif
+#endif
+'''
diff --git a/billinglayer/python/cffi/verifier.py b/billinglayer/python/cffi/verifier.py
new file mode 100644
index 0000000..a500c78
--- /dev/null
+++ b/billinglayer/python/cffi/verifier.py
@@ -0,0 +1,307 @@
+#
+# DEPRECATED: implementation for ffi.verify()
+#
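+# The deprecated public API this module backs looks roughly like the sketch
+# below (illustrative only; the cdef/preamble strings are assumptions):
+#
+#     import cffi
+#     ffi = cffi.FFI()
+#     ffi.cdef("int add(int, int);")
+#     lib = ffi.verify("int add(int a, int b) { return a + b; }")
+#     assert lib.add(2, 3) == 5
+#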
+import sys, os, binascii, shutil, io
+from . import __version_verifier_modules__
+from . import ffiplatform
+from .error import VerificationError
+
+if sys.version_info >= (3, 3):
+ import importlib.machinery
+ def _extension_suffixes():
+ return importlib.machinery.EXTENSION_SUFFIXES[:]
+else:
+ import imp
+ def _extension_suffixes():
+ return [suffix for suffix, _, type in imp.get_suffixes()
+ if type == imp.C_EXTENSION]
+
+
+if sys.version_info >= (3,):
+ NativeIO = io.StringIO
+else:
+ class NativeIO(io.BytesIO):
+ def write(self, s):
+ if isinstance(s, unicode):
+ s = s.encode('ascii')
+ super(NativeIO, self).write(s)
+
+
+class Verifier(object):
+
+ def __init__(self, ffi, preamble, tmpdir=None, modulename=None,
+ ext_package=None, tag='', force_generic_engine=False,
+ source_extension='.c', flags=None, relative_to=None, **kwds):
+ if ffi._parser._uses_new_feature:
+ raise VerificationError(
+ "feature not supported with ffi.verify(), but only "
+ "with ffi.set_source(): %s" % (ffi._parser._uses_new_feature,))
+ self.ffi = ffi
+ self.preamble = preamble
+ if not modulename:
+ flattened_kwds = ffiplatform.flatten(kwds)
+ vengine_class = _locate_engine_class(ffi, force_generic_engine)
+ self._vengine = vengine_class(self)
+ self._vengine.patch_extension_kwds(kwds)
+ self.flags = flags
+ self.kwds = self.make_relative_to(kwds, relative_to)
+ #
+ if modulename:
+ if tag:
+ raise TypeError("can't specify both 'modulename' and 'tag'")
+ else:
+ key = '\x00'.join(['%d.%d' % sys.version_info[:2],
+ __version_verifier_modules__,
+ preamble, flattened_kwds] +
+ ffi._cdefsources)
+ if sys.version_info >= (3,):
+ key = key.encode('utf-8')
+ k1 = hex(binascii.crc32(key[0::2]) & 0xffffffff)
+ k1 = k1.lstrip('0x').rstrip('L')
+ k2 = hex(binascii.crc32(key[1::2]) & 0xffffffff)
+ k2 = k2.lstrip('0').rstrip('L')
+ modulename = '_cffi_%s_%s%s%s' % (tag, self._vengine._class_key,
+ k1, k2)
+ suffix = _get_so_suffixes()[0]
+ self.tmpdir = tmpdir or _caller_dir_pycache()
+ self.sourcefilename = os.path.join(self.tmpdir, modulename + source_extension)
+ self.modulefilename = os.path.join(self.tmpdir, modulename + suffix)
+ self.ext_package = ext_package
+ self._has_source = False
+ self._has_module = False
+
+ def write_source(self, file=None):
+ """Write the C source code. It is produced in 'self.sourcefilename',
+ which can be tweaked beforehand."""
+ with self.ffi._lock:
+ if self._has_source and file is None:
+ raise VerificationError(
+ "source code already written")
+ self._write_source(file)
+
+ def compile_module(self):
+ """Write the C source code (if not done already) and compile it.
+ This produces a dynamic link library in 'self.modulefilename'."""
+ with self.ffi._lock:
+ if self._has_module:
+ raise VerificationError("module already compiled")
+ if not self._has_source:
+ self._write_source()
+ self._compile_module()
+
+ def load_library(self):
+ """Get a C module from this Verifier instance.
+ Returns an instance of a FFILibrary class that behaves like the
+ objects returned by ffi.dlopen(), but that delegates all
+ operations to the C module. If necessary, the C code is written
+ and compiled first.
+ """
+ with self.ffi._lock:
+ if not self._has_module:
+ self._locate_module()
+ if not self._has_module:
+ if not self._has_source:
+ self._write_source()
+ self._compile_module()
+ return self._load_library()
+
+ def get_module_name(self):
+ basename = os.path.basename(self.modulefilename)
+ # kill both the .so extension and the other .'s, as introduced
+ # by Python 3: 'basename.cpython-33m.so'
+ basename = basename.split('.', 1)[0]
+ # and the _d added in Python 2 debug builds --- but try to be
+ # conservative and not kill a legitimate _d
+ if basename.endswith('_d') and hasattr(sys, 'gettotalrefcount'):
+ basename = basename[:-2]
+ return basename
+
+ def get_extension(self):
+ ffiplatform._hack_at_distutils() # backward compatibility hack
+ if not self._has_source:
+ with self.ffi._lock:
+ if not self._has_source:
+ self._write_source()
+ sourcename = ffiplatform.maybe_relative_path(self.sourcefilename)
+ modname = self.get_module_name()
+ return ffiplatform.get_extension(sourcename, modname, **self.kwds)
+
+ def generates_python_module(self):
+ return self._vengine._gen_python_module
+
+ def make_relative_to(self, kwds, relative_to):
+ if relative_to and os.path.dirname(relative_to):
+ dirname = os.path.dirname(relative_to)
+ kwds = kwds.copy()
+ for key in ffiplatform.LIST_OF_FILE_NAMES:
+ if key in kwds:
+ lst = kwds[key]
+ if not isinstance(lst, (list, tuple)):
+ raise TypeError("keyword '%s' should be a list or tuple"
+ % (key,))
+ lst = [os.path.join(dirname, fn) for fn in lst]
+ kwds[key] = lst
+ return kwds
+
+ # ----------
+
+ def _locate_module(self):
+ if not os.path.isfile(self.modulefilename):
+ if self.ext_package:
+ try:
+ pkg = __import__(self.ext_package, None, None, ['__doc__'])
+ except ImportError:
+ return # cannot import the package itself, give up
+ # (e.g. it might be called differently before installation)
+ path = pkg.__path__
+ else:
+ path = None
+ filename = self._vengine.find_module(self.get_module_name(), path,
+ _get_so_suffixes())
+ if filename is None:
+ return
+ self.modulefilename = filename
+ self._vengine.collect_types()
+ self._has_module = True
+
+ def _write_source_to(self, file):
+ self._vengine._f = file
+ try:
+ self._vengine.write_source_to_f()
+ finally:
+ del self._vengine._f
+
+ def _write_source(self, file=None):
+ if file is not None:
+ self._write_source_to(file)
+ else:
+ # Write the source to an in-memory buffer first.
+ f = NativeIO()
+ self._write_source_to(f)
+ source_data = f.getvalue()
+
+ # Determine if this matches the current file
+ if os.path.exists(self.sourcefilename):
+ with open(self.sourcefilename, "r") as fp:
+ needs_written = not (fp.read() == source_data)
+ else:
+ needs_written = True
+
+ # Actually write the file out if it doesn't match
+ if needs_written:
+ _ensure_dir(self.sourcefilename)
+ with open(self.sourcefilename, "w") as fp:
+ fp.write(source_data)
+
+ # Mark the source as written.
+ self._has_source = True
+
+ def _compile_module(self):
+ # compile this C source
+ tmpdir = os.path.dirname(self.sourcefilename)
+ outputfilename = ffiplatform.compile(tmpdir, self.get_extension())
+ try:
+ same = ffiplatform.samefile(outputfilename, self.modulefilename)
+ except OSError:
+ same = False
+ if not same:
+ _ensure_dir(self.modulefilename)
+ shutil.move(outputfilename, self.modulefilename)
+ self._has_module = True
+
+ def _load_library(self):
+ assert self._has_module
+ if self.flags is not None:
+ return self._vengine.load_library(self.flags)
+ else:
+ return self._vengine.load_library()
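+
+# Minimal direct-usage sketch for this class (assumes 'ffi' is a cffi.FFI
+# instance with cdef() already applied; the preamble string is illustrative):
+#
+#     v = Verifier(ffi, "int add(int a, int b) { return a + b; }")
+#     lib = v.load_library()   # writes the C source, compiles, then loads it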
+
+# ____________________________________________________________
+
+_FORCE_GENERIC_ENGINE = False # for tests
+
+def _locate_engine_class(ffi, force_generic_engine):
+ if _FORCE_GENERIC_ENGINE:
+ force_generic_engine = True
+ if not force_generic_engine:
+ if '__pypy__' in sys.builtin_module_names:
+ force_generic_engine = True
+ else:
+ try:
+ import _cffi_backend
+ except ImportError:
+ _cffi_backend = '?'
+ if ffi._backend is not _cffi_backend:
+ force_generic_engine = True
+ if force_generic_engine:
+ from . import vengine_gen
+ return vengine_gen.VGenericEngine
+ else:
+ from . import vengine_cpy
+ return vengine_cpy.VCPythonEngine
+
+# ____________________________________________________________
+
+_TMPDIR = None
+
+def _caller_dir_pycache():
+ if _TMPDIR:
+ return _TMPDIR
+ result = os.environ.get('CFFI_TMPDIR')
+ if result:
+ return result
+ filename = sys._getframe(2).f_code.co_filename
+ return os.path.abspath(os.path.join(os.path.dirname(filename),
+ '__pycache__'))
+
+def set_tmpdir(dirname):
+ """Set the temporary directory to use instead of __pycache__."""
+ global _TMPDIR
+ _TMPDIR = dirname
+
+def cleanup_tmpdir(tmpdir=None, keep_so=False):
+ """Clean up the temporary directory by removing all files in it
+ called `_cffi_*.{c,so}` as well as the `build` subdirectory."""
+ tmpdir = tmpdir or _caller_dir_pycache()
+ try:
+ filelist = os.listdir(tmpdir)
+ except OSError:
+ return
+ if keep_so:
+ suffix = '.c' # only remove .c files
+ else:
+ suffix = _get_so_suffixes()[0].lower()
+ for fn in filelist:
+ if fn.lower().startswith('_cffi_') and (
+ fn.lower().endswith(suffix) or fn.lower().endswith('.c')):
+ try:
+ os.unlink(os.path.join(tmpdir, fn))
+ except OSError:
+ pass
+ clean_dir = [os.path.join(tmpdir, 'build')]
+ for dir in clean_dir:
+ try:
+ for fn in os.listdir(dir):
+ fn = os.path.join(dir, fn)
+ if os.path.isdir(fn):
+ clean_dir.append(fn)
+ else:
+ os.unlink(fn)
+ except OSError:
+ pass
+
+def _get_so_suffixes():
+ suffixes = _extension_suffixes()
+ if not suffixes:
+ # bah, no C_EXTENSION available. Occurs on pypy without cpyext
+ if sys.platform == 'win32':
+ suffixes = [".pyd"]
+ else:
+ suffixes = [".so"]
+
+ return suffixes
+
+def _ensure_dir(filename):
+ dirname = os.path.dirname(filename)
+ if dirname and not os.path.isdir(dirname):
+ os.makedirs(dirname)
diff --git a/billinglayer/python/charset_normalizer-3.2.0.dist-info/INSTALLER b/billinglayer/python/charset_normalizer-3.2.0.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/billinglayer/python/charset_normalizer-3.2.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/billinglayer/python/charset_normalizer-3.2.0.dist-info/LICENSE b/billinglayer/python/charset_normalizer-3.2.0.dist-info/LICENSE
new file mode 100644
index 0000000..ad82355
--- /dev/null
+++ b/billinglayer/python/charset_normalizer-3.2.0.dist-info/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2019 TAHRI Ahmed R.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
\ No newline at end of file
diff --git a/billinglayer/python/charset_normalizer-3.2.0.dist-info/METADATA b/billinglayer/python/charset_normalizer-3.2.0.dist-info/METADATA
new file mode 100644
index 0000000..ca190e1
--- /dev/null
+++ b/billinglayer/python/charset_normalizer-3.2.0.dist-info/METADATA
@@ -0,0 +1,628 @@
+Metadata-Version: 2.1
+Name: charset-normalizer
+Version: 3.2.0
+Summary: The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet.
+Home-page: https://github.com/Ousret/charset_normalizer
+Author: Ahmed TAHRI
+Author-email: ahmed.tahri@cloudnursery.dev
+License: MIT
+Project-URL: Bug Reports, https://github.com/Ousret/charset_normalizer/issues
+Project-URL: Documentation, https://charset-normalizer.readthedocs.io/en/latest
+Keywords: encoding,charset,charset-detector,detector,normalization,unicode,chardet,detect
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Intended Audience :: Developers
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Text Processing :: Linguistic
+Classifier: Topic :: Utilities
+Classifier: Typing :: Typed
+Requires-Python: >=3.7.0
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Provides-Extra: unicode_backport
+
+Charset Detection, for Everyone 👋
+
+
+ The Real First Universal Charset Detector
+
+> A library that helps you read text from an unknown charset encoding. Motivated by `chardet`,
+> I'm trying to resolve the issue by taking a new approach.
+> All IANA character set names for which the Python core library provides codecs are supported.
+
+
+ >>>>> 👉 Try Me Online Now, Then Adopt Me 👈 <<<<<
+
+
+This project offers you an alternative to **Universal Charset Encoding Detector**, also known as **Chardet**.
+
+| Feature | [Chardet](https://github.com/chardet/chardet) | Charset Normalizer | [cChardet](https://github.com/PyYoshi/cChardet) |
+|--------------------------------------------------|:---------------------------------------------:|:------------------------------------------------------------------------------------------------------:|:-----------------------------------------------:|
+| `Fast` | ❌ | ✅ | ✅ |
+| `Universal**` | ❌ | ✅ | ❌ |
+| `Reliable` **without** distinguishable standards | ❌ | ✅ | ✅ |
+| `Reliable` **with** distinguishable standards | ✅ | ✅ | ✅ |
+| `License` | LGPL-2.1<br>_restrictive_ | MIT | MPL-1.1<br>_restrictive_ |
+| `Native Python` | ✅ | ✅ | ❌ |
+| `Detect spoken language` | ❌ | ✅ | N/A |
+| `UnicodeDecodeError Safety` | ❌ | ✅ | ❌ |
+| `Whl Size` | 193.6 kB | 40 kB | ~200 kB |
+| `Supported Encoding` | 33 | 🎉 [90](https://charset-normalizer.readthedocs.io/en/latest/user/support.html#supported-encodings) | 40 |
+
+
+*\*\* : They clearly use encoding-specific code, even if it covers most of the encodings in use.*
+
+Did you get here because of the logs? See [https://charset-normalizer.readthedocs.io/en/latest/user/miscellaneous.html](https://charset-normalizer.readthedocs.io/en/latest/user/miscellaneous.html)
+
+## ⚡ Performance
+
+This package offers better performance than its counterpart Chardet. Here are some numbers.
+
+| Package | Accuracy | Mean per file (ms) | File per sec (est) |
+|-----------------------------------------------|:--------:|:------------------:|:------------------:|
+| [chardet](https://github.com/chardet/chardet) | 86 % | 200 ms | 5 file/sec |
+| charset-normalizer | **98 %** | **10 ms** | 100 file/sec |
+
+| Package | 99th percentile | 95th percentile | 50th percentile |
+|-----------------------------------------------|:---------------:|:---------------:|:---------------:|
+| [chardet](https://github.com/chardet/chardet) | 1200 ms | 287 ms | 23 ms |
+| charset-normalizer | 100 ms | 50 ms | 5 ms |
+
+Chardet's performance on larger files (1 MB+) is very poor. Expect a huge difference on large payloads.
+
+> Stats are generated using 400+ files with default parameters. For details on the files used, see the GHA workflows.
+> And yes, these results might change at any time; the dataset can be updated to include more files.
+> The actual delays depend heavily on your CPU capabilities, but the relative factors should remain the same.
+> Keep in mind that the stats are generous and that Chardet's accuracy vs. ours is measured using Chardet's initial
+> capabilities (e.g. supported encodings). Challenge them if you want.
+
+## ✨ Installation
+
+Using pip:
+
+```sh
+pip install charset-normalizer -U
+```
+
+## 🚀 Basic Usage
+
+### CLI
+This package comes with a CLI.
+
+```
+usage: normalizer [-h] [-v] [-a] [-n] [-m] [-r] [-f] [-t THRESHOLD]
+ file [file ...]
+
+The Real First Universal Charset Detector. Discover originating encoding used
+on text file. Normalize text to unicode.
+
+positional arguments:
+ files File(s) to be analysed
+
+optional arguments:
+ -h, --help show this help message and exit
+ -v, --verbose Display complementary information about file if any.
+ Stdout will contain logs about the detection process.
+ -a, --with-alternative
+ Output complementary possibilities if any. Top-level
+ JSON WILL be a list.
+ -n, --normalize Permit to normalize input file. If not set, program
+ does not write anything.
+ -m, --minimal Only output the charset detected to STDOUT. Disabling
+ JSON output.
+ -r, --replace Replace file when trying to normalize it instead of
+ creating a new one.
+ -f, --force Replace file without asking if you are sure, use this
+ flag with caution.
+ -t THRESHOLD, --threshold THRESHOLD
+ Define a custom maximum amount of chaos allowed in
+ decoded content. 0. <= chaos <= 1.
+ --version Show version information and exit.
+```
+
+```bash
+normalizer ./data/sample.1.fr.srt
+```
+
+🎉 Since version 1.4.0 the CLI produces an easily usable stdout result in JSON format.
+
+```json
+{
+ "path": "/home/default/projects/charset_normalizer/data/sample.1.fr.srt",
+ "encoding": "cp1252",
+ "encoding_aliases": [
+ "1252",
+ "windows_1252"
+ ],
+ "alternative_encodings": [
+ "cp1254",
+ "cp1256",
+ "cp1258",
+ "iso8859_14",
+ "iso8859_15",
+ "iso8859_16",
+ "iso8859_3",
+ "iso8859_9",
+ "latin_1",
+ "mbcs"
+ ],
+ "language": "French",
+ "alphabets": [
+ "Basic Latin",
+ "Latin-1 Supplement"
+ ],
+ "has_sig_or_bom": false,
+ "chaos": 0.149,
+ "coherence": 97.152,
+ "unicode_path": null,
+ "is_preferred": true
+}
+```
+
+### Python
+*Just print out normalized text*
+```python
+from charset_normalizer import from_path
+
+results = from_path('./my_subtitle.srt')
+
+print(str(results.best()))
+```
+
+*Upgrade your code without effort*
+```python
+from charset_normalizer import detect
+```
+
+The above code will behave the same as **chardet**. We ensure that we offer the best (reasonable) backward-compatible result possible.
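+
+As a minimal sketch of the drop-in use (the sample bytes and the printed value are illustrative):
+
+```python
+from charset_normalizer import detect
+
+# Same call shape as chardet.detect(); the result is a dict with
+# 'encoding', 'language' and 'confidence' keys.
+result = detect('Bсеки човек има право на образование.'.encode('utf_8'))
+print(result['encoding'])  # e.g. 'utf_8'
+```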
+
+See the docs for advanced usage : [readthedocs.io](https://charset-normalizer.readthedocs.io/en/latest/)
+
+## 😇 Why
+
+When I started using Chardet, I noticed that it did not meet my expectations, and I wanted to propose a
+reliable alternative using a completely different method. Also, I never back down from a good challenge!
+
+I **don't care** about the **originating charset** encoding, because **two different tables** can
+produce **two identical rendered strings.**
+What I want is to get readable text, the best I can.
+
+In a way, **I'm brute-forcing text decoding.** How cool is that? 😎
+
+Don't confuse the package **ftfy** with charset-normalizer or chardet. ftfy's goal is to repair Unicode strings, whereas charset-normalizer's is to convert a raw file in an unknown encoding to Unicode.
+
+## 🍰 How
+
+ - Discard all charset encoding tables that could not fit the binary content.
+ - Measure the noise, or the mess, once opened (in chunks) with a corresponding charset encoding.
+ - Extract the matches with the lowest mess detected.
+ - Additionally, measure coherence / probe for a language (a rough sketch of this pipeline follows below).
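+
+As a rough sketch of that pipeline, reusing the package's own `mess_ratio` scorer (this is not the actual
+implementation, which also works in chunks and probes languages; the candidate list is shortened for
+illustration):
+
+```python
+from typing import Optional
+
+from charset_normalizer.md import mess_ratio
+
+
+def naive_best_guess(payload: bytes,
+                     candidates=("utf_8", "cp1252", "latin_1")) -> Optional[str]:
+    scored = []
+    for codec in candidates:
+        try:
+            text = payload.decode(codec)
+        except UnicodeDecodeError:
+            continue  # this encoding table cannot fit the binary content: discard it
+        scored.append((mess_ratio(text, 0.2), codec))  # lower mess is better
+    return min(scored)[1] if scored else None
+```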
+
+**Wait a minute**, what is noise/mess and coherence according to **YOU?**
+
+*Noise:* I opened hundreds of text files, **written by humans**, with the wrong encoding table. **I observed**, then
+**I established** some ground rules about **what is obvious** when **it seems like** a mess.
+ I know that my interpretation of what noise is remains incomplete; feel free to contribute in order to
+ improve or rewrite it.
+
+*Coherence:* For each language on Earth, we have computed ranked letter-appearance frequencies (as best we can). I figured
+that intel is worth something here, so I use those records against decoded text to check whether I can detect intelligent design.
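+
+A toy version of that idea (a hypothetical helper; the real logic, built on per-language frequency
+tables, lives in `charset_normalizer/cd.py`):
+
+```python
+ENGLISH_MOST_FREQUENT = set("etaoinshrdlu")  # most frequent English letters
+
+
+def toy_coherence(text: str) -> float:
+    """Share of alphabetic characters drawn from the expected frequent set."""
+    letters = [c for c in text.lower() if c.isalpha()]
+    if not letters:
+        return 0.0
+    return sum(c in ENGLISH_MOST_FREQUENT for c in letters) / len(letters)
+```
+
+A decoding that yields mostly characters outside the expected frequent set scores low, hinting that the
+encoding guess (or the language guess) is wrong.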
+
+## ⚡ Known limitations
+
+ - Language detection is unreliable when the text contains two or more languages sharing identical letters (e.g. HTML (English tags) + Turkish content (sharing Latin characters)).
+ - Every charset detector heavily depends on sufficient content. In common cases, do not bother running detection on very tiny content.
+
+## ⚠️ About Python EOLs
+
+**If you are running:**
+
+- Python >=2.7,<3.5: Unsupported
+- Python 3.5: charset-normalizer < 2.1
+- Python 3.6: charset-normalizer < 3.1
+
+Upgrade your Python interpreter as soon as possible.
+
+## 👤 Contributing
+
+Contributions, issues and feature requests are very much welcome.
+Feel free to check [issues page](https://github.com/ousret/charset_normalizer/issues) if you want to contribute.
+
+## 📝 License
+
+Copyright © [Ahmed TAHRI @Ousret](https://github.com/Ousret).
+This project is [MIT](https://github.com/Ousret/charset_normalizer/blob/master/LICENSE) licensed.
+
+Character frequencies used in this project © 2012 [Denny Vrandečić](http://simia.net/letters/)
+
+## 💼 For Enterprise
+
+Professional support for charset-normalizer is available as part of the [Tidelift
+Subscription][1]. Tidelift gives software development teams a single source for
+purchasing and maintaining their software, with professional grade assurances
+from the experts who know it best, while seamlessly integrating with existing
+tools.
+
+[1]: https://tidelift.com/subscription/pkg/pypi-charset-normalizer?utm_source=pypi-charset-normalizer&utm_medium=readme
+
+# Changelog
+All notable changes to charset-normalizer will be documented in this file. This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
+
+## [3.2.0](https://github.com/Ousret/charset_normalizer/compare/3.1.0...3.2.0) (2023-06-07)
+
+### Changed
+- The type hint for function `from_path` no longer enforces `PathLike` as its first argument
+- Minor improvement to the overall detection reliability
+
+### Added
+- Introduce function `is_binary` that relies on the main capabilities, optimized to detect binaries
+- Propagate the `enable_fallback` argument throughout `from_bytes`, `from_path`, and `from_fp`, allowing deeper control over the detection (default True)
+- Explicit support for Python 3.12
+
+### Fixed
+- Edge-case detection failure where a file contained a 'very long' camel-cased word (Issue #289)
+
+## [3.1.0](https://github.com/Ousret/charset_normalizer/compare/3.0.1...3.1.0) (2023-03-06)
+
+### Added
+- Argument `should_rename_legacy` for the legacy function `detect`; any new arguments are disregarded without errors (PR #262)
+
+### Removed
+- Support for Python 3.6 (PR #260)
+
+### Changed
+- Optional speedup provided by mypyc 1.0.1
+
+## [3.0.1](https://github.com/Ousret/charset_normalizer/compare/3.0.0...3.0.1) (2022-11-18)
+
+### Fixed
+- Multi-byte cutter/chunk generator did not always cut correctly (PR #233)
+
+### Changed
+- Speedup provided by mypyc 0.990 on Python >= 3.7
+
+## [3.0.0](https://github.com/Ousret/charset_normalizer/compare/2.1.1...3.0.0) (2022-10-20)
+
+### Added
+- Extend the capability of explain=True when cp_isolation contains at most two entries (min one); the Mess-detector results will be logged in detail
+- Support for alternative language frequency sets in charset_normalizer.assets.FREQUENCIES
+- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio
+- `normalizer --version` now specifies whether the current version provides an extra speedup (meaning a mypyc-compiled wheel)
+
+### Changed
+- Build with static metadata using the 'build' frontend
+- Make the language detection stricter
+- Optional: Module `md.py` can be compiled using mypyc to provide an extra speedup of up to 4x over v2.1
+
+### Fixed
+- The CLI with option --normalize failed when using a full path for files
+- TooManyAccentuatedPlugin induced false positives in mess detection when too few alpha characters were fed to it
+- Sphinx warnings when generating the documentation
+
+### Removed
+- The coherence detector no longer returns 'Simple English'; it returns 'English' instead
+- The coherence detector no longer returns 'Classical Chinese'; it returns 'Chinese' instead
+- Breaking: Methods `first()` and `best()` from CharsetMatch
+- UTF-7 will no longer appear as "detected" without a recognized SIG/mark (it is unreliable and conflicts with ASCII)
+- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches
+- Breaking: Top-level function `normalize`
+- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch
+- Support for the backport `unicodedata2`
+
+## [3.0.0rc1](https://github.com/Ousret/charset_normalizer/compare/3.0.0b2...3.0.0rc1) (2022-10-18)
+
+### Added
+- Extend the capability of explain=True when cp_isolation contains at most two entries (min one); the Mess-detector results will be logged in detail
+- Support for alternative language frequency sets in charset_normalizer.assets.FREQUENCIES
+- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio
+
+### Changed
+- Build with static metadata using the 'build' frontend
+- Make the language detection stricter
+
+### Fixed
+- The CLI with option --normalize failed when using a full path for files
+- TooManyAccentuatedPlugin induced false positives in mess detection when too few alpha characters were fed to it
+
+### Removed
+- The coherence detector no longer returns 'Simple English'; it returns 'English' instead
+- The coherence detector no longer returns 'Classical Chinese'; it returns 'Chinese' instead
+
+## [3.0.0b2](https://github.com/Ousret/charset_normalizer/compare/3.0.0b1...3.0.0b2) (2022-08-21)
+
+### Added
+- `normalizer --version` now specifies whether the current version provides an extra speedup (meaning a mypyc-compiled wheel)
+
+### Removed
+- Breaking: Methods `first()` and `best()` from CharsetMatch
+- UTF-7 will no longer appear as "detected" without a recognized SIG/mark (it is unreliable and conflicts with ASCII)
+
+### Fixed
+- Sphinx warnings when generating the documentation
+
+## [3.0.0b1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...3.0.0b1) (2022-08-15)
+
+### Changed
+- Optional: Module `md.py` can be compiled using mypyc to provide an extra speedup of up to 4x over v2.1
+
+### Removed
+- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches
+- Breaking: Top-level function `normalize`
+- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch
+- Support for the backport `unicodedata2`
+
+## [2.1.1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...2.1.1) (2022-08-19)
+
+### Deprecated
+- Function `normalize` scheduled for removal in 3.0
+
+### Changed
+- Removed a useless call to decode in the function is_unprintable (#206)
+
+### Fixed
+- Third-party library (i18n xgettext) crashing because utf_8 (PEP 263) written with an underscore was not recognized; from [@aleksandernovikov](https://github.com/aleksandernovikov) (#204)
+
+## [2.1.0](https://github.com/Ousret/charset_normalizer/compare/2.0.12...2.1.0) (2022-06-19)
+
+### Added
+- Output the Unicode table version when running the CLI with `--version` (PR #194)
+
+### Changed
+- Re-use decoded buffer for single byte character sets from [@nijel](https://github.com/nijel) (PR #175)
+- Fixing some performance bottlenecks from [@deedy5](https://github.com/deedy5) (PR #183)
+
+### Fixed
+- Workaround for a potential bug in CPython where a Zero Width No-Break Space located in Arabic Presentation Forms-B, Unicode 1.1, is not acknowledged as a space (PR #175)
+- CLI default threshold aligned with the API threshold from [@oleksandr-kuzmenko](https://github.com/oleksandr-kuzmenko) (PR #181)
+
+### Removed
+- Support for Python 3.5 (PR #192)
+
+### Deprecated
+- Use of backport unicodedata from `unicodedata2` as Python is quickly catching up, scheduled for removal in 3.0 (PR #194)
+
+## [2.0.12](https://github.com/Ousret/charset_normalizer/compare/2.0.11...2.0.12) (2022-02-12)
+
+### Fixed
+- ASCII mis-detection in rare cases (PR #170)
+
+## [2.0.11](https://github.com/Ousret/charset_normalizer/compare/2.0.10...2.0.11) (2022-01-30)
+
+### Added
+- Explicit support for Python 3.11 (PR #164)
+
+### Changed
+- The logging behavior has been completely reviewed, now using only TRACE and DEBUG levels (PR #163 #165)
+
+## [2.0.10](https://github.com/Ousret/charset_normalizer/compare/2.0.9...2.0.10) (2022-01-04)
+
+### Fixed
+- Fallback match entries might lead to UnicodeDecodeError for large byte sequences (PR #154)
+
+### Changed
+- Skipping the language-detection (CD) on ASCII (PR #155)
+
+## [2.0.9](https://github.com/Ousret/charset_normalizer/compare/2.0.8...2.0.9) (2021-12-03)
+
+### Changed
+- Moderating the logging impact (since 2.0.8) for specific environments (PR #147)
+
+### Fixed
+- Wrong logging level applied when setting kwarg `explain` to True (PR #146)
+
+## [2.0.8](https://github.com/Ousret/charset_normalizer/compare/2.0.7...2.0.8) (2021-11-24)
+### Changed
+- Improved Vietnamese detection (PR #126)
+- MD improvement on trailing data and long foreign (non-pure-Latin) data (PR #124)
+- Efficiency improvements in cd/alphabet_languages from [@adbar](https://github.com/adbar) (PR #122)
+- Call sum() without an intermediary list, following PEP 289 recommendations, from [@adbar](https://github.com/adbar) (PR #129)
+- Code style as refactored by Sourcery-AI (PR #131)
+- Minor adjustment on the MD around european words (PR #133)
+- Remove and replace SRTs from assets / tests (PR #139)
+- Initialize the library logger with a `NullHandler` by default from [@nmaynes](https://github.com/nmaynes) (PR #135)
+- Setting kwarg `explain` to True will add provisionally (bounded to function lifespan) a specific stream handler (PR #135)
+
+### Fixed
+- Fix large (misleading) sequence giving UnicodeDecodeError (PR #137)
+- Avoid using insignificantly small chunks (PR #137)
+
+### Added
+- Add and expose function `set_logging_handler` to configure a specific StreamHandler from [@nmaynes](https://github.com/nmaynes) (PR #135)
+- Add `CHANGELOG.md` entries, format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) (PR #141)
+
+## [2.0.7](https://github.com/Ousret/charset_normalizer/compare/2.0.6...2.0.7) (2021-10-11)
+### Added
+- Add support for Kazakh (Cyrillic) language detection (PR #109)
+
+### Changed
+- Further improve inferring the language from a given single-byte code page (PR #112)
+- Vainly trying to leverage PEP263 when PEP3120 is not supported (PR #116)
+- Refactoring for potential performance improvements in loops from [@adbar](https://github.com/adbar) (PR #113)
+- Various detection improvements (MD+CD) (PR #117)
+
+### Removed
+- Remove redundant logging entry about detected language(s) (PR #115)
+
+### Fixed
+- Fix a minor inconsistency between Python 3.5 and other versions regarding language detection (PR #117 #102)
+
+## [2.0.6](https://github.com/Ousret/charset_normalizer/compare/2.0.5...2.0.6) (2021-09-18)
+### Fixed
+- Unforeseen regression with the loss of backward compatibility with some older minor versions of Python 3.5.x (PR #100)
+- Fix CLI crash when using --minimal output in certain cases (PR #103)
+
+### Changed
+- Minor improvement to the detection efficiency (less than 1%) (PR #106 #101)
+
+## [2.0.5](https://github.com/Ousret/charset_normalizer/compare/2.0.4...2.0.5) (2021-09-14)
+### Changed
+- The project now complies with flake8, mypy, isort and black to ensure better overall quality (PR #81)
+- The backward compatibility with v1.x was improved; the old staticmethods are restored (PR #82)
+- The Unicode detection is slightly improved (PR #93)
+- Add syntax sugar \_\_bool\_\_ for results CharsetMatches list-container (PR #91)
+
+### Removed
+- The project no longer raises a warning on tiny content given for detection; it is simply logged as a warning instead (PR #92)
+
+### Fixed
+- In some rare cases, the chunk extractor could cut in the middle of a multi-byte character and mislead the mess detection (PR #95)
+- Some rare 'space' characters could trip up the UnprintablePlugin/Mess detection (PR #96)
+- The MANIFEST.in was not exhaustive (PR #78)
+
+## [2.0.4](https://github.com/Ousret/charset_normalizer/compare/2.0.3...2.0.4) (2021-07-30)
+### Fixed
+- The CLI no longer raises an unexpected exception when no encoding has been found (PR #70)
+- Fix accessing the 'alphabets' property when the payload contains surrogate characters (PR #68)
+- The logger could mislead (explain=True) on detected languages and the impact of one MBCS match (PR #72)
+- Submatch factoring could be wrong in rare edge cases (PR #72)
+- Multiple files given to the CLI were ignored when publishing results to STDOUT. (After the first path) (PR #72)
+- Fix line endings from CRLF to LF for certain project files (PR #67)
+
+### Changed
+- Adjust the MD to lower the sensitivity, thus improving the global detection reliability (PR #69 #76)
+- Allow fallback on specified encoding if any (PR #71)
+
+## [2.0.3](https://github.com/Ousret/charset_normalizer/compare/2.0.2...2.0.3) (2021-07-16)
+### Changed
+- Part of the detection mechanism has been improved to be less sensitive, resulting in more accurate detection results. Especially ASCII. (PR #63)
+- According to the community wishes, the detection will fall back on ASCII or UTF-8 in a last-resort case. (PR #64)
+
+## [2.0.2](https://github.com/Ousret/charset_normalizer/compare/2.0.1...2.0.2) (2021-07-15)
+### Fixed
+- Empty/too-small JSON payload mis-detection fixed. Report from [@tseaver](https://github.com/tseaver) (PR #59)
+
+### Changed
+- Don't inject unicodedata2 into sys.modules from [@akx](https://github.com/akx) (PR #57)
+
+## [2.0.1](https://github.com/Ousret/charset_normalizer/compare/2.0.0...2.0.1) (2021-07-13)
+### Fixed
+- Make it work where there isn't a filesystem available, dropping assets frequencies.json. Report from [@sethmlarson](https://github.com/sethmlarson). (PR #55)
+- Using explain=False permanently disabled the verbose output in the current runtime (PR #47)
+- One log entry (language target preemptive) was not shown in logs when using explain=True (PR #47)
+- Fix undesired exception (ValueError) on getitem of instance CharsetMatches (PR #52)
+
+### Changed
+- The public function normalize's default argument values were not aligned with from_bytes (PR #53)
+
+### Added
+- You may now use charset aliases in cp_isolation and cp_exclusion arguments (PR #47)
+
+## [2.0.0](https://github.com/Ousret/charset_normalizer/compare/1.4.1...2.0.0) (2021-07-02)
+### Changed
+- 4 to 5 times faster than the previous 1.4.0 release. At least 2x faster than Chardet.
+- Emphasis has been put on UTF-8 detection, which should perform nearly instantaneously.
+- The backward compatibility with Chardet has been greatly improved. The legacy detect function returns an identical charset name whenever possible.
+- The detection mechanism has been slightly improved; Turkish content is now detected correctly (most of the time)
+- The program has been rewritten to ease readability and maintainability (using static typing).
+- utf_7 detection has been reinstated.
+
+### Removed
+- This package no longer requires anything when used with Python 3.5 (dropped cached_property)
+- Removed support for these languages: Catalan, Esperanto, Kazakh, Basque, Volapük, Azeri, Galician, Nynorsk, Macedonian, and Serbo-Croatian.
+- The exception hook on UnicodeDecodeError has been removed.
+
+### Deprecated
+- Methods coherence_non_latin, w_counter, chaos_secondary_pass of the class CharsetMatch are now deprecated and scheduled for removal in v3.0
+
+### Fixed
+- The CLI output used the relative path of the file(s); it should be absolute.
+
+## [1.4.1](https://github.com/Ousret/charset_normalizer/compare/1.4.0...1.4.1) (2021-05-28)
+### Fixed
+- Logger configuration/usage no longer conflict with others (PR #44)
+
+## [1.4.0](https://github.com/Ousret/charset_normalizer/compare/1.3.9...1.4.0) (2021-05-21)
+### Removed
+- Using standard logging instead of using the package loguru.
+- Dropping nose test framework in favor of the maintained pytest.
+- Choose to not use dragonmapper package to help with gibberish Chinese/CJK text.
+- Require cached_property only for Python 3.5 due to constraint. Dropping for every other interpreter version.
+- Stop support for UTF-7 that does not contain a SIG.
+- Dropping PrettyTable, replaced with pure JSON output in CLI.
+
+### Fixed
+- The BOM marker in a CharsetNormalizerMatch instance could be False in rare cases even if obviously present, due to the sub-match factoring process.
+- The BOM was not searched for properly when trying the utf32/16 parent codec.
+
+### Changed
+- Improving the package's final size by compressing frequencies.json.
+- Huge improvement on the largest payloads.
+
+### Added
+- CLI now produces JSON-consumable output.
+- Return ASCII if the given sequences fit, given reasonable confidence.
+
+## [1.3.9](https://github.com/Ousret/charset_normalizer/compare/1.3.8...1.3.9) (2021-05-13)
+
+### Fixed
+- In some very rare cases, you may end up getting encode/decode errors due to a bad bytes payload (PR #40)
+
+## [1.3.8](https://github.com/Ousret/charset_normalizer/compare/1.3.7...1.3.8) (2021-05-12)
+
+### Fixed
+- An empty payload given for detection could cause an exception when accessing the `alphabets` property. (PR #39)
+
+## [1.3.7](https://github.com/Ousret/charset_normalizer/compare/1.3.6...1.3.7) (2021-05-12)
+
+### Fixed
+- The legacy detect function should return UTF-8-SIG if a SIG is present in the payload. (PR #38)
+
+## [1.3.6](https://github.com/Ousret/charset_normalizer/compare/1.3.5...1.3.6) (2021-02-09)
+
+### Changed
+- Amend the previous release to allow prettytable 2.0 (PR #35)
+
+## [1.3.5](https://github.com/Ousret/charset_normalizer/compare/1.3.4...1.3.5) (2021-02-08)
+
+### Fixed
+- Fix error while using the package with a python pre-release interpreter (PR #33)
+
+### Changed
+- Dependencies refactoring, constraints revised.
+
+### Added
+- Add Python 3.9 and 3.10 to the supported interpreters
+
+MIT License
+
+Copyright (c) 2019 TAHRI Ahmed R.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/billinglayer/python/charset_normalizer-3.2.0.dist-info/RECORD b/billinglayer/python/charset_normalizer-3.2.0.dist-info/RECORD
new file mode 100644
index 0000000..5cd4703
--- /dev/null
+++ b/billinglayer/python/charset_normalizer-3.2.0.dist-info/RECORD
@@ -0,0 +1,35 @@
+../../bin/normalizer,sha256=s96Kuek2W41Ri7tEiehraywrWWFl5pttBZvlFrupd74,252
+charset_normalizer-3.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+charset_normalizer-3.2.0.dist-info/LICENSE,sha256=6zGgxaT7Cbik4yBV0lweX5w1iidS_vPNcgIT0cz-4kE,1070
+charset_normalizer-3.2.0.dist-info/METADATA,sha256=K2QHhX9fQ7jFxO7y4IQk7TqYZSH7iTyxgTJQxA65EH0,31284
+charset_normalizer-3.2.0.dist-info/RECORD,,
+charset_normalizer-3.2.0.dist-info/WHEEL,sha256=8KU227XctfdX2qUwyjQUO-ciQuZtmyPUCKmeGV6Byto,152
+charset_normalizer-3.2.0.dist-info/entry_points.txt,sha256=uYo8aIGLWv8YgWfSna5HnfY_En4pkF1w4bgawNAXzP0,76
+charset_normalizer-3.2.0.dist-info/top_level.txt,sha256=7ASyzePr8_xuZWJsnqJjIBtyV8vhEo0wBCv1MPRRi3Q,19
+charset_normalizer/__init__.py,sha256=UzI3xC8PhmcLRMzSgPb6minTmRq0kWznnCBJ8ZCc2XI,1577
+charset_normalizer/__pycache__/__init__.cpython-311.pyc,,
+charset_normalizer/__pycache__/api.cpython-311.pyc,,
+charset_normalizer/__pycache__/cd.cpython-311.pyc,,
+charset_normalizer/__pycache__/constant.cpython-311.pyc,,
+charset_normalizer/__pycache__/legacy.cpython-311.pyc,,
+charset_normalizer/__pycache__/md.cpython-311.pyc,,
+charset_normalizer/__pycache__/models.cpython-311.pyc,,
+charset_normalizer/__pycache__/utils.cpython-311.pyc,,
+charset_normalizer/__pycache__/version.cpython-311.pyc,,
+charset_normalizer/api.py,sha256=WOlWjy6wT8SeMYFpaGbXZFN1TMXa-s8vZYfkL4G29iQ,21097
+charset_normalizer/assets/__init__.py,sha256=wpRfujN7GJuEE5wHHo3wEDVoJ5ovzRIxsImyimCBfGU,20069
+charset_normalizer/assets/__pycache__/__init__.cpython-311.pyc,,
+charset_normalizer/cd.py,sha256=mZuiTSKq4XpweSDD2H4T4R3Axtaa-QS0tpEWdpMuAzQ,12554
+charset_normalizer/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+charset_normalizer/cli/__pycache__/__init__.cpython-311.pyc,,
+charset_normalizer/cli/__pycache__/normalizer.cpython-311.pyc,,
+charset_normalizer/cli/normalizer.py,sha256=2F-xURZJzo063Ye-2RLJ2wcmURpbKeAzKwpiws65dAs,9744
+charset_normalizer/constant.py,sha256=PmCeoKXqq3ZbCtCUpKHwwFBIv9DXMT_an1yd24q28mA,19101
+charset_normalizer/legacy.py,sha256=T-QuVMsMeDiQEk8WSszMrzVJg_14AMeSkmHdRYhdl1k,2071
+charset_normalizer/md.cpython-311-x86_64-linux-gnu.so,sha256=drk8jTNY7lZ9fU8bZ_e8bFSbF5erdCt5BCN8lI49IZ0,17496
+charset_normalizer/md.py,sha256=gEWM354DqBsiSoNkKzFrIW4KRFQjQLbqYnbHAdBwj74,18682
+charset_normalizer/md__mypyc.cpython-311-x86_64-linux-gnu.so,sha256=YZtTuQgOU2p-fpmS6PZJBbjPI0M-IDmYj_7urdONnIQ,418056
+charset_normalizer/models.py,sha256=mC11wo84l00u2o03TRNX7M5ItBAbPUKKXgJSFxA35GY,11492
+charset_normalizer/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+charset_normalizer/utils.py,sha256=HdwmBy9vRqcRVGHKZqYWtpiS5wA35cLjnlVQCm0Bq9s,11578
+charset_normalizer/version.py,sha256=LbH8odlzMnwR4xZF9wCsnGXQA19axDO7HZ-J9hegIX0,79
diff --git a/billinglayer/python/charset_normalizer-3.2.0.dist-info/WHEEL b/billinglayer/python/charset_normalizer-3.2.0.dist-info/WHEEL
new file mode 100644
index 0000000..3bed0cb
--- /dev/null
+++ b/billinglayer/python/charset_normalizer-3.2.0.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.40.0)
+Root-Is-Purelib: false
+Tag: cp311-cp311-manylinux_2_17_x86_64
+Tag: cp311-cp311-manylinux2014_x86_64
+
diff --git a/billinglayer/python/charset_normalizer-3.2.0.dist-info/entry_points.txt b/billinglayer/python/charset_normalizer-3.2.0.dist-info/entry_points.txt
new file mode 100644
index 0000000..a06d360
--- /dev/null
+++ b/billinglayer/python/charset_normalizer-3.2.0.dist-info/entry_points.txt
@@ -0,0 +1,2 @@
+[console_scripts]
+normalizer = charset_normalizer.cli.normalizer:cli_detect
diff --git a/billinglayer/python/charset_normalizer-3.2.0.dist-info/top_level.txt b/billinglayer/python/charset_normalizer-3.2.0.dist-info/top_level.txt
new file mode 100644
index 0000000..66958f0
--- /dev/null
+++ b/billinglayer/python/charset_normalizer-3.2.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+charset_normalizer
diff --git a/billinglayer/python/charset_normalizer/__init__.py b/billinglayer/python/charset_normalizer/__init__.py
new file mode 100644
index 0000000..55991fc
--- /dev/null
+++ b/billinglayer/python/charset_normalizer/__init__.py
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+"""
+Charset-Normalizer
+~~~~~~~~~~~~~~
+The Real First Universal Charset Detector.
+A library that helps you read text from an unknown charset encoding.
+Motivated by chardet, this package tries to resolve the issue by taking a new approach.
+All IANA character set names for which the Python core library provides codecs are supported.
+
+Basic usage:
+ >>> from charset_normalizer import from_bytes
+ >>> results = from_bytes('Bсеки човек има право на образование. Oбразованието!'.encode('utf_8'))
+ >>> best_guess = results.best()
+ >>> str(best_guess)
+ 'Bсеки човек има право на образование. Oбразованието!'
+
+Other methods and usages are available - see the full documentation
+at <https://charset-normalizer.readthedocs.io/en/latest/>.
+:copyright: (c) 2021 by Ahmed TAHRI
+:license: MIT, see LICENSE for more details.
+"""
+import logging
+
+from .api import from_bytes, from_fp, from_path, is_binary
+from .legacy import detect
+from .models import CharsetMatch, CharsetMatches
+from .utils import set_logging_handler
+from .version import VERSION, __version__
+
+__all__ = (
+ "from_fp",
+ "from_path",
+ "from_bytes",
+ "is_binary",
+ "detect",
+ "CharsetMatch",
+ "CharsetMatches",
+ "__version__",
+ "VERSION",
+ "set_logging_handler",
+)
+
+# Attach a NullHandler to the top level logger by default
+# https://docs.python.org/3.3/howto/logging.html#configuring-logging-for-a-library
+
+logging.getLogger("charset_normalizer").addHandler(logging.NullHandler())
diff --git a/billinglayer/python/charset_normalizer/__pycache__/__init__.cpython-311.pyc b/billinglayer/python/charset_normalizer/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..e7d23ae
Binary files /dev/null and b/billinglayer/python/charset_normalizer/__pycache__/__init__.cpython-311.pyc differ
diff --git a/billinglayer/python/charset_normalizer/__pycache__/api.cpython-311.pyc b/billinglayer/python/charset_normalizer/__pycache__/api.cpython-311.pyc
new file mode 100644
index 0000000..369c017
Binary files /dev/null and b/billinglayer/python/charset_normalizer/__pycache__/api.cpython-311.pyc differ
diff --git a/billinglayer/python/charset_normalizer/__pycache__/cd.cpython-311.pyc b/billinglayer/python/charset_normalizer/__pycache__/cd.cpython-311.pyc
new file mode 100644
index 0000000..40417de
Binary files /dev/null and b/billinglayer/python/charset_normalizer/__pycache__/cd.cpython-311.pyc differ
diff --git a/billinglayer/python/charset_normalizer/__pycache__/constant.cpython-311.pyc b/billinglayer/python/charset_normalizer/__pycache__/constant.cpython-311.pyc
new file mode 100644
index 0000000..d26dee0
Binary files /dev/null and b/billinglayer/python/charset_normalizer/__pycache__/constant.cpython-311.pyc differ
diff --git a/billinglayer/python/charset_normalizer/__pycache__/legacy.cpython-311.pyc b/billinglayer/python/charset_normalizer/__pycache__/legacy.cpython-311.pyc
new file mode 100644
index 0000000..97756d8
Binary files /dev/null and b/billinglayer/python/charset_normalizer/__pycache__/legacy.cpython-311.pyc differ
diff --git a/billinglayer/python/charset_normalizer/__pycache__/md.cpython-311.pyc b/billinglayer/python/charset_normalizer/__pycache__/md.cpython-311.pyc
new file mode 100644
index 0000000..c564ae1
Binary files /dev/null and b/billinglayer/python/charset_normalizer/__pycache__/md.cpython-311.pyc differ
diff --git a/billinglayer/python/charset_normalizer/__pycache__/models.cpython-311.pyc b/billinglayer/python/charset_normalizer/__pycache__/models.cpython-311.pyc
new file mode 100644
index 0000000..b5bd4c1
Binary files /dev/null and b/billinglayer/python/charset_normalizer/__pycache__/models.cpython-311.pyc differ
diff --git a/billinglayer/python/charset_normalizer/__pycache__/utils.cpython-311.pyc b/billinglayer/python/charset_normalizer/__pycache__/utils.cpython-311.pyc
new file mode 100644
index 0000000..87869b7
Binary files /dev/null and b/billinglayer/python/charset_normalizer/__pycache__/utils.cpython-311.pyc differ
diff --git a/billinglayer/python/charset_normalizer/__pycache__/version.cpython-311.pyc b/billinglayer/python/charset_normalizer/__pycache__/version.cpython-311.pyc
new file mode 100644
index 0000000..d7dc94f
Binary files /dev/null and b/billinglayer/python/charset_normalizer/__pycache__/version.cpython-311.pyc differ
diff --git a/billinglayer/python/charset_normalizer/api.py b/billinglayer/python/charset_normalizer/api.py
new file mode 100644
index 0000000..0ba08e3
--- /dev/null
+++ b/billinglayer/python/charset_normalizer/api.py
@@ -0,0 +1,626 @@
+import logging
+from os import PathLike
+from typing import BinaryIO, List, Optional, Set, Union
+
+from .cd import (
+ coherence_ratio,
+ encoding_languages,
+ mb_encoding_languages,
+ merge_coherence_ratios,
+)
+from .constant import IANA_SUPPORTED, TOO_BIG_SEQUENCE, TOO_SMALL_SEQUENCE, TRACE
+from .md import mess_ratio
+from .models import CharsetMatch, CharsetMatches
+from .utils import (
+ any_specified_encoding,
+ cut_sequence_chunks,
+ iana_name,
+ identify_sig_or_bom,
+ is_cp_similar,
+ is_multi_byte_encoding,
+ should_strip_sig_or_bom,
+)
+
+# Will most likely be controversial
+# logging.addLevelName(TRACE, "TRACE")
+logger = logging.getLogger("charset_normalizer")
+explain_handler = logging.StreamHandler()
+explain_handler.setFormatter(
+ logging.Formatter("%(asctime)s | %(levelname)s | %(message)s")
+)
+
+
+def from_bytes(
+ sequences: Union[bytes, bytearray],
+ steps: int = 5,
+ chunk_size: int = 512,
+ threshold: float = 0.2,
+ cp_isolation: Optional[List[str]] = None,
+ cp_exclusion: Optional[List[str]] = None,
+ preemptive_behaviour: bool = True,
+ explain: bool = False,
+ language_threshold: float = 0.1,
+ enable_fallback: bool = True,
+) -> CharsetMatches:
+ """
+ Given a raw bytes sequence, return the best possible charsets usable to render str objects.
+ If there are no results, it is a strong indicator that the source is binary/not text.
+ By default, the process will extract 5 blocks of 512 bytes each to assess the mess and coherence of a given sequence.
+ It will give up on a particular code page after 20% of measured mess. Those criteria are customizable at will.
+
+ The preemptive behavior DOES NOT replace the traditional detection workflow; it prioritizes a particular code page
+ but never takes it for granted. This can improve performance.
+
+ You may want to focus your attention on some code pages and/or exclude others; use cp_isolation and cp_exclusion
+ for that purpose.
+
+ This function will strip the SIG in the payload/sequence every time except for UTF-16 and UTF-32.
+ By default the library does not set up any handler other than the NullHandler. If you set the 'explain'
+ toggle to True, it will alter the logger configuration to add a StreamHandler suitable for debugging.
+ A custom logging format and handler can be set manually.
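+
+ Example (illustrative; mirrors the package docstring):
+
+ >>> from charset_normalizer import from_bytes
+ >>> str(from_bytes('Bсеки човек има право на образование.'.encode('utf_8')).best())
+ 'Bсеки човек има право на образование.'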
+ """
+
+ if not isinstance(sequences, (bytearray, bytes)):
+ raise TypeError(
+ "Expected object of type bytes or bytearray, got: {0}".format(
+ type(sequences)
+ )
+ )
+
+ if explain:
+ previous_logger_level: int = logger.level
+ logger.addHandler(explain_handler)
+ logger.setLevel(TRACE)
+
+ length: int = len(sequences)
+
+ if length == 0:
+ logger.debug("Encoding detection on empty bytes, assuming utf_8 intention.")
+ if explain:
+ logger.removeHandler(explain_handler)
+ logger.setLevel(previous_logger_level or logging.WARNING)
+ return CharsetMatches([CharsetMatch(sequences, "utf_8", 0.0, False, [], "")])
+
+ if cp_isolation is not None:
+ logger.log(
+ TRACE,
+ "cp_isolation is set. use this flag for debugging purpose. "
+ "limited list of encoding allowed : %s.",
+ ", ".join(cp_isolation),
+ )
+ cp_isolation = [iana_name(cp, False) for cp in cp_isolation]
+ else:
+ cp_isolation = []
+
+ if cp_exclusion is not None:
+ logger.log(
+ TRACE,
+ "cp_exclusion is set. use this flag for debugging purpose. "
+ "limited list of encoding excluded : %s.",
+ ", ".join(cp_exclusion),
+ )
+ cp_exclusion = [iana_name(cp, False) for cp in cp_exclusion]
+ else:
+ cp_exclusion = []
+
+ if length <= (chunk_size * steps):
+ logger.log(
+ TRACE,
+ "override steps (%i) and chunk_size (%i) as content does not fit (%i byte(s) given) parameters.",
+ steps,
+ chunk_size,
+ length,
+ )
+ steps = 1
+ chunk_size = length
+
+ if steps > 1 and length / steps < chunk_size:
+ chunk_size = int(length / steps)
+
+ is_too_small_sequence: bool = len(sequences) < TOO_SMALL_SEQUENCE
+ is_too_large_sequence: bool = len(sequences) >= TOO_BIG_SEQUENCE
+
+ if is_too_small_sequence:
+ logger.log(
+ TRACE,
+ "Trying to detect encoding from a tiny portion of ({}) byte(s).".format(
+ length
+ ),
+ )
+ elif is_too_large_sequence:
+ logger.log(
+ TRACE,
+ "Using lazy str decoding because the payload is quite large, ({}) byte(s).".format(
+ length
+ ),
+ )
+
+ prioritized_encodings: List[str] = []
+
+ specified_encoding: Optional[str] = (
+ any_specified_encoding(sequences) if preemptive_behaviour else None
+ )
+
+ if specified_encoding is not None:
+ prioritized_encodings.append(specified_encoding)
+ logger.log(
+ TRACE,
+ "Detected declarative mark in sequence. Priority +1 given for %s.",
+ specified_encoding,
+ )
+
+ tested: Set[str] = set()
+ tested_but_hard_failure: List[str] = []
+ tested_but_soft_failure: List[str] = []
+
+ fallback_ascii: Optional[CharsetMatch] = None
+ fallback_u8: Optional[CharsetMatch] = None
+ fallback_specified: Optional[CharsetMatch] = None
+
+ results: CharsetMatches = CharsetMatches()
+
+ sig_encoding, sig_payload = identify_sig_or_bom(sequences)
+
+ if sig_encoding is not None:
+ prioritized_encodings.append(sig_encoding)
+ logger.log(
+ TRACE,
+ "Detected a SIG or BOM mark on first %i byte(s). Priority +1 given for %s.",
+ len(sig_payload),
+ sig_encoding,
+ )
+
+ prioritized_encodings.append("ascii")
+
+ if "utf_8" not in prioritized_encodings:
+ prioritized_encodings.append("utf_8")
+
+ for encoding_iana in prioritized_encodings + IANA_SUPPORTED:
+ if cp_isolation and encoding_iana not in cp_isolation:
+ continue
+
+ if cp_exclusion and encoding_iana in cp_exclusion:
+ continue
+
+ if encoding_iana in tested:
+ continue
+
+ tested.add(encoding_iana)
+
+ decoded_payload: Optional[str] = None
+ bom_or_sig_available: bool = sig_encoding == encoding_iana
+ strip_sig_or_bom: bool = bom_or_sig_available and should_strip_sig_or_bom(
+ encoding_iana
+ )
+
+ if encoding_iana in {"utf_16", "utf_32"} and not bom_or_sig_available:
+ logger.log(
+ TRACE,
+ "Encoding %s won't be tested as-is because it require a BOM. Will try some sub-encoder LE/BE.",
+ encoding_iana,
+ )
+ continue
+ if encoding_iana in {"utf_7"} and not bom_or_sig_available:
+ logger.log(
+ TRACE,
+ "Encoding %s won't be tested as-is because detection is unreliable without BOM/SIG.",
+ encoding_iana,
+ )
+ continue
+
+ try:
+ is_multi_byte_decoder: bool = is_multi_byte_encoding(encoding_iana)
+ except (ModuleNotFoundError, ImportError):
+ logger.log(
+ TRACE,
+ "Encoding %s does not provide an IncrementalDecoder",
+ encoding_iana,
+ )
+ continue
+
+ try:
+ if is_too_large_sequence and is_multi_byte_decoder is False:
+ str(
+ sequences[: int(50e4)]
+ if strip_sig_or_bom is False
+ else sequences[len(sig_payload) : int(50e4)],
+ encoding=encoding_iana,
+ )
+ else:
+ decoded_payload = str(
+ sequences
+ if strip_sig_or_bom is False
+ else sequences[len(sig_payload) :],
+ encoding=encoding_iana,
+ )
+ except (UnicodeDecodeError, LookupError) as e:
+ if not isinstance(e, LookupError):
+ logger.log(
+ TRACE,
+ "Code page %s does not fit given bytes sequence at ALL. %s",
+ encoding_iana,
+ str(e),
+ )
+ tested_but_hard_failure.append(encoding_iana)
+ continue
+
+ similar_soft_failure_test: bool = False
+
+ for encoding_soft_failed in tested_but_soft_failure:
+ if is_cp_similar(encoding_iana, encoding_soft_failed):
+ similar_soft_failure_test = True
+ break
+
+ if similar_soft_failure_test:
+ logger.log(
+ TRACE,
+ "%s is deemed too similar to code page %s and was consider unsuited already. Continuing!",
+ encoding_iana,
+ encoding_soft_failed,
+ )
+ continue
+
+ r_ = range(
+ 0 if not bom_or_sig_available else len(sig_payload),
+ length,
+ int(length / steps),
+ )
+
+ multi_byte_bonus: bool = (
+ is_multi_byte_decoder
+ and decoded_payload is not None
+ and len(decoded_payload) < length
+ )
+
+ if multi_byte_bonus:
+ logger.log(
+ TRACE,
+ "Code page %s is a multi byte encoding table and it appear that at least one character "
+ "was encoded using n-bytes.",
+ encoding_iana,
+ )
+
+ max_chunk_gave_up: int = int(len(r_) / 4)
+
+ max_chunk_gave_up = max(max_chunk_gave_up, 2)
+ early_stop_count: int = 0
+ lazy_str_hard_failure = False
+
+ md_chunks: List[str] = []
+ md_ratios = []
+
+ try:
+ for chunk in cut_sequence_chunks(
+ sequences,
+ encoding_iana,
+ r_,
+ chunk_size,
+ bom_or_sig_available,
+ strip_sig_or_bom,
+ sig_payload,
+ is_multi_byte_decoder,
+ decoded_payload,
+ ):
+ md_chunks.append(chunk)
+
+ md_ratios.append(
+ mess_ratio(
+ chunk,
+ threshold,
+ explain is True and 1 <= len(cp_isolation) <= 2,
+ )
+ )
+
+ if md_ratios[-1] >= threshold:
+ early_stop_count += 1
+
+ if (early_stop_count >= max_chunk_gave_up) or (
+ bom_or_sig_available and strip_sig_or_bom is False
+ ):
+ break
+ except (
+ UnicodeDecodeError
+ ) as e: # Lazy str loading may have missed something there
+ logger.log(
+ TRACE,
+ "LazyStr Loading: After MD chunk decode, code page %s does not fit given bytes sequence at ALL. %s",
+ encoding_iana,
+ str(e),
+ )
+ early_stop_count = max_chunk_gave_up
+ lazy_str_hard_failure = True
+
+        # We might want to check the sequence again with the whole content,
+        # but only if the initial MD tests pass.
+ if (
+ not lazy_str_hard_failure
+ and is_too_large_sequence
+ and not is_multi_byte_decoder
+ ):
+ try:
+ sequences[int(50e3) :].decode(encoding_iana, errors="strict")
+ except UnicodeDecodeError as e:
+ logger.log(
+ TRACE,
+ "LazyStr Loading: After final lookup, code page %s does not fit given bytes sequence at ALL. %s",
+ encoding_iana,
+ str(e),
+ )
+ tested_but_hard_failure.append(encoding_iana)
+ continue
+
+ mean_mess_ratio: float = sum(md_ratios) / len(md_ratios) if md_ratios else 0.0
+ if mean_mess_ratio >= threshold or early_stop_count >= max_chunk_gave_up:
+ tested_but_soft_failure.append(encoding_iana)
+ logger.log(
+ TRACE,
+ "%s was excluded because of initial chaos probing. Gave up %i time(s). "
+ "Computed mean chaos is %f %%.",
+ encoding_iana,
+ early_stop_count,
+ round(mean_mess_ratio * 100, ndigits=3),
+ )
+        # Prepare those fallbacks in case we end up with nothing.
+ if (
+ enable_fallback
+ and encoding_iana in ["ascii", "utf_8", specified_encoding]
+ and not lazy_str_hard_failure
+ ):
+ fallback_entry = CharsetMatch(
+ sequences, encoding_iana, threshold, False, [], decoded_payload
+ )
+ if encoding_iana == specified_encoding:
+ fallback_specified = fallback_entry
+ elif encoding_iana == "ascii":
+ fallback_ascii = fallback_entry
+ else:
+ fallback_u8 = fallback_entry
+ continue
+
+ logger.log(
+ TRACE,
+ "%s passed initial chaos probing. Mean measured chaos is %f %%",
+ encoding_iana,
+ round(mean_mess_ratio * 100, ndigits=3),
+ )
+
+ if not is_multi_byte_decoder:
+ target_languages: List[str] = encoding_languages(encoding_iana)
+ else:
+ target_languages = mb_encoding_languages(encoding_iana)
+
+ if target_languages:
+ logger.log(
+ TRACE,
+ "{} should target any language(s) of {}".format(
+ encoding_iana, str(target_languages)
+ ),
+ )
+
+ cd_ratios = []
+
+        # We shall skip the CD when it's about ASCII;
+        # most of the time it's not relevant to run "language-detection" on it.
+ if encoding_iana != "ascii":
+ for chunk in md_chunks:
+ chunk_languages = coherence_ratio(
+ chunk,
+ language_threshold,
+ ",".join(target_languages) if target_languages else None,
+ )
+
+ cd_ratios.append(chunk_languages)
+
+ cd_ratios_merged = merge_coherence_ratios(cd_ratios)
+
+ if cd_ratios_merged:
+ logger.log(
+ TRACE,
+ "We detected language {} using {}".format(
+ cd_ratios_merged, encoding_iana
+ ),
+ )
+
+ results.append(
+ CharsetMatch(
+ sequences,
+ encoding_iana,
+ mean_mess_ratio,
+ bom_or_sig_available,
+ cd_ratios_merged,
+ decoded_payload,
+ )
+ )
+
+ if (
+ encoding_iana in [specified_encoding, "ascii", "utf_8"]
+ and mean_mess_ratio < 0.1
+ ):
+ logger.debug(
+ "Encoding detection: %s is most likely the one.", encoding_iana
+ )
+ if explain:
+ logger.removeHandler(explain_handler)
+ logger.setLevel(previous_logger_level)
+ return CharsetMatches([results[encoding_iana]])
+
+ if encoding_iana == sig_encoding:
+ logger.debug(
+ "Encoding detection: %s is most likely the one as we detected a BOM or SIG within "
+ "the beginning of the sequence.",
+ encoding_iana,
+ )
+ if explain:
+ logger.removeHandler(explain_handler)
+ logger.setLevel(previous_logger_level)
+ return CharsetMatches([results[encoding_iana]])
+
+ if len(results) == 0:
+ if fallback_u8 or fallback_ascii or fallback_specified:
+ logger.log(
+ TRACE,
+ "Nothing got out of the detection process. Using ASCII/UTF-8/Specified fallback.",
+ )
+
+ if fallback_specified:
+ logger.debug(
+ "Encoding detection: %s will be used as a fallback match",
+ fallback_specified.encoding,
+ )
+ results.append(fallback_specified)
+ elif (
+ (fallback_u8 and fallback_ascii is None)
+ or (
+ fallback_u8
+ and fallback_ascii
+ and fallback_u8.fingerprint != fallback_ascii.fingerprint
+ )
+ or (fallback_u8 is not None)
+ ):
+ logger.debug("Encoding detection: utf_8 will be used as a fallback match")
+ results.append(fallback_u8)
+ elif fallback_ascii:
+ logger.debug("Encoding detection: ascii will be used as a fallback match")
+ results.append(fallback_ascii)
+
+ if results:
+ logger.debug(
+ "Encoding detection: Found %s as plausible (best-candidate) for content. With %i alternatives.",
+ results.best().encoding, # type: ignore
+ len(results) - 1,
+ )
+ else:
+ logger.debug("Encoding detection: Unable to determine any suitable charset.")
+
+ if explain:
+ logger.removeHandler(explain_handler)
+ logger.setLevel(previous_logger_level)
+
+ return results
+
+
+def from_fp(
+ fp: BinaryIO,
+ steps: int = 5,
+ chunk_size: int = 512,
+ threshold: float = 0.20,
+ cp_isolation: Optional[List[str]] = None,
+ cp_exclusion: Optional[List[str]] = None,
+ preemptive_behaviour: bool = True,
+ explain: bool = False,
+ language_threshold: float = 0.1,
+ enable_fallback: bool = True,
+) -> CharsetMatches:
+ """
+    Same as the function from_bytes, but using a file pointer that is already opened and ready.
+ Will not close the file pointer.
+ """
+ return from_bytes(
+ fp.read(),
+ steps,
+ chunk_size,
+ threshold,
+ cp_isolation,
+ cp_exclusion,
+ preemptive_behaviour,
+ explain,
+ language_threshold,
+ enable_fallback,
+ )
+
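+# A minimal usage sketch (hedged; "sample.txt" is an assumed placeholder file):
+#
+#     with open("sample.txt", "rb") as fp:
+#         best = from_fp(fp).best()
+#     if best is not None:
+#         print(best.encoding)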
+
+def from_path(
+ path: Union[str, bytes, PathLike], # type: ignore[type-arg]
+ steps: int = 5,
+ chunk_size: int = 512,
+ threshold: float = 0.20,
+ cp_isolation: Optional[List[str]] = None,
+ cp_exclusion: Optional[List[str]] = None,
+ preemptive_behaviour: bool = True,
+ explain: bool = False,
+ language_threshold: float = 0.1,
+ enable_fallback: bool = True,
+) -> CharsetMatches:
+ """
+    Same as the function from_bytes, but with one extra step: opening and reading the given file path in binary mode.
+ Can raise IOError.
+ """
+ with open(path, "rb") as fp:
+ return from_fp(
+ fp,
+ steps,
+ chunk_size,
+ threshold,
+ cp_isolation,
+ cp_exclusion,
+ preemptive_behaviour,
+ explain,
+ language_threshold,
+ enable_fallback,
+ )
+
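+# A minimal usage sketch (hedged; "sample.txt" is an assumed placeholder file):
+#
+#     best = from_path("sample.txt").best()
+#     if best is not None:
+#         print(best.encoding, best.language)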
+
+def is_binary(
+ fp_or_path_or_payload: Union[PathLike, str, BinaryIO, bytes], # type: ignore[type-arg]
+ steps: int = 5,
+ chunk_size: int = 512,
+ threshold: float = 0.20,
+ cp_isolation: Optional[List[str]] = None,
+ cp_exclusion: Optional[List[str]] = None,
+ preemptive_behaviour: bool = True,
+ explain: bool = False,
+ language_threshold: float = 0.1,
+ enable_fallback: bool = False,
+) -> bool:
+ """
+    Detect whether the given input (file, bytes, or path) points to binary content, i.e. not text.
+    Based on the same main heuristic algorithms and default kwargs, with the sole exception that fallback matches
+    are disabled, making the check stricter on content that is ASCII-compatible but unlikely to be text.
+ """
+ if isinstance(fp_or_path_or_payload, (str, PathLike)):
+ guesses = from_path(
+ fp_or_path_or_payload,
+ steps=steps,
+ chunk_size=chunk_size,
+ threshold=threshold,
+ cp_isolation=cp_isolation,
+ cp_exclusion=cp_exclusion,
+ preemptive_behaviour=preemptive_behaviour,
+ explain=explain,
+ language_threshold=language_threshold,
+ enable_fallback=enable_fallback,
+ )
+ elif isinstance(
+ fp_or_path_or_payload,
+ (
+ bytes,
+ bytearray,
+ ),
+ ):
+ guesses = from_bytes(
+ fp_or_path_or_payload,
+ steps=steps,
+ chunk_size=chunk_size,
+ threshold=threshold,
+ cp_isolation=cp_isolation,
+ cp_exclusion=cp_exclusion,
+ preemptive_behaviour=preemptive_behaviour,
+ explain=explain,
+ language_threshold=language_threshold,
+ enable_fallback=enable_fallback,
+ )
+ else:
+ guesses = from_fp(
+ fp_or_path_or_payload,
+ steps=steps,
+ chunk_size=chunk_size,
+ threshold=threshold,
+ cp_isolation=cp_isolation,
+ cp_exclusion=cp_exclusion,
+ preemptive_behaviour=preemptive_behaviour,
+ explain=explain,
+ language_threshold=language_threshold,
+ enable_fallback=enable_fallback,
+ )
+
+ return not guesses
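+
+
+# A minimal usage sketch (hedged): null bytes almost always mark binary content.
+#
+#     is_binary(b"\x00\x01\xff")   # expected True (undecodable payload)
+#     is_binary(b"hello world")    # expected False (plain ASCII text)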
diff --git a/billinglayer/python/charset_normalizer/assets/__init__.py b/billinglayer/python/charset_normalizer/assets/__init__.py
new file mode 100644
index 0000000..9075930
--- /dev/null
+++ b/billinglayer/python/charset_normalizer/assets/__init__.py
@@ -0,0 +1,1440 @@
+# -*- coding: utf-8 -*-
+from typing import Dict, List
+
+# Language labels that contain the em dash "—" character are to be
+# considered alternative frequency sequences of the original language.
+FREQUENCIES: Dict[str, List[str]] = {
+ "English": [
+ "e",
+ "a",
+ "t",
+ "i",
+ "o",
+ "n",
+ "s",
+ "r",
+ "h",
+ "l",
+ "d",
+ "c",
+ "u",
+ "m",
+ "f",
+ "p",
+ "g",
+ "w",
+ "y",
+ "b",
+ "v",
+ "k",
+ "x",
+ "j",
+ "z",
+ "q",
+ ],
+ "English—": [
+ "e",
+ "a",
+ "t",
+ "i",
+ "o",
+ "n",
+ "s",
+ "r",
+ "h",
+ "l",
+ "d",
+ "c",
+ "m",
+ "u",
+ "f",
+ "p",
+ "g",
+ "w",
+ "b",
+ "y",
+ "v",
+ "k",
+ "j",
+ "x",
+ "z",
+ "q",
+ ],
+ "German": [
+ "e",
+ "n",
+ "i",
+ "r",
+ "s",
+ "t",
+ "a",
+ "d",
+ "h",
+ "u",
+ "l",
+ "g",
+ "o",
+ "c",
+ "m",
+ "b",
+ "f",
+ "k",
+ "w",
+ "z",
+ "p",
+ "v",
+ "ü",
+ "ä",
+ "ö",
+ "j",
+ ],
+ "French": [
+ "e",
+ "a",
+ "s",
+ "n",
+ "i",
+ "t",
+ "r",
+ "l",
+ "u",
+ "o",
+ "d",
+ "c",
+ "p",
+ "m",
+ "é",
+ "v",
+ "g",
+ "f",
+ "b",
+ "h",
+ "q",
+ "à",
+ "x",
+ "è",
+ "y",
+ "j",
+ ],
+ "Dutch": [
+ "e",
+ "n",
+ "a",
+ "i",
+ "r",
+ "t",
+ "o",
+ "d",
+ "s",
+ "l",
+ "g",
+ "h",
+ "v",
+ "m",
+ "u",
+ "k",
+ "c",
+ "p",
+ "b",
+ "w",
+ "j",
+ "z",
+ "f",
+ "y",
+ "x",
+ "ë",
+ ],
+ "Italian": [
+ "e",
+ "i",
+ "a",
+ "o",
+ "n",
+ "l",
+ "t",
+ "r",
+ "s",
+ "c",
+ "d",
+ "u",
+ "p",
+ "m",
+ "g",
+ "v",
+ "f",
+ "b",
+ "z",
+ "h",
+ "q",
+ "è",
+ "à",
+ "k",
+ "y",
+ "ò",
+ ],
+ "Polish": [
+ "a",
+ "i",
+ "o",
+ "e",
+ "n",
+ "r",
+ "z",
+ "w",
+ "s",
+ "c",
+ "t",
+ "k",
+ "y",
+ "d",
+ "p",
+ "m",
+ "u",
+ "l",
+ "j",
+ "ł",
+ "g",
+ "b",
+ "h",
+ "ą",
+ "ę",
+ "ó",
+ ],
+ "Spanish": [
+ "e",
+ "a",
+ "o",
+ "n",
+ "s",
+ "r",
+ "i",
+ "l",
+ "d",
+ "t",
+ "c",
+ "u",
+ "m",
+ "p",
+ "b",
+ "g",
+ "v",
+ "f",
+ "y",
+ "ó",
+ "h",
+ "q",
+ "í",
+ "j",
+ "z",
+ "á",
+ ],
+ "Russian": [
+ "о",
+ "а",
+ "е",
+ "и",
+ "н",
+ "с",
+ "т",
+ "р",
+ "в",
+ "л",
+ "к",
+ "м",
+ "д",
+ "п",
+ "у",
+ "г",
+ "я",
+ "ы",
+ "з",
+ "б",
+ "й",
+ "ь",
+ "ч",
+ "х",
+ "ж",
+ "ц",
+ ],
+ # Jap-Kanji
+ "Japanese": [
+ "人",
+ "一",
+ "大",
+ "亅",
+ "丁",
+ "丨",
+ "竹",
+ "笑",
+ "口",
+ "日",
+ "今",
+ "二",
+ "彳",
+ "行",
+ "十",
+ "土",
+ "丶",
+ "寸",
+ "寺",
+ "時",
+ "乙",
+ "丿",
+ "乂",
+ "气",
+ "気",
+ "冂",
+ "巾",
+ "亠",
+ "市",
+ "目",
+ "儿",
+ "見",
+ "八",
+ "小",
+ "凵",
+ "県",
+ "月",
+ "彐",
+ "門",
+ "間",
+ "木",
+ "東",
+ "山",
+ "出",
+ "本",
+ "中",
+ "刀",
+ "分",
+ "耳",
+ "又",
+ "取",
+ "最",
+ "言",
+ "田",
+ "心",
+ "思",
+ "刂",
+ "前",
+ "京",
+ "尹",
+ "事",
+ "生",
+ "厶",
+ "云",
+ "会",
+ "未",
+ "来",
+ "白",
+ "冫",
+ "楽",
+ "灬",
+ "馬",
+ "尸",
+ "尺",
+ "駅",
+ "明",
+ "耂",
+ "者",
+ "了",
+ "阝",
+ "都",
+ "高",
+ "卜",
+ "占",
+ "厂",
+ "广",
+ "店",
+ "子",
+ "申",
+ "奄",
+ "亻",
+ "俺",
+ "上",
+ "方",
+ "冖",
+ "学",
+ "衣",
+ "艮",
+ "食",
+ "自",
+ ],
+ # Jap-Katakana
+ "Japanese—": [
+ "ー",
+ "ン",
+ "ス",
+ "・",
+ "ル",
+ "ト",
+ "リ",
+ "イ",
+ "ア",
+ "ラ",
+ "ッ",
+ "ク",
+ "ド",
+ "シ",
+ "レ",
+ "ジ",
+ "タ",
+ "フ",
+ "ロ",
+ "カ",
+ "テ",
+ "マ",
+ "ィ",
+ "グ",
+ "バ",
+ "ム",
+ "プ",
+ "オ",
+ "コ",
+ "デ",
+ "ニ",
+ "ウ",
+ "メ",
+ "サ",
+ "ビ",
+ "ナ",
+ "ブ",
+ "ャ",
+ "エ",
+ "ュ",
+ "チ",
+ "キ",
+ "ズ",
+ "ダ",
+ "パ",
+ "ミ",
+ "ェ",
+ "ョ",
+ "ハ",
+ "セ",
+ "ベ",
+ "ガ",
+ "モ",
+ "ツ",
+ "ネ",
+ "ボ",
+ "ソ",
+ "ノ",
+ "ァ",
+ "ヴ",
+ "ワ",
+ "ポ",
+ "ペ",
+ "ピ",
+ "ケ",
+ "ゴ",
+ "ギ",
+ "ザ",
+ "ホ",
+ "ゲ",
+ "ォ",
+ "ヤ",
+ "ヒ",
+ "ユ",
+ "ヨ",
+ "ヘ",
+ "ゼ",
+ "ヌ",
+ "ゥ",
+ "ゾ",
+ "ヶ",
+ "ヂ",
+ "ヲ",
+ "ヅ",
+ "ヵ",
+ "ヱ",
+ "ヰ",
+ "ヮ",
+ "ヽ",
+ "゠",
+ "ヾ",
+ "ヷ",
+ "ヿ",
+ "ヸ",
+ "ヹ",
+ "ヺ",
+ ],
+ # Jap-Hiragana
+ "Japanese——": [
+ "の",
+ "に",
+ "る",
+ "た",
+ "と",
+ "は",
+ "し",
+ "い",
+ "を",
+ "で",
+ "て",
+ "が",
+ "な",
+ "れ",
+ "か",
+ "ら",
+ "さ",
+ "っ",
+ "り",
+ "す",
+ "あ",
+ "も",
+ "こ",
+ "ま",
+ "う",
+ "く",
+ "よ",
+ "き",
+ "ん",
+ "め",
+ "お",
+ "け",
+ "そ",
+ "つ",
+ "だ",
+ "や",
+ "え",
+ "ど",
+ "わ",
+ "ち",
+ "み",
+ "せ",
+ "じ",
+ "ば",
+ "へ",
+ "び",
+ "ず",
+ "ろ",
+ "ほ",
+ "げ",
+ "む",
+ "べ",
+ "ひ",
+ "ょ",
+ "ゆ",
+ "ぶ",
+ "ご",
+ "ゃ",
+ "ね",
+ "ふ",
+ "ぐ",
+ "ぎ",
+ "ぼ",
+ "ゅ",
+ "づ",
+ "ざ",
+ "ぞ",
+ "ぬ",
+ "ぜ",
+ "ぱ",
+ "ぽ",
+ "ぷ",
+ "ぴ",
+ "ぃ",
+ "ぁ",
+ "ぇ",
+ "ぺ",
+ "ゞ",
+ "ぢ",
+ "ぉ",
+ "ぅ",
+ "ゐ",
+ "ゝ",
+ "ゑ",
+ "゛",
+ "゜",
+ "ゎ",
+ "ゔ",
+ "゚",
+ "ゟ",
+ "゙",
+ "ゕ",
+ "ゖ",
+ ],
+ "Portuguese": [
+ "a",
+ "e",
+ "o",
+ "s",
+ "i",
+ "r",
+ "d",
+ "n",
+ "t",
+ "m",
+ "u",
+ "c",
+ "l",
+ "p",
+ "g",
+ "v",
+ "b",
+ "f",
+ "h",
+ "ã",
+ "q",
+ "é",
+ "ç",
+ "á",
+ "z",
+ "í",
+ ],
+ "Swedish": [
+ "e",
+ "a",
+ "n",
+ "r",
+ "t",
+ "s",
+ "i",
+ "l",
+ "d",
+ "o",
+ "m",
+ "k",
+ "g",
+ "v",
+ "h",
+ "f",
+ "u",
+ "p",
+ "ä",
+ "c",
+ "b",
+ "ö",
+ "å",
+ "y",
+ "j",
+ "x",
+ ],
+ "Chinese": [
+ "的",
+ "一",
+ "是",
+ "不",
+ "了",
+ "在",
+ "人",
+ "有",
+ "我",
+ "他",
+ "这",
+ "个",
+ "们",
+ "中",
+ "来",
+ "上",
+ "大",
+ "为",
+ "和",
+ "国",
+ "地",
+ "到",
+ "以",
+ "说",
+ "时",
+ "要",
+ "就",
+ "出",
+ "会",
+ "可",
+ "也",
+ "你",
+ "对",
+ "生",
+ "能",
+ "而",
+ "子",
+ "那",
+ "得",
+ "于",
+ "着",
+ "下",
+ "自",
+ "之",
+ "年",
+ "过",
+ "发",
+ "后",
+ "作",
+ "里",
+ "用",
+ "道",
+ "行",
+ "所",
+ "然",
+ "家",
+ "种",
+ "事",
+ "成",
+ "方",
+ "多",
+ "经",
+ "么",
+ "去",
+ "法",
+ "学",
+ "如",
+ "都",
+ "同",
+ "现",
+ "当",
+ "没",
+ "动",
+ "面",
+ "起",
+ "看",
+ "定",
+ "天",
+ "分",
+ "还",
+ "进",
+ "好",
+ "小",
+ "部",
+ "其",
+ "些",
+ "主",
+ "样",
+ "理",
+ "心",
+ "她",
+ "本",
+ "前",
+ "开",
+ "但",
+ "因",
+ "只",
+ "从",
+ "想",
+ "实",
+ ],
+ "Ukrainian": [
+ "о",
+ "а",
+ "н",
+ "і",
+ "и",
+ "р",
+ "в",
+ "т",
+ "е",
+ "с",
+ "к",
+ "л",
+ "у",
+ "д",
+ "м",
+ "п",
+ "з",
+ "я",
+ "ь",
+ "б",
+ "г",
+ "й",
+ "ч",
+ "х",
+ "ц",
+ "ї",
+ ],
+ "Norwegian": [
+ "e",
+ "r",
+ "n",
+ "t",
+ "a",
+ "s",
+ "i",
+ "o",
+ "l",
+ "d",
+ "g",
+ "k",
+ "m",
+ "v",
+ "f",
+ "p",
+ "u",
+ "b",
+ "h",
+ "å",
+ "y",
+ "j",
+ "ø",
+ "c",
+ "æ",
+ "w",
+ ],
+ "Finnish": [
+ "a",
+ "i",
+ "n",
+ "t",
+ "e",
+ "s",
+ "l",
+ "o",
+ "u",
+ "k",
+ "ä",
+ "m",
+ "r",
+ "v",
+ "j",
+ "h",
+ "p",
+ "y",
+ "d",
+ "ö",
+ "g",
+ "c",
+ "b",
+ "f",
+ "w",
+ "z",
+ ],
+ "Vietnamese": [
+ "n",
+ "h",
+ "t",
+ "i",
+ "c",
+ "g",
+ "a",
+ "o",
+ "u",
+ "m",
+ "l",
+ "r",
+ "à",
+ "đ",
+ "s",
+ "e",
+ "v",
+ "p",
+ "b",
+ "y",
+ "ư",
+ "d",
+ "á",
+ "k",
+ "ộ",
+ "ế",
+ ],
+ "Czech": [
+ "o",
+ "e",
+ "a",
+ "n",
+ "t",
+ "s",
+ "i",
+ "l",
+ "v",
+ "r",
+ "k",
+ "d",
+ "u",
+ "m",
+ "p",
+ "í",
+ "c",
+ "h",
+ "z",
+ "á",
+ "y",
+ "j",
+ "b",
+ "ě",
+ "é",
+ "ř",
+ ],
+ "Hungarian": [
+ "e",
+ "a",
+ "t",
+ "l",
+ "s",
+ "n",
+ "k",
+ "r",
+ "i",
+ "o",
+ "z",
+ "á",
+ "é",
+ "g",
+ "m",
+ "b",
+ "y",
+ "v",
+ "d",
+ "h",
+ "u",
+ "p",
+ "j",
+ "ö",
+ "f",
+ "c",
+ ],
+ "Korean": [
+ "이",
+ "다",
+ "에",
+ "의",
+ "는",
+ "로",
+ "하",
+ "을",
+ "가",
+ "고",
+ "지",
+ "서",
+ "한",
+ "은",
+ "기",
+ "으",
+ "년",
+ "대",
+ "사",
+ "시",
+ "를",
+ "리",
+ "도",
+ "인",
+ "스",
+ "일",
+ ],
+ "Indonesian": [
+ "a",
+ "n",
+ "e",
+ "i",
+ "r",
+ "t",
+ "u",
+ "s",
+ "d",
+ "k",
+ "m",
+ "l",
+ "g",
+ "p",
+ "b",
+ "o",
+ "h",
+ "y",
+ "j",
+ "c",
+ "w",
+ "f",
+ "v",
+ "z",
+ "x",
+ "q",
+ ],
+ "Turkish": [
+ "a",
+ "e",
+ "i",
+ "n",
+ "r",
+ "l",
+ "ı",
+ "k",
+ "d",
+ "t",
+ "s",
+ "m",
+ "y",
+ "u",
+ "o",
+ "b",
+ "ü",
+ "ş",
+ "v",
+ "g",
+ "z",
+ "h",
+ "c",
+ "p",
+ "ç",
+ "ğ",
+ ],
+ "Romanian": [
+ "e",
+ "i",
+ "a",
+ "r",
+ "n",
+ "t",
+ "u",
+ "l",
+ "o",
+ "c",
+ "s",
+ "d",
+ "p",
+ "m",
+ "ă",
+ "f",
+ "v",
+ "î",
+ "g",
+ "b",
+ "ș",
+ "ț",
+ "z",
+ "h",
+ "â",
+ "j",
+ ],
+ "Farsi": [
+ "ا",
+ "ی",
+ "ر",
+ "د",
+ "ن",
+ "ه",
+ "و",
+ "م",
+ "ت",
+ "ب",
+ "س",
+ "ل",
+ "ک",
+ "ش",
+ "ز",
+ "ف",
+ "گ",
+ "ع",
+ "خ",
+ "ق",
+ "ج",
+ "آ",
+ "پ",
+ "ح",
+ "ط",
+ "ص",
+ ],
+ "Arabic": [
+ "ا",
+ "ل",
+ "ي",
+ "م",
+ "و",
+ "ن",
+ "ر",
+ "ت",
+ "ب",
+ "ة",
+ "ع",
+ "د",
+ "س",
+ "ف",
+ "ه",
+ "ك",
+ "ق",
+ "أ",
+ "ح",
+ "ج",
+ "ش",
+ "ط",
+ "ص",
+ "ى",
+ "خ",
+ "إ",
+ ],
+ "Danish": [
+ "e",
+ "r",
+ "n",
+ "t",
+ "a",
+ "i",
+ "s",
+ "d",
+ "l",
+ "o",
+ "g",
+ "m",
+ "k",
+ "f",
+ "v",
+ "u",
+ "b",
+ "h",
+ "p",
+ "å",
+ "y",
+ "ø",
+ "æ",
+ "c",
+ "j",
+ "w",
+ ],
+ "Serbian": [
+ "а",
+ "и",
+ "о",
+ "е",
+ "н",
+ "р",
+ "с",
+ "у",
+ "т",
+ "к",
+ "ј",
+ "в",
+ "д",
+ "м",
+ "п",
+ "л",
+ "г",
+ "з",
+ "б",
+ "a",
+ "i",
+ "e",
+ "o",
+ "n",
+ "ц",
+ "ш",
+ ],
+ "Lithuanian": [
+ "i",
+ "a",
+ "s",
+ "o",
+ "r",
+ "e",
+ "t",
+ "n",
+ "u",
+ "k",
+ "m",
+ "l",
+ "p",
+ "v",
+ "d",
+ "j",
+ "g",
+ "ė",
+ "b",
+ "y",
+ "ų",
+ "š",
+ "ž",
+ "c",
+ "ą",
+ "į",
+ ],
+ "Slovene": [
+ "e",
+ "a",
+ "i",
+ "o",
+ "n",
+ "r",
+ "s",
+ "l",
+ "t",
+ "j",
+ "v",
+ "k",
+ "d",
+ "p",
+ "m",
+ "u",
+ "z",
+ "b",
+ "g",
+ "h",
+ "č",
+ "c",
+ "š",
+ "ž",
+ "f",
+ "y",
+ ],
+ "Slovak": [
+ "o",
+ "a",
+ "e",
+ "n",
+ "i",
+ "r",
+ "v",
+ "t",
+ "s",
+ "l",
+ "k",
+ "d",
+ "m",
+ "p",
+ "u",
+ "c",
+ "h",
+ "j",
+ "b",
+ "z",
+ "á",
+ "y",
+ "ý",
+ "í",
+ "č",
+ "é",
+ ],
+ "Hebrew": [
+ "י",
+ "ו",
+ "ה",
+ "ל",
+ "ר",
+ "ב",
+ "ת",
+ "מ",
+ "א",
+ "ש",
+ "נ",
+ "ע",
+ "ם",
+ "ד",
+ "ק",
+ "ח",
+ "פ",
+ "ס",
+ "כ",
+ "ג",
+ "ט",
+ "צ",
+ "ן",
+ "ז",
+ "ך",
+ ],
+ "Bulgarian": [
+ "а",
+ "и",
+ "о",
+ "е",
+ "н",
+ "т",
+ "р",
+ "с",
+ "в",
+ "л",
+ "к",
+ "д",
+ "п",
+ "м",
+ "з",
+ "г",
+ "я",
+ "ъ",
+ "у",
+ "б",
+ "ч",
+ "ц",
+ "й",
+ "ж",
+ "щ",
+ "х",
+ ],
+ "Croatian": [
+ "a",
+ "i",
+ "o",
+ "e",
+ "n",
+ "r",
+ "j",
+ "s",
+ "t",
+ "u",
+ "k",
+ "l",
+ "v",
+ "d",
+ "m",
+ "p",
+ "g",
+ "z",
+ "b",
+ "c",
+ "č",
+ "h",
+ "š",
+ "ž",
+ "ć",
+ "f",
+ ],
+ "Hindi": [
+ "क",
+ "र",
+ "स",
+ "न",
+ "त",
+ "म",
+ "ह",
+ "प",
+ "य",
+ "ल",
+ "व",
+ "ज",
+ "द",
+ "ग",
+ "ब",
+ "श",
+ "ट",
+ "अ",
+ "ए",
+ "थ",
+ "भ",
+ "ड",
+ "च",
+ "ध",
+ "ष",
+ "इ",
+ ],
+ "Estonian": [
+ "a",
+ "i",
+ "e",
+ "s",
+ "t",
+ "l",
+ "u",
+ "n",
+ "o",
+ "k",
+ "r",
+ "d",
+ "m",
+ "v",
+ "g",
+ "p",
+ "j",
+ "h",
+ "ä",
+ "b",
+ "õ",
+ "ü",
+ "f",
+ "c",
+ "ö",
+ "y",
+ ],
+ "Thai": [
+ "า",
+ "น",
+ "ร",
+ "อ",
+ "ก",
+ "เ",
+ "ง",
+ "ม",
+ "ย",
+ "ล",
+ "ว",
+ "ด",
+ "ท",
+ "ส",
+ "ต",
+ "ะ",
+ "ป",
+ "บ",
+ "ค",
+ "ห",
+ "แ",
+ "จ",
+ "พ",
+ "ช",
+ "ข",
+ "ใ",
+ ],
+ "Greek": [
+ "α",
+ "τ",
+ "ο",
+ "ι",
+ "ε",
+ "ν",
+ "ρ",
+ "σ",
+ "κ",
+ "η",
+ "π",
+ "ς",
+ "υ",
+ "μ",
+ "λ",
+ "ί",
+ "ό",
+ "ά",
+ "γ",
+ "έ",
+ "δ",
+ "ή",
+ "ω",
+ "χ",
+ "θ",
+ "ύ",
+ ],
+ "Tamil": [
+ "க",
+ "த",
+ "ப",
+ "ட",
+ "ர",
+ "ம",
+ "ல",
+ "ன",
+ "வ",
+ "ற",
+ "ய",
+ "ள",
+ "ச",
+ "ந",
+ "இ",
+ "ண",
+ "அ",
+ "ஆ",
+ "ழ",
+ "ங",
+ "எ",
+ "உ",
+ "ஒ",
+ "ஸ",
+ ],
+ "Kazakh": [
+ "а",
+ "ы",
+ "е",
+ "н",
+ "т",
+ "р",
+ "л",
+ "і",
+ "д",
+ "с",
+ "м",
+ "қ",
+ "к",
+ "о",
+ "б",
+ "и",
+ "у",
+ "ғ",
+ "ж",
+ "ң",
+ "з",
+ "ш",
+ "й",
+ "п",
+ "г",
+ "ө",
+ ],
+}
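+
+# Lookup sketch (hedged): each list is ordered from most to least frequent, so
+# FREQUENCIES["English"][0] == "e". Labels carrying an em dash, such as
+# "English—" or "Japanese——", are alternative orderings folded back into the
+# base language by filter_alt_coherence_matches() in cd.py.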
diff --git a/billinglayer/python/charset_normalizer/assets/__pycache__/__init__.cpython-311.pyc b/billinglayer/python/charset_normalizer/assets/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..d01ef7d
Binary files /dev/null and b/billinglayer/python/charset_normalizer/assets/__pycache__/__init__.cpython-311.pyc differ
diff --git a/billinglayer/python/charset_normalizer/cd.py b/billinglayer/python/charset_normalizer/cd.py
new file mode 100644
index 0000000..6e56fe8
--- /dev/null
+++ b/billinglayer/python/charset_normalizer/cd.py
@@ -0,0 +1,390 @@
+import importlib
+from codecs import IncrementalDecoder
+from collections import Counter
+from functools import lru_cache
+from typing import Counter as TypeCounter, Dict, List, Optional, Tuple
+
+from .assets import FREQUENCIES
+from .constant import KO_NAMES, LANGUAGE_SUPPORTED_COUNT, TOO_SMALL_SEQUENCE, ZH_NAMES
+from .md import is_suspiciously_successive_range
+from .models import CoherenceMatches
+from .utils import (
+ is_accentuated,
+ is_latin,
+ is_multi_byte_encoding,
+ is_unicode_range_secondary,
+ unicode_range,
+)
+
+
+def encoding_unicode_range(iana_name: str) -> List[str]:
+ """
+    Return the unicode ranges associated with a single-byte code page.
+ """
+ if is_multi_byte_encoding(iana_name):
+ raise IOError("Function not supported on multi-byte code page")
+
+ decoder = importlib.import_module(
+ "encodings.{}".format(iana_name)
+ ).IncrementalDecoder
+
+ p: IncrementalDecoder = decoder(errors="ignore")
+ seen_ranges: Dict[str, int] = {}
+ character_count: int = 0
+
+ for i in range(0x40, 0xFF):
+ chunk: str = p.decode(bytes([i]))
+
+ if chunk:
+ character_range: Optional[str] = unicode_range(chunk)
+
+ if character_range is None:
+ continue
+
+ if is_unicode_range_secondary(character_range) is False:
+ if character_range not in seen_ranges:
+ seen_ranges[character_range] = 0
+ seen_ranges[character_range] += 1
+ character_count += 1
+
+ return sorted(
+ [
+ character_range
+ for character_range in seen_ranges
+ if seen_ranges[character_range] / character_count >= 0.15
+ ]
+ )
+
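+# Usage sketch (hedged; the exact list depends on the installed codecs):
+#
+#     encoding_unicode_range("cp1251")
+#     # e.g. ['Basic Latin', 'Cyrillic'], i.e. the ranges covering >= 15%
+#     # of the decodable 0x40-0xFF byte space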
+
+def unicode_range_languages(primary_range: str) -> List[str]:
+ """
+ Return inferred languages used with a unicode range.
+ """
+ languages: List[str] = []
+
+ for language, characters in FREQUENCIES.items():
+ for character in characters:
+ if unicode_range(character) == primary_range:
+ languages.append(language)
+ break
+
+ return languages
+
+
+@lru_cache()
+def encoding_languages(iana_name: str) -> List[str]:
+ """
+    Single-byte encoding language association. Some code pages are heavily linked to particular language(s).
+    This function resolves that correspondence.
+ """
+ unicode_ranges: List[str] = encoding_unicode_range(iana_name)
+ primary_range: Optional[str] = None
+
+ for specified_range in unicode_ranges:
+ if "Latin" not in specified_range:
+ primary_range = specified_range
+ break
+
+ if primary_range is None:
+ return ["Latin Based"]
+
+ return unicode_range_languages(primary_range)
+
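+# Usage sketch (hedged): for a Cyrillic code page the first non-Latin range
+# drives the association.
+#
+#     encoding_languages("cp1251")   # e.g. ['Russian', 'Ukrainian', ...]
+#     encoding_languages("ascii")    # ['Latin Based'] (no non-Latin range found)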
+
+@lru_cache()
+def mb_encoding_languages(iana_name: str) -> List[str]:
+ """
+    Multi-byte encoding language association. Some code pages are heavily linked to particular language(s).
+    This function resolves that correspondence.
+ """
+ if (
+ iana_name.startswith("shift_")
+ or iana_name.startswith("iso2022_jp")
+ or iana_name.startswith("euc_j")
+ or iana_name == "cp932"
+ ):
+ return ["Japanese"]
+ if iana_name.startswith("gb") or iana_name in ZH_NAMES:
+ return ["Chinese"]
+ if iana_name.startswith("iso2022_kr") or iana_name in KO_NAMES:
+ return ["Korean"]
+
+ return []
+
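+# Usage sketch (these mappings follow directly from the branches above):
+#
+#     mb_encoding_languages("shift_jis")   # ['Japanese']
+#     mb_encoding_languages("big5")        # ['Chinese'] (big5 is in ZH_NAMES)
+#     mb_encoding_languages("utf_8")       # [] (no fixed language association)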
+
+@lru_cache(maxsize=LANGUAGE_SUPPORTED_COUNT)
+def get_target_features(language: str) -> Tuple[bool, bool]:
+ """
+    Determine the main aspects of a supported language: whether it contains accents and whether it is pure Latin.
+ """
+ target_have_accents: bool = False
+ target_pure_latin: bool = True
+
+ for character in FREQUENCIES[language]:
+ if not target_have_accents and is_accentuated(character):
+ target_have_accents = True
+ if target_pure_latin and is_latin(character) is False:
+ target_pure_latin = False
+
+ return target_have_accents, target_pure_latin
+
+
+def alphabet_languages(
+ characters: List[str], ignore_non_latin: bool = False
+) -> List[str]:
+ """
+    Return the languages associated with the given characters.
+ """
+ languages: List[Tuple[str, float]] = []
+
+ source_have_accents = any(is_accentuated(character) for character in characters)
+
+ for language, language_characters in FREQUENCIES.items():
+ target_have_accents, target_pure_latin = get_target_features(language)
+
+ if ignore_non_latin and target_pure_latin is False:
+ continue
+
+ if target_have_accents is False and source_have_accents:
+ continue
+
+ character_count: int = len(language_characters)
+
+ character_match_count: int = len(
+ [c for c in language_characters if c in characters]
+ )
+
+ ratio: float = character_match_count / character_count
+
+ if ratio >= 0.2:
+ languages.append((language, ratio))
+
+ languages = sorted(languages, key=lambda x: x[1], reverse=True)
+
+ return [compatible_language[0] for compatible_language in languages]
+
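+# Usage sketch (hedged): the twelve most frequent English letters overlap the
+# 26-entry English table well above the 0.2 cut-off, so English should rank
+# among the results.
+#
+#     alphabet_languages(list("etaoinshrdlu"))   # e.g. ['English', 'Dutch', ...]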
+
+def characters_popularity_compare(
+ language: str, ordered_characters: List[str]
+) -> float:
+ """
+    Determine whether an ordered character list (by occurrence, from most frequent to rarest) matches a particular language.
+    The result is a ratio between 0. (absolutely no correspondence) and 1. (near-perfect fit).
+    Beware that this function is not strict about the match, in order to ease detection. (Meaning a close match counts as 1.)
+ """
+ if language not in FREQUENCIES:
+ raise ValueError("{} not available".format(language))
+
+ character_approved_count: int = 0
+ FREQUENCIES_language_set = set(FREQUENCIES[language])
+
+ ordered_characters_count: int = len(ordered_characters)
+ target_language_characters_count: int = len(FREQUENCIES[language])
+
+ large_alphabet: bool = target_language_characters_count > 26
+
+ for character, character_rank in zip(
+ ordered_characters, range(0, ordered_characters_count)
+ ):
+ if character not in FREQUENCIES_language_set:
+ continue
+
+ character_rank_in_language: int = FREQUENCIES[language].index(character)
+ expected_projection_ratio: float = (
+ target_language_characters_count / ordered_characters_count
+ )
+ character_rank_projection: int = int(character_rank * expected_projection_ratio)
+
+ if (
+ large_alphabet is False
+ and abs(character_rank_projection - character_rank_in_language) > 4
+ ):
+ continue
+
+ if (
+ large_alphabet is True
+ and abs(character_rank_projection - character_rank_in_language)
+ < target_language_characters_count / 3
+ ):
+ character_approved_count += 1
+ continue
+
+ characters_before_source: List[str] = FREQUENCIES[language][
+ 0:character_rank_in_language
+ ]
+ characters_after_source: List[str] = FREQUENCIES[language][
+ character_rank_in_language:
+ ]
+ characters_before: List[str] = ordered_characters[0:character_rank]
+ characters_after: List[str] = ordered_characters[character_rank:]
+
+ before_match_count: int = len(
+ set(characters_before) & set(characters_before_source)
+ )
+
+ after_match_count: int = len(
+ set(characters_after) & set(characters_after_source)
+ )
+
+ if len(characters_before_source) == 0 and before_match_count <= 4:
+ character_approved_count += 1
+ continue
+
+ if len(characters_after_source) == 0 and after_match_count <= 4:
+ character_approved_count += 1
+ continue
+
+ if (
+ before_match_count / len(characters_before_source) >= 0.4
+ or after_match_count / len(characters_after_source) >= 0.4
+ ):
+ character_approved_count += 1
+ continue
+
+ return character_approved_count / len(ordered_characters)
+
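+# Worked example: feeding a language its own frequency table is a perfect
+# rank-for-rank fit, so the ratio is 1.0.
+#
+#     characters_popularity_compare("English", FREQUENCIES["English"])   # 1.0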
+
+def alpha_unicode_split(decoded_sequence: str) -> List[str]:
+ """
+    Given a decoded text sequence, return a list of str: one layer per unicode range / alphabet.
+    E.g. a text containing English/Latin with a bit of Hebrew will return two items in the resulting list;
+    one containing the Latin letters and the other the Hebrew ones.
+ """
+ layers: Dict[str, str] = {}
+
+ for character in decoded_sequence:
+ if character.isalpha() is False:
+ continue
+
+ character_range: Optional[str] = unicode_range(character)
+
+ if character_range is None:
+ continue
+
+ layer_target_range: Optional[str] = None
+
+ for discovered_range in layers:
+ if (
+ is_suspiciously_successive_range(discovered_range, character_range)
+ is False
+ ):
+ layer_target_range = discovered_range
+ break
+
+ if layer_target_range is None:
+ layer_target_range = character_range
+
+ if layer_target_range not in layers:
+ layers[layer_target_range] = character.lower()
+ continue
+
+ layers[layer_target_range] += character.lower()
+
+ return list(layers.values())
+
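+# Usage sketch (hedged): mixed Latin/Hebrew input should split into one
+# lowercase layer per alphabet.
+#
+#     alpha_unicode_split("Hello שלום")   # e.g. ['hello', 'שלום']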
+
+def merge_coherence_ratios(results: List[CoherenceMatches]) -> CoherenceMatches:
+ """
+    This function merges results previously produced by the function coherence_ratio.
+ The return type is the same as coherence_ratio.
+ """
+ per_language_ratios: Dict[str, List[float]] = {}
+ for result in results:
+ for sub_result in result:
+ language, ratio = sub_result
+ if language not in per_language_ratios:
+ per_language_ratios[language] = [ratio]
+ continue
+ per_language_ratios[language].append(ratio)
+
+ merge = [
+ (
+ language,
+ round(
+ sum(per_language_ratios[language]) / len(per_language_ratios[language]),
+ 4,
+ ),
+ )
+ for language in per_language_ratios
+ ]
+
+ return sorted(merge, key=lambda x: x[1], reverse=True)
+
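+# Worked example: per-language ratios are averaged across chunks and sorted
+# best-first.
+#
+#     merge_coherence_ratios([[("English", 0.9)], [("English", 0.8), ("French", 0.5)]])
+#     # -> [('English', 0.85), ('French', 0.5)]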
+
+def filter_alt_coherence_matches(results: CoherenceMatches) -> CoherenceMatches:
+ """
+ We shall NOT return "English—" in CoherenceMatches because it is an alternative
+ of "English". This function only keeps the best match and remove the em-dash in it.
+ """
+ index_results: Dict[str, List[float]] = dict()
+
+ for result in results:
+ language, ratio = result
+ no_em_name: str = language.replace("—", "")
+
+ if no_em_name not in index_results:
+ index_results[no_em_name] = []
+
+ index_results[no_em_name].append(ratio)
+
+ if any(len(index_results[e]) > 1 for e in index_results):
+ filtered_results: CoherenceMatches = []
+
+ for language in index_results:
+ filtered_results.append((language, max(index_results[language])))
+
+ return filtered_results
+
+ return results
+
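+# Worked example: the em-dash alternative folds into its origin, keeping the
+# best ratio.
+#
+#     filter_alt_coherence_matches([("English", 0.6), ("English—", 0.8)])
+#     # -> [('English', 0.8)]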
+
+@lru_cache(maxsize=2048)
+def coherence_ratio(
+ decoded_sequence: str, threshold: float = 0.1, lg_inclusion: Optional[str] = None
+) -> CoherenceMatches:
+ """
+    Detect ANY language that can be identified in the given sequence. The sequence will be analysed by layers.
+ A layer = Character extraction by alphabets/ranges.
+ """
+
+ results: List[Tuple[str, float]] = []
+ ignore_non_latin: bool = False
+
+ sufficient_match_count: int = 0
+
+ lg_inclusion_list = lg_inclusion.split(",") if lg_inclusion is not None else []
+ if "Latin Based" in lg_inclusion_list:
+ ignore_non_latin = True
+ lg_inclusion_list.remove("Latin Based")
+
+ for layer in alpha_unicode_split(decoded_sequence):
+ sequence_frequencies: TypeCounter[str] = Counter(layer)
+ most_common = sequence_frequencies.most_common()
+
+ character_count: int = sum(o for c, o in most_common)
+
+ if character_count <= TOO_SMALL_SEQUENCE:
+ continue
+
+ popular_character_ordered: List[str] = [c for c, o in most_common]
+
+ for language in lg_inclusion_list or alphabet_languages(
+ popular_character_ordered, ignore_non_latin
+ ):
+ ratio: float = characters_popularity_compare(
+ language, popular_character_ordered
+ )
+
+ if ratio < threshold:
+ continue
+ elif ratio >= 0.8:
+ sufficient_match_count += 1
+
+ results.append((language, round(ratio, 4)))
+
+ if sufficient_match_count >= 3:
+ break
+
+ return sorted(
+ filter_alt_coherence_matches(results), key=lambda x: x[1], reverse=True
+ )
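+
+
+# Usage sketch (hedged; exact ratios depend on the frequency tables):
+#
+#     coherence_ratio("Le texte est écrit en français, bien entendu.")
+#     # e.g. [('French', ...), ('English', ...)], i.e. ratios above the
+#     # threshold, best match first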
diff --git a/billinglayer/python/charset_normalizer/cli/__init__.py b/billinglayer/python/charset_normalizer/cli/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/billinglayer/python/charset_normalizer/cli/__pycache__/__init__.cpython-311.pyc b/billinglayer/python/charset_normalizer/cli/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..c9486e8
Binary files /dev/null and b/billinglayer/python/charset_normalizer/cli/__pycache__/__init__.cpython-311.pyc differ
diff --git a/billinglayer/python/charset_normalizer/cli/__pycache__/normalizer.cpython-311.pyc b/billinglayer/python/charset_normalizer/cli/__pycache__/normalizer.cpython-311.pyc
new file mode 100644
index 0000000..997f5ae
Binary files /dev/null and b/billinglayer/python/charset_normalizer/cli/__pycache__/normalizer.cpython-311.pyc differ
diff --git a/billinglayer/python/charset_normalizer/cli/normalizer.py b/billinglayer/python/charset_normalizer/cli/normalizer.py
new file mode 100644
index 0000000..f4bcbaa
--- /dev/null
+++ b/billinglayer/python/charset_normalizer/cli/normalizer.py
@@ -0,0 +1,296 @@
+import argparse
+import sys
+from json import dumps
+from os.path import abspath, basename, dirname, join, realpath
+from platform import python_version
+from typing import List, Optional
+from unicodedata import unidata_version
+
+import charset_normalizer.md as md_module
+from charset_normalizer import from_fp
+from charset_normalizer.models import CliDetectionResult
+from charset_normalizer.version import __version__
+
+
+def query_yes_no(question: str, default: str = "yes") -> bool:
+ """Ask a yes/no question via input() and return their answer.
+
+ "question" is a string that is presented to the user.
+ "default" is the presumed answer if the user just hits .
+ It must be "yes" (the default), "no" or None (meaning
+ an answer is required of the user).
+
+ The "answer" return value is True for "yes" or False for "no".
+
+ Credit goes to (c) https://stackoverflow.com/questions/3041986/apt-command-line-interface-like-yes-no-input
+ """
+ valid = {"yes": True, "y": True, "ye": True, "no": False, "n": False}
+ if default is None:
+ prompt = " [y/n] "
+ elif default == "yes":
+ prompt = " [Y/n] "
+ elif default == "no":
+ prompt = " [y/N] "
+ else:
+ raise ValueError("invalid default answer: '%s'" % default)
+
+ while True:
+ sys.stdout.write(question + prompt)
+ choice = input().lower()
+ if default is not None and choice == "":
+ return valid[default]
+ elif choice in valid:
+ return valid[choice]
+ else:
+ sys.stdout.write("Please respond with 'yes' or 'no' " "(or 'y' or 'n').\n")
+
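+# Usage sketch: with default="no" the prompt renders as '... [y/N] ' and a
+# bare Enter returns False.
+#
+#     if query_yes_no("Overwrite the existing file ?", default="no"):
+#         ...  # proceed with the destructive action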
+
+def cli_detect(argv: Optional[List[str]] = None) -> int:
+ """
+ CLI assistant using ARGV and ArgumentParser
+ :param argv:
+    :return: 0 if everything is fine, anything else signals trouble
+ """
+ parser = argparse.ArgumentParser(
+ description="The Real First Universal Charset Detector. "
+ "Discover originating encoding used on text file. "
+ "Normalize text to unicode."
+ )
+
+ parser.add_argument(
+ "files", type=argparse.FileType("rb"), nargs="+", help="File(s) to be analysed"
+ )
+ parser.add_argument(
+ "-v",
+ "--verbose",
+ action="store_true",
+ default=False,
+ dest="verbose",
+ help="Display complementary information about file if any. "
+ "Stdout will contain logs about the detection process.",
+ )
+ parser.add_argument(
+ "-a",
+ "--with-alternative",
+ action="store_true",
+ default=False,
+ dest="alternatives",
+ help="Output complementary possibilities if any. Top-level JSON WILL be a list.",
+ )
+ parser.add_argument(
+ "-n",
+ "--normalize",
+ action="store_true",
+ default=False,
+ dest="normalize",
+ help="Permit to normalize input file. If not set, program does not write anything.",
+ )
+ parser.add_argument(
+ "-m",
+ "--minimal",
+ action="store_true",
+ default=False,
+ dest="minimal",
+ help="Only output the charset detected to STDOUT. Disabling JSON output.",
+ )
+ parser.add_argument(
+ "-r",
+ "--replace",
+ action="store_true",
+ default=False,
+ dest="replace",
+ help="Replace file when trying to normalize it instead of creating a new one.",
+ )
+ parser.add_argument(
+ "-f",
+ "--force",
+ action="store_true",
+ default=False,
+ dest="force",
+ help="Replace file without asking if you are sure, use this flag with caution.",
+ )
+ parser.add_argument(
+ "-t",
+ "--threshold",
+ action="store",
+ default=0.2,
+ type=float,
+ dest="threshold",
+ help="Define a custom maximum amount of chaos allowed in decoded content. 0. <= chaos <= 1.",
+ )
+ parser.add_argument(
+ "--version",
+ action="version",
+ version="Charset-Normalizer {} - Python {} - Unicode {} - SpeedUp {}".format(
+ __version__,
+ python_version(),
+ unidata_version,
+ "OFF" if md_module.__file__.lower().endswith(".py") else "ON",
+ ),
+ help="Show version information and exit.",
+ )
+
+ args = parser.parse_args(argv)
+
+ if args.replace is True and args.normalize is False:
+ print("Use --replace in addition of --normalize only.", file=sys.stderr)
+ return 1
+
+ if args.force is True and args.replace is False:
+ print("Use --force in addition of --replace only.", file=sys.stderr)
+ return 1
+
+ if args.threshold < 0.0 or args.threshold > 1.0:
+ print("--threshold VALUE should be between 0. AND 1.", file=sys.stderr)
+ return 1
+
+ x_ = []
+
+ for my_file in args.files:
+ matches = from_fp(my_file, threshold=args.threshold, explain=args.verbose)
+
+ best_guess = matches.best()
+
+ if best_guess is None:
+ print(
+ 'Unable to identify originating encoding for "{}". {}'.format(
+ my_file.name,
+ "Maybe try increasing maximum amount of chaos."
+ if args.threshold < 1.0
+ else "",
+ ),
+ file=sys.stderr,
+ )
+ x_.append(
+ CliDetectionResult(
+ abspath(my_file.name),
+ None,
+ [],
+ [],
+ "Unknown",
+ [],
+ False,
+ 1.0,
+ 0.0,
+ None,
+ True,
+ )
+ )
+ else:
+ x_.append(
+ CliDetectionResult(
+ abspath(my_file.name),
+ best_guess.encoding,
+ best_guess.encoding_aliases,
+ [
+ cp
+ for cp in best_guess.could_be_from_charset
+ if cp != best_guess.encoding
+ ],
+ best_guess.language,
+ best_guess.alphabets,
+ best_guess.bom,
+ best_guess.percent_chaos,
+ best_guess.percent_coherence,
+ None,
+ True,
+ )
+ )
+
+ if len(matches) > 1 and args.alternatives:
+ for el in matches:
+ if el != best_guess:
+ x_.append(
+ CliDetectionResult(
+ abspath(my_file.name),
+ el.encoding,
+ el.encoding_aliases,
+ [
+ cp
+ for cp in el.could_be_from_charset
+ if cp != el.encoding
+ ],
+ el.language,
+ el.alphabets,
+ el.bom,
+ el.percent_chaos,
+ el.percent_coherence,
+ None,
+ False,
+ )
+ )
+
+ if args.normalize is True:
+ if best_guess.encoding.startswith("utf") is True:
+ print(
+ '"{}" file does not need to be normalized, as it already came from unicode.'.format(
+ my_file.name
+ ),
+ file=sys.stderr,
+ )
+ if my_file.closed is False:
+ my_file.close()
+ continue
+
+ dir_path = dirname(realpath(my_file.name))
+ file_name = basename(realpath(my_file.name))
+
+ o_: List[str] = file_name.split(".")
+
+ if args.replace is False:
+ o_.insert(-1, best_guess.encoding)
+ if my_file.closed is False:
+ my_file.close()
+ elif (
+ args.force is False
+ and query_yes_no(
+                    'Are you sure you want to normalize "{}" by replacing it?'.format(
+ my_file.name
+ ),
+ "no",
+ )
+ is False
+ ):
+ if my_file.closed is False:
+ my_file.close()
+ continue
+
+ try:
+ x_[0].unicode_path = join(dir_path, ".".join(o_))
+
+ with open(x_[0].unicode_path, "w", encoding="utf-8") as fp:
+ fp.write(str(best_guess))
+ except IOError as e:
+ print(str(e), file=sys.stderr)
+ if my_file.closed is False:
+ my_file.close()
+ return 2
+
+ if my_file.closed is False:
+ my_file.close()
+
+ if args.minimal is False:
+ print(
+ dumps(
+ [el.__dict__ for el in x_] if len(x_) > 1 else x_[0].__dict__,
+ ensure_ascii=True,
+ indent=4,
+ )
+ )
+ else:
+ for my_file in args.files:
+ print(
+ ", ".join(
+ [
+ el.encoding or "undefined"
+ for el in x_
+ if el.path == abspath(my_file.name)
+ ]
+ )
+ )
+
+ return 0
+
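+# Usage sketch (hedged; "sample.txt" is an assumed placeholder file):
+#
+#     cli_detect(["--minimal", "sample.txt"])   # prints only the charset, returns 0
+#
+# This mirrors the installed `normalizer` console entry point.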
+
+if __name__ == "__main__":
+ cli_detect()
diff --git a/billinglayer/python/charset_normalizer/constant.py b/billinglayer/python/charset_normalizer/constant.py
new file mode 100644
index 0000000..3188108
--- /dev/null
+++ b/billinglayer/python/charset_normalizer/constant.py
@@ -0,0 +1,495 @@
+from codecs import BOM_UTF8, BOM_UTF16_BE, BOM_UTF16_LE, BOM_UTF32_BE, BOM_UTF32_LE
+from encodings.aliases import aliases
+from re import IGNORECASE, compile as re_compile
+from typing import Dict, List, Set, Union
+
+from .assets import FREQUENCIES
+
+# Contains, for each eligible encoding, its SIG/BOM byte mark(s): a single bytes item or a list of them.
+ENCODING_MARKS: Dict[str, Union[bytes, List[bytes]]] = {
+ "utf_8": BOM_UTF8,
+ "utf_7": [
+ b"\x2b\x2f\x76\x38",
+ b"\x2b\x2f\x76\x39",
+ b"\x2b\x2f\x76\x2b",
+ b"\x2b\x2f\x76\x2f",
+ b"\x2b\x2f\x76\x38\x2d",
+ ],
+ "gb18030": b"\x84\x31\x95\x33",
+ "utf_32": [BOM_UTF32_BE, BOM_UTF32_LE],
+ "utf_16": [BOM_UTF16_BE, BOM_UTF16_LE],
+}
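+# e.g. a payload starting with BOM_UTF8 (b"\xef\xbb\xbf") matches the "utf_8"
+# mark above and is reported as carrying a UTF-8 SIG.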
+
+TOO_SMALL_SEQUENCE: int = 32
+TOO_BIG_SEQUENCE: int = int(10e6)
+
+UTF8_MAXIMAL_ALLOCATION: int = 1112064
+
+UNICODE_RANGES_COMBINED: Dict[str, range] = {
+ "Control character": range(31 + 1),
+ "Basic Latin": range(32, 127 + 1),
+ "Latin-1 Supplement": range(128, 255 + 1),
+ "Latin Extended-A": range(256, 383 + 1),
+ "Latin Extended-B": range(384, 591 + 1),
+ "IPA Extensions": range(592, 687 + 1),
+ "Spacing Modifier Letters": range(688, 767 + 1),
+ "Combining Diacritical Marks": range(768, 879 + 1),
+ "Greek and Coptic": range(880, 1023 + 1),
+ "Cyrillic": range(1024, 1279 + 1),
+ "Cyrillic Supplement": range(1280, 1327 + 1),
+ "Armenian": range(1328, 1423 + 1),
+ "Hebrew": range(1424, 1535 + 1),
+ "Arabic": range(1536, 1791 + 1),
+ "Syriac": range(1792, 1871 + 1),
+ "Arabic Supplement": range(1872, 1919 + 1),
+ "Thaana": range(1920, 1983 + 1),
+ "NKo": range(1984, 2047 + 1),
+ "Samaritan": range(2048, 2111 + 1),
+ "Mandaic": range(2112, 2143 + 1),
+ "Syriac Supplement": range(2144, 2159 + 1),
+ "Arabic Extended-A": range(2208, 2303 + 1),
+ "Devanagari": range(2304, 2431 + 1),
+ "Bengali": range(2432, 2559 + 1),
+ "Gurmukhi": range(2560, 2687 + 1),
+ "Gujarati": range(2688, 2815 + 1),
+ "Oriya": range(2816, 2943 + 1),
+ "Tamil": range(2944, 3071 + 1),
+ "Telugu": range(3072, 3199 + 1),
+ "Kannada": range(3200, 3327 + 1),
+ "Malayalam": range(3328, 3455 + 1),
+ "Sinhala": range(3456, 3583 + 1),
+ "Thai": range(3584, 3711 + 1),
+ "Lao": range(3712, 3839 + 1),
+ "Tibetan": range(3840, 4095 + 1),
+ "Myanmar": range(4096, 4255 + 1),
+ "Georgian": range(4256, 4351 + 1),
+ "Hangul Jamo": range(4352, 4607 + 1),
+ "Ethiopic": range(4608, 4991 + 1),
+ "Ethiopic Supplement": range(4992, 5023 + 1),
+ "Cherokee": range(5024, 5119 + 1),
+ "Unified Canadian Aboriginal Syllabics": range(5120, 5759 + 1),
+ "Ogham": range(5760, 5791 + 1),
+ "Runic": range(5792, 5887 + 1),
+ "Tagalog": range(5888, 5919 + 1),
+ "Hanunoo": range(5920, 5951 + 1),
+ "Buhid": range(5952, 5983 + 1),
+ "Tagbanwa": range(5984, 6015 + 1),
+ "Khmer": range(6016, 6143 + 1),
+ "Mongolian": range(6144, 6319 + 1),
+ "Unified Canadian Aboriginal Syllabics Extended": range(6320, 6399 + 1),
+ "Limbu": range(6400, 6479 + 1),
+ "Tai Le": range(6480, 6527 + 1),
+ "New Tai Lue": range(6528, 6623 + 1),
+ "Khmer Symbols": range(6624, 6655 + 1),
+ "Buginese": range(6656, 6687 + 1),
+ "Tai Tham": range(6688, 6831 + 1),
+ "Combining Diacritical Marks Extended": range(6832, 6911 + 1),
+ "Balinese": range(6912, 7039 + 1),
+ "Sundanese": range(7040, 7103 + 1),
+ "Batak": range(7104, 7167 + 1),
+ "Lepcha": range(7168, 7247 + 1),
+ "Ol Chiki": range(7248, 7295 + 1),
+ "Cyrillic Extended C": range(7296, 7311 + 1),
+ "Sundanese Supplement": range(7360, 7375 + 1),
+ "Vedic Extensions": range(7376, 7423 + 1),
+ "Phonetic Extensions": range(7424, 7551 + 1),
+ "Phonetic Extensions Supplement": range(7552, 7615 + 1),
+ "Combining Diacritical Marks Supplement": range(7616, 7679 + 1),
+ "Latin Extended Additional": range(7680, 7935 + 1),
+ "Greek Extended": range(7936, 8191 + 1),
+ "General Punctuation": range(8192, 8303 + 1),
+ "Superscripts and Subscripts": range(8304, 8351 + 1),
+ "Currency Symbols": range(8352, 8399 + 1),
+ "Combining Diacritical Marks for Symbols": range(8400, 8447 + 1),
+ "Letterlike Symbols": range(8448, 8527 + 1),
+ "Number Forms": range(8528, 8591 + 1),
+ "Arrows": range(8592, 8703 + 1),
+ "Mathematical Operators": range(8704, 8959 + 1),
+ "Miscellaneous Technical": range(8960, 9215 + 1),
+ "Control Pictures": range(9216, 9279 + 1),
+ "Optical Character Recognition": range(9280, 9311 + 1),
+ "Enclosed Alphanumerics": range(9312, 9471 + 1),
+ "Box Drawing": range(9472, 9599 + 1),
+ "Block Elements": range(9600, 9631 + 1),
+ "Geometric Shapes": range(9632, 9727 + 1),
+ "Miscellaneous Symbols": range(9728, 9983 + 1),
+ "Dingbats": range(9984, 10175 + 1),
+ "Miscellaneous Mathematical Symbols-A": range(10176, 10223 + 1),
+ "Supplemental Arrows-A": range(10224, 10239 + 1),
+ "Braille Patterns": range(10240, 10495 + 1),
+ "Supplemental Arrows-B": range(10496, 10623 + 1),
+ "Miscellaneous Mathematical Symbols-B": range(10624, 10751 + 1),
+ "Supplemental Mathematical Operators": range(10752, 11007 + 1),
+ "Miscellaneous Symbols and Arrows": range(11008, 11263 + 1),
+ "Glagolitic": range(11264, 11359 + 1),
+ "Latin Extended-C": range(11360, 11391 + 1),
+ "Coptic": range(11392, 11519 + 1),
+ "Georgian Supplement": range(11520, 11567 + 1),
+ "Tifinagh": range(11568, 11647 + 1),
+ "Ethiopic Extended": range(11648, 11743 + 1),
+ "Cyrillic Extended-A": range(11744, 11775 + 1),
+ "Supplemental Punctuation": range(11776, 11903 + 1),
+ "CJK Radicals Supplement": range(11904, 12031 + 1),
+ "Kangxi Radicals": range(12032, 12255 + 1),
+ "Ideographic Description Characters": range(12272, 12287 + 1),
+ "CJK Symbols and Punctuation": range(12288, 12351 + 1),
+ "Hiragana": range(12352, 12447 + 1),
+ "Katakana": range(12448, 12543 + 1),
+ "Bopomofo": range(12544, 12591 + 1),
+ "Hangul Compatibility Jamo": range(12592, 12687 + 1),
+ "Kanbun": range(12688, 12703 + 1),
+ "Bopomofo Extended": range(12704, 12735 + 1),
+ "CJK Strokes": range(12736, 12783 + 1),
+ "Katakana Phonetic Extensions": range(12784, 12799 + 1),
+ "Enclosed CJK Letters and Months": range(12800, 13055 + 1),
+ "CJK Compatibility": range(13056, 13311 + 1),
+ "CJK Unified Ideographs Extension A": range(13312, 19903 + 1),
+ "Yijing Hexagram Symbols": range(19904, 19967 + 1),
+ "CJK Unified Ideographs": range(19968, 40959 + 1),
+ "Yi Syllables": range(40960, 42127 + 1),
+ "Yi Radicals": range(42128, 42191 + 1),
+ "Lisu": range(42192, 42239 + 1),
+ "Vai": range(42240, 42559 + 1),
+ "Cyrillic Extended-B": range(42560, 42655 + 1),
+ "Bamum": range(42656, 42751 + 1),
+ "Modifier Tone Letters": range(42752, 42783 + 1),
+ "Latin Extended-D": range(42784, 43007 + 1),
+ "Syloti Nagri": range(43008, 43055 + 1),
+ "Common Indic Number Forms": range(43056, 43071 + 1),
+ "Phags-pa": range(43072, 43135 + 1),
+ "Saurashtra": range(43136, 43231 + 1),
+ "Devanagari Extended": range(43232, 43263 + 1),
+ "Kayah Li": range(43264, 43311 + 1),
+ "Rejang": range(43312, 43359 + 1),
+ "Hangul Jamo Extended-A": range(43360, 43391 + 1),
+ "Javanese": range(43392, 43487 + 1),
+ "Myanmar Extended-B": range(43488, 43519 + 1),
+ "Cham": range(43520, 43615 + 1),
+ "Myanmar Extended-A": range(43616, 43647 + 1),
+ "Tai Viet": range(43648, 43743 + 1),
+ "Meetei Mayek Extensions": range(43744, 43775 + 1),
+ "Ethiopic Extended-A": range(43776, 43823 + 1),
+ "Latin Extended-E": range(43824, 43887 + 1),
+ "Cherokee Supplement": range(43888, 43967 + 1),
+ "Meetei Mayek": range(43968, 44031 + 1),
+ "Hangul Syllables": range(44032, 55215 + 1),
+ "Hangul Jamo Extended-B": range(55216, 55295 + 1),
+ "High Surrogates": range(55296, 56191 + 1),
+ "High Private Use Surrogates": range(56192, 56319 + 1),
+ "Low Surrogates": range(56320, 57343 + 1),
+ "Private Use Area": range(57344, 63743 + 1),
+ "CJK Compatibility Ideographs": range(63744, 64255 + 1),
+ "Alphabetic Presentation Forms": range(64256, 64335 + 1),
+ "Arabic Presentation Forms-A": range(64336, 65023 + 1),
+ "Variation Selectors": range(65024, 65039 + 1),
+ "Vertical Forms": range(65040, 65055 + 1),
+ "Combining Half Marks": range(65056, 65071 + 1),
+ "CJK Compatibility Forms": range(65072, 65103 + 1),
+ "Small Form Variants": range(65104, 65135 + 1),
+ "Arabic Presentation Forms-B": range(65136, 65279 + 1),
+ "Halfwidth and Fullwidth Forms": range(65280, 65519 + 1),
+ "Specials": range(65520, 65535 + 1),
+ "Linear B Syllabary": range(65536, 65663 + 1),
+ "Linear B Ideograms": range(65664, 65791 + 1),
+ "Aegean Numbers": range(65792, 65855 + 1),
+ "Ancient Greek Numbers": range(65856, 65935 + 1),
+ "Ancient Symbols": range(65936, 65999 + 1),
+ "Phaistos Disc": range(66000, 66047 + 1),
+ "Lycian": range(66176, 66207 + 1),
+ "Carian": range(66208, 66271 + 1),
+ "Coptic Epact Numbers": range(66272, 66303 + 1),
+ "Old Italic": range(66304, 66351 + 1),
+ "Gothic": range(66352, 66383 + 1),
+ "Old Permic": range(66384, 66431 + 1),
+ "Ugaritic": range(66432, 66463 + 1),
+ "Old Persian": range(66464, 66527 + 1),
+ "Deseret": range(66560, 66639 + 1),
+ "Shavian": range(66640, 66687 + 1),
+ "Osmanya": range(66688, 66735 + 1),
+ "Osage": range(66736, 66815 + 1),
+ "Elbasan": range(66816, 66863 + 1),
+ "Caucasian Albanian": range(66864, 66927 + 1),
+ "Linear A": range(67072, 67455 + 1),
+ "Cypriot Syllabary": range(67584, 67647 + 1),
+ "Imperial Aramaic": range(67648, 67679 + 1),
+ "Palmyrene": range(67680, 67711 + 1),
+ "Nabataean": range(67712, 67759 + 1),
+ "Hatran": range(67808, 67839 + 1),
+ "Phoenician": range(67840, 67871 + 1),
+ "Lydian": range(67872, 67903 + 1),
+ "Meroitic Hieroglyphs": range(67968, 67999 + 1),
+ "Meroitic Cursive": range(68000, 68095 + 1),
+ "Kharoshthi": range(68096, 68191 + 1),
+ "Old South Arabian": range(68192, 68223 + 1),
+ "Old North Arabian": range(68224, 68255 + 1),
+ "Manichaean": range(68288, 68351 + 1),
+ "Avestan": range(68352, 68415 + 1),
+ "Inscriptional Parthian": range(68416, 68447 + 1),
+ "Inscriptional Pahlavi": range(68448, 68479 + 1),
+ "Psalter Pahlavi": range(68480, 68527 + 1),
+ "Old Turkic": range(68608, 68687 + 1),
+ "Old Hungarian": range(68736, 68863 + 1),
+ "Rumi Numeral Symbols": range(69216, 69247 + 1),
+ "Brahmi": range(69632, 69759 + 1),
+ "Kaithi": range(69760, 69839 + 1),
+ "Sora Sompeng": range(69840, 69887 + 1),
+ "Chakma": range(69888, 69967 + 1),
+ "Mahajani": range(69968, 70015 + 1),
+ "Sharada": range(70016, 70111 + 1),
+ "Sinhala Archaic Numbers": range(70112, 70143 + 1),
+ "Khojki": range(70144, 70223 + 1),
+ "Multani": range(70272, 70319 + 1),
+ "Khudawadi": range(70320, 70399 + 1),
+ "Grantha": range(70400, 70527 + 1),
+ "Newa": range(70656, 70783 + 1),
+ "Tirhuta": range(70784, 70879 + 1),
+ "Siddham": range(71040, 71167 + 1),
+ "Modi": range(71168, 71263 + 1),
+ "Mongolian Supplement": range(71264, 71295 + 1),
+ "Takri": range(71296, 71375 + 1),
+ "Ahom": range(71424, 71487 + 1),
+ "Warang Citi": range(71840, 71935 + 1),
+ "Zanabazar Square": range(72192, 72271 + 1),
+ "Soyombo": range(72272, 72367 + 1),
+ "Pau Cin Hau": range(72384, 72447 + 1),
+ "Bhaiksuki": range(72704, 72815 + 1),
+ "Marchen": range(72816, 72895 + 1),
+ "Masaram Gondi": range(72960, 73055 + 1),
+ "Cuneiform": range(73728, 74751 + 1),
+ "Cuneiform Numbers and Punctuation": range(74752, 74879 + 1),
+ "Early Dynastic Cuneiform": range(74880, 75087 + 1),
+ "Egyptian Hieroglyphs": range(77824, 78895 + 1),
+ "Anatolian Hieroglyphs": range(82944, 83583 + 1),
+ "Bamum Supplement": range(92160, 92735 + 1),
+ "Mro": range(92736, 92783 + 1),
+ "Bassa Vah": range(92880, 92927 + 1),
+ "Pahawh Hmong": range(92928, 93071 + 1),
+ "Miao": range(93952, 94111 + 1),
+ "Ideographic Symbols and Punctuation": range(94176, 94207 + 1),
+ "Tangut": range(94208, 100351 + 1),
+ "Tangut Components": range(100352, 101119 + 1),
+ "Kana Supplement": range(110592, 110847 + 1),
+ "Kana Extended-A": range(110848, 110895 + 1),
+ "Nushu": range(110960, 111359 + 1),
+ "Duployan": range(113664, 113823 + 1),
+ "Shorthand Format Controls": range(113824, 113839 + 1),
+ "Byzantine Musical Symbols": range(118784, 119039 + 1),
+ "Musical Symbols": range(119040, 119295 + 1),
+ "Ancient Greek Musical Notation": range(119296, 119375 + 1),
+ "Tai Xuan Jing Symbols": range(119552, 119647 + 1),
+ "Counting Rod Numerals": range(119648, 119679 + 1),
+ "Mathematical Alphanumeric Symbols": range(119808, 120831 + 1),
+ "Sutton SignWriting": range(120832, 121519 + 1),
+ "Glagolitic Supplement": range(122880, 122927 + 1),
+ "Mende Kikakui": range(124928, 125151 + 1),
+ "Adlam": range(125184, 125279 + 1),
+ "Arabic Mathematical Alphabetic Symbols": range(126464, 126719 + 1),
+ "Mahjong Tiles": range(126976, 127023 + 1),
+ "Domino Tiles": range(127024, 127135 + 1),
+ "Playing Cards": range(127136, 127231 + 1),
+ "Enclosed Alphanumeric Supplement": range(127232, 127487 + 1),
+ "Enclosed Ideographic Supplement": range(127488, 127743 + 1),
+ "Miscellaneous Symbols and Pictographs": range(127744, 128511 + 1),
+ "Emoticons range(Emoji)": range(128512, 128591 + 1),
+ "Ornamental Dingbats": range(128592, 128639 + 1),
+ "Transport and Map Symbols": range(128640, 128767 + 1),
+ "Alchemical Symbols": range(128768, 128895 + 1),
+ "Geometric Shapes Extended": range(128896, 129023 + 1),
+ "Supplemental Arrows-C": range(129024, 129279 + 1),
+ "Supplemental Symbols and Pictographs": range(129280, 129535 + 1),
+ "CJK Unified Ideographs Extension B": range(131072, 173791 + 1),
+ "CJK Unified Ideographs Extension C": range(173824, 177983 + 1),
+ "CJK Unified Ideographs Extension D": range(177984, 178207 + 1),
+ "CJK Unified Ideographs Extension E": range(178208, 183983 + 1),
+ "CJK Unified Ideographs Extension F": range(183984, 191471 + 1),
+ "CJK Compatibility Ideographs Supplement": range(194560, 195103 + 1),
+ "Tags": range(917504, 917631 + 1),
+ "Variation Selectors Supplement": range(917760, 917999 + 1),
+}
+
+
+UNICODE_SECONDARY_RANGE_KEYWORD: List[str] = [
+ "Supplement",
+ "Extended",
+ "Extensions",
+ "Modifier",
+ "Marks",
+ "Punctuation",
+ "Symbols",
+ "Forms",
+ "Operators",
+ "Miscellaneous",
+ "Drawing",
+ "Block",
+ "Shapes",
+ "Supplemental",
+ "Tags",
+]
+
+RE_POSSIBLE_ENCODING_INDICATION = re_compile(
+ r"(?:(?:encoding)|(?:charset)|(?:coding))(?:[\:= ]{1,10})(?:[\"\']?)([a-zA-Z0-9\-_]+)(?:[\"\']?)",
+ IGNORECASE,
+)
+
+IANA_SUPPORTED: List[str] = sorted(
+ filter(
+ lambda x: x.endswith("_codec") is False
+ and x not in {"rot_13", "tactis", "mbcs"},
+ list(set(aliases.values())),
+ )
+)
+
+IANA_SUPPORTED_COUNT: int = len(IANA_SUPPORTED)
+
+# Pre-computed code pages that are similar, as computed by the function cp_similarity.
+IANA_SUPPORTED_SIMILAR: Dict[str, List[str]] = {
+ "cp037": ["cp1026", "cp1140", "cp273", "cp500"],
+ "cp1026": ["cp037", "cp1140", "cp273", "cp500"],
+ "cp1125": ["cp866"],
+ "cp1140": ["cp037", "cp1026", "cp273", "cp500"],
+ "cp1250": ["iso8859_2"],
+ "cp1251": ["kz1048", "ptcp154"],
+ "cp1252": ["iso8859_15", "iso8859_9", "latin_1"],
+ "cp1253": ["iso8859_7"],
+ "cp1254": ["iso8859_15", "iso8859_9", "latin_1"],
+ "cp1257": ["iso8859_13"],
+ "cp273": ["cp037", "cp1026", "cp1140", "cp500"],
+ "cp437": ["cp850", "cp858", "cp860", "cp861", "cp862", "cp863", "cp865"],
+ "cp500": ["cp037", "cp1026", "cp1140", "cp273"],
+ "cp850": ["cp437", "cp857", "cp858", "cp865"],
+ "cp857": ["cp850", "cp858", "cp865"],
+ "cp858": ["cp437", "cp850", "cp857", "cp865"],
+ "cp860": ["cp437", "cp861", "cp862", "cp863", "cp865"],
+ "cp861": ["cp437", "cp860", "cp862", "cp863", "cp865"],
+ "cp862": ["cp437", "cp860", "cp861", "cp863", "cp865"],
+ "cp863": ["cp437", "cp860", "cp861", "cp862", "cp865"],
+ "cp865": ["cp437", "cp850", "cp857", "cp858", "cp860", "cp861", "cp862", "cp863"],
+ "cp866": ["cp1125"],
+ "iso8859_10": ["iso8859_14", "iso8859_15", "iso8859_4", "iso8859_9", "latin_1"],
+ "iso8859_11": ["tis_620"],
+ "iso8859_13": ["cp1257"],
+ "iso8859_14": [
+ "iso8859_10",
+ "iso8859_15",
+ "iso8859_16",
+ "iso8859_3",
+ "iso8859_9",
+ "latin_1",
+ ],
+ "iso8859_15": [
+ "cp1252",
+ "cp1254",
+ "iso8859_10",
+ "iso8859_14",
+ "iso8859_16",
+ "iso8859_3",
+ "iso8859_9",
+ "latin_1",
+ ],
+ "iso8859_16": [
+ "iso8859_14",
+ "iso8859_15",
+ "iso8859_2",
+ "iso8859_3",
+ "iso8859_9",
+ "latin_1",
+ ],
+ "iso8859_2": ["cp1250", "iso8859_16", "iso8859_4"],
+ "iso8859_3": ["iso8859_14", "iso8859_15", "iso8859_16", "iso8859_9", "latin_1"],
+ "iso8859_4": ["iso8859_10", "iso8859_2", "iso8859_9", "latin_1"],
+ "iso8859_7": ["cp1253"],
+ "iso8859_9": [
+ "cp1252",
+ "cp1254",
+ "cp1258",
+ "iso8859_10",
+ "iso8859_14",
+ "iso8859_15",
+ "iso8859_16",
+ "iso8859_3",
+ "iso8859_4",
+ "latin_1",
+ ],
+ "kz1048": ["cp1251", "ptcp154"],
+ "latin_1": [
+ "cp1252",
+ "cp1254",
+ "cp1258",
+ "iso8859_10",
+ "iso8859_14",
+ "iso8859_15",
+ "iso8859_16",
+ "iso8859_3",
+ "iso8859_4",
+ "iso8859_9",
+ ],
+ "mac_iceland": ["mac_roman", "mac_turkish"],
+ "mac_roman": ["mac_iceland", "mac_turkish"],
+ "mac_turkish": ["mac_iceland", "mac_roman"],
+ "ptcp154": ["cp1251", "kz1048"],
+ "tis_620": ["iso8859_11"],
+}
+
+
+CHARDET_CORRESPONDENCE: Dict[str, str] = {
+ "iso2022_kr": "ISO-2022-KR",
+ "iso2022_jp": "ISO-2022-JP",
+ "euc_kr": "EUC-KR",
+ "tis_620": "TIS-620",
+ "utf_32": "UTF-32",
+ "euc_jp": "EUC-JP",
+ "koi8_r": "KOI8-R",
+ "iso8859_1": "ISO-8859-1",
+ "iso8859_2": "ISO-8859-2",
+ "iso8859_5": "ISO-8859-5",
+ "iso8859_6": "ISO-8859-6",
+ "iso8859_7": "ISO-8859-7",
+ "iso8859_8": "ISO-8859-8",
+ "utf_16": "UTF-16",
+ "cp855": "IBM855",
+ "mac_cyrillic": "MacCyrillic",
+ "gb2312": "GB2312",
+ "gb18030": "GB18030",
+ "cp932": "CP932",
+ "cp866": "IBM866",
+ "utf_8": "utf-8",
+ "utf_8_sig": "UTF-8-SIG",
+ "shift_jis": "SHIFT_JIS",
+ "big5": "Big5",
+ "cp1250": "windows-1250",
+ "cp1251": "windows-1251",
+ "cp1252": "Windows-1252",
+ "cp1253": "windows-1253",
+ "cp1255": "windows-1255",
+ "cp1256": "windows-1256",
+ "cp1254": "Windows-1254",
+ "cp949": "CP949",
+}
+
+
+COMMON_SAFE_ASCII_CHARACTERS: Set[str] = {
+ "<",
+ ">",
+ "=",
+ ":",
+ "/",
+ "&",
+ ";",
+ "{",
+ "}",
+ "[",
+ "]",
+ ",",
+ "|",
+ '"',
+ "-",
+}
+
+
+KO_NAMES: Set[str] = {"johab", "cp949", "euc_kr"}
+ZH_NAMES: Set[str] = {"big5", "cp950", "big5hkscs", "hz"}
+
+LANGUAGE_SUPPORTED_COUNT: int = len(FREQUENCIES)
+
+# Logging LEVEL below DEBUG
+TRACE: int = 5
diff --git a/billinglayer/python/charset_normalizer/legacy.py b/billinglayer/python/charset_normalizer/legacy.py
new file mode 100644
index 0000000..43aad21
--- /dev/null
+++ b/billinglayer/python/charset_normalizer/legacy.py
@@ -0,0 +1,54 @@
+from typing import Any, Dict, Optional, Union
+from warnings import warn
+
+from .api import from_bytes
+from .constant import CHARDET_CORRESPONDENCE
+
+
+def detect(
+ byte_str: bytes, should_rename_legacy: bool = False, **kwargs: Any
+) -> Dict[str, Optional[Union[str, float]]]:
+ """
+ chardet legacy method
+ Detect the encoding of the given byte string. It should be mostly backward-compatible.
+ Encoding name will match Chardet own writing whenever possible. (Not on encoding name unsupported by it)
+ This function is deprecated and should be used to migrate your project easily, consult the documentation for
+ further information. Not planned for removal.
+
+ :param byte_str: The byte sequence to examine.
+ :param should_rename_legacy: Should we rename legacy encodings
+ to their more modern equivalents?
+ """
+ if len(kwargs):
+ warn(
+ f"charset-normalizer disregard arguments '{','.join(list(kwargs.keys()))}' in legacy function detect()"
+ )
+
+ if not isinstance(byte_str, (bytearray, bytes)):
+ raise TypeError( # pragma: nocover
+ "Expected object of type bytes or bytearray, got: "
+ "{0}".format(type(byte_str))
+ )
+
+ if isinstance(byte_str, bytearray):
+ byte_str = bytes(byte_str)
+
+ r = from_bytes(byte_str).best()
+
+ encoding = r.encoding if r is not None else None
+ language = r.language if r is not None and r.language != "Unknown" else ""
+ confidence = 1.0 - r.chaos if r is not None else None
+
+    # Note: CharsetNormalizer does not return 'UTF-8-SIG' since the sig gets stripped in the detection/normalization
+    # process, but chardet does return 'utf-8-sig' and it is a valid codec name.
+ if r is not None and encoding == "utf_8" and r.bom:
+ encoding += "_sig"
+
+ if should_rename_legacy is False and encoding in CHARDET_CORRESPONDENCE:
+ encoding = CHARDET_CORRESPONDENCE[encoding]
+
+ return {
+ "encoding": encoding,
+ "language": language,
+ "confidence": confidence,
+ }
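+
+
+# Illustrative usage sketch, not part of the upstream module: the legacy shim is
+# meant as a drop-in replacement for chardet.detect(). Runnable only in package
+# context (e.g. `python -m charset_normalizer.legacy` with the snippet below).
+if __name__ == "__main__":
+    result = detect("Bonjour tout le monde !".encode("cp1252"))
+    # Expected shape: {'encoding': ..., 'language': ..., 'confidence': ...}
+    print(result)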
diff --git a/billinglayer/python/charset_normalizer/md.cpython-311-x86_64-linux-gnu.so b/billinglayer/python/charset_normalizer/md.cpython-311-x86_64-linux-gnu.so
new file mode 100755
index 0000000..38fda6e
Binary files /dev/null and b/billinglayer/python/charset_normalizer/md.cpython-311-x86_64-linux-gnu.so differ
diff --git a/billinglayer/python/charset_normalizer/md.py b/billinglayer/python/charset_normalizer/md.py
new file mode 100644
index 0000000..13aa062
--- /dev/null
+++ b/billinglayer/python/charset_normalizer/md.py
@@ -0,0 +1,582 @@
+from functools import lru_cache
+from logging import getLogger
+from typing import List, Optional
+
+from .constant import (
+ COMMON_SAFE_ASCII_CHARACTERS,
+ TRACE,
+ UNICODE_SECONDARY_RANGE_KEYWORD,
+)
+from .utils import (
+ is_accentuated,
+ is_ascii,
+ is_case_variable,
+ is_cjk,
+ is_emoticon,
+ is_hangul,
+ is_hiragana,
+ is_katakana,
+ is_latin,
+ is_punctuation,
+ is_separator,
+ is_symbol,
+ is_thai,
+ is_unprintable,
+ remove_accent,
+ unicode_range,
+)
+
+
+class MessDetectorPlugin:
+ """
+ Base abstract class used for mess detection plugins.
+ All detectors MUST extend and implement given methods.
+ """
+
+ def eligible(self, character: str) -> bool:
+ """
+ Determine if given character should be fed in.
+ """
+ raise NotImplementedError # pragma: nocover
+
+ def feed(self, character: str) -> None:
+ """
+ The main routine to be executed upon character.
+ Insert the logic in witch the text would be considered chaotic.
+ """
+ raise NotImplementedError # pragma: nocover
+
+ def reset(self) -> None: # pragma: no cover
+ """
+ Permit to reset the plugin to the initial state.
+ """
+ raise NotImplementedError
+
+ @property
+ def ratio(self) -> float:
+ """
+ Compute the chaos ratio based on what your feed() has seen.
+ Must NOT be lower than 0.; No restriction gt 0.
+ """
+ raise NotImplementedError # pragma: nocover
+
+
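+# Illustrative sketch, kept as a comment on purpose: a minimal custom plugin
+# subclasses MessDetectorPlugin and implements the four members above, e.g.
+#
+#     class TooManyDigitsPlugin(MessDetectorPlugin):
+#         def __init__(self) -> None:
+#             self._digit_count = 0
+#             self._character_count = 0
+#
+#         def eligible(self, character: str) -> bool:
+#             return character.isprintable()
+#
+#         def feed(self, character: str) -> None:
+#             self._character_count += 1
+#             if character.isdigit():
+#                 self._digit_count += 1
+#
+#         def reset(self) -> None:
+#             self._digit_count = 0
+#             self._character_count = 0
+#
+#         @property
+#         def ratio(self) -> float:
+#             return 0.0  # a real plugin would derive its chaos ratio here
+#
+# Beware: actually defining such a class registers it automatically, because
+# mess_ratio() instantiates every MessDetectorPlugin.__subclasses__().
+
+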
+class TooManySymbolOrPunctuationPlugin(MessDetectorPlugin):
+ def __init__(self) -> None:
+ self._punctuation_count: int = 0
+ self._symbol_count: int = 0
+ self._character_count: int = 0
+
+ self._last_printable_char: Optional[str] = None
+ self._frenzy_symbol_in_word: bool = False
+
+ def eligible(self, character: str) -> bool:
+ return character.isprintable()
+
+ def feed(self, character: str) -> None:
+ self._character_count += 1
+
+ if (
+ character != self._last_printable_char
+ and character not in COMMON_SAFE_ASCII_CHARACTERS
+ ):
+ if is_punctuation(character):
+ self._punctuation_count += 1
+ elif (
+ character.isdigit() is False
+ and is_symbol(character)
+ and is_emoticon(character) is False
+ ):
+ self._symbol_count += 2
+
+ self._last_printable_char = character
+
+ def reset(self) -> None: # pragma: no cover
+ self._punctuation_count = 0
+ self._character_count = 0
+ self._symbol_count = 0
+
+ @property
+ def ratio(self) -> float:
+ if self._character_count == 0:
+ return 0.0
+
+ ratio_of_punctuation: float = (
+ self._punctuation_count + self._symbol_count
+ ) / self._character_count
+
+ return ratio_of_punctuation if ratio_of_punctuation >= 0.3 else 0.0
+
+
+class TooManyAccentuatedPlugin(MessDetectorPlugin):
+ def __init__(self) -> None:
+ self._character_count: int = 0
+ self._accentuated_count: int = 0
+
+ def eligible(self, character: str) -> bool:
+ return character.isalpha()
+
+ def feed(self, character: str) -> None:
+ self._character_count += 1
+
+ if is_accentuated(character):
+ self._accentuated_count += 1
+
+ def reset(self) -> None: # pragma: no cover
+ self._character_count = 0
+ self._accentuated_count = 0
+
+ @property
+ def ratio(self) -> float:
+        if self._character_count < 8:  # the zero check is subsumed by '< 8'
+ return 0.0
+ ratio_of_accentuation: float = self._accentuated_count / self._character_count
+ return ratio_of_accentuation if ratio_of_accentuation >= 0.35 else 0.0
+
+
+class UnprintablePlugin(MessDetectorPlugin):
+ def __init__(self) -> None:
+ self._unprintable_count: int = 0
+ self._character_count: int = 0
+
+ def eligible(self, character: str) -> bool:
+ return True
+
+ def feed(self, character: str) -> None:
+ if is_unprintable(character):
+ self._unprintable_count += 1
+ self._character_count += 1
+
+ def reset(self) -> None: # pragma: no cover
+ self._unprintable_count = 0
+
+ @property
+ def ratio(self) -> float:
+ if self._character_count == 0:
+ return 0.0
+
+ return (self._unprintable_count * 8) / self._character_count
+
+
+class SuspiciousDuplicateAccentPlugin(MessDetectorPlugin):
+ def __init__(self) -> None:
+ self._successive_count: int = 0
+ self._character_count: int = 0
+
+ self._last_latin_character: Optional[str] = None
+
+ def eligible(self, character: str) -> bool:
+ return character.isalpha() and is_latin(character)
+
+ def feed(self, character: str) -> None:
+ self._character_count += 1
+ if (
+ self._last_latin_character is not None
+ and is_accentuated(character)
+ and is_accentuated(self._last_latin_character)
+ ):
+ if character.isupper() and self._last_latin_character.isupper():
+ self._successive_count += 1
+            # Worse if it's the same char duplicated with a different accent.
+ if remove_accent(character) == remove_accent(self._last_latin_character):
+ self._successive_count += 1
+ self._last_latin_character = character
+
+ def reset(self) -> None: # pragma: no cover
+ self._successive_count = 0
+ self._character_count = 0
+ self._last_latin_character = None
+
+ @property
+ def ratio(self) -> float:
+ if self._character_count == 0:
+ return 0.0
+
+ return (self._successive_count * 2) / self._character_count
+
+
+class SuspiciousRange(MessDetectorPlugin):
+ def __init__(self) -> None:
+ self._suspicious_successive_range_count: int = 0
+ self._character_count: int = 0
+ self._last_printable_seen: Optional[str] = None
+
+ def eligible(self, character: str) -> bool:
+ return character.isprintable()
+
+ def feed(self, character: str) -> None:
+ self._character_count += 1
+
+ if (
+ character.isspace()
+ or is_punctuation(character)
+ or character in COMMON_SAFE_ASCII_CHARACTERS
+ ):
+ self._last_printable_seen = None
+ return
+
+ if self._last_printable_seen is None:
+ self._last_printable_seen = character
+ return
+
+ unicode_range_a: Optional[str] = unicode_range(self._last_printable_seen)
+ unicode_range_b: Optional[str] = unicode_range(character)
+
+ if is_suspiciously_successive_range(unicode_range_a, unicode_range_b):
+ self._suspicious_successive_range_count += 1
+
+ self._last_printable_seen = character
+
+ def reset(self) -> None: # pragma: no cover
+ self._character_count = 0
+ self._suspicious_successive_range_count = 0
+ self._last_printable_seen = None
+
+ @property
+ def ratio(self) -> float:
+ if self._character_count == 0:
+ return 0.0
+
+ ratio_of_suspicious_range_usage: float = (
+ self._suspicious_successive_range_count * 2
+ ) / self._character_count
+
+ if ratio_of_suspicious_range_usage < 0.1:
+ return 0.0
+
+ return ratio_of_suspicious_range_usage
+
+
+class SuperWeirdWordPlugin(MessDetectorPlugin):
+ def __init__(self) -> None:
+ self._word_count: int = 0
+ self._bad_word_count: int = 0
+ self._foreign_long_count: int = 0
+
+ self._is_current_word_bad: bool = False
+ self._foreign_long_watch: bool = False
+
+ self._character_count: int = 0
+ self._bad_character_count: int = 0
+
+ self._buffer: str = ""
+ self._buffer_accent_count: int = 0
+
+ def eligible(self, character: str) -> bool:
+ return True
+
+ def feed(self, character: str) -> None:
+ if character.isalpha():
+ self._buffer += character
+ if is_accentuated(character):
+ self._buffer_accent_count += 1
+ if (
+ self._foreign_long_watch is False
+ and (is_latin(character) is False or is_accentuated(character))
+ and is_cjk(character) is False
+ and is_hangul(character) is False
+ and is_katakana(character) is False
+ and is_hiragana(character) is False
+ and is_thai(character) is False
+ ):
+ self._foreign_long_watch = True
+ return
+ if not self._buffer:
+ return
+ if (
+ character.isspace() or is_punctuation(character) or is_separator(character)
+ ) and self._buffer:
+ self._word_count += 1
+ buffer_length: int = len(self._buffer)
+
+ self._character_count += buffer_length
+
+ if buffer_length >= 4:
+ if self._buffer_accent_count / buffer_length > 0.34:
+ self._is_current_word_bad = True
+                # Words/buffers ending with an upper-case accentuated letter are so rare
+                # that we consider them all suspicious. Same weight as the foreign_long case.
+ if is_accentuated(self._buffer[-1]) and self._buffer[-1].isupper():
+ self._foreign_long_count += 1
+ self._is_current_word_bad = True
+ if buffer_length >= 24 and self._foreign_long_watch:
+ camel_case_dst = [
+ i
+ for c, i in zip(self._buffer, range(0, buffer_length))
+ if c.isupper()
+ ]
+ probable_camel_cased: bool = False
+
+ if camel_case_dst and (len(camel_case_dst) / buffer_length <= 0.3):
+ probable_camel_cased = True
+
+ if not probable_camel_cased:
+ self._foreign_long_count += 1
+ self._is_current_word_bad = True
+
+ if self._is_current_word_bad:
+ self._bad_word_count += 1
+ self._bad_character_count += len(self._buffer)
+ self._is_current_word_bad = False
+
+ self._foreign_long_watch = False
+ self._buffer = ""
+ self._buffer_accent_count = 0
+ elif (
+ character not in {"<", ">", "-", "=", "~", "|", "_"}
+ and character.isdigit() is False
+ and is_symbol(character)
+ ):
+ self._is_current_word_bad = True
+ self._buffer += character
+
+ def reset(self) -> None: # pragma: no cover
+ self._buffer = ""
+ self._is_current_word_bad = False
+ self._foreign_long_watch = False
+ self._bad_word_count = 0
+ self._word_count = 0
+ self._character_count = 0
+ self._bad_character_count = 0
+ self._foreign_long_count = 0
+
+ @property
+ def ratio(self) -> float:
+ if self._word_count <= 10 and self._foreign_long_count == 0:
+ return 0.0
+
+ return self._bad_character_count / self._character_count
+
+
+class CjkInvalidStopPlugin(MessDetectorPlugin):
+ """
+ GB(Chinese) based encoding often render the stop incorrectly when the content does not fit and
+ can be easily detected. Searching for the overuse of '丅' and '丄'.
+ """
+
+ def __init__(self) -> None:
+ self._wrong_stop_count: int = 0
+ self._cjk_character_count: int = 0
+
+ def eligible(self, character: str) -> bool:
+ return True
+
+ def feed(self, character: str) -> None:
+ if character in {"丅", "丄"}:
+ self._wrong_stop_count += 1
+ return
+ if is_cjk(character):
+ self._cjk_character_count += 1
+
+ def reset(self) -> None: # pragma: no cover
+ self._wrong_stop_count = 0
+ self._cjk_character_count = 0
+
+ @property
+ def ratio(self) -> float:
+ if self._cjk_character_count < 16:
+ return 0.0
+ return self._wrong_stop_count / self._cjk_character_count
+
+
+class ArchaicUpperLowerPlugin(MessDetectorPlugin):
+ def __init__(self) -> None:
+ self._buf: bool = False
+
+ self._character_count_since_last_sep: int = 0
+
+ self._successive_upper_lower_count: int = 0
+ self._successive_upper_lower_count_final: int = 0
+
+ self._character_count: int = 0
+
+ self._last_alpha_seen: Optional[str] = None
+ self._current_ascii_only: bool = True
+
+ def eligible(self, character: str) -> bool:
+ return True
+
+ def feed(self, character: str) -> None:
+ is_concerned = character.isalpha() and is_case_variable(character)
+ chunk_sep = is_concerned is False
+
+ if chunk_sep and self._character_count_since_last_sep > 0:
+ if (
+ self._character_count_since_last_sep <= 64
+ and character.isdigit() is False
+ and self._current_ascii_only is False
+ ):
+ self._successive_upper_lower_count_final += (
+ self._successive_upper_lower_count
+ )
+
+ self._successive_upper_lower_count = 0
+ self._character_count_since_last_sep = 0
+ self._last_alpha_seen = None
+ self._buf = False
+ self._character_count += 1
+ self._current_ascii_only = True
+
+ return
+
+ if self._current_ascii_only is True and is_ascii(character) is False:
+ self._current_ascii_only = False
+
+ if self._last_alpha_seen is not None:
+ if (character.isupper() and self._last_alpha_seen.islower()) or (
+ character.islower() and self._last_alpha_seen.isupper()
+ ):
+ if self._buf is True:
+ self._successive_upper_lower_count += 2
+ self._buf = False
+ else:
+ self._buf = True
+ else:
+ self._buf = False
+
+ self._character_count += 1
+ self._character_count_since_last_sep += 1
+ self._last_alpha_seen = character
+
+ def reset(self) -> None: # pragma: no cover
+ self._character_count = 0
+ self._character_count_since_last_sep = 0
+ self._successive_upper_lower_count = 0
+ self._successive_upper_lower_count_final = 0
+ self._last_alpha_seen = None
+ self._buf = False
+ self._current_ascii_only = True
+
+ @property
+ def ratio(self) -> float:
+ if self._character_count == 0:
+ return 0.0
+
+ return self._successive_upper_lower_count_final / self._character_count
+
+
+@lru_cache(maxsize=1024)
+def is_suspiciously_successive_range(
+ unicode_range_a: Optional[str], unicode_range_b: Optional[str]
+) -> bool:
+ """
+ Determine if two Unicode range seen next to each other can be considered as suspicious.
+ """
+ if unicode_range_a is None or unicode_range_b is None:
+ return True
+
+ if unicode_range_a == unicode_range_b:
+ return False
+
+ if "Latin" in unicode_range_a and "Latin" in unicode_range_b:
+ return False
+
+ if "Emoticons" in unicode_range_a or "Emoticons" in unicode_range_b:
+ return False
+
+ # Latin characters can be accompanied with a combining diacritical mark
+ # eg. Vietnamese.
+ if ("Latin" in unicode_range_a or "Latin" in unicode_range_b) and (
+ "Combining" in unicode_range_a or "Combining" in unicode_range_b
+ ):
+ return False
+
+ keywords_range_a, keywords_range_b = unicode_range_a.split(
+ " "
+ ), unicode_range_b.split(" ")
+
+ for el in keywords_range_a:
+ if el in UNICODE_SECONDARY_RANGE_KEYWORD:
+ continue
+ if el in keywords_range_b:
+ return False
+
+ # Japanese Exception
+ range_a_jp_chars, range_b_jp_chars = (
+ unicode_range_a
+ in (
+ "Hiragana",
+ "Katakana",
+ ),
+ unicode_range_b in ("Hiragana", "Katakana"),
+ )
+ if (range_a_jp_chars or range_b_jp_chars) and (
+ "CJK" in unicode_range_a or "CJK" in unicode_range_b
+ ):
+ return False
+ if range_a_jp_chars and range_b_jp_chars:
+ return False
+
+ if "Hangul" in unicode_range_a or "Hangul" in unicode_range_b:
+ if "CJK" in unicode_range_a or "CJK" in unicode_range_b:
+ return False
+ if unicode_range_a == "Basic Latin" or unicode_range_b == "Basic Latin":
+ return False
+
+ # Chinese/Japanese use dedicated range for punctuation and/or separators.
+ if ("CJK" in unicode_range_a or "CJK" in unicode_range_b) or (
+ unicode_range_a in ["Katakana", "Hiragana"]
+ and unicode_range_b in ["Katakana", "Hiragana"]
+ ):
+ if "Punctuation" in unicode_range_a or "Punctuation" in unicode_range_b:
+ return False
+ if "Forms" in unicode_range_a or "Forms" in unicode_range_b:
+ return False
+
+ return True
+
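+
+# Illustrative sketch (comment only): expected behaviour on a few range pairs,
+# assuming the standard range names from UNICODE_RANGES_COMBINED:
+#     is_suspiciously_successive_range("Basic Latin", "Basic Latin")        -> False (same range)
+#     is_suspiciously_successive_range("Basic Latin", "Latin-1 Supplement") -> False (both Latin)
+#     is_suspiciously_successive_range("Basic Latin", "Cyrillic")           -> True  (unrelated ranges)
+#     is_suspiciously_successive_range(None, "Cyrillic")                    -> True  (unknown range)
+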
+
+@lru_cache(maxsize=2048)
+def mess_ratio(
+ decoded_sequence: str, maximum_threshold: float = 0.2, debug: bool = False
+) -> float:
+ """
+ Compute a mess ratio given a decoded bytes sequence. The maximum threshold does stop the computation earlier.
+ """
+
+ detectors: List[MessDetectorPlugin] = [
+ md_class() for md_class in MessDetectorPlugin.__subclasses__()
+ ]
+
+ length: int = len(decoded_sequence) + 1
+
+ mean_mess_ratio: float = 0.0
+
+ if length < 512:
+ intermediary_mean_mess_ratio_calc: int = 32
+ elif length <= 1024:
+ intermediary_mean_mess_ratio_calc = 64
+ else:
+ intermediary_mean_mess_ratio_calc = 128
+
+ for character, index in zip(decoded_sequence + "\n", range(length)):
+ for detector in detectors:
+ if detector.eligible(character):
+ detector.feed(character)
+
+ if (
+ index > 0 and index % intermediary_mean_mess_ratio_calc == 0
+ ) or index == length - 1:
+ mean_mess_ratio = sum(dt.ratio for dt in detectors)
+
+ if mean_mess_ratio >= maximum_threshold:
+ break
+
+ if debug:
+ logger = getLogger("charset_normalizer")
+
+ logger.log(
+ TRACE,
+ "Mess-detector extended-analysis start. "
+ f"intermediary_mean_mess_ratio_calc={intermediary_mean_mess_ratio_calc} mean_mess_ratio={mean_mess_ratio} "
+ f"maximum_threshold={maximum_threshold}",
+ )
+
+ if len(decoded_sequence) > 16:
+ logger.log(TRACE, f"Starting with: {decoded_sequence[:16]}")
+ logger.log(TRACE, f"Ending with: {decoded_sequence[-16::]}")
+
+ for dt in detectors: # pragma: nocover
+ logger.log(TRACE, f"{dt.__class__}: {dt.ratio}")
+
+ return round(mean_mess_ratio, 3)
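+
+
+# Illustrative usage sketch, not part of the upstream module: a clean text should
+# score near 0.0 while mojibake-looking text scores higher. Runnable only in
+# package context (e.g. `python -m charset_normalizer.md`).
+if __name__ == "__main__":
+    print(mess_ratio("This is a perfectly ordinary English sentence."))  # expected: near 0.0
+    print(mess_ratio("ÃÃÃ©Ã©Ã© â€™â€™ Ã¢Â€Â™ garbled everywhere"))  # expected: noticeably higher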
diff --git a/billinglayer/python/charset_normalizer/md__mypyc.cpython-311-x86_64-linux-gnu.so b/billinglayer/python/charset_normalizer/md__mypyc.cpython-311-x86_64-linux-gnu.so
new file mode 100755
index 0000000..b9b97c9
Binary files /dev/null and b/billinglayer/python/charset_normalizer/md__mypyc.cpython-311-x86_64-linux-gnu.so differ
diff --git a/billinglayer/python/charset_normalizer/models.py b/billinglayer/python/charset_normalizer/models.py
new file mode 100644
index 0000000..7f8ca38
--- /dev/null
+++ b/billinglayer/python/charset_normalizer/models.py
@@ -0,0 +1,337 @@
+from encodings.aliases import aliases
+from hashlib import sha256
+from json import dumps
+from typing import Any, Dict, Iterator, List, Optional, Tuple, Union
+
+from .constant import TOO_BIG_SEQUENCE
+from .utils import iana_name, is_multi_byte_encoding, unicode_range
+
+
+class CharsetMatch:
+ def __init__(
+ self,
+ payload: bytes,
+ guessed_encoding: str,
+ mean_mess_ratio: float,
+ has_sig_or_bom: bool,
+ languages: "CoherenceMatches",
+ decoded_payload: Optional[str] = None,
+ ):
+ self._payload: bytes = payload
+
+ self._encoding: str = guessed_encoding
+ self._mean_mess_ratio: float = mean_mess_ratio
+ self._languages: CoherenceMatches = languages
+ self._has_sig_or_bom: bool = has_sig_or_bom
+ self._unicode_ranges: Optional[List[str]] = None
+
+ self._leaves: List[CharsetMatch] = []
+ self._mean_coherence_ratio: float = 0.0
+
+ self._output_payload: Optional[bytes] = None
+ self._output_encoding: Optional[str] = None
+
+ self._string: Optional[str] = decoded_payload
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, CharsetMatch):
+ raise TypeError(
+ "__eq__ cannot be invoked on {} and {}.".format(
+ str(other.__class__), str(self.__class__)
+ )
+ )
+ return self.encoding == other.encoding and self.fingerprint == other.fingerprint
+
+ def __lt__(self, other: object) -> bool:
+ """
+ Implemented to make sorted available upon CharsetMatches items.
+ """
+ if not isinstance(other, CharsetMatch):
+ raise ValueError
+
+ chaos_difference: float = abs(self.chaos - other.chaos)
+ coherence_difference: float = abs(self.coherence - other.coherence)
+
+ # Below 1% difference --> Use Coherence
+ if chaos_difference < 0.01 and coherence_difference > 0.02:
+ # When having a tough decision, use the result that decoded as many multi-byte as possible.
+ if chaos_difference == 0.0 and self.coherence == other.coherence:
+ return self.multi_byte_usage > other.multi_byte_usage
+ return self.coherence > other.coherence
+
+ return self.chaos < other.chaos
+
+ @property
+ def multi_byte_usage(self) -> float:
+ return 1.0 - len(str(self)) / len(self.raw)
+
+ def __str__(self) -> str:
+ # Lazy Str Loading
+ if self._string is None:
+ self._string = str(self._payload, self._encoding, "strict")
+ return self._string
+
+ def __repr__(self) -> str:
+ return "".format(self.encoding, self.fingerprint)
+
+ def add_submatch(self, other: "CharsetMatch") -> None:
+ if not isinstance(other, CharsetMatch) or other == self:
+ raise ValueError(
+ "Unable to add instance <{}> as a submatch of a CharsetMatch".format(
+ other.__class__
+ )
+ )
+
+ other._string = None # Unload RAM usage; dirty trick.
+ self._leaves.append(other)
+
+ @property
+ def encoding(self) -> str:
+ return self._encoding
+
+ @property
+ def encoding_aliases(self) -> List[str]:
+ """
+ Encoding name are known by many name, using this could help when searching for IBM855 when it's listed as CP855.
+ """
+ also_known_as: List[str] = []
+ for u, p in aliases.items():
+ if self.encoding == u:
+ also_known_as.append(p)
+ elif self.encoding == p:
+ also_known_as.append(u)
+ return also_known_as
+
+ @property
+ def bom(self) -> bool:
+ return self._has_sig_or_bom
+
+ @property
+ def byte_order_mark(self) -> bool:
+ return self._has_sig_or_bom
+
+ @property
+ def languages(self) -> List[str]:
+ """
+ Return the complete list of possible languages found in decoded sequence.
+ Usually not really useful. Returned list may be empty even if 'language' property return something != 'Unknown'.
+ """
+ return [e[0] for e in self._languages]
+
+ @property
+ def language(self) -> str:
+ """
+ Most probable language found in decoded sequence. If none were detected or inferred, the property will return
+ "Unknown".
+ """
+ if not self._languages:
+            # Trying to infer the language based on the given encoding.
+            # It's either English, or we should not commit ourselves in certain cases.
+ if "ascii" in self.could_be_from_charset:
+ return "English"
+
+ # doing it there to avoid circular import
+ from charset_normalizer.cd import encoding_languages, mb_encoding_languages
+
+ languages = (
+ mb_encoding_languages(self.encoding)
+ if is_multi_byte_encoding(self.encoding)
+ else encoding_languages(self.encoding)
+ )
+
+ if len(languages) == 0 or "Latin Based" in languages:
+ return "Unknown"
+
+ return languages[0]
+
+ return self._languages[0][0]
+
+ @property
+ def chaos(self) -> float:
+ return self._mean_mess_ratio
+
+ @property
+ def coherence(self) -> float:
+ if not self._languages:
+ return 0.0
+ return self._languages[0][1]
+
+ @property
+ def percent_chaos(self) -> float:
+ return round(self.chaos * 100, ndigits=3)
+
+ @property
+ def percent_coherence(self) -> float:
+ return round(self.coherence * 100, ndigits=3)
+
+ @property
+ def raw(self) -> bytes:
+ """
+ Original untouched bytes.
+ """
+ return self._payload
+
+ @property
+ def submatch(self) -> List["CharsetMatch"]:
+ return self._leaves
+
+ @property
+ def has_submatch(self) -> bool:
+ return len(self._leaves) > 0
+
+ @property
+ def alphabets(self) -> List[str]:
+ if self._unicode_ranges is not None:
+ return self._unicode_ranges
+ # list detected ranges
+ detected_ranges: List[Optional[str]] = [
+ unicode_range(char) for char in str(self)
+ ]
+ # filter and sort
+ self._unicode_ranges = sorted(list({r for r in detected_ranges if r}))
+ return self._unicode_ranges
+
+ @property
+ def could_be_from_charset(self) -> List[str]:
+ """
+ The complete list of encoding that output the exact SAME str result and therefore could be the originating
+ encoding.
+ This list does include the encoding available in property 'encoding'.
+ """
+ return [self._encoding] + [m.encoding for m in self._leaves]
+
+ def output(self, encoding: str = "utf_8") -> bytes:
+ """
+ Method to get re-encoded bytes payload using given target encoding. Default to UTF-8.
+ Any errors will be simply ignored by the encoder NOT replaced.
+ """
+ if self._output_encoding is None or self._output_encoding != encoding:
+ self._output_encoding = encoding
+ self._output_payload = str(self).encode(encoding, "replace")
+
+ return self._output_payload # type: ignore
+
+ @property
+ def fingerprint(self) -> str:
+ """
+ Retrieve the unique SHA256 computed using the transformed (re-encoded) payload. Not the original one.
+ """
+ return sha256(self.output()).hexdigest()
+
+
+class CharsetMatches:
+ """
+ Container with every CharsetMatch items ordered by default from most probable to the less one.
+ Act like a list(iterable) but does not implements all related methods.
+ """
+
+ def __init__(self, results: Optional[List[CharsetMatch]] = None):
+ self._results: List[CharsetMatch] = sorted(results) if results else []
+
+ def __iter__(self) -> Iterator[CharsetMatch]:
+ yield from self._results
+
+ def __getitem__(self, item: Union[int, str]) -> CharsetMatch:
+ """
+ Retrieve a single item either by its position or encoding name (alias may be used here).
+ Raise KeyError upon invalid index or encoding not present in results.
+ """
+ if isinstance(item, int):
+ return self._results[item]
+ if isinstance(item, str):
+ item = iana_name(item, False)
+ for result in self._results:
+ if item in result.could_be_from_charset:
+ return result
+ raise KeyError
+
+ def __len__(self) -> int:
+ return len(self._results)
+
+ def __bool__(self) -> bool:
+ return len(self._results) > 0
+
+ def append(self, item: CharsetMatch) -> None:
+ """
+ Insert a single match. Will be inserted accordingly to preserve sort.
+ Can be inserted as a submatch.
+ """
+ if not isinstance(item, CharsetMatch):
+ raise ValueError(
+ "Cannot append instance '{}' to CharsetMatches".format(
+ str(item.__class__)
+ )
+ )
+        # We should disable the submatch factoring when the input file is too heavy (to conserve RAM)
+ if len(item.raw) <= TOO_BIG_SEQUENCE:
+ for match in self._results:
+ if match.fingerprint == item.fingerprint and match.chaos == item.chaos:
+ match.add_submatch(item)
+ return
+ self._results.append(item)
+ self._results = sorted(self._results)
+
+ def best(self) -> Optional["CharsetMatch"]:
+ """
+ Simply return the first match. Strict equivalent to matches[0].
+ """
+ if not self._results:
+ return None
+ return self._results[0]
+
+ def first(self) -> Optional["CharsetMatch"]:
+ """
+ Redundant method, call the method best(). Kept for BC reasons.
+ """
+ return self.best()
+
+
+CoherenceMatch = Tuple[str, float]
+CoherenceMatches = List[CoherenceMatch]
+
+
+class CliDetectionResult:
+ def __init__(
+ self,
+ path: str,
+ encoding: Optional[str],
+ encoding_aliases: List[str],
+ alternative_encodings: List[str],
+ language: str,
+ alphabets: List[str],
+ has_sig_or_bom: bool,
+ chaos: float,
+ coherence: float,
+ unicode_path: Optional[str],
+ is_preferred: bool,
+ ):
+ self.path: str = path
+ self.unicode_path: Optional[str] = unicode_path
+ self.encoding: Optional[str] = encoding
+ self.encoding_aliases: List[str] = encoding_aliases
+ self.alternative_encodings: List[str] = alternative_encodings
+ self.language: str = language
+ self.alphabets: List[str] = alphabets
+ self.has_sig_or_bom: bool = has_sig_or_bom
+ self.chaos: float = chaos
+ self.coherence: float = coherence
+ self.is_preferred: bool = is_preferred
+
+ @property
+ def __dict__(self) -> Dict[str, Any]: # type: ignore
+ return {
+ "path": self.path,
+ "encoding": self.encoding,
+ "encoding_aliases": self.encoding_aliases,
+ "alternative_encodings": self.alternative_encodings,
+ "language": self.language,
+ "alphabets": self.alphabets,
+ "has_sig_or_bom": self.has_sig_or_bom,
+ "chaos": self.chaos,
+ "coherence": self.coherence,
+ "unicode_path": self.unicode_path,
+ "is_preferred": self.is_preferred,
+ }
+
+ def to_json(self) -> str:
+ return dumps(self.__dict__, ensure_ascii=True, indent=4)
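+
+
+# Illustrative usage sketch, not part of the upstream module: CharsetMatches keeps
+# its items sorted via CharsetMatch.__lt__, so best() is the lowest-chaos match.
+# Runnable only in package context (e.g. `python -m charset_normalizer.models`).
+if __name__ == "__main__":
+    clean = CharsetMatch(b"hello", "ascii", 0.0, False, [])
+    messy = CharsetMatch(b"hello", "cp1252", 0.2, False, [])
+    print(CharsetMatches([clean, messy]).best().encoding)  # expected: 'ascii'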
diff --git a/billinglayer/python/charset_normalizer/py.typed b/billinglayer/python/charset_normalizer/py.typed
new file mode 100644
index 0000000..e69de29
diff --git a/billinglayer/python/charset_normalizer/utils.py b/billinglayer/python/charset_normalizer/utils.py
new file mode 100644
index 0000000..bf2767a
--- /dev/null
+++ b/billinglayer/python/charset_normalizer/utils.py
@@ -0,0 +1,414 @@
+import importlib
+import logging
+import unicodedata
+from codecs import IncrementalDecoder
+from encodings.aliases import aliases
+from functools import lru_cache
+from re import findall
+from typing import Generator, List, Optional, Set, Tuple, Union
+
+from _multibytecodec import MultibyteIncrementalDecoder
+
+from .constant import (
+ ENCODING_MARKS,
+ IANA_SUPPORTED_SIMILAR,
+ RE_POSSIBLE_ENCODING_INDICATION,
+ UNICODE_RANGES_COMBINED,
+ UNICODE_SECONDARY_RANGE_KEYWORD,
+ UTF8_MAXIMAL_ALLOCATION,
+)
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_accentuated(character: str) -> bool:
+ try:
+ description: str = unicodedata.name(character)
+ except ValueError:
+ return False
+ return (
+ "WITH GRAVE" in description
+ or "WITH ACUTE" in description
+ or "WITH CEDILLA" in description
+ or "WITH DIAERESIS" in description
+ or "WITH CIRCUMFLEX" in description
+ or "WITH TILDE" in description
+ )
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def remove_accent(character: str) -> str:
+ decomposed: str = unicodedata.decomposition(character)
+ if not decomposed:
+ return character
+
+ codes: List[str] = decomposed.split(" ")
+
+ return chr(int(codes[0], 16))
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def unicode_range(character: str) -> Optional[str]:
+ """
+ Retrieve the Unicode range official name from a single character.
+ """
+ character_ord: int = ord(character)
+
+ for range_name, ord_range in UNICODE_RANGES_COMBINED.items():
+ if character_ord in ord_range:
+ return range_name
+
+ return None
+
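+
+# Illustrative sketch (comment only): typical return values, assuming the standard
+# UNICODE_RANGES_COMBINED table from .constant:
+#     unicode_range("a") -> "Basic Latin"
+#     unicode_range("é") -> "Latin-1 Supplement"
+#     unicode_range("猫") -> "CJK Unified Ideographs"
+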
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_latin(character: str) -> bool:
+ try:
+ description: str = unicodedata.name(character)
+ except ValueError:
+ return False
+ return "LATIN" in description
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_ascii(character: str) -> bool:
+ try:
+ character.encode("ascii")
+ except UnicodeEncodeError:
+ return False
+ return True
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_punctuation(character: str) -> bool:
+ character_category: str = unicodedata.category(character)
+
+ if "P" in character_category:
+ return True
+
+ character_range: Optional[str] = unicode_range(character)
+
+ if character_range is None:
+ return False
+
+ return "Punctuation" in character_range
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_symbol(character: str) -> bool:
+ character_category: str = unicodedata.category(character)
+
+ if "S" in character_category or "N" in character_category:
+ return True
+
+ character_range: Optional[str] = unicode_range(character)
+
+ if character_range is None:
+ return False
+
+ return "Forms" in character_range
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_emoticon(character: str) -> bool:
+ character_range: Optional[str] = unicode_range(character)
+
+ if character_range is None:
+ return False
+
+ return "Emoticons" in character_range
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_separator(character: str) -> bool:
+ if character.isspace() or character in {"|", "+", "<", ">"}:
+ return True
+
+ character_category: str = unicodedata.category(character)
+
+ return "Z" in character_category or character_category in {"Po", "Pd", "Pc"}
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_case_variable(character: str) -> bool:
+ return character.islower() != character.isupper()
+
+
+def is_private_use_only(character: str) -> bool:
+ character_category: str = unicodedata.category(character)
+
+ return character_category == "Co"
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_cjk(character: str) -> bool:
+ try:
+ character_name = unicodedata.name(character)
+ except ValueError:
+ return False
+
+ return "CJK" in character_name
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_hiragana(character: str) -> bool:
+ try:
+ character_name = unicodedata.name(character)
+ except ValueError:
+ return False
+
+ return "HIRAGANA" in character_name
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_katakana(character: str) -> bool:
+ try:
+ character_name = unicodedata.name(character)
+ except ValueError:
+ return False
+
+ return "KATAKANA" in character_name
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_hangul(character: str) -> bool:
+ try:
+ character_name = unicodedata.name(character)
+ except ValueError:
+ return False
+
+ return "HANGUL" in character_name
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_thai(character: str) -> bool:
+ try:
+ character_name = unicodedata.name(character)
+ except ValueError:
+ return False
+
+ return "THAI" in character_name
+
+
+@lru_cache(maxsize=len(UNICODE_RANGES_COMBINED))
+def is_unicode_range_secondary(range_name: str) -> bool:
+ return any(keyword in range_name for keyword in UNICODE_SECONDARY_RANGE_KEYWORD)
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_unprintable(character: str) -> bool:
+ return (
+ character.isspace() is False # includes \n \t \r \v
+ and character.isprintable() is False
+ and character != "\x1A" # Why? Its the ASCII substitute character.
+ and character != "\ufeff" # bug discovered in Python,
+ # Zero Width No-Break Space located in Arabic Presentation Forms-B, Unicode 1.1 not acknowledged as space.
+ )
+
+
+def any_specified_encoding(sequence: bytes, search_zone: int = 4096) -> Optional[str]:
+ """
+ Extract using ASCII-only decoder any specified encoding in the first n-bytes.
+ """
+ if not isinstance(sequence, bytes):
+ raise TypeError
+
+ seq_len: int = len(sequence)
+
+ results: List[str] = findall(
+ RE_POSSIBLE_ENCODING_INDICATION,
+ sequence[: min(seq_len, search_zone)].decode("ascii", errors="ignore"),
+ )
+
+ if len(results) == 0:
+ return None
+
+ for specified_encoding in results:
+ specified_encoding = specified_encoding.lower().replace("-", "_")
+
+ encoding_alias: str
+ encoding_iana: str
+
+ for encoding_alias, encoding_iana in aliases.items():
+ if encoding_alias == specified_encoding:
+ return encoding_iana
+ if encoding_iana == specified_encoding:
+ return encoding_iana
+
+ return None
+
+
+@lru_cache(maxsize=128)
+def is_multi_byte_encoding(name: str) -> bool:
+ """
+ Verify is a specific encoding is a multi byte one based on it IANA name
+ """
+ return name in {
+ "utf_8",
+ "utf_8_sig",
+ "utf_16",
+ "utf_16_be",
+ "utf_16_le",
+ "utf_32",
+ "utf_32_le",
+ "utf_32_be",
+ "utf_7",
+ } or issubclass(
+ importlib.import_module("encodings.{}".format(name)).IncrementalDecoder,
+ MultibyteIncrementalDecoder,
+ )
+
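+
+# Illustrative sketch (comment only):
+#     is_multi_byte_encoding("utf_8")   -> True  (listed explicitly above)
+#     is_multi_byte_encoding("gb18030") -> True  (its IncrementalDecoder is a MultibyteIncrementalDecoder)
+#     is_multi_byte_encoding("cp1252")  -> False (single-byte code page)
+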
+
+def identify_sig_or_bom(sequence: bytes) -> Tuple[Optional[str], bytes]:
+ """
+ Identify and extract SIG/BOM in given sequence.
+ """
+
+ for iana_encoding in ENCODING_MARKS:
+ marks: Union[bytes, List[bytes]] = ENCODING_MARKS[iana_encoding]
+
+ if isinstance(marks, bytes):
+ marks = [marks]
+
+ for mark in marks:
+ if sequence.startswith(mark):
+ return iana_encoding, mark
+
+ return None, b""
+
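+
+# Illustrative sketch (comment only), assuming the usual ENCODING_MARKS table:
+#     identify_sig_or_bom(b"\xef\xbb\xbfhello") -> ("utf_8", b"\xef\xbb\xbf")
+#     identify_sig_or_bom(b"hello")             -> (None, b"")
+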
+
+def should_strip_sig_or_bom(iana_encoding: str) -> bool:
+ return iana_encoding not in {"utf_16", "utf_32"}
+
+
+def iana_name(cp_name: str, strict: bool = True) -> str:
+ cp_name = cp_name.lower().replace("-", "_")
+
+ encoding_alias: str
+ encoding_iana: str
+
+ for encoding_alias, encoding_iana in aliases.items():
+ if cp_name in [encoding_alias, encoding_iana]:
+ return encoding_iana
+
+ if strict:
+ raise ValueError("Unable to retrieve IANA for '{}'".format(cp_name))
+
+ return cp_name
+
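+
+# Illustrative sketch (comment only): names and aliases are normalized to the
+# Python codec module name:
+#     iana_name("CP1252")   -> "cp1252"
+#     iana_name("latin-1")  -> "latin_1"
+#     iana_name("nonsense", strict=False) -> "nonsense"
+#     iana_name("nonsense") -> raises ValueError
+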
+
+def range_scan(decoded_sequence: str) -> List[str]:
+ ranges: Set[str] = set()
+
+ for character in decoded_sequence:
+ character_range: Optional[str] = unicode_range(character)
+
+ if character_range is None:
+ continue
+
+ ranges.add(character_range)
+
+ return list(ranges)
+
+
+def cp_similarity(iana_name_a: str, iana_name_b: str) -> float:
+ if is_multi_byte_encoding(iana_name_a) or is_multi_byte_encoding(iana_name_b):
+ return 0.0
+
+ decoder_a = importlib.import_module(
+ "encodings.{}".format(iana_name_a)
+ ).IncrementalDecoder
+ decoder_b = importlib.import_module(
+ "encodings.{}".format(iana_name_b)
+ ).IncrementalDecoder
+
+ id_a: IncrementalDecoder = decoder_a(errors="ignore")
+ id_b: IncrementalDecoder = decoder_b(errors="ignore")
+
+ character_match_count: int = 0
+
+ for i in range(255):
+ to_be_decoded: bytes = bytes([i])
+ if id_a.decode(to_be_decoded) == id_b.decode(to_be_decoded):
+ character_match_count += 1
+
+ return character_match_count / 254
+
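+
+# Illustrative sketch (comment only): the ratio is the fraction of single-byte
+# values that both decoders map to the same character, e.g.
+#     cp_similarity("cp1252", "latin_1") -> high (they differ mostly in 0x80-0x9F)
+#     cp_similarity("cp1252", "cp866")   -> lower (they agree mostly on the ASCII half)
+#     cp_similarity("utf_8", "cp1252")   -> 0.0  (multi-byte short-circuits)
+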
+
+def is_cp_similar(iana_name_a: str, iana_name_b: str) -> bool:
+ """
+ Determine if two code page are at least 80% similar. IANA_SUPPORTED_SIMILAR dict was generated using
+ the function cp_similarity.
+ """
+ return (
+ iana_name_a in IANA_SUPPORTED_SIMILAR
+ and iana_name_b in IANA_SUPPORTED_SIMILAR[iana_name_a]
+ )
+
+
+def set_logging_handler(
+ name: str = "charset_normalizer",
+ level: int = logging.INFO,
+ format_string: str = "%(asctime)s | %(levelname)s | %(message)s",
+) -> None:
+ logger = logging.getLogger(name)
+ logger.setLevel(level)
+
+ handler = logging.StreamHandler()
+ handler.setFormatter(logging.Formatter(format_string))
+ logger.addHandler(handler)
+
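+
+# Illustrative sketch (comment only): attach a StreamHandler so that the package
+# logs, including the custom TRACE level (5), become visible:
+#     set_logging_handler("charset_normalizer", level=logging.DEBUG)
+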
+
+def cut_sequence_chunks(
+ sequences: bytes,
+ encoding_iana: str,
+ offsets: range,
+ chunk_size: int,
+ bom_or_sig_available: bool,
+ strip_sig_or_bom: bool,
+ sig_payload: bytes,
+ is_multi_byte_decoder: bool,
+ decoded_payload: Optional[str] = None,
+) -> Generator[str, None, None]:
+ if decoded_payload and is_multi_byte_decoder is False:
+ for i in offsets:
+ chunk = decoded_payload[i : i + chunk_size]
+ if not chunk:
+ break
+ yield chunk
+ else:
+ for i in offsets:
+ chunk_end = i + chunk_size
+ if chunk_end > len(sequences) + 8:
+ continue
+
+ cut_sequence = sequences[i : i + chunk_size]
+
+ if bom_or_sig_available and strip_sig_or_bom is False:
+ cut_sequence = sig_payload + cut_sequence
+
+ chunk = cut_sequence.decode(
+ encoding_iana,
+ errors="ignore" if is_multi_byte_decoder else "strict",
+ )
+
+ # multi-byte bad cutting detector and adjustment
+ # not the cleanest way to perform that fix but clever enough for now.
+ if is_multi_byte_decoder and i > 0:
+ chunk_partial_size_chk: int = min(chunk_size, 16)
+
+ if (
+ decoded_payload
+ and chunk[:chunk_partial_size_chk] not in decoded_payload
+ ):
+ for j in range(i, i - 4, -1):
+ cut_sequence = sequences[j:chunk_end]
+
+ if bom_or_sig_available and strip_sig_or_bom is False:
+ cut_sequence = sig_payload + cut_sequence
+
+ chunk = cut_sequence.decode(encoding_iana, errors="ignore")
+
+ if chunk[:chunk_partial_size_chk] in decoded_payload:
+ break
+
+ yield chunk
diff --git a/billinglayer/python/charset_normalizer/version.py b/billinglayer/python/charset_normalizer/version.py
new file mode 100644
index 0000000..5eed49a
--- /dev/null
+++ b/billinglayer/python/charset_normalizer/version.py
@@ -0,0 +1,6 @@
+"""
+Expose version
+"""
+
+__version__ = "3.2.0"
+VERSION = __version__.split(".")
diff --git a/billinglayer/python/click/__init__.py b/billinglayer/python/click/__init__.py
new file mode 100644
index 0000000..9a1dab0
--- /dev/null
+++ b/billinglayer/python/click/__init__.py
@@ -0,0 +1,73 @@
+"""
+Click is a simple Python module inspired by the stdlib optparse to make
+writing command line scripts fun. Unlike other modules, it's based
+around a simple API that does not come with too much magic and is
+composable.
+"""
+from .core import Argument as Argument
+from .core import BaseCommand as BaseCommand
+from .core import Command as Command
+from .core import CommandCollection as CommandCollection
+from .core import Context as Context
+from .core import Group as Group
+from .core import MultiCommand as MultiCommand
+from .core import Option as Option
+from .core import Parameter as Parameter
+from .decorators import argument as argument
+from .decorators import command as command
+from .decorators import confirmation_option as confirmation_option
+from .decorators import group as group
+from .decorators import help_option as help_option
+from .decorators import make_pass_decorator as make_pass_decorator
+from .decorators import option as option
+from .decorators import pass_context as pass_context
+from .decorators import pass_obj as pass_obj
+from .decorators import password_option as password_option
+from .decorators import version_option as version_option
+from .exceptions import Abort as Abort
+from .exceptions import BadArgumentUsage as BadArgumentUsage
+from .exceptions import BadOptionUsage as BadOptionUsage
+from .exceptions import BadParameter as BadParameter
+from .exceptions import ClickException as ClickException
+from .exceptions import FileError as FileError
+from .exceptions import MissingParameter as MissingParameter
+from .exceptions import NoSuchOption as NoSuchOption
+from .exceptions import UsageError as UsageError
+from .formatting import HelpFormatter as HelpFormatter
+from .formatting import wrap_text as wrap_text
+from .globals import get_current_context as get_current_context
+from .parser import OptionParser as OptionParser
+from .termui import clear as clear
+from .termui import confirm as confirm
+from .termui import echo_via_pager as echo_via_pager
+from .termui import edit as edit
+from .termui import getchar as getchar
+from .termui import launch as launch
+from .termui import pause as pause
+from .termui import progressbar as progressbar
+from .termui import prompt as prompt
+from .termui import secho as secho
+from .termui import style as style
+from .termui import unstyle as unstyle
+from .types import BOOL as BOOL
+from .types import Choice as Choice
+from .types import DateTime as DateTime
+from .types import File as File
+from .types import FLOAT as FLOAT
+from .types import FloatRange as FloatRange
+from .types import INT as INT
+from .types import IntRange as IntRange
+from .types import ParamType as ParamType
+from .types import Path as Path
+from .types import STRING as STRING
+from .types import Tuple as Tuple
+from .types import UNPROCESSED as UNPROCESSED
+from .types import UUID as UUID
+from .utils import echo as echo
+from .utils import format_filename as format_filename
+from .utils import get_app_dir as get_app_dir
+from .utils import get_binary_stream as get_binary_stream
+from .utils import get_text_stream as get_text_stream
+from .utils import open_file as open_file
+
+__version__ = "8.1.7"
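+
+# Illustrative sketch (comment only, not part of upstream Click): a minimal
+# command built from the names re-exported above:
+#
+#     @command()
+#     @option("--count", default=1, help="Number of greetings.")
+#     @argument("name")
+#     def hello(count: int, name: str) -> None:
+#         for _ in range(count):
+#             echo(f"Hello {name}!")
+#
+#     if __name__ == "__main__":
+#         hello()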
diff --git a/billinglayer/python/click/__pycache__/__init__.cpython-311.pyc b/billinglayer/python/click/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..1c4b03d
Binary files /dev/null and b/billinglayer/python/click/__pycache__/__init__.cpython-311.pyc differ
diff --git a/billinglayer/python/click/__pycache__/_compat.cpython-311.pyc b/billinglayer/python/click/__pycache__/_compat.cpython-311.pyc
new file mode 100644
index 0000000..aa105fe
Binary files /dev/null and b/billinglayer/python/click/__pycache__/_compat.cpython-311.pyc differ
diff --git a/billinglayer/python/click/__pycache__/_termui_impl.cpython-311.pyc b/billinglayer/python/click/__pycache__/_termui_impl.cpython-311.pyc
new file mode 100644
index 0000000..206cfdd
Binary files /dev/null and b/billinglayer/python/click/__pycache__/_termui_impl.cpython-311.pyc differ
diff --git a/billinglayer/python/click/__pycache__/_textwrap.cpython-311.pyc b/billinglayer/python/click/__pycache__/_textwrap.cpython-311.pyc
new file mode 100644
index 0000000..ef2c062
Binary files /dev/null and b/billinglayer/python/click/__pycache__/_textwrap.cpython-311.pyc differ
diff --git a/billinglayer/python/click/__pycache__/_winconsole.cpython-311.pyc b/billinglayer/python/click/__pycache__/_winconsole.cpython-311.pyc
new file mode 100644
index 0000000..8eec081
Binary files /dev/null and b/billinglayer/python/click/__pycache__/_winconsole.cpython-311.pyc differ
diff --git a/billinglayer/python/click/__pycache__/core.cpython-311.pyc b/billinglayer/python/click/__pycache__/core.cpython-311.pyc
new file mode 100644
index 0000000..893a478
Binary files /dev/null and b/billinglayer/python/click/__pycache__/core.cpython-311.pyc differ
diff --git a/billinglayer/python/click/__pycache__/decorators.cpython-311.pyc b/billinglayer/python/click/__pycache__/decorators.cpython-311.pyc
new file mode 100644
index 0000000..221aa30
Binary files /dev/null and b/billinglayer/python/click/__pycache__/decorators.cpython-311.pyc differ
diff --git a/billinglayer/python/click/__pycache__/exceptions.cpython-311.pyc b/billinglayer/python/click/__pycache__/exceptions.cpython-311.pyc
new file mode 100644
index 0000000..62589f2
Binary files /dev/null and b/billinglayer/python/click/__pycache__/exceptions.cpython-311.pyc differ
diff --git a/billinglayer/python/click/__pycache__/formatting.cpython-311.pyc b/billinglayer/python/click/__pycache__/formatting.cpython-311.pyc
new file mode 100644
index 0000000..d4defa2
Binary files /dev/null and b/billinglayer/python/click/__pycache__/formatting.cpython-311.pyc differ
diff --git a/billinglayer/python/click/__pycache__/globals.cpython-311.pyc b/billinglayer/python/click/__pycache__/globals.cpython-311.pyc
new file mode 100644
index 0000000..d98bf27
Binary files /dev/null and b/billinglayer/python/click/__pycache__/globals.cpython-311.pyc differ
diff --git a/billinglayer/python/click/__pycache__/parser.cpython-311.pyc b/billinglayer/python/click/__pycache__/parser.cpython-311.pyc
new file mode 100644
index 0000000..f7ea009
Binary files /dev/null and b/billinglayer/python/click/__pycache__/parser.cpython-311.pyc differ
diff --git a/billinglayer/python/click/__pycache__/shell_completion.cpython-311.pyc b/billinglayer/python/click/__pycache__/shell_completion.cpython-311.pyc
new file mode 100644
index 0000000..cdafa8f
Binary files /dev/null and b/billinglayer/python/click/__pycache__/shell_completion.cpython-311.pyc differ
diff --git a/billinglayer/python/click/__pycache__/termui.cpython-311.pyc b/billinglayer/python/click/__pycache__/termui.cpython-311.pyc
new file mode 100644
index 0000000..218f9e0
Binary files /dev/null and b/billinglayer/python/click/__pycache__/termui.cpython-311.pyc differ
diff --git a/billinglayer/python/click/__pycache__/testing.cpython-311.pyc b/billinglayer/python/click/__pycache__/testing.cpython-311.pyc
new file mode 100644
index 0000000..21e2744
Binary files /dev/null and b/billinglayer/python/click/__pycache__/testing.cpython-311.pyc differ
diff --git a/billinglayer/python/click/__pycache__/types.cpython-311.pyc b/billinglayer/python/click/__pycache__/types.cpython-311.pyc
new file mode 100644
index 0000000..4575bc6
Binary files /dev/null and b/billinglayer/python/click/__pycache__/types.cpython-311.pyc differ
diff --git a/billinglayer/python/click/__pycache__/utils.cpython-311.pyc b/billinglayer/python/click/__pycache__/utils.cpython-311.pyc
new file mode 100644
index 0000000..5767500
Binary files /dev/null and b/billinglayer/python/click/__pycache__/utils.cpython-311.pyc differ
diff --git a/billinglayer/python/click/_compat.py b/billinglayer/python/click/_compat.py
new file mode 100644
index 0000000..23f8866
--- /dev/null
+++ b/billinglayer/python/click/_compat.py
@@ -0,0 +1,623 @@
+import codecs
+import io
+import os
+import re
+import sys
+import typing as t
+from weakref import WeakKeyDictionary
+
+CYGWIN = sys.platform.startswith("cygwin")
+WIN = sys.platform.startswith("win")
+auto_wrap_for_ansi: t.Optional[t.Callable[[t.TextIO], t.TextIO]] = None
+_ansi_re = re.compile(r"\033\[[;?0-9]*[a-zA-Z]")
+
+
+def _make_text_stream(
+ stream: t.BinaryIO,
+ encoding: t.Optional[str],
+ errors: t.Optional[str],
+ force_readable: bool = False,
+ force_writable: bool = False,
+) -> t.TextIO:
+ if encoding is None:
+ encoding = get_best_encoding(stream)
+ if errors is None:
+ errors = "replace"
+ return _NonClosingTextIOWrapper(
+ stream,
+ encoding,
+ errors,
+ line_buffering=True,
+ force_readable=force_readable,
+ force_writable=force_writable,
+ )
+
+
+def is_ascii_encoding(encoding: str) -> bool:
+ """Checks if a given encoding is ascii."""
+ try:
+ return codecs.lookup(encoding).name == "ascii"
+ except LookupError:
+ return False
+
+
+def get_best_encoding(stream: t.IO[t.Any]) -> str:
+ """Returns the default stream encoding if not found."""
+ rv = getattr(stream, "encoding", None) or sys.getdefaultencoding()
+ if is_ascii_encoding(rv):
+ return "utf-8"
+ return rv
+
+
+class _NonClosingTextIOWrapper(io.TextIOWrapper):
+ def __init__(
+ self,
+ stream: t.BinaryIO,
+ encoding: t.Optional[str],
+ errors: t.Optional[str],
+ force_readable: bool = False,
+ force_writable: bool = False,
+ **extra: t.Any,
+ ) -> None:
+ self._stream = stream = t.cast(
+ t.BinaryIO, _FixupStream(stream, force_readable, force_writable)
+ )
+ super().__init__(stream, encoding, errors, **extra)
+
+ def __del__(self) -> None:
+ try:
+ self.detach()
+ except Exception:
+ pass
+
+ def isatty(self) -> bool:
+ # https://bitbucket.org/pypy/pypy/issue/1803
+ return self._stream.isatty()
+
+
+class _FixupStream:
+ """The new io interface needs more from streams than streams
+ traditionally implement. As such, this fix-up code is necessary in
+ some circumstances.
+
+    The forcing of the readable and writable flags is there because some tools
+    put badly patched objects on sys (one such offender being certain versions
+    of Jupyter Notebook).
+ """
+
+ def __init__(
+ self,
+ stream: t.BinaryIO,
+ force_readable: bool = False,
+ force_writable: bool = False,
+ ):
+ self._stream = stream
+ self._force_readable = force_readable
+ self._force_writable = force_writable
+
+ def __getattr__(self, name: str) -> t.Any:
+ return getattr(self._stream, name)
+
+ def read1(self, size: int) -> bytes:
+ f = getattr(self._stream, "read1", None)
+
+ if f is not None:
+ return t.cast(bytes, f(size))
+
+ return self._stream.read(size)
+
+ def readable(self) -> bool:
+ if self._force_readable:
+ return True
+ x = getattr(self._stream, "readable", None)
+ if x is not None:
+ return t.cast(bool, x())
+ try:
+ self._stream.read(0)
+ except Exception:
+ return False
+ return True
+
+ def writable(self) -> bool:
+ if self._force_writable:
+ return True
+ x = getattr(self._stream, "writable", None)
+ if x is not None:
+ return t.cast(bool, x())
+ try:
+ self._stream.write("") # type: ignore
+ except Exception:
+ try:
+ self._stream.write(b"")
+ except Exception:
+ return False
+ return True
+
+ def seekable(self) -> bool:
+ x = getattr(self._stream, "seekable", None)
+ if x is not None:
+ return t.cast(bool, x())
+ try:
+ self._stream.seek(self._stream.tell())
+ except Exception:
+ return False
+ return True
+
+
+def _is_binary_reader(stream: t.IO[t.Any], default: bool = False) -> bool:
+ try:
+ return isinstance(stream.read(0), bytes)
+    except Exception:
+        # This happens in some cases where the stream was already
+        # closed. In this case, we assume the default.
+        return default
+
+
+def _is_binary_writer(stream: t.IO[t.Any], default: bool = False) -> bool:
+ try:
+ stream.write(b"")
+ except Exception:
+ try:
+ stream.write("")
+ return False
+ except Exception:
+ pass
+ return default
+ return True
+
+
+def _find_binary_reader(stream: t.IO[t.Any]) -> t.Optional[t.BinaryIO]:
+ # We need to figure out if the given stream is already binary.
+ # This can happen because the official docs recommend detaching
+ # the streams to get binary streams. Some code might do this, so
+ # we need to deal with this case explicitly.
+ if _is_binary_reader(stream, False):
+ return t.cast(t.BinaryIO, stream)
+
+ buf = getattr(stream, "buffer", None)
+
+ # Same situation here; this time we assume that the buffer is
+ # actually binary in case it's closed.
+ if buf is not None and _is_binary_reader(buf, True):
+ return t.cast(t.BinaryIO, buf)
+
+ return None
+
+
+def _find_binary_writer(stream: t.IO[t.Any]) -> t.Optional[t.BinaryIO]:
+ # We need to figure out if the given stream is already binary.
+ # This can happen because the official docs recommend detaching
+ # the streams to get binary streams. Some code might do this, so
+ # we need to deal with this case explicitly.
+ if _is_binary_writer(stream, False):
+ return t.cast(t.BinaryIO, stream)
+
+ buf = getattr(stream, "buffer", None)
+
+ # Same situation here; this time we assume that the buffer is
+ # actually binary in case it's closed.
+ if buf is not None and _is_binary_writer(buf, True):
+ return t.cast(t.BinaryIO, buf)
+
+ return None
+
+
+def _stream_is_misconfigured(stream: t.TextIO) -> bool:
+ """A stream is misconfigured if its encoding is ASCII."""
+ # If the stream does not have an encoding set, we assume it's set
+ # to ASCII. This appears to happen in certain unittest
+ # environments. It's not quite clear what the correct behavior is
+ # but this at least will force Click to recover somehow.
+ return is_ascii_encoding(getattr(stream, "encoding", None) or "ascii")
+
+
+def _is_compat_stream_attr(stream: t.TextIO, attr: str, value: t.Optional[str]) -> bool:
+ """A stream attribute is compatible if it is equal to the
+ desired value or the desired value is unset and the attribute
+ has a value.
+ """
+ stream_value = getattr(stream, attr, None)
+ return stream_value == value or (value is None and stream_value is not None)
+
+
+def _is_compatible_text_stream(
+ stream: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str]
+) -> bool:
+ """Check if a stream's encoding and errors attributes are
+ compatible with the desired values.
+ """
+ return _is_compat_stream_attr(
+ stream, "encoding", encoding
+ ) and _is_compat_stream_attr(stream, "errors", errors)
+
+
+def _force_correct_text_stream(
+ text_stream: t.IO[t.Any],
+ encoding: t.Optional[str],
+ errors: t.Optional[str],
+ is_binary: t.Callable[[t.IO[t.Any], bool], bool],
+ find_binary: t.Callable[[t.IO[t.Any]], t.Optional[t.BinaryIO]],
+ force_readable: bool = False,
+ force_writable: bool = False,
+) -> t.TextIO:
+ if is_binary(text_stream, False):
+ binary_reader = t.cast(t.BinaryIO, text_stream)
+ else:
+ text_stream = t.cast(t.TextIO, text_stream)
+ # If the stream looks compatible, and won't default to a
+ # misconfigured ascii encoding, return it as-is.
+ if _is_compatible_text_stream(text_stream, encoding, errors) and not (
+ encoding is None and _stream_is_misconfigured(text_stream)
+ ):
+ return text_stream
+
+ # Otherwise, get the underlying binary reader.
+ possible_binary_reader = find_binary(text_stream)
+
+ # If that's not possible, silently use the original reader
+ # and get mojibake instead of exceptions.
+ if possible_binary_reader is None:
+ return text_stream
+
+ binary_reader = possible_binary_reader
+
+ # Default errors to replace instead of strict in order to get
+ # something that works.
+ if errors is None:
+ errors = "replace"
+
+ # Wrap the binary stream in a text stream with the correct
+ # encoding parameters.
+ return _make_text_stream(
+ binary_reader,
+ encoding,
+ errors,
+ force_readable=force_readable,
+ force_writable=force_writable,
+ )
+
+
+def _force_correct_text_reader(
+ text_reader: t.IO[t.Any],
+ encoding: t.Optional[str],
+ errors: t.Optional[str],
+ force_readable: bool = False,
+) -> t.TextIO:
+ return _force_correct_text_stream(
+ text_reader,
+ encoding,
+ errors,
+ _is_binary_reader,
+ _find_binary_reader,
+ force_readable=force_readable,
+ )
+
+
+def _force_correct_text_writer(
+ text_writer: t.IO[t.Any],
+ encoding: t.Optional[str],
+ errors: t.Optional[str],
+ force_writable: bool = False,
+) -> t.TextIO:
+ return _force_correct_text_stream(
+ text_writer,
+ encoding,
+ errors,
+ _is_binary_writer,
+ _find_binary_writer,
+ force_writable=force_writable,
+ )
+
+
+def get_binary_stdin() -> t.BinaryIO:
+ reader = _find_binary_reader(sys.stdin)
+ if reader is None:
+ raise RuntimeError("Was not able to determine binary stream for sys.stdin.")
+ return reader
+
+
+def get_binary_stdout() -> t.BinaryIO:
+ writer = _find_binary_writer(sys.stdout)
+ if writer is None:
+ raise RuntimeError("Was not able to determine binary stream for sys.stdout.")
+ return writer
+
+
+def get_binary_stderr() -> t.BinaryIO:
+ writer = _find_binary_writer(sys.stderr)
+ if writer is None:
+ raise RuntimeError("Was not able to determine binary stream for sys.stderr.")
+ return writer
+
+
+def get_text_stdin(
+ encoding: t.Optional[str] = None, errors: t.Optional[str] = None
+) -> t.TextIO:
+ rv = _get_windows_console_stream(sys.stdin, encoding, errors)
+ if rv is not None:
+ return rv
+ return _force_correct_text_reader(sys.stdin, encoding, errors, force_readable=True)
+
+
+def get_text_stdout(
+ encoding: t.Optional[str] = None, errors: t.Optional[str] = None
+) -> t.TextIO:
+ rv = _get_windows_console_stream(sys.stdout, encoding, errors)
+ if rv is not None:
+ return rv
+ return _force_correct_text_writer(sys.stdout, encoding, errors, force_writable=True)
+
+
+def get_text_stderr(
+ encoding: t.Optional[str] = None, errors: t.Optional[str] = None
+) -> t.TextIO:
+ rv = _get_windows_console_stream(sys.stderr, encoding, errors)
+ if rv is not None:
+ return rv
+ return _force_correct_text_writer(sys.stderr, encoding, errors, force_writable=True)
+
+
+def _wrap_io_open(
+ file: t.Union[str, "os.PathLike[str]", int],
+ mode: str,
+ encoding: t.Optional[str],
+ errors: t.Optional[str],
+) -> t.IO[t.Any]:
+ """Handles not passing ``encoding`` and ``errors`` in binary mode."""
+ if "b" in mode:
+ return open(file, mode)
+
+ return open(file, mode, encoding=encoding, errors=errors)
+
+
+def open_stream(
+ filename: "t.Union[str, os.PathLike[str]]",
+ mode: str = "r",
+ encoding: t.Optional[str] = None,
+ errors: t.Optional[str] = "strict",
+ atomic: bool = False,
+) -> t.Tuple[t.IO[t.Any], bool]:
+ binary = "b" in mode
+ filename = os.fspath(filename)
+
+ # Standard streams first. These are simple because they ignore the
+ # atomic flag. Use fsdecode to handle Path("-").
+ if os.fsdecode(filename) == "-":
+ if any(m in mode for m in ["w", "a", "x"]):
+ if binary:
+ return get_binary_stdout(), False
+ return get_text_stdout(encoding=encoding, errors=errors), False
+ if binary:
+ return get_binary_stdin(), False
+ return get_text_stdin(encoding=encoding, errors=errors), False
+
+ # Non-atomic writes directly go out through the regular open functions.
+ if not atomic:
+ return _wrap_io_open(filename, mode, encoding, errors), True
+
+ # Some usability stuff for atomic writes
+ if "a" in mode:
+ raise ValueError(
+ "Appending to an existing file is not supported, because that"
+ " would involve an expensive `copy`-operation to a temporary"
+ " file. Open the file in normal `w`-mode and copy explicitly"
+ " if that's what you're after."
+ )
+ if "x" in mode:
+ raise ValueError("Use the `overwrite`-parameter instead.")
+ if "w" not in mode:
+ raise ValueError("Atomic writes only make sense with `w`-mode.")
+
+ # Atomic writes are more complicated. They work by opening a file
+ # as a proxy in the same folder and then using the fdopen
+ # functionality to wrap it in a Python file. Then we wrap it in an
+ # atomic file that moves the file over on close.
+ import errno
+ import random
+
+ try:
+ perm: t.Optional[int] = os.stat(filename).st_mode
+ except OSError:
+ perm = None
+
+ flags = os.O_RDWR | os.O_CREAT | os.O_EXCL
+
+ if binary:
+ flags |= getattr(os, "O_BINARY", 0)
+
+ while True:
+ tmp_filename = os.path.join(
+ os.path.dirname(filename),
+ f".__atomic-write{random.randrange(1 << 32):08x}",
+ )
+ try:
+ fd = os.open(tmp_filename, flags, 0o666 if perm is None else perm)
+ break
+ except OSError as e:
+ if e.errno == errno.EEXIST or (
+ os.name == "nt"
+ and e.errno == errno.EACCES
+ and os.path.isdir(e.filename)
+ and os.access(e.filename, os.W_OK)
+ ):
+ continue
+ raise
+
+ if perm is not None:
+ os.chmod(tmp_filename, perm) # in case perm includes bits in umask
+
+ f = _wrap_io_open(fd, mode, encoding, errors)
+ af = _AtomicFile(f, tmp_filename, os.path.realpath(filename))
+ return t.cast(t.IO[t.Any], af), True
+
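+# A minimal usage sketch for atomic writes ("out.txt" is illustrative):
+#
+#     f, should_close = open_stream("out.txt", "w", atomic=True)
+#     try:
+#         f.write("data")
+#     finally:
+#         if should_close:
+#             f.close()  # close() moves the temp file over "out.txt"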
+
+class _AtomicFile:
+ def __init__(self, f: t.IO[t.Any], tmp_filename: str, real_filename: str) -> None:
+ self._f = f
+ self._tmp_filename = tmp_filename
+ self._real_filename = real_filename
+ self.closed = False
+
+ @property
+ def name(self) -> str:
+ return self._real_filename
+
+ def close(self, delete: bool = False) -> None:
+ if self.closed:
+ return
+ self._f.close()
+ os.replace(self._tmp_filename, self._real_filename)
+ self.closed = True
+
+ def __getattr__(self, name: str) -> t.Any:
+ return getattr(self._f, name)
+
+ def __enter__(self) -> "_AtomicFile":
+ return self
+
+ def __exit__(self, exc_type: t.Optional[t.Type[BaseException]], *_: t.Any) -> None:
+ self.close(delete=exc_type is not None)
+
+ def __repr__(self) -> str:
+ return repr(self._f)
+
+
+def strip_ansi(value: str) -> str:
+ return _ansi_re.sub("", value)
+
+
+def _is_jupyter_kernel_output(stream: t.IO[t.Any]) -> bool:
+ while isinstance(stream, (_FixupStream, _NonClosingTextIOWrapper)):
+ stream = stream._stream
+
+ return stream.__class__.__module__.startswith("ipykernel.")
+
+
+def should_strip_ansi(
+ stream: t.Optional[t.IO[t.Any]] = None, color: t.Optional[bool] = None
+) -> bool:
+ if color is None:
+ if stream is None:
+ stream = sys.stdin
+ return not isatty(stream) and not _is_jupyter_kernel_output(stream)
+ return not color
+
+
+# On Windows, wrap the output streams with colorama to support ANSI
+# color codes.
+# NOTE: the double check is needed so that mypy does not analyze this on Linux
+if sys.platform.startswith("win") and WIN:
+ from ._winconsole import _get_windows_console_stream
+
+ def _get_argv_encoding() -> str:
+ import locale
+
+ return locale.getpreferredencoding()
+
+ _ansi_stream_wrappers: t.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary()
+
+ def auto_wrap_for_ansi( # noqa: F811
+ stream: t.TextIO, color: t.Optional[bool] = None
+ ) -> t.TextIO:
+ """Support ANSI color and style codes on Windows by wrapping a
+ stream with colorama.
+ """
+ try:
+ cached = _ansi_stream_wrappers.get(stream)
+ except Exception:
+ cached = None
+
+ if cached is not None:
+ return cached
+
+ import colorama
+
+ strip = should_strip_ansi(stream, color)
+ ansi_wrapper = colorama.AnsiToWin32(stream, strip=strip)
+ rv = t.cast(t.TextIO, ansi_wrapper.stream)
+ _write = rv.write
+
+ def _safe_write(s):
+ try:
+ return _write(s)
+ except BaseException:
+ ansi_wrapper.reset_all()
+ raise
+
+ rv.write = _safe_write
+
+ try:
+ _ansi_stream_wrappers[stream] = rv
+ except Exception:
+ pass
+
+ return rv
+
+else:
+
+ def _get_argv_encoding() -> str:
+ return getattr(sys.stdin, "encoding", None) or sys.getfilesystemencoding()
+
+ def _get_windows_console_stream(
+ f: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str]
+ ) -> t.Optional[t.TextIO]:
+ return None
+
+
+def term_len(x: str) -> int:
+ return len(strip_ansi(x))
+
+
+def isatty(stream: t.IO[t.Any]) -> bool:
+ try:
+ return stream.isatty()
+ except Exception:
+ return False
+
+
+def _make_cached_stream_func(
+ src_func: t.Callable[[], t.Optional[t.TextIO]],
+ wrapper_func: t.Callable[[], t.TextIO],
+) -> t.Callable[[], t.Optional[t.TextIO]]:
+ cache: t.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary()
+
+ def func() -> t.Optional[t.TextIO]:
+ stream = src_func()
+
+ if stream is None:
+ return None
+
+ try:
+ rv = cache.get(stream)
+ except Exception:
+ rv = None
+ if rv is not None:
+ return rv
+ rv = wrapper_func()
+ try:
+ cache[stream] = rv
+ except Exception:
+ pass
+ return rv
+
+ return func
+
+
+_default_text_stdin = _make_cached_stream_func(lambda: sys.stdin, get_text_stdin)
+_default_text_stdout = _make_cached_stream_func(lambda: sys.stdout, get_text_stdout)
+_default_text_stderr = _make_cached_stream_func(lambda: sys.stderr, get_text_stderr)
+
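+# The three factories above are cached per underlying stream object: as
+# long as e.g. sys.stdout is the same object, the same wrapper is
+# returned. If a test runner swaps sys.stdout for another object, a
+# fresh wrapper is built for it (the WeakKeyDictionary lets dead
+# streams be collected).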
+
+binary_streams: t.Mapping[str, t.Callable[[], t.BinaryIO]] = {
+ "stdin": get_binary_stdin,
+ "stdout": get_binary_stdout,
+ "stderr": get_binary_stderr,
+}
+
+text_streams: t.Mapping[
+ str, t.Callable[[t.Optional[str], t.Optional[str]], t.TextIO]
+] = {
+ "stdin": get_text_stdin,
+ "stdout": get_text_stdout,
+ "stderr": get_text_stderr,
+}
diff --git a/billinglayer/python/click/_termui_impl.py b/billinglayer/python/click/_termui_impl.py
new file mode 100644
index 0000000..f744657
--- /dev/null
+++ b/billinglayer/python/click/_termui_impl.py
@@ -0,0 +1,739 @@
+"""
+This module contains implementations for the termui module. To keep the
+import time of Click down, some infrequently used functionality is
+placed in this module and only imported as needed.
+"""
+import contextlib
+import math
+import os
+import sys
+import time
+import typing as t
+from gettext import gettext as _
+from io import StringIO
+from types import TracebackType
+
+from ._compat import _default_text_stdout
+from ._compat import CYGWIN
+from ._compat import get_best_encoding
+from ._compat import isatty
+from ._compat import open_stream
+from ._compat import strip_ansi
+from ._compat import term_len
+from ._compat import WIN
+from .exceptions import ClickException
+from .utils import echo
+
+V = t.TypeVar("V")
+
+if os.name == "nt":
+ BEFORE_BAR = "\r"
+ AFTER_BAR = "\n"
+else:
+ BEFORE_BAR = "\r\033[?25l"
+ AFTER_BAR = "\033[?25h\n"
+
+
+class ProgressBar(t.Generic[V]):
+ def __init__(
+ self,
+ iterable: t.Optional[t.Iterable[V]],
+ length: t.Optional[int] = None,
+ fill_char: str = "#",
+ empty_char: str = " ",
+ bar_template: str = "%(bar)s",
+ info_sep: str = " ",
+ show_eta: bool = True,
+ show_percent: t.Optional[bool] = None,
+ show_pos: bool = False,
+ item_show_func: t.Optional[t.Callable[[t.Optional[V]], t.Optional[str]]] = None,
+ label: t.Optional[str] = None,
+ file: t.Optional[t.TextIO] = None,
+ color: t.Optional[bool] = None,
+ update_min_steps: int = 1,
+ width: int = 30,
+ ) -> None:
+ self.fill_char = fill_char
+ self.empty_char = empty_char
+ self.bar_template = bar_template
+ self.info_sep = info_sep
+ self.show_eta = show_eta
+ self.show_percent = show_percent
+ self.show_pos = show_pos
+ self.item_show_func = item_show_func
+ self.label: str = label or ""
+
+ if file is None:
+ file = _default_text_stdout()
+
+ # There are no standard streams attached to write to. For example,
+ # pythonw on Windows.
+ if file is None:
+ file = StringIO()
+
+ self.file = file
+ self.color = color
+ self.update_min_steps = update_min_steps
+ self._completed_intervals = 0
+ self.width: int = width
+ self.autowidth: bool = width == 0
+
+ if length is None:
+ from operator import length_hint
+
+ length = length_hint(iterable, -1)
+
+ if length == -1:
+ length = None
+ if iterable is None:
+ if length is None:
+ raise TypeError("iterable or length is required")
+ iterable = t.cast(t.Iterable[V], range(length))
+ self.iter: t.Iterable[V] = iter(iterable)
+ self.length = length
+ self.pos = 0
+ self.avg: t.List[float] = []
+ self.last_eta: float
+ self.start: float
+ self.start = self.last_eta = time.time()
+ self.eta_known: bool = False
+ self.finished: bool = False
+ self.max_width: t.Optional[int] = None
+ self.entered: bool = False
+ self.current_item: t.Optional[V] = None
+ self.is_hidden: bool = not isatty(self.file)
+ self._last_line: t.Optional[str] = None
+
+ def __enter__(self) -> "ProgressBar[V]":
+ self.entered = True
+ self.render_progress()
+ return self
+
+ def __exit__(
+ self,
+ exc_type: t.Optional[t.Type[BaseException]],
+ exc_value: t.Optional[BaseException],
+ tb: t.Optional[TracebackType],
+ ) -> None:
+ self.render_finish()
+
+ def __iter__(self) -> t.Iterator[V]:
+ if not self.entered:
+ raise RuntimeError("You need to use progress bars in a with block.")
+ self.render_progress()
+ return self.generator()
+
+ def __next__(self) -> V:
+ # Iteration is defined in terms of a generator function,
+ # returned by iter(self); use that to define next(). This works
+ # because `self.iter` is an iterable consumed by that generator,
+ # so it is re-entry safe. Calling `next(self.generator())`
+ # twice works and does "what you want".
+ return next(iter(self))
+
+ def render_finish(self) -> None:
+ if self.is_hidden:
+ return
+ self.file.write(AFTER_BAR)
+ self.file.flush()
+
+ @property
+ def pct(self) -> float:
+ if self.finished:
+ return 1.0
+ return min(self.pos / (float(self.length or 1) or 1), 1.0)
+
+ @property
+ def time_per_iteration(self) -> float:
+ if not self.avg:
+ return 0.0
+ return sum(self.avg) / float(len(self.avg))
+
+ @property
+ def eta(self) -> float:
+ if self.length is not None and not self.finished:
+ return self.time_per_iteration * (self.length - self.pos)
+ return 0.0
+
+ def format_eta(self) -> str:
+ if self.eta_known:
+ t = int(self.eta)
+ seconds = t % 60
+ t //= 60
+ minutes = t % 60
+ t //= 60
+ hours = t % 24
+ t //= 24
+ if t > 0:
+ return f"{t}d {hours:02}:{minutes:02}:{seconds:02}"
+ else:
+ return f"{hours:02}:{minutes:02}:{seconds:02}"
+ return ""
+
+ def format_pos(self) -> str:
+ pos = str(self.pos)
+ if self.length is not None:
+ pos += f"/{self.length}"
+ return pos
+
+ def format_pct(self) -> str:
+ return f"{int(self.pct * 100): 4}%"[1:]
+
+ def format_bar(self) -> str:
+ if self.length is not None:
+ bar_length = int(self.pct * self.width)
+ bar = self.fill_char * bar_length
+ bar += self.empty_char * (self.width - bar_length)
+ elif self.finished:
+ bar = self.fill_char * self.width
+ else:
+ chars = list(self.empty_char * (self.width or 1))
+ if self.time_per_iteration != 0:
+ chars[
+ int(
+ (math.cos(self.pos * self.time_per_iteration) / 2.0 + 0.5)
+ * self.width
+ )
+ ] = self.fill_char
+ bar = "".join(chars)
+ return bar
+
+ def format_progress_line(self) -> str:
+ show_percent = self.show_percent
+
+ info_bits = []
+ if self.length is not None and show_percent is None:
+ show_percent = not self.show_pos
+
+ if self.show_pos:
+ info_bits.append(self.format_pos())
+ if show_percent:
+ info_bits.append(self.format_pct())
+ if self.show_eta and self.eta_known and not self.finished:
+ info_bits.append(self.format_eta())
+ if self.item_show_func is not None:
+ item_info = self.item_show_func(self.current_item)
+ if item_info is not None:
+ info_bits.append(item_info)
+
+ return (
+ self.bar_template
+ % {
+ "label": self.label,
+ "bar": self.format_bar(),
+ "info": self.info_sep.join(info_bits),
+ }
+ ).rstrip()
+
+ def render_progress(self) -> None:
+ import shutil
+
+ if self.is_hidden:
+ # Only output the label as it changes if the output is not a
+ # TTY. Use file=stderr if you expect to be piping stdout.
+ if self._last_line != self.label:
+ self._last_line = self.label
+ echo(self.label, file=self.file, color=self.color)
+
+ return
+
+ buf = []
+ # Update width in case the terminal has been resized
+ if self.autowidth:
+ old_width = self.width
+ self.width = 0
+ clutter_length = term_len(self.format_progress_line())
+ new_width = max(0, shutil.get_terminal_size().columns - clutter_length)
+ if new_width < old_width:
+ buf.append(BEFORE_BAR)
+ buf.append(" " * self.max_width) # type: ignore
+ self.max_width = new_width
+ self.width = new_width
+
+ clear_width = self.width
+ if self.max_width is not None:
+ clear_width = self.max_width
+
+ buf.append(BEFORE_BAR)
+ line = self.format_progress_line()
+ line_len = term_len(line)
+ if self.max_width is None or self.max_width < line_len:
+ self.max_width = line_len
+
+ buf.append(line)
+ buf.append(" " * (clear_width - line_len))
+ line = "".join(buf)
+ # Render the line only if it changed.
+
+ if line != self._last_line:
+ self._last_line = line
+ echo(line, file=self.file, color=self.color, nl=False)
+ self.file.flush()
+
+ def make_step(self, n_steps: int) -> None:
+ self.pos += n_steps
+ if self.length is not None and self.pos >= self.length:
+ self.finished = True
+
+ if (time.time() - self.last_eta) < 1.0:
+ return
+
+ self.last_eta = time.time()
+
+ # self.avg is a rolling list of length <= 7 of steps where steps are
+ # defined as time elapsed divided by the total progress through
+ # self.length.
+ if self.pos:
+ step = (time.time() - self.start) / self.pos
+ else:
+ step = time.time() - self.start
+
+ self.avg = self.avg[-6:] + [step]
+
+ self.eta_known = self.length is not None
+
+ def update(self, n_steps: int, current_item: t.Optional[V] = None) -> None:
+ """Update the progress bar by advancing a specified number of
+ steps, and optionally set the ``current_item`` for this new
+ position.
+
+ :param n_steps: Number of steps to advance.
+ :param current_item: Optional item to set as ``current_item``
+ for the updated position.
+
+ .. versionchanged:: 8.0
+ Added the ``current_item`` optional parameter.
+
+ .. versionchanged:: 8.0
+ Only render when the number of steps meets the
+ ``update_min_steps`` threshold.
+ """
+ if current_item is not None:
+ self.current_item = current_item
+
+ self._completed_intervals += n_steps
+
+ if self._completed_intervals >= self.update_min_steps:
+ self.make_step(self._completed_intervals)
+ self.render_progress()
+ self._completed_intervals = 0
+
+ def finish(self) -> None:
+ self.eta_known = False
+ self.current_item = None
+ self.finished = True
+
+ def generator(self) -> t.Iterator[V]:
+ """Return a generator which yields the items added to the bar
+ during construction, and updates the progress bar *after* the
+ yielded block returns.
+ """
+ # WARNING: the iterator interface for `ProgressBar` relies on
+ # this and only works because this is a simple generator which
+ # doesn't create or manage additional state. If this function
+ # changes, the impact should be evaluated both against
+ # `iter(bar)` and `next(bar)`. `next()` in particular may call
+ # `self.generator()` repeatedly, and this must remain safe in
+ # order for that interface to work.
+ if not self.entered:
+ raise RuntimeError("You need to use progress bars in a with block.")
+
+ if self.is_hidden:
+ yield from self.iter
+ else:
+ for rv in self.iter:
+ self.current_item = rv
+
+ # This allows item_show_func to be updated before the
+ # item is processed. Only trigger at the beginning of
+ # the update interval.
+ if self._completed_intervals == 0:
+ self.render_progress()
+
+ yield rv
+ self.update(1)
+
+ self.finish()
+ self.render_progress()
+
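+# A minimal usage sketch (the label and loop body are illustrative);
+# user code normally goes through click.progressbar() instead of
+# instantiating this class directly:
+#
+#     with ProgressBar(range(100), label="working") as bar:
+#         for item in bar:
+#             pass  # process the item; the bar advances once per item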
+
+def pager(generator: t.Iterable[str], color: t.Optional[bool] = None) -> None:
+ """Decide what method to use for paging through text."""
+ stdout = _default_text_stdout()
+
+ # There are no standard streams attached to write to. For example,
+ # pythonw on Windows.
+ if stdout is None:
+ stdout = StringIO()
+
+ if not isatty(sys.stdin) or not isatty(stdout):
+ return _nullpager(stdout, generator, color)
+ pager_cmd = (os.environ.get("PAGER", None) or "").strip()
+ if pager_cmd:
+ if WIN:
+ return _tempfilepager(generator, pager_cmd, color)
+ return _pipepager(generator, pager_cmd, color)
+ if os.environ.get("TERM") in ("dumb", "emacs"):
+ return _nullpager(stdout, generator, color)
+ if WIN or sys.platform.startswith("os2"):
+ return _tempfilepager(generator, "more <", color)
+ if hasattr(os, "system") and os.system("(less) 2>/dev/null") == 0:
+ return _pipepager(generator, "less", color)
+
+ import tempfile
+
+ fd, filename = tempfile.mkstemp()
+ os.close(fd)
+ try:
+ if hasattr(os, "system") and os.system(f'more "{filename}"') == 0:
+ return _pipepager(generator, "more", color)
+ return _nullpager(stdout, generator, color)
+ finally:
+ os.unlink(filename)
+
+
+def _pipepager(generator: t.Iterable[str], cmd: str, color: t.Optional[bool]) -> None:
+ """Page through text by feeding it to another program. Invoking a
+ pager through this might support colors.
+ """
+ import subprocess
+
+ env = dict(os.environ)
+
+ # If we're piping to less we might support colors under the
+ # condition that its flags (from the LESS environment variable or
+ # the command line) allow raw control characters (-r or -R). If no
+ # flags are set at all, inject -R so colors pass through.
+ cmd_detail = cmd.rsplit("/", 1)[-1].split()
+ if color is None and cmd_detail[0] == "less":
+ less_flags = f"{os.environ.get('LESS', '')}{' '.join(cmd_detail[1:])}"
+ if not less_flags:
+ env["LESS"] = "-R"
+ color = True
+ elif "r" in less_flags or "R" in less_flags:
+ color = True
+
+ c = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, env=env)
+ stdin = t.cast(t.BinaryIO, c.stdin)
+ encoding = get_best_encoding(stdin)
+ try:
+ for text in generator:
+ if not color:
+ text = strip_ansi(text)
+
+ stdin.write(text.encode(encoding, "replace"))
+ except (OSError, KeyboardInterrupt):
+ pass
+ else:
+ stdin.close()
+
+ # Less doesn't respect ^C, but catches it for its own UI purposes (aborting
+ # search or other commands inside less).
+ #
+ # That means when the user hits ^C, the parent process (click) terminates,
+ # but less is still alive, paging the output and messing up the terminal.
+ #
+ # If the user wants to make the pager exit on ^C, they should set
+ # `LESS='-K'`. It's not our decision to make.
+ while True:
+ try:
+ c.wait()
+ except KeyboardInterrupt:
+ pass
+ else:
+ break
+
+
+def _tempfilepager(
+ generator: t.Iterable[str], cmd: str, color: t.Optional[bool]
+) -> None:
+ """Page through text by invoking a program on a temporary file."""
+ import tempfile
+
+ fd, filename = tempfile.mkstemp()
+ # TODO: This never terminates if the passed generator never terminates.
+ text = "".join(generator)
+ if not color:
+ text = strip_ansi(text)
+ encoding = get_best_encoding(sys.stdout)
+ with open_stream(filename, "wb")[0] as f:
+ f.write(text.encode(encoding))
+ try:
+ os.system(f'{cmd} "{filename}"')
+ finally:
+ os.close(fd)
+ os.unlink(filename)
+
+
+def _nullpager(
+ stream: t.TextIO, generator: t.Iterable[str], color: t.Optional[bool]
+) -> None:
+ """Simply print unformatted text. This is the ultimate fallback."""
+ for text in generator:
+ if not color:
+ text = strip_ansi(text)
+ stream.write(text)
+
+
+class Editor:
+ def __init__(
+ self,
+ editor: t.Optional[str] = None,
+ env: t.Optional[t.Mapping[str, str]] = None,
+ require_save: bool = True,
+ extension: str = ".txt",
+ ) -> None:
+ self.editor = editor
+ self.env = env
+ self.require_save = require_save
+ self.extension = extension
+
+ def get_editor(self) -> str:
+ if self.editor is not None:
+ return self.editor
+ for key in "VISUAL", "EDITOR":
+ rv = os.environ.get(key)
+ if rv:
+ return rv
+ if WIN:
+ return "notepad"
+ for editor in "sensible-editor", "vim", "nano":
+ if os.system(f"which {editor} >/dev/null 2>&1") == 0:
+ return editor
+ return "vi"
+
+ def edit_file(self, filename: str) -> None:
+ import subprocess
+
+ editor = self.get_editor()
+ environ: t.Optional[t.Dict[str, str]] = None
+
+ if self.env:
+ environ = os.environ.copy()
+ environ.update(self.env)
+
+ try:
+ c = subprocess.Popen(f'{editor} "{filename}"', env=environ, shell=True)
+ exit_code = c.wait()
+ if exit_code != 0:
+ raise ClickException(
+ _("{editor}: Editing failed").format(editor=editor)
+ )
+ except OSError as e:
+ raise ClickException(
+ _("{editor}: Editing failed: {e}").format(editor=editor, e=e)
+ ) from e
+
+ def edit(self, text: t.Optional[t.AnyStr]) -> t.Optional[t.AnyStr]:
+ import tempfile
+
+ if not text:
+ data = b""
+ elif isinstance(text, (bytes, bytearray)):
+ data = text
+ else:
+ if text and not text.endswith("\n"):
+ text += "\n"
+
+ if WIN:
+ data = text.replace("\n", "\r\n").encode("utf-8-sig")
+ else:
+ data = text.encode("utf-8")
+
+ fd, name = tempfile.mkstemp(prefix="editor-", suffix=self.extension)
+ f: t.BinaryIO
+
+ try:
+ with os.fdopen(fd, "wb") as f:
+ f.write(data)
+
+ # If the filesystem resolution is 1 second, like Mac OS
+ # 10.12 Extended, or 2 seconds, like FAT32, and the editor
+ # closes very fast, require_save can fail. Set the modified
+ # time to be 2 seconds in the past to work around this.
+ os.utime(name, (os.path.getatime(name), os.path.getmtime(name) - 2))
+ # Depending on the resolution, the exact value might not be
+ # recorded, so get the new recorded value.
+ timestamp = os.path.getmtime(name)
+
+ self.edit_file(name)
+
+ if self.require_save and os.path.getmtime(name) == timestamp:
+ return None
+
+ with open(name, "rb") as f:
+ rv = f.read()
+
+ if isinstance(text, (bytes, bytearray)):
+ return rv
+
+ return rv.decode("utf-8-sig").replace("\r\n", "\n") # type: ignore
+ finally:
+ os.unlink(name)
+
+
+def open_url(url: str, wait: bool = False, locate: bool = False) -> int:
+ import subprocess
+
+ def _unquote_file(url: str) -> str:
+ from urllib.parse import unquote
+
+ if url.startswith("file://"):
+ url = unquote(url[7:])
+
+ return url
+
+ if sys.platform == "darwin":
+ args = ["open"]
+ if wait:
+ args.append("-W")
+ if locate:
+ args.append("-R")
+ args.append(_unquote_file(url))
+ null = open("/dev/null", "w")
+ try:
+ return subprocess.Popen(args, stderr=null).wait()
+ finally:
+ null.close()
+ elif WIN:
+ if locate:
+ url = _unquote_file(url.replace('"', ""))
+ args = f'explorer /select,"{url}"'
+ else:
+ url = url.replace('"', "")
+ wait_str = "/WAIT" if wait else ""
+ args = f'start {wait_str} "" "{url}"'
+ return os.system(args)
+ elif CYGWIN:
+ if locate:
+ url = os.path.dirname(_unquote_file(url).replace('"', ""))
+ args = f'cygstart "{url}"'
+ else:
+ url = url.replace('"', "")
+ wait_str = "-w" if wait else ""
+ args = f'cygstart {wait_str} "{url}"'
+ return os.system(args)
+
+ try:
+ if locate:
+ url = os.path.dirname(_unquote_file(url)) or "."
+ else:
+ url = _unquote_file(url)
+ c = subprocess.Popen(["xdg-open", url])
+ if wait:
+ return c.wait()
+ return 0
+ except OSError:
+ if url.startswith(("http://", "https://")) and not locate and not wait:
+ import webbrowser
+
+ webbrowser.open(url)
+ return 0
+ return 1
+
+
+def _translate_ch_to_exc(ch: str) -> t.Optional[BaseException]:
+ if ch == "\x03":
+ raise KeyboardInterrupt()
+
+ if ch == "\x04" and not WIN: # Unix-like, Ctrl+D
+ raise EOFError()
+
+ if ch == "\x1a" and WIN: # Windows, Ctrl+Z
+ raise EOFError()
+
+ return None
+
+
+if WIN:
+ import msvcrt
+
+ @contextlib.contextmanager
+ def raw_terminal() -> t.Iterator[int]:
+ yield -1
+
+ def getchar(echo: bool) -> str:
+ # The function `getch` will return a bytes object corresponding to
+ # the pressed character. Since Windows 10 build 1803, it will also
+ # return \x00 when called a second time after pressing a regular key.
+ #
+ # `getwch` does not share this probably-bugged behavior. Moreover, it
+ # returns a Unicode object by default, which is what we want.
+ #
+ # Either of these functions will return \x00 or \xe0 to indicate
+ # a special key, and you need to call the same function again to get
+ # the "rest" of the code. The fun part is that \u00e0 is
+ # "latin small letter a with grave", so if you type that on a French
+ # keyboard, you _also_ get a \xe0.
+ # E.g., consider the Up arrow. This returns \xe0 and then \x48. The
+ # resulting Unicode string reads as "a with grave" + "capital H".
+ # This is indistinguishable from when the user actually types
+ # "a with grave" and then "capital H".
+ #
+ # When \xe0 is returned, we assume it's part of a special-key sequence
+ # and call `getwch` again, but that means that when the user types
+ # the \u00e0 character, `getchar` doesn't return until a second
+ # character is typed.
+ # The alternative is returning immediately, but that would mess up
+ # cross-platform handling of arrow keys and others that start with
+ # \xe0. Another option is using `getch`, but then we can't reliably
+ # read non-ASCII characters, because return values of `getch` are
+ # limited to the current 8-bit codepage.
+ #
+ # Anyway, Click doesn't claim to do this Right(tm), and using `getwch`
+ # is doing the right thing in more situations than with `getch`.
+ func: t.Callable[[], str]
+
+ if echo:
+ func = msvcrt.getwche # type: ignore
+ else:
+ func = msvcrt.getwch # type: ignore
+
+ rv = func()
+
+ if rv in ("\x00", "\xe0"):
+ # \x00 and \xe0 are control characters that indicate special key,
+ # see above.
+ rv += func()
+
+ _translate_ch_to_exc(rv)
+ return rv
+
+else:
+ import tty
+ import termios
+
+ @contextlib.contextmanager
+ def raw_terminal() -> t.Iterator[int]:
+ f: t.Optional[t.TextIO]
+ fd: int
+
+ if not isatty(sys.stdin):
+ f = open("/dev/tty")
+ fd = f.fileno()
+ else:
+ fd = sys.stdin.fileno()
+ f = None
+
+ try:
+ old_settings = termios.tcgetattr(fd)
+
+ try:
+ tty.setraw(fd)
+ yield fd
+ finally:
+ termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
+ sys.stdout.flush()
+
+ if f is not None:
+ f.close()
+ except termios.error:
+ pass
+
+ def getchar(echo: bool) -> str:
+ with raw_terminal() as fd:
+ ch = os.read(fd, 32).decode(get_best_encoding(sys.stdin), "replace")
+
+ if echo and isatty(sys.stdout):
+ sys.stdout.write(ch)
+
+ _translate_ch_to_exc(ch)
+ return ch
diff --git a/billinglayer/python/click/_textwrap.py b/billinglayer/python/click/_textwrap.py
new file mode 100644
index 0000000..b47dcbd
--- /dev/null
+++ b/billinglayer/python/click/_textwrap.py
@@ -0,0 +1,49 @@
+import textwrap
+import typing as t
+from contextlib import contextmanager
+
+
+class TextWrapper(textwrap.TextWrapper):
+ def _handle_long_word(
+ self,
+ reversed_chunks: t.List[str],
+ cur_line: t.List[str],
+ cur_len: int,
+ width: int,
+ ) -> None:
+ space_left = max(width - cur_len, 1)
+
+ if self.break_long_words:
+ last = reversed_chunks[-1]
+ cut = last[:space_left]
+ res = last[space_left:]
+ cur_line.append(cut)
+ reversed_chunks[-1] = res
+ elif not cur_line:
+ cur_line.append(reversed_chunks.pop())
+
+ @contextmanager
+ def extra_indent(self, indent: str) -> t.Iterator[None]:
+ old_initial_indent = self.initial_indent
+ old_subsequent_indent = self.subsequent_indent
+ self.initial_indent += indent
+ self.subsequent_indent += indent
+
+ try:
+ yield
+ finally:
+ self.initial_indent = old_initial_indent
+ self.subsequent_indent = old_subsequent_indent
+
+ def indent_only(self, text: str) -> str:
+ rv = []
+
+ for idx, line in enumerate(text.splitlines()):
+ indent = self.initial_indent
+
+ if idx > 0:
+ indent = self.subsequent_indent
+
+ rv.append(f"{indent}{line}")
+
+ return "\n".join(rv)
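+
+
+# For example, with initial_indent="- " and subsequent_indent="  ",
+# indent_only("a\nb") returns "- a\n  b"; unlike wrap(), the text is
+# only indented, never re-wrapped.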
diff --git a/billinglayer/python/click/_winconsole.py b/billinglayer/python/click/_winconsole.py
new file mode 100644
index 0000000..6b20df3
--- /dev/null
+++ b/billinglayer/python/click/_winconsole.py
@@ -0,0 +1,279 @@
+# This module is based on the excellent work by Adam Bartoš who
+# provided a lot of what went into the implementation here in
+# the discussion to issue1602 in the Python bug tracker.
+#
+# There are some general differences in regards to how this works
+# compared to the original patches as we do not need to patch
+# the entire interpreter but just work in our little world of
+# echo and prompt.
+import io
+import sys
+import time
+import typing as t
+from ctypes import byref
+from ctypes import c_char
+from ctypes import c_char_p
+from ctypes import c_int
+from ctypes import c_ssize_t
+from ctypes import c_ulong
+from ctypes import c_void_p
+from ctypes import POINTER
+from ctypes import py_object
+from ctypes import Structure
+from ctypes.wintypes import DWORD
+from ctypes.wintypes import HANDLE
+from ctypes.wintypes import LPCWSTR
+from ctypes.wintypes import LPWSTR
+
+from ._compat import _NonClosingTextIOWrapper
+
+assert sys.platform == "win32"
+import msvcrt # noqa: E402
+from ctypes import windll # noqa: E402
+from ctypes import WINFUNCTYPE # noqa: E402
+
+c_ssize_p = POINTER(c_ssize_t)
+
+kernel32 = windll.kernel32
+GetStdHandle = kernel32.GetStdHandle
+ReadConsoleW = kernel32.ReadConsoleW
+WriteConsoleW = kernel32.WriteConsoleW
+GetConsoleMode = kernel32.GetConsoleMode
+GetLastError = kernel32.GetLastError
+GetCommandLineW = WINFUNCTYPE(LPWSTR)(("GetCommandLineW", windll.kernel32))
+CommandLineToArgvW = WINFUNCTYPE(POINTER(LPWSTR), LPCWSTR, POINTER(c_int))(
+ ("CommandLineToArgvW", windll.shell32)
+)
+LocalFree = WINFUNCTYPE(c_void_p, c_void_p)(("LocalFree", windll.kernel32))
+
+STDIN_HANDLE = GetStdHandle(-10)
+STDOUT_HANDLE = GetStdHandle(-11)
+STDERR_HANDLE = GetStdHandle(-12)
+
+PyBUF_SIMPLE = 0
+PyBUF_WRITABLE = 1
+
+ERROR_SUCCESS = 0
+ERROR_NOT_ENOUGH_MEMORY = 8
+ERROR_OPERATION_ABORTED = 995
+
+STDIN_FILENO = 0
+STDOUT_FILENO = 1
+STDERR_FILENO = 2
+
+EOF = b"\x1a"
+MAX_BYTES_WRITTEN = 32767
+
+try:
+ from ctypes import pythonapi
+except ImportError:
+ # On PyPy we cannot get buffers so our ability to operate here is
+ # severely limited.
+ get_buffer = None
+else:
+
+ class Py_buffer(Structure):
+ _fields_ = [
+ ("buf", c_void_p),
+ ("obj", py_object),
+ ("len", c_ssize_t),
+ ("itemsize", c_ssize_t),
+ ("readonly", c_int),
+ ("ndim", c_int),
+ ("format", c_char_p),
+ ("shape", c_ssize_p),
+ ("strides", c_ssize_p),
+ ("suboffsets", c_ssize_p),
+ ("internal", c_void_p),
+ ]
+
+ PyObject_GetBuffer = pythonapi.PyObject_GetBuffer
+ PyBuffer_Release = pythonapi.PyBuffer_Release
+
+ def get_buffer(obj, writable=False):
+ buf = Py_buffer()
+ flags = PyBUF_WRITABLE if writable else PyBUF_SIMPLE
+ PyObject_GetBuffer(py_object(obj), byref(buf), flags)
+
+ try:
+ buffer_type = c_char * buf.len
+ return buffer_type.from_address(buf.buf)
+ finally:
+ PyBuffer_Release(byref(buf))
+
+
+class _WindowsConsoleRawIOBase(io.RawIOBase):
+ def __init__(self, handle):
+ self.handle = handle
+
+ def isatty(self):
+ super().isatty()
+ return True
+
+
+class _WindowsConsoleReader(_WindowsConsoleRawIOBase):
+ def readable(self):
+ return True
+
+ def readinto(self, b):
+ bytes_to_be_read = len(b)
+ if not bytes_to_be_read:
+ return 0
+ elif bytes_to_be_read % 2:
+ raise ValueError(
+ "cannot read odd number of bytes from UTF-16-LE encoded console"
+ )
+
+ buffer = get_buffer(b, writable=True)
+ code_units_to_be_read = bytes_to_be_read // 2
+ code_units_read = c_ulong()
+
+ rv = ReadConsoleW(
+ HANDLE(self.handle),
+ buffer,
+ code_units_to_be_read,
+ byref(code_units_read),
+ None,
+ )
+ if GetLastError() == ERROR_OPERATION_ABORTED:
+ # wait for KeyboardInterrupt
+ time.sleep(0.1)
+ if not rv:
+ raise OSError(f"Windows error: {GetLastError()}")
+
+ if buffer[0] == EOF:
+ return 0
+ return 2 * code_units_read.value
+
+
+class _WindowsConsoleWriter(_WindowsConsoleRawIOBase):
+ def writable(self):
+ return True
+
+ @staticmethod
+ def _get_error_message(errno):
+ if errno == ERROR_SUCCESS:
+ return "ERROR_SUCCESS"
+ elif errno == ERROR_NOT_ENOUGH_MEMORY:
+ return "ERROR_NOT_ENOUGH_MEMORY"
+ return f"Windows error {errno}"
+
+ def write(self, b):
+ bytes_to_be_written = len(b)
+ buf = get_buffer(b)
+ code_units_to_be_written = min(bytes_to_be_written, MAX_BYTES_WRITTEN) // 2
+ code_units_written = c_ulong()
+
+ WriteConsoleW(
+ HANDLE(self.handle),
+ buf,
+ code_units_to_be_written,
+ byref(code_units_written),
+ None,
+ )
+ bytes_written = 2 * code_units_written.value
+
+ if bytes_written == 0 and bytes_to_be_written > 0:
+ raise OSError(self._get_error_message(GetLastError()))
+ return bytes_written
+
+
+class ConsoleStream:
+ def __init__(self, text_stream: t.TextIO, byte_stream: t.BinaryIO) -> None:
+ self._text_stream = text_stream
+ self.buffer = byte_stream
+
+ @property
+ def name(self) -> str:
+ return self.buffer.name
+
+ def write(self, x: t.AnyStr) -> int:
+ if isinstance(x, str):
+ return self._text_stream.write(x)
+ try:
+ self.flush()
+ except Exception:
+ pass
+ return self.buffer.write(x)
+
+ def writelines(self, lines: t.Iterable[t.AnyStr]) -> None:
+ for line in lines:
+ self.write(line)
+
+ def __getattr__(self, name: str) -> t.Any:
+ return getattr(self._text_stream, name)
+
+ def isatty(self) -> bool:
+ return self.buffer.isatty()
+
+ def __repr__(self):
+ return f""
+
+
+def _get_text_stdin(buffer_stream: t.BinaryIO) -> t.TextIO:
+ text_stream = _NonClosingTextIOWrapper(
+ io.BufferedReader(_WindowsConsoleReader(STDIN_HANDLE)),
+ "utf-16-le",
+ "strict",
+ line_buffering=True,
+ )
+ return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream))
+
+
+def _get_text_stdout(buffer_stream: t.BinaryIO) -> t.TextIO:
+ text_stream = _NonClosingTextIOWrapper(
+ io.BufferedWriter(_WindowsConsoleWriter(STDOUT_HANDLE)),
+ "utf-16-le",
+ "strict",
+ line_buffering=True,
+ )
+ return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream))
+
+
+def _get_text_stderr(buffer_stream: t.BinaryIO) -> t.TextIO:
+ text_stream = _NonClosingTextIOWrapper(
+ io.BufferedWriter(_WindowsConsoleWriter(STDERR_HANDLE)),
+ "utf-16-le",
+ "strict",
+ line_buffering=True,
+ )
+ return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream))
+
+
+_stream_factories: t.Mapping[int, t.Callable[[t.BinaryIO], t.TextIO]] = {
+ 0: _get_text_stdin,
+ 1: _get_text_stdout,
+ 2: _get_text_stderr,
+}
+
+
+def _is_console(f: t.TextIO) -> bool:
+ if not hasattr(f, "fileno"):
+ return False
+
+ try:
+ fileno = f.fileno()
+ except (OSError, io.UnsupportedOperation):
+ return False
+
+ handle = msvcrt.get_osfhandle(fileno)
+ return bool(GetConsoleMode(handle, byref(DWORD())))
+
+
+def _get_windows_console_stream(
+ f: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str]
+) -> t.Optional[t.TextIO]:
+ if (
+ get_buffer is not None
+ and encoding in {"utf-16-le", None}
+ and errors in {"strict", None}
+ and _is_console(f)
+ ):
+ func = _stream_factories.get(f.fileno())
+ if func is not None:
+ b = getattr(f, "buffer", None)
+
+ if b is None:
+ return None
+
+ return func(b)
diff --git a/billinglayer/python/click/core.py b/billinglayer/python/click/core.py
new file mode 100644
index 0000000..cc65e89
--- /dev/null
+++ b/billinglayer/python/click/core.py
@@ -0,0 +1,3042 @@
+import enum
+import errno
+import inspect
+import os
+import sys
+import typing as t
+from collections import abc
+from contextlib import contextmanager
+from contextlib import ExitStack
+from functools import update_wrapper
+from gettext import gettext as _
+from gettext import ngettext
+from itertools import repeat
+from types import TracebackType
+
+from . import types
+from .exceptions import Abort
+from .exceptions import BadParameter
+from .exceptions import ClickException
+from .exceptions import Exit
+from .exceptions import MissingParameter
+from .exceptions import UsageError
+from .formatting import HelpFormatter
+from .formatting import join_options
+from .globals import pop_context
+from .globals import push_context
+from .parser import _flag_needs_value
+from .parser import OptionParser
+from .parser import split_opt
+from .termui import confirm
+from .termui import prompt
+from .termui import style
+from .utils import _detect_program_name
+from .utils import _expand_args
+from .utils import echo
+from .utils import make_default_short_help
+from .utils import make_str
+from .utils import PacifyFlushWrapper
+
+if t.TYPE_CHECKING:
+ import typing_extensions as te
+ from .shell_completion import CompletionItem
+
+F = t.TypeVar("F", bound=t.Callable[..., t.Any])
+V = t.TypeVar("V")
+
+
+def _complete_visible_commands(
+ ctx: "Context", incomplete: str
+) -> t.Iterator[t.Tuple[str, "Command"]]:
+ """List all the subcommands of a group that start with the
+ incomplete value and aren't hidden.
+
+ :param ctx: Invocation context for the group.
+ :param incomplete: Value being completed. May be empty.
+ """
+ multi = t.cast(MultiCommand, ctx.command)
+
+ for name in multi.list_commands(ctx):
+ if name.startswith(incomplete):
+ command = multi.get_command(ctx, name)
+
+ if command is not None and not command.hidden:
+ yield name, command
+
+
+def _check_multicommand(
+ base_command: "MultiCommand", cmd_name: str, cmd: "Command", register: bool = False
+) -> None:
+ if not base_command.chain or not isinstance(cmd, MultiCommand):
+ return
+ if register:
+ hint = (
+ "It is not possible to add multi commands as children to"
+ " another multi command that is in chain mode."
+ )
+ else:
+ hint = (
+ "Found a multi command as subcommand to a multi command"
+ " that is in chain mode. This is not supported."
+ )
+ raise RuntimeError(
+ f"{hint}. Command {base_command.name!r} is set to chain and"
+ f" {cmd_name!r} was added as a subcommand but it in itself is a"
+ f" multi command. ({cmd_name!r} is a {type(cmd).__name__}"
+ f" within a chained {type(base_command).__name__} named"
+ f" {base_command.name!r})."
+ )
+
+
+def batch(iterable: t.Iterable[V], batch_size: int) -> t.List[t.Tuple[V, ...]]:
+ return list(zip(*repeat(iter(iterable), batch_size)))
+
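+# For example, batch([1, 2, 3, 4, 5], 2) returns [(1, 2), (3, 4)]: the
+# same iterator is zipped with itself, and zip() drops the trailing
+# partial batch.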
+
+@contextmanager
+def augment_usage_errors(
+ ctx: "Context", param: t.Optional["Parameter"] = None
+) -> t.Iterator[None]:
+ """Context manager that attaches extra information to exceptions."""
+ try:
+ yield
+ except BadParameter as e:
+ if e.ctx is None:
+ e.ctx = ctx
+ if param is not None and e.param is None:
+ e.param = param
+ raise
+ except UsageError as e:
+ if e.ctx is None:
+ e.ctx = ctx
+ raise
+
+
+def iter_params_for_processing(
+ invocation_order: t.Sequence["Parameter"],
+ declaration_order: t.Sequence["Parameter"],
+) -> t.List["Parameter"]:
+ """Given a sequence of parameters in the order as should be considered
+ for processing and an iterable of parameters that exist, this returns
+ a list in the correct order as they should be processed.
+ """
+
+ def sort_key(item: "Parameter") -> t.Tuple[bool, float]:
+ try:
+ idx: float = invocation_order.index(item)
+ except ValueError:
+ idx = float("inf")
+
+ return not item.is_eager, idx
+
+ return sorted(declaration_order, key=sort_key)
+
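+# For a hypothetical command declaring an eager --version and a --foo
+# option, invoked as `cmd --foo x`, the resulting order is --version
+# (eager parameters sort first), then --foo (invocation index 0), then
+# any parameters that never appeared (index infinity).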
+
+class ParameterSource(enum.Enum):
+ """This is an :class:`~enum.Enum` that indicates the source of a
+ parameter's value.
+
+ Use :meth:`click.Context.get_parameter_source` to get the
+ source for a parameter by name.
+
+ .. versionchanged:: 8.0
+ Use :class:`~enum.Enum` and drop the ``validate`` method.
+
+ .. versionchanged:: 8.0
+ Added the ``PROMPT`` value.
+ """
+
+ COMMANDLINE = enum.auto()
+ """The value was provided by the command line args."""
+ ENVIRONMENT = enum.auto()
+ """The value was provided with an environment variable."""
+ DEFAULT = enum.auto()
+ """Used the default specified by the parameter."""
+ DEFAULT_MAP = enum.auto()
+ """Used a default provided by :attr:`Context.default_map`."""
+ PROMPT = enum.auto()
+ """Used a prompt to confirm a default or provide a value."""
+
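+# A short, illustrative check inside a command callback:
+#
+#     ctx = click.get_current_context()
+#     if ctx.get_parameter_source("name") is ParameterSource.DEFAULT:
+#         click.echo("'name' fell back to its default")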
+
+class Context:
+ """The context is a special internal object that holds state relevant
+ for the script execution at every single level. It's normally invisible
+ to commands unless they opt-in to getting access to it.
+
+ The context is useful as it can pass internal objects around and can
+ control special execution features such as reading data from
+ environment variables.
+
+ A context can be used as context manager in which case it will call
+ :meth:`close` on teardown.
+
+ :param command: the command class for this context.
+ :param parent: the parent context.
+ :param info_name: the info name for this invocation. Generally this
+ is the most descriptive name for the script or
+ command. For the toplevel script it is usually
+ the name of the script, for commands below it it's
+ the name of the command.
+ :param obj: an arbitrary object of user data.
+ :param auto_envvar_prefix: the prefix to use for automatic environment
+ variables. If this is `None` then reading
+ from environment variables is disabled. This
+ does not affect manually set environment
+ variables which are always read.
+ :param default_map: a dictionary (like object) with default values
+ for parameters.
+ :param terminal_width: the width of the terminal. The default is
+ inherit from parent context. If no context
+ defines the terminal width then auto
+ detection will be applied.
+ :param max_content_width: the maximum width for content rendered by
+ Click (this currently only affects help
+ pages). This defaults to 80 characters if
+ not overridden. In other words: even if the
+ terminal is larger than that, Click will not
+ format things wider than 80 characters by
+ default. In addition to that, formatters might
+ add some safety margin on the right.
+ :param resilient_parsing: if this flag is enabled then Click will
+ parse without any interactivity or callback
+ invocation. Default values will also be
+ ignored. This is useful for implementing
+ things such as completion support.
+ :param allow_extra_args: if this is set to `True` then extra arguments
+ at the end will not raise an error and will be
+ kept on the context. The default is to inherit
+ from the command.
+ :param allow_interspersed_args: if this is set to `False` then options
+ and arguments cannot be mixed. The
+ default is to inherit from the command.
+ :param ignore_unknown_options: instructs click to ignore options it does
+ not know and keeps them for later
+ processing.
+ :param help_option_names: optionally a list of strings that define how
+ the default help parameter is named. The
+ default is ``['--help']``.
+ :param token_normalize_func: an optional function that is used to
+ normalize tokens (options, choices,
+ etc.). This for instance can be used to
+ implement case insensitive behavior.
+ :param color: controls if the terminal supports ANSI colors or not. The
+ default is autodetection. This is only needed if ANSI
+ codes are used in texts that Click prints which is by
+ default not the case. This for instance would affect
+ help output.
+ :param show_default: Show the default value for commands. If this
+ value is not set, it defaults to the value from the parent
+ context. ``Command.show_default`` overrides this default for the
+ specific command.
+
+ .. versionchanged:: 8.1
+ The ``show_default`` parameter is overridden by
+ ``Command.show_default``, instead of the other way around.
+
+ .. versionchanged:: 8.0
+ The ``show_default`` parameter defaults to the value from the
+ parent context.
+
+ .. versionchanged:: 7.1
+ Added the ``show_default`` parameter.
+
+ .. versionchanged:: 4.0
+ Added the ``color``, ``ignore_unknown_options``, and
+ ``max_content_width`` parameters.
+
+ .. versionchanged:: 3.0
+ Added the ``allow_extra_args`` and ``allow_interspersed_args``
+ parameters.
+
+ .. versionchanged:: 2.0
+ Added the ``resilient_parsing``, ``help_option_names``, and
+ ``token_normalize_func`` parameters.
+ """
+
+ #: The formatter class to create with :meth:`make_formatter`.
+ #:
+ #: .. versionadded:: 8.0
+ formatter_class: t.Type["HelpFormatter"] = HelpFormatter
+
+ def __init__(
+ self,
+ command: "Command",
+ parent: t.Optional["Context"] = None,
+ info_name: t.Optional[str] = None,
+ obj: t.Optional[t.Any] = None,
+ auto_envvar_prefix: t.Optional[str] = None,
+ default_map: t.Optional[t.MutableMapping[str, t.Any]] = None,
+ terminal_width: t.Optional[int] = None,
+ max_content_width: t.Optional[int] = None,
+ resilient_parsing: bool = False,
+ allow_extra_args: t.Optional[bool] = None,
+ allow_interspersed_args: t.Optional[bool] = None,
+ ignore_unknown_options: t.Optional[bool] = None,
+ help_option_names: t.Optional[t.List[str]] = None,
+ token_normalize_func: t.Optional[t.Callable[[str], str]] = None,
+ color: t.Optional[bool] = None,
+ show_default: t.Optional[bool] = None,
+ ) -> None:
+ #: the parent context or `None` if none exists.
+ self.parent = parent
+ #: the :class:`Command` for this context.
+ self.command = command
+ #: the descriptive information name
+ self.info_name = info_name
+ #: Map of parameter names to their parsed values. Parameters
+ #: with ``expose_value=False`` are not stored.
+ self.params: t.Dict[str, t.Any] = {}
+ #: the leftover arguments.
+ self.args: t.List[str] = []
+ #: protected arguments. These are arguments that are prepended
+ #: to `args` when certain parsing scenarios are encountered but
+ #: must never be propagated to other arguments. This is used
+ #: to implement nested parsing.
+ self.protected_args: t.List[str] = []
+ #: the collected prefixes of the command's options.
+ self._opt_prefixes: t.Set[str] = set(parent._opt_prefixes) if parent else set()
+
+ if obj is None and parent is not None:
+ obj = parent.obj
+
+ #: the user object stored.
+ self.obj: t.Any = obj
+ self._meta: t.Dict[str, t.Any] = getattr(parent, "meta", {})
+
+ #: A dictionary (-like object) with defaults for parameters.
+ if (
+ default_map is None
+ and info_name is not None
+ and parent is not None
+ and parent.default_map is not None
+ ):
+ default_map = parent.default_map.get(info_name)
+
+ self.default_map: t.Optional[t.MutableMapping[str, t.Any]] = default_map
+
+ #: This flag indicates if a subcommand is going to be executed. A
+ #: group callback can use this information to figure out if it's
+ #: being executed directly or because the execution flow passes
+ #: onwards to a subcommand. By default it's None, but it can be
+ #: the name of the subcommand to execute.
+ #:
+ #: If chaining is enabled this will be set to ``'*'`` in case
+ #: any commands are executed. It is however not possible to
+ #: figure out which ones. If you require this knowledge you
+ #: should use a :func:`result_callback`.
+ self.invoked_subcommand: t.Optional[str] = None
+
+ if terminal_width is None and parent is not None:
+ terminal_width = parent.terminal_width
+
+ #: The width of the terminal (None is autodetection).
+ self.terminal_width: t.Optional[int] = terminal_width
+
+ if max_content_width is None and parent is not None:
+ max_content_width = parent.max_content_width
+
+ #: The maximum width of formatted content (None implies a sensible
+ #: default which is 80 for most things).
+ self.max_content_width: t.Optional[int] = max_content_width
+
+ if allow_extra_args is None:
+ allow_extra_args = command.allow_extra_args
+
+ #: Indicates if the context allows extra args or if it should
+ #: fail on parsing.
+ #:
+ #: .. versionadded:: 3.0
+ self.allow_extra_args = allow_extra_args
+
+ if allow_interspersed_args is None:
+ allow_interspersed_args = command.allow_interspersed_args
+
+ #: Indicates if the context allows mixing of arguments and
+ #: options or not.
+ #:
+ #: .. versionadded:: 3.0
+ self.allow_interspersed_args: bool = allow_interspersed_args
+
+ if ignore_unknown_options is None:
+ ignore_unknown_options = command.ignore_unknown_options
+
+ #: Instructs click to ignore options that a command does not
+ #: understand and will store it on the context for later
+ #: processing. This is primarily useful for situations where you
+ #: want to call into external programs. Generally this pattern is
+ #: strongly discouraged because it's not possible to losslessly
+ #: forward all arguments.
+ #:
+ #: .. versionadded:: 4.0
+ self.ignore_unknown_options: bool = ignore_unknown_options
+
+ if help_option_names is None:
+ if parent is not None:
+ help_option_names = parent.help_option_names
+ else:
+ help_option_names = ["--help"]
+
+ #: The names for the help options.
+ self.help_option_names: t.List[str] = help_option_names
+
+ if token_normalize_func is None and parent is not None:
+ token_normalize_func = parent.token_normalize_func
+
+ #: An optional normalization function for tokens. This is
+ #: options, choices, commands etc.
+ self.token_normalize_func: t.Optional[
+ t.Callable[[str], str]
+ ] = token_normalize_func
+
+ #: Indicates if resilient parsing is enabled. In that case Click
+ #: will do its best to not cause any failures and default values
+ #: will be ignored. Useful for completion.
+ self.resilient_parsing: bool = resilient_parsing
+
+ # If there is no envvar prefix yet, but the parent has one and
+ # the command on this level has a name, we can expand the envvar
+ # prefix automatically.
+ if auto_envvar_prefix is None:
+ if (
+ parent is not None
+ and parent.auto_envvar_prefix is not None
+ and self.info_name is not None
+ ):
+ auto_envvar_prefix = (
+ f"{parent.auto_envvar_prefix}_{self.info_name.upper()}"
+ )
+ else:
+ auto_envvar_prefix = auto_envvar_prefix.upper()
+
+ if auto_envvar_prefix is not None:
+ auto_envvar_prefix = auto_envvar_prefix.replace("-", "_")
+
+ self.auto_envvar_prefix: t.Optional[str] = auto_envvar_prefix
+
+ if color is None and parent is not None:
+ color = parent.color
+
+ #: Controls if styling output is wanted or not.
+ self.color: t.Optional[bool] = color
+
+ if show_default is None and parent is not None:
+ show_default = parent.show_default
+
+ #: Show option default values when formatting help text.
+ self.show_default: t.Optional[bool] = show_default
+
+ self._close_callbacks: t.List[t.Callable[[], t.Any]] = []
+ self._depth = 0
+ self._parameter_source: t.Dict[str, ParameterSource] = {}
+ self._exit_stack = ExitStack()
+
+ def to_info_dict(self) -> t.Dict[str, t.Any]:
+ """Gather information that could be useful for a tool generating
+ user-facing documentation. This traverses the entire CLI
+ structure.
+
+ .. code-block:: python
+
+ with Context(cli) as ctx:
+ info = ctx.to_info_dict()
+
+ .. versionadded:: 8.0
+ """
+ return {
+ "command": self.command.to_info_dict(self),
+ "info_name": self.info_name,
+ "allow_extra_args": self.allow_extra_args,
+ "allow_interspersed_args": self.allow_interspersed_args,
+ "ignore_unknown_options": self.ignore_unknown_options,
+ "auto_envvar_prefix": self.auto_envvar_prefix,
+ }
+
+ def __enter__(self) -> "Context":
+ self._depth += 1
+ push_context(self)
+ return self
+
+ def __exit__(
+ self,
+ exc_type: t.Optional[t.Type[BaseException]],
+ exc_value: t.Optional[BaseException],
+ tb: t.Optional[TracebackType],
+ ) -> None:
+ self._depth -= 1
+ if self._depth == 0:
+ self.close()
+ pop_context()
+
+ @contextmanager
+ def scope(self, cleanup: bool = True) -> t.Iterator["Context"]:
+ """This helper method can be used with the context object to promote
+ it to the current thread local (see :func:`get_current_context`).
+ The default behavior of this is to invoke the cleanup functions which
+ can be disabled by setting `cleanup` to `False`. The cleanup
+ functions are typically used for things such as closing file handles.
+
+ If the cleanup is intended the context object can also be directly
+ used as a context manager.
+
+ Example usage::
+
+ with ctx.scope():
+ assert get_current_context() is ctx
+
+ This is equivalent::
+
+ with ctx:
+ assert get_current_context() is ctx
+
+ .. versionadded:: 5.0
+
+ :param cleanup: controls if the cleanup functions should be run or
+ not. The default is to run these functions. In
+ some situations the context only wants to be
+ temporarily pushed in which case this can be disabled.
+ Nested pushes automatically defer the cleanup.
+ """
+ if not cleanup:
+ self._depth += 1
+ try:
+ with self as rv:
+ yield rv
+ finally:
+ if not cleanup:
+ self._depth -= 1
+
+ @property
+ def meta(self) -> t.Dict[str, t.Any]:
+ """This is a dictionary which is shared with all the contexts
+ that are nested. It exists so that click utilities can store some
+ state here if they need to. It is however the responsibility of
+ that code to manage this dictionary well.
+
+ The keys are supposed to be unique dotted strings. For instance
+ module paths are a good choice for it. What is stored in there is
+ irrelevant for the operation of click. However what is important is
+ that code that places data here adheres to the general semantics of
+ the system.
+
+ Example usage::
+
+ LANG_KEY = f'{__name__}.lang'
+
+ def set_language(value):
+ ctx = get_current_context()
+ ctx.meta[LANG_KEY] = value
+
+ def get_language():
+ return get_current_context().meta.get(LANG_KEY, 'en_US')
+
+ .. versionadded:: 5.0
+ """
+ return self._meta
+
+ def make_formatter(self) -> HelpFormatter:
+ """Creates the :class:`~click.HelpFormatter` for the help and
+ usage output.
+
+ To quickly customize the formatter class used without overriding
+ this method, set the :attr:`formatter_class` attribute.
+
+ .. versionchanged:: 8.0
+ Added the :attr:`formatter_class` attribute.
+ """
+ return self.formatter_class(
+ width=self.terminal_width, max_width=self.max_content_width
+ )
+
+ def with_resource(self, context_manager: t.ContextManager[V]) -> V:
+ """Register a resource as if it were used in a ``with``
+ statement. The resource will be cleaned up when the context is
+ popped.
+
+ Uses :meth:`contextlib.ExitStack.enter_context`. It calls the
+ resource's ``__enter__()`` method and returns the result. When
+ the context is popped, it closes the stack, which calls the
+ resource's ``__exit__()`` method.
+
+ To register a cleanup function for something that isn't a
+ context manager, use :meth:`call_on_close`. Or use something
+ from :mod:`contextlib` to turn it into a context manager first.
+
+ .. code-block:: python
+
+ @click.group()
+ @click.option("--name")
+ @click.pass_context
+ def cli(ctx):
+ ctx.obj = ctx.with_resource(connect_db(name))
+
+ :param context_manager: The context manager to enter.
+ :return: Whatever ``context_manager.__enter__()`` returns.
+
+ .. versionadded:: 8.0
+ """
+ return self._exit_stack.enter_context(context_manager)
+
+ def call_on_close(self, f: t.Callable[..., t.Any]) -> t.Callable[..., t.Any]:
+ """Register a function to be called when the context tears down.
+
+ This can be used to close resources opened during the script
+ execution. Resources that support Python's context manager
+ protocol which would be used in a ``with`` statement should be
+ registered with :meth:`with_resource` instead.
+
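+ Example usage (a sketch; ``open_db`` is a hypothetical helper
+ returning an object with a ``close()`` method)::
+
+ @click.command()
+ @click.pass_context
+ def cli(ctx):
+ db = open_db()
+ ctx.call_on_close(db.close)
+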
+ :param f: The function to execute on teardown.
+ """
+ return self._exit_stack.callback(f)
+
+ def close(self) -> None:
+ """Invoke all close callbacks registered with
+ :meth:`call_on_close`, and exit all context managers entered
+ with :meth:`with_resource`.
+ """
+ self._exit_stack.close()
+ # In case the context is reused, create a new exit stack.
+ self._exit_stack = ExitStack()
+
+ @property
+ def command_path(self) -> str:
+ """The computed command path. This is used for the ``usage``
+ information on the help page. It's automatically created by
+ combining the info names of the chain of contexts to the root.
+ """
+ rv = ""
+ if self.info_name is not None:
+ rv = self.info_name
+ if self.parent is not None:
+ parent_command_path = [self.parent.command_path]
+
+ if isinstance(self.parent.command, Command):
+ for param in self.parent.command.get_params(self):
+ parent_command_path.extend(param.get_usage_pieces(self))
+
+ rv = f"{' '.join(parent_command_path)} {rv}"
+ return rv.lstrip()
+
+ def find_root(self) -> "Context":
+ """Finds the outermost context."""
+ node = self
+ while node.parent is not None:
+ node = node.parent
+ return node
+
+ def find_object(self, object_type: t.Type[V]) -> t.Optional[V]:
+ """Finds the closest object of a given type."""
+ node: t.Optional["Context"] = self
+
+ while node is not None:
+ if isinstance(node.obj, object_type):
+ return node.obj
+
+ node = node.parent
+
+ return None
+
+ def ensure_object(self, object_type: t.Type[V]) -> V:
+ """Like :meth:`find_object` but sets the innermost object to a
+ new instance of `object_type` if it does not exist.
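+
+ Example usage::
+
+ obj = ctx.ensure_object(dict)
+ obj.setdefault("verbose", False)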
+ """
+ rv = self.find_object(object_type)
+ if rv is None:
+ self.obj = rv = object_type()
+ return rv
+
+ @t.overload
+ def lookup_default(
+ self, name: str, call: "te.Literal[True]" = True
+ ) -> t.Optional[t.Any]:
+ ...
+
+ @t.overload
+ def lookup_default(
+ self, name: str, call: "te.Literal[False]" = ...
+ ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]:
+ ...
+
+ def lookup_default(self, name: str, call: bool = True) -> t.Optional[t.Any]:
+ """Get the default for a parameter from :attr:`default_map`.
+
+ :param name: Name of the parameter.
+ :param call: If the default is a callable, call it. Disable to
+ return the callable instead.
+
+ .. versionchanged:: 8.0
+ Added the ``call`` parameter.
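+
+ Example (a sketch; ``"name"`` stands for any parameter present
+ in :attr:`default_map`)::
+
+ # with ctx.default_map == {"name": lambda: "world"}
+ ctx.lookup_default("name") # -> "world"
+ ctx.lookup_default("name", call=False) # -> the callable itself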
+ """
+ if self.default_map is not None:
+ value = self.default_map.get(name)
+
+ if call and callable(value):
+ return value()
+
+ return value
+
+ return None
+
+ def fail(self, message: str) -> "te.NoReturn":
+ """Aborts the execution of the program with a specific error
+ message.
+
+ :param message: the error message to fail with.
+ """
+ raise UsageError(message, self)
+
+ def abort(self) -> "te.NoReturn":
+ """Aborts the script."""
+ raise Abort()
+
+ def exit(self, code: int = 0) -> "te.NoReturn":
+ """Exits the application with a given exit code."""
+ raise Exit(code)
+
+ def get_usage(self) -> str:
+ """Helper method to get formatted usage string for the current
+ context and command.
+ """
+ return self.command.get_usage(self)
+
+ def get_help(self) -> str:
+ """Helper method to get formatted help page for the current
+ context and command.
+ """
+ return self.command.get_help(self)
+
+ def _make_sub_context(self, command: "Command") -> "Context":
+ """Create a new context of the same type as this context, but
+ for a new command.
+
+ :meta private:
+ """
+ return type(self)(command, info_name=command.name, parent=self)
+
+ @t.overload
+ def invoke(
+ __self, # noqa: B902
+ __callback: "t.Callable[..., V]",
+ *args: t.Any,
+ **kwargs: t.Any,
+ ) -> V:
+ ...
+
+ @t.overload
+ def invoke(
+ __self, # noqa: B902
+ __callback: "Command",
+ *args: t.Any,
+ **kwargs: t.Any,
+ ) -> t.Any:
+ ...
+
+ def invoke(
+ __self, # noqa: B902
+ __callback: t.Union["Command", "t.Callable[..., V]"],
+ *args: t.Any,
+ **kwargs: t.Any,
+ ) -> t.Union[t.Any, V]:
+ """Invokes a command callback in exactly the way it expects. There
+ are two ways to invoke this method:
+
+ 1. the first argument can be a callback and all other arguments and
+ keyword arguments are forwarded directly to the function.
+ 2. the first argument is a click command object. In that case all
+ arguments are forwarded as well but proper click parameters
+ (options and click arguments) must be keyword arguments and Click
+ will fill in defaults.
+
+ Note that before Click 3.2, keyword arguments were not properly
+ filled in (contrary to the intention of this code) and no context
+ was created. For more information about this change and why it was
+ done in a bugfix release see :ref:`upgrade-to-3.2`.
+
+ .. versionchanged:: 8.0
+ All ``kwargs`` are tracked in :attr:`params` so they will be
+ passed if :meth:`forward` is called at multiple levels.
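+
+ Example usage (a sketch; ``other_command`` is a hypothetical
+ sibling command)::
+
+ @click.command()
+ @click.pass_context
+ def cli(ctx):
+ ctx.invoke(other_command, name="value")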
+ """
+ if isinstance(__callback, Command):
+ other_cmd = __callback
+
+ if other_cmd.callback is None:
+ raise TypeError(
+ "The given command does not have a callback that can be invoked."
+ )
+ else:
+ __callback = t.cast("t.Callable[..., V]", other_cmd.callback)
+
+ ctx = __self._make_sub_context(other_cmd)
+
+ for param in other_cmd.params:
+ if param.name not in kwargs and param.expose_value:
+ kwargs[param.name] = param.type_cast_value( # type: ignore
+ ctx, param.get_default(ctx)
+ )
+
+ # Track all kwargs as params, so that forward() will pass
+ # them on in subsequent calls.
+ ctx.params.update(kwargs)
+ else:
+ ctx = __self
+
+ with augment_usage_errors(__self):
+ with ctx:
+ return __callback(*args, **kwargs)
+
+ def forward(
+ __self, __cmd: "Command", *args: t.Any, **kwargs: t.Any # noqa: B902
+ ) -> t.Any:
+ """Similar to :meth:`invoke` but fills in default keyword
+ arguments from the current context if the other command expects
+ it. This cannot invoke callbacks directly, only other commands.
+
+ .. versionchanged:: 8.0
+ All ``kwargs`` are tracked in :attr:`params` so they will be
+ passed if ``forward`` is called at multiple levels.
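+
+ Example usage (a sketch; ``base`` is a hypothetical command that
+ shares the ``--count`` option)::
+
+ @cli.command()
+ @click.option("--count", default=1)
+ @click.pass_context
+ def dist(ctx, count):
+ ctx.forward(base)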
+ """
+ # Can only forward to other commands, not direct callbacks.
+ if not isinstance(__cmd, Command):
+ raise TypeError("Callback is not a command.")
+
+ for param in __self.params:
+ if param not in kwargs:
+ kwargs[param] = __self.params[param]
+
+ return __self.invoke(__cmd, *args, **kwargs)
+
+ def set_parameter_source(self, name: str, source: ParameterSource) -> None:
+ """Set the source of a parameter. This indicates the location
+ from which the value of the parameter was obtained.
+
+ :param name: The name of the parameter.
+ :param source: A member of :class:`~click.core.ParameterSource`.
+ """
+ self._parameter_source[name] = source
+
+ def get_parameter_source(self, name: str) -> t.Optional[ParameterSource]:
+ """Get the source of a parameter. This indicates the location
+ from which the value of the parameter was obtained.
+
+ This can be useful for determining when a user specified a value
+ on the command line that is the same as the default value. It
+ will be :attr:`~click.core.ParameterSource.DEFAULT` only if the
+ value was actually taken from the default.
+
+ :param name: The name of the parameter.
+ :rtype: ParameterSource
+
+ .. versionchanged:: 8.0
+ Returns ``None`` if the parameter was not provided from any
+ source.
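+
+ Example (a sketch; ``"name"`` is a hypothetical parameter)::
+
+ if ctx.get_parameter_source("name") is ParameterSource.DEFAULT:
+ click.echo("'name' fell back to its default")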
+ """
+ return self._parameter_source.get(name)
+
+
+class BaseCommand:
+ """The base command implements the minimal API contract of commands.
+ Most code will never use this as it does not implement a lot of useful
+ functionality, but it can act as the direct base class of alternative
+ parsing implementations that do not depend on the Click parser.
+
+ For instance, this can be used to bridge Click and other systems like
+ argparse or docopt.
+
+ Because base commands do not implement a lot of the API that other
+ parts of Click take for granted, they are not supported for all
+ operations. For instance, they usually cannot be used with the
+ decorators and they have no built-in callback system.
+
+ .. versionchanged:: 2.0
+ Added the `context_settings` parameter.
+
+ :param name: the name of the command to use unless a group overrides it.
+ :param context_settings: an optional dictionary with defaults that are
+ passed to the context object.
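+
+ Example of ``context_settings`` (a sketch showing one common
+ setting)::
+
+ @click.command(context_settings={"help_option_names": ["-h", "--help"]})
+ def cli():
+ pass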
+ """
+
+ #: The context class to create with :meth:`make_context`.
+ #:
+ #: .. versionadded:: 8.0
+ context_class: t.Type[Context] = Context
+ #: the default for the :attr:`Context.allow_extra_args` flag.
+ allow_extra_args = False
+ #: the default for the :attr:`Context.allow_interspersed_args` flag.
+ allow_interspersed_args = True
+ #: the default for the :attr:`Context.ignore_unknown_options` flag.
+ ignore_unknown_options = False
+
+ def __init__(
+ self,
+ name: t.Optional[str],
+ context_settings: t.Optional[t.MutableMapping[str, t.Any]] = None,
+ ) -> None:
+ #: the name the command thinks it has. Upon registering a command
+ #: on a :class:`Group`, the group will use it as the default
+ #: command name. You should instead use the
+ #: :class:`Context`\'s :attr:`~Context.info_name` attribute.
+ self.name = name
+
+ if context_settings is None:
+ context_settings = {}
+
+ #: an optional dictionary with defaults passed to the context.
+ self.context_settings: t.MutableMapping[str, t.Any] = context_settings
+
+ def to_info_dict(self, ctx: Context) -> t.Dict[str, t.Any]:
+ """Gather information that could be useful for a tool generating
+ user-facing documentation. This traverses the entire structure
+ below this command.
+
+ Use :meth:`click.Context.to_info_dict` to traverse the entire
+ CLI structure.
+
+ :param ctx: A :class:`Context` representing this command.
+
+ .. versionadded:: 8.0
+ """
+ return {"name": self.name}
+
+ def __repr__(self) -> str:
+ return f"<{self.__class__.__name__} {self.name}>"
+
+ def get_usage(self, ctx: Context) -> str:
+ raise NotImplementedError("Base commands cannot get usage")
+
+ def get_help(self, ctx: Context) -> str:
+ raise NotImplementedError("Base commands cannot get help")
+
+ def make_context(
+ self,
+ info_name: t.Optional[str],
+ args: t.List[str],
+ parent: t.Optional[Context] = None,
+ **extra: t.Any,
+ ) -> Context:
+ """This function when given an info name and arguments will kick
+ off the parsing and create a new :class:`Context`. It does not
+ invoke the actual command callback though.
+
+ To quickly customize the context class used without overriding
+ this method, set the :attr:`context_class` attribute.
+
+ :param info_name: the info name for this invocation. Generally this
+ is the most descriptive name for the script or
+ command. For the toplevel script it's usually
+ the name of the script, for commands below it's
+ the name of the command.
+ :param args: the arguments to parse as list of strings.
+ :param parent: the parent context if available.
+ :param extra: extra keyword arguments forwarded to the context
+ constructor.
+
+ .. versionchanged:: 8.0
+ Added the :attr:`context_class` attribute.
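+
+ Example usage (a sketch; ``cli`` is a hypothetical command)::
+
+ with cli.make_context("cli", ["--name", "x"]) as ctx:
+ rv = cli.invoke(ctx)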
+ """
+ for key, value in self.context_settings.items():
+ if key not in extra:
+ extra[key] = value
+
+ ctx = self.context_class(
+ self, info_name=info_name, parent=parent, **extra # type: ignore
+ )
+
+ with ctx.scope(cleanup=False):
+ self.parse_args(ctx, args)
+ return ctx
+
+ def parse_args(self, ctx: Context, args: t.List[str]) -> t.List[str]:
+ """Given a context and a list of arguments this creates the parser
+ and parses the arguments, then modifies the context as necessary.
+ This is automatically invoked by :meth:`make_context`.
+ """
+ raise NotImplementedError("Base commands do not know how to parse arguments.")
+
+ def invoke(self, ctx: Context) -> t.Any:
+ """Given a context, this invokes the command. The default
+ implementation is raising a not implemented error.
+ """
+ raise NotImplementedError("Base commands are not invocable by default")
+
+ def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]:
+ """Return a list of completions for the incomplete value. Looks
+ at the names of chained multi-commands.
+
+ Any command could be part of a chained multi-command, so sibling
+ commands are valid at any point during command completion. Other
+ command classes will return more completions.
+
+ :param ctx: Invocation context for this command.
+ :param incomplete: Value being completed. May be empty.
+
+ .. versionadded:: 8.0
+ """
+ from click.shell_completion import CompletionItem
+
+ results: t.List["CompletionItem"] = []
+
+ while ctx.parent is not None:
+ ctx = ctx.parent
+
+ if isinstance(ctx.command, MultiCommand) and ctx.command.chain:
+ results.extend(
+ CompletionItem(name, help=command.get_short_help_str())
+ for name, command in _complete_visible_commands(ctx, incomplete)
+ if name not in ctx.protected_args
+ )
+
+ return results
+
+ @t.overload
+ def main(
+ self,
+ args: t.Optional[t.Sequence[str]] = None,
+ prog_name: t.Optional[str] = None,
+ complete_var: t.Optional[str] = None,
+ standalone_mode: "te.Literal[True]" = True,
+ **extra: t.Any,
+ ) -> "te.NoReturn":
+ ...
+
+ @t.overload
+ def main(
+ self,
+ args: t.Optional[t.Sequence[str]] = None,
+ prog_name: t.Optional[str] = None,
+ complete_var: t.Optional[str] = None,
+ standalone_mode: bool = ...,
+ **extra: t.Any,
+ ) -> t.Any:
+ ...
+
+ def main(
+ self,
+ args: t.Optional[t.Sequence[str]] = None,
+ prog_name: t.Optional[str] = None,
+ complete_var: t.Optional[str] = None,
+ standalone_mode: bool = True,
+ windows_expand_args: bool = True,
+ **extra: t.Any,
+ ) -> t.Any:
+ """This is the way to invoke a script with all the bells and
+ whistles as a command line application. This will always terminate
+ the application after a call. If this is not wanted, ``SystemExit``
+ needs to be caught.
+
+ This method is also available by directly calling the instance of
+ a :class:`Command`.
+
+ :param args: the arguments that should be used for parsing. If not
+ provided, ``sys.argv[1:]`` is used.
+ :param prog_name: the program name that should be used. By default
+ the program name is constructed by taking the file
+ name from ``sys.argv[0]``.
+ :param complete_var: the environment variable that controls the
+ bash completion support. The default is
+ ``"_<prog_name>_COMPLETE"`` with ``prog_name``
+ in uppercase.
+ :param standalone_mode: the default behavior is to invoke the script
+ in standalone mode. Click will then
+ handle exceptions and convert them into
+ error messages, and the function will never
+ return but instead shut down the interpreter. If
+ this is set to `False`, exceptions will be
+ propagated to the caller and the return
+ value of this function is the return value
+ of :meth:`invoke`.
+ :param windows_expand_args: Expand glob patterns, user dir, and
+ env vars in command line args on Windows.
+ :param extra: extra keyword arguments are forwarded to the context
+ constructor. See :class:`Context` for more information.
+
+ .. versionchanged:: 8.0.1
+ Added the ``windows_expand_args`` parameter to allow
+ disabling command line arg expansion on Windows.
+
+ .. versionchanged:: 8.0
+ When taking arguments from ``sys.argv`` on Windows, glob
+ patterns, user dir, and env vars are expanded.
+
+ .. versionchanged:: 3.0
+ Added the ``standalone_mode`` parameter.
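+
+ Example of non-standalone use, e.g. in tests (a sketch; ``cli``
+ is a hypothetical command)::
+
+ rv = cli.main(["--name", "x"], standalone_mode=False)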
+ """
+ if args is None:
+ args = sys.argv[1:]
+
+ if os.name == "nt" and windows_expand_args:
+ args = _expand_args(args)
+ else:
+ args = list(args)
+
+ if prog_name is None:
+ prog_name = _detect_program_name()
+
+ # Process shell completion requests and exit early.
+ self._main_shell_completion(extra, prog_name, complete_var)
+
+ try:
+ try:
+ with self.make_context(prog_name, args, **extra) as ctx:
+ rv = self.invoke(ctx)
+ if not standalone_mode:
+ return rv
+ # it's not safe to `ctx.exit(rv)` here!
+ # note that `rv` may actually contain data like "1" which
+ # has obvious effects
+ # more subtle case: `rv=[None, None]` can come out of
+ # chained commands which all returned `None` -- so it's not
+ # even always obvious that `rv` indicates success/failure
+ # by its truthiness/falsiness
+ ctx.exit()
+ except (EOFError, KeyboardInterrupt) as e:
+ echo(file=sys.stderr)
+ raise Abort() from e
+ except ClickException as e:
+ if not standalone_mode:
+ raise
+ e.show()
+ sys.exit(e.exit_code)
+ except OSError as e:
+ if e.errno == errno.EPIPE:
+ sys.stdout = t.cast(t.TextIO, PacifyFlushWrapper(sys.stdout))
+ sys.stderr = t.cast(t.TextIO, PacifyFlushWrapper(sys.stderr))
+ sys.exit(1)
+ else:
+ raise
+ except Exit as e:
+ if standalone_mode:
+ sys.exit(e.exit_code)
+ else:
+ # in non-standalone mode, return the exit code
+ # note that this is only reached if `self.invoke` above raises
+ # an Exit explicitly -- thus bypassing the check there which
+ # would return its result
+ # the results of non-standalone execution may therefore be
+ # somewhat ambiguous: if there are codepaths which lead to
+ # `ctx.exit(1)` and to `return 1`, the caller won't be able to
+ # tell the difference between the two
+ return e.exit_code
+ except Abort:
+ if not standalone_mode:
+ raise
+ echo(_("Aborted!"), file=sys.stderr)
+ sys.exit(1)
+
+ def _main_shell_completion(
+ self,
+ ctx_args: t.MutableMapping[str, t.Any],
+ prog_name: str,
+ complete_var: t.Optional[str] = None,
+ ) -> None:
+ """Check if the shell is asking for tab completion, process
+ that, then exit early. Called from :meth:`main` before the
+ program is invoked.
+
+ :param prog_name: Name of the executable in the shell.
+ :param complete_var: Name of the environment variable that holds
+ the completion instruction. Defaults to
+ ``_{PROG_NAME}_COMPLETE``.
+
+ .. versionchanged:: 8.2.0
+ Dots (``.``) in ``prog_name`` are replaced with underscores (``_``).
+ """
+ if complete_var is None:
+ complete_name = prog_name.replace("-", "_").replace(".", "_")
+ complete_var = f"_{complete_name}_COMPLETE".upper()
+
+ instruction = os.environ.get(complete_var)
+
+ if not instruction:
+ return
+
+ from .shell_completion import shell_complete
+
+ rv = shell_complete(self, ctx_args, prog_name, complete_var, instruction)
+ sys.exit(rv)
+
+ def __call__(self, *args: t.Any, **kwargs: t.Any) -> t.Any:
+ """Alias for :meth:`main`."""
+ return self.main(*args, **kwargs)
+
+
+class Command(BaseCommand):
+ """Commands are the basic building block of command line interfaces in
+ Click. A basic command handles command line parsing and might dispatch
+ more parsing to commands nested below it.
+
+ :param name: the name of the command to use unless a group overrides it.
+ :param context_settings: an optional dictionary with defaults that are
+ passed to the context object.
+ :param callback: the callback to invoke. This is optional.
+ :param params: the parameters to register with this command. This can
+ be either :class:`Option` or :class:`Argument` objects.
+ :param help: the help string to use for this command.
+ :param epilog: like the help string but it's printed at the end of the
+ help page after everything else.
+ :param short_help: the short help to use for this command. This is
+ shown on the command listing of the parent command.
+ :param add_help_option: by default each command registers a ``--help``
+ option. It can be disabled with this parameter.
+ :param no_args_is_help: this controls what happens if no arguments are
+ provided. This option is disabled by default.
+ If enabled this will add ``--help`` as the
+ argument if no arguments are passed.
+ :param hidden: hide this command from help outputs.
+ :param deprecated: issues a message indicating that
+ the command is deprecated.
+
+ .. versionchanged:: 8.1
+ ``help``, ``epilog``, and ``short_help`` are stored unprocessed,
+ all formatting is done when outputting help text, not at init,
+ and is done even if not using the ``@command`` decorator.
+
+ .. versionchanged:: 8.0
+ Added a ``repr`` showing the command name.
+
+ .. versionchanged:: 7.1
+ Added the ``no_args_is_help`` parameter.
+
+ .. versionchanged:: 2.0
+ Added the ``context_settings`` parameter.
+ """
+
+ def __init__(
+ self,
+ name: t.Optional[str],
+ context_settings: t.Optional[t.MutableMapping[str, t.Any]] = None,
+ callback: t.Optional[t.Callable[..., t.Any]] = None,
+ params: t.Optional[t.List["Parameter"]] = None,
+ help: t.Optional[str] = None,
+ epilog: t.Optional[str] = None,
+ short_help: t.Optional[str] = None,
+ options_metavar: t.Optional[str] = "[OPTIONS]",
+ add_help_option: bool = True,
+ no_args_is_help: bool = False,
+ hidden: bool = False,
+ deprecated: bool = False,
+ ) -> None:
+ super().__init__(name, context_settings)
+ #: the callback to execute when the command fires. This might be
+ #: `None` in which case nothing happens.
+ self.callback = callback
+ #: the list of parameters for this command in the order they
+ #: should show up in the help page and execute. Eager parameters
+ #: will automatically be handled before non eager ones.
+ self.params: t.List["Parameter"] = params or []
+ self.help = help
+ self.epilog = epilog
+ self.options_metavar = options_metavar
+ self.short_help = short_help
+ self.add_help_option = add_help_option
+ self.no_args_is_help = no_args_is_help
+ self.hidden = hidden
+ self.deprecated = deprecated
+
+ def to_info_dict(self, ctx: Context) -> t.Dict[str, t.Any]:
+ info_dict = super().to_info_dict(ctx)
+ info_dict.update(
+ params=[param.to_info_dict() for param in self.get_params(ctx)],
+ help=self.help,
+ epilog=self.epilog,
+ short_help=self.short_help,
+ hidden=self.hidden,
+ deprecated=self.deprecated,
+ )
+ return info_dict
+
+ def get_usage(self, ctx: Context) -> str:
+ """Formats the usage line into a string and returns it.
+
+ Calls :meth:`format_usage` internally.
+ """
+ formatter = ctx.make_formatter()
+ self.format_usage(ctx, formatter)
+ return formatter.getvalue().rstrip("\n")
+
+ def get_params(self, ctx: Context) -> t.List["Parameter"]:
+ rv = self.params
+ help_option = self.get_help_option(ctx)
+
+ if help_option is not None:
+ rv = [*rv, help_option]
+
+ return rv
+
+ def format_usage(self, ctx: Context, formatter: HelpFormatter) -> None:
+ """Writes the usage line into the formatter.
+
+ This is a low-level method called by :meth:`get_usage`.
+ """
+ pieces = self.collect_usage_pieces(ctx)
+ formatter.write_usage(ctx.command_path, " ".join(pieces))
+
+ def collect_usage_pieces(self, ctx: Context) -> t.List[str]:
+ """Returns all the pieces that go into the usage line and returns
+ it as a list of strings.
+ """
+ rv = [self.options_metavar] if self.options_metavar else []
+
+ for param in self.get_params(ctx):
+ rv.extend(param.get_usage_pieces(ctx))
+
+ return rv
+
+ def get_help_option_names(self, ctx: Context) -> t.List[str]:
+ """Returns the names for the help option."""
+ all_names = set(ctx.help_option_names)
+ for param in self.params:
+ all_names.difference_update(param.opts)
+ all_names.difference_update(param.secondary_opts)
+ return list(all_names)
+
+ def get_help_option(self, ctx: Context) -> t.Optional["Option"]:
+ """Returns the help option object."""
+ help_options = self.get_help_option_names(ctx)
+
+ if not help_options or not self.add_help_option:
+ return None
+
+ def show_help(ctx: Context, param: "Parameter", value: str) -> None:
+ if value and not ctx.resilient_parsing:
+ echo(ctx.get_help(), color=ctx.color)
+ ctx.exit()
+
+ return Option(
+ help_options,
+ is_flag=True,
+ is_eager=True,
+ expose_value=False,
+ callback=show_help,
+ help=_("Show this message and exit."),
+ )
+
+ def make_parser(self, ctx: Context) -> OptionParser:
+ """Creates the underlying option parser for this command."""
+ parser = OptionParser(ctx)
+ for param in self.get_params(ctx):
+ param.add_to_parser(parser, ctx)
+ return parser
+
+ def get_help(self, ctx: Context) -> str:
+ """Formats the help into a string and returns it.
+
+ Calls :meth:`format_help` internally.
+ """
+ formatter = ctx.make_formatter()
+ self.format_help(ctx, formatter)
+ return formatter.getvalue().rstrip("\n")
+
+ def get_short_help_str(self, limit: int = 45) -> str:
+ """Gets short help for the command or makes it by shortening the
+ long help string.
+ """
+ if self.short_help:
+ text = inspect.cleandoc(self.short_help)
+ elif self.help:
+ text = make_default_short_help(self.help, limit)
+ else:
+ text = ""
+
+ if self.deprecated:
+ text = _("(Deprecated) {text}").format(text=text)
+
+ return text.strip()
+
+ def format_help(self, ctx: Context, formatter: HelpFormatter) -> None:
+ """Writes the help into the formatter if it exists.
+
+ This is a low-level method called by :meth:`get_help`.
+
+ This calls the following methods:
+
+ - :meth:`format_usage`
+ - :meth:`format_help_text`
+ - :meth:`format_options`
+ - :meth:`format_epilog`
+ """
+ self.format_usage(ctx, formatter)
+ self.format_help_text(ctx, formatter)
+ self.format_options(ctx, formatter)
+ self.format_epilog(ctx, formatter)
+
+ def format_help_text(self, ctx: Context, formatter: HelpFormatter) -> None:
+ """Writes the help text to the formatter if it exists."""
+ if self.help is not None:
+ # truncate the help text to the first form feed
+ text = inspect.cleandoc(self.help).partition("\f")[0]
+ else:
+ text = ""
+
+ if self.deprecated:
+ text = _("(Deprecated) {text}").format(text=text)
+
+ if text:
+ formatter.write_paragraph()
+
+ with formatter.indentation():
+ formatter.write_text(text)
+
+ def format_options(self, ctx: Context, formatter: HelpFormatter) -> None:
+ """Writes all the options into the formatter if they exist."""
+ opts = []
+ for param in self.get_params(ctx):
+ rv = param.get_help_record(ctx)
+ if rv is not None:
+ opts.append(rv)
+
+ if opts:
+ with formatter.section(_("Options")):
+ formatter.write_dl(opts)
+
+ def format_epilog(self, ctx: Context, formatter: HelpFormatter) -> None:
+ """Writes the epilog into the formatter if it exists."""
+ if self.epilog:
+ epilog = inspect.cleandoc(self.epilog)
+ formatter.write_paragraph()
+
+ with formatter.indentation():
+ formatter.write_text(epilog)
+
+ def parse_args(self, ctx: Context, args: t.List[str]) -> t.List[str]:
+ if not args and self.no_args_is_help and not ctx.resilient_parsing:
+ echo(ctx.get_help(), color=ctx.color)
+ ctx.exit()
+
+ parser = self.make_parser(ctx)
+ opts, args, param_order = parser.parse_args(args=args)
+
+ for param in iter_params_for_processing(param_order, self.get_params(ctx)):
+ value, args = param.handle_parse_result(ctx, opts, args)
+
+ if args and not ctx.allow_extra_args and not ctx.resilient_parsing:
+ ctx.fail(
+ ngettext(
+ "Got unexpected extra argument ({args})",
+ "Got unexpected extra arguments ({args})",
+ len(args),
+ ).format(args=" ".join(map(str, args)))
+ )
+
+ ctx.args = args
+ ctx._opt_prefixes.update(parser._opt_prefixes)
+ return args
+
+ def invoke(self, ctx: Context) -> t.Any:
+ """Given a context, this invokes the attached callback (if it exists)
+ in the right way.
+ """
+ if self.deprecated:
+ message = _(
+ "DeprecationWarning: The command {name!r} is deprecated."
+ ).format(name=self.name)
+ echo(style(message, fg="red"), err=True)
+
+ if self.callback is not None:
+ return ctx.invoke(self.callback, **ctx.params)
+
+ def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]:
+ """Return a list of completions for the incomplete value. Looks
+ at the names of options and chained multi-commands.
+
+ :param ctx: Invocation context for this command.
+ :param incomplete: Value being completed. May be empty.
+
+ .. versionadded:: 8.0
+ """
+ from click.shell_completion import CompletionItem
+
+ results: t.List["CompletionItem"] = []
+
+ if incomplete and not incomplete[0].isalnum():
+ for param in self.get_params(ctx):
+ if (
+ not isinstance(param, Option)
+ or param.hidden
+ or (
+ not param.multiple
+ and ctx.get_parameter_source(param.name) # type: ignore
+ is ParameterSource.COMMANDLINE
+ )
+ ):
+ continue
+
+ results.extend(
+ CompletionItem(name, help=param.help)
+ for name in [*param.opts, *param.secondary_opts]
+ if name.startswith(incomplete)
+ )
+
+ results.extend(super().shell_complete(ctx, incomplete))
+ return results
+
+
+class MultiCommand(Command):
+ """A multi command is the basic implementation of a command that
+ dispatches to subcommands. The most common version is the
+ :class:`Group`.
+
+ :param invoke_without_command: this controls how the multi command itself
+ is invoked. By default it's only invoked
+ if a subcommand is provided.
+ :param no_args_is_help: this controls what happens if no arguments are
+ provided. By default it is enabled if
+ `invoke_without_command` is disabled, and
+ disabled if it is enabled. If enabled, this
+ will add ``--help`` as the argument if no
+ arguments are passed.
+ :param subcommand_metavar: the string that is used in the documentation
+ to indicate the subcommand place.
+ :param chain: if this is set to `True` chaining of multiple subcommands
+ is enabled. This restricts the form of commands in that
+ they cannot have optional arguments but it allows
+ multiple commands to be chained together.
+ :param result_callback: The result callback to attach to this multi
+ command. This can be set or changed later with the
+ :meth:`result_callback` decorator.
+ :param attrs: Other command arguments described in :class:`Command`.
+ """
+
+ allow_extra_args = True
+ allow_interspersed_args = False
+
+ def __init__(
+ self,
+ name: t.Optional[str] = None,
+ invoke_without_command: bool = False,
+ no_args_is_help: t.Optional[bool] = None,
+ subcommand_metavar: t.Optional[str] = None,
+ chain: bool = False,
+ result_callback: t.Optional[t.Callable[..., t.Any]] = None,
+ **attrs: t.Any,
+ ) -> None:
+ super().__init__(name, **attrs)
+
+ if no_args_is_help is None:
+ no_args_is_help = not invoke_without_command
+
+ self.no_args_is_help = no_args_is_help
+ self.invoke_without_command = invoke_without_command
+
+ if subcommand_metavar is None:
+ if chain:
+ subcommand_metavar = "COMMAND1 [ARGS]... [COMMAND2 [ARGS]...]..."
+ else:
+ subcommand_metavar = "COMMAND [ARGS]..."
+
+ self.subcommand_metavar = subcommand_metavar
+ self.chain = chain
+ # The result callback that is stored. This can be set or
+ # overridden with the :func:`result_callback` decorator.
+ self._result_callback = result_callback
+
+ if self.chain:
+ for param in self.params:
+ if isinstance(param, Argument) and not param.required:
+ raise RuntimeError(
+ "Multi commands in chain mode cannot have"
+ " optional arguments."
+ )
+
+ def to_info_dict(self, ctx: Context) -> t.Dict[str, t.Any]:
+ info_dict = super().to_info_dict(ctx)
+ commands = {}
+
+ for name in self.list_commands(ctx):
+ command = self.get_command(ctx, name)
+
+ if command is None:
+ continue
+
+ sub_ctx = ctx._make_sub_context(command)
+
+ with sub_ctx.scope(cleanup=False):
+ commands[name] = command.to_info_dict(sub_ctx)
+
+ info_dict.update(commands=commands, chain=self.chain)
+ return info_dict
+
+ def collect_usage_pieces(self, ctx: Context) -> t.List[str]:
+ rv = super().collect_usage_pieces(ctx)
+ rv.append(self.subcommand_metavar)
+ return rv
+
+ def format_options(self, ctx: Context, formatter: HelpFormatter) -> None:
+ super().format_options(ctx, formatter)
+ self.format_commands(ctx, formatter)
+
+ def result_callback(self, replace: bool = False) -> t.Callable[[F], F]:
+ """Adds a result callback to the command. By default if a
+ result callback is already registered this will chain them but
+ this can be disabled with the `replace` parameter. The result
+ callback is invoked with the return value of the subcommand
+ (or the list of return values from all subcommands if chaining
+ is enabled) as well as the parameters as they would be passed
+ to the main callback.
+
+ Example::
+
+ @click.group()
+ @click.option('-i', '--input', default=23)
+ def cli(input):
+ return 42
+
+ @cli.result_callback()
+ def process_result(result, input):
+ return result + input
+
+ :param replace: if set to `True` an already existing result
+ callback will be removed.
+
+ .. versionchanged:: 8.0
+ Renamed from ``resultcallback``.
+
+ .. versionadded:: 3.0
+ """
+
+ def decorator(f: F) -> F:
+ old_callback = self._result_callback
+
+ if old_callback is None or replace:
+ self._result_callback = f
+ return f
+
+ def function(__value, *args, **kwargs): # type: ignore
+ inner = old_callback(__value, *args, **kwargs)
+ return f(inner, *args, **kwargs)
+
+ self._result_callback = rv = update_wrapper(t.cast(F, function), f)
+ return rv
+
+ return decorator
+
+ def format_commands(self, ctx: Context, formatter: HelpFormatter) -> None:
+ """Extra format methods for multi methods that adds all the commands
+ after the options.
+ """
+ commands = []
+ for subcommand in self.list_commands(ctx):
+ cmd = self.get_command(ctx, subcommand)
+ # The tool lied about a command it listed. Ignore it.
+ if cmd is None:
+ continue
+ if cmd.hidden:
+ continue
+
+ commands.append((subcommand, cmd))
+
+ # allow for 3 times the default spacing
+ if commands:
+ limit = formatter.width - 6 - max(len(cmd[0]) for cmd in commands)
+
+ rows = []
+ for subcommand, cmd in commands:
+ help = cmd.get_short_help_str(limit)
+ rows.append((subcommand, help))
+
+ if rows:
+ with formatter.section(_("Commands")):
+ formatter.write_dl(rows)
+
+ def parse_args(self, ctx: Context, args: t.List[str]) -> t.List[str]:
+ if not args and self.no_args_is_help and not ctx.resilient_parsing:
+ echo(ctx.get_help(), color=ctx.color)
+ ctx.exit()
+
+ rest = super().parse_args(ctx, args)
+
+ if self.chain:
+ ctx.protected_args = rest
+ ctx.args = []
+ elif rest:
+ ctx.protected_args, ctx.args = rest[:1], rest[1:]
+
+ return ctx.args
+
+ def invoke(self, ctx: Context) -> t.Any:
+ def _process_result(value: t.Any) -> t.Any:
+ if self._result_callback is not None:
+ value = ctx.invoke(self._result_callback, value, **ctx.params)
+ return value
+
+ if not ctx.protected_args:
+ if self.invoke_without_command:
+ # No subcommand was invoked, so the result callback is
+ # invoked with the group return value for regular
+ # groups, or an empty list for chained groups.
+ with ctx:
+ rv = super().invoke(ctx)
+ return _process_result([] if self.chain else rv)
+ ctx.fail(_("Missing command."))
+
+ # Fetch args back out
+ args = [*ctx.protected_args, *ctx.args]
+ ctx.args = []
+ ctx.protected_args = []
+
+ # If we're not in chain mode, we only allow the invocation of a
+ # single command but we also inform the current context about the
+ # name of the command to invoke.
+ if not self.chain:
+ # Make sure the context is entered so we do not clean up
+ # resources until the result processor has worked.
+ with ctx:
+ cmd_name, cmd, args = self.resolve_command(ctx, args)
+ assert cmd is not None
+ ctx.invoked_subcommand = cmd_name
+ super().invoke(ctx)
+ sub_ctx = cmd.make_context(cmd_name, args, parent=ctx)
+ with sub_ctx:
+ return _process_result(sub_ctx.command.invoke(sub_ctx))
+
+ # In chain mode we create the contexts step by step, but after the
+ # base command has been invoked. Because at that point we do not
+ # know the subcommands yet, the invoked subcommand attribute is
+ # set to ``*`` to inform the command that subcommands are executed
+ # but nothing else.
+ with ctx:
+ ctx.invoked_subcommand = "*" if args else None
+ super().invoke(ctx)
+
+ # Otherwise we make every single context and invoke them in a
+ # chain. In that case the return value to the result processor
+ # is the list of all invoked subcommand's results.
+ contexts = []
+ while args:
+ cmd_name, cmd, args = self.resolve_command(ctx, args)
+ assert cmd is not None
+ sub_ctx = cmd.make_context(
+ cmd_name,
+ args,
+ parent=ctx,
+ allow_extra_args=True,
+ allow_interspersed_args=False,
+ )
+ contexts.append(sub_ctx)
+ args, sub_ctx.args = sub_ctx.args, []
+
+ rv = []
+ for sub_ctx in contexts:
+ with sub_ctx:
+ rv.append(sub_ctx.command.invoke(sub_ctx))
+ return _process_result(rv)
+
+ def resolve_command(
+ self, ctx: Context, args: t.List[str]
+ ) -> t.Tuple[t.Optional[str], t.Optional[Command], t.List[str]]:
+ cmd_name = make_str(args[0])
+ original_cmd_name = cmd_name
+
+ # Get the command
+ cmd = self.get_command(ctx, cmd_name)
+
+ # If we can't find the command but there is a normalization
+ # function available, we try with that one.
+ if cmd is None and ctx.token_normalize_func is not None:
+ cmd_name = ctx.token_normalize_func(cmd_name)
+ cmd = self.get_command(ctx, cmd_name)
+
+ # If we don't find the command we want to show an error message
+ # to the user that it was not provided. However, there is
+ # something else we should do: if the first argument looks like
+ # an option we want to kick off parsing again for arguments to
+ # resolve things like --help which now should go to the main
+ # place.
+ if cmd is None and not ctx.resilient_parsing:
+ if split_opt(cmd_name)[0]:
+ self.parse_args(ctx, ctx.args)
+ ctx.fail(_("No such command {name!r}.").format(name=original_cmd_name))
+ return cmd_name if cmd else None, cmd, args[1:]
+
+ def get_command(self, ctx: Context, cmd_name: str) -> t.Optional[Command]:
+ """Given a context and a command name, this returns a
+ :class:`Command` object if it exists or returns `None`.
+ """
+ raise NotImplementedError
+
+ def list_commands(self, ctx: Context) -> t.List[str]:
+ """Returns a list of subcommand names in the order they should
+ appear.
+ """
+ return []
+
+ def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]:
+ """Return a list of completions for the incomplete value. Looks
+ at the names of options, subcommands, and chained
+ multi-commands.
+
+ :param ctx: Invocation context for this command.
+ :param incomplete: Value being completed. May be empty.
+
+ .. versionadded:: 8.0
+ """
+ from click.shell_completion import CompletionItem
+
+ results = [
+ CompletionItem(name, help=command.get_short_help_str())
+ for name, command in _complete_visible_commands(ctx, incomplete)
+ ]
+ results.extend(super().shell_complete(ctx, incomplete))
+ return results
+
+
+class Group(MultiCommand):
+ """A group allows a command to have subcommands attached. This is
+ the most common way to implement nesting in Click.
+
+ :param name: The name of the group command.
+ :param commands: A dict mapping names to :class:`Command` objects.
+ Can also be a list of :class:`Command`, which will use
+ :attr:`Command.name` to create the dict.
+ :param attrs: Other command arguments described in
+ :class:`MultiCommand`, :class:`Command`, and
+ :class:`BaseCommand`.
+
+ .. versionchanged:: 8.0
+ The ``commands`` argument can be a list of command objects.
+ """
+
+ #: If set, this is used by the group's :meth:`command` decorator
+ #: as the default :class:`Command` class. This is useful to make all
+ #: subcommands use a custom command class.
+ #:
+ #: .. versionadded:: 8.0
+ command_class: t.Optional[t.Type[Command]] = None
+
+ #: If set, this is used by the group's :meth:`group` decorator
+ #: as the default :class:`Group` class. This is useful to make all
+ #: subgroups use a custom group class.
+ #:
+ #: If set to the special value :class:`type` (literally
+ #: ``group_class = type``), this group's class will be used as the
+ #: default class. This makes a custom group class continue to make
+ #: custom groups.
+ #:
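+ #: Example (a sketch)::
+ #:
+ #: class MyGroup(click.Group):
+ #: group_class = type # subgroups will also be MyGroup
+ #: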
+ #: .. versionadded:: 8.0
+ group_class: t.Optional[t.Union[t.Type["Group"], t.Type[type]]] = None
+ # Literal[type] isn't valid, so use Type[type]
+
+ def __init__(
+ self,
+ name: t.Optional[str] = None,
+ commands: t.Optional[
+ t.Union[t.MutableMapping[str, Command], t.Sequence[Command]]
+ ] = None,
+ **attrs: t.Any,
+ ) -> None:
+ super().__init__(name, **attrs)
+
+ if commands is None:
+ commands = {}
+ elif isinstance(commands, abc.Sequence):
+ commands = {c.name: c for c in commands if c.name is not None}
+
+ #: The registered subcommands by their exported names.
+ self.commands: t.MutableMapping[str, Command] = commands
+
+ def add_command(self, cmd: Command, name: t.Optional[str] = None) -> None:
+ """Registers another :class:`Command` with this group. If the name
+ is not provided, the name of the command is used.
+ """
+ name = name or cmd.name
+ if name is None:
+ raise TypeError("Command has no name.")
+ _check_multicommand(self, name, cmd, register=True)
+ self.commands[name] = cmd
+
+ @t.overload
+ def command(self, __func: t.Callable[..., t.Any]) -> Command:
+ ...
+
+ @t.overload
+ def command(
+ self, *args: t.Any, **kwargs: t.Any
+ ) -> t.Callable[[t.Callable[..., t.Any]], Command]:
+ ...
+
+ def command(
+ self, *args: t.Any, **kwargs: t.Any
+ ) -> t.Union[t.Callable[[t.Callable[..., t.Any]], Command], Command]:
+ """A shortcut decorator for declaring and attaching a command to
+ the group. This takes the same arguments as :func:`command` and
+ immediately registers the created command with this group by
+ calling :meth:`add_command`.
+
+ To customize the command class used, set the
+ :attr:`command_class` attribute.
+
+ .. versionchanged:: 8.1
+ This decorator can be applied without parentheses.
+
+ .. versionchanged:: 8.0
+ Added the :attr:`command_class` attribute.
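+
+ Example usage (a sketch; ``cli`` is a hypothetical group)::
+
+ @cli.command()
+ def sync():
+ click.echo("Syncing")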
+ """
+ from .decorators import command
+
+ func: t.Optional[t.Callable[..., t.Any]] = None
+
+ if args and callable(args[0]):
+ assert (
+ len(args) == 1 and not kwargs
+ ), "Use 'command(**kwargs)(callable)' to provide arguments."
+ (func,) = args
+ args = ()
+
+ if self.command_class and kwargs.get("cls") is None:
+ kwargs["cls"] = self.command_class
+
+ def decorator(f: t.Callable[..., t.Any]) -> Command:
+ cmd: Command = command(*args, **kwargs)(f)
+ self.add_command(cmd)
+ return cmd
+
+ if func is not None:
+ return decorator(func)
+
+ return decorator
+
+ @t.overload
+ def group(self, __func: t.Callable[..., t.Any]) -> "Group":
+ ...
+
+ @t.overload
+ def group(
+ self, *args: t.Any, **kwargs: t.Any
+ ) -> t.Callable[[t.Callable[..., t.Any]], "Group"]:
+ ...
+
+ def group(
+ self, *args: t.Any, **kwargs: t.Any
+ ) -> t.Union[t.Callable[[t.Callable[..., t.Any]], "Group"], "Group"]:
+ """A shortcut decorator for declaring and attaching a group to
+ the group. This takes the same arguments as :func:`group` and
+ immediately registers the created group with this group by
+ calling :meth:`add_command`.
+
+ To customize the group class used, set the :attr:`group_class`
+ attribute.
+
+ .. versionchanged:: 8.1
+ This decorator can be applied without parentheses.
+
+ .. versionchanged:: 8.0
+ Added the :attr:`group_class` attribute.
+ """
+ from .decorators import group
+
+ func: t.Optional[t.Callable[..., t.Any]] = None
+
+ if args and callable(args[0]):
+ assert (
+ len(args) == 1 and not kwargs
+ ), "Use 'group(**kwargs)(callable)' to provide arguments."
+ (func,) = args
+ args = ()
+
+ if self.group_class is not None and kwargs.get("cls") is None:
+ if self.group_class is type:
+ kwargs["cls"] = type(self)
+ else:
+ kwargs["cls"] = self.group_class
+
+ def decorator(f: t.Callable[..., t.Any]) -> "Group":
+ cmd: Group = group(*args, **kwargs)(f)
+ self.add_command(cmd)
+ return cmd
+
+ if func is not None:
+ return decorator(func)
+
+ return decorator
+
+ def get_command(self, ctx: Context, cmd_name: str) -> t.Optional[Command]:
+ return self.commands.get(cmd_name)
+
+ def list_commands(self, ctx: Context) -> t.List[str]:
+ return sorted(self.commands)
+
+
+class CommandCollection(MultiCommand):
+ """A command collection is a multi command that merges multiple multi
+ commands together into one. This is a straightforward implementation
+ that accepts a list of different multi commands as sources and
+ provides all the commands for each of them.
+
+ See :class:`MultiCommand` and :class:`Command` for the description of
+ ``name`` and ``attrs``.
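+
+ Example usage (a sketch; ``group1`` and ``group2`` are
+ hypothetical groups)::
+
+ cli = CommandCollection(sources=[group1, group2])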
+ """
+
+ def __init__(
+ self,
+ name: t.Optional[str] = None,
+ sources: t.Optional[t.List[MultiCommand]] = None,
+ **attrs: t.Any,
+ ) -> None:
+ super().__init__(name, **attrs)
+ #: The list of registered multi commands.
+ self.sources: t.List[MultiCommand] = sources or []
+
+ def add_source(self, multi_cmd: MultiCommand) -> None:
+ """Adds a new multi command to the chain dispatcher."""
+ self.sources.append(multi_cmd)
+
+ def get_command(self, ctx: Context, cmd_name: str) -> t.Optional[Command]:
+ for source in self.sources:
+ rv = source.get_command(ctx, cmd_name)
+
+ if rv is not None:
+ if self.chain:
+ _check_multicommand(self, cmd_name, rv)
+
+ return rv
+
+ return None
+
+ def list_commands(self, ctx: Context) -> t.List[str]:
+ rv: t.Set[str] = set()
+
+ for source in self.sources:
+ rv.update(source.list_commands(ctx))
+
+ return sorted(rv)
+
+
+def _check_iter(value: t.Any) -> t.Iterator[t.Any]:
+ """Check if the value is iterable but not a string. Raises a type
+ error, or return an iterator over the value.
+ """
+ if isinstance(value, str):
+ raise TypeError
+
+ return iter(value)
+
+
+class Parameter:
+ r"""A parameter to a command comes in two versions: they are either
+ :class:`Option`\s or :class:`Argument`\s. Other subclasses are currently
+ not supported by design as some of the internals for parsing are
+ intentionally not finalized.
+
+ Some settings are supported by both options and arguments.
+
+ :param param_decls: the parameter declarations for this option or
+ argument. This is a list of flags or argument
+ names.
+ :param type: the type that should be used. Either a :class:`ParamType`
+ or a Python type. The latter is converted into the former
+ automatically if supported.
+ :param required: controls if this is optional or not.
+ :param default: the default value if omitted. This can also be a callable,
+ in which case it's invoked when the default is needed
+ without any arguments.
+ :param callback: A function to further process or validate the value
+ after type conversion. It is called as ``f(ctx, param, value)``
+ and must return the value. It is called for all sources,
+ including prompts.
+ :param nargs: the number of arguments to match. If not ``1`` the return
+ value is a tuple instead of single value. The default for
+ nargs is ``1`` (except if the type is a tuple, then it's
+ the arity of the tuple). If ``nargs=-1``, all remaining
+ parameters are collected.
+ :param metavar: how the value is represented in the help page.
+ :param expose_value: if this is `True` then the value is passed onwards
+ to the command callback and stored on the context,
+ otherwise it's skipped.
+ :param is_eager: eager values are processed before non eager ones. This
+ should not be set for arguments or it will invert the
+ order of processing.
+ :param envvar: a string or list of strings that are environment variables
+ that should be checked.
+ :param shell_complete: A function that returns custom shell
+ completions. Used instead of the param's type completion if
+ given. Takes ``ctx, param, incomplete`` and must return a list
+ of :class:`~click.shell_completion.CompletionItem` or a list of
+ strings (a sketch follows below).
+
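+ An example of a custom ``shell_complete`` function (a sketch; the
+ candidate names are illustrative)::
+
+ def complete_names(ctx, param, incomplete):
+ return [n for n in ("alice", "bob") if n.startswith(incomplete)]
+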
+ .. versionchanged:: 8.0
+ ``process_value`` validates required parameters and bounded
+ ``nargs``, and invokes the parameter callback before returning
+ the value. This allows the callback to validate prompts.
+ ``full_process_value`` is removed.
+
+ .. versionchanged:: 8.0
+ ``autocompletion`` is renamed to ``shell_complete`` and has new
+ semantics described above. The old name is deprecated and will
+ be removed in 8.1, until then it will be wrapped to match the
+ new requirements.
+
+ .. versionchanged:: 8.0
+ For ``multiple=True, nargs>1``, the default must be a list of
+ tuples.
+
+ .. versionchanged:: 8.0
+ Setting a default is no longer required for ``nargs>1``, it will
+ default to ``None``. ``multiple=True`` or ``nargs=-1`` will
+ default to ``()``.
+
+ .. versionchanged:: 7.1
+ Empty environment variables are ignored rather than taking the
+ empty string value. This makes it possible for scripts to clear
+ variables if they can't unset them.
+
+ .. versionchanged:: 2.0
+ Changed signature for parameter callback to also be passed the
+ parameter. The old callback format will still work, but it will
+ raise a warning to give you a chance to migrate the code more easily.
+ """
+
+ param_type_name = "parameter"
+
+ def __init__(
+ self,
+ param_decls: t.Optional[t.Sequence[str]] = None,
+ type: t.Optional[t.Union[types.ParamType, t.Any]] = None,
+ required: bool = False,
+ default: t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]] = None,
+ callback: t.Optional[t.Callable[[Context, "Parameter", t.Any], t.Any]] = None,
+ nargs: t.Optional[int] = None,
+ multiple: bool = False,
+ metavar: t.Optional[str] = None,
+ expose_value: bool = True,
+ is_eager: bool = False,
+ envvar: t.Optional[t.Union[str, t.Sequence[str]]] = None,
+ shell_complete: t.Optional[
+ t.Callable[
+ [Context, "Parameter", str],
+ t.Union[t.List["CompletionItem"], t.List[str]],
+ ]
+ ] = None,
+ ) -> None:
+ self.name: t.Optional[str]
+ self.opts: t.List[str]
+ self.secondary_opts: t.List[str]
+ self.name, self.opts, self.secondary_opts = self._parse_decls(
+ param_decls or (), expose_value
+ )
+ self.type: types.ParamType = types.convert_type(type, default)
+
+ # Default nargs to what the type tells us if we have that
+ # information available.
+ if nargs is None:
+ if self.type.is_composite:
+ nargs = self.type.arity
+ else:
+ nargs = 1
+
+ self.required = required
+ self.callback = callback
+ self.nargs = nargs
+ self.multiple = multiple
+ self.expose_value = expose_value
+ self.default = default
+ self.is_eager = is_eager
+ self.metavar = metavar
+ self.envvar = envvar
+ self._custom_shell_complete = shell_complete
+
+ if __debug__:
+ if self.type.is_composite and nargs != self.type.arity:
+ raise ValueError(
+ f"'nargs' must be {self.type.arity} (or None) for"
+ f" type {self.type!r}, but it was {nargs}."
+ )
+
+ # Skip no default or callable default.
+ check_default = default if not callable(default) else None
+
+ if check_default is not None:
+ if multiple:
+ try:
+ # Only check the first value against nargs.
+ check_default = next(_check_iter(check_default), None)
+ except TypeError:
+ raise ValueError(
+ "'default' must be a list when 'multiple' is true."
+ ) from None
+
+ # Can be None for multiple with empty default.
+ if nargs != 1 and check_default is not None:
+ try:
+ _check_iter(check_default)
+ except TypeError:
+ if multiple:
+ message = (
+ "'default' must be a list of lists when 'multiple' is"
+ " true and 'nargs' != 1."
+ )
+ else:
+ message = "'default' must be a list when 'nargs' != 1."
+
+ raise ValueError(message) from None
+
+ if nargs > 1 and len(check_default) != nargs:
+ subject = "item length" if multiple else "length"
+ raise ValueError(
+ f"'default' {subject} must match nargs={nargs}."
+ )
+
+ def to_info_dict(self) -> t.Dict[str, t.Any]:
+ """Gather information that could be useful for a tool generating
+ user-facing documentation.
+
+ Use :meth:`click.Context.to_info_dict` to traverse the entire
+ CLI structure.
+
+ .. versionadded:: 8.0
+ """
+ return {
+ "name": self.name,
+ "param_type_name": self.param_type_name,
+ "opts": self.opts,
+ "secondary_opts": self.secondary_opts,
+ "type": self.type.to_info_dict(),
+ "required": self.required,
+ "nargs": self.nargs,
+ "multiple": self.multiple,
+ "default": self.default,
+ "envvar": self.envvar,
+ }
+
+ def __repr__(self) -> str:
+ return f"<{self.__class__.__name__} {self.name}>"
+
+ def _parse_decls(
+ self, decls: t.Sequence[str], expose_value: bool
+ ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]:
+ raise NotImplementedError()
+
+ @property
+ def human_readable_name(self) -> str:
+ """Returns the human readable name of this parameter. This is the
+ same as the name for options, but the metavar for arguments.
+ """
+ return self.name # type: ignore
+
+ def make_metavar(self) -> str:
+ if self.metavar is not None:
+ return self.metavar
+
+ metavar = self.type.get_metavar(self)
+
+ if metavar is None:
+ metavar = self.type.name.upper()
+
+ if self.nargs != 1:
+ metavar += "..."
+
+ return metavar
+
+ @t.overload
+ def get_default(
+ self, ctx: Context, call: "te.Literal[True]" = True
+ ) -> t.Optional[t.Any]:
+ ...
+
+ @t.overload
+ def get_default(
+ self, ctx: Context, call: bool = ...
+ ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]:
+ ...
+
+ def get_default(
+ self, ctx: Context, call: bool = True
+ ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]:
+ """Get the default for the parameter. Tries
+ :meth:`Context.lookup_default` first, then the local default.
+
+ :param ctx: Current context.
+ :param call: If the default is a callable, call it. Disable to
+ return the callable instead.
+
+ .. versionchanged:: 8.0.2
+ Type casting is no longer performed when getting a default.
+
+ .. versionchanged:: 8.0.1
+ Type casting can fail in resilient parsing mode. Invalid
+ defaults will not prevent showing help text.
+
+ .. versionchanged:: 8.0
+ Looks at ``ctx.default_map`` first.
+
+ .. versionchanged:: 8.0
+ Added the ``call`` parameter.
+ """
+ value = ctx.lookup_default(self.name, call=False) # type: ignore
+
+ if value is None:
+ value = self.default
+
+ if call and callable(value):
+ value = value()
+
+ return value
+
+ def add_to_parser(self, parser: OptionParser, ctx: Context) -> None:
+ raise NotImplementedError()
+
+ def consume_value(
+ self, ctx: Context, opts: t.Mapping[str, t.Any]
+ ) -> t.Tuple[t.Any, ParameterSource]:
+ value = opts.get(self.name) # type: ignore
+ source = ParameterSource.COMMANDLINE
+
+ if value is None:
+ value = self.value_from_envvar(ctx)
+ source = ParameterSource.ENVIRONMENT
+
+ if value is None:
+ value = ctx.lookup_default(self.name) # type: ignore
+ source = ParameterSource.DEFAULT_MAP
+
+ if value is None:
+ value = self.get_default(ctx)
+ source = ParameterSource.DEFAULT
+
+ return value, source
+
+ def type_cast_value(self, ctx: Context, value: t.Any) -> t.Any:
+ """Convert and validate a value against the option's
+ :attr:`type`, :attr:`multiple`, and :attr:`nargs`.
+ """
+ if value is None:
+ return () if self.multiple or self.nargs == -1 else None
+
+ def check_iter(value: t.Any) -> t.Iterator[t.Any]:
+ try:
+ return _check_iter(value)
+ except TypeError:
+ # This should only happen when passing in args manually,
+ # the parser should construct an iterable when parsing
+ # the command line.
+ raise BadParameter(
+ _("Value must be an iterable."), ctx=ctx, param=self
+ ) from None
+
+ if self.nargs == 1 or self.type.is_composite:
+
+ def convert(value: t.Any) -> t.Any:
+ return self.type(value, param=self, ctx=ctx)
+
+ elif self.nargs == -1:
+
+ def convert(value: t.Any) -> t.Any: # t.Tuple[t.Any, ...]
+ return tuple(self.type(x, self, ctx) for x in check_iter(value))
+
+ else: # nargs > 1
+
+ def convert(value: t.Any) -> t.Any: # t.Tuple[t.Any, ...]
+ value = tuple(check_iter(value))
+
+ if len(value) != self.nargs:
+ raise BadParameter(
+ ngettext(
+ "Takes {nargs} values but 1 was given.",
+ "Takes {nargs} values but {len} were given.",
+ len(value),
+ ).format(nargs=self.nargs, len=len(value)),
+ ctx=ctx,
+ param=self,
+ )
+
+ return tuple(self.type(x, self, ctx) for x in value)
+
+ if self.multiple:
+ return tuple(convert(x) for x in check_iter(value))
+
+ return convert(value)
+
+ def value_is_missing(self, value: t.Any) -> bool:
+ if value is None:
+ return True
+
+ if (self.nargs != 1 or self.multiple) and value == ():
+ return True
+
+ return False
+
+ def process_value(self, ctx: Context, value: t.Any) -> t.Any:
+ value = self.type_cast_value(ctx, value)
+
+ if self.required and self.value_is_missing(value):
+ raise MissingParameter(ctx=ctx, param=self)
+
+ if self.callback is not None:
+ value = self.callback(ctx, self, value)
+
+ return value
+
+ def resolve_envvar_value(self, ctx: Context) -> t.Optional[str]:
+ if self.envvar is None:
+ return None
+
+ if isinstance(self.envvar, str):
+ rv = os.environ.get(self.envvar)
+
+ if rv:
+ return rv
+ else:
+ for envvar in self.envvar:
+ rv = os.environ.get(envvar)
+
+ if rv:
+ return rv
+
+ return None
+
+ def value_from_envvar(self, ctx: Context) -> t.Optional[t.Any]:
+ rv: t.Optional[t.Any] = self.resolve_envvar_value(ctx)
+
+ if rv is not None and self.nargs != 1:
+ rv = self.type.split_envvar_value(rv)
+
+ return rv
+
+ def handle_parse_result(
+ self, ctx: Context, opts: t.Mapping[str, t.Any], args: t.List[str]
+ ) -> t.Tuple[t.Any, t.List[str]]:
+ with augment_usage_errors(ctx, param=self):
+ value, source = self.consume_value(ctx, opts)
+ ctx.set_parameter_source(self.name, source) # type: ignore
+
+ try:
+ value = self.process_value(ctx, value)
+ except Exception:
+ if not ctx.resilient_parsing:
+ raise
+
+ value = None
+
+ if self.expose_value:
+ ctx.params[self.name] = value # type: ignore
+
+ return value, args
+
+ def get_help_record(self, ctx: Context) -> t.Optional[t.Tuple[str, str]]:
+ pass
+
+ def get_usage_pieces(self, ctx: Context) -> t.List[str]:
+ return []
+
+ def get_error_hint(self, ctx: Context) -> str:
+ """Get a stringified version of the param for use in error messages to
+ indicate which param caused the error.
+ """
+ hint_list = self.opts or [self.human_readable_name]
+ return " / ".join(f"'{x}'" for x in hint_list)
+
+ def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]:
+ """Return a list of completions for the incomplete value. If a
+ ``shell_complete`` function was given during init, it is used.
+ Otherwise, the :attr:`type`
+ :meth:`~click.types.ParamType.shell_complete` function is used.
+
+ :param ctx: Invocation context for this command.
+ :param incomplete: Value being completed. May be empty.
+
+ .. versionadded:: 8.0
+ """
+ if self._custom_shell_complete is not None:
+ results = self._custom_shell_complete(ctx, self, incomplete)
+
+ if results and isinstance(results[0], str):
+ from click.shell_completion import CompletionItem
+
+ results = [CompletionItem(c) for c in results]
+
+ return t.cast(t.List["CompletionItem"], results)
+
+ return self.type.shell_complete(ctx, self, incomplete)
+
+
+class Option(Parameter):
+ """Options are usually optional values on the command line and
+ have some extra features that arguments don't have.
+
+ All other parameters are passed onwards to the parameter constructor.
+
+ :param show_default: Show the default value for this option in its
+ help text. Values are not shown by default, unless
+ :attr:`Context.show_default` is ``True``. If this value is a
+ string, it shows that string in parentheses instead of the
+ actual value. This is particularly useful for dynamic options.
+ For single option boolean flags, the default remains hidden if
+ its value is ``False``.
+ :param show_envvar: Controls if an environment variable should be
+ shown on the help page. Normally, environment variables are not
+ shown.
+    :param prompt: If set to ``True`` or a non-empty string, the user
+        will be prompted for input. If set to ``True``, the prompt
+        will be the option name capitalized.
+ :param confirmation_prompt: Prompt a second time to confirm the
+ value if it was prompted for. Can be set to a string instead of
+ ``True`` to customize the message.
+ :param prompt_required: If set to ``False``, the user will be
+ prompted for input only when the option was specified as a flag
+ without a value.
+ :param hide_input: If this is ``True`` then the input on the prompt
+ will be hidden from the user. This is useful for password input.
+    :param is_flag: forces this option to act as a flag. The default is
+                    auto-detection.
+ :param flag_value: which value should be used for this flag if it's
+ enabled. This is set to a boolean automatically if
+ the option string contains a slash to mark two options.
+    :param multiple: if this is set to `True` then the argument is accepted
+                      multiple times and recorded. This is similar to ``nargs``
+                      in how it works but supports an arbitrary number of
+                      arguments.
+ :param count: this flag makes an option increment an integer.
+ :param allow_from_autoenv: if this is enabled then the value of this
+ parameter will be pulled from an environment
+ variable in case a prefix is defined on the
+ context.
+ :param help: the help string.
+ :param hidden: hide this option from help outputs.
+ :param attrs: Other command arguments described in :class:`Parameter`.
+
+ .. versionchanged:: 8.1.0
+ Help text indentation is cleaned here instead of only in the
+ ``@option`` decorator.
+
+ .. versionchanged:: 8.1.0
+ The ``show_default`` parameter overrides
+ ``Context.show_default``.
+
+ .. versionchanged:: 8.1.0
+ The default of a single option boolean flag is not shown if the
+ default value is ``False``.
+
+ .. versionchanged:: 8.0.1
+ ``type`` is detected from ``flag_value`` if given.
+ """
+
+ param_type_name = "option"
+
+ def __init__(
+ self,
+ param_decls: t.Optional[t.Sequence[str]] = None,
+ show_default: t.Union[bool, str, None] = None,
+ prompt: t.Union[bool, str] = False,
+ confirmation_prompt: t.Union[bool, str] = False,
+ prompt_required: bool = True,
+ hide_input: bool = False,
+ is_flag: t.Optional[bool] = None,
+ flag_value: t.Optional[t.Any] = None,
+ multiple: bool = False,
+ count: bool = False,
+ allow_from_autoenv: bool = True,
+ type: t.Optional[t.Union[types.ParamType, t.Any]] = None,
+ help: t.Optional[str] = None,
+ hidden: bool = False,
+ show_choices: bool = True,
+ show_envvar: bool = False,
+ **attrs: t.Any,
+ ) -> None:
+ if help:
+ help = inspect.cleandoc(help)
+
+ default_is_missing = "default" not in attrs
+ super().__init__(param_decls, type=type, multiple=multiple, **attrs)
+
+ if prompt is True:
+ if self.name is None:
+ raise TypeError("'name' is required with 'prompt=True'.")
+
+ prompt_text: t.Optional[str] = self.name.replace("_", " ").capitalize()
+ elif prompt is False:
+ prompt_text = None
+ else:
+ prompt_text = prompt
+
+ self.prompt = prompt_text
+ self.confirmation_prompt = confirmation_prompt
+ self.prompt_required = prompt_required
+ self.hide_input = hide_input
+ self.hidden = hidden
+
+ # If prompt is enabled but not required, then the option can be
+ # used as a flag to indicate using prompt or flag_value.
+ self._flag_needs_value = self.prompt is not None and not self.prompt_required
+
+ if is_flag is None:
+ if flag_value is not None:
+ # Implicitly a flag because flag_value was set.
+ is_flag = True
+ elif self._flag_needs_value:
+ # Not a flag, but when used as a flag it shows a prompt.
+ is_flag = False
+ else:
+ # Implicitly a flag because flag options were given.
+ is_flag = bool(self.secondary_opts)
+ elif is_flag is False and not self._flag_needs_value:
+ # Not a flag, and prompt is not enabled, can be used as a
+ # flag if flag_value is set.
+ self._flag_needs_value = flag_value is not None
+
+ self.default: t.Union[t.Any, t.Callable[[], t.Any]]
+
+ if is_flag and default_is_missing and not self.required:
+ if multiple:
+ self.default = ()
+ else:
+ self.default = False
+
+ if flag_value is None:
+ flag_value = not self.default
+
+ self.type: types.ParamType
+ if is_flag and type is None:
+ # Re-guess the type from the flag value instead of the
+ # default.
+ self.type = types.convert_type(None, flag_value)
+
+ self.is_flag: bool = is_flag
+ self.is_bool_flag: bool = is_flag and isinstance(self.type, types.BoolParamType)
+ self.flag_value: t.Any = flag_value
+
+ # Counting
+ self.count = count
+ if count:
+ if type is None:
+ self.type = types.IntRange(min=0)
+ if default_is_missing:
+ self.default = 0
+
+ self.allow_from_autoenv = allow_from_autoenv
+ self.help = help
+ self.show_default = show_default
+ self.show_choices = show_choices
+ self.show_envvar = show_envvar
+
+ if __debug__:
+ if self.nargs == -1:
+ raise TypeError("nargs=-1 is not supported for options.")
+
+ if self.prompt and self.is_flag and not self.is_bool_flag:
+ raise TypeError("'prompt' is not valid for non-boolean flag.")
+
+ if not self.is_bool_flag and self.secondary_opts:
+ raise TypeError("Secondary flag is not valid for non-boolean flag.")
+
+ if self.is_bool_flag and self.hide_input and self.prompt is not None:
+ raise TypeError(
+ "'prompt' with 'hide_input' is not valid for boolean flag."
+ )
+
+ if self.count:
+ if self.multiple:
+ raise TypeError("'count' is not valid with 'multiple'.")
+
+ if self.is_flag:
+ raise TypeError("'count' is not valid with 'is_flag'.")
+
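+    # Flag-inference sketch for the constructor above; the declarations
+    # are illustrative, not part of the library:
+    #   Option(["--verbose/--quiet"])         -> boolean flag (secondary opts)
+    #   Option(["--level"], flag_value="hi")  -> flag, because flag_value is set
+    #   Option(["--name"], prompt=True, prompt_required=False)
+    #       -> not a flag, but prompts when given bare on the command line
+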
+ def to_info_dict(self) -> t.Dict[str, t.Any]:
+ info_dict = super().to_info_dict()
+ info_dict.update(
+ help=self.help,
+ prompt=self.prompt,
+ is_flag=self.is_flag,
+ flag_value=self.flag_value,
+ count=self.count,
+ hidden=self.hidden,
+ )
+ return info_dict
+
+ def _parse_decls(
+ self, decls: t.Sequence[str], expose_value: bool
+ ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]:
+ opts = []
+ secondary_opts = []
+ name = None
+ possible_names = []
+
+ for decl in decls:
+ if decl.isidentifier():
+ if name is not None:
+ raise TypeError(f"Name '{name}' defined twice")
+ name = decl
+ else:
+ split_char = ";" if decl[:1] == "/" else "/"
+ if split_char in decl:
+ first, second = decl.split(split_char, 1)
+ first = first.rstrip()
+ if first:
+ possible_names.append(split_opt(first))
+ opts.append(first)
+ second = second.lstrip()
+ if second:
+ secondary_opts.append(second.lstrip())
+ if first == second:
+ raise ValueError(
+ f"Boolean option {decl!r} cannot use the"
+ " same flag for true/false."
+ )
+ else:
+ possible_names.append(split_opt(decl))
+ opts.append(decl)
+
+ if name is None and possible_names:
+ possible_names.sort(key=lambda x: -len(x[0])) # group long options first
+ name = possible_names[0][1].replace("-", "_").lower()
+ if not name.isidentifier():
+ name = None
+
+ if name is None:
+ if not expose_value:
+ return None, opts, secondary_opts
+ raise TypeError("Could not determine name for option")
+
+ if not opts and not secondary_opts:
+ raise TypeError(
+ f"No options defined but a name was passed ({name})."
+ " Did you mean to declare an argument instead? Did"
+ f" you mean to pass '--{name}'?"
+ )
+
+ return name, opts, secondary_opts
+
+ def add_to_parser(self, parser: OptionParser, ctx: Context) -> None:
+ if self.multiple:
+ action = "append"
+ elif self.count:
+ action = "count"
+ else:
+ action = "store"
+
+ if self.is_flag:
+ action = f"{action}_const"
+
+ if self.is_bool_flag and self.secondary_opts:
+ parser.add_option(
+ obj=self, opts=self.opts, dest=self.name, action=action, const=True
+ )
+ parser.add_option(
+ obj=self,
+ opts=self.secondary_opts,
+ dest=self.name,
+ action=action,
+ const=False,
+ )
+ else:
+ parser.add_option(
+ obj=self,
+ opts=self.opts,
+ dest=self.name,
+ action=action,
+ const=self.flag_value,
+ )
+ else:
+ parser.add_option(
+ obj=self,
+ opts=self.opts,
+ dest=self.name,
+ action=action,
+ nargs=self.nargs,
+ )
+
+ def get_help_record(self, ctx: Context) -> t.Optional[t.Tuple[str, str]]:
+ if self.hidden:
+ return None
+
+ any_prefix_is_slash = False
+
+ def _write_opts(opts: t.Sequence[str]) -> str:
+ nonlocal any_prefix_is_slash
+
+ rv, any_slashes = join_options(opts)
+
+ if any_slashes:
+ any_prefix_is_slash = True
+
+ if not self.is_flag and not self.count:
+ rv += f" {self.make_metavar()}"
+
+ return rv
+
+ rv = [_write_opts(self.opts)]
+
+ if self.secondary_opts:
+ rv.append(_write_opts(self.secondary_opts))
+
+ help = self.help or ""
+ extra = []
+
+ if self.show_envvar:
+ envvar = self.envvar
+
+ if envvar is None:
+ if (
+ self.allow_from_autoenv
+ and ctx.auto_envvar_prefix is not None
+ and self.name is not None
+ ):
+ envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}"
+
+ if envvar is not None:
+ var_str = (
+ envvar
+ if isinstance(envvar, str)
+ else ", ".join(str(d) for d in envvar)
+ )
+ extra.append(_("env var: {var}").format(var=var_str))
+
+ # Temporarily enable resilient parsing to avoid type casting
+ # failing for the default. Might be possible to extend this to
+ # help formatting in general.
+ resilient = ctx.resilient_parsing
+ ctx.resilient_parsing = True
+
+ try:
+ default_value = self.get_default(ctx, call=False)
+ finally:
+ ctx.resilient_parsing = resilient
+
+ show_default = False
+ show_default_is_str = False
+
+ if self.show_default is not None:
+ if isinstance(self.show_default, str):
+ show_default_is_str = show_default = True
+ else:
+ show_default = self.show_default
+ elif ctx.show_default is not None:
+ show_default = ctx.show_default
+
+ if show_default_is_str or (show_default and (default_value is not None)):
+ if show_default_is_str:
+ default_string = f"({self.show_default})"
+ elif isinstance(default_value, (list, tuple)):
+ default_string = ", ".join(str(d) for d in default_value)
+ elif inspect.isfunction(default_value):
+ default_string = _("(dynamic)")
+ elif self.is_bool_flag and self.secondary_opts:
+ # For boolean flags that have distinct True/False opts,
+ # use the opt without prefix instead of the value.
+ default_string = split_opt(
+ (self.opts if self.default else self.secondary_opts)[0]
+ )[1]
+ elif self.is_bool_flag and not self.secondary_opts and not default_value:
+ default_string = ""
+ else:
+ default_string = str(default_value)
+
+ if default_string:
+ extra.append(_("default: {default}").format(default=default_string))
+
+ if (
+ isinstance(self.type, types._NumberRangeBase)
+ # skip count with default range type
+ and not (self.count and self.type.min == 0 and self.type.max is None)
+ ):
+ range_str = self.type._describe_range()
+
+ if range_str:
+ extra.append(range_str)
+
+ if self.required:
+ extra.append(_("required"))
+
+ if extra:
+ extra_str = "; ".join(extra)
+ help = f"{help} [{extra_str}]" if help else f"[{extra_str}]"
+
+ return ("; " if any_prefix_is_slash else " / ").join(rv), help
+
+ @t.overload
+ def get_default(
+ self, ctx: Context, call: "te.Literal[True]" = True
+ ) -> t.Optional[t.Any]:
+ ...
+
+ @t.overload
+ def get_default(
+ self, ctx: Context, call: bool = ...
+ ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]:
+ ...
+
+ def get_default(
+ self, ctx: Context, call: bool = True
+ ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]:
+        # If we're a non-boolean flag, our default is more complex: we
+        # need to look at all flags in the same group to figure out if
+        # we're the default one, in which case we return the flag value
+        # as the default.
+ if self.is_flag and not self.is_bool_flag:
+ for param in ctx.command.params:
+ if param.name == self.name and param.default:
+ return t.cast(Option, param).flag_value
+
+ return None
+
+ return super().get_default(ctx, call=call)
+
+ def prompt_for_value(self, ctx: Context) -> t.Any:
+ """This is an alternative flow that can be activated in the full
+ value processing if a value does not exist. It will prompt the
+ user until a valid value exists and then returns the processed
+ value as result.
+ """
+ assert self.prompt is not None
+
+ # Calculate the default before prompting anything to be stable.
+ default = self.get_default(ctx)
+
+ # If this is a prompt for a flag we need to handle this
+ # differently.
+ if self.is_bool_flag:
+ return confirm(self.prompt, default)
+
+ return prompt(
+ self.prompt,
+ default=default,
+ type=self.type,
+ hide_input=self.hide_input,
+ show_choices=self.show_choices,
+ confirmation_prompt=self.confirmation_prompt,
+ value_proc=lambda x: self.process_value(ctx, x),
+ )
+
+ def resolve_envvar_value(self, ctx: Context) -> t.Optional[str]:
+ rv = super().resolve_envvar_value(ctx)
+
+ if rv is not None:
+ return rv
+
+ if (
+ self.allow_from_autoenv
+ and ctx.auto_envvar_prefix is not None
+ and self.name is not None
+ ):
+ envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}"
+ rv = os.environ.get(envvar)
+
+ if rv:
+ return rv
+
+ return None
+
+ def value_from_envvar(self, ctx: Context) -> t.Optional[t.Any]:
+ rv: t.Optional[t.Any] = self.resolve_envvar_value(ctx)
+
+ if rv is None:
+ return None
+
+ value_depth = (self.nargs != 1) + bool(self.multiple)
+
+ if value_depth > 0:
+ rv = self.type.split_envvar_value(rv)
+
+ if self.multiple and self.nargs != 1:
+ rv = batch(rv, self.nargs)
+
+ return rv
+
+ def consume_value(
+ self, ctx: Context, opts: t.Mapping[str, "Parameter"]
+ ) -> t.Tuple[t.Any, ParameterSource]:
+ value, source = super().consume_value(ctx, opts)
+
+ # The parser will emit a sentinel value if the option can be
+ # given as a flag without a value. This is different from None
+ # to distinguish from the flag not being given at all.
+ if value is _flag_needs_value:
+ if self.prompt is not None and not ctx.resilient_parsing:
+ value = self.prompt_for_value(ctx)
+ source = ParameterSource.PROMPT
+ else:
+ value = self.flag_value
+ source = ParameterSource.COMMANDLINE
+
+ elif (
+ self.multiple
+ and value is not None
+ and any(v is _flag_needs_value for v in value)
+ ):
+ value = [self.flag_value if v is _flag_needs_value else v for v in value]
+ source = ParameterSource.COMMANDLINE
+
+ # The value wasn't set, or used the param's default, prompt if
+ # prompting is enabled.
+ elif (
+ source in {None, ParameterSource.DEFAULT}
+ and self.prompt is not None
+ and (self.required or self.prompt_required)
+ and not ctx.resilient_parsing
+ ):
+ value = self.prompt_for_value(ctx)
+ source = ParameterSource.PROMPT
+
+ return value, source
+
+
+class Argument(Parameter):
+ """Arguments are positional parameters to a command. They generally
+ provide fewer features than options but can have infinite ``nargs``
+ and are required by default.
+
+ All parameters are passed onwards to the constructor of :class:`Parameter`.
+ """
+
+ param_type_name = "argument"
+
+ def __init__(
+ self,
+ param_decls: t.Sequence[str],
+ required: t.Optional[bool] = None,
+ **attrs: t.Any,
+ ) -> None:
+ if required is None:
+ if attrs.get("default") is not None:
+ required = False
+ else:
+ required = attrs.get("nargs", 1) > 0
+
+ if "multiple" in attrs:
+ raise TypeError("__init__() got an unexpected keyword argument 'multiple'.")
+
+ super().__init__(param_decls, required=required, **attrs)
+
+ if __debug__:
+ if self.default is not None and self.nargs == -1:
+ raise TypeError("'default' is not supported for nargs=-1.")
+
+ @property
+ def human_readable_name(self) -> str:
+ if self.metavar is not None:
+ return self.metavar
+ return self.name.upper() # type: ignore
+
+ def make_metavar(self) -> str:
+ if self.metavar is not None:
+ return self.metavar
+ var = self.type.get_metavar(self)
+ if not var:
+ var = self.name.upper() # type: ignore
+ if not self.required:
+ var = f"[{var}]"
+ if self.nargs != 1:
+ var += "..."
+ return var
+
+ def _parse_decls(
+ self, decls: t.Sequence[str], expose_value: bool
+ ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]:
+ if not decls:
+ if not expose_value:
+ return None, [], []
+ raise TypeError("Could not determine name for argument")
+ if len(decls) == 1:
+ name = arg = decls[0]
+ name = name.replace("-", "_").lower()
+ else:
+ raise TypeError(
+ "Arguments take exactly one parameter declaration, got"
+ f" {len(decls)}."
+ )
+ return name, [arg], []
+
+ def get_usage_pieces(self, ctx: Context) -> t.List[str]:
+ return [self.make_metavar()]
+
+ def get_error_hint(self, ctx: Context) -> str:
+ return f"'{self.make_metavar()}'"
+
+ def add_to_parser(self, parser: OptionParser, ctx: Context) -> None:
+ parser.add_argument(dest=self.name, nargs=self.nargs, obj=self)
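+
+
+# A minimal usage sketch (illustrative only, not part of the library): the
+# same Parameter objects the decorators in click.decorators build can be
+# constructed directly and attached to a Command. The names "greet",
+# "--count", and "name" below are hypothetical.
+#
+#     import click
+#
+#     cmd = click.Command(
+#         "greet",
+#         params=[
+#             click.Option(["--count"], type=click.INT, default=1),
+#             click.Argument(["name"]),
+#         ],
+#         callback=lambda count, name: click.echo(f"{name} x{count}"),
+#     )
+#     cmd(["--count", "2", "Alice"], standalone_mode=False)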
diff --git a/billinglayer/python/click/decorators.py b/billinglayer/python/click/decorators.py
new file mode 100644
index 0000000..d9bba95
--- /dev/null
+++ b/billinglayer/python/click/decorators.py
@@ -0,0 +1,561 @@
+import inspect
+import types
+import typing as t
+from functools import update_wrapper
+from gettext import gettext as _
+
+from .core import Argument
+from .core import Command
+from .core import Context
+from .core import Group
+from .core import Option
+from .core import Parameter
+from .globals import get_current_context
+from .utils import echo
+
+if t.TYPE_CHECKING:
+ import typing_extensions as te
+
+ P = te.ParamSpec("P")
+
+R = t.TypeVar("R")
+T = t.TypeVar("T")
+_AnyCallable = t.Callable[..., t.Any]
+FC = t.TypeVar("FC", bound=t.Union[_AnyCallable, Command])
+
+
+def pass_context(f: "t.Callable[te.Concatenate[Context, P], R]") -> "t.Callable[P, R]":
+ """Marks a callback as wanting to receive the current context
+ object as first argument.
+ """
+
+ def new_func(*args: "P.args", **kwargs: "P.kwargs") -> "R":
+ return f(get_current_context(), *args, **kwargs)
+
+ return update_wrapper(new_func, f)
+
+
+def pass_obj(f: "t.Callable[te.Concatenate[t.Any, P], R]") -> "t.Callable[P, R]":
+ """Similar to :func:`pass_context`, but only pass the object on the
+ context onwards (:attr:`Context.obj`). This is useful if that object
+ represents the state of a nested system.
+ """
+
+ def new_func(*args: "P.args", **kwargs: "P.kwargs") -> "R":
+ return f(get_current_context().obj, *args, **kwargs)
+
+ return update_wrapper(new_func, f)
+
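+# Usage sketch (the commands are hypothetical): ``ctx.obj`` is whatever the
+# application stored on the context, commonly set at the group level.
+#
+#     @click.group()
+#     @click.pass_context
+#     def cli(ctx):
+#         ctx.obj = {"debug": False}
+#
+#     @cli.command()
+#     @click.pass_obj
+#     def status(obj):
+#         click.echo(obj["debug"])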
+
+def make_pass_decorator(
+ object_type: t.Type[T], ensure: bool = False
+) -> t.Callable[["t.Callable[te.Concatenate[T, P], R]"], "t.Callable[P, R]"]:
+ """Given an object type this creates a decorator that will work
+ similar to :func:`pass_obj` but instead of passing the object of the
+ current context, it will find the innermost context of type
+ :func:`object_type`.
+
+ This generates a decorator that works roughly like this::
+
+ from functools import update_wrapper
+
+ def decorator(f):
+ @pass_context
+ def new_func(ctx, *args, **kwargs):
+ obj = ctx.find_object(object_type)
+ return ctx.invoke(f, obj, *args, **kwargs)
+ return update_wrapper(new_func, f)
+ return decorator
+
+ :param object_type: the type of the object to pass.
+ :param ensure: if set to `True`, a new object will be created and
+ remembered on the context if it's not there yet.
+ """
+
+ def decorator(f: "t.Callable[te.Concatenate[T, P], R]") -> "t.Callable[P, R]":
+ def new_func(*args: "P.args", **kwargs: "P.kwargs") -> "R":
+ ctx = get_current_context()
+
+ obj: t.Optional[T]
+ if ensure:
+ obj = ctx.ensure_object(object_type)
+ else:
+ obj = ctx.find_object(object_type)
+
+ if obj is None:
+ raise RuntimeError(
+ "Managed to invoke callback without a context"
+ f" object of type {object_type.__name__!r}"
+ " existing."
+ )
+
+ return ctx.invoke(f, obj, *args, **kwargs)
+
+ return update_wrapper(new_func, f)
+
+ return decorator # type: ignore[return-value]
+
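+# Usage sketch under assumptions (the ``Repo`` class is hypothetical):
+#
+#     class Repo:
+#         ...
+#
+#     pass_repo = make_pass_decorator(Repo, ensure=True)
+#
+#     @click.command()
+#     @pass_repo
+#     def show(repo):
+#         click.echo(type(repo).__name__)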
+
+def pass_meta_key(
+ key: str, *, doc_description: t.Optional[str] = None
+) -> "t.Callable[[t.Callable[te.Concatenate[t.Any, P], R]], t.Callable[P, R]]":
+ """Create a decorator that passes a key from
+ :attr:`click.Context.meta` as the first argument to the decorated
+ function.
+
+ :param key: Key in ``Context.meta`` to pass.
+ :param doc_description: Description of the object being passed,
+ inserted into the decorator's docstring. Defaults to "the 'key'
+ key from Context.meta".
+
+ .. versionadded:: 8.0
+ """
+
+ def decorator(f: "t.Callable[te.Concatenate[t.Any, P], R]") -> "t.Callable[P, R]":
+ def new_func(*args: "P.args", **kwargs: "P.kwargs") -> R:
+ ctx = get_current_context()
+ obj = ctx.meta[key]
+ return ctx.invoke(f, obj, *args, **kwargs)
+
+ return update_wrapper(new_func, f)
+
+ if doc_description is None:
+ doc_description = f"the {key!r} key from :attr:`click.Context.meta`"
+
+ decorator.__doc__ = (
+ f"Decorator that passes {doc_description} as the first argument"
+ " to the decorated function."
+ )
+ return decorator # type: ignore[return-value]
+
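+# Sketch (the "reporter" key is hypothetical; the application must place it
+# in Context.meta before the command runs):
+#
+#     pass_reporter = pass_meta_key("reporter", doc_description="the reporter")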
+
+CmdType = t.TypeVar("CmdType", bound=Command)
+
+
+# variant: no call, directly as decorator for a function.
+@t.overload
+def command(name: _AnyCallable) -> Command:
+ ...
+
+
+# variant: with positional name and with positional or keyword cls argument:
+# @command(namearg, CommandCls, ...) or @command(namearg, cls=CommandCls, ...)
+@t.overload
+def command(
+ name: t.Optional[str],
+ cls: t.Type[CmdType],
+ **attrs: t.Any,
+) -> t.Callable[[_AnyCallable], CmdType]:
+ ...
+
+
+# variant: name omitted, cls _must_ be a keyword argument, @command(cls=CommandCls, ...)
+@t.overload
+def command(
+ name: None = None,
+ *,
+ cls: t.Type[CmdType],
+ **attrs: t.Any,
+) -> t.Callable[[_AnyCallable], CmdType]:
+ ...
+
+
+# variant: with optional string name, no cls argument provided.
+@t.overload
+def command(
+ name: t.Optional[str] = ..., cls: None = None, **attrs: t.Any
+) -> t.Callable[[_AnyCallable], Command]:
+ ...
+
+
+def command(
+ name: t.Union[t.Optional[str], _AnyCallable] = None,
+ cls: t.Optional[t.Type[CmdType]] = None,
+ **attrs: t.Any,
+) -> t.Union[Command, t.Callable[[_AnyCallable], t.Union[Command, CmdType]]]:
+ r"""Creates a new :class:`Command` and uses the decorated function as
+ callback. This will also automatically attach all decorated
+ :func:`option`\s and :func:`argument`\s as parameters to the command.
+
+ The name of the command defaults to the name of the function with
+ underscores replaced by dashes. If you want to change that, you can
+ pass the intended name as the first argument.
+
+ All keyword arguments are forwarded to the underlying command class.
+ For the ``params`` argument, any decorated params are appended to
+ the end of the list.
+
+ Once decorated the function turns into a :class:`Command` instance
+ that can be invoked as a command line utility or be attached to a
+ command :class:`Group`.
+
+ :param name: the name of the command. This defaults to the function
+ name with underscores replaced by dashes.
+ :param cls: the command class to instantiate. This defaults to
+ :class:`Command`.
+
+ .. versionchanged:: 8.1
+ This decorator can be applied without parentheses.
+
+ .. versionchanged:: 8.1
+ The ``params`` argument can be used. Decorated params are
+ appended to the end of the list.
+ """
+
+ func: t.Optional[t.Callable[[_AnyCallable], t.Any]] = None
+
+ if callable(name):
+ func = name
+ name = None
+ assert cls is None, "Use 'command(cls=cls)(callable)' to specify a class."
+ assert not attrs, "Use 'command(**kwargs)(callable)' to provide arguments."
+
+ if cls is None:
+ cls = t.cast(t.Type[CmdType], Command)
+
+ def decorator(f: _AnyCallable) -> CmdType:
+ if isinstance(f, Command):
+ raise TypeError("Attempted to convert a callback into a command twice.")
+
+ attr_params = attrs.pop("params", None)
+ params = attr_params if attr_params is not None else []
+
+ try:
+ decorator_params = f.__click_params__ # type: ignore
+ except AttributeError:
+ pass
+ else:
+ del f.__click_params__ # type: ignore
+ params.extend(reversed(decorator_params))
+
+ if attrs.get("help") is None:
+ attrs["help"] = f.__doc__
+
+ if t.TYPE_CHECKING:
+ assert cls is not None
+ assert not callable(name)
+
+ cmd = cls(
+ name=name or f.__name__.lower().replace("_", "-"),
+ callback=f,
+ params=params,
+ **attrs,
+ )
+ cmd.__doc__ = f.__doc__
+ return cmd
+
+ if func is not None:
+ return decorator(func)
+
+ return decorator
+
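+# Both spellings below build a Command from a function (the function is
+# hypothetical); since 8.1 the decorator also works without parentheses:
+#
+#     @command
+#     def sync(): ...
+#
+#     @command("sync", help="Synchronize the data store.")
+#     def sync(): ...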
+
+GrpType = t.TypeVar("GrpType", bound=Group)
+
+
+# variant: no call, directly as decorator for a function.
+@t.overload
+def group(name: _AnyCallable) -> Group:
+ ...
+
+
+# variant: with positional name and with positional or keyword cls argument:
+# @group(namearg, GroupCls, ...) or @group(namearg, cls=GroupCls, ...)
+@t.overload
+def group(
+ name: t.Optional[str],
+ cls: t.Type[GrpType],
+ **attrs: t.Any,
+) -> t.Callable[[_AnyCallable], GrpType]:
+ ...
+
+
+# variant: name omitted, cls _must_ be a keyword argument, @group(cls=GroupCls, ...)
+@t.overload
+def group(
+ name: None = None,
+ *,
+ cls: t.Type[GrpType],
+ **attrs: t.Any,
+) -> t.Callable[[_AnyCallable], GrpType]:
+ ...
+
+
+# variant: with optional string name, no cls argument provided.
+@t.overload
+def group(
+ name: t.Optional[str] = ..., cls: None = None, **attrs: t.Any
+) -> t.Callable[[_AnyCallable], Group]:
+ ...
+
+
+def group(
+ name: t.Union[str, _AnyCallable, None] = None,
+ cls: t.Optional[t.Type[GrpType]] = None,
+ **attrs: t.Any,
+) -> t.Union[Group, t.Callable[[_AnyCallable], t.Union[Group, GrpType]]]:
+ """Creates a new :class:`Group` with a function as callback. This
+ works otherwise the same as :func:`command` just that the `cls`
+ parameter is set to :class:`Group`.
+
+ .. versionchanged:: 8.1
+ This decorator can be applied without parentheses.
+ """
+ if cls is None:
+ cls = t.cast(t.Type[GrpType], Group)
+
+ if callable(name):
+ return command(cls=cls, **attrs)(name)
+
+ return command(name, cls, **attrs)
+
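+# Sketch (hypothetical names): a Group is itself a Command, so subcommands
+# attach to it via its own ``command`` method:
+#
+#     @group()
+#     def cli(): ...
+#
+#     @cli.command()
+#     def build(): ...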
+
+def _param_memo(f: t.Callable[..., t.Any], param: Parameter) -> None:
+ if isinstance(f, Command):
+ f.params.append(param)
+ else:
+ if not hasattr(f, "__click_params__"):
+ f.__click_params__ = [] # type: ignore
+
+ f.__click_params__.append(param) # type: ignore
+
+
+def argument(
+ *param_decls: str, cls: t.Optional[t.Type[Argument]] = None, **attrs: t.Any
+) -> t.Callable[[FC], FC]:
+ """Attaches an argument to the command. All positional arguments are
+ passed as parameter declarations to :class:`Argument`; all keyword
+ arguments are forwarded unchanged (except ``cls``).
+ This is equivalent to creating an :class:`Argument` instance manually
+ and attaching it to the :attr:`Command.params` list.
+
+ For the default argument class, refer to :class:`Argument` and
+ :class:`Parameter` for descriptions of parameters.
+
+ :param cls: the argument class to instantiate. This defaults to
+ :class:`Argument`.
+ :param param_decls: Passed as positional arguments to the constructor of
+ ``cls``.
+ :param attrs: Passed as keyword arguments to the constructor of ``cls``.
+ """
+ if cls is None:
+ cls = Argument
+
+ def decorator(f: FC) -> FC:
+ _param_memo(f, cls(param_decls, **attrs))
+ return f
+
+ return decorator
+
+
+def option(
+ *param_decls: str, cls: t.Optional[t.Type[Option]] = None, **attrs: t.Any
+) -> t.Callable[[FC], FC]:
+ """Attaches an option to the command. All positional arguments are
+ passed as parameter declarations to :class:`Option`; all keyword
+ arguments are forwarded unchanged (except ``cls``).
+ This is equivalent to creating an :class:`Option` instance manually
+ and attaching it to the :attr:`Command.params` list.
+
+ For the default option class, refer to :class:`Option` and
+ :class:`Parameter` for descriptions of parameters.
+
+ :param cls: the option class to instantiate. This defaults to
+ :class:`Option`.
+ :param param_decls: Passed as positional arguments to the constructor of
+ ``cls``.
+ :param attrs: Passed as keyword arguments to the constructor of ``cls``.
+ """
+ if cls is None:
+ cls = Option
+
+ def decorator(f: FC) -> FC:
+ _param_memo(f, cls(param_decls, **attrs))
+ return f
+
+ return decorator
+
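+# Sketch of the two parameter decorators together (the command and its
+# parameters are hypothetical):
+#
+#     @command()
+#     @argument("src")
+#     @option("--force", is_flag=True, help="Overwrite existing files.")
+#     def copy(src, force): ...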
+
+def confirmation_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]:
+ """Add a ``--yes`` option which shows a prompt before continuing if
+ not passed. If the prompt is declined, the program will exit.
+
+ :param param_decls: One or more option names. Defaults to the single
+ value ``"--yes"``.
+ :param kwargs: Extra arguments are passed to :func:`option`.
+ """
+
+ def callback(ctx: Context, param: Parameter, value: bool) -> None:
+ if not value:
+ ctx.abort()
+
+ if not param_decls:
+ param_decls = ("--yes",)
+
+ kwargs.setdefault("is_flag", True)
+ kwargs.setdefault("callback", callback)
+ kwargs.setdefault("expose_value", False)
+ kwargs.setdefault("prompt", "Do you want to continue?")
+ kwargs.setdefault("help", "Confirm the action without prompting.")
+ return option(*param_decls, **kwargs)
+
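+# Sketch (hypothetical command): keyword arguments override the defaults
+# set above, e.g. the prompt text:
+#
+#     @command()
+#     @confirmation_option(prompt="Drop all tables?")
+#     def drop(): ...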
+
+def password_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]:
+ """Add a ``--password`` option which prompts for a password, hiding
+ input and asking to enter the value again for confirmation.
+
+ :param param_decls: One or more option names. Defaults to the single
+ value ``"--password"``.
+ :param kwargs: Extra arguments are passed to :func:`option`.
+ """
+ if not param_decls:
+ param_decls = ("--password",)
+
+ kwargs.setdefault("prompt", True)
+ kwargs.setdefault("confirmation_prompt", True)
+ kwargs.setdefault("hide_input", True)
+ return option(*param_decls, **kwargs)
+
+
+def version_option(
+ version: t.Optional[str] = None,
+ *param_decls: str,
+ package_name: t.Optional[str] = None,
+ prog_name: t.Optional[str] = None,
+ message: t.Optional[str] = None,
+ **kwargs: t.Any,
+) -> t.Callable[[FC], FC]:
+ """Add a ``--version`` option which immediately prints the version
+ number and exits the program.
+
+ If ``version`` is not provided, Click will try to detect it using
+ :func:`importlib.metadata.version` to get the version for the
+ ``package_name``. On Python < 3.8, the ``importlib_metadata``
+ backport must be installed.
+
+ If ``package_name`` is not provided, Click will try to detect it by
+ inspecting the stack frames. This will be used to detect the
+ version, so it must match the name of the installed package.
+
+ :param version: The version number to show. If not provided, Click
+ will try to detect it.
+ :param param_decls: One or more option names. Defaults to the single
+ value ``"--version"``.
+ :param package_name: The package name to detect the version from. If
+ not provided, Click will try to detect it.
+ :param prog_name: The name of the CLI to show in the message. If not
+ provided, it will be detected from the command.
+ :param message: The message to show. The values ``%(prog)s``,
+ ``%(package)s``, and ``%(version)s`` are available. Defaults to
+ ``"%(prog)s, version %(version)s"``.
+ :param kwargs: Extra arguments are passed to :func:`option`.
+ :raise RuntimeError: ``version`` could not be detected.
+
+ .. versionchanged:: 8.0
+ Add the ``package_name`` parameter, and the ``%(package)s``
+ value for messages.
+
+ .. versionchanged:: 8.0
+ Use :mod:`importlib.metadata` instead of ``pkg_resources``. The
+ version is detected based on the package name, not the entry
+ point name. The Python package name must match the installed
+ package name, or be passed with ``package_name=``.
+ """
+ if message is None:
+ message = _("%(prog)s, version %(version)s")
+
+ if version is None and package_name is None:
+ frame = inspect.currentframe()
+ f_back = frame.f_back if frame is not None else None
+ f_globals = f_back.f_globals if f_back is not None else None
+ # break reference cycle
+ # https://docs.python.org/3/library/inspect.html#the-interpreter-stack
+ del frame
+
+ if f_globals is not None:
+ package_name = f_globals.get("__name__")
+
+ if package_name == "__main__":
+ package_name = f_globals.get("__package__")
+
+ if package_name:
+ package_name = package_name.partition(".")[0]
+
+ def callback(ctx: Context, param: Parameter, value: bool) -> None:
+ if not value or ctx.resilient_parsing:
+ return
+
+ nonlocal prog_name
+ nonlocal version
+
+ if prog_name is None:
+ prog_name = ctx.find_root().info_name
+
+ if version is None and package_name is not None:
+ metadata: t.Optional[types.ModuleType]
+
+ try:
+ from importlib import metadata # type: ignore
+ except ImportError:
+ # Python < 3.8
+ import importlib_metadata as metadata # type: ignore
+
+ try:
+ version = metadata.version(package_name) # type: ignore
+ except metadata.PackageNotFoundError: # type: ignore
+ raise RuntimeError(
+ f"{package_name!r} is not installed. Try passing"
+ " 'package_name' instead."
+ ) from None
+
+ if version is None:
+ raise RuntimeError(
+ f"Could not determine the version for {package_name!r} automatically."
+ )
+
+ echo(
+ message % {"prog": prog_name, "package": package_name, "version": version},
+ color=ctx.color,
+ )
+ ctx.exit()
+
+ if not param_decls:
+ param_decls = ("--version",)
+
+ kwargs.setdefault("is_flag", True)
+ kwargs.setdefault("expose_value", False)
+ kwargs.setdefault("is_eager", True)
+ kwargs.setdefault("help", _("Show the version and exit."))
+ kwargs["callback"] = callback
+ return option(*param_decls, **kwargs)
+
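+# Sketch ("mypkg" is a hypothetical installed distribution name):
+#
+#     @command()
+#     @version_option(package_name="mypkg")
+#     def cli(): ...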
+
+def help_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]:
+ """Add a ``--help`` option which immediately prints the help page
+ and exits the program.
+
+ This is usually unnecessary, as the ``--help`` option is added to
+ each command automatically unless ``add_help_option=False`` is
+ passed.
+
+ :param param_decls: One or more option names. Defaults to the single
+ value ``"--help"``.
+ :param kwargs: Extra arguments are passed to :func:`option`.
+ """
+
+ def callback(ctx: Context, param: Parameter, value: bool) -> None:
+ if not value or ctx.resilient_parsing:
+ return
+
+ echo(ctx.get_help(), color=ctx.color)
+ ctx.exit()
+
+ if not param_decls:
+ param_decls = ("--help",)
+
+ kwargs.setdefault("is_flag", True)
+ kwargs.setdefault("expose_value", False)
+ kwargs.setdefault("is_eager", True)
+ kwargs.setdefault("help", _("Show this message and exit."))
+ kwargs["callback"] = callback
+ return option(*param_decls, **kwargs)
diff --git a/billinglayer/python/click/exceptions.py b/billinglayer/python/click/exceptions.py
new file mode 100644
index 0000000..fe68a36
--- /dev/null
+++ b/billinglayer/python/click/exceptions.py
@@ -0,0 +1,288 @@
+import typing as t
+from gettext import gettext as _
+from gettext import ngettext
+
+from ._compat import get_text_stderr
+from .utils import echo
+from .utils import format_filename
+
+if t.TYPE_CHECKING:
+ from .core import Command
+ from .core import Context
+ from .core import Parameter
+
+
+def _join_param_hints(
+ param_hint: t.Optional[t.Union[t.Sequence[str], str]]
+) -> t.Optional[str]:
+ if param_hint is not None and not isinstance(param_hint, str):
+ return " / ".join(repr(x) for x in param_hint)
+
+ return param_hint
+
+
+class ClickException(Exception):
+ """An exception that Click can handle and show to the user."""
+
+ #: The exit code for this exception.
+ exit_code = 1
+
+ def __init__(self, message: str) -> None:
+ super().__init__(message)
+ self.message = message
+
+ def format_message(self) -> str:
+ return self.message
+
+ def __str__(self) -> str:
+ return self.message
+
+ def show(self, file: t.Optional[t.IO[t.Any]] = None) -> None:
+ if file is None:
+ file = get_text_stderr()
+
+ echo(_("Error: {message}").format(message=self.format_message()), file=file)
+
+
+class UsageError(ClickException):
+ """An internal exception that signals a usage error. This typically
+ aborts any further handling.
+
+ :param message: the error message to display.
+ :param ctx: optionally the context that caused this error. Click will
+ fill in the context automatically in some situations.
+ """
+
+ exit_code = 2
+
+ def __init__(self, message: str, ctx: t.Optional["Context"] = None) -> None:
+ super().__init__(message)
+ self.ctx = ctx
+ self.cmd: t.Optional["Command"] = self.ctx.command if self.ctx else None
+
+ def show(self, file: t.Optional[t.IO[t.Any]] = None) -> None:
+ if file is None:
+ file = get_text_stderr()
+ color = None
+ hint = ""
+ if (
+ self.ctx is not None
+ and self.ctx.command.get_help_option(self.ctx) is not None
+ ):
+ hint = _("Try '{command} {option}' for help.").format(
+ command=self.ctx.command_path, option=self.ctx.help_option_names[0]
+ )
+ hint = f"{hint}\n"
+ if self.ctx is not None:
+ color = self.ctx.color
+ echo(f"{self.ctx.get_usage()}\n{hint}", file=file, color=color)
+ echo(
+ _("Error: {message}").format(message=self.format_message()),
+ file=file,
+ color=color,
+ )
+
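+# Illustrative sketch: raising any ClickException from a command callback
+# makes Click print "Error: ..." and exit with the exception's exit_code
+# (the command below is hypothetical):
+#
+#     @click.command()
+#     def run():
+#         raise UsageError("exactly one of --json/--csv is required")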
+
+class BadParameter(UsageError):
+ """An exception that formats out a standardized error message for a
+ bad parameter. This is useful when thrown from a callback or type as
+ Click will attach contextual information to it (for instance, which
+ parameter it is).
+
+ .. versionadded:: 2.0
+
+ :param param: the parameter object that caused this error. This can
+ be left out, and Click will attach this info itself
+ if possible.
+    :param param_hint: a string that shows up as the parameter name. This
+                       can be used as an alternative to `param` in cases
+                       where custom validation should happen. If it is
+                       a string it's used as such; if it's a list, each
+                       item is quoted and separated.
+ """
+
+ def __init__(
+ self,
+ message: str,
+ ctx: t.Optional["Context"] = None,
+ param: t.Optional["Parameter"] = None,
+ param_hint: t.Optional[str] = None,
+ ) -> None:
+ super().__init__(message, ctx)
+ self.param = param
+ self.param_hint = param_hint
+
+ def format_message(self) -> str:
+ if self.param_hint is not None:
+ param_hint = self.param_hint
+ elif self.param is not None:
+ param_hint = self.param.get_error_hint(self.ctx) # type: ignore
+ else:
+ return _("Invalid value: {message}").format(message=self.message)
+
+ return _("Invalid value for {param_hint}: {message}").format(
+ param_hint=_join_param_hints(param_hint), message=self.message
+ )
+
+
+class MissingParameter(BadParameter):
+ """Raised if click required an option or argument but it was not
+ provided when invoking the script.
+
+ .. versionadded:: 4.0
+
+ :param param_type: a string that indicates the type of the parameter.
+ The default is to inherit the parameter type from
+ the given `param`. Valid values are ``'parameter'``,
+ ``'option'`` or ``'argument'``.
+ """
+
+ def __init__(
+ self,
+ message: t.Optional[str] = None,
+ ctx: t.Optional["Context"] = None,
+ param: t.Optional["Parameter"] = None,
+ param_hint: t.Optional[str] = None,
+ param_type: t.Optional[str] = None,
+ ) -> None:
+ super().__init__(message or "", ctx, param, param_hint)
+ self.param_type = param_type
+
+ def format_message(self) -> str:
+ if self.param_hint is not None:
+ param_hint: t.Optional[str] = self.param_hint
+ elif self.param is not None:
+ param_hint = self.param.get_error_hint(self.ctx) # type: ignore
+ else:
+ param_hint = None
+
+ param_hint = _join_param_hints(param_hint)
+ param_hint = f" {param_hint}" if param_hint else ""
+
+ param_type = self.param_type
+ if param_type is None and self.param is not None:
+ param_type = self.param.param_type_name
+
+ msg = self.message
+ if self.param is not None:
+ msg_extra = self.param.type.get_missing_message(self.param)
+ if msg_extra:
+ if msg:
+ msg += f". {msg_extra}"
+ else:
+ msg = msg_extra
+
+ msg = f" {msg}" if msg else ""
+
+ # Translate param_type for known types.
+ if param_type == "argument":
+ missing = _("Missing argument")
+ elif param_type == "option":
+ missing = _("Missing option")
+ elif param_type == "parameter":
+ missing = _("Missing parameter")
+ else:
+ missing = _("Missing {param_type}").format(param_type=param_type)
+
+ return f"{missing}{param_hint}.{msg}"
+
+ def __str__(self) -> str:
+ if not self.message:
+ param_name = self.param.name if self.param else None
+ return _("Missing parameter: {param_name}").format(param_name=param_name)
+ else:
+ return self.message
+
+
+class NoSuchOption(UsageError):
+ """Raised if click attempted to handle an option that does not
+ exist.
+
+ .. versionadded:: 4.0
+ """
+
+ def __init__(
+ self,
+ option_name: str,
+ message: t.Optional[str] = None,
+ possibilities: t.Optional[t.Sequence[str]] = None,
+ ctx: t.Optional["Context"] = None,
+ ) -> None:
+ if message is None:
+ message = _("No such option: {name}").format(name=option_name)
+
+ super().__init__(message, ctx)
+ self.option_name = option_name
+ self.possibilities = possibilities
+
+ def format_message(self) -> str:
+ if not self.possibilities:
+ return self.message
+
+ possibility_str = ", ".join(sorted(self.possibilities))
+ suggest = ngettext(
+ "Did you mean {possibility}?",
+ "(Possible options: {possibilities})",
+ len(self.possibilities),
+ ).format(possibility=possibility_str, possibilities=possibility_str)
+ return f"{self.message} {suggest}"
+
+
+class BadOptionUsage(UsageError):
+ """Raised if an option is generally supplied but the use of the option
+ was incorrect. This is for instance raised if the number of arguments
+ for an option is not correct.
+
+ .. versionadded:: 4.0
+
+ :param option_name: the name of the option being used incorrectly.
+ """
+
+ def __init__(
+ self, option_name: str, message: str, ctx: t.Optional["Context"] = None
+ ) -> None:
+ super().__init__(message, ctx)
+ self.option_name = option_name
+
+
+class BadArgumentUsage(UsageError):
+ """Raised if an argument is generally supplied but the use of the argument
+ was incorrect. This is for instance raised if the number of values
+ for an argument is not correct.
+
+ .. versionadded:: 6.0
+ """
+
+
+class FileError(ClickException):
+ """Raised if a file cannot be opened."""
+
+ def __init__(self, filename: str, hint: t.Optional[str] = None) -> None:
+ if hint is None:
+ hint = _("unknown error")
+
+ super().__init__(hint)
+ self.ui_filename: str = format_filename(filename)
+ self.filename = filename
+
+ def format_message(self) -> str:
+ return _("Could not open file {filename!r}: {message}").format(
+ filename=self.ui_filename, message=self.message
+ )
+
+
+class Abort(RuntimeError):
+ """An internal signalling exception that signals Click to abort."""
+
+
+class Exit(RuntimeError):
+ """An exception that indicates that the application should exit with some
+ status code.
+
+ :param code: the status code to exit with.
+ """
+
+ __slots__ = ("exit_code",)
+
+ def __init__(self, code: int = 0) -> None:
+ self.exit_code: int = code
diff --git a/billinglayer/python/click/formatting.py b/billinglayer/python/click/formatting.py
new file mode 100644
index 0000000..ddd2a2f
--- /dev/null
+++ b/billinglayer/python/click/formatting.py
@@ -0,0 +1,301 @@
+import typing as t
+from contextlib import contextmanager
+from gettext import gettext as _
+
+from ._compat import term_len
+from .parser import split_opt
+
+# Can force a width. This is used by the test system
+FORCED_WIDTH: t.Optional[int] = None
+
+
+def measure_table(rows: t.Iterable[t.Tuple[str, str]]) -> t.Tuple[int, ...]:
+ widths: t.Dict[int, int] = {}
+
+ for row in rows:
+ for idx, col in enumerate(row):
+ widths[idx] = max(widths.get(idx, 0), term_len(col))
+
+ return tuple(y for x, y in sorted(widths.items()))
+
+
+def iter_rows(
+ rows: t.Iterable[t.Tuple[str, str]], col_count: int
+) -> t.Iterator[t.Tuple[str, ...]]:
+ for row in rows:
+ yield row + ("",) * (col_count - len(row))
+
+
+def wrap_text(
+ text: str,
+ width: int = 78,
+ initial_indent: str = "",
+ subsequent_indent: str = "",
+ preserve_paragraphs: bool = False,
+) -> str:
+ """A helper function that intelligently wraps text. By default, it
+ assumes that it operates on a single paragraph of text but if the
+ `preserve_paragraphs` parameter is provided it will intelligently
+ handle paragraphs (defined by two empty lines).
+
+ If paragraphs are handled, a paragraph can be prefixed with an empty
+ line containing the ``\\b`` character (``\\x08``) to indicate that
+ no rewrapping should happen in that block.
+
+ :param text: the text that should be rewrapped.
+ :param width: the maximum width for the text.
+ :param initial_indent: the initial indent that should be placed on the
+ first line as a string.
+ :param subsequent_indent: the indent string that should be placed on
+ each consecutive line.
+ :param preserve_paragraphs: if this flag is set then the wrapping will
+ intelligently handle paragraphs.
+ """
+ from ._textwrap import TextWrapper
+
+ text = text.expandtabs()
+ wrapper = TextWrapper(
+ width,
+ initial_indent=initial_indent,
+ subsequent_indent=subsequent_indent,
+ replace_whitespace=False,
+ )
+ if not preserve_paragraphs:
+ return wrapper.fill(text)
+
+ p: t.List[t.Tuple[int, bool, str]] = []
+ buf: t.List[str] = []
+ indent = None
+
+ def _flush_par() -> None:
+ if not buf:
+ return
+ if buf[0].strip() == "\b":
+ p.append((indent or 0, True, "\n".join(buf[1:])))
+ else:
+ p.append((indent or 0, False, " ".join(buf)))
+ del buf[:]
+
+ for line in text.splitlines():
+ if not line:
+ _flush_par()
+ indent = None
+ else:
+ if indent is None:
+ orig_len = term_len(line)
+ line = line.lstrip()
+ indent = orig_len - term_len(line)
+ buf.append(line)
+ _flush_par()
+
+ rv = []
+ for indent, raw, text in p:
+ with wrapper.extra_indent(" " * indent):
+ if raw:
+ rv.append(wrapper.indent_only(text))
+ else:
+ rv.append(wrapper.fill(text))
+
+ return "\n\n".join(rv)
+
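+# Behavior sketch (illustrative input): with preserve_paragraphs=True, a
+# paragraph opened by a line containing only "\b" is re-indented but not
+# rewrapped:
+#
+#     text = "Intro paragraph.\n\n\b\nusage: tool SRC DST\n\nOutro."
+#     wrap_text(text, width=30, preserve_paragraphs=True)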
+
+class HelpFormatter:
+ """This class helps with formatting text-based help pages. It's
+ usually just needed for very special internal cases, but it's also
+ exposed so that developers can write their own fancy outputs.
+
+ At present, it always writes into memory.
+
+ :param indent_increment: the additional increment for each level.
+ :param width: the width for the text. This defaults to the terminal
+ width clamped to a maximum of 78.
+ """
+
+ def __init__(
+ self,
+ indent_increment: int = 2,
+ width: t.Optional[int] = None,
+ max_width: t.Optional[int] = None,
+ ) -> None:
+ import shutil
+
+ self.indent_increment = indent_increment
+ if max_width is None:
+ max_width = 80
+ if width is None:
+ width = FORCED_WIDTH
+ if width is None:
+ width = max(min(shutil.get_terminal_size().columns, max_width) - 2, 50)
+ self.width = width
+ self.current_indent = 0
+ self.buffer: t.List[str] = []
+
+ def write(self, string: str) -> None:
+ """Writes a unicode string into the internal buffer."""
+ self.buffer.append(string)
+
+ def indent(self) -> None:
+ """Increases the indentation."""
+ self.current_indent += self.indent_increment
+
+ def dedent(self) -> None:
+ """Decreases the indentation."""
+ self.current_indent -= self.indent_increment
+
+ def write_usage(
+ self, prog: str, args: str = "", prefix: t.Optional[str] = None
+ ) -> None:
+ """Writes a usage line into the buffer.
+
+ :param prog: the program name.
+ :param args: whitespace separated list of arguments.
+ :param prefix: The prefix for the first line. Defaults to
+ ``"Usage: "``.
+ """
+ if prefix is None:
+ prefix = f"{_('Usage:')} "
+
+ usage_prefix = f"{prefix:>{self.current_indent}}{prog} "
+ text_width = self.width - self.current_indent
+
+ if text_width >= (term_len(usage_prefix) + 20):
+ # The arguments will fit to the right of the prefix.
+ indent = " " * term_len(usage_prefix)
+ self.write(
+ wrap_text(
+ args,
+ text_width,
+ initial_indent=usage_prefix,
+ subsequent_indent=indent,
+ )
+ )
+ else:
+ # The prefix is too long, put the arguments on the next line.
+ self.write(usage_prefix)
+ self.write("\n")
+ indent = " " * (max(self.current_indent, term_len(prefix)) + 4)
+ self.write(
+ wrap_text(
+ args, text_width, initial_indent=indent, subsequent_indent=indent
+ )
+ )
+
+ self.write("\n")
+
+ def write_heading(self, heading: str) -> None:
+ """Writes a heading into the buffer."""
+ self.write(f"{'':>{self.current_indent}}{heading}:\n")
+
+ def write_paragraph(self) -> None:
+ """Writes a paragraph into the buffer."""
+ if self.buffer:
+ self.write("\n")
+
+ def write_text(self, text: str) -> None:
+ """Writes re-indented text into the buffer. This rewraps and
+ preserves paragraphs.
+ """
+ indent = " " * self.current_indent
+ self.write(
+ wrap_text(
+ text,
+ self.width,
+ initial_indent=indent,
+ subsequent_indent=indent,
+ preserve_paragraphs=True,
+ )
+ )
+ self.write("\n")
+
+ def write_dl(
+ self,
+ rows: t.Sequence[t.Tuple[str, str]],
+ col_max: int = 30,
+ col_spacing: int = 2,
+ ) -> None:
+ """Writes a definition list into the buffer. This is how options
+ and commands are usually formatted.
+
+ :param rows: a list of two item tuples for the terms and values.
+ :param col_max: the maximum width of the first column.
+ :param col_spacing: the number of spaces between the first and
+ second column.
+ """
+ rows = list(rows)
+ widths = measure_table(rows)
+ if len(widths) != 2:
+ raise TypeError("Expected two columns for definition list")
+
+ first_col = min(widths[0], col_max) + col_spacing
+
+ for first, second in iter_rows(rows, len(widths)):
+ self.write(f"{'':>{self.current_indent}}{first}")
+ if not second:
+ self.write("\n")
+ continue
+ if term_len(first) <= first_col - col_spacing:
+ self.write(" " * (first_col - term_len(first)))
+ else:
+ self.write("\n")
+ self.write(" " * (first_col + self.current_indent))
+
+ text_width = max(self.width - first_col - 2, 10)
+ wrapped_text = wrap_text(second, text_width, preserve_paragraphs=True)
+ lines = wrapped_text.splitlines()
+
+ if lines:
+ self.write(f"{lines[0]}\n")
+
+ for line in lines[1:]:
+ self.write(f"{'':>{first_col + self.current_indent}}{line}\n")
+ else:
+ self.write("\n")
+
+ @contextmanager
+ def section(self, name: str) -> t.Iterator[None]:
+ """Helpful context manager that writes a paragraph, a heading,
+ and the indents.
+
+ :param name: the section name that is written as heading.
+ """
+ self.write_paragraph()
+ self.write_heading(name)
+ self.indent()
+ try:
+ yield
+ finally:
+ self.dedent()
+
+ @contextmanager
+ def indentation(self) -> t.Iterator[None]:
+ """A context manager that increases the indentation."""
+ self.indent()
+ try:
+ yield
+ finally:
+ self.dedent()
+
+ def getvalue(self) -> str:
+ """Returns the buffer contents."""
+ return "".join(self.buffer)
+
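+# Minimal sketch of driving the formatter directly (the strings are
+# illustrative):
+#
+#     formatter = HelpFormatter(width=40)
+#     formatter.write_usage("tool", "[OPTIONS] SRC DST")
+#     with formatter.section("Options"):
+#         formatter.write_dl([("--force", "Overwrite existing files.")])
+#     print(formatter.getvalue())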
+
+def join_options(options: t.Sequence[str]) -> t.Tuple[str, bool]:
+ """Given a list of option strings this joins them in the most appropriate
+ way and returns them in the form ``(formatted_string,
+ any_prefix_is_slash)`` where the second item in the tuple is a flag that
+ indicates if any of the option prefixes was a slash.
+ """
+ rv = []
+ any_prefix_is_slash = False
+
+ for opt in options:
+ prefix = split_opt(opt)[0]
+
+ if prefix == "/":
+ any_prefix_is_slash = True
+
+ rv.append((len(prefix), opt))
+
+ rv.sort(key=lambda x: x[0])
+ return ", ".join(x[1] for x in rv), any_prefix_is_slash
diff --git a/billinglayer/python/click/globals.py b/billinglayer/python/click/globals.py
new file mode 100644
index 0000000..480058f
--- /dev/null
+++ b/billinglayer/python/click/globals.py
@@ -0,0 +1,68 @@
+import typing as t
+from threading import local
+
+if t.TYPE_CHECKING:
+ import typing_extensions as te
+ from .core import Context
+
+_local = local()
+
+
+@t.overload
+def get_current_context(silent: "te.Literal[False]" = False) -> "Context":
+ ...
+
+
+@t.overload
+def get_current_context(silent: bool = ...) -> t.Optional["Context"]:
+ ...
+
+
+def get_current_context(silent: bool = False) -> t.Optional["Context"]:
+ """Returns the current click context. This can be used as a way to
+ access the current context object from anywhere. This is a more implicit
+ alternative to the :func:`pass_context` decorator. This function is
+    primarily useful for helpers such as :func:`echo`, which might be
+    interested in changing their behavior based on the current context.
+
+ To push the current context, :meth:`Context.scope` can be used.
+
+ .. versionadded:: 5.0
+
+ :param silent: if set to `True` the return value is `None` if no context
+ is available. The default behavior is to raise a
+ :exc:`RuntimeError`.
+ """
+ try:
+ return t.cast("Context", _local.stack[-1])
+ except (AttributeError, IndexError) as e:
+ if not silent:
+ raise RuntimeError("There is no active click context.") from e
+
+ return None
+
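+# Sketch (hypothetical helper): look up the active context without
+# threading it through every function signature.
+#
+#     def is_color_enabled() -> bool:
+#         ctx = get_current_context(silent=True)
+#         return bool(ctx and ctx.color)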
+
+def push_context(ctx: "Context") -> None:
+ """Pushes a new context to the current stack."""
+ _local.__dict__.setdefault("stack", []).append(ctx)
+
+
+def pop_context() -> None:
+ """Removes the top level from the stack."""
+ _local.stack.pop()
+
+
+def resolve_color_default(color: t.Optional[bool] = None) -> t.Optional[bool]:
+ """Internal helper to get the default value of the color flag. If a
+ value is passed it's returned unchanged, otherwise it's looked up from
+ the current context.
+ """
+ if color is not None:
+ return color
+
+ ctx = get_current_context(silent=True)
+
+ if ctx is not None:
+ return ctx.color
+
+ return None
diff --git a/billinglayer/python/click/parser.py b/billinglayer/python/click/parser.py
new file mode 100644
index 0000000..5fa7adf
--- /dev/null
+++ b/billinglayer/python/click/parser.py
@@ -0,0 +1,529 @@
+"""
+This module started out as largely a copy-paste of the stdlib's
+optparse module, with the features removed that we do not need from
+optparse because we implement them in Click on a higher level (for
+instance type handling, help formatting, and a lot more).
+
+The plan is to remove more and more from here over time.
+
+The reason this is a separate module rather than the stdlib's optparse
+is that there were differences between 2.x and 3.x in the error
+messages generated, and optparse in the stdlib uses gettext for no
+good reason, which might cause us issues.
+
+Click uses parts of optparse written by Gregory P. Ward and maintained
+by the Python Software Foundation. This is limited to code in parser.py.
+
+Copyright 2001-2006 Gregory P. Ward. All rights reserved.
+Copyright 2002-2006 Python Software Foundation. All rights reserved.
+"""
+# This code uses parts of optparse written by Gregory P. Ward and
+# maintained by the Python Software Foundation.
+# Copyright 2001-2006 Gregory P. Ward
+# Copyright 2002-2006 Python Software Foundation
+import typing as t
+from collections import deque
+from gettext import gettext as _
+from gettext import ngettext
+
+from .exceptions import BadArgumentUsage
+from .exceptions import BadOptionUsage
+from .exceptions import NoSuchOption
+from .exceptions import UsageError
+
+if t.TYPE_CHECKING:
+ import typing_extensions as te
+ from .core import Argument as CoreArgument
+ from .core import Context
+ from .core import Option as CoreOption
+ from .core import Parameter as CoreParameter
+
+V = t.TypeVar("V")
+
+# Sentinel value that indicates an option was passed as a flag without a
+# value but is not a flag option. Option.consume_value uses this to
+# prompt or use the flag_value.
+_flag_needs_value = object()
+
+
+def _unpack_args(
+ args: t.Sequence[str], nargs_spec: t.Sequence[int]
+) -> t.Tuple[t.Sequence[t.Union[str, t.Sequence[t.Optional[str]], None]], t.List[str]]:
+ """Given an iterable of arguments and an iterable of nargs specifications,
+ it returns a tuple with all the unpacked arguments at the first index
+ and all remaining arguments as the second.
+
+ The nargs specification is the number of arguments that should be consumed
+ or `-1` to indicate that this position should eat up all the remainders.
+
+ Missing items are filled with `None`.
+ """
+ args = deque(args)
+ nargs_spec = deque(nargs_spec)
+ rv: t.List[t.Union[str, t.Tuple[t.Optional[str], ...], None]] = []
+ spos: t.Optional[int] = None
+
+ def _fetch(c: "te.Deque[V]") -> t.Optional[V]:
+ try:
+ if spos is None:
+ return c.popleft()
+ else:
+ return c.pop()
+ except IndexError:
+ return None
+
+ while nargs_spec:
+ nargs = _fetch(nargs_spec)
+
+ if nargs is None:
+ continue
+
+ if nargs == 1:
+ rv.append(_fetch(args))
+ elif nargs > 1:
+ x = [_fetch(args) for _ in range(nargs)]
+
+ # If we're reversed, we're pulling in the arguments in reverse,
+ # so we need to turn them around.
+ if spos is not None:
+ x.reverse()
+
+ rv.append(tuple(x))
+ elif nargs < 0:
+ if spos is not None:
+ raise TypeError("Cannot have two nargs < 0")
+
+ spos = len(rv)
+ rv.append(None)
+
+ # spos is the position of the wildcard (star). If it's not `None`,
+ # we fill it with the remainder.
+ if spos is not None:
+ rv[spos] = tuple(args)
+ args = []
+ rv[spos + 1 :] = reversed(rv[spos + 1 :])
+
+ return tuple(rv), list(args)
+
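+# A minimal sketch of the unpacking above (an added example, not part of
+# the original module):
+#
+#     _unpack_args(["a", "b", "c"], [1, -1])
+#     # -> (("a", ("b", "c")), [])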
+
+def split_opt(opt: str) -> t.Tuple[str, str]:
+    """Split an option string into its ``(prefix, name)`` parts. The
+    prefix is the leading option character, doubled if it is repeated
+    (e.g. ``-`` or ``--``), and is empty for plain arguments.
+    """
+ first = opt[:1]
+ if first.isalnum():
+ return "", opt
+ if opt[1:2] == first:
+ return opt[:2], opt[2:]
+ return first, opt[1:]
+
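+# For illustration (added examples):
+#
+#     split_opt("--foo")  # -> ("--", "foo")
+#     split_opt("-f")     # -> ("-", "f")
+#     split_opt("foo")    # -> ("", "foo")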
+
+def normalize_opt(opt: str, ctx: t.Optional["Context"]) -> str:
+ if ctx is None or ctx.token_normalize_func is None:
+ return opt
+ prefix, opt = split_opt(opt)
+ return f"{prefix}{ctx.token_normalize_func(opt)}"
+
+
+def split_arg_string(string: str) -> t.List[str]:
+ """Split an argument string as with :func:`shlex.split`, but don't
+ fail if the string is incomplete. Ignores a missing closing quote or
+ incomplete escape sequence and uses the partial token as-is.
+
+ .. code-block:: python
+
+ split_arg_string("example 'my file")
+ ["example", "my file"]
+
+ split_arg_string("example my\\")
+ ["example", "my"]
+
+ :param string: String to split.
+ """
+ import shlex
+
+ lex = shlex.shlex(string, posix=True)
+ lex.whitespace_split = True
+ lex.commenters = ""
+ out = []
+
+ try:
+ for token in lex:
+ out.append(token)
+ except ValueError:
+ # Raised when end-of-string is reached in an invalid state. Use
+ # the partial token as-is. The quote or escape character is in
+ # lex.state, not lex.token.
+ out.append(lex.token)
+
+ return out
+
+
+class Option:
+ def __init__(
+ self,
+ obj: "CoreOption",
+ opts: t.Sequence[str],
+ dest: t.Optional[str],
+ action: t.Optional[str] = None,
+ nargs: int = 1,
+ const: t.Optional[t.Any] = None,
+ ):
+ self._short_opts = []
+ self._long_opts = []
+ self.prefixes: t.Set[str] = set()
+
+ for opt in opts:
+ prefix, value = split_opt(opt)
+ if not prefix:
+ raise ValueError(f"Invalid start character for option ({opt})")
+ self.prefixes.add(prefix[0])
+ if len(prefix) == 1 and len(value) == 1:
+ self._short_opts.append(opt)
+ else:
+ self._long_opts.append(opt)
+ self.prefixes.add(prefix)
+
+ if action is None:
+ action = "store"
+
+ self.dest = dest
+ self.action = action
+ self.nargs = nargs
+ self.const = const
+ self.obj = obj
+
+ @property
+ def takes_value(self) -> bool:
+ return self.action in ("store", "append")
+
+ def process(self, value: t.Any, state: "ParsingState") -> None:
+ if self.action == "store":
+ state.opts[self.dest] = value # type: ignore
+ elif self.action == "store_const":
+ state.opts[self.dest] = self.const # type: ignore
+ elif self.action == "append":
+ state.opts.setdefault(self.dest, []).append(value) # type: ignore
+ elif self.action == "append_const":
+ state.opts.setdefault(self.dest, []).append(self.const) # type: ignore
+ elif self.action == "count":
+ state.opts[self.dest] = state.opts.get(self.dest, 0) + 1 # type: ignore
+ else:
+ raise ValueError(f"unknown action '{self.action}'")
+ state.order.append(self.obj)
+
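+# Sketch of the action semantics above (added for illustration): with
+# action="append" and dest="x", processing "a" then "b" leaves
+# state.opts == {"x": ["a", "b"]}, while action="count" increments
+# state.opts[dest] by one per occurrence.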
+
+class Argument:
+ def __init__(self, obj: "CoreArgument", dest: t.Optional[str], nargs: int = 1):
+ self.dest = dest
+ self.nargs = nargs
+ self.obj = obj
+
+ def process(
+ self,
+ value: t.Union[t.Optional[str], t.Sequence[t.Optional[str]]],
+ state: "ParsingState",
+ ) -> None:
+ if self.nargs > 1:
+ assert value is not None
+ holes = sum(1 for x in value if x is None)
+ if holes == len(value):
+ value = None
+ elif holes != 0:
+ raise BadArgumentUsage(
+ _("Argument {name!r} takes {nargs} values.").format(
+ name=self.dest, nargs=self.nargs
+ )
+ )
+
+ if self.nargs == -1 and self.obj.envvar is not None and value == ():
+ # Replace empty tuple with None so that a value from the
+ # environment may be tried.
+ value = None
+
+ state.opts[self.dest] = value # type: ignore
+ state.order.append(self.obj)
+
+
+class ParsingState:
+ def __init__(self, rargs: t.List[str]) -> None:
+ self.opts: t.Dict[str, t.Any] = {}
+ self.largs: t.List[str] = []
+ self.rargs = rargs
+ self.order: t.List["CoreParameter"] = []
+
+
+class OptionParser:
+ """The option parser is an internal class that is ultimately used to
+ parse options and arguments. It's modelled after optparse and brings
+ a similar but vastly simplified API. It should generally not be used
+ directly as the high level Click classes wrap it for you.
+
+ It's not nearly as extensible as optparse or argparse as it does not
+ implement features that are implemented on a higher level (such as
+ types or defaults).
+
+    :param ctx: optionally the :class:`~click.Context` that this parser
+                should be associated with.
+ """
+
+ def __init__(self, ctx: t.Optional["Context"] = None) -> None:
+ #: The :class:`~click.Context` for this parser. This might be
+ #: `None` for some advanced use cases.
+ self.ctx = ctx
+ #: This controls how the parser deals with interspersed arguments.
+ #: If this is set to `False`, the parser will stop on the first
+ #: non-option. Click uses this to implement nested subcommands
+ #: safely.
+ self.allow_interspersed_args: bool = True
+ #: This tells the parser how to deal with unknown options. By
+ #: default it will error out (which is sensible), but there is a
+        #: second mode where it will ignore them and continue processing
+ #: after shifting all the unknown options into the resulting args.
+ self.ignore_unknown_options: bool = False
+
+ if ctx is not None:
+ self.allow_interspersed_args = ctx.allow_interspersed_args
+ self.ignore_unknown_options = ctx.ignore_unknown_options
+
+ self._short_opt: t.Dict[str, Option] = {}
+ self._long_opt: t.Dict[str, Option] = {}
+ self._opt_prefixes = {"-", "--"}
+ self._args: t.List[Argument] = []
+
+ def add_option(
+ self,
+ obj: "CoreOption",
+ opts: t.Sequence[str],
+ dest: t.Optional[str],
+ action: t.Optional[str] = None,
+ nargs: int = 1,
+ const: t.Optional[t.Any] = None,
+ ) -> None:
+ """Adds a new option named `dest` to the parser. The destination
+ is not inferred (unlike with optparse) and needs to be explicitly
+ provided. Action can be any of ``store``, ``store_const``,
+ ``append``, ``append_const`` or ``count``.
+
+ The `obj` can be used to identify the option in the order list
+ that is returned from the parser.
+ """
+ opts = [normalize_opt(opt, self.ctx) for opt in opts]
+ option = Option(obj, opts, dest, action=action, nargs=nargs, const=const)
+ self._opt_prefixes.update(option.prefixes)
+ for opt in option._short_opts:
+ self._short_opt[opt] = option
+ for opt in option._long_opts:
+ self._long_opt[opt] = option
+
+ def add_argument(
+ self, obj: "CoreArgument", dest: t.Optional[str], nargs: int = 1
+ ) -> None:
+ """Adds a positional argument named `dest` to the parser.
+
+        The `obj` can be used to identify the argument in the order list
+        that is returned from the parser.
+ """
+ self._args.append(Argument(obj, dest=dest, nargs=nargs))
+
+ def parse_args(
+ self, args: t.List[str]
+ ) -> t.Tuple[t.Dict[str, t.Any], t.List[str], t.List["CoreParameter"]]:
+        """Parses the given arguments and returns ``(values, args, order)``
+        for the parsed options and arguments as well as the leftover
+        arguments if there are any. The order is a list of objects as they
+        appear on the command line. If arguments appear multiple times they
+        will be recorded multiple times as well.
+        """
+ state = ParsingState(args)
+ try:
+ self._process_args_for_options(state)
+ self._process_args_for_args(state)
+ except UsageError:
+ if self.ctx is None or not self.ctx.resilient_parsing:
+ raise
+ return state.opts, state.largs, state.order
+
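+    # A runnable usage sketch (added; it assumes only that ``obj`` is
+    # stored for ordering and that ``obj._flag_needs_value`` is read, so
+    # a stub object suffices):
+    #
+    #     from types import SimpleNamespace
+    #
+    #     parser = OptionParser()
+    #     stub = SimpleNamespace(_flag_needs_value=False)
+    #     parser.add_option(stub, ["-n", "--name"], dest="name")
+    #     opts, largs, order = parser.parse_args(["--name", "Ada", "rest"])
+    #     # opts == {"name": "Ada"}, largs == ["rest"]
+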
+ def _process_args_for_args(self, state: ParsingState) -> None:
+ pargs, args = _unpack_args(
+ state.largs + state.rargs, [x.nargs for x in self._args]
+ )
+
+ for idx, arg in enumerate(self._args):
+ arg.process(pargs[idx], state)
+
+ state.largs = args
+ state.rargs = []
+
+ def _process_args_for_options(self, state: ParsingState) -> None:
+ while state.rargs:
+ arg = state.rargs.pop(0)
+ arglen = len(arg)
+            # Double dashes are always handled explicitly regardless of
+            # what prefixes are valid.
+ if arg == "--":
+ return
+ elif arg[:1] in self._opt_prefixes and arglen > 1:
+ self._process_opts(arg, state)
+ elif self.allow_interspersed_args:
+ state.largs.append(arg)
+ else:
+ state.rargs.insert(0, arg)
+ return
+
+ # Say this is the original argument list:
+ # [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)]
+ # ^
+ # (we are about to process arg(i)).
+ #
+ # Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of
+ # [arg0, ..., arg(i-1)] (any options and their arguments will have
+ # been removed from largs).
+ #
+ # The while loop will usually consume 1 or more arguments per pass.
+        # If it consumes 1 (e.g. arg is an option that takes no arguments),
+ # then after _process_arg() is done the situation is:
+ #
+ # largs = subset of [arg0, ..., arg(i)]
+ # rargs = [arg(i+1), ..., arg(N-1)]
+ #
+ # If allow_interspersed_args is false, largs will always be
+ # *empty* -- still a subset of [arg0, ..., arg(i-1)], but
+ # not a very interesting subset!
+
+ def _match_long_opt(
+ self, opt: str, explicit_value: t.Optional[str], state: ParsingState
+ ) -> None:
+ if opt not in self._long_opt:
+ from difflib import get_close_matches
+
+ possibilities = get_close_matches(opt, self._long_opt)
+ raise NoSuchOption(opt, possibilities=possibilities, ctx=self.ctx)
+
+ option = self._long_opt[opt]
+ if option.takes_value:
+ # At this point it's safe to modify rargs by injecting the
+ # explicit value, because no exception is raised in this
+ # branch. This means that the inserted value will be fully
+ # consumed.
+ if explicit_value is not None:
+ state.rargs.insert(0, explicit_value)
+
+ value = self._get_value_from_state(opt, option, state)
+
+ elif explicit_value is not None:
+ raise BadOptionUsage(
+ opt, _("Option {name!r} does not take a value.").format(name=opt)
+ )
+
+ else:
+ value = None
+
+ option.process(value, state)
+
+ def _match_short_opt(self, arg: str, state: ParsingState) -> None:
+ stop = False
+ i = 1
+ prefix = arg[0]
+ unknown_options = []
+
+ for ch in arg[1:]:
+ opt = normalize_opt(f"{prefix}{ch}", self.ctx)
+ option = self._short_opt.get(opt)
+ i += 1
+
+ if not option:
+ if self.ignore_unknown_options:
+ unknown_options.append(ch)
+ continue
+ raise NoSuchOption(opt, ctx=self.ctx)
+ if option.takes_value:
+ # Any characters left in arg? Pretend they're the
+ # next arg, and stop consuming characters of arg.
+ if i < len(arg):
+ state.rargs.insert(0, arg[i:])
+ stop = True
+
+ value = self._get_value_from_state(opt, option, state)
+
+ else:
+ value = None
+
+ option.process(value, state)
+
+ if stop:
+ break
+
+        # If we got any unknown options, we recombine the string of the
+        # remaining options and re-attach the prefix, then report that
+        # to the state as a new larg. This preserves basic option
+        # combinability while still ignoring unknown options.
+ if self.ignore_unknown_options and unknown_options:
+ state.largs.append(f"{prefix}{''.join(unknown_options)}")
+
+ def _get_value_from_state(
+ self, option_name: str, option: Option, state: ParsingState
+ ) -> t.Any:
+ nargs = option.nargs
+
+ if len(state.rargs) < nargs:
+ if option.obj._flag_needs_value:
+ # Option allows omitting the value.
+ value = _flag_needs_value
+ else:
+ raise BadOptionUsage(
+ option_name,
+ ngettext(
+ "Option {name!r} requires an argument.",
+ "Option {name!r} requires {nargs} arguments.",
+ nargs,
+ ).format(name=option_name, nargs=nargs),
+ )
+ elif nargs == 1:
+ next_rarg = state.rargs[0]
+
+ if (
+ option.obj._flag_needs_value
+ and isinstance(next_rarg, str)
+ and next_rarg[:1] in self._opt_prefixes
+ and len(next_rarg) > 1
+ ):
+ # The next arg looks like the start of an option, don't
+ # use it as the value if omitting the value is allowed.
+ value = _flag_needs_value
+ else:
+ value = state.rargs.pop(0)
+ else:
+ value = tuple(state.rargs[:nargs])
+ del state.rargs[:nargs]
+
+ return value
+
+ def _process_opts(self, arg: str, state: ParsingState) -> None:
+ explicit_value = None
+ # Long option handling happens in two parts. The first part is
+ # supporting explicitly attached values. In any case, we will try
+ # to long match the option first.
+ if "=" in arg:
+ long_opt, explicit_value = arg.split("=", 1)
+ else:
+ long_opt = arg
+ norm_long_opt = normalize_opt(long_opt, self.ctx)
+
+ # At this point we will match the (assumed) long option through
+ # the long option matching code. Note that this allows options
+ # like "-foo" to be matched as long options.
+ try:
+ self._match_long_opt(norm_long_opt, explicit_value, state)
+ except NoSuchOption:
+ # At this point the long option matching failed, and we need
+ # to try with short options. However there is a special rule
+            # which says that if we have a two-character option prefix
+ # (applies to "--foo" for instance), we do not dispatch to the
+ # short option code and will instead raise the no option
+ # error.
+ if arg[:2] not in self._opt_prefixes:
+ self._match_short_opt(arg, state)
+ return
+
+ if not self.ignore_unknown_options:
+ raise
+
+ state.largs.append(arg)
diff --git a/billinglayer/python/click/py.typed b/billinglayer/python/click/py.typed
new file mode 100644
index 0000000..e69de29
diff --git a/billinglayer/python/click/shell_completion.py b/billinglayer/python/click/shell_completion.py
new file mode 100644
index 0000000..dc9e00b
--- /dev/null
+++ b/billinglayer/python/click/shell_completion.py
@@ -0,0 +1,596 @@
+import os
+import re
+import typing as t
+from gettext import gettext as _
+
+from .core import Argument
+from .core import BaseCommand
+from .core import Context
+from .core import MultiCommand
+from .core import Option
+from .core import Parameter
+from .core import ParameterSource
+from .parser import split_arg_string
+from .utils import echo
+
+
+def shell_complete(
+ cli: BaseCommand,
+ ctx_args: t.MutableMapping[str, t.Any],
+ prog_name: str,
+ complete_var: str,
+ instruction: str,
+) -> int:
+ """Perform shell completion for the given CLI program.
+
+ :param cli: Command being called.
+ :param ctx_args: Extra arguments to pass to
+ ``cli.make_context``.
+ :param prog_name: Name of the executable in the shell.
+ :param complete_var: Name of the environment variable that holds
+ the completion instruction.
+    :param instruction: Value of ``complete_var`` with the completion
+        instruction and shell, in the form ``shell_instruction`` (for
+        example ``bash_source`` or ``bash_complete``).
+ :return: Status code to exit with.
+ """
+ shell, _, instruction = instruction.partition("_")
+ comp_cls = get_completion_class(shell)
+
+ if comp_cls is None:
+ return 1
+
+ comp = comp_cls(cli, ctx_args, prog_name, complete_var)
+
+ if instruction == "source":
+ echo(comp.source())
+ return 0
+
+ if instruction == "complete":
+ echo(comp.complete())
+ return 0
+
+ return 1
+
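+# Typical wiring, as a sketch (the program name "mycli" and the variable
+# "_MYCLI_COMPLETE" are hypothetical):
+#
+#     _MYCLI_COMPLETE=bash_source mycli      # prints the bash script
+#     _MYCLI_COMPLETE=bash_complete mycli    # prints completion data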
+
+class CompletionItem:
+ """Represents a completion value and metadata about the value. The
+ default metadata is ``type`` to indicate special shell handling,
+ and ``help`` if a shell supports showing a help string next to the
+ value.
+
+ Arbitrary parameters can be passed when creating the object, and
+ accessed using ``item.attr``. If an attribute wasn't passed,
+ accessing it returns ``None``.
+
+ :param value: The completion suggestion.
+ :param type: Tells the shell script to provide special completion
+ support for the type. Click uses ``"dir"`` and ``"file"``.
+ :param help: String shown next to the value if supported.
+ :param kwargs: Arbitrary metadata. The built-in implementations
+ don't use this, but custom type completions paired with custom
+ shell support could use it.
+ """
+
+ __slots__ = ("value", "type", "help", "_info")
+
+ def __init__(
+ self,
+ value: t.Any,
+ type: str = "plain",
+ help: t.Optional[str] = None,
+ **kwargs: t.Any,
+ ) -> None:
+ self.value: t.Any = value
+ self.type: str = type
+ self.help: t.Optional[str] = help
+ self._info = kwargs
+
+ def __getattr__(self, name: str) -> t.Any:
+ return self._info.get(name)
+
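+# For illustration (added example): CompletionItem("src/", type="dir").type
+# is "dir", while an attribute that was never passed, e.g. item.tag,
+# resolves to None through __getattr__.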
+
+# Only Bash >= 4.4 has the nosort option.
+_SOURCE_BASH = """\
+%(complete_func)s() {
+ local IFS=$'\\n'
+ local response
+
+ response=$(env COMP_WORDS="${COMP_WORDS[*]}" COMP_CWORD=$COMP_CWORD \
+%(complete_var)s=bash_complete $1)
+
+ for completion in $response; do
+ IFS=',' read type value <<< "$completion"
+
+ if [[ $type == 'dir' ]]; then
+ COMPREPLY=()
+ compopt -o dirnames
+ elif [[ $type == 'file' ]]; then
+ COMPREPLY=()
+ compopt -o default
+ elif [[ $type == 'plain' ]]; then
+ COMPREPLY+=($value)
+ fi
+ done
+
+ return 0
+}
+
+%(complete_func)s_setup() {
+ complete -o nosort -F %(complete_func)s %(prog_name)s
+}
+
+%(complete_func)s_setup;
+"""
+
+_SOURCE_ZSH = """\
+#compdef %(prog_name)s
+
+%(complete_func)s() {
+ local -a completions
+ local -a completions_with_descriptions
+ local -a response
+ (( ! $+commands[%(prog_name)s] )) && return 1
+
+ response=("${(@f)$(env COMP_WORDS="${words[*]}" COMP_CWORD=$((CURRENT-1)) \
+%(complete_var)s=zsh_complete %(prog_name)s)}")
+
+ for type key descr in ${response}; do
+ if [[ "$type" == "plain" ]]; then
+ if [[ "$descr" == "_" ]]; then
+ completions+=("$key")
+ else
+ completions_with_descriptions+=("$key":"$descr")
+ fi
+ elif [[ "$type" == "dir" ]]; then
+ _path_files -/
+ elif [[ "$type" == "file" ]]; then
+ _path_files -f
+ fi
+ done
+
+ if [ -n "$completions_with_descriptions" ]; then
+ _describe -V unsorted completions_with_descriptions -U
+ fi
+
+ if [ -n "$completions" ]; then
+ compadd -U -V unsorted -a completions
+ fi
+}
+
+if [[ $zsh_eval_context[-1] == loadautofunc ]]; then
+ # autoload from fpath, call function directly
+ %(complete_func)s "$@"
+else
+ # eval/source/. command, register function for later
+ compdef %(complete_func)s %(prog_name)s
+fi
+"""
+
+_SOURCE_FISH = """\
+function %(complete_func)s;
+ set -l response (env %(complete_var)s=fish_complete COMP_WORDS=(commandline -cp) \
+COMP_CWORD=(commandline -t) %(prog_name)s);
+
+ for completion in $response;
+ set -l metadata (string split "," $completion);
+
+ if test $metadata[1] = "dir";
+ __fish_complete_directories $metadata[2];
+ else if test $metadata[1] = "file";
+ __fish_complete_path $metadata[2];
+ else if test $metadata[1] = "plain";
+ echo $metadata[2];
+ end;
+ end;
+end;
+
+complete --no-files --command %(prog_name)s --arguments \
+"(%(complete_func)s)";
+"""
+
+
+class ShellComplete:
+ """Base class for providing shell completion support. A subclass for
+ a given shell will override attributes and methods to implement the
+ completion instructions (``source`` and ``complete``).
+
+ :param cli: Command being called.
+ :param prog_name: Name of the executable in the shell.
+ :param complete_var: Name of the environment variable that holds
+ the completion instruction.
+
+ .. versionadded:: 8.0
+ """
+
+ name: t.ClassVar[str]
+ """Name to register the shell as with :func:`add_completion_class`.
+ This is used in completion instructions (``{name}_source`` and
+ ``{name}_complete``).
+ """
+
+ source_template: t.ClassVar[str]
+ """Completion script template formatted by :meth:`source`. This must
+ be provided by subclasses.
+ """
+
+ def __init__(
+ self,
+ cli: BaseCommand,
+ ctx_args: t.MutableMapping[str, t.Any],
+ prog_name: str,
+ complete_var: str,
+ ) -> None:
+ self.cli = cli
+ self.ctx_args = ctx_args
+ self.prog_name = prog_name
+ self.complete_var = complete_var
+
+ @property
+ def func_name(self) -> str:
+ """The name of the shell function defined by the completion
+ script.
+ """
+ safe_name = re.sub(r"\W*", "", self.prog_name.replace("-", "_"), flags=re.ASCII)
+ return f"_{safe_name}_completion"
+
+ def source_vars(self) -> t.Dict[str, t.Any]:
+ """Vars for formatting :attr:`source_template`.
+
+ By default this provides ``complete_func``, ``complete_var``,
+ and ``prog_name``.
+ """
+ return {
+ "complete_func": self.func_name,
+ "complete_var": self.complete_var,
+ "prog_name": self.prog_name,
+ }
+
+ def source(self) -> str:
+ """Produce the shell script that defines the completion
+ function. By default this ``%``-style formats
+ :attr:`source_template` with the dict returned by
+ :meth:`source_vars`.
+ """
+ return self.source_template % self.source_vars()
+
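+    # For example (illustrative): with prog_name "mycli", a template line
+    # "complete -F %(complete_func)s %(prog_name)s" renders to
+    # "complete -F _mycli_completion mycli".
+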
+ def get_completion_args(self) -> t.Tuple[t.List[str], str]:
+ """Use the env vars defined by the shell script to return a
+ tuple of ``args, incomplete``. This must be implemented by
+ subclasses.
+ """
+ raise NotImplementedError
+
+ def get_completions(
+ self, args: t.List[str], incomplete: str
+ ) -> t.List[CompletionItem]:
+ """Determine the context and last complete command or parameter
+ from the complete args. Call that object's ``shell_complete``
+ method to get the completions for the incomplete value.
+
+ :param args: List of complete args before the incomplete value.
+ :param incomplete: Value being completed. May be empty.
+ """
+ ctx = _resolve_context(self.cli, self.ctx_args, self.prog_name, args)
+ obj, incomplete = _resolve_incomplete(ctx, args, incomplete)
+ return obj.shell_complete(ctx, incomplete)
+
+ def format_completion(self, item: CompletionItem) -> str:
+ """Format a completion item into the form recognized by the
+ shell script. This must be implemented by subclasses.
+
+ :param item: Completion item to format.
+ """
+ raise NotImplementedError
+
+ def complete(self) -> str:
+ """Produce the completion data to send back to the shell.
+
+ By default this calls :meth:`get_completion_args`, gets the
+ completions, then calls :meth:`format_completion` for each
+ completion.
+ """
+ args, incomplete = self.get_completion_args()
+ completions = self.get_completions(args, incomplete)
+ out = [self.format_completion(item) for item in completions]
+ return "\n".join(out)
+
+
+class BashComplete(ShellComplete):
+ """Shell completion for Bash."""
+
+ name = "bash"
+ source_template = _SOURCE_BASH
+
+ @staticmethod
+ def _check_version() -> None:
+ import subprocess
+
+ output = subprocess.run(
+ ["bash", "-c", 'echo "${BASH_VERSION}"'], stdout=subprocess.PIPE
+ )
+ match = re.search(r"^(\d+)\.(\d+)\.\d+", output.stdout.decode())
+
+        if match is not None:
+            # Compare numerically; comparing the version strings would
+            # misclassify e.g. Bash 10.x as older than 4.4.
+            major, minor = (int(group) for group in match.groups())
+
+            if major < 4 or (major == 4 and minor < 4):
+ echo(
+ _(
+ "Shell completion is not supported for Bash"
+ " versions older than 4.4."
+ ),
+ err=True,
+ )
+ else:
+ echo(
+ _("Couldn't detect Bash version, shell completion is not supported."),
+ err=True,
+ )
+
+ def source(self) -> str:
+ self._check_version()
+ return super().source()
+
+ def get_completion_args(self) -> t.Tuple[t.List[str], str]:
+ cwords = split_arg_string(os.environ["COMP_WORDS"])
+ cword = int(os.environ["COMP_CWORD"])
+ args = cwords[1:cword]
+
+ try:
+ incomplete = cwords[cword]
+ except IndexError:
+ incomplete = ""
+
+ return args, incomplete
+
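+    # Worked example (added): with COMP_WORDS="mycli sub --na" and
+    # COMP_CWORD=2, args is ["sub"] and the incomplete word is "--na";
+    # with COMP_CWORD=3 the word list is exhausted, so incomplete is "".
+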
+ def format_completion(self, item: CompletionItem) -> str:
+ return f"{item.type},{item.value}"
+
+
+class ZshComplete(ShellComplete):
+ """Shell completion for Zsh."""
+
+ name = "zsh"
+ source_template = _SOURCE_ZSH
+
+ def get_completion_args(self) -> t.Tuple[t.List[str], str]:
+ cwords = split_arg_string(os.environ["COMP_WORDS"])
+ cword = int(os.environ["COMP_CWORD"])
+ args = cwords[1:cword]
+
+ try:
+ incomplete = cwords[cword]
+ except IndexError:
+ incomplete = ""
+
+ return args, incomplete
+
+ def format_completion(self, item: CompletionItem) -> str:
+ return f"{item.type}\n{item.value}\n{item.help if item.help else '_'}"
+
+
+class FishComplete(ShellComplete):
+ """Shell completion for Fish."""
+
+ name = "fish"
+ source_template = _SOURCE_FISH
+
+ def get_completion_args(self) -> t.Tuple[t.List[str], str]:
+ cwords = split_arg_string(os.environ["COMP_WORDS"])
+ incomplete = os.environ["COMP_CWORD"]
+ args = cwords[1:]
+
+ # Fish stores the partial word in both COMP_WORDS and
+ # COMP_CWORD, remove it from complete args.
+ if incomplete and args and args[-1] == incomplete:
+ args.pop()
+
+ return args, incomplete
+
+ def format_completion(self, item: CompletionItem) -> str:
+ if item.help:
+ return f"{item.type},{item.value}\t{item.help}"
+
+ return f"{item.type},{item.value}"
+
+
+ShellCompleteType = t.TypeVar("ShellCompleteType", bound=t.Type[ShellComplete])
+
+
+_available_shells: t.Dict[str, t.Type[ShellComplete]] = {
+ "bash": BashComplete,
+ "fish": FishComplete,
+ "zsh": ZshComplete,
+}
+
+
+def add_completion_class(
+ cls: ShellCompleteType, name: t.Optional[str] = None
+) -> ShellCompleteType:
+ """Register a :class:`ShellComplete` subclass under the given name.
+ The name will be provided by the completion instruction environment
+ variable during completion.
+
+ :param cls: The completion class that will handle completion for the
+ shell.
+ :param name: Name to register the class under. Defaults to the
+ class's ``name`` attribute.
+ """
+ if name is None:
+ name = cls.name
+
+ _available_shells[name] = cls
+
+ return cls
+
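+# Usage sketch (the PowerShell subclass here is hypothetical):
+#
+#     @add_completion_class
+#     class PowerShellComplete(ShellComplete):
+#         name = "powershell"
+#         source_template = "..."  # completion script for the shell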
+
+def get_completion_class(shell: str) -> t.Optional[t.Type[ShellComplete]]:
+ """Look up a registered :class:`ShellComplete` subclass by the name
+ provided by the completion instruction environment variable. If the
+ name isn't registered, returns ``None``.
+
+ :param shell: Name the class is registered under.
+ """
+ return _available_shells.get(shell)
+
+
+def _is_incomplete_argument(ctx: Context, param: Parameter) -> bool:
+ """Determine if the given parameter is an argument that can still
+ accept values.
+
+ :param ctx: Invocation context for the command represented by the
+ parsed complete args.
+ :param param: Argument object being checked.
+ """
+ if not isinstance(param, Argument):
+ return False
+
+ assert param.name is not None
+ # Will be None if expose_value is False.
+ value = ctx.params.get(param.name)
+ return (
+ param.nargs == -1
+ or ctx.get_parameter_source(param.name) is not ParameterSource.COMMANDLINE
+ or (
+ param.nargs > 1
+ and isinstance(value, (tuple, list))
+ and len(value) < param.nargs
+ )
+ )
+
+
+def _start_of_option(ctx: Context, value: str) -> bool:
+ """Check if the value looks like the start of an option."""
+ if not value:
+ return False
+
+ c = value[0]
+ return c in ctx._opt_prefixes
+
+
+def _is_incomplete_option(ctx: Context, args: t.List[str], param: Parameter) -> bool:
+ """Determine if the given parameter is an option that needs a value.
+
+ :param args: List of complete args before the incomplete value.
+ :param param: Option object being checked.
+ """
+ if not isinstance(param, Option):
+ return False
+
+ if param.is_flag or param.count:
+ return False
+
+ last_option = None
+
+ for index, arg in enumerate(reversed(args)):
+ if index + 1 > param.nargs:
+ break
+
+ if _start_of_option(ctx, arg):
+ last_option = arg
+
+ return last_option is not None and last_option in param.opts
+
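+# e.g. (illustrative) with args ending in "--name" and an option "--name"
+# that takes one value, this returns True, so the shell completes the
+# option's value next rather than option names.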
+
+def _resolve_context(
+ cli: BaseCommand,
+ ctx_args: t.MutableMapping[str, t.Any],
+ prog_name: str,
+ args: t.List[str],
+) -> Context:
+ """Produce the context hierarchy starting with the command and
+ traversing the complete arguments. This only follows the commands,
+ it doesn't trigger input prompts or callbacks.
+
+ :param cli: Command being called.
+ :param prog_name: Name of the executable in the shell.
+ :param args: List of complete args before the incomplete value.
+ """
+ ctx_args["resilient_parsing"] = True
+ ctx = cli.make_context(prog_name, args.copy(), **ctx_args)
+ args = ctx.protected_args + ctx.args
+
+ while args:
+ command = ctx.command
+
+ if isinstance(command, MultiCommand):
+ if not command.chain:
+ name, cmd, args = command.resolve_command(ctx, args)
+
+ if cmd is None:
+ return ctx
+
+ ctx = cmd.make_context(name, args, parent=ctx, resilient_parsing=True)
+ args = ctx.protected_args + ctx.args
+ else:
+ sub_ctx = ctx
+
+ while args:
+ name, cmd, args = command.resolve_command(ctx, args)
+
+ if cmd is None:
+ return ctx
+
+ sub_ctx = cmd.make_context(
+ name,
+ args,
+ parent=ctx,
+ allow_extra_args=True,
+ allow_interspersed_args=False,
+ resilient_parsing=True,
+ )
+ args = sub_ctx.args
+
+ ctx = sub_ctx
+ args = [*sub_ctx.protected_args, *sub_ctx.args]
+ else:
+ break
+
+ return ctx
+
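+# Sketch (illustrative): for args ["db", "migrate"] under a group-based
+# CLI, the returned context belongs to the "migrate" subcommand, with
+# parent contexts for "db" and the root group.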
+
+def _resolve_incomplete(
+ ctx: Context, args: t.List[str], incomplete: str
+) -> t.Tuple[t.Union[BaseCommand, Parameter], str]:
+ """Find the Click object that will handle the completion of the
+ incomplete value. Return the object and the incomplete value.
+
+ :param ctx: Invocation context for the command represented by
+ the parsed complete args.
+ :param args: List of complete args before the incomplete value.
+ :param incomplete: Value being completed. May be empty.
+ """
+ # Different shells treat an "=" between a long option name and
+ # value differently. Might keep the value joined, return the "="
+ # as a separate item, or return the split name and value. Always
+ # split and discard the "=" to make completion easier.
+ if incomplete == "=":
+ incomplete = ""
+ elif "=" in incomplete and _start_of_option(ctx, incomplete):
+ name, _, incomplete = incomplete.partition("=")
+ args.append(name)
+
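+    # e.g. (illustrative) an incomplete "--name=Ad" becomes args
+    # [..., "--name"] with incomplete "Ad", so the option's values are
+    # completed rather than option names.
+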
+ # The "--" marker tells Click to stop treating values as options
+ # even if they start with the option character. If it hasn't been
+ # given and the incomplete arg looks like an option, the current
+ # command will provide option name completions.
+ if "--" not in args and _start_of_option(ctx, incomplete):
+ return ctx.command, incomplete
+
+ params = ctx.command.get_params(ctx)
+
+ # If the last complete arg is an option name with an incomplete
+ # value, the option will provide value completions.
+ for param in params:
+ if _is_incomplete_option(ctx, args, param):
+ return param, incomplete
+
+ # It's not an option name or value. The first argument without a
+ # parsed value will provide value completions.
+ for param in params:
+ if _is_incomplete_argument(ctx, param):
+ return param, incomplete
+
+ # There were no unparsed arguments, the command may be a group that
+ # will provide command name completions.
+ return ctx.command, incomplete
diff --git a/billinglayer/python/click/termui.py b/billinglayer/python/click/termui.py
new file mode 100644
index 0000000..db7a4b2
--- /dev/null
+++ b/billinglayer/python/click/termui.py
@@ -0,0 +1,784 @@
+import inspect
+import io
+import itertools
+import sys
+import typing as t
+from gettext import gettext as _
+
+from ._compat import isatty
+from ._compat import strip_ansi
+from .exceptions import Abort
+from .exceptions import UsageError
+from .globals import resolve_color_default
+from .types import Choice
+from .types import convert_type
+from .types import ParamType
+from .utils import echo
+from .utils import LazyFile
+
+if t.TYPE_CHECKING:
+ from ._termui_impl import ProgressBar
+
+V = t.TypeVar("V")
+
+# The prompt functions to use. The doc tools currently override these
+# functions to customize how they work.
+visible_prompt_func: t.Callable[[str], str] = input
+
+_ansi_colors = {
+ "black": 30,
+ "red": 31,
+ "green": 32,
+ "yellow": 33,
+ "blue": 34,
+ "magenta": 35,
+ "cyan": 36,
+ "white": 37,
+ "reset": 39,
+ "bright_black": 90,
+ "bright_red": 91,
+ "bright_green": 92,
+ "bright_yellow": 93,
+ "bright_blue": 94,
+ "bright_magenta": 95,
+ "bright_cyan": 96,
+ "bright_white": 97,
+}
+_ansi_reset_all = "\033[0m"
+
+
+def hidden_prompt_func(prompt: str) -> str:
+ import getpass
+
+ return getpass.getpass(prompt)
+
+
+def _build_prompt(
+ text: str,
+ suffix: str,
+ show_default: bool = False,
+ default: t.Optional[t.Any] = None,
+ show_choices: bool = True,
+ type: t.Optional[ParamType] = None,
+) -> str:
+ prompt = text
+ if type is not None and show_choices and isinstance(type, Choice):
+ prompt += f" ({', '.join(map(str, type.choices))})"
+ if default is not None and show_default:
+ prompt = f"{prompt} [{_format_default(default)}]"
+ return f"{prompt}{suffix}"
+
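+# For illustration (added): _build_prompt("Continue", ": ", True, "y")
+# returns "Continue [y]: ".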
+
+def _format_default(default: t.Any) -> t.Any:
+ if isinstance(default, (io.IOBase, LazyFile)) and hasattr(default, "name"):
+ return default.name
+
+ return default
+
+
+def prompt(
+ text: str,
+ default: t.Optional[t.Any] = None,
+ hide_input: bool = False,
+ confirmation_prompt: t.Union[bool, str] = False,
+ type: t.Optional[t.Union[ParamType, t.Any]] = None,
+ value_proc: t.Optional[t.Callable[[str], t.Any]] = None,
+ prompt_suffix: str = ": ",
+ show_default: bool = True,
+ err: bool = False,
+ show_choices: bool = True,
+) -> t.Any:
+    """Prompts a user for input. This is a convenience function that can
+    be used to ask a user for input and convert or validate the result.
+
+ If the user aborts the input by sending an interrupt signal, this
+ function will catch it and raise a :exc:`Abort` exception.
+
+ :param text: the text to show for the prompt.
+ :param default: the default value to use if no input happens. If this
+ is not given it will prompt until it's aborted.
+ :param hide_input: if this is set to true then the input value will
+ be hidden.
+ :param confirmation_prompt: Prompt a second time to confirm the
+ value. Can be set to a string instead of ``True`` to customize
+ the message.
+ :param type: the type to use to check the value against.
+ :param value_proc: if this parameter is provided it's a function that
+ is invoked instead of the type conversion to
+ convert a value.
+ :param prompt_suffix: a suffix that should be added to the prompt.
+ :param show_default: shows or hides the default value in the prompt.
+ :param err: if set to true the file defaults to ``stderr`` instead of
+ ``stdout``, the same as with echo.
+ :param show_choices: Show or hide choices if the passed type is a Choice.
+ For example if type is a Choice of either day or week,
+ show_choices is true and text is "Group by" then the
+ prompt will be "Group by (day, week): ".
+
+ .. versionadded:: 8.0
+ ``confirmation_prompt`` can be a custom string.
+
+ .. versionadded:: 7.0
+ Added the ``show_choices`` parameter.
+
+ .. versionadded:: 6.0
+ Added unicode support for cmd.exe on Windows.
+
+ .. versionadded:: 4.0
+ Added the `err` parameter.
+
+ """
+
+ def prompt_func(text: str) -> str:
+ f = hidden_prompt_func if hide_input else visible_prompt_func
+ try:
+ # Write the prompt separately so that we get nice
+ # coloring through colorama on Windows
+ echo(text.rstrip(" "), nl=False, err=err)
+ # Echo a space to stdout to work around an issue where
+ # readline causes backspace to clear the whole line.
+ return f(" ")
+ except (KeyboardInterrupt, EOFError):
+ # getpass doesn't print a newline if the user aborts input with ^C.
+ # Allegedly this behavior is inherited from getpass(3).
+ # A doc bug has been filed at https://bugs.python.org/issue24711
+ if hide_input:
+ echo(None, err=err)
+ raise Abort() from None
+
+ if value_proc is None:
+ value_proc = convert_type(type, default)
+
+ prompt = _build_prompt(
+ text, prompt_suffix, show_default, default, show_choices, type
+ )
+
+ if confirmation_prompt:
+ if confirmation_prompt is True:
+ confirmation_prompt = _("Repeat for confirmation")
+
+ confirmation_prompt = _build_prompt(confirmation_prompt, prompt_suffix)
+
+ while True:
+ while True:
+ value = prompt_func(prompt)
+ if value:
+ break
+ elif default is not None:
+ value = default
+ break
+ try:
+ result = value_proc(value)
+ except UsageError as e:
+ if hide_input:
+ echo(_("Error: The value you entered was invalid."), err=err)
+ else:
+ echo(_("Error: {e.message}").format(e=e), err=err) # noqa: B306
+ continue
+ if not confirmation_prompt:
+ return result
+ while True:
+ value2 = prompt_func(confirmation_prompt)
+ is_empty = not value and not value2
+ if value2 or is_empty:
+ break
+ if value == value2:
+ return result
+ echo(_("Error: The two entered values do not match."), err=err)
+
+
+def confirm(
+ text: str,
+ default: t.Optional[bool] = False,
+ abort: bool = False,
+ prompt_suffix: str = ": ",
+ show_default: bool = True,
+ err: bool = False,
+) -> bool:
+ """Prompts for confirmation (yes/no question).
+
+    If the user aborts the input by sending an interrupt signal, this
+    function will catch it and raise a :exc:`Abort` exception.
+
+ :param text: the question to ask.
+ :param default: The default value to use when no input is given. If
+ ``None``, repeat until input is given.
+    :param abort: if this is set to `True` a negative answer aborts the
+                  program by raising :exc:`Abort`.
+ :param prompt_suffix: a suffix that should be added to the prompt.
+ :param show_default: shows or hides the default value in the prompt.
+ :param err: if set to true the file defaults to ``stderr`` instead of
+ ``stdout``, the same as with echo.
+
+ .. versionchanged:: 8.0
+ Repeat until input is given if ``default`` is ``None``.
+
+ .. versionadded:: 4.0
+ Added the ``err`` parameter.
+ """
+ prompt = _build_prompt(
+ text,
+ prompt_suffix,
+ show_default,
+ "y/n" if default is None else ("Y/n" if default else "y/N"),
+ )
+
+ while True:
+ try:
+ # Write the prompt separately so that we get nice
+ # coloring through colorama on Windows
+ echo(prompt.rstrip(" "), nl=False, err=err)
+ # Echo a space to stdout to work around an issue where
+ # readline causes backspace to clear the whole line.
+ value = visible_prompt_func(" ").lower().strip()
+ except (KeyboardInterrupt, EOFError):
+ raise Abort() from None
+ if value in ("y", "yes"):
+ rv = True
+ elif value in ("n", "no"):
+ rv = False
+ elif default is not None and value == "":
+ rv = default
+ else:
+ echo(_("Error: invalid input"), err=err)
+ continue
+ break
+ if abort and not rv:
+ raise Abort()
+ return rv
+
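+# Usage sketch (illustrative): confirm("Delete files?", abort=True)
+# returns True on "y"/"yes" and raises Abort on a negative answer.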
+
+def echo_via_pager(
+ text_or_generator: t.Union[t.Iterable[str], t.Callable[[], t.Iterable[str]], str],
+ color: t.Optional[bool] = None,
+) -> None:
+    """This function takes text and shows it via an environment-specific
+ pager on stdout.
+
+ .. versionchanged:: 3.0
+ Added the `color` flag.
+
+ :param text_or_generator: the text to page, or alternatively, a
+ generator emitting the text to page.
+ :param color: controls if the pager supports ANSI colors or not. The
+ default is autodetection.
+ """
+ color = resolve_color_default(color)
+
+ if inspect.isgeneratorfunction(text_or_generator):
+ i = t.cast(t.Callable[[], t.Iterable[str]], text_or_generator)()
+ elif isinstance(text_or_generator, str):
+ i = [text_or_generator]
+ else:
+ i = iter(t.cast(t.Iterable[str], text_or_generator))
+
+ # convert every element of i to a text type if necessary
+ text_generator = (el if isinstance(el, str) else str(el) for el in i)
+
+ from ._termui_impl import pager
+
+ return pager(itertools.chain(text_generator, "\n"), color)
+
+
+def progressbar(
+ iterable: t.Optional[t.Iterable[V]] = None,
+ length: t.Optional[int] = None,
+ label: t.Optional[str] = None,
+ show_eta: bool = True,
+ show_percent: t.Optional[bool] = None,
+ show_pos: bool = False,
+ item_show_func: t.Optional[t.Callable[[t.Optional[V]], t.Optional[str]]] = None,
+ fill_char: str = "#",
+ empty_char: str = "-",
+ bar_template: str = "%(label)s [%(bar)s] %(info)s",
+ info_sep: str = " ",
+ width: int = 36,
+ file: t.Optional[t.TextIO] = None,
+ color: t.Optional[bool] = None,
+ update_min_steps: int = 1,
+) -> "ProgressBar[V]":
+ """This function creates an iterable context manager that can be used
+ to iterate over something while showing a progress bar. It will
+ either iterate over the `iterable` or `length` items (that are counted
+ up). While iteration happens, this function will print a rendered
+ progress bar to the given `file` (defaults to stdout) and will attempt
+ to calculate remaining time and more. By default, this progress bar
+ will not be rendered if the file is not a terminal.
+
+ The context manager creates the progress bar. When the context
+ manager is entered the progress bar is already created. With every
+ iteration over the progress bar, the iterable passed to the bar is
+ advanced and the bar is updated. When the context manager exits,
+ a newline is printed and the progress bar is finalized on screen.
+
+    Note: The progress bar is currently designed for use cases where the
+    total progress can be expected to take at least several seconds.
+    Because of this, the ProgressBar class object won't display
+    progress that is considered too fast, or progress where the time
+    between steps is less than a second.
+
+    No other printing must happen while the progress bar is active, or
+    it will be unintentionally destroyed.
+
+ Example usage::
+
+ with progressbar(items) as bar:
+ for item in bar:
+ do_something_with(item)
+
+ Alternatively, if no iterable is specified, one can manually update the
+ progress bar through the `update()` method instead of directly
+ iterating over the progress bar. The update method accepts the number
+ of steps to increment the bar with::
+
+ with progressbar(length=chunks.total_bytes) as bar:
+ for chunk in chunks:
+ process_chunk(chunk)
+ bar.update(chunks.bytes)
+
+ The ``update()`` method also takes an optional value specifying the
+ ``current_item`` at the new position. This is useful when used
+ together with ``item_show_func`` to customize the output for each
+ manual step::
+
+ with click.progressbar(
+ length=total_size,
+ label='Unzipping archive',
+ item_show_func=lambda a: a.filename
+ ) as bar:
+ for archive in zip_file:
+ archive.extract()
+ bar.update(archive.size, archive)
+
+ :param iterable: an iterable to iterate over. If not provided the length
+ is required.
+ :param length: the number of items to iterate over. By default the
+ progressbar will attempt to ask the iterator about its
+ length, which might or might not work. If an iterable is
+ also provided this parameter can be used to override the
+ length. If an iterable is not provided the progress bar
+ will iterate over a range of that length.
+ :param label: the label to show next to the progress bar.
+ :param show_eta: enables or disables the estimated time display. This is
+ automatically disabled if the length cannot be
+ determined.
+ :param show_percent: enables or disables the percentage display. The
+ default is `True` if the iterable has a length or
+ `False` if not.
+ :param show_pos: enables or disables the absolute position display. The
+ default is `False`.
+ :param item_show_func: A function called with the current item which
+ can return a string to show next to the progress bar. If the
+ function returns ``None`` nothing is shown. The current item can
+ be ``None``, such as when entering and exiting the bar.
+ :param fill_char: the character to use to show the filled part of the
+ progress bar.
+ :param empty_char: the character to use to show the non-filled part of
+ the progress bar.
+ :param bar_template: the format string to use as template for the bar.
+ The parameters in it are ``label`` for the label,
+ ``bar`` for the progress bar and ``info`` for the
+ info section.
+ :param info_sep: the separator between multiple info items (eta etc.)
+ :param width: the width of the progress bar in characters, 0 means full
+ terminal width
+ :param file: The file to write to. If this is not a terminal then
+ only the label is printed.
+ :param color: controls if the terminal supports ANSI colors or not. The
+ default is autodetection. This is only needed if ANSI
+ codes are included anywhere in the progress bar output
+ which is not the case by default.
+ :param update_min_steps: Render only when this many updates have
+ completed. This allows tuning for very fast iterators.
+
+ .. versionchanged:: 8.0
+ Output is shown even if execution time is less than 0.5 seconds.
+
+ .. versionchanged:: 8.0
+ ``item_show_func`` shows the current item, not the previous one.
+
+ .. versionchanged:: 8.0
+ Labels are echoed if the output is not a TTY. Reverts a change
+ in 7.0 that removed all output.
+
+ .. versionadded:: 8.0
+ Added the ``update_min_steps`` parameter.
+
+ .. versionchanged:: 4.0
+ Added the ``color`` parameter. Added the ``update`` method to
+ the object.
+
+ .. versionadded:: 2.0
+ """
+ from ._termui_impl import ProgressBar
+
+ color = resolve_color_default(color)
+ return ProgressBar(
+ iterable=iterable,
+ length=length,
+ show_eta=show_eta,
+ show_percent=show_percent,
+ show_pos=show_pos,
+ item_show_func=item_show_func,
+ fill_char=fill_char,
+ empty_char=empty_char,
+ bar_template=bar_template,
+ info_sep=info_sep,
+ file=file,
+ label=label,
+ width=width,
+ color=color,
+ update_min_steps=update_min_steps,
+ )
+
+
+def clear() -> None:
+ """Clears the terminal screen. This will have the effect of clearing
+ the whole visible space of the terminal and moving the cursor to the
+ top left. This does not do anything if not connected to a terminal.
+
+ .. versionadded:: 2.0
+ """
+ if not isatty(sys.stdout):
+ return
+
+ # ANSI escape \033[2J clears the screen, \033[1;1H moves the cursor
+ echo("\033[2J\033[1;1H", nl=False)
+
+
+def _interpret_color(
+ color: t.Union[int, t.Tuple[int, int, int], str], offset: int = 0
+) -> str:
+ if isinstance(color, int):
+ return f"{38 + offset};5;{color:d}"
+
+ if isinstance(color, (tuple, list)):
+ r, g, b = color
+ return f"{38 + offset};2;{r:d};{g:d};{b:d}"
+
+ return str(_ansi_colors[color] + offset)
+
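+# For illustration (added): _interpret_color("red") == "31",
+# _interpret_color(196) == "38;5;196", and
+# _interpret_color((255, 0, 0), 10) == "48;2;255;0;0".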
+
+def style(
+ text: t.Any,
+ fg: t.Optional[t.Union[int, t.Tuple[int, int, int], str]] = None,
+ bg: t.Optional[t.Union[int, t.Tuple[int, int, int], str]] = None,
+ bold: t.Optional[bool] = None,
+ dim: t.Optional[bool] = None,
+ underline: t.Optional[bool] = None,
+ overline: t.Optional[bool] = None,
+ italic: t.Optional[bool] = None,
+ blink: t.Optional[bool] = None,
+ reverse: t.Optional[bool] = None,
+ strikethrough: t.Optional[bool] = None,
+ reset: bool = True,
+) -> str:
+ """Styles a text with ANSI styles and returns the new string. By
+ default the styling is self contained which means that at the end
+ of the string a reset code is issued. This can be prevented by
+ passing ``reset=False``.
+
+ Examples::
+
+ click.echo(click.style('Hello World!', fg='green'))
+ click.echo(click.style('ATTENTION!', blink=True))
+ click.echo(click.style('Some things', reverse=True, fg='cyan'))
+ click.echo(click.style('More colors', fg=(255, 12, 128), bg=117))
+
+ Supported color names:
+
+ * ``black`` (might be a gray)
+ * ``red``
+ * ``green``
+ * ``yellow`` (might be an orange)
+ * ``blue``
+ * ``magenta``
+ * ``cyan``
+ * ``white`` (might be light gray)
+ * ``bright_black``
+ * ``bright_red``
+ * ``bright_green``
+ * ``bright_yellow``
+ * ``bright_blue``
+ * ``bright_magenta``
+ * ``bright_cyan``
+ * ``bright_white``
+ * ``reset`` (reset the color code only)
+
+ If the terminal supports it, color may also be specified as:
+
+ - An integer in the interval [0, 255]. The terminal must support
+ 8-bit/256-color mode.
+ - An RGB tuple of three integers in [0, 255]. The terminal must
+ support 24-bit/true-color mode.
+
+ See https://en.wikipedia.org/wiki/ANSI_color and
+ https://gist.github.com/XVilka/8346728 for more information.
+
+ :param text: the string to style with ansi codes.
+ :param fg: if provided this will become the foreground color.
+ :param bg: if provided this will become the background color.
+ :param bold: if provided this will enable or disable bold mode.
+ :param dim: if provided this will enable or disable dim mode. This is
+ badly supported.
+ :param underline: if provided this will enable or disable underline.
+ :param overline: if provided this will enable or disable overline.
+ :param italic: if provided this will enable or disable italic.
+ :param blink: if provided this will enable or disable blinking.
+ :param reverse: if provided this will enable or disable inverse
+ rendering (foreground becomes background and the
+ other way round).
+ :param strikethrough: if provided this will enable or disable
+ striking through text.
+ :param reset: by default a reset-all code is added at the end of the
+ string which means that styles do not carry over. This
+ can be disabled to compose styles.
+
+ .. versionchanged:: 8.0
+ A non-string ``message`` is converted to a string.
+
+ .. versionchanged:: 8.0
+ Added support for 256 and RGB color codes.
+
+ .. versionchanged:: 8.0
+ Added the ``strikethrough``, ``italic``, and ``overline``
+ parameters.
+
+ .. versionchanged:: 7.0
+ Added support for bright colors.
+
+ .. versionadded:: 2.0
+ """
+ if not isinstance(text, str):
+ text = str(text)
+
+ bits = []
+
+ if fg:
+ try:
+ bits.append(f"\033[{_interpret_color(fg)}m")
+ except KeyError:
+ raise TypeError(f"Unknown color {fg!r}") from None
+
+ if bg:
+ try:
+ bits.append(f"\033[{_interpret_color(bg, 10)}m")
+ except KeyError:
+ raise TypeError(f"Unknown color {bg!r}") from None
+
+ if bold is not None:
+ bits.append(f"\033[{1 if bold else 22}m")
+ if dim is not None:
+ bits.append(f"\033[{2 if dim else 22}m")
+ if underline is not None:
+ bits.append(f"\033[{4 if underline else 24}m")
+ if overline is not None:
+ bits.append(f"\033[{53 if overline else 55}m")
+ if italic is not None:
+ bits.append(f"\033[{3 if italic else 23}m")
+ if blink is not None:
+ bits.append(f"\033[{5 if blink else 25}m")
+ if reverse is not None:
+ bits.append(f"\033[{7 if reverse else 27}m")
+ if strikethrough is not None:
+ bits.append(f"\033[{9 if strikethrough else 29}m")
+ bits.append(text)
+ if reset:
+ bits.append(_ansi_reset_all)
+ return "".join(bits)
+
+
+def unstyle(text: str) -> str:
+ """Removes ANSI styling information from a string. Usually it's not
+ necessary to use this function as Click's echo function will
+ automatically remove styling if necessary.
+
+ .. versionadded:: 2.0
+
+ :param text: the text to remove style information from.
+ """
+ return strip_ansi(text)
+
+
+def secho(
+ message: t.Optional[t.Any] = None,
+ file: t.Optional[t.IO[t.AnyStr]] = None,
+ nl: bool = True,
+ err: bool = False,
+ color: t.Optional[bool] = None,
+ **styles: t.Any,
+) -> None:
+ """This function combines :func:`echo` and :func:`style` into one
+ call. As such the following two calls are the same::
+
+ click.secho('Hello World!', fg='green')
+ click.echo(click.style('Hello World!', fg='green'))
+
+ All keyword arguments are forwarded to the underlying functions
+ depending on which one they go with.
+
+ Non-string types will be converted to :class:`str`. However,
+ :class:`bytes` are passed directly to :meth:`echo` without applying
+ style. If you want to style bytes that represent text, call
+ :meth:`bytes.decode` first.
+
+ .. versionchanged:: 8.0
+ A non-string ``message`` is converted to a string. Bytes are
+ passed through without style applied.
+
+ .. versionadded:: 2.0
+ """
+ if message is not None and not isinstance(message, (bytes, bytearray)):
+ message = style(message, **styles)
+
+ return echo(message, file=file, nl=nl, err=err, color=color)
+
+
+def edit(
+ text: t.Optional[t.AnyStr] = None,
+ editor: t.Optional[str] = None,
+ env: t.Optional[t.Mapping[str, str]] = None,
+ require_save: bool = True,
+ extension: str = ".txt",
+ filename: t.Optional[str] = None,
+) -> t.Optional[t.AnyStr]:
+    r"""Edits the given text in the defined editor. If an editor is given
+    (ideally the full path to the executable, although the regular
+    operating system search path is used to find it) it overrides the
+    detected editor. Optionally, some environment variables can be used.
+    If the editor is closed without changes, `None` is returned. In case
+    a file is edited directly the return value is always `None` and
+    `require_save` and `extension` are ignored.
+
+ If the editor cannot be opened a :exc:`UsageError` is raised.
+
+ Note for Windows: to simplify cross-platform usage, the newlines are
+ automatically converted from POSIX to Windows and vice versa. As such,
+ the message here will have ``\n`` as newline markers.
+
+ :param text: the text to edit.
+ :param editor: optionally the editor to use. Defaults to automatic
+ detection.
+ :param env: environment variables to forward to the editor.
+ :param require_save: if this is true, then not saving in the editor
+ will make the return value become `None`.
+ :param extension: the extension to tell the editor about. This defaults
+ to `.txt` but changing this might change syntax
+ highlighting.
+ :param filename: if provided it will edit this file instead of the
+ provided text contents. It will not use a temporary
+ file as an indirection in that case.
+ """
+ from ._termui_impl import Editor
+
+ ed = Editor(editor=editor, env=env, require_save=require_save, extension=extension)
+
+ if filename is None:
+ return ed.edit(text)
+
+ ed.edit_file(filename)
+ return None
+
+
+def launch(url: str, wait: bool = False, locate: bool = False) -> int:
+ """This function launches the given URL (or filename) in the default
+ viewer application for this file type. If this is an executable, it
+ might launch the executable in a new session. The return value is
+ the exit code of the launched application. Usually, ``0`` indicates
+ success.
+
+ Examples::
+
+ click.launch('https://click.palletsprojects.com/')
+ click.launch('/my/downloaded/file', locate=True)
+
+ .. versionadded:: 2.0
+
+ :param url: URL or filename of the thing to launch.
+ :param wait: Wait for the program to exit before returning. This
+ only works if the launched program blocks. In particular,
+ ``xdg-open`` on Linux does not block.
+ :param locate: if this is set to `True` then instead of launching the
+ application associated with the URL it will attempt to
+ launch a file manager with the file located. This
+ might have weird effects if the URL does not point to
+ the filesystem.
+ """
+ from ._termui_impl import open_url
+
+ return open_url(url, wait=wait, locate=locate)
+
+
+# If this is provided, getchar() calls into this instead. This is used
+# for unittesting purposes.
+_getchar: t.Optional[t.Callable[[bool], str]] = None
+
+
+def getchar(echo: bool = False) -> str:
+    """Fetches a single character from the terminal and returns it. This
+    will always return a unicode character, and under certain rare
+    circumstances it might return more than one character. More than one
+    character is returned when, for whatever reason, multiple characters
+    end up in the terminal buffer or standard input was not actually a
+    terminal.
+
+ Note that this will always read from the terminal, even if something
+ is piped into the standard input.
+
+ Note for Windows: in rare cases when typing non-ASCII characters, this
+ function might wait for a second character and then return both at once.
+ This is because certain Unicode characters look like special-key markers.
+
+ .. versionadded:: 2.0
+
+ :param echo: if set to `True`, the character read will also show up on
+ the terminal. The default is to not show it.
+ """
+ global _getchar
+
+ if _getchar is None:
+ from ._termui_impl import getchar as f
+
+ _getchar = f
+
+ return _getchar(echo)
+
+
+def raw_terminal() -> t.ContextManager[int]:
+ from ._termui_impl import raw_terminal as f
+
+ return f()
+
+
+def pause(info: t.Optional[str] = None, err: bool = False) -> None:
+    """This function stops execution and waits for the user to press any
+ key to continue. This is similar to the Windows batch "pause"
+ command. If the program is not run through a terminal, this command
+ will instead do nothing.
+
+ .. versionadded:: 2.0
+
+ .. versionadded:: 4.0
+ Added the `err` parameter.
+
+ :param info: The message to print before pausing. Defaults to
+ ``"Press any key to continue..."``.
+    :param err: if set to true the message goes to ``stderr`` instead of
+        ``stdout``, the same as with echo.
+ """
+ if not isatty(sys.stdin) or not isatty(sys.stdout):
+ return
+
+ if info is None:
+ info = _("Press any key to continue...")
+
+ try:
+ if info:
+ echo(info, nl=False, err=err)
+ try:
+ getchar()
+ except (KeyboardInterrupt, EOFError):
+ pass
+ finally:
+ if info:
+ echo(err=err)
diff --git a/billinglayer/python/click/testing.py b/billinglayer/python/click/testing.py
new file mode 100644
index 0000000..e0df0d2
--- /dev/null
+++ b/billinglayer/python/click/testing.py
@@ -0,0 +1,479 @@
+import contextlib
+import io
+import os
+import shlex
+import shutil
+import sys
+import tempfile
+import typing as t
+from types import TracebackType
+
+from . import formatting
+from . import termui
+from . import utils
+from ._compat import _find_binary_reader
+
+if t.TYPE_CHECKING:
+ from .core import BaseCommand
+
+
+class EchoingStdin:
+ def __init__(self, input: t.BinaryIO, output: t.BinaryIO) -> None:
+ self._input = input
+ self._output = output
+ self._paused = False
+
+ def __getattr__(self, x: str) -> t.Any:
+ return getattr(self._input, x)
+
+ def _echo(self, rv: bytes) -> bytes:
+ if not self._paused:
+ self._output.write(rv)
+
+ return rv
+
+ def read(self, n: int = -1) -> bytes:
+ return self._echo(self._input.read(n))
+
+ def read1(self, n: int = -1) -> bytes:
+ return self._echo(self._input.read1(n)) # type: ignore
+
+ def readline(self, n: int = -1) -> bytes:
+ return self._echo(self._input.readline(n))
+
+ def readlines(self) -> t.List[bytes]:
+ return [self._echo(x) for x in self._input.readlines()]
+
+ def __iter__(self) -> t.Iterator[bytes]:
+ return iter(self._echo(x) for x in self._input)
+
+ def __repr__(self) -> str:
+ return repr(self._input)
+
+
+@contextlib.contextmanager
+def _pause_echo(stream: t.Optional[EchoingStdin]) -> t.Iterator[None]:
+ if stream is None:
+ yield
+ else:
+ stream._paused = True
+ yield
+ stream._paused = False
+
+
+class _NamedTextIOWrapper(io.TextIOWrapper):
+ def __init__(
+ self, buffer: t.BinaryIO, name: str, mode: str, **kwargs: t.Any
+ ) -> None:
+ super().__init__(buffer, **kwargs)
+ self._name = name
+ self._mode = mode
+
+ @property
+ def name(self) -> str:
+ return self._name
+
+ @property
+ def mode(self) -> str:
+ return self._mode
+
+
+def make_input_stream(
+ input: t.Optional[t.Union[str, bytes, t.IO[t.Any]]], charset: str
+) -> t.BinaryIO:
+ # Is already an input stream.
+ if hasattr(input, "read"):
+ rv = _find_binary_reader(t.cast(t.IO[t.Any], input))
+
+ if rv is not None:
+ return rv
+
+ raise TypeError("Could not find binary reader for input stream.")
+
+ if input is None:
+ input = b""
+ elif isinstance(input, str):
+ input = input.encode(charset)
+
+ return io.BytesIO(input)
+
+
+class Result:
+ """Holds the captured result of an invoked CLI script."""
+
+ def __init__(
+ self,
+ runner: "CliRunner",
+ stdout_bytes: bytes,
+ stderr_bytes: t.Optional[bytes],
+ return_value: t.Any,
+ exit_code: int,
+ exception: t.Optional[BaseException],
+ exc_info: t.Optional[
+ t.Tuple[t.Type[BaseException], BaseException, TracebackType]
+ ] = None,
+ ):
+ #: The runner that created the result
+ self.runner = runner
+ #: The standard output as bytes.
+ self.stdout_bytes = stdout_bytes
+        #: The standard error as bytes, or None if not available.
+ self.stderr_bytes = stderr_bytes
+ #: The value returned from the invoked command.
+ #:
+ #: .. versionadded:: 8.0
+ self.return_value = return_value
+ #: The exit code as integer.
+ self.exit_code = exit_code
+ #: The exception that happened if one did.
+ self.exception = exception
+ #: The traceback
+ self.exc_info = exc_info
+
+ @property
+ def output(self) -> str:
+ """The (standard) output as unicode string."""
+ return self.stdout
+
+ @property
+ def stdout(self) -> str:
+ """The standard output as unicode string."""
+ return self.stdout_bytes.decode(self.runner.charset, "replace").replace(
+ "\r\n", "\n"
+ )
+
+ @property
+ def stderr(self) -> str:
+ """The standard error as unicode string."""
+ if self.stderr_bytes is None:
+ raise ValueError("stderr not separately captured")
+ return self.stderr_bytes.decode(self.runner.charset, "replace").replace(
+ "\r\n", "\n"
+ )
+
+ def __repr__(self) -> str:
+ exc_str = repr(self.exception) if self.exception else "okay"
+ return f"<{type(self).__name__} {exc_str}>"
+
+
+class CliRunner:
+ """The CLI runner provides functionality to invoke a Click command line
+ script for unittesting purposes in a isolated environment. This only
+ works in single-threaded systems without any concurrency as it changes the
+ global interpreter state.
+
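+    A minimal usage sketch (the ``hello`` command is illustrative):
+
+    .. code-block:: python
+
+        import click
+        from click.testing import CliRunner
+
+        @click.command()
+        def hello():
+            click.echo("Hello!")
+
+        runner = CliRunner()
+        result = runner.invoke(hello)
+        assert result.exit_code == 0
+        assert result.output == "Hello!\n"
+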
+ :param charset: the character set for the input and output data.
+ :param env: a dictionary with environment variables for overriding.
+ :param echo_stdin: if this is set to `True`, then reading from stdin writes
+ to stdout. This is useful for showing examples in
+ some circumstances. Note that regular prompts
+ will automatically echo the input.
+ :param mix_stderr: if this is set to `False`, then stdout and stderr are
+ preserved as independent streams. This is useful for
+ Unix-philosophy apps that have predictable stdout and
+ noisy stderr, such that each may be measured
+                       independently.
+ """
+
+ def __init__(
+ self,
+ charset: str = "utf-8",
+ env: t.Optional[t.Mapping[str, t.Optional[str]]] = None,
+ echo_stdin: bool = False,
+ mix_stderr: bool = True,
+ ) -> None:
+ self.charset = charset
+ self.env: t.Mapping[str, t.Optional[str]] = env or {}
+ self.echo_stdin = echo_stdin
+ self.mix_stderr = mix_stderr
+
+ def get_default_prog_name(self, cli: "BaseCommand") -> str:
+ """Given a command object it will return the default program name
+ for it. The default is the `name` attribute or ``"root"`` if not
+ set.
+ """
+ return cli.name or "root"
+
+ def make_env(
+ self, overrides: t.Optional[t.Mapping[str, t.Optional[str]]] = None
+ ) -> t.Mapping[str, t.Optional[str]]:
+ """Returns the environment overrides for invoking a script."""
+ rv = dict(self.env)
+ if overrides:
+ rv.update(overrides)
+ return rv
+
+ @contextlib.contextmanager
+ def isolation(
+ self,
+ input: t.Optional[t.Union[str, bytes, t.IO[t.Any]]] = None,
+ env: t.Optional[t.Mapping[str, t.Optional[str]]] = None,
+ color: bool = False,
+ ) -> t.Iterator[t.Tuple[io.BytesIO, t.Optional[io.BytesIO]]]:
+ """A context manager that sets up the isolation for invoking of a
+ command line tool. This sets up stdin with the given input data
+ and `os.environ` with the overrides from the given dictionary.
+ This also rebinds some internals in Click to be mocked (like the
+ prompt functionality).
+
+ This is automatically done in the :meth:`invoke` method.
+
+ :param input: the input stream to put into sys.stdin.
+ :param env: the environment overrides as dictionary.
+ :param color: whether the output should contain color codes. The
+ application can still override this explicitly.
+
+ .. versionchanged:: 8.0
+ ``stderr`` is opened with ``errors="backslashreplace"``
+ instead of the default ``"strict"``.
+
+ .. versionchanged:: 4.0
+ Added the ``color`` parameter.
+ """
+ bytes_input = make_input_stream(input, self.charset)
+ echo_input = None
+
+ old_stdin = sys.stdin
+ old_stdout = sys.stdout
+ old_stderr = sys.stderr
+ old_forced_width = formatting.FORCED_WIDTH
+ formatting.FORCED_WIDTH = 80
+
+ env = self.make_env(env)
+
+ bytes_output = io.BytesIO()
+
+ if self.echo_stdin:
+ bytes_input = echo_input = t.cast(
+ t.BinaryIO, EchoingStdin(bytes_input, bytes_output)
+ )
+
+ sys.stdin = text_input = _NamedTextIOWrapper(
+ bytes_input, encoding=self.charset, name="", mode="r"
+ )
+
+ if self.echo_stdin:
+ # Force unbuffered reads, otherwise TextIOWrapper reads a
+ # large chunk which is echoed early.
+ text_input._CHUNK_SIZE = 1 # type: ignore
+
+ sys.stdout = _NamedTextIOWrapper(
+ bytes_output, encoding=self.charset, name="", mode="w"
+ )
+
+ bytes_error = None
+ if self.mix_stderr:
+ sys.stderr = sys.stdout
+ else:
+ bytes_error = io.BytesIO()
+ sys.stderr = _NamedTextIOWrapper(
+ bytes_error,
+ encoding=self.charset,
+ name="",
+ mode="w",
+ errors="backslashreplace",
+ )
+
+ @_pause_echo(echo_input) # type: ignore
+ def visible_input(prompt: t.Optional[str] = None) -> str:
+ sys.stdout.write(prompt or "")
+ val = text_input.readline().rstrip("\r\n")
+ sys.stdout.write(f"{val}\n")
+ sys.stdout.flush()
+ return val
+
+ @_pause_echo(echo_input) # type: ignore
+ def hidden_input(prompt: t.Optional[str] = None) -> str:
+ sys.stdout.write(f"{prompt or ''}\n")
+ sys.stdout.flush()
+ return text_input.readline().rstrip("\r\n")
+
+ @_pause_echo(echo_input) # type: ignore
+ def _getchar(echo: bool) -> str:
+ char = sys.stdin.read(1)
+
+ if echo:
+ sys.stdout.write(char)
+
+ sys.stdout.flush()
+ return char
+
+ default_color = color
+
+ def should_strip_ansi(
+ stream: t.Optional[t.IO[t.Any]] = None, color: t.Optional[bool] = None
+ ) -> bool:
+ if color is None:
+ return not default_color
+ return not color
+
+ old_visible_prompt_func = termui.visible_prompt_func
+ old_hidden_prompt_func = termui.hidden_prompt_func
+ old__getchar_func = termui._getchar
+ old_should_strip_ansi = utils.should_strip_ansi # type: ignore
+ termui.visible_prompt_func = visible_input
+ termui.hidden_prompt_func = hidden_input
+ termui._getchar = _getchar
+ utils.should_strip_ansi = should_strip_ansi # type: ignore
+
+ old_env = {}
+ try:
+ for key, value in env.items():
+ old_env[key] = os.environ.get(key)
+ if value is None:
+ try:
+ del os.environ[key]
+ except Exception:
+ pass
+ else:
+ os.environ[key] = value
+ yield (bytes_output, bytes_error)
+ finally:
+ for key, value in old_env.items():
+ if value is None:
+ try:
+ del os.environ[key]
+ except Exception:
+ pass
+ else:
+ os.environ[key] = value
+ sys.stdout = old_stdout
+ sys.stderr = old_stderr
+ sys.stdin = old_stdin
+ termui.visible_prompt_func = old_visible_prompt_func
+ termui.hidden_prompt_func = old_hidden_prompt_func
+ termui._getchar = old__getchar_func
+ utils.should_strip_ansi = old_should_strip_ansi # type: ignore
+ formatting.FORCED_WIDTH = old_forced_width
+
+ def invoke(
+ self,
+ cli: "BaseCommand",
+ args: t.Optional[t.Union[str, t.Sequence[str]]] = None,
+ input: t.Optional[t.Union[str, bytes, t.IO[t.Any]]] = None,
+ env: t.Optional[t.Mapping[str, t.Optional[str]]] = None,
+ catch_exceptions: bool = True,
+ color: bool = False,
+ **extra: t.Any,
+ ) -> Result:
+ """Invokes a command in an isolated environment. The arguments are
+ forwarded directly to the command line script, the `extra` keyword
+ arguments are passed to the :meth:`~clickpkg.Command.main` function of
+ the command.
+
+ This returns a :class:`Result` object.
+
+ :param cli: the command to invoke
+ :param args: the arguments to invoke. It may be given as an iterable
+ or a string. When given as string it will be interpreted
+ as a Unix shell command. More details at
+ :func:`shlex.split`.
+ :param input: the input data for `sys.stdin`.
+ :param env: the environment overrides.
+ :param catch_exceptions: Whether to catch any other exceptions than
+ ``SystemExit``.
+ :param extra: the keyword arguments to pass to :meth:`main`.
+ :param color: whether the output should contain color codes. The
+ application can still override this explicitly.
+
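+        A minimal sketch (the ``greet`` command is illustrative):
+
+        .. code-block:: python
+
+            import click
+            from click.testing import CliRunner
+
+            @click.command()
+            @click.option("--name", default="world")
+            def greet(name):
+                click.echo(f"Hello {name}!")
+
+            runner = CliRunner()
+            result = runner.invoke(greet, ["--name", "Ada"])
+            assert result.output == "Hello Ada!\n"
+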
+ .. versionchanged:: 8.0
+ The result object has the ``return_value`` attribute with
+ the value returned from the invoked command.
+
+ .. versionchanged:: 4.0
+ Added the ``color`` parameter.
+
+ .. versionchanged:: 3.0
+ Added the ``catch_exceptions`` parameter.
+
+ .. versionchanged:: 3.0
+ The result object has the ``exc_info`` attribute with the
+ traceback if available.
+ """
+ exc_info = None
+ with self.isolation(input=input, env=env, color=color) as outstreams:
+ return_value = None
+ exception: t.Optional[BaseException] = None
+ exit_code = 0
+
+ if isinstance(args, str):
+ args = shlex.split(args)
+
+ try:
+ prog_name = extra.pop("prog_name")
+ except KeyError:
+ prog_name = self.get_default_prog_name(cli)
+
+ try:
+ return_value = cli.main(args=args or (), prog_name=prog_name, **extra)
+ except SystemExit as e:
+ exc_info = sys.exc_info()
+ e_code = t.cast(t.Optional[t.Union[int, t.Any]], e.code)
+
+ if e_code is None:
+ e_code = 0
+
+ if e_code != 0:
+ exception = e
+
+ if not isinstance(e_code, int):
+ sys.stdout.write(str(e_code))
+ sys.stdout.write("\n")
+ e_code = 1
+
+ exit_code = e_code
+
+ except Exception as e:
+ if not catch_exceptions:
+ raise
+ exception = e
+ exit_code = 1
+ exc_info = sys.exc_info()
+ finally:
+ sys.stdout.flush()
+ stdout = outstreams[0].getvalue()
+ if self.mix_stderr:
+ stderr = None
+ else:
+ stderr = outstreams[1].getvalue() # type: ignore
+
+ return Result(
+ runner=self,
+ stdout_bytes=stdout,
+ stderr_bytes=stderr,
+ return_value=return_value,
+ exit_code=exit_code,
+ exception=exception,
+ exc_info=exc_info, # type: ignore
+ )
+
+ @contextlib.contextmanager
+ def isolated_filesystem(
+ self, temp_dir: t.Optional[t.Union[str, "os.PathLike[str]"]] = None
+ ) -> t.Iterator[str]:
+ """A context manager that creates a temporary directory and
+ changes the current working directory to it. This isolates tests
+ that affect the contents of the CWD to prevent them from
+ interfering with each other.
+
+ :param temp_dir: Create the temporary directory under this
+ directory. If given, the created directory is not removed
+ when exiting.
+
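+        A minimal sketch (the file name is illustrative):
+
+        .. code-block:: python
+
+            from click.testing import CliRunner
+
+            runner = CliRunner()
+
+            with runner.isolated_filesystem():
+                # the CWD is now a fresh temporary directory
+                with open("config.txt", "w") as f:
+                    f.write("test")
+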
+ .. versionchanged:: 8.0
+ Added the ``temp_dir`` parameter.
+ """
+ cwd = os.getcwd()
+ dt = tempfile.mkdtemp(dir=temp_dir)
+ os.chdir(dt)
+
+ try:
+ yield dt
+ finally:
+ os.chdir(cwd)
+
+ if temp_dir is None:
+ try:
+ shutil.rmtree(dt)
+ except OSError: # noqa: B014
+ pass
diff --git a/billinglayer/python/click/types.py b/billinglayer/python/click/types.py
new file mode 100644
index 0000000..2b1d179
--- /dev/null
+++ b/billinglayer/python/click/types.py
@@ -0,0 +1,1089 @@
+import os
+import stat
+import sys
+import typing as t
+from datetime import datetime
+from gettext import gettext as _
+from gettext import ngettext
+
+from ._compat import _get_argv_encoding
+from ._compat import open_stream
+from .exceptions import BadParameter
+from .utils import format_filename
+from .utils import LazyFile
+from .utils import safecall
+
+if t.TYPE_CHECKING:
+ import typing_extensions as te
+ from .core import Context
+ from .core import Parameter
+ from .shell_completion import CompletionItem
+
+
+class ParamType:
+ """Represents the type of a parameter. Validates and converts values
+ from the command line or Python into the correct type.
+
+ To implement a custom type, subclass and implement at least the
+ following:
+
+ - The :attr:`name` class attribute must be set.
+ - Calling an instance of the type with ``None`` must return
+ ``None``. This is already implemented by default.
+ - :meth:`convert` must convert string values to the correct type.
+ - :meth:`convert` must accept values that are already the correct
+ type.
+ - It must be able to convert a value if the ``ctx`` and ``param``
+ arguments are ``None``. This can occur when converting prompt
+ input.
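+
+    A minimal sketch of a custom type following these rules (the class and
+    its name are illustrative):
+
+    .. code-block:: python
+
+        import click
+
+        class CommaSeparated(click.ParamType):
+            name = "comma-separated"
+
+            def convert(self, value, param, ctx):
+                if isinstance(value, list):
+                    return value  # already converted
+                return [item.strip() for item in value.split(",")]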
+ """
+
+ is_composite: t.ClassVar[bool] = False
+ arity: t.ClassVar[int] = 1
+
+ #: the descriptive name of this type
+ name: str
+
+ #: if a list of this type is expected and the value is pulled from a
+ #: string environment variable, this is what splits it up. `None`
+ #: means any whitespace. For all parameters the general rule is that
+ #: whitespace splits them up. The exception are paths and files which
+ #: are split by ``os.path.pathsep`` by default (":" on Unix and ";" on
+ #: Windows).
+ envvar_list_splitter: t.ClassVar[t.Optional[str]] = None
+
+ def to_info_dict(self) -> t.Dict[str, t.Any]:
+ """Gather information that could be useful for a tool generating
+ user-facing documentation.
+
+ Use :meth:`click.Context.to_info_dict` to traverse the entire
+ CLI structure.
+
+ .. versionadded:: 8.0
+ """
+ # The class name without the "ParamType" suffix.
+ param_type = type(self).__name__.partition("ParamType")[0]
+ param_type = param_type.partition("ParameterType")[0]
+
+ # Custom subclasses might not remember to set a name.
+ if hasattr(self, "name"):
+ name = self.name
+ else:
+ name = param_type
+
+ return {"param_type": param_type, "name": name}
+
+ def __call__(
+ self,
+ value: t.Any,
+ param: t.Optional["Parameter"] = None,
+ ctx: t.Optional["Context"] = None,
+ ) -> t.Any:
+ if value is not None:
+ return self.convert(value, param, ctx)
+
+ def get_metavar(self, param: "Parameter") -> t.Optional[str]:
+ """Returns the metavar default for this param if it provides one."""
+
+ def get_missing_message(self, param: "Parameter") -> t.Optional[str]:
+ """Optionally might return extra information about a missing
+ parameter.
+
+ .. versionadded:: 2.0
+ """
+
+ def convert(
+ self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"]
+ ) -> t.Any:
+ """Convert the value to the correct type. This is not called if
+ the value is ``None`` (the missing value).
+
+ This must accept string values from the command line, as well as
+ values that are already the correct type. It may also convert
+ other compatible types.
+
+ The ``param`` and ``ctx`` arguments may be ``None`` in certain
+ situations, such as when converting prompt input.
+
+ If the value cannot be converted, call :meth:`fail` with a
+ descriptive message.
+
+ :param value: The value to convert.
+ :param param: The parameter that is using this type to convert
+ its value. May be ``None``.
+ :param ctx: The current context that arrived at this value. May
+ be ``None``.
+ """
+ return value
+
+ def split_envvar_value(self, rv: str) -> t.Sequence[str]:
+ """Given a value from an environment variable this splits it up
+ into small chunks depending on the defined envvar list splitter.
+
+ If the splitter is set to `None`, which means that whitespace splits,
+ then leading and trailing whitespace is ignored. Otherwise, leading
+ and trailing splitters usually lead to empty items being included.
+ """
+ return (rv or "").split(self.envvar_list_splitter)
+
+ def fail(
+ self,
+ message: str,
+ param: t.Optional["Parameter"] = None,
+ ctx: t.Optional["Context"] = None,
+ ) -> "t.NoReturn":
+ """Helper method to fail with an invalid value message."""
+ raise BadParameter(message, ctx=ctx, param=param)
+
+ def shell_complete(
+ self, ctx: "Context", param: "Parameter", incomplete: str
+ ) -> t.List["CompletionItem"]:
+ """Return a list of
+ :class:`~click.shell_completion.CompletionItem` objects for the
+ incomplete value. Most types do not provide completions, but
+ some do, and this allows custom types to provide custom
+ completions as well.
+
+ :param ctx: Invocation context for this command.
+ :param param: The parameter that is requesting completion.
+ :param incomplete: Value being completed. May be empty.
+
+ .. versionadded:: 8.0
+ """
+ return []
+
+
+class CompositeParamType(ParamType):
+ is_composite = True
+
+ @property
+ def arity(self) -> int: # type: ignore
+ raise NotImplementedError()
+
+
+class FuncParamType(ParamType):
+ def __init__(self, func: t.Callable[[t.Any], t.Any]) -> None:
+ self.name: str = func.__name__
+ self.func = func
+
+ def to_info_dict(self) -> t.Dict[str, t.Any]:
+ info_dict = super().to_info_dict()
+ info_dict["func"] = self.func
+ return info_dict
+
+ def convert(
+ self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"]
+ ) -> t.Any:
+ try:
+ return self.func(value)
+ except ValueError:
+ try:
+ value = str(value)
+ except UnicodeError:
+ value = value.decode("utf-8", "replace")
+
+ self.fail(value, param, ctx)
+
+
+class UnprocessedParamType(ParamType):
+ name = "text"
+
+ def convert(
+ self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"]
+ ) -> t.Any:
+ return value
+
+ def __repr__(self) -> str:
+ return "UNPROCESSED"
+
+
+class StringParamType(ParamType):
+ name = "text"
+
+ def convert(
+ self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"]
+ ) -> t.Any:
+ if isinstance(value, bytes):
+ enc = _get_argv_encoding()
+ try:
+ value = value.decode(enc)
+ except UnicodeError:
+ fs_enc = sys.getfilesystemencoding()
+ if fs_enc != enc:
+ try:
+ value = value.decode(fs_enc)
+ except UnicodeError:
+ value = value.decode("utf-8", "replace")
+ else:
+ value = value.decode("utf-8", "replace")
+ return value
+ return str(value)
+
+ def __repr__(self) -> str:
+ return "STRING"
+
+
+class Choice(ParamType):
+ """The choice type allows a value to be checked against a fixed set
+ of supported values. All of these values have to be strings.
+
+ You should only pass a list or tuple of choices. Other iterables
+ (like generators) may lead to surprising results.
+
+ The resulting value will always be one of the originally passed choices
+ regardless of ``case_sensitive`` or any ``ctx.token_normalize_func``
+ being specified.
+
+ See :ref:`choice-opts` for an example.
+
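+    A minimal sketch (the option name and choices are illustrative):
+
+    .. code-block:: python
+
+        import click
+
+        @click.command()
+        @click.option(
+            "--fmt", type=click.Choice(["json", "yaml"], case_sensitive=False)
+        )
+        def export(fmt):
+            click.echo(fmt)
+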
+ :param case_sensitive: Set to false to make choices case
+ insensitive. Defaults to true.
+ """
+
+ name = "choice"
+
+ def __init__(self, choices: t.Sequence[str], case_sensitive: bool = True) -> None:
+ self.choices = choices
+ self.case_sensitive = case_sensitive
+
+ def to_info_dict(self) -> t.Dict[str, t.Any]:
+ info_dict = super().to_info_dict()
+ info_dict["choices"] = self.choices
+ info_dict["case_sensitive"] = self.case_sensitive
+ return info_dict
+
+ def get_metavar(self, param: "Parameter") -> str:
+ choices_str = "|".join(self.choices)
+
+ # Use curly braces to indicate a required argument.
+ if param.required and param.param_type_name == "argument":
+ return f"{{{choices_str}}}"
+
+        # Use square brackets to indicate an option or optional argument.
+ return f"[{choices_str}]"
+
+ def get_missing_message(self, param: "Parameter") -> str:
+ return _("Choose from:\n\t{choices}").format(choices=",\n\t".join(self.choices))
+
+ def convert(
+ self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"]
+ ) -> t.Any:
+ # Match through normalization and case sensitivity
+ # first do token_normalize_func, then lowercase
+ # preserve original `value` to produce an accurate message in
+ # `self.fail`
+ normed_value = value
+ normed_choices = {choice: choice for choice in self.choices}
+
+ if ctx is not None and ctx.token_normalize_func is not None:
+ normed_value = ctx.token_normalize_func(value)
+ normed_choices = {
+ ctx.token_normalize_func(normed_choice): original
+ for normed_choice, original in normed_choices.items()
+ }
+
+ if not self.case_sensitive:
+ normed_value = normed_value.casefold()
+ normed_choices = {
+ normed_choice.casefold(): original
+ for normed_choice, original in normed_choices.items()
+ }
+
+ if normed_value in normed_choices:
+ return normed_choices[normed_value]
+
+ choices_str = ", ".join(map(repr, self.choices))
+ self.fail(
+ ngettext(
+ "{value!r} is not {choice}.",
+ "{value!r} is not one of {choices}.",
+ len(self.choices),
+ ).format(value=value, choice=choices_str, choices=choices_str),
+ param,
+ ctx,
+ )
+
+ def __repr__(self) -> str:
+ return f"Choice({list(self.choices)})"
+
+ def shell_complete(
+ self, ctx: "Context", param: "Parameter", incomplete: str
+ ) -> t.List["CompletionItem"]:
+ """Complete choices that start with the incomplete value.
+
+ :param ctx: Invocation context for this command.
+ :param param: The parameter that is requesting completion.
+ :param incomplete: Value being completed. May be empty.
+
+ .. versionadded:: 8.0
+ """
+ from click.shell_completion import CompletionItem
+
+ str_choices = map(str, self.choices)
+
+ if self.case_sensitive:
+ matched = (c for c in str_choices if c.startswith(incomplete))
+ else:
+ incomplete = incomplete.lower()
+ matched = (c for c in str_choices if c.lower().startswith(incomplete))
+
+ return [CompletionItem(c) for c in matched]
+
+
+class DateTime(ParamType):
+ """The DateTime type converts date strings into `datetime` objects.
+
+ The format strings which are checked are configurable, but default to some
+ common (non-timezone aware) ISO 8601 formats.
+
+ When specifying *DateTime* formats, you should only pass a list or a tuple.
+ Other iterables, like generators, may lead to surprising results.
+
+ The format strings are processed using ``datetime.strptime``, and this
+ consequently defines the format strings which are allowed.
+
+ Parsing is tried using each format, in order, and the first format which
+ parses successfully is used.
+
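+    A minimal sketch (the option name is illustrative):
+
+    .. code-block:: python
+
+        import click
+
+        @click.command()
+        @click.option("--start", type=click.DateTime())
+        def report(start):
+            click.echo(repr(start))  # a ``datetime.datetime`` instance
+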
+ :param formats: A list or tuple of date format strings, in the order in
+ which they should be tried. Defaults to
+ ``'%Y-%m-%d'``, ``'%Y-%m-%dT%H:%M:%S'``,
+ ``'%Y-%m-%d %H:%M:%S'``.
+ """
+
+ name = "datetime"
+
+ def __init__(self, formats: t.Optional[t.Sequence[str]] = None):
+ self.formats: t.Sequence[str] = formats or [
+ "%Y-%m-%d",
+ "%Y-%m-%dT%H:%M:%S",
+ "%Y-%m-%d %H:%M:%S",
+ ]
+
+ def to_info_dict(self) -> t.Dict[str, t.Any]:
+ info_dict = super().to_info_dict()
+ info_dict["formats"] = self.formats
+ return info_dict
+
+ def get_metavar(self, param: "Parameter") -> str:
+ return f"[{'|'.join(self.formats)}]"
+
+ def _try_to_convert_date(self, value: t.Any, format: str) -> t.Optional[datetime]:
+ try:
+ return datetime.strptime(value, format)
+ except ValueError:
+ return None
+
+ def convert(
+ self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"]
+ ) -> t.Any:
+ if isinstance(value, datetime):
+ return value
+
+ for format in self.formats:
+ converted = self._try_to_convert_date(value, format)
+
+ if converted is not None:
+ return converted
+
+ formats_str = ", ".join(map(repr, self.formats))
+ self.fail(
+ ngettext(
+ "{value!r} does not match the format {format}.",
+ "{value!r} does not match the formats {formats}.",
+ len(self.formats),
+ ).format(value=value, format=formats_str, formats=formats_str),
+ param,
+ ctx,
+ )
+
+ def __repr__(self) -> str:
+ return "DateTime"
+
+
+class _NumberParamTypeBase(ParamType):
+ _number_class: t.ClassVar[t.Type[t.Any]]
+
+ def convert(
+ self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"]
+ ) -> t.Any:
+ try:
+ return self._number_class(value)
+ except ValueError:
+ self.fail(
+ _("{value!r} is not a valid {number_type}.").format(
+ value=value, number_type=self.name
+ ),
+ param,
+ ctx,
+ )
+
+
+class _NumberRangeBase(_NumberParamTypeBase):
+ def __init__(
+ self,
+ min: t.Optional[float] = None,
+ max: t.Optional[float] = None,
+ min_open: bool = False,
+ max_open: bool = False,
+ clamp: bool = False,
+ ) -> None:
+ self.min = min
+ self.max = max
+ self.min_open = min_open
+ self.max_open = max_open
+ self.clamp = clamp
+
+ def to_info_dict(self) -> t.Dict[str, t.Any]:
+ info_dict = super().to_info_dict()
+ info_dict.update(
+ min=self.min,
+ max=self.max,
+ min_open=self.min_open,
+ max_open=self.max_open,
+ clamp=self.clamp,
+ )
+ return info_dict
+
+ def convert(
+ self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"]
+ ) -> t.Any:
+ import operator
+
+ rv = super().convert(value, param, ctx)
+ lt_min: bool = self.min is not None and (
+ operator.le if self.min_open else operator.lt
+ )(rv, self.min)
+ gt_max: bool = self.max is not None and (
+ operator.ge if self.max_open else operator.gt
+ )(rv, self.max)
+
+ if self.clamp:
+ if lt_min:
+ return self._clamp(self.min, 1, self.min_open) # type: ignore
+
+ if gt_max:
+ return self._clamp(self.max, -1, self.max_open) # type: ignore
+
+ if lt_min or gt_max:
+ self.fail(
+ _("{value} is not in the range {range}.").format(
+ value=rv, range=self._describe_range()
+ ),
+ param,
+ ctx,
+ )
+
+ return rv
+
+ def _clamp(self, bound: float, dir: "te.Literal[1, -1]", open: bool) -> float:
+ """Find the valid value to clamp to bound in the given
+ direction.
+
+ :param bound: The boundary value.
+ :param dir: 1 or -1 indicating the direction to move.
+ :param open: If true, the range does not include the bound.
+ """
+ raise NotImplementedError
+
+ def _describe_range(self) -> str:
+ """Describe the range for use in help text."""
+ if self.min is None:
+ op = "<" if self.max_open else "<="
+ return f"x{op}{self.max}"
+
+ if self.max is None:
+ op = ">" if self.min_open else ">="
+ return f"x{op}{self.min}"
+
+ lop = "<" if self.min_open else "<="
+ rop = "<" if self.max_open else "<="
+ return f"{self.min}{lop}x{rop}{self.max}"
+
+ def __repr__(self) -> str:
+ clamp = " clamped" if self.clamp else ""
+ return f"<{type(self).__name__} {self._describe_range()}{clamp}>"
+
+
+class IntParamType(_NumberParamTypeBase):
+ name = "integer"
+ _number_class = int
+
+ def __repr__(self) -> str:
+ return "INT"
+
+
+class IntRange(_NumberRangeBase, IntParamType):
+ """Restrict an :data:`click.INT` value to a range of accepted
+ values. See :ref:`ranges`.
+
+ If ``min`` or ``max`` are not passed, any value is accepted in that
+ direction. If ``min_open`` or ``max_open`` are enabled, the
+ corresponding boundary is not included in the range.
+
+ If ``clamp`` is enabled, a value outside the range is clamped to the
+ boundary instead of failing.
+
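+    A minimal sketch (the option names are illustrative):
+
+    .. code-block:: python
+
+        import click
+
+        @click.command()
+        @click.option("--count", type=click.IntRange(0, 10))
+        @click.option("--volume", type=click.IntRange(0, 11, clamp=True))
+        def run(count, volume):
+            click.echo(f"{count} {volume}")
+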
+ .. versionchanged:: 8.0
+ Added the ``min_open`` and ``max_open`` parameters.
+ """
+
+ name = "integer range"
+
+ def _clamp( # type: ignore
+ self, bound: int, dir: "te.Literal[1, -1]", open: bool
+ ) -> int:
+ if not open:
+ return bound
+
+ return bound + dir
+
+
+class FloatParamType(_NumberParamTypeBase):
+ name = "float"
+ _number_class = float
+
+ def __repr__(self) -> str:
+ return "FLOAT"
+
+
+class FloatRange(_NumberRangeBase, FloatParamType):
+ """Restrict a :data:`click.FLOAT` value to a range of accepted
+ values. See :ref:`ranges`.
+
+ If ``min`` or ``max`` are not passed, any value is accepted in that
+ direction. If ``min_open`` or ``max_open`` are enabled, the
+ corresponding boundary is not included in the range.
+
+ If ``clamp`` is enabled, a value outside the range is clamped to the
+ boundary instead of failing. This is not supported if either
+ boundary is marked ``open``.
+
+ .. versionchanged:: 8.0
+ Added the ``min_open`` and ``max_open`` parameters.
+ """
+
+ name = "float range"
+
+ def __init__(
+ self,
+ min: t.Optional[float] = None,
+ max: t.Optional[float] = None,
+ min_open: bool = False,
+ max_open: bool = False,
+ clamp: bool = False,
+ ) -> None:
+ super().__init__(
+ min=min, max=max, min_open=min_open, max_open=max_open, clamp=clamp
+ )
+
+ if (min_open or max_open) and clamp:
+ raise TypeError("Clamping is not supported for open bounds.")
+
+ def _clamp(self, bound: float, dir: "te.Literal[1, -1]", open: bool) -> float:
+ if not open:
+ return bound
+
+ # Could use Python 3.9's math.nextafter here, but clamping an
+ # open float range doesn't seem to be particularly useful. It's
+ # left up to the user to write a callback to do it if needed.
+ raise RuntimeError("Clamping is not supported for open bounds.")
+
+
+class BoolParamType(ParamType):
+ name = "boolean"
+
+ def convert(
+ self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"]
+ ) -> t.Any:
+ if value in {False, True}:
+ return bool(value)
+
+ norm = value.strip().lower()
+
+ if norm in {"1", "true", "t", "yes", "y", "on"}:
+ return True
+
+ if norm in {"0", "false", "f", "no", "n", "off"}:
+ return False
+
+ self.fail(
+ _("{value!r} is not a valid boolean.").format(value=value), param, ctx
+ )
+
+ def __repr__(self) -> str:
+ return "BOOL"
+
+
+class UUIDParameterType(ParamType):
+ name = "uuid"
+
+ def convert(
+ self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"]
+ ) -> t.Any:
+ import uuid
+
+ if isinstance(value, uuid.UUID):
+ return value
+
+ value = value.strip()
+
+ try:
+ return uuid.UUID(value)
+ except ValueError:
+ self.fail(
+ _("{value!r} is not a valid UUID.").format(value=value), param, ctx
+ )
+
+ def __repr__(self) -> str:
+ return "UUID"
+
+
+class File(ParamType):
+ """Declares a parameter to be a file for reading or writing. The file
+ is automatically closed once the context tears down (after the command
+ finished working).
+
+ Files can be opened for reading or writing. The special value ``-``
+ indicates stdin or stdout depending on the mode.
+
+ By default, the file is opened for reading text data, but it can also be
+ opened in binary mode or for writing. The encoding parameter can be used
+ to force a specific encoding.
+
+    The `lazy` flag controls if the file should be opened immediately or upon
+    first IO. The default is to be non-lazy for standard input and output
+    streams as well as files opened for reading, and `lazy` otherwise. When
+    opening a file lazily for reading, it is still opened temporarily for
+    validation, but will not be held open until first IO. `lazy` is mainly
+    useful when opening for writing, to avoid creating the file until it is
+    needed.
+
+ Starting with Click 2.0, files can also be opened atomically in which
+ case all writes go into a separate file in the same folder and upon
+ completion the file will be moved over to the original location. This
+ is useful if a file regularly read by other users is modified.
+
+ See :ref:`file-args` for more information.
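+
+    A minimal sketch (the argument names are illustrative):
+
+    .. code-block:: python
+
+        import click
+
+        @click.command()
+        @click.argument("src", type=click.File("r"))
+        @click.argument("dst", type=click.File("w", lazy=True))
+        def copy(src, dst):
+            dst.write(src.read())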
+ """
+
+ name = "filename"
+ envvar_list_splitter: t.ClassVar[str] = os.path.pathsep
+
+ def __init__(
+ self,
+ mode: str = "r",
+ encoding: t.Optional[str] = None,
+ errors: t.Optional[str] = "strict",
+ lazy: t.Optional[bool] = None,
+ atomic: bool = False,
+ ) -> None:
+ self.mode = mode
+ self.encoding = encoding
+ self.errors = errors
+ self.lazy = lazy
+ self.atomic = atomic
+
+ def to_info_dict(self) -> t.Dict[str, t.Any]:
+ info_dict = super().to_info_dict()
+ info_dict.update(mode=self.mode, encoding=self.encoding)
+ return info_dict
+
+ def resolve_lazy_flag(self, value: "t.Union[str, os.PathLike[str]]") -> bool:
+ if self.lazy is not None:
+ return self.lazy
+ if os.fspath(value) == "-":
+ return False
+ elif "w" in self.mode:
+ return True
+ return False
+
+ def convert(
+ self,
+ value: t.Union[str, "os.PathLike[str]", t.IO[t.Any]],
+ param: t.Optional["Parameter"],
+ ctx: t.Optional["Context"],
+ ) -> t.IO[t.Any]:
+ if _is_file_like(value):
+ return value
+
+ value = t.cast("t.Union[str, os.PathLike[str]]", value)
+
+ try:
+ lazy = self.resolve_lazy_flag(value)
+
+ if lazy:
+ lf = LazyFile(
+ value, self.mode, self.encoding, self.errors, atomic=self.atomic
+ )
+
+ if ctx is not None:
+ ctx.call_on_close(lf.close_intelligently)
+
+ return t.cast(t.IO[t.Any], lf)
+
+ f, should_close = open_stream(
+ value, self.mode, self.encoding, self.errors, atomic=self.atomic
+ )
+
+ # If a context is provided, we automatically close the file
+ # at the end of the context execution (or flush out). If a
+ # context does not exist, it's the caller's responsibility to
+ # properly close the file. This for instance happens when the
+ # type is used with prompts.
+ if ctx is not None:
+ if should_close:
+ ctx.call_on_close(safecall(f.close))
+ else:
+ ctx.call_on_close(safecall(f.flush))
+
+ return f
+ except OSError as e: # noqa: B014
+ self.fail(f"'{format_filename(value)}': {e.strerror}", param, ctx)
+
+ def shell_complete(
+ self, ctx: "Context", param: "Parameter", incomplete: str
+ ) -> t.List["CompletionItem"]:
+ """Return a special completion marker that tells the completion
+ system to use the shell to provide file path completions.
+
+ :param ctx: Invocation context for this command.
+ :param param: The parameter that is requesting completion.
+ :param incomplete: Value being completed. May be empty.
+
+ .. versionadded:: 8.0
+ """
+ from click.shell_completion import CompletionItem
+
+ return [CompletionItem(incomplete, type="file")]
+
+
+def _is_file_like(value: t.Any) -> "te.TypeGuard[t.IO[t.Any]]":
+ return hasattr(value, "read") or hasattr(value, "write")
+
+
+class Path(ParamType):
+ """The ``Path`` type is similar to the :class:`File` type, but
+ returns the filename instead of an open file. Various checks can be
+ enabled to validate the type of file and permissions.
+
+ :param exists: The file or directory needs to exist for the value to
+ be valid. If this is not set to ``True``, and the file does not
+ exist, then all further checks are silently skipped.
+ :param file_okay: Allow a file as a value.
+ :param dir_okay: Allow a directory as a value.
+ :param readable: if true, a readable check is performed.
+ :param writable: if true, a writable check is performed.
+ :param executable: if true, an executable check is performed.
+ :param resolve_path: Make the value absolute and resolve any
+ symlinks. A ``~`` is not expanded, as this is supposed to be
+ done by the shell only.
+ :param allow_dash: Allow a single dash as a value, which indicates
+ a standard stream (but does not open it). Use
+ :func:`~click.open_file` to handle opening this value.
+ :param path_type: Convert the incoming path value to this type. If
+ ``None``, keep Python's default, which is ``str``. Useful to
+ convert to :class:`pathlib.Path`.
+
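+    A minimal sketch (the option name is illustrative):
+
+    .. code-block:: python
+
+        import pathlib
+
+        import click
+
+        @click.command()
+        @click.option(
+            "--config",
+            type=click.Path(exists=True, dir_okay=False, path_type=pathlib.Path),
+        )
+        def run(config):
+            click.echo(config.read_text())
+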
+ .. versionchanged:: 8.1
+ Added the ``executable`` parameter.
+
+ .. versionchanged:: 8.0
+ Allow passing ``path_type=pathlib.Path``.
+
+ .. versionchanged:: 6.0
+ Added the ``allow_dash`` parameter.
+ """
+
+ envvar_list_splitter: t.ClassVar[str] = os.path.pathsep
+
+ def __init__(
+ self,
+ exists: bool = False,
+ file_okay: bool = True,
+ dir_okay: bool = True,
+ writable: bool = False,
+ readable: bool = True,
+ resolve_path: bool = False,
+ allow_dash: bool = False,
+ path_type: t.Optional[t.Type[t.Any]] = None,
+ executable: bool = False,
+ ):
+ self.exists = exists
+ self.file_okay = file_okay
+ self.dir_okay = dir_okay
+ self.readable = readable
+ self.writable = writable
+ self.executable = executable
+ self.resolve_path = resolve_path
+ self.allow_dash = allow_dash
+ self.type = path_type
+
+ if self.file_okay and not self.dir_okay:
+ self.name: str = _("file")
+ elif self.dir_okay and not self.file_okay:
+ self.name = _("directory")
+ else:
+ self.name = _("path")
+
+ def to_info_dict(self) -> t.Dict[str, t.Any]:
+ info_dict = super().to_info_dict()
+ info_dict.update(
+ exists=self.exists,
+ file_okay=self.file_okay,
+ dir_okay=self.dir_okay,
+ writable=self.writable,
+ readable=self.readable,
+ allow_dash=self.allow_dash,
+ )
+ return info_dict
+
+ def coerce_path_result(
+ self, value: "t.Union[str, os.PathLike[str]]"
+ ) -> "t.Union[str, bytes, os.PathLike[str]]":
+ if self.type is not None and not isinstance(value, self.type):
+ if self.type is str:
+ return os.fsdecode(value)
+ elif self.type is bytes:
+ return os.fsencode(value)
+ else:
+ return t.cast("os.PathLike[str]", self.type(value))
+
+ return value
+
+ def convert(
+ self,
+ value: "t.Union[str, os.PathLike[str]]",
+ param: t.Optional["Parameter"],
+ ctx: t.Optional["Context"],
+ ) -> "t.Union[str, bytes, os.PathLike[str]]":
+ rv = value
+
+ is_dash = self.file_okay and self.allow_dash and rv in (b"-", "-")
+
+ if not is_dash:
+ if self.resolve_path:
+ # os.path.realpath doesn't resolve symlinks on Windows
+ # until Python 3.8. Use pathlib for now.
+ import pathlib
+
+ rv = os.fsdecode(pathlib.Path(rv).resolve())
+
+ try:
+ st = os.stat(rv)
+ except OSError:
+ if not self.exists:
+ return self.coerce_path_result(rv)
+ self.fail(
+ _("{name} {filename!r} does not exist.").format(
+ name=self.name.title(), filename=format_filename(value)
+ ),
+ param,
+ ctx,
+ )
+
+ if not self.file_okay and stat.S_ISREG(st.st_mode):
+ self.fail(
+ _("{name} {filename!r} is a file.").format(
+ name=self.name.title(), filename=format_filename(value)
+ ),
+ param,
+ ctx,
+ )
+ if not self.dir_okay and stat.S_ISDIR(st.st_mode):
+ self.fail(
+ _("{name} '{filename}' is a directory.").format(
+ name=self.name.title(), filename=format_filename(value)
+ ),
+ param,
+ ctx,
+ )
+
+ if self.readable and not os.access(rv, os.R_OK):
+ self.fail(
+ _("{name} {filename!r} is not readable.").format(
+ name=self.name.title(), filename=format_filename(value)
+ ),
+ param,
+ ctx,
+ )
+
+ if self.writable and not os.access(rv, os.W_OK):
+ self.fail(
+ _("{name} {filename!r} is not writable.").format(
+ name=self.name.title(), filename=format_filename(value)
+ ),
+ param,
+ ctx,
+ )
+
+ if self.executable and not os.access(value, os.X_OK):
+ self.fail(
+ _("{name} {filename!r} is not executable.").format(
+ name=self.name.title(), filename=format_filename(value)
+ ),
+ param,
+ ctx,
+ )
+
+ return self.coerce_path_result(rv)
+
+ def shell_complete(
+ self, ctx: "Context", param: "Parameter", incomplete: str
+ ) -> t.List["CompletionItem"]:
+ """Return a special completion marker that tells the completion
+ system to use the shell to provide path completions for only
+ directories or any paths.
+
+ :param ctx: Invocation context for this command.
+ :param param: The parameter that is requesting completion.
+ :param incomplete: Value being completed. May be empty.
+
+ .. versionadded:: 8.0
+ """
+ from click.shell_completion import CompletionItem
+
+ type = "dir" if self.dir_okay and not self.file_okay else "file"
+ return [CompletionItem(incomplete, type=type)]
+
+
+class Tuple(CompositeParamType):
+ """The default behavior of Click is to apply a type on a value directly.
+ This works well in most cases, except for when `nargs` is set to a fixed
+ count and different types should be used for different items. In this
+ case the :class:`Tuple` type can be used. This type can only be used
+ if `nargs` is set to a fixed number.
+
+ For more information see :ref:`tuple-type`.
+
+ This can be selected by using a Python tuple literal as a type.
+
+ :param types: a list of types that should be used for the tuple items.
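+
+    A minimal sketch (the option name is illustrative; a tuple type implies
+    a matching fixed ``nargs``):
+
+    .. code-block:: python
+
+        import click
+
+        @click.command()
+        @click.option("--item", type=(str, int))
+        def put(item):
+            name, count = item
+            click.echo(f"{name}={count}")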
+ """
+
+ def __init__(self, types: t.Sequence[t.Union[t.Type[t.Any], ParamType]]) -> None:
+ self.types: t.Sequence[ParamType] = [convert_type(ty) for ty in types]
+
+ def to_info_dict(self) -> t.Dict[str, t.Any]:
+ info_dict = super().to_info_dict()
+ info_dict["types"] = [t.to_info_dict() for t in self.types]
+ return info_dict
+
+ @property
+ def name(self) -> str: # type: ignore
+ return f"<{' '.join(ty.name for ty in self.types)}>"
+
+ @property
+ def arity(self) -> int: # type: ignore
+ return len(self.types)
+
+ def convert(
+ self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"]
+ ) -> t.Any:
+ len_type = len(self.types)
+ len_value = len(value)
+
+ if len_value != len_type:
+ self.fail(
+ ngettext(
+ "{len_type} values are required, but {len_value} was given.",
+ "{len_type} values are required, but {len_value} were given.",
+ len_value,
+ ).format(len_type=len_type, len_value=len_value),
+ param=param,
+ ctx=ctx,
+ )
+
+ return tuple(ty(x, param, ctx) for ty, x in zip(self.types, value))
+
+
+def convert_type(ty: t.Optional[t.Any], default: t.Optional[t.Any] = None) -> ParamType:
+ """Find the most appropriate :class:`ParamType` for the given Python
+ type. If the type isn't provided, it can be inferred from a default
+ value.
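+
+    For example, using this module's type constants (illustrative):
+
+    .. code-block:: python
+
+        convert_type(int) is INT                  # True
+        convert_type(None, default=1.0) is FLOAT  # True
+        convert_type((str, int))                  # a Tuple of [STRING, INT]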
+ """
+ guessed_type = False
+
+ if ty is None and default is not None:
+ if isinstance(default, (tuple, list)):
+ # If the default is empty, ty will remain None and will
+ # return STRING.
+ if default:
+ item = default[0]
+
+ # A tuple of tuples needs to detect the inner types.
+ # Can't call convert recursively because that would
+ # incorrectly unwind the tuple to a single type.
+ if isinstance(item, (tuple, list)):
+ ty = tuple(map(type, item))
+ else:
+ ty = type(item)
+ else:
+ ty = type(default)
+
+ guessed_type = True
+
+ if isinstance(ty, tuple):
+ return Tuple(ty)
+
+ if isinstance(ty, ParamType):
+ return ty
+
+ if ty is str or ty is None:
+ return STRING
+
+ if ty is int:
+ return INT
+
+ if ty is float:
+ return FLOAT
+
+ if ty is bool:
+ return BOOL
+
+ if guessed_type:
+ return STRING
+
+ if __debug__:
+ try:
+ if issubclass(ty, ParamType):
+ raise AssertionError(
+ f"Attempted to use an uninstantiated parameter type ({ty})."
+ )
+ except TypeError:
+ # ty is an instance (correct), so issubclass fails.
+ pass
+
+ return FuncParamType(ty)
+
+
+#: A dummy parameter type that just does nothing. From a user's
+#: perspective this appears to just be the same as `STRING` but
+#: internally no string conversion takes place if the input was bytes.
+#: This is usually useful when working with file paths as they can
+#: appear in bytes and unicode.
+#:
+#: For path related uses the :class:`Path` type is a better choice but
+#: there are situations where an unprocessed type is useful, which is why
+#: it is provided.
+#:
+#: .. versionadded:: 4.0
+UNPROCESSED = UnprocessedParamType()
+
+#: A unicode string parameter type which is the implicit default. This
+#: can also be selected by using ``str`` as type.
+STRING = StringParamType()
+
+#: An integer parameter. This can also be selected by using ``int`` as
+#: type.
+INT = IntParamType()
+
+#: A floating point value parameter. This can also be selected by using
+#: ``float`` as type.
+FLOAT = FloatParamType()
+
+#: A boolean parameter. This is the default for boolean flags. This can
+#: also be selected by using ``bool`` as a type.
+BOOL = BoolParamType()
+
+#: A UUID parameter.
+UUID = UUIDParameterType()
diff --git a/billinglayer/python/click/utils.py b/billinglayer/python/click/utils.py
new file mode 100644
index 0000000..d536434
--- /dev/null
+++ b/billinglayer/python/click/utils.py
@@ -0,0 +1,624 @@
+import os
+import re
+import sys
+import typing as t
+from functools import update_wrapper
+from types import ModuleType
+from types import TracebackType
+
+from ._compat import _default_text_stderr
+from ._compat import _default_text_stdout
+from ._compat import _find_binary_writer
+from ._compat import auto_wrap_for_ansi
+from ._compat import binary_streams
+from ._compat import open_stream
+from ._compat import should_strip_ansi
+from ._compat import strip_ansi
+from ._compat import text_streams
+from ._compat import WIN
+from .globals import resolve_color_default
+
+if t.TYPE_CHECKING:
+ import typing_extensions as te
+
+ P = te.ParamSpec("P")
+
+R = t.TypeVar("R")
+
+
+def _posixify(name: str) -> str:
+ return "-".join(name.split()).lower()
+
+
+def safecall(func: "t.Callable[P, R]") -> "t.Callable[P, t.Optional[R]]":
+ """Wraps a function so that it swallows exceptions."""
+
+ def wrapper(*args: "P.args", **kwargs: "P.kwargs") -> t.Optional[R]:
+ try:
+ return func(*args, **kwargs)
+ except Exception:
+ pass
+ return None
+
+ return update_wrapper(wrapper, func)
+
+
+def make_str(value: t.Any) -> str:
+ """Converts a value into a valid string."""
+ if isinstance(value, bytes):
+ try:
+ return value.decode(sys.getfilesystemencoding())
+ except UnicodeError:
+ return value.decode("utf-8", "replace")
+ return str(value)
+
+
+def make_default_short_help(help: str, max_length: int = 45) -> str:
+ """Returns a condensed version of help string."""
+ # Consider only the first paragraph.
+ paragraph_end = help.find("\n\n")
+
+ if paragraph_end != -1:
+ help = help[:paragraph_end]
+
+ # Collapse newlines, tabs, and spaces.
+ words = help.split()
+
+ if not words:
+ return ""
+
+ # The first paragraph started with a "no rewrap" marker, ignore it.
+ if words[0] == "\b":
+ words = words[1:]
+
+ total_length = 0
+ last_index = len(words) - 1
+
+ for i, word in enumerate(words):
+ total_length += len(word) + (i > 0)
+
+ if total_length > max_length: # too long, truncate
+ break
+
+ if word[-1] == ".": # sentence end, truncate without "..."
+ return " ".join(words[: i + 1])
+
+ if total_length == max_length and i != last_index:
+ break # not at sentence end, truncate with "..."
+ else:
+ return " ".join(words) # no truncation needed
+
+ # Account for the length of the suffix.
+ total_length += len("...")
+
+ # remove words until the length is short enough
+ while i > 0:
+ total_length -= len(words[i]) + (i > 0)
+
+ if total_length <= max_length:
+ break
+
+ i -= 1
+
+ return " ".join(words[:i]) + "..."
+
+
+class LazyFile:
+ """A lazy file works like a regular file but it does not fully open
+ the file but it does perform some basic checks early to see if the
+ filename parameter does make sense. This is useful for safely opening
+ files for writing.
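+
+    A minimal sketch (the file name is illustrative):
+
+    .. code-block:: python
+
+        lf = LazyFile("out.txt", mode="w")  # nothing is opened yet
+        lf.write("hello")                   # opened on first IO
+        lf.close_intelligently()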
+ """
+
+ def __init__(
+ self,
+ filename: t.Union[str, "os.PathLike[str]"],
+ mode: str = "r",
+ encoding: t.Optional[str] = None,
+ errors: t.Optional[str] = "strict",
+ atomic: bool = False,
+ ):
+ self.name: str = os.fspath(filename)
+ self.mode = mode
+ self.encoding = encoding
+ self.errors = errors
+ self.atomic = atomic
+ self._f: t.Optional[t.IO[t.Any]]
+ self.should_close: bool
+
+ if self.name == "-":
+ self._f, self.should_close = open_stream(filename, mode, encoding, errors)
+ else:
+ if "r" in mode:
+ # Open and close the file in case we're opening it for
+ # reading so that we can catch at least some errors in
+ # some cases early.
+ open(filename, mode).close()
+ self._f = None
+ self.should_close = True
+
+ def __getattr__(self, name: str) -> t.Any:
+ return getattr(self.open(), name)
+
+ def __repr__(self) -> str:
+ if self._f is not None:
+ return repr(self._f)
+ return f""
+
+ def open(self) -> t.IO[t.Any]:
+ """Opens the file if it's not yet open. This call might fail with
+ a :exc:`FileError`. Not handling this error will produce an error
+ that Click shows.
+ """
+ if self._f is not None:
+ return self._f
+ try:
+ rv, self.should_close = open_stream(
+ self.name, self.mode, self.encoding, self.errors, atomic=self.atomic
+ )
+ except OSError as e: # noqa: E402
+ from .exceptions import FileError
+
+ raise FileError(self.name, hint=e.strerror) from e
+ self._f = rv
+ return rv
+
+ def close(self) -> None:
+ """Closes the underlying file, no matter what."""
+ if self._f is not None:
+ self._f.close()
+
+ def close_intelligently(self) -> None:
+ """This function only closes the file if it was opened by the lazy
+ file wrapper. For instance this will never close stdin.
+ """
+ if self.should_close:
+ self.close()
+
+ def __enter__(self) -> "LazyFile":
+ return self
+
+ def __exit__(
+ self,
+ exc_type: t.Optional[t.Type[BaseException]],
+ exc_value: t.Optional[BaseException],
+ tb: t.Optional[TracebackType],
+ ) -> None:
+ self.close_intelligently()
+
+ def __iter__(self) -> t.Iterator[t.AnyStr]:
+ self.open()
+ return iter(self._f) # type: ignore
+
+
+class KeepOpenFile:
+ def __init__(self, file: t.IO[t.Any]) -> None:
+ self._file: t.IO[t.Any] = file
+
+ def __getattr__(self, name: str) -> t.Any:
+ return getattr(self._file, name)
+
+ def __enter__(self) -> "KeepOpenFile":
+ return self
+
+ def __exit__(
+ self,
+ exc_type: t.Optional[t.Type[BaseException]],
+ exc_value: t.Optional[BaseException],
+ tb: t.Optional[TracebackType],
+ ) -> None:
+ pass
+
+ def __repr__(self) -> str:
+ return repr(self._file)
+
+ def __iter__(self) -> t.Iterator[t.AnyStr]:
+ return iter(self._file)
+
+
+def echo(
+ message: t.Optional[t.Any] = None,
+ file: t.Optional[t.IO[t.Any]] = None,
+ nl: bool = True,
+ err: bool = False,
+ color: t.Optional[bool] = None,
+) -> None:
+ """Print a message and newline to stdout or a file. This should be
+ used instead of :func:`print` because it provides better support
+ for different data, files, and environments.
+
+ Compared to :func:`print`, this does the following:
+
+ - Ensures that the output encoding is not misconfigured on Linux.
+ - Supports Unicode in the Windows console.
+ - Supports writing to binary outputs, and supports writing bytes
+ to text outputs.
+ - Supports colors and styles on Windows.
+ - Removes ANSI color and style codes if the output does not look
+ like an interactive terminal.
+ - Always flushes the output.
+
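+    For example (illustrative):
+
+    .. code-block:: python
+
+        import click
+
+        click.echo("plain text")
+        click.echo(b"raw bytes")
+        click.echo(click.style("problem", fg="red"), err=True)
+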
+ :param message: The string or bytes to output. Other objects are
+ converted to strings.
+ :param file: The file to write to. Defaults to ``stdout``.
+ :param err: Write to ``stderr`` instead of ``stdout``.
+ :param nl: Print a newline after the message. Enabled by default.
+ :param color: Force showing or hiding colors and other styles. By
+ default Click will remove color if the output does not look like
+ an interactive terminal.
+
+ .. versionchanged:: 6.0
+ Support Unicode output on the Windows console. Click does not
+ modify ``sys.stdout``, so ``sys.stdout.write()`` and ``print()``
+ will still not support Unicode.
+
+ .. versionchanged:: 4.0
+ Added the ``color`` parameter.
+
+ .. versionadded:: 3.0
+ Added the ``err`` parameter.
+
+ .. versionchanged:: 2.0
+ Support colors on Windows if colorama is installed.
+ """
+ if file is None:
+ if err:
+ file = _default_text_stderr()
+ else:
+ file = _default_text_stdout()
+
+ # There are no standard streams attached to write to. For example,
+ # pythonw on Windows.
+ if file is None:
+ return
+
+ # Convert non bytes/text into the native string type.
+ if message is not None and not isinstance(message, (str, bytes, bytearray)):
+ out: t.Optional[t.Union[str, bytes]] = str(message)
+ else:
+ out = message
+
+ if nl:
+ out = out or ""
+ if isinstance(out, str):
+ out += "\n"
+ else:
+ out += b"\n"
+
+ if not out:
+ file.flush()
+ return
+
+ # If there is a message and the value looks like bytes, we manually
+ # need to find the binary stream and write the message in there.
+ # This is done separately so that most stream types will work as you
+    # would expect. E.g. you can write to a StringIO for other cases.
+ if isinstance(out, (bytes, bytearray)):
+ binary_file = _find_binary_writer(file)
+
+ if binary_file is not None:
+ file.flush()
+ binary_file.write(out)
+ binary_file.flush()
+ return
+
+ # ANSI style code support. For no message or bytes, nothing happens.
+ # When outputting to a file instead of a terminal, strip codes.
+ else:
+ color = resolve_color_default(color)
+
+ if should_strip_ansi(file, color):
+ out = strip_ansi(out)
+ elif WIN:
+ if auto_wrap_for_ansi is not None:
+ file = auto_wrap_for_ansi(file) # type: ignore
+ elif not color:
+ out = strip_ansi(out)
+
+ file.write(out) # type: ignore
+ file.flush()
+
+
+def get_binary_stream(name: "te.Literal['stdin', 'stdout', 'stderr']") -> t.BinaryIO:
+ """Returns a system stream for byte processing.
+
+ :param name: the name of the stream to open. Valid names are ``'stdin'``,
+ ``'stdout'`` and ``'stderr'``
+ """
+ opener = binary_streams.get(name)
+ if opener is None:
+ raise TypeError(f"Unknown standard stream '{name}'")
+ return opener()
+
+
+def get_text_stream(
+ name: "te.Literal['stdin', 'stdout', 'stderr']",
+ encoding: t.Optional[str] = None,
+ errors: t.Optional[str] = "strict",
+) -> t.TextIO:
+ """Returns a system stream for text processing. This usually returns
+ a wrapped stream around a binary stream returned from
+ :func:`get_binary_stream` but it also can take shortcuts for already
+ correctly configured streams.
+
+ :param name: the name of the stream to open. Valid names are ``'stdin'``,
+ ``'stdout'`` and ``'stderr'``
+ :param encoding: overrides the detected default encoding.
+ :param errors: overrides the default error mode.
+ """
+ opener = text_streams.get(name)
+ if opener is None:
+ raise TypeError(f"Unknown standard stream '{name}'")
+ return opener(encoding, errors)
+
+
+def open_file(
+ filename: str,
+ mode: str = "r",
+ encoding: t.Optional[str] = None,
+ errors: t.Optional[str] = "strict",
+ lazy: bool = False,
+ atomic: bool = False,
+) -> t.IO[t.Any]:
+ """Open a file, with extra behavior to handle ``'-'`` to indicate
+ a standard stream, lazy open on write, and atomic write. Similar to
+ the behavior of the :class:`~click.File` param type.
+
+ If ``'-'`` is given to open ``stdout`` or ``stdin``, the stream is
+ wrapped so that using it in a context manager will not close it.
+ This makes it possible to use the function without accidentally
+ closing a standard stream:
+
+ .. code-block:: python
+
+ with open_file(filename) as f:
+ ...
+
+ :param filename: The name of the file to open, or ``'-'`` for
+ ``stdin``/``stdout``.
+ :param mode: The mode in which to open the file.
+ :param encoding: The encoding to decode or encode a file opened in
+ text mode.
+ :param errors: The error handling mode.
+ :param lazy: Wait to open the file until it is accessed. For read
+ mode, the file is temporarily opened to raise access errors
+ early, then closed until it is read again.
+ :param atomic: Write to a temporary file and replace the given file
+ on close.
+
+ .. versionadded:: 3.0
+ """
+ if lazy:
+ return t.cast(
+ t.IO[t.Any], LazyFile(filename, mode, encoding, errors, atomic=atomic)
+ )
+
+ f, should_close = open_stream(filename, mode, encoding, errors, atomic=atomic)
+
+ if not should_close:
+ f = t.cast(t.IO[t.Any], KeepOpenFile(f))
+
+ return f
+
+
+def format_filename(
+ filename: "t.Union[str, bytes, os.PathLike[str], os.PathLike[bytes]]",
+ shorten: bool = False,
+) -> str:
+ """Format a filename as a string for display. Ensures the filename can be
+ displayed by replacing any invalid bytes or surrogate escapes in the name
+ with the replacement character ``�``.
+
+ Invalid bytes or surrogate escapes will raise an error when written to a
+ stream with ``errors="strict". This will typically happen with ``stdout``
+ when the locale is something like ``en_GB.UTF-8``.
+
+ Many scenarios *are* safe to write surrogates though, due to PEP 538 and
+ PEP 540, including:
+
+ - Writing to ``stderr``, which uses ``errors="backslashreplace"``.
+ - The system has ``LANG=C.UTF-8``, ``C``, or ``POSIX``. Python opens
+ stdout and stderr with ``errors="surrogateescape"``.
+ - None of ``LANG/LC_*`` are set. Python assumes ``LANG=C.UTF-8``.
+ - Python is started in UTF-8 mode with ``PYTHONUTF8=1`` or ``-X utf8``.
+ Python opens stdout and stderr with ``errors="surrogateescape"``.
+
+ :param filename: formats a filename for UI display. This will also convert
+ the filename into unicode without failing.
+    :param shorten: this optionally shortens the filename to strip off the
+ path that leads up to it.
+ """
+ if shorten:
+ filename = os.path.basename(filename)
+ else:
+ filename = os.fspath(filename)
+
+ if isinstance(filename, bytes):
+ filename = filename.decode(sys.getfilesystemencoding(), "replace")
+ else:
+ filename = filename.encode("utf-8", "surrogateescape").decode(
+ "utf-8", "replace"
+ )
+
+ return filename
+
+
+def get_app_dir(app_name: str, roaming: bool = True, force_posix: bool = False) -> str:
+ r"""Returns the config folder for the application. The default behavior
+ is to return whatever is most appropriate for the operating system.
+
+ To give you an idea, for an app called ``"Foo Bar"``, something like
+ the following folders could be returned:
+
+ Mac OS X:
+ ``~/Library/Application Support/Foo Bar``
+ Mac OS X (POSIX):
+ ``~/.foo-bar``
+ Unix:
+ ``~/.config/foo-bar``
+ Unix (POSIX):
+ ``~/.foo-bar``
+    Windows (roaming):
+      ``C:\Users\<user>\AppData\Roaming\Foo Bar``
+    Windows (not roaming):
+      ``C:\Users\<user>\AppData\Local\Foo Bar``
+
+ .. versionadded:: 2.0
+
+ :param app_name: the application name. This should be properly capitalized
+ and can contain whitespace.
+ :param roaming: controls if the folder should be roaming or not on Windows.
+ Has no effect otherwise.
+ :param force_posix: if this is set to `True` then on any POSIX system the
+ folder will be stored in the home folder with a leading
+ dot instead of the XDG config home or darwin's
+ application support folder.
+ """
+ if WIN:
+ key = "APPDATA" if roaming else "LOCALAPPDATA"
+ folder = os.environ.get(key)
+ if folder is None:
+ folder = os.path.expanduser("~")
+ return os.path.join(folder, app_name)
+ if force_posix:
+ return os.path.join(os.path.expanduser(f"~/.{_posixify(app_name)}"))
+ if sys.platform == "darwin":
+ return os.path.join(
+ os.path.expanduser("~/Library/Application Support"), app_name
+ )
+ return os.path.join(
+ os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config")),
+ _posixify(app_name),
+ )
+
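+# Usage sketch: on a default Linux setup, get_app_dir("Foo Bar") returns
+# "~/.config/foo-bar" (expanded), honoring XDG_CONFIG_HOME when it is set.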
+
+class PacifyFlushWrapper:
+ """This wrapper is used to catch and suppress BrokenPipeErrors resulting
+ from ``.flush()`` being called on broken pipe during the shutdown/final-GC
+ of the Python interpreter. Notably ``.flush()`` is always called on
+ ``sys.stdout`` and ``sys.stderr``. So as to have minimal impact on any
+ other cleanup code, and the case where the underlying file is not a broken
+ pipe, all calls and attributes are proxied.
+ """
+
+ def __init__(self, wrapped: t.IO[t.Any]) -> None:
+ self.wrapped = wrapped
+
+ def flush(self) -> None:
+ try:
+ self.wrapped.flush()
+ except OSError as e:
+ import errno
+
+ if e.errno != errno.EPIPE:
+ raise
+
+ def __getattr__(self, attr: str) -> t.Any:
+ return getattr(self.wrapped, attr)
+
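+# Usage sketch (hypothetical caller code): wrap a stream so interpreter-exit
+# flushes on a dead pipe are swallowed instead of raising:
+#
+#     sys.stdout = PacifyFlushWrapper(sys.stdout)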
+
+def _detect_program_name(
+ path: t.Optional[str] = None, _main: t.Optional[ModuleType] = None
+) -> str:
+ """Determine the command used to run the program, for use in help
+ text. If a file or entry point was executed, the file name is
+ returned. If ``python -m`` was used to execute a module or package,
+ ``python -m name`` is returned.
+
+    This doesn't try to be too precise; the goal is to give a concise
+ name for help text. Files are only shown as their name without the
+ path. ``python`` is only shown for modules, and the full path to
+ ``sys.executable`` is not shown.
+
+ :param path: The Python file being executed. Python puts this in
+ ``sys.argv[0]``, which is used by default.
+ :param _main: The ``__main__`` module. This should only be passed
+ during internal testing.
+
+ .. versionadded:: 8.0
+ Based on command args detection in the Werkzeug reloader.
+
+ :meta private:
+ """
+ if _main is None:
+ _main = sys.modules["__main__"]
+
+ if not path:
+ path = sys.argv[0]
+
+ # The value of __package__ indicates how Python was called. It may
+ # not exist if a setuptools script is installed as an egg. It may be
+ # set incorrectly for entry points created with pip on Windows.
+ # It is set to "" inside a Shiv or PEX zipapp.
+ if getattr(_main, "__package__", None) in {None, ""} or (
+ os.name == "nt"
+ and _main.__package__ == ""
+ and not os.path.exists(path)
+ and os.path.exists(f"{path}.exe")
+ ):
+ # Executed a file, like "python app.py".
+ return os.path.basename(path)
+
+ # Executed a module, like "python -m example".
+ # Rewritten by Python from "-m script" to "/path/to/script.py".
+ # Need to look at main module to determine how it was executed.
+ py_module = t.cast(str, _main.__package__)
+ name = os.path.splitext(os.path.basename(path))[0]
+
+ # A submodule like "example.cli".
+ if name != "__main__":
+ py_module = f"{py_module}.{name}"
+
+ return f"python -m {py_module.lstrip('.')}"
+
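+# Illustrative results (hypothetical invocations): running "python app.py"
+# yields "app.py"; "python -m example.cli" yields "python -m example.cli".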
+
+def _expand_args(
+ args: t.Iterable[str],
+ *,
+ user: bool = True,
+ env: bool = True,
+ glob_recursive: bool = True,
+) -> t.List[str]:
+ """Simulate Unix shell expansion with Python functions.
+
+ See :func:`glob.glob`, :func:`os.path.expanduser`, and
+ :func:`os.path.expandvars`.
+
+ This is intended for use on Windows, where the shell does not do any
+ expansion. It may not exactly match what a Unix shell would do.
+
+ :param args: List of command line arguments to expand.
+ :param user: Expand user home directory.
+ :param env: Expand environment variables.
+ :param glob_recursive: ``**`` matches directories recursively.
+
+ .. versionchanged:: 8.1
+ Invalid glob patterns are treated as empty expansions rather
+ than raising an error.
+
+ .. versionadded:: 8.0
+
+ :meta private:
+ """
+ from glob import glob
+
+ out = []
+
+ for arg in args:
+ if user:
+ arg = os.path.expanduser(arg)
+
+ if env:
+ arg = os.path.expandvars(arg)
+
+ try:
+ matches = glob(arg, recursive=glob_recursive)
+ except re.error:
+ matches = []
+
+ if not matches:
+ out.append(arg)
+ else:
+ out.extend(matches)
+
+ return out
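+
+# Example (hypothetical arguments): _expand_args(["~/notes/*.txt", "%TEMP%"])
+# expands the home directory and the environment variable, and replaces the
+# glob pattern with its matches; patterns with no matches are kept verbatim.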
diff --git a/billinglayer/python/cryptography-41.0.3.dist-info/INSTALLER b/billinglayer/python/cryptography-41.0.3.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/billinglayer/python/cryptography-41.0.3.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/billinglayer/python/cryptography-41.0.3.dist-info/LICENSE b/billinglayer/python/cryptography-41.0.3.dist-info/LICENSE
new file mode 100644
index 0000000..b11f379
--- /dev/null
+++ b/billinglayer/python/cryptography-41.0.3.dist-info/LICENSE
@@ -0,0 +1,3 @@
+This software is made available under the terms of *either* of the licenses
+found in LICENSE.APACHE or LICENSE.BSD. Contributions to cryptography are made
+under the terms of *both* these licenses.
diff --git a/billinglayer/python/cryptography-41.0.3.dist-info/LICENSE.APACHE b/billinglayer/python/cryptography-41.0.3.dist-info/LICENSE.APACHE
new file mode 100644
index 0000000..62589ed
--- /dev/null
+++ b/billinglayer/python/cryptography-41.0.3.dist-info/LICENSE.APACHE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ https://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ https://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/billinglayer/python/cryptography-41.0.3.dist-info/LICENSE.BSD b/billinglayer/python/cryptography-41.0.3.dist-info/LICENSE.BSD
new file mode 100644
index 0000000..ec1a29d
--- /dev/null
+++ b/billinglayer/python/cryptography-41.0.3.dist-info/LICENSE.BSD
@@ -0,0 +1,27 @@
+Copyright (c) Individual contributors.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+ 1. Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+ 3. Neither the name of PyCA Cryptography nor the names of its contributors
+ may be used to endorse or promote products derived from this software
+ without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/billinglayer/python/cryptography-41.0.3.dist-info/METADATA b/billinglayer/python/cryptography-41.0.3.dist-info/METADATA
new file mode 100644
index 0000000..1eba09c
--- /dev/null
+++ b/billinglayer/python/cryptography-41.0.3.dist-info/METADATA
@@ -0,0 +1,133 @@
+Metadata-Version: 2.1
+Name: cryptography
+Version: 41.0.3
+Summary: cryptography is a package which provides cryptographic recipes and primitives to Python developers.
+Author-email: The Python Cryptographic Authority and individual contributors
+License: Apache-2.0 OR BSD-3-Clause
+Project-URL: homepage, https://github.com/pyca/cryptography
+Project-URL: documentation, https://cryptography.io/
+Project-URL: source, https://github.com/pyca/cryptography/
+Project-URL: issues, https://github.com/pyca/cryptography/issues
+Project-URL: changelog, https://cryptography.io/en/latest/changelog/
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Natural Language :: English
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Operating System :: POSIX
+Classifier: Operating System :: POSIX :: BSD
+Classifier: Operating System :: POSIX :: Linux
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Security :: Cryptography
+Requires-Python: >=3.7
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+License-File: LICENSE.APACHE
+License-File: LICENSE.BSD
+Requires-Dist: cffi (>=1.12)
+Provides-Extra: docs
+Requires-Dist: sphinx (>=5.3.0) ; extra == 'docs'
+Requires-Dist: sphinx-rtd-theme (>=1.1.1) ; extra == 'docs'
+Provides-Extra: docstest
+Requires-Dist: pyenchant (>=1.6.11) ; extra == 'docstest'
+Requires-Dist: twine (>=1.12.0) ; extra == 'docstest'
+Requires-Dist: sphinxcontrib-spelling (>=4.0.1) ; extra == 'docstest'
+Provides-Extra: nox
+Requires-Dist: nox ; extra == 'nox'
+Provides-Extra: pep8test
+Requires-Dist: black ; extra == 'pep8test'
+Requires-Dist: ruff ; extra == 'pep8test'
+Requires-Dist: mypy ; extra == 'pep8test'
+Requires-Dist: check-sdist ; extra == 'pep8test'
+Provides-Extra: sdist
+Requires-Dist: build ; extra == 'sdist'
+Provides-Extra: ssh
+Requires-Dist: bcrypt (>=3.1.5) ; extra == 'ssh'
+Provides-Extra: test
+Requires-Dist: pytest (>=6.2.0) ; extra == 'test'
+Requires-Dist: pytest-benchmark ; extra == 'test'
+Requires-Dist: pytest-cov ; extra == 'test'
+Requires-Dist: pytest-xdist ; extra == 'test'
+Requires-Dist: pretend ; extra == 'test'
+Provides-Extra: test-randomorder
+Requires-Dist: pytest-randomly ; extra == 'test-randomorder'
+
+pyca/cryptography
+=================
+
+.. image:: https://img.shields.io/pypi/v/cryptography.svg
+ :target: https://pypi.org/project/cryptography/
+ :alt: Latest Version
+
+.. image:: https://readthedocs.org/projects/cryptography/badge/?version=latest
+ :target: https://cryptography.io
+ :alt: Latest Docs
+
+.. image:: https://github.com/pyca/cryptography/workflows/CI/badge.svg?branch=main
+ :target: https://github.com/pyca/cryptography/actions?query=workflow%3ACI+branch%3Amain
+
+
+``cryptography`` is a package which provides cryptographic recipes and
+primitives to Python developers. Our goal is for it to be your "cryptographic
+standard library". It supports Python 3.7+ and PyPy3 7.3.10+.
+
+``cryptography`` includes both high level recipes and low level interfaces to
+common cryptographic algorithms such as symmetric ciphers, message digests, and
+key derivation functions. For example, to encrypt something with
+``cryptography``'s high level symmetric encryption recipe:
+
+.. code-block:: pycon
+
+ >>> from cryptography.fernet import Fernet
+ >>> # Put this somewhere safe!
+ >>> key = Fernet.generate_key()
+ >>> f = Fernet(key)
+ >>> token = f.encrypt(b"A really secret message. Not for prying eyes.")
+ >>> token
+ b'...'
+ >>> f.decrypt(token)
+ b'A really secret message. Not for prying eyes.'
+
+You can find more information in the `documentation`_.
+
+You can install ``cryptography`` with:
+
+.. code-block:: console
+
+ $ pip install cryptography
+
+For full details see `the installation documentation`_.
+
+Discussion
+~~~~~~~~~~
+
+If you run into bugs, you can file them in our `issue tracker`_.
+
+We maintain a `cryptography-dev`_ mailing list for development discussion.
+
+You can also join ``#pyca`` on ``irc.libera.chat`` to ask questions or get
+involved.
+
+Security
+~~~~~~~~
+
+Need to report a security issue? Please consult our `security reporting`_
+documentation.
+
+
+.. _`documentation`: https://cryptography.io/
+.. _`the installation documentation`: https://cryptography.io/en/latest/installation/
+.. _`issue tracker`: https://github.com/pyca/cryptography/issues
+.. _`cryptography-dev`: https://mail.python.org/mailman/listinfo/cryptography-dev
+.. _`security reporting`: https://cryptography.io/en/latest/security/
diff --git a/billinglayer/python/cryptography-41.0.3.dist-info/RECORD b/billinglayer/python/cryptography-41.0.3.dist-info/RECORD
new file mode 100644
index 0000000..452f033
--- /dev/null
+++ b/billinglayer/python/cryptography-41.0.3.dist-info/RECORD
@@ -0,0 +1,172 @@
+cryptography-41.0.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+cryptography-41.0.3.dist-info/LICENSE,sha256=Pgx8CRqUi4JTO6mP18u0BDLW8amsv4X1ki0vmak65rs,197
+cryptography-41.0.3.dist-info/LICENSE.APACHE,sha256=qsc7MUj20dcRHbyjIJn2jSbGRMaBOuHk8F9leaomY_4,11360
+cryptography-41.0.3.dist-info/LICENSE.BSD,sha256=YCxMdILeZHndLpeTzaJ15eY9dz2s0eymiSMqtwCPtPs,1532
+cryptography-41.0.3.dist-info/METADATA,sha256=fGKXAMLFFzeETs2WZI8i9seJnNtvEsWWxLx6Fk3sBTM,5175
+cryptography-41.0.3.dist-info/RECORD,,
+cryptography-41.0.3.dist-info/WHEEL,sha256=E44f5mm0c9FVszd8tVo9b_r3SFjxQ7VIZjTMBOg4nG0,148
+cryptography-41.0.3.dist-info/top_level.txt,sha256=KNaT-Sn2K4uxNaEbe6mYdDn3qWDMlp4y-MtWfB73nJc,13
+cryptography/__about__.py,sha256=QMP22GBLX29OFkJ9LPAingf-MaCOruWiMbxHS1BLIXo,445
+cryptography/__init__.py,sha256=iVPlBlXWTJyiFeRedxcbMPhyHB34viOM10d72vGnWuE,364
+cryptography/__pycache__/__about__.cpython-311.pyc,,
+cryptography/__pycache__/__init__.cpython-311.pyc,,
+cryptography/__pycache__/exceptions.cpython-311.pyc,,
+cryptography/__pycache__/fernet.cpython-311.pyc,,
+cryptography/__pycache__/utils.cpython-311.pyc,,
+cryptography/exceptions.py,sha256=EHe7XM2_OtdOM1bZE0ci-4GUhtOlEQ6fQXhK2Igf0qA,1118
+cryptography/fernet.py,sha256=TVZy4Dtkpl7kWIpvuKcNldE95IEjTQ0MfHgRsLdnDSM,6886
+cryptography/hazmat/__init__.py,sha256=5IwrLWrVp0AjEr_4FdWG_V057NSJGY_W4egNNsuct0g,455
+cryptography/hazmat/__pycache__/__init__.cpython-311.pyc,,
+cryptography/hazmat/__pycache__/_oid.cpython-311.pyc,,
+cryptography/hazmat/_oid.py,sha256=gxhMHKpu9Xsi6uHCGZ_-soYMXj_izOIFaxjUKWbCPeE,14441
+cryptography/hazmat/backends/__init__.py,sha256=O5jvKFQdZnXhKeqJ-HtulaEL9Ni7mr1mDzZY5kHlYhI,361
+cryptography/hazmat/backends/__pycache__/__init__.cpython-311.pyc,,
+cryptography/hazmat/backends/openssl/__init__.py,sha256=p3jmJfnCag9iE5sdMrN6VvVEu55u46xaS_IjoI0SrmA,305
+cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-311.pyc,,
+cryptography/hazmat/backends/openssl/__pycache__/aead.cpython-311.pyc,,
+cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-311.pyc,,
+cryptography/hazmat/backends/openssl/__pycache__/ciphers.cpython-311.pyc,,
+cryptography/hazmat/backends/openssl/__pycache__/cmac.cpython-311.pyc,,
+cryptography/hazmat/backends/openssl/__pycache__/decode_asn1.cpython-311.pyc,,
+cryptography/hazmat/backends/openssl/__pycache__/ec.cpython-311.pyc,,
+cryptography/hazmat/backends/openssl/__pycache__/rsa.cpython-311.pyc,,
+cryptography/hazmat/backends/openssl/__pycache__/utils.cpython-311.pyc,,
+cryptography/hazmat/backends/openssl/aead.py,sha256=s3zXcVQf0COIOuOzI8usebWpznGnyZ7GhnmlJYu7QXA,15967
+cryptography/hazmat/backends/openssl/backend.py,sha256=sNMXDL0YPS9vdXFoufEDFOQJh_uMfpUnErd1j1Rdzf0,73231
+cryptography/hazmat/backends/openssl/ciphers.py,sha256=lxWrvnufudsDI2bpwNs2c8XLILbAE2j2rMSD1nhnPVg,10358
+cryptography/hazmat/backends/openssl/cmac.py,sha256=pHgQOIRfR4cIDa5ltcKFtgjqPTXbOLyRQmmqv9JlbUk,3035
+cryptography/hazmat/backends/openssl/decode_asn1.py,sha256=kz6gys8wuJhrx4QyU6enYx7UatNHr0LB3TI1jH3oQ54,1148
+cryptography/hazmat/backends/openssl/ec.py,sha256=GKzh3mZKvgsM1jqM88-4XikHHalpV-Efyskclt8yxYg,11474
+cryptography/hazmat/backends/openssl/rsa.py,sha256=P_ak-2zvA6VBt_P0ldzTSCUkcjo2GhYt_HLn8CVvWtE,21825
+cryptography/hazmat/backends/openssl/utils.py,sha256=UoguO26QzwN4lsMAltsIrgAlbi3SOeSrexZs1-QPNu8,2190
+cryptography/hazmat/bindings/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180
+cryptography/hazmat/bindings/__pycache__/__init__.cpython-311.pyc,,
+cryptography/hazmat/bindings/_rust.abi3.so,sha256=NoXkkoRL2bk3270hxaqUPt4KO9kC0ZEQcNte-_H6N_4,13435728
+cryptography/hazmat/bindings/_rust/__init__.pyi,sha256=IumK7zP9Ko3HjLLb5hwZiY2rbfmfsuyTZLLcHOMvSdk,981
+cryptography/hazmat/bindings/_rust/_openssl.pyi,sha256=mpNJLuYLbCVrd5i33FBTmWwL_55Dw7JPkSLlSX9Q7oI,230
+cryptography/hazmat/bindings/_rust/asn1.pyi,sha256=9CyI-grOsLQB_hfnhJPoG9dNOdJ7Zg6B0iUpzCowh44,592
+cryptography/hazmat/bindings/_rust/exceptions.pyi,sha256=exXr2xw_0pB1kk93cYbM3MohbzoUkjOms1ZMUi0uQZE,640
+cryptography/hazmat/bindings/_rust/ocsp.pyi,sha256=RzVaLkY0y9L8W8opAL_uVD8bySKxP23pSQtEbLOStXI,905
+cryptography/hazmat/bindings/_rust/openssl/__init__.pyi,sha256=j764U4RRBZbDuOfjQxRqU7rCf74kgM-3AnTIjLdRy3E,970
+cryptography/hazmat/bindings/_rust/openssl/dh.pyi,sha256=0FVY1t5qM9HV_ZKDIcdJI2a72i1fHKyTvYIJb5UnH4M,896
+cryptography/hazmat/bindings/_rust/openssl/dsa.pyi,sha256=43in4PCsm2kz_H7RQFLBKqhDsUmb4yWop6dpYeVDg-4,764
+cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi,sha256=E2GXAgibfRGqKxskH8MfZI8gHFoMJJOTjG7Elg2gOww,629
+cryptography/hazmat/bindings/_rust/openssl/ed448.pyi,sha256=pk_kx5Biq8O53d2joOT-cXuwCrbFPicV7iaqYdeiIAI,603
+cryptography/hazmat/bindings/_rust/openssl/hashes.pyi,sha256=J8HoN0GdtPcjRAfNHr5Elva_nkmQfq63L75_z9dd8Uc,573
+cryptography/hazmat/bindings/_rust/openssl/hmac.pyi,sha256=ZmLJ73pmxcZFC1XosWEiXMRYtvJJor3ZLdCQOJu85Cw,662
+cryptography/hazmat/bindings/_rust/openssl/kdf.pyi,sha256=wPS5c7NLspM2632II0I4iH1RSxZvSRtBOVqmpyQATfk,544
+cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi,sha256=9iogF7Q4i81IkOS-IMXp6HvxFF_3cNy_ucrAjVQnn14,540
+cryptography/hazmat/bindings/_rust/openssl/x25519.pyi,sha256=-1F5QDZfrdhmDLKTeSERuuDUHBTV-EhxIYk9mjpwcG4,616
+cryptography/hazmat/bindings/_rust/openssl/x448.pyi,sha256=SdL4blscYBEvuWY4SuNAY1s5zFaGj38eQ-bulVBZvFg,590
+cryptography/hazmat/bindings/_rust/pkcs7.pyi,sha256=VkTC78wjJgb_qrboOYIFPuFZ3W46zsr6zsxnlrOMwao,460
+cryptography/hazmat/bindings/_rust/x509.pyi,sha256=j6AbXBZSXeJHLSrXnaapbiPfle-znfk9uJUa_zqxgy4,1878
+cryptography/hazmat/bindings/openssl/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180
+cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-311.pyc,,
+cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-311.pyc,,
+cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-311.pyc,,
+cryptography/hazmat/bindings/openssl/_conditional.py,sha256=DeECq7AKguhs390ZmxgItdqPLzyrKGJk-3KlHJMkXoY,9098
+cryptography/hazmat/bindings/openssl/binding.py,sha256=0x3kzvq2grHu4gbbgEIzEVrX6unp71EEs1hx0o-uuOM,6696
+cryptography/hazmat/primitives/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180
+cryptography/hazmat/primitives/__pycache__/__init__.cpython-311.pyc,,
+cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-311.pyc,,
+cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-311.pyc,,
+cryptography/hazmat/primitives/__pycache__/_serialization.cpython-311.pyc,,
+cryptography/hazmat/primitives/__pycache__/cmac.cpython-311.pyc,,
+cryptography/hazmat/primitives/__pycache__/constant_time.cpython-311.pyc,,
+cryptography/hazmat/primitives/__pycache__/hashes.cpython-311.pyc,,
+cryptography/hazmat/primitives/__pycache__/hmac.cpython-311.pyc,,
+cryptography/hazmat/primitives/__pycache__/keywrap.cpython-311.pyc,,
+cryptography/hazmat/primitives/__pycache__/padding.cpython-311.pyc,,
+cryptography/hazmat/primitives/__pycache__/poly1305.cpython-311.pyc,,
+cryptography/hazmat/primitives/_asymmetric.py,sha256=RhgcouUB6HTiFDBrR1LxqkMjpUxIiNvQ1r_zJjRG6qQ,532
+cryptography/hazmat/primitives/_cipheralgorithm.py,sha256=7LPkpw-DrgyvmBMUjvXeBvojVZPtXhFgfelUftnxPGw,1093
+cryptography/hazmat/primitives/_serialization.py,sha256=U0DU0ZzOLJppCQsh9EJH6vGYoHotBolfNyRyx3wr1l0,5216
+cryptography/hazmat/primitives/asymmetric/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180
+cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-311.pyc,,
+cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-311.pyc,,
+cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-311.pyc,,
+cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-311.pyc,,
+cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-311.pyc,,
+cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-311.pyc,,
+cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-311.pyc,,
+cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-311.pyc,,
+cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-311.pyc,,
+cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-311.pyc,,
+cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-311.pyc,,
+cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-311.pyc,,
+cryptography/hazmat/primitives/asymmetric/dh.py,sha256=XsthqjvExWWOyePs0PxT4MestU9QeGuL-Hx7fWzTguQ,7013
+cryptography/hazmat/primitives/asymmetric/dsa.py,sha256=aaTY7EMLTzaWs-jhOMpMAfa2GnfhoqsCKZPKAs35L40,8263
+cryptography/hazmat/primitives/asymmetric/ec.py,sha256=L1WoWPYevJ6Pk2T1etbnHbvr6AeXFccckPNNiyUVoNM,12867
+cryptography/hazmat/primitives/asymmetric/ed25519.py,sha256=wl2NCCP4bZdUCqZGMkOOd6eaxjU1vXPAIwzUuFPE__w,3489
+cryptography/hazmat/primitives/asymmetric/ed448.py,sha256=2MCJ87qcyCCsjj0OvrfWFxPX8CgaC3d0mr78bt_vDIY,3440
+cryptography/hazmat/primitives/asymmetric/padding.py,sha256=6p8Ojiax_2tcm1aTnNOAkinriCJ67nSTxugg34f-hzk,2717
+cryptography/hazmat/primitives/asymmetric/rsa.py,sha256=vxvOryF00WL8mZQv9bs_-LlgobYLiPYfX246_j_ICtA,11623
+cryptography/hazmat/primitives/asymmetric/types.py,sha256=LnsOJym-wmPUJ7Knu_7bCNU3kIiELCd6krOaW_JU08I,2996
+cryptography/hazmat/primitives/asymmetric/utils.py,sha256=DPTs6T4F-UhwzFQTh-1fSEpQzazH2jf2xpIro3ItF4o,790
+cryptography/hazmat/primitives/asymmetric/x25519.py,sha256=8YJAIaU7w09jTnPU_cLwd98fMHIECgfA3R7P3Ktv-CA,3437
+cryptography/hazmat/primitives/asymmetric/x448.py,sha256=y-Yj-rgciiuH1g6FJLZftvAqgOnzT1on9gCisru7vBc,3358
+cryptography/hazmat/primitives/ciphers/__init__.py,sha256=kAyb9NSczqTrCWj0HEoVp3Cxo7AHW8ibPFQz-ZHsOtA,680
+cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-311.pyc,,
+cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-311.pyc,,
+cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-311.pyc,,
+cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-311.pyc,,
+cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-311.pyc,,
+cryptography/hazmat/primitives/ciphers/aead.py,sha256=DY7qKmbt0bgB1GB7i-fQrbjEfwFG8wfUfVHvc7DA2YY,12067
+cryptography/hazmat/primitives/ciphers/algorithms.py,sha256=SCDskXc9xyzsz0NjND6tAX8t17jYTbUB2sww1ub9GuY,5000
+cryptography/hazmat/primitives/ciphers/base.py,sha256=PqNDltHdDxBhLhgtfO707H07sSOLA6ZVwjZlalOJTAo,8286
+cryptography/hazmat/primitives/ciphers/modes.py,sha256=YJQXi4PJGIIZ1rgchbMH47Ed-YiUcUSjLPEOuV8rgGE,8361
+cryptography/hazmat/primitives/cmac.py,sha256=YaeWksCYaqVoqf9zHRThAJ95ZvPUioAOfXwZUWiPzD8,2065
+cryptography/hazmat/primitives/constant_time.py,sha256=xdunWT0nf8OvKdcqUhhlFKayGp4_PgVJRU2W1wLSr_A,422
+cryptography/hazmat/primitives/hashes.py,sha256=VJpnbK2sQN2bEqwRTOoCB4nuxYx5CnqFiScMJNyhsrI,5115
+cryptography/hazmat/primitives/hmac.py,sha256=RpB3z9z5skirCQrm7zQbtnp9pLMnAjrlTUvKqF5aDDc,423
+cryptography/hazmat/primitives/kdf/__init__.py,sha256=4XibZnrYq4hh5xBjWiIXzaYW6FKx8hPbVaa_cB9zS64,750
+cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-311.pyc,,
+cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-311.pyc,,
+cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-311.pyc,,
+cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-311.pyc,,
+cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-311.pyc,,
+cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-311.pyc,,
+cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-311.pyc,,
+cryptography/hazmat/primitives/kdf/concatkdf.py,sha256=wGYWgILmxQWnCPkbAH1RpsCHrdKgmYrCEVrCvXVGCo8,3726
+cryptography/hazmat/primitives/kdf/hkdf.py,sha256=bBYr1yUIbOlJIEd6ZoLYcXm_yd-H54An9kNcFIJ3kbo,3045
+cryptography/hazmat/primitives/kdf/kbkdf.py,sha256=qPL6TmDUmkus6CW3ylTJfG8N8egZhjQOyXrSyLLpnak,9232
+cryptography/hazmat/primitives/kdf/pbkdf2.py,sha256=1CCH9Q5gXUpnZd3c8d8bCXgpJ3s2hZZGBnuG7FH1waM,2012
+cryptography/hazmat/primitives/kdf/scrypt.py,sha256=4QONhjxA_ZtuQtQ7QV3FnbB8ftrFnM52B4HPfV7hFys,2354
+cryptography/hazmat/primitives/kdf/x963kdf.py,sha256=S3B4Enk2Yxj9txpairotaXkavuZqQ6t6MB5a28U02ek,2002
+cryptography/hazmat/primitives/keywrap.py,sha256=Qb_N2V_E1Dti5VtDXnrtTYtJDZ8aMpur8BY5yxrXclg,5678
+cryptography/hazmat/primitives/padding.py,sha256=8pCeLaqwQPSGf51j06U5C_INvgYWVWPv3m9mxUERGmU,6242
+cryptography/hazmat/primitives/poly1305.py,sha256=P5EPQV-RB_FJPahpg01u0Ts4S_PnAmsroxIGXbGeRRo,355
+cryptography/hazmat/primitives/serialization/__init__.py,sha256=6ZlL3EicEzoGdMOat86w8y_XICCnlHdCjFI97rMxRDg,1653
+cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-311.pyc,,
+cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-311.pyc,,
+cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-311.pyc,,
+cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-311.pyc,,
+cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-311.pyc,,
+cryptography/hazmat/primitives/serialization/base.py,sha256=VZjIIqnbb-x38qpg2Wf_IxZvqjsgcEzNQtQoeJiQfpw,1986
+cryptography/hazmat/primitives/serialization/pkcs12.py,sha256=NOzFxArlZhdjfgfugs8nERho1eyaxujXKGUKINchek4,6767
+cryptography/hazmat/primitives/serialization/pkcs7.py,sha256=BCvlPubXQOunb76emISK89PX9qXcBQI2CRPNe85VTZk,7392
+cryptography/hazmat/primitives/serialization/ssh.py,sha256=aLCYLPY3W1kerfCwadn5aYNzwcwIQl9c7RcsB8CKfuc,51027
+cryptography/hazmat/primitives/twofactor/__init__.py,sha256=tmMZGB-g4IU1r7lIFqASU019zr0uPp_wEBYcwdDCKCA,258
+cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-311.pyc,,
+cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-311.pyc,,
+cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-311.pyc,,
+cryptography/hazmat/primitives/twofactor/hotp.py,sha256=uZ0PSKYDZOL0aAobiw1Zd2HD0W2Ei1niUNC2v7Tnpc8,3010
+cryptography/hazmat/primitives/twofactor/totp.py,sha256=cMbWlAapOM1SfezEx9MoMHpCW9ingNXCg6OsGv4T8jc,1473
+cryptography/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+cryptography/utils.py,sha256=DfdXc9M4kmAboE2a0pPiISt5LVnW-jhhXURy8nDHae0,4018
+cryptography/x509/__init__.py,sha256=DzZE8bR-3iiVi3Wrcq7-g5Pm64fCr5aqsTNyi_rjJu0,7870
+cryptography/x509/__pycache__/__init__.cpython-311.pyc,,
+cryptography/x509/__pycache__/base.cpython-311.pyc,,
+cryptography/x509/__pycache__/certificate_transparency.cpython-311.pyc,,
+cryptography/x509/__pycache__/extensions.cpython-311.pyc,,
+cryptography/x509/__pycache__/general_name.cpython-311.pyc,,
+cryptography/x509/__pycache__/name.cpython-311.pyc,,
+cryptography/x509/__pycache__/ocsp.cpython-311.pyc,,
+cryptography/x509/__pycache__/oid.cpython-311.pyc,,
+cryptography/x509/base.py,sha256=FbS6EFE3uJ3O-zbFPRjsO6DckrNSN5TJNZMJcnzUWFQ,35677
+cryptography/x509/certificate_transparency.py,sha256=6HvzAD0dlSQVxy6tnDhGj0-pisp1MaJ9bxQNRr92inI,2261
+cryptography/x509/extensions.py,sha256=rFEcfZiFvcONs1ot03d68dAMK2U75w0s3g9mhyWBRcI,68365
+cryptography/x509/general_name.py,sha256=zm8GxNgVJuLD6rN488c5zdHhxp5gUxeRzw8enZMWDQ0,7868
+cryptography/x509/name.py,sha256=aZ2dpsinhkza3eTxT1vNmWuFMQ7fmcA0hs4npgnkf9Q,14855
+cryptography/x509/ocsp.py,sha256=48iW7xbZ9mZLELSEl7Wwjb4vYhOQ3KcNtqgKsAb_UD0,18534
+cryptography/x509/oid.py,sha256=fFosjGsnIB_w_0YrzZv1ggkSVwZl7xmY0zofKZNZkDA,829
diff --git a/billinglayer/python/cryptography-41.0.3.dist-info/WHEEL b/billinglayer/python/cryptography-41.0.3.dist-info/WHEEL
new file mode 100644
index 0000000..9aa479e
--- /dev/null
+++ b/billinglayer/python/cryptography-41.0.3.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.41.0)
+Root-Is-Purelib: false
+Tag: cp37-abi3-manylinux_2_17_x86_64
+Tag: cp37-abi3-manylinux2014_x86_64
+
diff --git a/billinglayer/python/cryptography-41.0.3.dist-info/top_level.txt b/billinglayer/python/cryptography-41.0.3.dist-info/top_level.txt
new file mode 100644
index 0000000..0d38bc5
--- /dev/null
+++ b/billinglayer/python/cryptography-41.0.3.dist-info/top_level.txt
@@ -0,0 +1 @@
+cryptography
diff --git a/billinglayer/python/cryptography/__about__.py b/billinglayer/python/cryptography/__about__.py
new file mode 100644
index 0000000..27273ef
--- /dev/null
+++ b/billinglayer/python/cryptography/__about__.py
@@ -0,0 +1,17 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+__all__ = [
+ "__version__",
+ "__author__",
+ "__copyright__",
+]
+
+__version__ = "41.0.3"
+
+
+__author__ = "The Python Cryptographic Authority and individual contributors"
+__copyright__ = f"Copyright 2013-2023 {__author__}"
diff --git a/billinglayer/python/cryptography/__init__.py b/billinglayer/python/cryptography/__init__.py
new file mode 100644
index 0000000..86b9a25
--- /dev/null
+++ b/billinglayer/python/cryptography/__init__.py
@@ -0,0 +1,13 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+from cryptography.__about__ import __author__, __copyright__, __version__
+
+__all__ = [
+ "__version__",
+ "__author__",
+ "__copyright__",
+]
diff --git a/billinglayer/python/cryptography/__pycache__/__about__.cpython-311.pyc b/billinglayer/python/cryptography/__pycache__/__about__.cpython-311.pyc
new file mode 100644
index 0000000..2f43a0d
Binary files /dev/null and b/billinglayer/python/cryptography/__pycache__/__about__.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/__pycache__/__init__.cpython-311.pyc b/billinglayer/python/cryptography/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..ac92a4d
Binary files /dev/null and b/billinglayer/python/cryptography/__pycache__/__init__.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/__pycache__/exceptions.cpython-311.pyc b/billinglayer/python/cryptography/__pycache__/exceptions.cpython-311.pyc
new file mode 100644
index 0000000..1600a94
Binary files /dev/null and b/billinglayer/python/cryptography/__pycache__/exceptions.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/__pycache__/fernet.cpython-311.pyc b/billinglayer/python/cryptography/__pycache__/fernet.cpython-311.pyc
new file mode 100644
index 0000000..de22a49
Binary files /dev/null and b/billinglayer/python/cryptography/__pycache__/fernet.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/__pycache__/utils.cpython-311.pyc b/billinglayer/python/cryptography/__pycache__/utils.cpython-311.pyc
new file mode 100644
index 0000000..1b11710
Binary files /dev/null and b/billinglayer/python/cryptography/__pycache__/utils.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/exceptions.py b/billinglayer/python/cryptography/exceptions.py
new file mode 100644
index 0000000..47fdd18
--- /dev/null
+++ b/billinglayer/python/cryptography/exceptions.py
@@ -0,0 +1,54 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import typing
+
+from cryptography.hazmat.bindings._rust import exceptions as rust_exceptions
+
+if typing.TYPE_CHECKING:
+ from cryptography.hazmat.bindings._rust import openssl as rust_openssl
+
+_Reasons = rust_exceptions._Reasons
+
+
+class UnsupportedAlgorithm(Exception):
+ def __init__(
+ self, message: str, reason: typing.Optional[_Reasons] = None
+ ) -> None:
+ super().__init__(message)
+ self._reason = reason
+
+
+class AlreadyFinalized(Exception):
+ pass
+
+
+class AlreadyUpdated(Exception):
+ pass
+
+
+class NotYetFinalized(Exception):
+ pass
+
+
+class InvalidTag(Exception):
+ pass
+
+
+class InvalidSignature(Exception):
+ pass
+
+
+class InternalError(Exception):
+ def __init__(
+ self, msg: str, err_code: typing.List[rust_openssl.OpenSSLError]
+ ) -> None:
+ super().__init__(msg)
+ self.err_code = err_code
+
+
+class InvalidKey(Exception):
+ pass
diff --git a/billinglayer/python/cryptography/fernet.py b/billinglayer/python/cryptography/fernet.py
new file mode 100644
index 0000000..ad8fb40
--- /dev/null
+++ b/billinglayer/python/cryptography/fernet.py
@@ -0,0 +1,221 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import base64
+import binascii
+import os
+import time
+import typing
+
+from cryptography import utils
+from cryptography.exceptions import InvalidSignature
+from cryptography.hazmat.primitives import hashes, padding
+from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
+from cryptography.hazmat.primitives.hmac import HMAC
+
+
+class InvalidToken(Exception):
+ pass
+
+
+_MAX_CLOCK_SKEW = 60
+
+
+class Fernet:
+ def __init__(
+ self,
+ key: typing.Union[bytes, str],
+ backend: typing.Any = None,
+ ) -> None:
+ try:
+ key = base64.urlsafe_b64decode(key)
+ except binascii.Error as exc:
+ raise ValueError(
+ "Fernet key must be 32 url-safe base64-encoded bytes."
+ ) from exc
+ if len(key) != 32:
+ raise ValueError(
+ "Fernet key must be 32 url-safe base64-encoded bytes."
+ )
+
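+        # Per the Fernet spec, the 32-byte key splits into a 16-byte HMAC
+        # signing key and a 16-byte AES-128-CBC encryption key.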
+ self._signing_key = key[:16]
+ self._encryption_key = key[16:]
+
+ @classmethod
+ def generate_key(cls) -> bytes:
+ return base64.urlsafe_b64encode(os.urandom(32))
+
+ def encrypt(self, data: bytes) -> bytes:
+ return self.encrypt_at_time(data, int(time.time()))
+
+ def encrypt_at_time(self, data: bytes, current_time: int) -> bytes:
+ iv = os.urandom(16)
+ return self._encrypt_from_parts(data, current_time, iv)
+
+ def _encrypt_from_parts(
+ self, data: bytes, current_time: int, iv: bytes
+ ) -> bytes:
+ utils._check_bytes("data", data)
+
+ padder = padding.PKCS7(algorithms.AES.block_size).padder()
+ padded_data = padder.update(data) + padder.finalize()
+ encryptor = Cipher(
+ algorithms.AES(self._encryption_key),
+ modes.CBC(iv),
+ ).encryptor()
+ ciphertext = encryptor.update(padded_data) + encryptor.finalize()
+
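+        # Fernet token layout: 0x80 version byte || 8-byte big-endian
+        # timestamp || 16-byte IV || ciphertext, followed below by a
+        # 32-byte HMAC-SHA256 over all of the preceding bytes.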
+ basic_parts = (
+ b"\x80"
+ + current_time.to_bytes(length=8, byteorder="big")
+ + iv
+ + ciphertext
+ )
+
+ h = HMAC(self._signing_key, hashes.SHA256())
+ h.update(basic_parts)
+ hmac = h.finalize()
+ return base64.urlsafe_b64encode(basic_parts + hmac)
+
+ def decrypt(
+ self, token: typing.Union[bytes, str], ttl: typing.Optional[int] = None
+ ) -> bytes:
+ timestamp, data = Fernet._get_unverified_token_data(token)
+ if ttl is None:
+ time_info = None
+ else:
+ time_info = (ttl, int(time.time()))
+ return self._decrypt_data(data, timestamp, time_info)
+
+ def decrypt_at_time(
+ self, token: typing.Union[bytes, str], ttl: int, current_time: int
+ ) -> bytes:
+ if ttl is None:
+ raise ValueError(
+ "decrypt_at_time() can only be used with a non-None ttl"
+ )
+ timestamp, data = Fernet._get_unverified_token_data(token)
+ return self._decrypt_data(data, timestamp, (ttl, current_time))
+
+ def extract_timestamp(self, token: typing.Union[bytes, str]) -> int:
+ timestamp, data = Fernet._get_unverified_token_data(token)
+ # Verify the token was not tampered with.
+ self._verify_signature(data)
+ return timestamp
+
+ @staticmethod
+ def _get_unverified_token_data(
+ token: typing.Union[bytes, str]
+ ) -> typing.Tuple[int, bytes]:
+ if not isinstance(token, (str, bytes)):
+ raise TypeError("token must be bytes or str")
+
+ try:
+ data = base64.urlsafe_b64decode(token)
+ except (TypeError, binascii.Error):
+ raise InvalidToken
+
+ if not data or data[0] != 0x80:
+ raise InvalidToken
+
+ if len(data) < 9:
+ raise InvalidToken
+
+ timestamp = int.from_bytes(data[1:9], byteorder="big")
+ return timestamp, data
+
+ def _verify_signature(self, data: bytes) -> None:
+ h = HMAC(self._signing_key, hashes.SHA256())
+ h.update(data[:-32])
+ try:
+ h.verify(data[-32:])
+ except InvalidSignature:
+ raise InvalidToken
+
+ def _decrypt_data(
+ self,
+ data: bytes,
+ timestamp: int,
+ time_info: typing.Optional[typing.Tuple[int, int]],
+ ) -> bytes:
+ if time_info is not None:
+ ttl, current_time = time_info
+ if timestamp + ttl < current_time:
+ raise InvalidToken
+
+ if current_time + _MAX_CLOCK_SKEW < timestamp:
+ raise InvalidToken
+
+ self._verify_signature(data)
+
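+        # Offsets mirror the token layout: byte 0 is the version, bytes 1-8
+        # the timestamp, bytes 9-24 the IV, and the final 32 bytes the HMAC.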
+ iv = data[9:25]
+ ciphertext = data[25:-32]
+ decryptor = Cipher(
+ algorithms.AES(self._encryption_key), modes.CBC(iv)
+ ).decryptor()
+ plaintext_padded = decryptor.update(ciphertext)
+ try:
+ plaintext_padded += decryptor.finalize()
+ except ValueError:
+ raise InvalidToken
+ unpadder = padding.PKCS7(algorithms.AES.block_size).unpadder()
+
+ unpadded = unpadder.update(plaintext_padded)
+ try:
+ unpadded += unpadder.finalize()
+ except ValueError:
+ raise InvalidToken
+ return unpadded
+
+
+class MultiFernet:
+ def __init__(self, fernets: typing.Iterable[Fernet]):
+ fernets = list(fernets)
+ if not fernets:
+ raise ValueError(
+ "MultiFernet requires at least one Fernet instance"
+ )
+ self._fernets = fernets
+
+ def encrypt(self, msg: bytes) -> bytes:
+ return self.encrypt_at_time(msg, int(time.time()))
+
+ def encrypt_at_time(self, msg: bytes, current_time: int) -> bytes:
+ return self._fernets[0].encrypt_at_time(msg, current_time)
+
+ def rotate(self, msg: typing.Union[bytes, str]) -> bytes:
+ timestamp, data = Fernet._get_unverified_token_data(msg)
+ for f in self._fernets:
+ try:
+ p = f._decrypt_data(data, timestamp, None)
+ break
+ except InvalidToken:
+ pass
+ else:
+ raise InvalidToken
+
+ iv = os.urandom(16)
+ return self._fernets[0]._encrypt_from_parts(p, timestamp, iv)
+
+ def decrypt(
+ self, msg: typing.Union[bytes, str], ttl: typing.Optional[int] = None
+ ) -> bytes:
+ for f in self._fernets:
+ try:
+ return f.decrypt(msg, ttl)
+ except InvalidToken:
+ pass
+ raise InvalidToken
+
+ def decrypt_at_time(
+ self, msg: typing.Union[bytes, str], ttl: int, current_time: int
+ ) -> bytes:
+ for f in self._fernets:
+ try:
+ return f.decrypt_at_time(msg, ttl, current_time)
+ except InvalidToken:
+ pass
+ raise InvalidToken
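+
+# Usage sketch (hypothetical keys): list the newest key first so encrypt()
+# uses it, while decrypt() falls back across all keys; rotate() re-encrypts
+# an old token under the first key:
+#
+#     mf = MultiFernet([Fernet(new_key), Fernet(old_key)])
+#     fresh_token = mf.rotate(old_token)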
diff --git a/billinglayer/python/cryptography/hazmat/__init__.py b/billinglayer/python/cryptography/hazmat/__init__.py
new file mode 100644
index 0000000..b9f1187
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/__init__.py
@@ -0,0 +1,13 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+"""
+Hazardous Materials
+
+This is a "Hazardous Materials" module. You should ONLY use it if you're
+100% absolutely sure that you know what you're doing because this module
+is full of land mines, dragons, and dinosaurs with laser guns.
+"""
diff --git a/billinglayer/python/cryptography/hazmat/__pycache__/__init__.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..b645fd3
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/__pycache__/__init__.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/__pycache__/_oid.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/__pycache__/_oid.cpython-311.pyc
new file mode 100644
index 0000000..92ac8f5
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/__pycache__/_oid.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/_oid.py b/billinglayer/python/cryptography/hazmat/_oid.py
new file mode 100644
index 0000000..01d4b34
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/_oid.py
@@ -0,0 +1,299 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import typing
+
+from cryptography.hazmat.bindings._rust import (
+ ObjectIdentifier as ObjectIdentifier,
+)
+from cryptography.hazmat.primitives import hashes
+
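+# OIDs are dotted-integer identifiers from the joint ITU-T/ISO registry;
+# for example, "2.5.29.19" below is the X.509 basicConstraints extension.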
+
+class ExtensionOID:
+ SUBJECT_DIRECTORY_ATTRIBUTES = ObjectIdentifier("2.5.29.9")
+ SUBJECT_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.14")
+ KEY_USAGE = ObjectIdentifier("2.5.29.15")
+ SUBJECT_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.17")
+ ISSUER_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.18")
+ BASIC_CONSTRAINTS = ObjectIdentifier("2.5.29.19")
+ NAME_CONSTRAINTS = ObjectIdentifier("2.5.29.30")
+ CRL_DISTRIBUTION_POINTS = ObjectIdentifier("2.5.29.31")
+ CERTIFICATE_POLICIES = ObjectIdentifier("2.5.29.32")
+ POLICY_MAPPINGS = ObjectIdentifier("2.5.29.33")
+ AUTHORITY_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.35")
+ POLICY_CONSTRAINTS = ObjectIdentifier("2.5.29.36")
+ EXTENDED_KEY_USAGE = ObjectIdentifier("2.5.29.37")
+ FRESHEST_CRL = ObjectIdentifier("2.5.29.46")
+ INHIBIT_ANY_POLICY = ObjectIdentifier("2.5.29.54")
+ ISSUING_DISTRIBUTION_POINT = ObjectIdentifier("2.5.29.28")
+ AUTHORITY_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.1")
+ SUBJECT_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.11")
+ OCSP_NO_CHECK = ObjectIdentifier("1.3.6.1.5.5.7.48.1.5")
+ TLS_FEATURE = ObjectIdentifier("1.3.6.1.5.5.7.1.24")
+ CRL_NUMBER = ObjectIdentifier("2.5.29.20")
+ DELTA_CRL_INDICATOR = ObjectIdentifier("2.5.29.27")
+ PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS = ObjectIdentifier(
+ "1.3.6.1.4.1.11129.2.4.2"
+ )
+ PRECERT_POISON = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.3")
+ SIGNED_CERTIFICATE_TIMESTAMPS = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.5")
+ MS_CERTIFICATE_TEMPLATE = ObjectIdentifier("1.3.6.1.4.1.311.21.7")
+
+
+class OCSPExtensionOID:
+ NONCE = ObjectIdentifier("1.3.6.1.5.5.7.48.1.2")
+ ACCEPTABLE_RESPONSES = ObjectIdentifier("1.3.6.1.5.5.7.48.1.4")
+
+
+class CRLEntryExtensionOID:
+ CERTIFICATE_ISSUER = ObjectIdentifier("2.5.29.29")
+ CRL_REASON = ObjectIdentifier("2.5.29.21")
+ INVALIDITY_DATE = ObjectIdentifier("2.5.29.24")
+
+
+class NameOID:
+ COMMON_NAME = ObjectIdentifier("2.5.4.3")
+ COUNTRY_NAME = ObjectIdentifier("2.5.4.6")
+ LOCALITY_NAME = ObjectIdentifier("2.5.4.7")
+ STATE_OR_PROVINCE_NAME = ObjectIdentifier("2.5.4.8")
+ STREET_ADDRESS = ObjectIdentifier("2.5.4.9")
+ ORGANIZATION_NAME = ObjectIdentifier("2.5.4.10")
+ ORGANIZATIONAL_UNIT_NAME = ObjectIdentifier("2.5.4.11")
+ SERIAL_NUMBER = ObjectIdentifier("2.5.4.5")
+ SURNAME = ObjectIdentifier("2.5.4.4")
+ GIVEN_NAME = ObjectIdentifier("2.5.4.42")
+ TITLE = ObjectIdentifier("2.5.4.12")
+ INITIALS = ObjectIdentifier("2.5.4.43")
+ GENERATION_QUALIFIER = ObjectIdentifier("2.5.4.44")
+ X500_UNIQUE_IDENTIFIER = ObjectIdentifier("2.5.4.45")
+ DN_QUALIFIER = ObjectIdentifier("2.5.4.46")
+ PSEUDONYM = ObjectIdentifier("2.5.4.65")
+ USER_ID = ObjectIdentifier("0.9.2342.19200300.100.1.1")
+ DOMAIN_COMPONENT = ObjectIdentifier("0.9.2342.19200300.100.1.25")
+ EMAIL_ADDRESS = ObjectIdentifier("1.2.840.113549.1.9.1")
+ JURISDICTION_COUNTRY_NAME = ObjectIdentifier("1.3.6.1.4.1.311.60.2.1.3")
+ JURISDICTION_LOCALITY_NAME = ObjectIdentifier("1.3.6.1.4.1.311.60.2.1.1")
+ JURISDICTION_STATE_OR_PROVINCE_NAME = ObjectIdentifier(
+ "1.3.6.1.4.1.311.60.2.1.2"
+ )
+ BUSINESS_CATEGORY = ObjectIdentifier("2.5.4.15")
+ POSTAL_ADDRESS = ObjectIdentifier("2.5.4.16")
+ POSTAL_CODE = ObjectIdentifier("2.5.4.17")
+ INN = ObjectIdentifier("1.2.643.3.131.1.1")
+ OGRN = ObjectIdentifier("1.2.643.100.1")
+ SNILS = ObjectIdentifier("1.2.643.100.3")
+ UNSTRUCTURED_NAME = ObjectIdentifier("1.2.840.113549.1.9.2")
+
+
+class SignatureAlgorithmOID:
+ RSA_WITH_MD5 = ObjectIdentifier("1.2.840.113549.1.1.4")
+ RSA_WITH_SHA1 = ObjectIdentifier("1.2.840.113549.1.1.5")
+ # This is an alternate OID for RSA with SHA1 that is occasionally seen
+ _RSA_WITH_SHA1 = ObjectIdentifier("1.3.14.3.2.29")
+ RSA_WITH_SHA224 = ObjectIdentifier("1.2.840.113549.1.1.14")
+ RSA_WITH_SHA256 = ObjectIdentifier("1.2.840.113549.1.1.11")
+ RSA_WITH_SHA384 = ObjectIdentifier("1.2.840.113549.1.1.12")
+ RSA_WITH_SHA512 = ObjectIdentifier("1.2.840.113549.1.1.13")
+ RSA_WITH_SHA3_224 = ObjectIdentifier("2.16.840.1.101.3.4.3.13")
+ RSA_WITH_SHA3_256 = ObjectIdentifier("2.16.840.1.101.3.4.3.14")
+ RSA_WITH_SHA3_384 = ObjectIdentifier("2.16.840.1.101.3.4.3.15")
+ RSA_WITH_SHA3_512 = ObjectIdentifier("2.16.840.1.101.3.4.3.16")
+ RSASSA_PSS = ObjectIdentifier("1.2.840.113549.1.1.10")
+ ECDSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10045.4.1")
+ ECDSA_WITH_SHA224 = ObjectIdentifier("1.2.840.10045.4.3.1")
+ ECDSA_WITH_SHA256 = ObjectIdentifier("1.2.840.10045.4.3.2")
+ ECDSA_WITH_SHA384 = ObjectIdentifier("1.2.840.10045.4.3.3")
+ ECDSA_WITH_SHA512 = ObjectIdentifier("1.2.840.10045.4.3.4")
+ ECDSA_WITH_SHA3_224 = ObjectIdentifier("2.16.840.1.101.3.4.3.9")
+ ECDSA_WITH_SHA3_256 = ObjectIdentifier("2.16.840.1.101.3.4.3.10")
+ ECDSA_WITH_SHA3_384 = ObjectIdentifier("2.16.840.1.101.3.4.3.11")
+ ECDSA_WITH_SHA3_512 = ObjectIdentifier("2.16.840.1.101.3.4.3.12")
+ DSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10040.4.3")
+ DSA_WITH_SHA224 = ObjectIdentifier("2.16.840.1.101.3.4.3.1")
+ DSA_WITH_SHA256 = ObjectIdentifier("2.16.840.1.101.3.4.3.2")
+ DSA_WITH_SHA384 = ObjectIdentifier("2.16.840.1.101.3.4.3.3")
+ DSA_WITH_SHA512 = ObjectIdentifier("2.16.840.1.101.3.4.3.4")
+ ED25519 = ObjectIdentifier("1.3.101.112")
+ ED448 = ObjectIdentifier("1.3.101.113")
+ GOSTR3411_94_WITH_3410_2001 = ObjectIdentifier("1.2.643.2.2.3")
+ GOSTR3410_2012_WITH_3411_2012_256 = ObjectIdentifier("1.2.643.7.1.1.3.2")
+ GOSTR3410_2012_WITH_3411_2012_512 = ObjectIdentifier("1.2.643.7.1.1.3.3")
+
+
+_SIG_OIDS_TO_HASH: typing.Dict[
+ ObjectIdentifier, typing.Optional[hashes.HashAlgorithm]
+] = {
+ SignatureAlgorithmOID.RSA_WITH_MD5: hashes.MD5(),
+ SignatureAlgorithmOID.RSA_WITH_SHA1: hashes.SHA1(),
+ SignatureAlgorithmOID._RSA_WITH_SHA1: hashes.SHA1(),
+ SignatureAlgorithmOID.RSA_WITH_SHA224: hashes.SHA224(),
+ SignatureAlgorithmOID.RSA_WITH_SHA256: hashes.SHA256(),
+ SignatureAlgorithmOID.RSA_WITH_SHA384: hashes.SHA384(),
+ SignatureAlgorithmOID.RSA_WITH_SHA512: hashes.SHA512(),
+ SignatureAlgorithmOID.RSA_WITH_SHA3_224: hashes.SHA3_224(),
+ SignatureAlgorithmOID.RSA_WITH_SHA3_256: hashes.SHA3_256(),
+ SignatureAlgorithmOID.RSA_WITH_SHA3_384: hashes.SHA3_384(),
+ SignatureAlgorithmOID.RSA_WITH_SHA3_512: hashes.SHA3_512(),
+ SignatureAlgorithmOID.ECDSA_WITH_SHA1: hashes.SHA1(),
+ SignatureAlgorithmOID.ECDSA_WITH_SHA224: hashes.SHA224(),
+ SignatureAlgorithmOID.ECDSA_WITH_SHA256: hashes.SHA256(),
+ SignatureAlgorithmOID.ECDSA_WITH_SHA384: hashes.SHA384(),
+ SignatureAlgorithmOID.ECDSA_WITH_SHA512: hashes.SHA512(),
+ SignatureAlgorithmOID.ECDSA_WITH_SHA3_224: hashes.SHA3_224(),
+ SignatureAlgorithmOID.ECDSA_WITH_SHA3_256: hashes.SHA3_256(),
+ SignatureAlgorithmOID.ECDSA_WITH_SHA3_384: hashes.SHA3_384(),
+ SignatureAlgorithmOID.ECDSA_WITH_SHA3_512: hashes.SHA3_512(),
+ SignatureAlgorithmOID.DSA_WITH_SHA1: hashes.SHA1(),
+ SignatureAlgorithmOID.DSA_WITH_SHA224: hashes.SHA224(),
+ SignatureAlgorithmOID.DSA_WITH_SHA256: hashes.SHA256(),
+ SignatureAlgorithmOID.ED25519: None,
+ SignatureAlgorithmOID.ED448: None,
+ SignatureAlgorithmOID.GOSTR3411_94_WITH_3410_2001: None,
+ SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_256: None,
+ SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_512: None,
+}
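+
+# Usage sketch (illustrative editor's note, not part of the upstream module):
+# resolving the digest behind a signature algorithm OID. Ed25519/Ed448 and the
+# GOST entries map to None because the hash is fixed by the signature scheme:
+#
+#     assert isinstance(
+#         _SIG_OIDS_TO_HASH[SignatureAlgorithmOID.RSA_WITH_SHA256],
+#         hashes.SHA256,
+#     )
+#     assert _SIG_OIDS_TO_HASH[SignatureAlgorithmOID.ED25519] is None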
+
+
+class ExtendedKeyUsageOID:
+ SERVER_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.1")
+ CLIENT_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.2")
+ CODE_SIGNING = ObjectIdentifier("1.3.6.1.5.5.7.3.3")
+ EMAIL_PROTECTION = ObjectIdentifier("1.3.6.1.5.5.7.3.4")
+ TIME_STAMPING = ObjectIdentifier("1.3.6.1.5.5.7.3.8")
+ OCSP_SIGNING = ObjectIdentifier("1.3.6.1.5.5.7.3.9")
+ ANY_EXTENDED_KEY_USAGE = ObjectIdentifier("2.5.29.37.0")
+ SMARTCARD_LOGON = ObjectIdentifier("1.3.6.1.4.1.311.20.2.2")
+ KERBEROS_PKINIT_KDC = ObjectIdentifier("1.3.6.1.5.2.3.5")
+ IPSEC_IKE = ObjectIdentifier("1.3.6.1.5.5.7.3.17")
+ CERTIFICATE_TRANSPARENCY = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.4")
+
+
+class AuthorityInformationAccessOID:
+ CA_ISSUERS = ObjectIdentifier("1.3.6.1.5.5.7.48.2")
+ OCSP = ObjectIdentifier("1.3.6.1.5.5.7.48.1")
+
+
+class SubjectInformationAccessOID:
+ CA_REPOSITORY = ObjectIdentifier("1.3.6.1.5.5.7.48.5")
+
+
+class CertificatePoliciesOID:
+ CPS_QUALIFIER = ObjectIdentifier("1.3.6.1.5.5.7.2.1")
+ CPS_USER_NOTICE = ObjectIdentifier("1.3.6.1.5.5.7.2.2")
+ ANY_POLICY = ObjectIdentifier("2.5.29.32.0")
+
+
+class AttributeOID:
+ CHALLENGE_PASSWORD = ObjectIdentifier("1.2.840.113549.1.9.7")
+ UNSTRUCTURED_NAME = ObjectIdentifier("1.2.840.113549.1.9.2")
+
+
+_OID_NAMES = {
+ NameOID.COMMON_NAME: "commonName",
+ NameOID.COUNTRY_NAME: "countryName",
+ NameOID.LOCALITY_NAME: "localityName",
+ NameOID.STATE_OR_PROVINCE_NAME: "stateOrProvinceName",
+ NameOID.STREET_ADDRESS: "streetAddress",
+ NameOID.ORGANIZATION_NAME: "organizationName",
+ NameOID.ORGANIZATIONAL_UNIT_NAME: "organizationalUnitName",
+ NameOID.SERIAL_NUMBER: "serialNumber",
+ NameOID.SURNAME: "surname",
+ NameOID.GIVEN_NAME: "givenName",
+ NameOID.TITLE: "title",
+ NameOID.GENERATION_QUALIFIER: "generationQualifier",
+ NameOID.X500_UNIQUE_IDENTIFIER: "x500UniqueIdentifier",
+ NameOID.DN_QUALIFIER: "dnQualifier",
+ NameOID.PSEUDONYM: "pseudonym",
+ NameOID.USER_ID: "userID",
+ NameOID.DOMAIN_COMPONENT: "domainComponent",
+ NameOID.EMAIL_ADDRESS: "emailAddress",
+ NameOID.JURISDICTION_COUNTRY_NAME: "jurisdictionCountryName",
+ NameOID.JURISDICTION_LOCALITY_NAME: "jurisdictionLocalityName",
+ NameOID.JURISDICTION_STATE_OR_PROVINCE_NAME: (
+ "jurisdictionStateOrProvinceName"
+ ),
+ NameOID.BUSINESS_CATEGORY: "businessCategory",
+ NameOID.POSTAL_ADDRESS: "postalAddress",
+ NameOID.POSTAL_CODE: "postalCode",
+ NameOID.INN: "INN",
+ NameOID.OGRN: "OGRN",
+ NameOID.SNILS: "SNILS",
+ NameOID.UNSTRUCTURED_NAME: "unstructuredName",
+ SignatureAlgorithmOID.RSA_WITH_MD5: "md5WithRSAEncryption",
+ SignatureAlgorithmOID.RSA_WITH_SHA1: "sha1WithRSAEncryption",
+ SignatureAlgorithmOID.RSA_WITH_SHA224: "sha224WithRSAEncryption",
+ SignatureAlgorithmOID.RSA_WITH_SHA256: "sha256WithRSAEncryption",
+ SignatureAlgorithmOID.RSA_WITH_SHA384: "sha384WithRSAEncryption",
+ SignatureAlgorithmOID.RSA_WITH_SHA512: "sha512WithRSAEncryption",
+ SignatureAlgorithmOID.RSASSA_PSS: "RSASSA-PSS",
+ SignatureAlgorithmOID.ECDSA_WITH_SHA1: "ecdsa-with-SHA1",
+ SignatureAlgorithmOID.ECDSA_WITH_SHA224: "ecdsa-with-SHA224",
+ SignatureAlgorithmOID.ECDSA_WITH_SHA256: "ecdsa-with-SHA256",
+ SignatureAlgorithmOID.ECDSA_WITH_SHA384: "ecdsa-with-SHA384",
+ SignatureAlgorithmOID.ECDSA_WITH_SHA512: "ecdsa-with-SHA512",
+ SignatureAlgorithmOID.DSA_WITH_SHA1: "dsa-with-sha1",
+ SignatureAlgorithmOID.DSA_WITH_SHA224: "dsa-with-sha224",
+ SignatureAlgorithmOID.DSA_WITH_SHA256: "dsa-with-sha256",
+ SignatureAlgorithmOID.ED25519: "ed25519",
+ SignatureAlgorithmOID.ED448: "ed448",
+ SignatureAlgorithmOID.GOSTR3411_94_WITH_3410_2001: (
+ "GOST R 34.11-94 with GOST R 34.10-2001"
+ ),
+ SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_256: (
+ "GOST R 34.10-2012 with GOST R 34.11-2012 (256 bit)"
+ ),
+ SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_512: (
+ "GOST R 34.10-2012 with GOST R 34.11-2012 (512 bit)"
+ ),
+ ExtendedKeyUsageOID.SERVER_AUTH: "serverAuth",
+ ExtendedKeyUsageOID.CLIENT_AUTH: "clientAuth",
+ ExtendedKeyUsageOID.CODE_SIGNING: "codeSigning",
+ ExtendedKeyUsageOID.EMAIL_PROTECTION: "emailProtection",
+ ExtendedKeyUsageOID.TIME_STAMPING: "timeStamping",
+ ExtendedKeyUsageOID.OCSP_SIGNING: "OCSPSigning",
+ ExtendedKeyUsageOID.SMARTCARD_LOGON: "msSmartcardLogin",
+ ExtendedKeyUsageOID.KERBEROS_PKINIT_KDC: "pkInitKDC",
+ ExtensionOID.SUBJECT_DIRECTORY_ATTRIBUTES: "subjectDirectoryAttributes",
+ ExtensionOID.SUBJECT_KEY_IDENTIFIER: "subjectKeyIdentifier",
+ ExtensionOID.KEY_USAGE: "keyUsage",
+ ExtensionOID.SUBJECT_ALTERNATIVE_NAME: "subjectAltName",
+ ExtensionOID.ISSUER_ALTERNATIVE_NAME: "issuerAltName",
+ ExtensionOID.BASIC_CONSTRAINTS: "basicConstraints",
+ ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS: (
+ "signedCertificateTimestampList"
+ ),
+ ExtensionOID.SIGNED_CERTIFICATE_TIMESTAMPS: (
+ "signedCertificateTimestampList"
+ ),
+ ExtensionOID.PRECERT_POISON: "ctPoison",
+ ExtensionOID.MS_CERTIFICATE_TEMPLATE: "msCertificateTemplate",
+ CRLEntryExtensionOID.CRL_REASON: "cRLReason",
+ CRLEntryExtensionOID.INVALIDITY_DATE: "invalidityDate",
+ CRLEntryExtensionOID.CERTIFICATE_ISSUER: "certificateIssuer",
+ ExtensionOID.NAME_CONSTRAINTS: "nameConstraints",
+ ExtensionOID.CRL_DISTRIBUTION_POINTS: "cRLDistributionPoints",
+ ExtensionOID.CERTIFICATE_POLICIES: "certificatePolicies",
+ ExtensionOID.POLICY_MAPPINGS: "policyMappings",
+ ExtensionOID.AUTHORITY_KEY_IDENTIFIER: "authorityKeyIdentifier",
+ ExtensionOID.POLICY_CONSTRAINTS: "policyConstraints",
+ ExtensionOID.EXTENDED_KEY_USAGE: "extendedKeyUsage",
+ ExtensionOID.FRESHEST_CRL: "freshestCRL",
+ ExtensionOID.INHIBIT_ANY_POLICY: "inhibitAnyPolicy",
+    ExtensionOID.ISSUING_DISTRIBUTION_POINT: "issuingDistributionPoint",
+ ExtensionOID.AUTHORITY_INFORMATION_ACCESS: "authorityInfoAccess",
+ ExtensionOID.SUBJECT_INFORMATION_ACCESS: "subjectInfoAccess",
+ ExtensionOID.OCSP_NO_CHECK: "OCSPNoCheck",
+ ExtensionOID.CRL_NUMBER: "cRLNumber",
+ ExtensionOID.DELTA_CRL_INDICATOR: "deltaCRLIndicator",
+ ExtensionOID.TLS_FEATURE: "TLSFeature",
+ AuthorityInformationAccessOID.OCSP: "OCSP",
+ AuthorityInformationAccessOID.CA_ISSUERS: "caIssuers",
+ SubjectInformationAccessOID.CA_REPOSITORY: "caRepository",
+ CertificatePoliciesOID.CPS_QUALIFIER: "id-qt-cps",
+ CertificatePoliciesOID.CPS_USER_NOTICE: "id-qt-unotice",
+ OCSPExtensionOID.NONCE: "OCSPNonce",
+ AttributeOID.CHALLENGE_PASSWORD: "challengePassword",
+}
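+
+# Usage sketch (illustrative): _OID_NAMES is a display-name lookup table;
+# _OID_NAMES.get(NameOID.COMMON_NAME, "Unknown OID") returns "commonName".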
diff --git a/billinglayer/python/cryptography/hazmat/backends/__init__.py b/billinglayer/python/cryptography/hazmat/backends/__init__.py
new file mode 100644
index 0000000..b4400aa
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/backends/__init__.py
@@ -0,0 +1,13 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+from typing import Any
+
+
+def default_backend() -> Any:
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ return backend
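+
+
+# Usage sketch (illustrative): current cryptography APIs no longer need an
+# explicit backend, but legacy call sites can still obtain the singleton:
+#
+#     backend = default_backend()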
diff --git a/billinglayer/python/cryptography/hazmat/backends/__pycache__/__init__.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/backends/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..f474f9f
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/backends/__pycache__/__init__.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/backends/openssl/__init__.py b/billinglayer/python/cryptography/hazmat/backends/openssl/__init__.py
new file mode 100644
index 0000000..51b0447
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/backends/openssl/__init__.py
@@ -0,0 +1,9 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+from cryptography.hazmat.backends.openssl.backend import backend
+
+__all__ = ["backend"]
diff --git a/billinglayer/python/cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..e860eba
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/backends/openssl/__pycache__/aead.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/backends/openssl/__pycache__/aead.cpython-311.pyc
new file mode 100644
index 0000000..7d21854
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/backends/openssl/__pycache__/aead.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-311.pyc
new file mode 100644
index 0000000..39613a8
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/backends/openssl/__pycache__/ciphers.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/backends/openssl/__pycache__/ciphers.cpython-311.pyc
new file mode 100644
index 0000000..b27709b
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/backends/openssl/__pycache__/ciphers.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/backends/openssl/__pycache__/cmac.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/backends/openssl/__pycache__/cmac.cpython-311.pyc
new file mode 100644
index 0000000..bba7f38
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/backends/openssl/__pycache__/cmac.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/backends/openssl/__pycache__/decode_asn1.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/backends/openssl/__pycache__/decode_asn1.cpython-311.pyc
new file mode 100644
index 0000000..6e95d5b
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/backends/openssl/__pycache__/decode_asn1.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/backends/openssl/__pycache__/ec.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/backends/openssl/__pycache__/ec.cpython-311.pyc
new file mode 100644
index 0000000..dc39362
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/backends/openssl/__pycache__/ec.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/backends/openssl/__pycache__/rsa.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/backends/openssl/__pycache__/rsa.cpython-311.pyc
new file mode 100644
index 0000000..74a1dd7
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/backends/openssl/__pycache__/rsa.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/backends/openssl/__pycache__/utils.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/backends/openssl/__pycache__/utils.cpython-311.pyc
new file mode 100644
index 0000000..a62b70b
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/backends/openssl/__pycache__/utils.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/backends/openssl/aead.py b/billinglayer/python/cryptography/hazmat/backends/openssl/aead.py
new file mode 100644
index 0000000..b36f535
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/backends/openssl/aead.py
@@ -0,0 +1,527 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import typing
+
+from cryptography.exceptions import InvalidTag
+
+if typing.TYPE_CHECKING:
+ from cryptography.hazmat.backends.openssl.backend import Backend
+ from cryptography.hazmat.primitives.ciphers.aead import (
+ AESCCM,
+ AESGCM,
+ AESOCB3,
+ AESSIV,
+ ChaCha20Poly1305,
+ )
+
+ _AEADTypes = typing.Union[
+ AESCCM, AESGCM, AESOCB3, AESSIV, ChaCha20Poly1305
+ ]
+
+
+def _is_evp_aead_supported_cipher(
+ backend: Backend, cipher: _AEADTypes
+) -> bool:
+ """
+ Checks whether the given cipher is supported through
+ EVP_AEAD rather than the normal OpenSSL EVP_CIPHER API.
+ """
+ from cryptography.hazmat.primitives.ciphers.aead import ChaCha20Poly1305
+
+ return backend._lib.Cryptography_HAS_EVP_AEAD and isinstance(
+ cipher, ChaCha20Poly1305
+ )
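+
+
+# Illustrative check (assumes a build, e.g. BoringSSL, where
+# Cryptography_HAS_EVP_AEAD is set):
+#
+#     from cryptography.hazmat.primitives.ciphers.aead import ChaCha20Poly1305
+#     cipher = ChaCha20Poly1305(ChaCha20Poly1305.generate_key())
+#     _is_evp_aead_supported_cipher(backend, cipher)  # True on such builds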
+
+
+def _aead_cipher_supported(backend: Backend, cipher: _AEADTypes) -> bool:
+ if _is_evp_aead_supported_cipher(backend, cipher):
+ return True
+ else:
+ cipher_name = _evp_cipher_cipher_name(cipher)
+ if backend._fips_enabled and cipher_name not in backend._fips_aead:
+ return False
+ # SIV isn't loaded through get_cipherbyname but instead a new fetch API
+ # only available in 3.0+. But if we know we're on 3.0+ then we know
+ # it's supported.
+ if cipher_name.endswith(b"-siv"):
+ return backend._lib.CRYPTOGRAPHY_OPENSSL_300_OR_GREATER == 1
+ else:
+ return (
+ backend._lib.EVP_get_cipherbyname(cipher_name)
+ != backend._ffi.NULL
+ )
+
+
+def _aead_create_ctx(
+ backend: Backend,
+ cipher: _AEADTypes,
+ key: bytes,
+):
+ if _is_evp_aead_supported_cipher(backend, cipher):
+ return _evp_aead_create_ctx(backend, cipher, key)
+ else:
+ return _evp_cipher_create_ctx(backend, cipher, key)
+
+
+def _encrypt(
+ backend: Backend,
+ cipher: _AEADTypes,
+ nonce: bytes,
+ data: bytes,
+ associated_data: typing.List[bytes],
+ tag_length: int,
+ ctx: typing.Any = None,
+) -> bytes:
+ if _is_evp_aead_supported_cipher(backend, cipher):
+ return _evp_aead_encrypt(
+ backend, cipher, nonce, data, associated_data, tag_length, ctx
+ )
+ else:
+ return _evp_cipher_encrypt(
+ backend, cipher, nonce, data, associated_data, tag_length, ctx
+ )
+
+
+def _decrypt(
+ backend: Backend,
+ cipher: _AEADTypes,
+ nonce: bytes,
+ data: bytes,
+ associated_data: typing.List[bytes],
+ tag_length: int,
+ ctx: typing.Any = None,
+) -> bytes:
+ if _is_evp_aead_supported_cipher(backend, cipher):
+ return _evp_aead_decrypt(
+ backend, cipher, nonce, data, associated_data, tag_length, ctx
+ )
+ else:
+ return _evp_cipher_decrypt(
+ backend, cipher, nonce, data, associated_data, tag_length, ctx
+ )
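+
+
+# Flow sketch (illustrative): a call such as AESGCM(key).encrypt(nonce, data,
+# aad) reaches _encrypt() above with tag_length=16 and, on stock OpenSSL
+# builds, dispatches to the EVP_CIPHER implementation below.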
+
+
+def _evp_aead_create_ctx(
+ backend: Backend,
+ cipher: _AEADTypes,
+ key: bytes,
+ tag_len: typing.Optional[int] = None,
+):
+ aead_cipher = _evp_aead_get_cipher(backend, cipher)
+ assert aead_cipher is not None
+ key_ptr = backend._ffi.from_buffer(key)
+ tag_len = (
+ backend._lib.EVP_AEAD_DEFAULT_TAG_LENGTH
+ if tag_len is None
+ else tag_len
+ )
+ ctx = backend._lib.Cryptography_EVP_AEAD_CTX_new(
+ aead_cipher, key_ptr, len(key), tag_len
+ )
+ backend.openssl_assert(ctx != backend._ffi.NULL)
+ ctx = backend._ffi.gc(ctx, backend._lib.EVP_AEAD_CTX_free)
+ return ctx
+
+
+def _evp_aead_get_cipher(backend: Backend, cipher: _AEADTypes):
+ from cryptography.hazmat.primitives.ciphers.aead import (
+ ChaCha20Poly1305,
+ )
+
+ # Currently only ChaCha20-Poly1305 is supported using this API
+ assert isinstance(cipher, ChaCha20Poly1305)
+ return backend._lib.EVP_aead_chacha20_poly1305()
+
+
+def _evp_aead_encrypt(
+ backend: Backend,
+ cipher: _AEADTypes,
+ nonce: bytes,
+ data: bytes,
+ associated_data: typing.List[bytes],
+ tag_length: int,
+ ctx: typing.Any,
+) -> bytes:
+ assert ctx is not None
+
+ aead_cipher = _evp_aead_get_cipher(backend, cipher)
+ assert aead_cipher is not None
+
+ out_len = backend._ffi.new("size_t *")
+ # max_out_len should be in_len plus the result of
+ # EVP_AEAD_max_overhead.
+ max_out_len = len(data) + backend._lib.EVP_AEAD_max_overhead(aead_cipher)
+ out_buf = backend._ffi.new("uint8_t[]", max_out_len)
+ data_ptr = backend._ffi.from_buffer(data)
+ nonce_ptr = backend._ffi.from_buffer(nonce)
+ aad = b"".join(associated_data)
+ aad_ptr = backend._ffi.from_buffer(aad)
+
+ res = backend._lib.EVP_AEAD_CTX_seal(
+ ctx,
+ out_buf,
+ out_len,
+ max_out_len,
+ nonce_ptr,
+ len(nonce),
+ data_ptr,
+ len(data),
+ aad_ptr,
+ len(aad),
+ )
+ backend.openssl_assert(res == 1)
+ encrypted_data = backend._ffi.buffer(out_buf, out_len[0])[:]
+ return encrypted_data
+
+
+def _evp_aead_decrypt(
+ backend: Backend,
+ cipher: _AEADTypes,
+ nonce: bytes,
+ data: bytes,
+ associated_data: typing.List[bytes],
+ tag_length: int,
+ ctx: typing.Any,
+) -> bytes:
+ if len(data) < tag_length:
+ raise InvalidTag
+
+ assert ctx is not None
+
+ out_len = backend._ffi.new("size_t *")
+    # max_out_len should be at least in_len
+ max_out_len = len(data)
+ out_buf = backend._ffi.new("uint8_t[]", max_out_len)
+ data_ptr = backend._ffi.from_buffer(data)
+ nonce_ptr = backend._ffi.from_buffer(nonce)
+ aad = b"".join(associated_data)
+ aad_ptr = backend._ffi.from_buffer(aad)
+
+ res = backend._lib.EVP_AEAD_CTX_open(
+ ctx,
+ out_buf,
+ out_len,
+ max_out_len,
+ nonce_ptr,
+ len(nonce),
+ data_ptr,
+ len(data),
+ aad_ptr,
+ len(aad),
+ )
+
+ if res == 0:
+ backend._consume_errors()
+ raise InvalidTag
+
+ decrypted_data = backend._ffi.buffer(out_buf, out_len[0])[:]
+ return decrypted_data
+
+
+_ENCRYPT = 1
+_DECRYPT = 0
+
+
+def _evp_cipher_cipher_name(cipher: _AEADTypes) -> bytes:
+ from cryptography.hazmat.primitives.ciphers.aead import (
+ AESCCM,
+ AESGCM,
+ AESOCB3,
+ AESSIV,
+ ChaCha20Poly1305,
+ )
+
+ if isinstance(cipher, ChaCha20Poly1305):
+ return b"chacha20-poly1305"
+ elif isinstance(cipher, AESCCM):
+ return f"aes-{len(cipher._key) * 8}-ccm".encode("ascii")
+ elif isinstance(cipher, AESOCB3):
+ return f"aes-{len(cipher._key) * 8}-ocb".encode("ascii")
+ elif isinstance(cipher, AESSIV):
+ return f"aes-{len(cipher._key) * 8 // 2}-siv".encode("ascii")
+ else:
+ assert isinstance(cipher, AESGCM)
+ return f"aes-{len(cipher._key) * 8}-gcm".encode("ascii")
+
+
+def _evp_cipher(cipher_name: bytes, backend: Backend):
+ if cipher_name.endswith(b"-siv"):
+ evp_cipher = backend._lib.EVP_CIPHER_fetch(
+ backend._ffi.NULL,
+ cipher_name,
+ backend._ffi.NULL,
+ )
+ backend.openssl_assert(evp_cipher != backend._ffi.NULL)
+ evp_cipher = backend._ffi.gc(evp_cipher, backend._lib.EVP_CIPHER_free)
+ else:
+ evp_cipher = backend._lib.EVP_get_cipherbyname(cipher_name)
+ backend.openssl_assert(evp_cipher != backend._ffi.NULL)
+
+ return evp_cipher
+
+
+def _evp_cipher_create_ctx(
+ backend: Backend,
+ cipher: _AEADTypes,
+ key: bytes,
+):
+ ctx = backend._lib.EVP_CIPHER_CTX_new()
+ backend.openssl_assert(ctx != backend._ffi.NULL)
+ ctx = backend._ffi.gc(ctx, backend._lib.EVP_CIPHER_CTX_free)
+ cipher_name = _evp_cipher_cipher_name(cipher)
+ evp_cipher = _evp_cipher(cipher_name, backend)
+ key_ptr = backend._ffi.from_buffer(key)
+ res = backend._lib.EVP_CipherInit_ex(
+ ctx,
+ evp_cipher,
+ backend._ffi.NULL,
+ key_ptr,
+ backend._ffi.NULL,
+ 0,
+ )
+ backend.openssl_assert(res != 0)
+ return ctx
+
+
+def _evp_cipher_aead_setup(
+ backend: Backend,
+ cipher_name: bytes,
+ key: bytes,
+ nonce: bytes,
+ tag: typing.Optional[bytes],
+ tag_len: int,
+ operation: int,
+):
+ evp_cipher = _evp_cipher(cipher_name, backend)
+ ctx = backend._lib.EVP_CIPHER_CTX_new()
+ ctx = backend._ffi.gc(ctx, backend._lib.EVP_CIPHER_CTX_free)
+ res = backend._lib.EVP_CipherInit_ex(
+ ctx,
+ evp_cipher,
+ backend._ffi.NULL,
+ backend._ffi.NULL,
+ backend._ffi.NULL,
+ int(operation == _ENCRYPT),
+ )
+ backend.openssl_assert(res != 0)
+ # CCM requires the IVLEN to be set before calling SET_TAG on decrypt
+ res = backend._lib.EVP_CIPHER_CTX_ctrl(
+ ctx,
+ backend._lib.EVP_CTRL_AEAD_SET_IVLEN,
+ len(nonce),
+ backend._ffi.NULL,
+ )
+ backend.openssl_assert(res != 0)
+ if operation == _DECRYPT:
+ assert tag is not None
+ _evp_cipher_set_tag(backend, ctx, tag)
+ elif cipher_name.endswith(b"-ccm"):
+ res = backend._lib.EVP_CIPHER_CTX_ctrl(
+ ctx,
+ backend._lib.EVP_CTRL_AEAD_SET_TAG,
+ tag_len,
+ backend._ffi.NULL,
+ )
+ backend.openssl_assert(res != 0)
+
+ nonce_ptr = backend._ffi.from_buffer(nonce)
+ key_ptr = backend._ffi.from_buffer(key)
+ res = backend._lib.EVP_CipherInit_ex(
+ ctx,
+ backend._ffi.NULL,
+ backend._ffi.NULL,
+ key_ptr,
+ nonce_ptr,
+ int(operation == _ENCRYPT),
+ )
+ backend.openssl_assert(res != 0)
+ return ctx
+
+
+def _evp_cipher_set_tag(backend, ctx, tag: bytes) -> None:
+ tag_ptr = backend._ffi.from_buffer(tag)
+ res = backend._lib.EVP_CIPHER_CTX_ctrl(
+ ctx, backend._lib.EVP_CTRL_AEAD_SET_TAG, len(tag), tag_ptr
+ )
+ backend.openssl_assert(res != 0)
+
+
+def _evp_cipher_set_nonce_operation(
+ backend, ctx, nonce: bytes, operation: int
+) -> None:
+ nonce_ptr = backend._ffi.from_buffer(nonce)
+ res = backend._lib.EVP_CipherInit_ex(
+ ctx,
+ backend._ffi.NULL,
+ backend._ffi.NULL,
+ backend._ffi.NULL,
+ nonce_ptr,
+ int(operation == _ENCRYPT),
+ )
+ backend.openssl_assert(res != 0)
+
+
+def _evp_cipher_set_length(backend: Backend, ctx, data_len: int) -> None:
+ intptr = backend._ffi.new("int *")
+ res = backend._lib.EVP_CipherUpdate(
+ ctx, backend._ffi.NULL, intptr, backend._ffi.NULL, data_len
+ )
+ backend.openssl_assert(res != 0)
+
+
+def _evp_cipher_process_aad(
+ backend: Backend, ctx, associated_data: bytes
+) -> None:
+ outlen = backend._ffi.new("int *")
+ a_data_ptr = backend._ffi.from_buffer(associated_data)
+ res = backend._lib.EVP_CipherUpdate(
+ ctx, backend._ffi.NULL, outlen, a_data_ptr, len(associated_data)
+ )
+ backend.openssl_assert(res != 0)
+
+
+def _evp_cipher_process_data(backend: Backend, ctx, data: bytes) -> bytes:
+ outlen = backend._ffi.new("int *")
+ buf = backend._ffi.new("unsigned char[]", len(data))
+ data_ptr = backend._ffi.from_buffer(data)
+ res = backend._lib.EVP_CipherUpdate(ctx, buf, outlen, data_ptr, len(data))
+ if res == 0:
+ # AES SIV can error here if the data is invalid on decrypt
+ backend._consume_errors()
+ raise InvalidTag
+ return backend._ffi.buffer(buf, outlen[0])[:]
+
+
+def _evp_cipher_encrypt(
+ backend: Backend,
+ cipher: _AEADTypes,
+ nonce: bytes,
+ data: bytes,
+ associated_data: typing.List[bytes],
+ tag_length: int,
+ ctx: typing.Any = None,
+) -> bytes:
+ from cryptography.hazmat.primitives.ciphers.aead import AESCCM, AESSIV
+
+ if ctx is None:
+ cipher_name = _evp_cipher_cipher_name(cipher)
+ ctx = _evp_cipher_aead_setup(
+ backend,
+ cipher_name,
+ cipher._key,
+ nonce,
+ None,
+ tag_length,
+ _ENCRYPT,
+ )
+ else:
+ _evp_cipher_set_nonce_operation(backend, ctx, nonce, _ENCRYPT)
+
+ # CCM requires us to pass the length of the data before processing
+ # anything.
+    # However, calling this with any other AEAD results in an error.
+ if isinstance(cipher, AESCCM):
+ _evp_cipher_set_length(backend, ctx, len(data))
+
+ for ad in associated_data:
+ _evp_cipher_process_aad(backend, ctx, ad)
+ processed_data = _evp_cipher_process_data(backend, ctx, data)
+ outlen = backend._ffi.new("int *")
+ # All AEADs we support besides OCB are streaming so they return nothing
+ # in finalization. OCB can return up to (16 byte block - 1) bytes so
+ # we need a buffer here too.
+ buf = backend._ffi.new("unsigned char[]", 16)
+ res = backend._lib.EVP_CipherFinal_ex(ctx, buf, outlen)
+ backend.openssl_assert(res != 0)
+ processed_data += backend._ffi.buffer(buf, outlen[0])[:]
+ tag_buf = backend._ffi.new("unsigned char[]", tag_length)
+ res = backend._lib.EVP_CIPHER_CTX_ctrl(
+ ctx, backend._lib.EVP_CTRL_AEAD_GET_TAG, tag_length, tag_buf
+ )
+ backend.openssl_assert(res != 0)
+ tag = backend._ffi.buffer(tag_buf)[:]
+
+ if isinstance(cipher, AESSIV):
+ # RFC 5297 defines the output as IV || C, where the tag we generate
+ # is the "IV" and C is the ciphertext. This is the opposite of our
+ # other AEADs, which are Ciphertext || Tag
+ backend.openssl_assert(len(tag) == 16)
+ return tag + processed_data
+ else:
+ return processed_data + tag
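+
+
+# Layout sketch (illustrative): AESSIV output is tag || ciphertext (RFC 5297
+# calls the tag the "IV"), whereas every other AEAD here returns
+# ciphertext || tag; _evp_cipher_decrypt below undoes the same split.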
+
+
+def _evp_cipher_decrypt(
+ backend: Backend,
+ cipher: _AEADTypes,
+ nonce: bytes,
+ data: bytes,
+ associated_data: typing.List[bytes],
+ tag_length: int,
+ ctx: typing.Any = None,
+) -> bytes:
+ from cryptography.hazmat.primitives.ciphers.aead import AESCCM, AESSIV
+
+ if len(data) < tag_length:
+ raise InvalidTag
+
+ if isinstance(cipher, AESSIV):
+ # RFC 5297 defines the output as IV || C, where the tag we generate
+ # is the "IV" and C is the ciphertext. This is the opposite of our
+ # other AEADs, which are Ciphertext || Tag
+ tag = data[:tag_length]
+ data = data[tag_length:]
+ else:
+ tag = data[-tag_length:]
+ data = data[:-tag_length]
+ if ctx is None:
+ cipher_name = _evp_cipher_cipher_name(cipher)
+ ctx = _evp_cipher_aead_setup(
+ backend,
+ cipher_name,
+ cipher._key,
+ nonce,
+ tag,
+ tag_length,
+ _DECRYPT,
+ )
+ else:
+ _evp_cipher_set_nonce_operation(backend, ctx, nonce, _DECRYPT)
+ _evp_cipher_set_tag(backend, ctx, tag)
+
+ # CCM requires us to pass the length of the data before processing
+ # anything.
+    # However, calling this with any other AEAD results in an error.
+ if isinstance(cipher, AESCCM):
+ _evp_cipher_set_length(backend, ctx, len(data))
+
+ for ad in associated_data:
+ _evp_cipher_process_aad(backend, ctx, ad)
+    # CCM has a different error path if the tag doesn't match: errors are
+    # raised in Update, and Final is irrelevant.
+ if isinstance(cipher, AESCCM):
+ outlen = backend._ffi.new("int *")
+ buf = backend._ffi.new("unsigned char[]", len(data))
+ d_ptr = backend._ffi.from_buffer(data)
+ res = backend._lib.EVP_CipherUpdate(ctx, buf, outlen, d_ptr, len(data))
+ if res != 1:
+ backend._consume_errors()
+ raise InvalidTag
+
+ processed_data = backend._ffi.buffer(buf, outlen[0])[:]
+ else:
+ processed_data = _evp_cipher_process_data(backend, ctx, data)
+ outlen = backend._ffi.new("int *")
+ # OCB can return up to 15 bytes (16 byte block - 1) in finalization
+ buf = backend._ffi.new("unsigned char[]", 16)
+ res = backend._lib.EVP_CipherFinal_ex(ctx, buf, outlen)
+ processed_data += backend._ffi.buffer(buf, outlen[0])[:]
+ if res == 0:
+ backend._consume_errors()
+ raise InvalidTag
+
+ return processed_data
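+
+
+# Round-trip sketch (illustrative, exercising these helpers through the
+# public AEAD layer rather than calling them directly):
+#
+#     import os
+#     from cryptography.hazmat.primitives.ciphers.aead import AESGCM
+#     key, nonce = AESGCM.generate_key(bit_length=128), os.urandom(12)
+#     ct = AESGCM(key).encrypt(nonce, b"secret", b"aad")
+#     assert AESGCM(key).decrypt(nonce, ct, b"aad") == b"secret"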
diff --git a/billinglayer/python/cryptography/hazmat/backends/openssl/backend.py b/billinglayer/python/cryptography/hazmat/backends/openssl/backend.py
new file mode 100644
index 0000000..02d5109
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/backends/openssl/backend.py
@@ -0,0 +1,1935 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import collections
+import contextlib
+import itertools
+import typing
+from contextlib import contextmanager
+
+from cryptography import utils, x509
+from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
+from cryptography.hazmat.backends.openssl import aead
+from cryptography.hazmat.backends.openssl.ciphers import _CipherContext
+from cryptography.hazmat.backends.openssl.cmac import _CMACContext
+from cryptography.hazmat.backends.openssl.ec import (
+ _EllipticCurvePrivateKey,
+ _EllipticCurvePublicKey,
+)
+from cryptography.hazmat.backends.openssl.rsa import (
+ _RSAPrivateKey,
+ _RSAPublicKey,
+)
+from cryptography.hazmat.bindings._rust import openssl as rust_openssl
+from cryptography.hazmat.bindings.openssl import binding
+from cryptography.hazmat.primitives import hashes, serialization
+from cryptography.hazmat.primitives._asymmetric import AsymmetricPadding
+from cryptography.hazmat.primitives.asymmetric import (
+ dh,
+ dsa,
+ ec,
+ ed448,
+ ed25519,
+ rsa,
+ x448,
+ x25519,
+)
+from cryptography.hazmat.primitives.asymmetric.padding import (
+ MGF1,
+ OAEP,
+ PSS,
+ PKCS1v15,
+)
+from cryptography.hazmat.primitives.asymmetric.types import (
+ PrivateKeyTypes,
+ PublicKeyTypes,
+)
+from cryptography.hazmat.primitives.ciphers import (
+ BlockCipherAlgorithm,
+ CipherAlgorithm,
+)
+from cryptography.hazmat.primitives.ciphers.algorithms import (
+ AES,
+ AES128,
+ AES256,
+ ARC4,
+ SM4,
+ Camellia,
+ ChaCha20,
+ TripleDES,
+ _BlowfishInternal,
+ _CAST5Internal,
+ _IDEAInternal,
+ _SEEDInternal,
+)
+from cryptography.hazmat.primitives.ciphers.modes import (
+ CBC,
+ CFB,
+ CFB8,
+ CTR,
+ ECB,
+ GCM,
+ OFB,
+ XTS,
+ Mode,
+)
+from cryptography.hazmat.primitives.serialization import ssh
+from cryptography.hazmat.primitives.serialization.pkcs12 import (
+ PBES,
+ PKCS12Certificate,
+ PKCS12KeyAndCertificates,
+ PKCS12PrivateKeyTypes,
+ _PKCS12CATypes,
+)
+
+_MemoryBIO = collections.namedtuple("_MemoryBIO", ["bio", "char_ptr"])
+
+
+# Not actually supported, just used as a marker for some serialization tests.
+class _RC2:
+ pass
+
+
+class Backend:
+ """
+ OpenSSL API binding interfaces.
+ """
+
+ name = "openssl"
+
+ # FIPS has opinions about acceptable algorithms and key sizes, but the
+ # disallowed algorithms are still present in OpenSSL. They just error if
+ # you try to use them. To avoid that we allowlist the algorithms in
+ # FIPS 140-3. This isn't ideal, but FIPS 140-3 is trash so here we are.
+ _fips_aead = {
+ b"aes-128-ccm",
+ b"aes-192-ccm",
+ b"aes-256-ccm",
+ b"aes-128-gcm",
+ b"aes-192-gcm",
+ b"aes-256-gcm",
+ }
+ # TripleDES encryption is disallowed/deprecated throughout 2023 in
+ # FIPS 140-3. To keep it simple we denylist any use of TripleDES (TDEA).
+ _fips_ciphers = (AES,)
+ # Sometimes SHA1 is still permissible. That logic is contained
+ # within the various *_supported methods.
+ _fips_hashes = (
+ hashes.SHA224,
+ hashes.SHA256,
+ hashes.SHA384,
+ hashes.SHA512,
+ hashes.SHA512_224,
+ hashes.SHA512_256,
+ hashes.SHA3_224,
+ hashes.SHA3_256,
+ hashes.SHA3_384,
+ hashes.SHA3_512,
+ hashes.SHAKE128,
+ hashes.SHAKE256,
+ )
+ _fips_ecdh_curves = (
+ ec.SECP224R1,
+ ec.SECP256R1,
+ ec.SECP384R1,
+ ec.SECP521R1,
+ )
+ _fips_rsa_min_key_size = 2048
+ _fips_rsa_min_public_exponent = 65537
+ _fips_dsa_min_modulus = 1 << 2048
+ _fips_dh_min_key_size = 2048
+ _fips_dh_min_modulus = 1 << _fips_dh_min_key_size
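+
+    # Sketch (illustrative): these tables gate feature checks under FIPS; for
+    # example _aead_cipher_supported() in aead.py consults _fips_aead, so
+    # AES-GCM stays usable while chacha20-poly1305 reports unsupported.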
+
+ def __init__(self) -> None:
+ self._binding = binding.Binding()
+ self._ffi = self._binding.ffi
+ self._lib = self._binding.lib
+ self._fips_enabled = rust_openssl.is_fips_enabled()
+
+ self._cipher_registry: typing.Dict[
+ typing.Tuple[typing.Type[CipherAlgorithm], typing.Type[Mode]],
+ typing.Callable,
+ ] = {}
+ self._register_default_ciphers()
+ self._dh_types = [self._lib.EVP_PKEY_DH]
+ if self._lib.Cryptography_HAS_EVP_PKEY_DHX:
+ self._dh_types.append(self._lib.EVP_PKEY_DHX)
+
+ def __repr__(self) -> str:
+ return "".format(
+ self.openssl_version_text(),
+ self._fips_enabled,
+ self._binding._legacy_provider_loaded,
+ )
+
+ def openssl_assert(
+ self,
+ ok: bool,
+ errors: typing.Optional[typing.List[rust_openssl.OpenSSLError]] = None,
+ ) -> None:
+ return binding._openssl_assert(self._lib, ok, errors=errors)
+
+ def _enable_fips(self) -> None:
+ # This function enables FIPS mode for OpenSSL 3.0.0 on installs that
+ # have the FIPS provider installed properly.
+ self._binding._enable_fips()
+ assert rust_openssl.is_fips_enabled()
+ self._fips_enabled = rust_openssl.is_fips_enabled()
+
+ def openssl_version_text(self) -> str:
+ """
+ Friendly string name of the loaded OpenSSL library. This is not
+ necessarily the same version as it was compiled against.
+
+ Example: OpenSSL 1.1.1d 10 Sep 2019
+ """
+ return self._ffi.string(
+ self._lib.OpenSSL_version(self._lib.OPENSSL_VERSION)
+ ).decode("ascii")
+
+ def openssl_version_number(self) -> int:
+ return self._lib.OpenSSL_version_num()
+
+ def _evp_md_from_algorithm(self, algorithm: hashes.HashAlgorithm):
+ if algorithm.name == "blake2b" or algorithm.name == "blake2s":
+ alg = "{}{}".format(
+ algorithm.name, algorithm.digest_size * 8
+ ).encode("ascii")
+ else:
+ alg = algorithm.name.encode("ascii")
+
+ evp_md = self._lib.EVP_get_digestbyname(alg)
+ return evp_md
+
+ def _evp_md_non_null_from_algorithm(self, algorithm: hashes.HashAlgorithm):
+ evp_md = self._evp_md_from_algorithm(algorithm)
+ self.openssl_assert(evp_md != self._ffi.NULL)
+ return evp_md
+
+ def hash_supported(self, algorithm: hashes.HashAlgorithm) -> bool:
+ if self._fips_enabled and not isinstance(algorithm, self._fips_hashes):
+ return False
+
+ evp_md = self._evp_md_from_algorithm(algorithm)
+ return evp_md != self._ffi.NULL
+
+ def signature_hash_supported(
+ self, algorithm: hashes.HashAlgorithm
+ ) -> bool:
+ # Dedicated check for hashing algorithm use in message digest for
+ # signatures, e.g. RSA PKCS#1 v1.5 SHA1 (sha1WithRSAEncryption).
+ if self._fips_enabled and isinstance(algorithm, hashes.SHA1):
+ return False
+ return self.hash_supported(algorithm)
+
+ def scrypt_supported(self) -> bool:
+ if self._fips_enabled:
+ return False
+ else:
+ return self._lib.Cryptography_HAS_SCRYPT == 1
+
+ def hmac_supported(self, algorithm: hashes.HashAlgorithm) -> bool:
+ # FIPS mode still allows SHA1 for HMAC
+ if self._fips_enabled and isinstance(algorithm, hashes.SHA1):
+ return True
+
+ return self.hash_supported(algorithm)
+
+ def cipher_supported(self, cipher: CipherAlgorithm, mode: Mode) -> bool:
+ if self._fips_enabled:
+ # FIPS mode requires AES. TripleDES is disallowed/deprecated in
+ # FIPS 140-3.
+ if not isinstance(cipher, self._fips_ciphers):
+ return False
+
+ try:
+ adapter = self._cipher_registry[type(cipher), type(mode)]
+ except KeyError:
+ return False
+ evp_cipher = adapter(self, cipher, mode)
+ return self._ffi.NULL != evp_cipher
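+
+    # Usage sketch (illustrative):
+    #
+    #     import os
+    #     backend.cipher_supported(AES(os.urandom(16)), CBC(os.urandom(16)))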
+
+ def register_cipher_adapter(self, cipher_cls, mode_cls, adapter) -> None:
+ if (cipher_cls, mode_cls) in self._cipher_registry:
+ raise ValueError(
+ "Duplicate registration for: {} {}.".format(
+ cipher_cls, mode_cls
+ )
+ )
+ self._cipher_registry[cipher_cls, mode_cls] = adapter
+
+ def _register_default_ciphers(self) -> None:
+ for cipher_cls in [AES, AES128, AES256]:
+ for mode_cls in [CBC, CTR, ECB, OFB, CFB, CFB8, GCM]:
+ self.register_cipher_adapter(
+ cipher_cls,
+ mode_cls,
+ GetCipherByName(
+ "{cipher.name}-{cipher.key_size}-{mode.name}"
+ ),
+ )
+ for mode_cls in [CBC, CTR, ECB, OFB, CFB]:
+ self.register_cipher_adapter(
+ Camellia,
+ mode_cls,
+ GetCipherByName("{cipher.name}-{cipher.key_size}-{mode.name}"),
+ )
+ for mode_cls in [CBC, CFB, CFB8, OFB]:
+ self.register_cipher_adapter(
+ TripleDES, mode_cls, GetCipherByName("des-ede3-{mode.name}")
+ )
+ self.register_cipher_adapter(
+ TripleDES, ECB, GetCipherByName("des-ede3")
+ )
+ self.register_cipher_adapter(
+ ChaCha20, type(None), GetCipherByName("chacha20")
+ )
+ self.register_cipher_adapter(AES, XTS, _get_xts_cipher)
+ for mode_cls in [ECB, CBC, OFB, CFB, CTR]:
+ self.register_cipher_adapter(
+ SM4, mode_cls, GetCipherByName("sm4-{mode.name}")
+ )
+ # Don't register legacy ciphers if they're unavailable. Hypothetically
+ # this wouldn't be necessary because we test availability by seeing if
+ # we get an EVP_CIPHER * in the _CipherContext __init__, but OpenSSL 3
+ # will return a valid pointer even though the cipher is unavailable.
+ if (
+ self._binding._legacy_provider_loaded
+ or not self._lib.CRYPTOGRAPHY_OPENSSL_300_OR_GREATER
+ ):
+ for mode_cls in [CBC, CFB, OFB, ECB]:
+ self.register_cipher_adapter(
+ _BlowfishInternal,
+ mode_cls,
+ GetCipherByName("bf-{mode.name}"),
+ )
+ for mode_cls in [CBC, CFB, OFB, ECB]:
+ self.register_cipher_adapter(
+ _SEEDInternal,
+ mode_cls,
+ GetCipherByName("seed-{mode.name}"),
+ )
+ for cipher_cls, mode_cls in itertools.product(
+ [_CAST5Internal, _IDEAInternal],
+ [CBC, OFB, CFB, ECB],
+ ):
+ self.register_cipher_adapter(
+ cipher_cls,
+ mode_cls,
+ GetCipherByName("{cipher.name}-{mode.name}"),
+ )
+ self.register_cipher_adapter(
+ ARC4, type(None), GetCipherByName("rc4")
+ )
+ # We don't actually support RC2, this is just used by some tests.
+ self.register_cipher_adapter(
+ _RC2, type(None), GetCipherByName("rc2")
+ )
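+
+    # Name-template sketch (illustrative): an adapter such as
+    # GetCipherByName("{cipher.name}-{cipher.key_size}-{mode.name}") formats
+    # AES-128 in CBC mode to "aes-128-cbc" before resolving it with
+    # EVP_get_cipherbyname.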
+
+ def create_symmetric_encryption_ctx(
+ self, cipher: CipherAlgorithm, mode: Mode
+ ) -> _CipherContext:
+ return _CipherContext(self, cipher, mode, _CipherContext._ENCRYPT)
+
+ def create_symmetric_decryption_ctx(
+ self, cipher: CipherAlgorithm, mode: Mode
+ ) -> _CipherContext:
+ return _CipherContext(self, cipher, mode, _CipherContext._DECRYPT)
+
+ def pbkdf2_hmac_supported(self, algorithm: hashes.HashAlgorithm) -> bool:
+ return self.hmac_supported(algorithm)
+
+ def _consume_errors(self) -> typing.List[rust_openssl.OpenSSLError]:
+ return rust_openssl.capture_error_stack()
+
+ def _bn_to_int(self, bn) -> int:
+ assert bn != self._ffi.NULL
+ self.openssl_assert(not self._lib.BN_is_negative(bn))
+
+ bn_num_bytes = self._lib.BN_num_bytes(bn)
+ bin_ptr = self._ffi.new("unsigned char[]", bn_num_bytes)
+ bin_len = self._lib.BN_bn2bin(bn, bin_ptr)
+ # A zero length means the BN has value 0
+ self.openssl_assert(bin_len >= 0)
+ val = int.from_bytes(self._ffi.buffer(bin_ptr)[:bin_len], "big")
+ return val
+
+ def _int_to_bn(self, num: int):
+ """
+ Converts a python integer to a BIGNUM. The returned BIGNUM will not
+        be garbage collected (to support adding it to structs that take
+ ownership of the object). Be sure to register it for GC if it will
+ be discarded after use.
+ """
+ binary = num.to_bytes(int(num.bit_length() / 8.0 + 1), "big")
+ bn_ptr = self._lib.BN_bin2bn(binary, len(binary), self._ffi.NULL)
+ self.openssl_assert(bn_ptr != self._ffi.NULL)
+ return bn_ptr
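+
+    # Worked example (illustrative): for num = 65537, bit_length() is 17, so
+    # int(17 / 8.0 + 1) == 3 and BN_bin2bn reads the 3 bytes b"\x01\x00\x01".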
+
+ def generate_rsa_private_key(
+ self, public_exponent: int, key_size: int
+ ) -> rsa.RSAPrivateKey:
+ rsa._verify_rsa_parameters(public_exponent, key_size)
+
+ rsa_cdata = self._lib.RSA_new()
+ self.openssl_assert(rsa_cdata != self._ffi.NULL)
+ rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free)
+
+ bn = self._int_to_bn(public_exponent)
+ bn = self._ffi.gc(bn, self._lib.BN_free)
+
+ res = self._lib.RSA_generate_key_ex(
+ rsa_cdata, key_size, bn, self._ffi.NULL
+ )
+ self.openssl_assert(res == 1)
+ evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata)
+
+ # We can skip RSA key validation here since we just generated the key
+ return _RSAPrivateKey(
+ self, rsa_cdata, evp_pkey, unsafe_skip_rsa_key_validation=True
+ )
+
+ def generate_rsa_parameters_supported(
+ self, public_exponent: int, key_size: int
+ ) -> bool:
+ return (
+ public_exponent >= 3
+ and public_exponent & 1 != 0
+ and key_size >= 512
+ )
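+
+    # Worked example (illustrative): (65537, 2048) passes (odd exponent >= 3,
+    # key size >= 512), while an even exponent such as 4 fails the
+    # public_exponent & 1 check.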
+
+ def load_rsa_private_numbers(
+ self,
+ numbers: rsa.RSAPrivateNumbers,
+ unsafe_skip_rsa_key_validation: bool,
+ ) -> rsa.RSAPrivateKey:
+ rsa._check_private_key_components(
+ numbers.p,
+ numbers.q,
+ numbers.d,
+ numbers.dmp1,
+ numbers.dmq1,
+ numbers.iqmp,
+ numbers.public_numbers.e,
+ numbers.public_numbers.n,
+ )
+ rsa_cdata = self._lib.RSA_new()
+ self.openssl_assert(rsa_cdata != self._ffi.NULL)
+ rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free)
+ p = self._int_to_bn(numbers.p)
+ q = self._int_to_bn(numbers.q)
+ d = self._int_to_bn(numbers.d)
+ dmp1 = self._int_to_bn(numbers.dmp1)
+ dmq1 = self._int_to_bn(numbers.dmq1)
+ iqmp = self._int_to_bn(numbers.iqmp)
+ e = self._int_to_bn(numbers.public_numbers.e)
+ n = self._int_to_bn(numbers.public_numbers.n)
+ res = self._lib.RSA_set0_factors(rsa_cdata, p, q)
+ self.openssl_assert(res == 1)
+ res = self._lib.RSA_set0_key(rsa_cdata, n, e, d)
+ self.openssl_assert(res == 1)
+ res = self._lib.RSA_set0_crt_params(rsa_cdata, dmp1, dmq1, iqmp)
+ self.openssl_assert(res == 1)
+ evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata)
+
+ return _RSAPrivateKey(
+ self,
+ rsa_cdata,
+ evp_pkey,
+ unsafe_skip_rsa_key_validation=unsafe_skip_rsa_key_validation,
+ )
+
+ def load_rsa_public_numbers(
+ self, numbers: rsa.RSAPublicNumbers
+ ) -> rsa.RSAPublicKey:
+ rsa._check_public_key_components(numbers.e, numbers.n)
+ rsa_cdata = self._lib.RSA_new()
+ self.openssl_assert(rsa_cdata != self._ffi.NULL)
+ rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free)
+ e = self._int_to_bn(numbers.e)
+ n = self._int_to_bn(numbers.n)
+ res = self._lib.RSA_set0_key(rsa_cdata, n, e, self._ffi.NULL)
+ self.openssl_assert(res == 1)
+ evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata)
+
+ return _RSAPublicKey(self, rsa_cdata, evp_pkey)
+
+ def _create_evp_pkey_gc(self):
+ evp_pkey = self._lib.EVP_PKEY_new()
+ self.openssl_assert(evp_pkey != self._ffi.NULL)
+ evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
+ return evp_pkey
+
+ def _rsa_cdata_to_evp_pkey(self, rsa_cdata):
+ evp_pkey = self._create_evp_pkey_gc()
+ res = self._lib.EVP_PKEY_set1_RSA(evp_pkey, rsa_cdata)
+ self.openssl_assert(res == 1)
+ return evp_pkey
+
+ def _bytes_to_bio(self, data: bytes) -> _MemoryBIO:
+ """
+ Return a _MemoryBIO namedtuple of (BIO, char*).
+
+ The char* is the storage for the BIO and it must stay alive until the
+ BIO is finished with.
+ """
+ data_ptr = self._ffi.from_buffer(data)
+ bio = self._lib.BIO_new_mem_buf(data_ptr, len(data))
+ self.openssl_assert(bio != self._ffi.NULL)
+
+ return _MemoryBIO(self._ffi.gc(bio, self._lib.BIO_free), data_ptr)
+
+ def _create_mem_bio_gc(self):
+ """
+ Creates an empty memory BIO.
+ """
+ bio_method = self._lib.BIO_s_mem()
+ self.openssl_assert(bio_method != self._ffi.NULL)
+ bio = self._lib.BIO_new(bio_method)
+ self.openssl_assert(bio != self._ffi.NULL)
+ bio = self._ffi.gc(bio, self._lib.BIO_free)
+ return bio
+
+ def _read_mem_bio(self, bio) -> bytes:
+ """
+ Reads a memory BIO. This only works on memory BIOs.
+ """
+ buf = self._ffi.new("char **")
+ buf_len = self._lib.BIO_get_mem_data(bio, buf)
+ self.openssl_assert(buf_len > 0)
+ self.openssl_assert(buf[0] != self._ffi.NULL)
+ bio_data = self._ffi.buffer(buf[0], buf_len)[:]
+ return bio_data
+
+ def _evp_pkey_to_private_key(
+ self, evp_pkey, unsafe_skip_rsa_key_validation: bool
+ ) -> PrivateKeyTypes:
+ """
+ Return the appropriate type of PrivateKey given an evp_pkey cdata
+ pointer.
+ """
+
+ key_type = self._lib.EVP_PKEY_id(evp_pkey)
+
+ if key_type == self._lib.EVP_PKEY_RSA:
+ rsa_cdata = self._lib.EVP_PKEY_get1_RSA(evp_pkey)
+ self.openssl_assert(rsa_cdata != self._ffi.NULL)
+ rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free)
+ return _RSAPrivateKey(
+ self,
+ rsa_cdata,
+ evp_pkey,
+ unsafe_skip_rsa_key_validation=unsafe_skip_rsa_key_validation,
+ )
+ elif (
+ key_type == self._lib.EVP_PKEY_RSA_PSS
+ and not self._lib.CRYPTOGRAPHY_IS_LIBRESSL
+ and not self._lib.CRYPTOGRAPHY_IS_BORINGSSL
+ and not self._lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_111E
+ ):
+ # At the moment the way we handle RSA PSS keys is to strip the
+ # PSS constraints from them and treat them as normal RSA keys
+ # Unfortunately the RSA * itself tracks this data so we need to
+ # extract, serialize, and reload it without the constraints.
+ rsa_cdata = self._lib.EVP_PKEY_get1_RSA(evp_pkey)
+ self.openssl_assert(rsa_cdata != self._ffi.NULL)
+ rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free)
+ bio = self._create_mem_bio_gc()
+ res = self._lib.i2d_RSAPrivateKey_bio(bio, rsa_cdata)
+ self.openssl_assert(res == 1)
+ return self.load_der_private_key(
+ self._read_mem_bio(bio),
+ password=None,
+ unsafe_skip_rsa_key_validation=unsafe_skip_rsa_key_validation,
+ )
+ elif key_type == self._lib.EVP_PKEY_DSA:
+ return rust_openssl.dsa.private_key_from_ptr(
+ int(self._ffi.cast("uintptr_t", evp_pkey))
+ )
+ elif key_type == self._lib.EVP_PKEY_EC:
+ ec_cdata = self._lib.EVP_PKEY_get1_EC_KEY(evp_pkey)
+ self.openssl_assert(ec_cdata != self._ffi.NULL)
+ ec_cdata = self._ffi.gc(ec_cdata, self._lib.EC_KEY_free)
+ return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey)
+ elif key_type in self._dh_types:
+ return rust_openssl.dh.private_key_from_ptr(
+ int(self._ffi.cast("uintptr_t", evp_pkey))
+ )
+ elif key_type == getattr(self._lib, "EVP_PKEY_ED25519", None):
+ # EVP_PKEY_ED25519 is not present in CRYPTOGRAPHY_IS_LIBRESSL
+ return rust_openssl.ed25519.private_key_from_ptr(
+ int(self._ffi.cast("uintptr_t", evp_pkey))
+ )
+ elif key_type == getattr(self._lib, "EVP_PKEY_X448", None):
+ # EVP_PKEY_X448 is not present in CRYPTOGRAPHY_IS_LIBRESSL
+ return rust_openssl.x448.private_key_from_ptr(
+ int(self._ffi.cast("uintptr_t", evp_pkey))
+ )
+ elif key_type == self._lib.EVP_PKEY_X25519:
+ return rust_openssl.x25519.private_key_from_ptr(
+ int(self._ffi.cast("uintptr_t", evp_pkey))
+ )
+ elif key_type == getattr(self._lib, "EVP_PKEY_ED448", None):
+ # EVP_PKEY_ED448 is not present in CRYPTOGRAPHY_IS_LIBRESSL
+ return rust_openssl.ed448.private_key_from_ptr(
+ int(self._ffi.cast("uintptr_t", evp_pkey))
+ )
+ else:
+ raise UnsupportedAlgorithm("Unsupported key type.")
+
+ def _evp_pkey_to_public_key(self, evp_pkey) -> PublicKeyTypes:
+ """
+ Return the appropriate type of PublicKey given an evp_pkey cdata
+ pointer.
+ """
+
+ key_type = self._lib.EVP_PKEY_id(evp_pkey)
+
+ if key_type == self._lib.EVP_PKEY_RSA:
+ rsa_cdata = self._lib.EVP_PKEY_get1_RSA(evp_pkey)
+ self.openssl_assert(rsa_cdata != self._ffi.NULL)
+ rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free)
+ return _RSAPublicKey(self, rsa_cdata, evp_pkey)
+ elif (
+ key_type == self._lib.EVP_PKEY_RSA_PSS
+ and not self._lib.CRYPTOGRAPHY_IS_LIBRESSL
+ and not self._lib.CRYPTOGRAPHY_IS_BORINGSSL
+ and not self._lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_111E
+ ):
+ rsa_cdata = self._lib.EVP_PKEY_get1_RSA(evp_pkey)
+ self.openssl_assert(rsa_cdata != self._ffi.NULL)
+ rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free)
+ bio = self._create_mem_bio_gc()
+ res = self._lib.i2d_RSAPublicKey_bio(bio, rsa_cdata)
+ self.openssl_assert(res == 1)
+ return self.load_der_public_key(self._read_mem_bio(bio))
+ elif key_type == self._lib.EVP_PKEY_DSA:
+ return rust_openssl.dsa.public_key_from_ptr(
+ int(self._ffi.cast("uintptr_t", evp_pkey))
+ )
+ elif key_type == self._lib.EVP_PKEY_EC:
+ ec_cdata = self._lib.EVP_PKEY_get1_EC_KEY(evp_pkey)
+ if ec_cdata == self._ffi.NULL:
+ errors = self._consume_errors()
+ raise ValueError("Unable to load EC key", errors)
+ ec_cdata = self._ffi.gc(ec_cdata, self._lib.EC_KEY_free)
+ return _EllipticCurvePublicKey(self, ec_cdata, evp_pkey)
+ elif key_type in self._dh_types:
+ return rust_openssl.dh.public_key_from_ptr(
+ int(self._ffi.cast("uintptr_t", evp_pkey))
+ )
+ elif key_type == getattr(self._lib, "EVP_PKEY_ED25519", None):
+ # EVP_PKEY_ED25519 is not present in CRYPTOGRAPHY_IS_LIBRESSL
+ return rust_openssl.ed25519.public_key_from_ptr(
+ int(self._ffi.cast("uintptr_t", evp_pkey))
+ )
+ elif key_type == getattr(self._lib, "EVP_PKEY_X448", None):
+ # EVP_PKEY_X448 is not present in CRYPTOGRAPHY_IS_LIBRESSL
+ return rust_openssl.x448.public_key_from_ptr(
+ int(self._ffi.cast("uintptr_t", evp_pkey))
+ )
+ elif key_type == self._lib.EVP_PKEY_X25519:
+ return rust_openssl.x25519.public_key_from_ptr(
+ int(self._ffi.cast("uintptr_t", evp_pkey))
+ )
+ elif key_type == getattr(self._lib, "EVP_PKEY_ED448", None):
+ # EVP_PKEY_ED448 is not present in CRYPTOGRAPHY_IS_LIBRESSL
+ return rust_openssl.ed448.public_key_from_ptr(
+ int(self._ffi.cast("uintptr_t", evp_pkey))
+ )
+ else:
+ raise UnsupportedAlgorithm("Unsupported key type.")
+
+ def _oaep_hash_supported(self, algorithm: hashes.HashAlgorithm) -> bool:
+ if self._fips_enabled and isinstance(algorithm, hashes.SHA1):
+ return False
+
+ return isinstance(
+ algorithm,
+ (
+ hashes.SHA1,
+ hashes.SHA224,
+ hashes.SHA256,
+ hashes.SHA384,
+ hashes.SHA512,
+ ),
+ )
+
+ def rsa_padding_supported(self, padding: AsymmetricPadding) -> bool:
+ if isinstance(padding, PKCS1v15):
+ return True
+ elif isinstance(padding, PSS) and isinstance(padding._mgf, MGF1):
+ # SHA1 is permissible in MGF1 in FIPS even when SHA1 is blocked
+ # as signature algorithm.
+ if self._fips_enabled and isinstance(
+ padding._mgf._algorithm, hashes.SHA1
+ ):
+ return True
+ else:
+ return self.hash_supported(padding._mgf._algorithm)
+ elif isinstance(padding, OAEP) and isinstance(padding._mgf, MGF1):
+ return self._oaep_hash_supported(
+ padding._mgf._algorithm
+ ) and self._oaep_hash_supported(padding._algorithm)
+ else:
+ return False
+
+ def rsa_encryption_supported(self, padding: AsymmetricPadding) -> bool:
+ if self._fips_enabled and isinstance(padding, PKCS1v15):
+ return False
+ else:
+ return self.rsa_padding_supported(padding)
+
+ def generate_dsa_parameters(self, key_size: int) -> dsa.DSAParameters:
+ if key_size not in (1024, 2048, 3072, 4096):
+ raise ValueError(
+ "Key size must be 1024, 2048, 3072, or 4096 bits."
+ )
+
+ return rust_openssl.dsa.generate_parameters(key_size)
+
+ def generate_dsa_private_key(
+ self, parameters: dsa.DSAParameters
+ ) -> dsa.DSAPrivateKey:
+ return parameters.generate_private_key()
+
+ def generate_dsa_private_key_and_parameters(
+ self, key_size: int
+ ) -> dsa.DSAPrivateKey:
+ parameters = self.generate_dsa_parameters(key_size)
+ return self.generate_dsa_private_key(parameters)
+
+ def load_dsa_private_numbers(
+ self, numbers: dsa.DSAPrivateNumbers
+ ) -> dsa.DSAPrivateKey:
+ dsa._check_dsa_private_numbers(numbers)
+ return rust_openssl.dsa.from_private_numbers(numbers)
+
+ def load_dsa_public_numbers(
+ self, numbers: dsa.DSAPublicNumbers
+ ) -> dsa.DSAPublicKey:
+ dsa._check_dsa_parameters(numbers.parameter_numbers)
+ return rust_openssl.dsa.from_public_numbers(numbers)
+
+ def load_dsa_parameter_numbers(
+ self, numbers: dsa.DSAParameterNumbers
+ ) -> dsa.DSAParameters:
+ dsa._check_dsa_parameters(numbers)
+ return rust_openssl.dsa.from_parameter_numbers(numbers)
+
+ def dsa_supported(self) -> bool:
+ return (
+ not self._lib.CRYPTOGRAPHY_IS_BORINGSSL and not self._fips_enabled
+ )
+
+ def dsa_hash_supported(self, algorithm: hashes.HashAlgorithm) -> bool:
+ if not self.dsa_supported():
+ return False
+ return self.signature_hash_supported(algorithm)
+
+ def cmac_algorithm_supported(self, algorithm) -> bool:
+ return self.cipher_supported(
+ algorithm, CBC(b"\x00" * algorithm.block_size)
+ )
+
+ def create_cmac_ctx(self, algorithm: BlockCipherAlgorithm) -> _CMACContext:
+ return _CMACContext(self, algorithm)
+
+ def load_pem_private_key(
+ self,
+ data: bytes,
+ password: typing.Optional[bytes],
+ unsafe_skip_rsa_key_validation: bool,
+ ) -> PrivateKeyTypes:
+ return self._load_key(
+ self._lib.PEM_read_bio_PrivateKey,
+ data,
+ password,
+ unsafe_skip_rsa_key_validation,
+ )
+
+ def load_pem_public_key(self, data: bytes) -> PublicKeyTypes:
+ mem_bio = self._bytes_to_bio(data)
+ # In OpenSSL 3.0.x the PEM_read_bio_PUBKEY function will invoke
+ # the default password callback if you pass an encrypted private
+ # key. This is very, very, very bad as the default callback can
+ # trigger an interactive console prompt, which will hang the
+ # Python process. We therefore provide our own callback to
+ # catch this and error out properly.
+ userdata = self._ffi.new("CRYPTOGRAPHY_PASSWORD_DATA *")
+ evp_pkey = self._lib.PEM_read_bio_PUBKEY(
+ mem_bio.bio,
+ self._ffi.NULL,
+ self._ffi.addressof(
+ self._lib._original_lib, "Cryptography_pem_password_cb"
+ ),
+ userdata,
+ )
+ if evp_pkey != self._ffi.NULL:
+ evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
+ return self._evp_pkey_to_public_key(evp_pkey)
+ else:
+ # It's not a (RSA/DSA/ECDSA) subjectPublicKeyInfo, but we still
+ # need to check to see if it is a pure PKCS1 RSA public key (not
+ # embedded in a subjectPublicKeyInfo)
+ self._consume_errors()
+ res = self._lib.BIO_reset(mem_bio.bio)
+ self.openssl_assert(res == 1)
+ rsa_cdata = self._lib.PEM_read_bio_RSAPublicKey(
+ mem_bio.bio,
+ self._ffi.NULL,
+ self._ffi.addressof(
+ self._lib._original_lib, "Cryptography_pem_password_cb"
+ ),
+ userdata,
+ )
+ if rsa_cdata != self._ffi.NULL:
+ rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free)
+ evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata)
+ return _RSAPublicKey(self, rsa_cdata, evp_pkey)
+ else:
+ self._handle_key_loading_error()
+
+ def load_pem_parameters(self, data: bytes) -> dh.DHParameters:
+ return rust_openssl.dh.from_pem_parameters(data)
+
+ def load_der_private_key(
+ self,
+ data: bytes,
+ password: typing.Optional[bytes],
+ unsafe_skip_rsa_key_validation: bool,
+ ) -> PrivateKeyTypes:
+ # OpenSSL has a function called d2i_AutoPrivateKey that in theory
+        # handles this automatically; however, it doesn't handle encrypted
+ # private keys. Instead we try to load the key two different ways.
+ # First we'll try to load it as a traditional key.
+ bio_data = self._bytes_to_bio(data)
+ key = self._evp_pkey_from_der_traditional_key(bio_data, password)
+ if key:
+ return self._evp_pkey_to_private_key(
+ key, unsafe_skip_rsa_key_validation
+ )
+ else:
+ # Finally we try to load it with the method that handles encrypted
+ # PKCS8 properly.
+ return self._load_key(
+ self._lib.d2i_PKCS8PrivateKey_bio,
+ data,
+ password,
+ unsafe_skip_rsa_key_validation,
+ )
+
+ def _evp_pkey_from_der_traditional_key(self, bio_data, password):
+ key = self._lib.d2i_PrivateKey_bio(bio_data.bio, self._ffi.NULL)
+ if key != self._ffi.NULL:
+ key = self._ffi.gc(key, self._lib.EVP_PKEY_free)
+ if password is not None:
+ raise TypeError(
+ "Password was given but private key is not encrypted."
+ )
+
+ return key
+ else:
+ self._consume_errors()
+ return None
+
+ def load_der_public_key(self, data: bytes) -> PublicKeyTypes:
+ mem_bio = self._bytes_to_bio(data)
+ evp_pkey = self._lib.d2i_PUBKEY_bio(mem_bio.bio, self._ffi.NULL)
+ if evp_pkey != self._ffi.NULL:
+ evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
+ return self._evp_pkey_to_public_key(evp_pkey)
+ else:
+ # It's not a (RSA/DSA/ECDSA) subjectPublicKeyInfo, but we still
+ # need to check to see if it is a pure PKCS1 RSA public key (not
+ # embedded in a subjectPublicKeyInfo)
+ self._consume_errors()
+ res = self._lib.BIO_reset(mem_bio.bio)
+ self.openssl_assert(res == 1)
+ rsa_cdata = self._lib.d2i_RSAPublicKey_bio(
+ mem_bio.bio, self._ffi.NULL
+ )
+ if rsa_cdata != self._ffi.NULL:
+ rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free)
+ evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata)
+ return _RSAPublicKey(self, rsa_cdata, evp_pkey)
+ else:
+ self._handle_key_loading_error()
+
+ def load_der_parameters(self, data: bytes) -> dh.DHParameters:
+ return rust_openssl.dh.from_der_parameters(data)
+
+ def _cert2ossl(self, cert: x509.Certificate) -> typing.Any:
+ data = cert.public_bytes(serialization.Encoding.DER)
+ mem_bio = self._bytes_to_bio(data)
+ x509 = self._lib.d2i_X509_bio(mem_bio.bio, self._ffi.NULL)
+ self.openssl_assert(x509 != self._ffi.NULL)
+ x509 = self._ffi.gc(x509, self._lib.X509_free)
+ return x509
+
+ def _ossl2cert(self, x509_ptr: typing.Any) -> x509.Certificate:
+ bio = self._create_mem_bio_gc()
+ res = self._lib.i2d_X509_bio(bio, x509_ptr)
+ self.openssl_assert(res == 1)
+ return x509.load_der_x509_certificate(self._read_mem_bio(bio))
+
+ def _key2ossl(self, key: PKCS12PrivateKeyTypes) -> typing.Any:
+ data = key.private_bytes(
+ serialization.Encoding.DER,
+ serialization.PrivateFormat.PKCS8,
+ serialization.NoEncryption(),
+ )
+ mem_bio = self._bytes_to_bio(data)
+
+ evp_pkey = self._lib.d2i_PrivateKey_bio(
+ mem_bio.bio,
+ self._ffi.NULL,
+ )
+ self.openssl_assert(evp_pkey != self._ffi.NULL)
+ return self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
+
+ def _load_key(
+ self, openssl_read_func, data, password, unsafe_skip_rsa_key_validation
+ ) -> PrivateKeyTypes:
+ mem_bio = self._bytes_to_bio(data)
+
+ userdata = self._ffi.new("CRYPTOGRAPHY_PASSWORD_DATA *")
+ if password is not None:
+ utils._check_byteslike("password", password)
+ password_ptr = self._ffi.from_buffer(password)
+ userdata.password = password_ptr
+ userdata.length = len(password)
+
+ evp_pkey = openssl_read_func(
+ mem_bio.bio,
+ self._ffi.NULL,
+ self._ffi.addressof(
+ self._lib._original_lib, "Cryptography_pem_password_cb"
+ ),
+ userdata,
+ )
+
+ if evp_pkey == self._ffi.NULL:
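+            # The password callback reports failure through userdata.error:
+            # -1 means a password was required but not supplied, and -2
+            # means the supplied password did not fit in the callback's
+            # buffer (see the raises below).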
+ if userdata.error != 0:
+ self._consume_errors()
+ if userdata.error == -1:
+ raise TypeError(
+ "Password was not given but private key is encrypted"
+ )
+ else:
+ assert userdata.error == -2
+ raise ValueError(
+ "Passwords longer than {} bytes are not supported "
+ "by this backend.".format(userdata.maxsize - 1)
+ )
+ else:
+ self._handle_key_loading_error()
+
+ evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
+
+ if password is not None and userdata.called == 0:
+ raise TypeError(
+ "Password was given but private key is not encrypted."
+ )
+
+ assert (
+ password is not None and userdata.called == 1
+ ) or password is None
+
+ return self._evp_pkey_to_private_key(
+ evp_pkey, unsafe_skip_rsa_key_validation
+ )
+
+ def _handle_key_loading_error(self) -> typing.NoReturn:
+ errors = self._consume_errors()
+
+ if not errors:
+ raise ValueError(
+ "Could not deserialize key data. The data may be in an "
+ "incorrect format or it may be encrypted with an unsupported "
+ "algorithm."
+ )
+
+ elif (
+ errors[0]._lib_reason_match(
+ self._lib.ERR_LIB_EVP, self._lib.EVP_R_BAD_DECRYPT
+ )
+ or errors[0]._lib_reason_match(
+ self._lib.ERR_LIB_PKCS12,
+ self._lib.PKCS12_R_PKCS12_CIPHERFINAL_ERROR,
+ )
+ or (
+ self._lib.Cryptography_HAS_PROVIDERS
+ and errors[0]._lib_reason_match(
+ self._lib.ERR_LIB_PROV,
+ self._lib.PROV_R_BAD_DECRYPT,
+ )
+ )
+ ):
+ raise ValueError("Bad decrypt. Incorrect password?")
+
+ elif any(
+ error._lib_reason_match(
+ self._lib.ERR_LIB_EVP,
+ self._lib.EVP_R_UNSUPPORTED_PRIVATE_KEY_ALGORITHM,
+ )
+ for error in errors
+ ):
+ raise ValueError("Unsupported public key algorithm.")
+
+ else:
+ raise ValueError(
+ "Could not deserialize key data. The data may be in an "
+ "incorrect format, it may be encrypted with an unsupported "
+ "algorithm, or it may be an unsupported key type (e.g. EC "
+ "curves with explicit parameters).",
+ errors,
+ )
+
+ def elliptic_curve_supported(self, curve: ec.EllipticCurve) -> bool:
+ try:
+ curve_nid = self._elliptic_curve_to_nid(curve)
+ except UnsupportedAlgorithm:
+ curve_nid = self._lib.NID_undef
+
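+        # EC_GROUP_new_by_curve_name fails for NID_undef, so an unknown
+        # curve falls through to the "not supported" branch below.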
+ group = self._lib.EC_GROUP_new_by_curve_name(curve_nid)
+
+ if group == self._ffi.NULL:
+ self._consume_errors()
+ return False
+ else:
+ self.openssl_assert(curve_nid != self._lib.NID_undef)
+ self._lib.EC_GROUP_free(group)
+ return True
+
+ def elliptic_curve_signature_algorithm_supported(
+ self,
+ signature_algorithm: ec.EllipticCurveSignatureAlgorithm,
+ curve: ec.EllipticCurve,
+ ) -> bool:
+ # We only support ECDSA right now.
+ if not isinstance(signature_algorithm, ec.ECDSA):
+ return False
+
+ return self.elliptic_curve_supported(curve)
+
+ def generate_elliptic_curve_private_key(
+ self, curve: ec.EllipticCurve
+ ) -> ec.EllipticCurvePrivateKey:
+ """
+ Generate a new private key on the named curve.
+ """
+
+ if self.elliptic_curve_supported(curve):
+ ec_cdata = self._ec_key_new_by_curve(curve)
+
+ res = self._lib.EC_KEY_generate_key(ec_cdata)
+ self.openssl_assert(res == 1)
+
+ evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata)
+
+ return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey)
+ else:
+ raise UnsupportedAlgorithm(
+ f"Backend object does not support {curve.name}.",
+ _Reasons.UNSUPPORTED_ELLIPTIC_CURVE,
+ )
+
+ def load_elliptic_curve_private_numbers(
+ self, numbers: ec.EllipticCurvePrivateNumbers
+ ) -> ec.EllipticCurvePrivateKey:
+ public = numbers.public_numbers
+
+ ec_cdata = self._ec_key_new_by_curve(public.curve)
+
+ private_value = self._ffi.gc(
+ self._int_to_bn(numbers.private_value), self._lib.BN_clear_free
+ )
+ res = self._lib.EC_KEY_set_private_key(ec_cdata, private_value)
+ if res != 1:
+ self._consume_errors()
+ raise ValueError("Invalid EC key.")
+
+ with self._tmp_bn_ctx() as bn_ctx:
+ self._ec_key_set_public_key_affine_coordinates(
+ ec_cdata, public.x, public.y, bn_ctx
+ )
+            # Derive the expected public point and compare it to the one
+            # we just set from the values we were given. If they don't
+            # match, this isn't a valid key pair.
+ group = self._lib.EC_KEY_get0_group(ec_cdata)
+ self.openssl_assert(group != self._ffi.NULL)
+            set_point = self._lib.EC_KEY_get0_public_key(ec_cdata)
+ self.openssl_assert(set_point != self._ffi.NULL)
+ computed_point = self._lib.EC_POINT_new(group)
+ self.openssl_assert(computed_point != self._ffi.NULL)
+ computed_point = self._ffi.gc(
+ computed_point, self._lib.EC_POINT_free
+ )
+ res = self._lib.EC_POINT_mul(
+ group,
+ computed_point,
+ private_value,
+ self._ffi.NULL,
+ self._ffi.NULL,
+ bn_ctx,
+ )
+ self.openssl_assert(res == 1)
+ if (
+ self._lib.EC_POINT_cmp(
+ group, set_point, computed_point, bn_ctx
+ )
+ != 0
+ ):
+ raise ValueError("Invalid EC key.")
+
+ evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata)
+
+ return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey)
+
+ def load_elliptic_curve_public_numbers(
+ self, numbers: ec.EllipticCurvePublicNumbers
+ ) -> ec.EllipticCurvePublicKey:
+ ec_cdata = self._ec_key_new_by_curve(numbers.curve)
+ with self._tmp_bn_ctx() as bn_ctx:
+ self._ec_key_set_public_key_affine_coordinates(
+ ec_cdata, numbers.x, numbers.y, bn_ctx
+ )
+ evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata)
+
+ return _EllipticCurvePublicKey(self, ec_cdata, evp_pkey)
+
+ def load_elliptic_curve_public_bytes(
+ self, curve: ec.EllipticCurve, point_bytes: bytes
+ ) -> ec.EllipticCurvePublicKey:
+ ec_cdata = self._ec_key_new_by_curve(curve)
+ group = self._lib.EC_KEY_get0_group(ec_cdata)
+ self.openssl_assert(group != self._ffi.NULL)
+ point = self._lib.EC_POINT_new(group)
+ self.openssl_assert(point != self._ffi.NULL)
+ point = self._ffi.gc(point, self._lib.EC_POINT_free)
+ with self._tmp_bn_ctx() as bn_ctx:
+ res = self._lib.EC_POINT_oct2point(
+ group, point, point_bytes, len(point_bytes), bn_ctx
+ )
+ if res != 1:
+ self._consume_errors()
+ raise ValueError("Invalid public bytes for the given curve")
+
+ res = self._lib.EC_KEY_set_public_key(ec_cdata, point)
+ self.openssl_assert(res == 1)
+ evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata)
+ return _EllipticCurvePublicKey(self, ec_cdata, evp_pkey)
+
+ def derive_elliptic_curve_private_key(
+ self, private_value: int, curve: ec.EllipticCurve
+ ) -> ec.EllipticCurvePrivateKey:
+ ec_cdata = self._ec_key_new_by_curve(curve)
+
+ group = self._lib.EC_KEY_get0_group(ec_cdata)
+ self.openssl_assert(group != self._ffi.NULL)
+
+ point = self._lib.EC_POINT_new(group)
+ self.openssl_assert(point != self._ffi.NULL)
+ point = self._ffi.gc(point, self._lib.EC_POINT_free)
+
+ value = self._int_to_bn(private_value)
+ value = self._ffi.gc(value, self._lib.BN_clear_free)
+
+ with self._tmp_bn_ctx() as bn_ctx:
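+            # With the last two arguments NULL, EC_POINT_mul computes
+            # private_value * generator, i.e. the public point for this key.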
+ res = self._lib.EC_POINT_mul(
+ group, point, value, self._ffi.NULL, self._ffi.NULL, bn_ctx
+ )
+ self.openssl_assert(res == 1)
+
+ bn_x = self._lib.BN_CTX_get(bn_ctx)
+ bn_y = self._lib.BN_CTX_get(bn_ctx)
+
+ res = self._lib.EC_POINT_get_affine_coordinates(
+ group, point, bn_x, bn_y, bn_ctx
+ )
+ if res != 1:
+ self._consume_errors()
+ raise ValueError("Unable to derive key from private_value")
+
+ res = self._lib.EC_KEY_set_public_key(ec_cdata, point)
+ self.openssl_assert(res == 1)
+ private = self._int_to_bn(private_value)
+ private = self._ffi.gc(private, self._lib.BN_clear_free)
+ res = self._lib.EC_KEY_set_private_key(ec_cdata, private)
+ self.openssl_assert(res == 1)
+
+ evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata)
+
+ return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey)
+
+ def _ec_key_new_by_curve(self, curve: ec.EllipticCurve):
+ curve_nid = self._elliptic_curve_to_nid(curve)
+ return self._ec_key_new_by_curve_nid(curve_nid)
+
+ def _ec_key_new_by_curve_nid(self, curve_nid: int):
+ ec_cdata = self._lib.EC_KEY_new_by_curve_name(curve_nid)
+ self.openssl_assert(ec_cdata != self._ffi.NULL)
+ return self._ffi.gc(ec_cdata, self._lib.EC_KEY_free)
+
+ def elliptic_curve_exchange_algorithm_supported(
+ self, algorithm: ec.ECDH, curve: ec.EllipticCurve
+ ) -> bool:
+ if self._fips_enabled and not isinstance(
+ curve, self._fips_ecdh_curves
+ ):
+ return False
+
+ return self.elliptic_curve_supported(curve) and isinstance(
+ algorithm, ec.ECDH
+ )
+
+ def _ec_cdata_to_evp_pkey(self, ec_cdata):
+ evp_pkey = self._create_evp_pkey_gc()
+ res = self._lib.EVP_PKEY_set1_EC_KEY(evp_pkey, ec_cdata)
+ self.openssl_assert(res == 1)
+ return evp_pkey
+
+ def _elliptic_curve_to_nid(self, curve: ec.EllipticCurve) -> int:
+ """
+ Get the NID for a curve name.
+ """
+
+ curve_aliases = {"secp192r1": "prime192v1", "secp256r1": "prime256v1"}
+
+ curve_name = curve_aliases.get(curve.name, curve.name)
+
+ curve_nid = self._lib.OBJ_sn2nid(curve_name.encode())
+ if curve_nid == self._lib.NID_undef:
+ raise UnsupportedAlgorithm(
+ f"{curve.name} is not a supported elliptic curve",
+ _Reasons.UNSUPPORTED_ELLIPTIC_CURVE,
+ )
+ return curve_nid
+
+ @contextmanager
+ def _tmp_bn_ctx(self):
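+        # Yields a BN_CTX scoped by BN_CTX_start/BN_CTX_end: temporary
+        # bignums obtained via BN_CTX_get inside the block are released
+        # when the context manager exits.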
+ bn_ctx = self._lib.BN_CTX_new()
+ self.openssl_assert(bn_ctx != self._ffi.NULL)
+ bn_ctx = self._ffi.gc(bn_ctx, self._lib.BN_CTX_free)
+ self._lib.BN_CTX_start(bn_ctx)
+ try:
+ yield bn_ctx
+ finally:
+ self._lib.BN_CTX_end(bn_ctx)
+
+ def _ec_key_set_public_key_affine_coordinates(
+ self,
+ ec_cdata,
+ x: int,
+ y: int,
+ bn_ctx,
+ ) -> None:
+ """
+ Sets the public key point in the EC_KEY context to the affine x and y
+ values.
+ """
+
+ if x < 0 or y < 0:
+ raise ValueError(
+ "Invalid EC key. Both x and y must be non-negative."
+ )
+
+ x = self._ffi.gc(self._int_to_bn(x), self._lib.BN_free)
+ y = self._ffi.gc(self._int_to_bn(y), self._lib.BN_free)
+ group = self._lib.EC_KEY_get0_group(ec_cdata)
+ self.openssl_assert(group != self._ffi.NULL)
+ point = self._lib.EC_POINT_new(group)
+ self.openssl_assert(point != self._ffi.NULL)
+ point = self._ffi.gc(point, self._lib.EC_POINT_free)
+ res = self._lib.EC_POINT_set_affine_coordinates(
+ group, point, x, y, bn_ctx
+ )
+ if res != 1:
+ self._consume_errors()
+ raise ValueError("Invalid EC key.")
+ res = self._lib.EC_KEY_set_public_key(ec_cdata, point)
+ self.openssl_assert(res == 1)
+
+ def _private_key_bytes(
+ self,
+ encoding: serialization.Encoding,
+ format: serialization.PrivateFormat,
+ encryption_algorithm: serialization.KeySerializationEncryption,
+ key,
+ evp_pkey,
+ cdata,
+ ) -> bytes:
+ # validate argument types
+ if not isinstance(encoding, serialization.Encoding):
+ raise TypeError("encoding must be an item from the Encoding enum")
+ if not isinstance(format, serialization.PrivateFormat):
+ raise TypeError(
+ "format must be an item from the PrivateFormat enum"
+ )
+ if not isinstance(
+ encryption_algorithm, serialization.KeySerializationEncryption
+ ):
+ raise TypeError(
+ "Encryption algorithm must be a KeySerializationEncryption "
+ "instance"
+ )
+
+ # validate password
+ if isinstance(encryption_algorithm, serialization.NoEncryption):
+ password = b""
+ elif isinstance(
+ encryption_algorithm, serialization.BestAvailableEncryption
+ ):
+ password = encryption_algorithm.password
+ if len(password) > 1023:
+ raise ValueError(
+ "Passwords longer than 1023 bytes are not supported by "
+ "this backend"
+ )
+ elif (
+ isinstance(
+ encryption_algorithm, serialization._KeySerializationEncryption
+ )
+ and encryption_algorithm._format
+ is format
+ is serialization.PrivateFormat.OpenSSH
+ ):
+ password = encryption_algorithm.password
+ else:
+ raise ValueError("Unsupported encryption type")
+
+ # PKCS8 + PEM/DER
+ if format is serialization.PrivateFormat.PKCS8:
+ if encoding is serialization.Encoding.PEM:
+ write_bio = self._lib.PEM_write_bio_PKCS8PrivateKey
+ elif encoding is serialization.Encoding.DER:
+ write_bio = self._lib.i2d_PKCS8PrivateKey_bio
+ else:
+ raise ValueError("Unsupported encoding for PKCS8")
+ return self._private_key_bytes_via_bio(
+ write_bio, evp_pkey, password
+ )
+
+ # TraditionalOpenSSL + PEM/DER
+ if format is serialization.PrivateFormat.TraditionalOpenSSL:
+ if self._fips_enabled and not isinstance(
+ encryption_algorithm, serialization.NoEncryption
+ ):
+ raise ValueError(
+ "Encrypted traditional OpenSSL format is not "
+ "supported in FIPS mode."
+ )
+ key_type = self._lib.EVP_PKEY_id(evp_pkey)
+
+ if encoding is serialization.Encoding.PEM:
+ if key_type == self._lib.EVP_PKEY_RSA:
+ write_bio = self._lib.PEM_write_bio_RSAPrivateKey
+ else:
+ assert key_type == self._lib.EVP_PKEY_EC
+ write_bio = self._lib.PEM_write_bio_ECPrivateKey
+ return self._private_key_bytes_via_bio(
+ write_bio, cdata, password
+ )
+
+ if encoding is serialization.Encoding.DER:
+ if password:
+ raise ValueError(
+ "Encryption is not supported for DER encoded "
+ "traditional OpenSSL keys"
+ )
+ if key_type == self._lib.EVP_PKEY_RSA:
+ write_bio = self._lib.i2d_RSAPrivateKey_bio
+ else:
+ assert key_type == self._lib.EVP_PKEY_EC
+ write_bio = self._lib.i2d_ECPrivateKey_bio
+ return self._bio_func_output(write_bio, cdata)
+
+ raise ValueError("Unsupported encoding for TraditionalOpenSSL")
+
+ # OpenSSH + PEM
+ if format is serialization.PrivateFormat.OpenSSH:
+ if encoding is serialization.Encoding.PEM:
+ return ssh._serialize_ssh_private_key(
+ key, password, encryption_algorithm
+ )
+
+ raise ValueError(
+ "OpenSSH private key format can only be used"
+ " with PEM encoding"
+ )
+
+ # Anything that key-specific code was supposed to handle earlier,
+ # like Raw.
+ raise ValueError("format is invalid with this key")
+
+ def _private_key_bytes_via_bio(
+ self, write_bio, evp_pkey, password
+ ) -> bytes:
+ if not password:
+ evp_cipher = self._ffi.NULL
+ else:
+            # This cipher is a deliberately chosen default for encrypted
+            # PEM output; we may update it over time.
+ evp_cipher = self._lib.EVP_get_cipherbyname(b"aes-256-cbc")
+
+ return self._bio_func_output(
+ write_bio,
+ evp_pkey,
+ evp_cipher,
+ password,
+ len(password),
+ self._ffi.NULL,
+ self._ffi.NULL,
+ )
+
+ def _bio_func_output(self, write_bio, *args) -> bytes:
+ bio = self._create_mem_bio_gc()
+ res = write_bio(bio, *args)
+ self.openssl_assert(res == 1)
+ return self._read_mem_bio(bio)
+
+ def _public_key_bytes(
+ self,
+ encoding: serialization.Encoding,
+ format: serialization.PublicFormat,
+ key,
+ evp_pkey,
+ cdata,
+ ) -> bytes:
+ if not isinstance(encoding, serialization.Encoding):
+ raise TypeError("encoding must be an item from the Encoding enum")
+ if not isinstance(format, serialization.PublicFormat):
+ raise TypeError(
+ "format must be an item from the PublicFormat enum"
+ )
+
+ # SubjectPublicKeyInfo + PEM/DER
+ if format is serialization.PublicFormat.SubjectPublicKeyInfo:
+ if encoding is serialization.Encoding.PEM:
+ write_bio = self._lib.PEM_write_bio_PUBKEY
+ elif encoding is serialization.Encoding.DER:
+ write_bio = self._lib.i2d_PUBKEY_bio
+ else:
+ raise ValueError(
+ "SubjectPublicKeyInfo works only with PEM or DER encoding"
+ )
+ return self._bio_func_output(write_bio, evp_pkey)
+
+ # PKCS1 + PEM/DER
+ if format is serialization.PublicFormat.PKCS1:
+ # Only RSA is supported here.
+ key_type = self._lib.EVP_PKEY_id(evp_pkey)
+ if key_type != self._lib.EVP_PKEY_RSA:
+ raise ValueError("PKCS1 format is supported only for RSA keys")
+
+ if encoding is serialization.Encoding.PEM:
+ write_bio = self._lib.PEM_write_bio_RSAPublicKey
+ elif encoding is serialization.Encoding.DER:
+ write_bio = self._lib.i2d_RSAPublicKey_bio
+ else:
+ raise ValueError("PKCS1 works only with PEM or DER encoding")
+ return self._bio_func_output(write_bio, cdata)
+
+ # OpenSSH + OpenSSH
+ if format is serialization.PublicFormat.OpenSSH:
+ if encoding is serialization.Encoding.OpenSSH:
+ return ssh.serialize_ssh_public_key(key)
+
+ raise ValueError(
+ "OpenSSH format must be used with OpenSSH encoding"
+ )
+
+ # Anything that key-specific code was supposed to handle earlier,
+ # like Raw, CompressedPoint, UncompressedPoint
+ raise ValueError("format is invalid with this key")
+
+ def dh_supported(self) -> bool:
+ return not self._lib.CRYPTOGRAPHY_IS_BORINGSSL
+
+ def generate_dh_parameters(
+ self, generator: int, key_size: int
+ ) -> dh.DHParameters:
+ return rust_openssl.dh.generate_parameters(generator, key_size)
+
+ def generate_dh_private_key(
+ self, parameters: dh.DHParameters
+ ) -> dh.DHPrivateKey:
+ return parameters.generate_private_key()
+
+ def generate_dh_private_key_and_parameters(
+ self, generator: int, key_size: int
+ ) -> dh.DHPrivateKey:
+ return self.generate_dh_private_key(
+ self.generate_dh_parameters(generator, key_size)
+ )
+
+ def load_dh_private_numbers(
+ self, numbers: dh.DHPrivateNumbers
+ ) -> dh.DHPrivateKey:
+ return rust_openssl.dh.from_private_numbers(numbers)
+
+ def load_dh_public_numbers(
+ self, numbers: dh.DHPublicNumbers
+ ) -> dh.DHPublicKey:
+ return rust_openssl.dh.from_public_numbers(numbers)
+
+ def load_dh_parameter_numbers(
+ self, numbers: dh.DHParameterNumbers
+ ) -> dh.DHParameters:
+ return rust_openssl.dh.from_parameter_numbers(numbers)
+
+ def dh_parameters_supported(
+ self, p: int, g: int, q: typing.Optional[int] = None
+ ) -> bool:
+ try:
+ rust_openssl.dh.from_parameter_numbers(
+ dh.DHParameterNumbers(p=p, g=g, q=q)
+ )
+ except ValueError:
+ return False
+ else:
+ return True
+
+ def dh_x942_serialization_supported(self) -> bool:
+ return self._lib.Cryptography_HAS_EVP_PKEY_DHX == 1
+
+ def x25519_load_public_bytes(self, data: bytes) -> x25519.X25519PublicKey:
+ return rust_openssl.x25519.from_public_bytes(data)
+
+ def x25519_load_private_bytes(
+ self, data: bytes
+ ) -> x25519.X25519PrivateKey:
+ return rust_openssl.x25519.from_private_bytes(data)
+
+ def x25519_generate_key(self) -> x25519.X25519PrivateKey:
+ return rust_openssl.x25519.generate_key()
+
+ def x25519_supported(self) -> bool:
+ if self._fips_enabled:
+ return False
+ return not self._lib.CRYPTOGRAPHY_LIBRESSL_LESS_THAN_370
+
+ def x448_load_public_bytes(self, data: bytes) -> x448.X448PublicKey:
+ return rust_openssl.x448.from_public_bytes(data)
+
+ def x448_load_private_bytes(self, data: bytes) -> x448.X448PrivateKey:
+ return rust_openssl.x448.from_private_bytes(data)
+
+ def x448_generate_key(self) -> x448.X448PrivateKey:
+ return rust_openssl.x448.generate_key()
+
+ def x448_supported(self) -> bool:
+ if self._fips_enabled:
+ return False
+ return (
+ not self._lib.CRYPTOGRAPHY_IS_LIBRESSL
+ and not self._lib.CRYPTOGRAPHY_IS_BORINGSSL
+ )
+
+ def ed25519_supported(self) -> bool:
+ if self._fips_enabled:
+ return False
+ return self._lib.CRYPTOGRAPHY_HAS_WORKING_ED25519
+
+ def ed25519_load_public_bytes(
+ self, data: bytes
+ ) -> ed25519.Ed25519PublicKey:
+ return rust_openssl.ed25519.from_public_bytes(data)
+
+ def ed25519_load_private_bytes(
+ self, data: bytes
+ ) -> ed25519.Ed25519PrivateKey:
+ return rust_openssl.ed25519.from_private_bytes(data)
+
+ def ed25519_generate_key(self) -> ed25519.Ed25519PrivateKey:
+ return rust_openssl.ed25519.generate_key()
+
+ def ed448_supported(self) -> bool:
+ if self._fips_enabled:
+ return False
+ return (
+ not self._lib.CRYPTOGRAPHY_IS_LIBRESSL
+ and not self._lib.CRYPTOGRAPHY_IS_BORINGSSL
+ )
+
+ def ed448_load_public_bytes(self, data: bytes) -> ed448.Ed448PublicKey:
+ return rust_openssl.ed448.from_public_bytes(data)
+
+ def ed448_load_private_bytes(self, data: bytes) -> ed448.Ed448PrivateKey:
+ return rust_openssl.ed448.from_private_bytes(data)
+
+ def ed448_generate_key(self) -> ed448.Ed448PrivateKey:
+ return rust_openssl.ed448.generate_key()
+
+ def aead_cipher_supported(self, cipher) -> bool:
+ return aead._aead_cipher_supported(self, cipher)
+
+ def _zero_data(self, data, length: int) -> None:
+        # We clear the data byte by byte because we currently know of no
+        # better way to guarantee that the memory of a bytearray is
+        # overwritten rather than the underlying char * simply replaced.
+ for i in range(length):
+ data[i] = 0
+
+ @contextlib.contextmanager
+ def _zeroed_null_terminated_buf(self, data):
+ """
+ This method takes bytes, which can be a bytestring or a mutable
+ buffer like a bytearray, and yields a null-terminated version of that
+ data. This is required because PKCS12_parse doesn't take a length with
+ its password char * and ffi.from_buffer doesn't provide null
+        termination. So, to support zeroing the data via a bytearray, we
+        build this admittedly awkward construct that copies the memory
+        and zeroes it after use.
+ """
+ if data is None:
+ yield self._ffi.NULL
+ else:
+ data_len = len(data)
+ buf = self._ffi.new("char[]", data_len + 1)
+ self._ffi.memmove(buf, data, data_len)
+ try:
+ yield buf
+ finally:
+ # Cast to a uint8_t * so we can assign by integer
+ self._zero_data(self._ffi.cast("uint8_t *", buf), data_len)
+
+ def load_key_and_certificates_from_pkcs12(
+ self, data: bytes, password: typing.Optional[bytes]
+ ) -> typing.Tuple[
+ typing.Optional[PrivateKeyTypes],
+ typing.Optional[x509.Certificate],
+ typing.List[x509.Certificate],
+ ]:
+ pkcs12 = self.load_pkcs12(data, password)
+ return (
+ pkcs12.key,
+ pkcs12.cert.certificate if pkcs12.cert else None,
+ [cert.certificate for cert in pkcs12.additional_certs],
+ )
+
+ def load_pkcs12(
+ self, data: bytes, password: typing.Optional[bytes]
+ ) -> PKCS12KeyAndCertificates:
+ if password is not None:
+ utils._check_byteslike("password", password)
+
+ bio = self._bytes_to_bio(data)
+ p12 = self._lib.d2i_PKCS12_bio(bio.bio, self._ffi.NULL)
+ if p12 == self._ffi.NULL:
+ self._consume_errors()
+ raise ValueError("Could not deserialize PKCS12 data")
+
+ p12 = self._ffi.gc(p12, self._lib.PKCS12_free)
+ evp_pkey_ptr = self._ffi.new("EVP_PKEY **")
+ x509_ptr = self._ffi.new("X509 **")
+ sk_x509_ptr = self._ffi.new("Cryptography_STACK_OF_X509 **")
+ with self._zeroed_null_terminated_buf(password) as password_buf:
+ res = self._lib.PKCS12_parse(
+ p12, password_buf, evp_pkey_ptr, x509_ptr, sk_x509_ptr
+ )
+ if res == 0:
+ self._consume_errors()
+ raise ValueError("Invalid password or PKCS12 data")
+
+ cert = None
+ key = None
+ additional_certificates = []
+
+ if evp_pkey_ptr[0] != self._ffi.NULL:
+ evp_pkey = self._ffi.gc(evp_pkey_ptr[0], self._lib.EVP_PKEY_free)
+ # We don't support turning off RSA key validation when loading
+ # PKCS12 keys
+ key = self._evp_pkey_to_private_key(
+ evp_pkey, unsafe_skip_rsa_key_validation=False
+ )
+
+ if x509_ptr[0] != self._ffi.NULL:
+ x509 = self._ffi.gc(x509_ptr[0], self._lib.X509_free)
+ cert_obj = self._ossl2cert(x509)
+ name = None
+ maybe_name = self._lib.X509_alias_get0(x509, self._ffi.NULL)
+ if maybe_name != self._ffi.NULL:
+ name = self._ffi.string(maybe_name)
+ cert = PKCS12Certificate(cert_obj, name)
+
+ if sk_x509_ptr[0] != self._ffi.NULL:
+ sk_x509 = self._ffi.gc(sk_x509_ptr[0], self._lib.sk_X509_free)
+ num = self._lib.sk_X509_num(sk_x509_ptr[0])
+
+ # In OpenSSL < 3.0.0 PKCS12 parsing reverses the order of the
+ # certificates.
+ indices: typing.Iterable[int]
+ if (
+ self._lib.CRYPTOGRAPHY_OPENSSL_300_OR_GREATER
+ or self._lib.CRYPTOGRAPHY_IS_BORINGSSL
+ ):
+ indices = range(num)
+ else:
+ indices = reversed(range(num))
+
+ for i in indices:
+ x509 = self._lib.sk_X509_value(sk_x509, i)
+ self.openssl_assert(x509 != self._ffi.NULL)
+ x509 = self._ffi.gc(x509, self._lib.X509_free)
+ addl_cert = self._ossl2cert(x509)
+ addl_name = None
+ maybe_name = self._lib.X509_alias_get0(x509, self._ffi.NULL)
+ if maybe_name != self._ffi.NULL:
+ addl_name = self._ffi.string(maybe_name)
+ additional_certificates.append(
+ PKCS12Certificate(addl_cert, addl_name)
+ )
+
+ return PKCS12KeyAndCertificates(key, cert, additional_certificates)
+
+ def serialize_key_and_certificates_to_pkcs12(
+ self,
+ name: typing.Optional[bytes],
+ key: typing.Optional[PKCS12PrivateKeyTypes],
+ cert: typing.Optional[x509.Certificate],
+ cas: typing.Optional[typing.List[_PKCS12CATypes]],
+ encryption_algorithm: serialization.KeySerializationEncryption,
+ ) -> bytes:
+ password = None
+ if name is not None:
+ utils._check_bytes("name", name)
+
+ if isinstance(encryption_algorithm, serialization.NoEncryption):
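+            # Passing -1 for the NIDs tells PKCS12_create to skip
+            # encrypting the certificates and key entirely.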
+ nid_cert = -1
+ nid_key = -1
+ pkcs12_iter = 0
+ mac_iter = 0
+ mac_alg = self._ffi.NULL
+ elif isinstance(
+ encryption_algorithm, serialization.BestAvailableEncryption
+ ):
+            # PKCS12 encryption is deeply flawed by design and cannot be
+            # fixed. OpenSSL 3 supports PBESv2, but LibreSSL and BoringSSL
+            # do not, so we use PBESv1 with 3DES on the older paths.
+ if self._lib.CRYPTOGRAPHY_OPENSSL_300_OR_GREATER:
+ nid_cert = self._lib.NID_aes_256_cbc
+ nid_key = self._lib.NID_aes_256_cbc
+ else:
+ nid_cert = self._lib.NID_pbe_WithSHA1And3_Key_TripleDES_CBC
+ nid_key = self._lib.NID_pbe_WithSHA1And3_Key_TripleDES_CBC
+ # At least we can set this higher than OpenSSL's default
+ pkcs12_iter = 20000
+            # mac_iter is chosen for compatibility reasons; see:
+            # https://www.openssl.org/docs/man1.1.1/man3/PKCS12_create.html
+ mac_iter = 1
+ # MAC algorithm can only be set on OpenSSL 3.0.0+
+ mac_alg = self._ffi.NULL
+ password = encryption_algorithm.password
+ elif (
+ isinstance(
+ encryption_algorithm, serialization._KeySerializationEncryption
+ )
+ and encryption_algorithm._format
+ is serialization.PrivateFormat.PKCS12
+ ):
+            # Fall back to OpenSSL's defaults. Behavior will vary based on
+            # the version of OpenSSL that cryptography is compiled against.
+ nid_cert = 0
+ nid_key = 0
+            # Use the same default iteration count we use in
+            # BestAvailableEncryption
+            pkcs12_iter = 20000
+            # See the BestAvailableEncryption comment for why this is 1
+ mac_iter = 1
+ password = encryption_algorithm.password
+ keycertalg = encryption_algorithm._key_cert_algorithm
+ if keycertalg is PBES.PBESv1SHA1And3KeyTripleDESCBC:
+ nid_cert = self._lib.NID_pbe_WithSHA1And3_Key_TripleDES_CBC
+ nid_key = self._lib.NID_pbe_WithSHA1And3_Key_TripleDES_CBC
+ elif keycertalg is PBES.PBESv2SHA256AndAES256CBC:
+ if not self._lib.CRYPTOGRAPHY_OPENSSL_300_OR_GREATER:
+ raise UnsupportedAlgorithm(
+ "PBESv2 is not supported by this version of OpenSSL"
+ )
+ nid_cert = self._lib.NID_aes_256_cbc
+ nid_key = self._lib.NID_aes_256_cbc
+ else:
+ assert keycertalg is None
+ # We use OpenSSL's defaults
+
+ if encryption_algorithm._hmac_hash is not None:
+ if not self._lib.Cryptography_HAS_PKCS12_SET_MAC:
+ raise UnsupportedAlgorithm(
+ "Setting MAC algorithm is not supported by this "
+ "version of OpenSSL."
+ )
+ mac_alg = self._evp_md_non_null_from_algorithm(
+ encryption_algorithm._hmac_hash
+ )
+ self.openssl_assert(mac_alg != self._ffi.NULL)
+ else:
+ mac_alg = self._ffi.NULL
+
+ if encryption_algorithm._kdf_rounds is not None:
+ pkcs12_iter = encryption_algorithm._kdf_rounds
+
+ else:
+ raise ValueError("Unsupported key encryption type")
+
+ if cas is None or len(cas) == 0:
+ sk_x509 = self._ffi.NULL
+ else:
+ sk_x509 = self._lib.sk_X509_new_null()
+ sk_x509 = self._ffi.gc(sk_x509, self._lib.sk_X509_free)
+
+            # Keep the X509 objects alive until the end of the function
+ ossl_cas = []
+ for ca in cas:
+ if isinstance(ca, PKCS12Certificate):
+ ca_alias = ca.friendly_name
+ ossl_ca = self._cert2ossl(ca.certificate)
+ if ca_alias is None:
+ res = self._lib.X509_alias_set1(
+ ossl_ca, self._ffi.NULL, -1
+ )
+ else:
+ res = self._lib.X509_alias_set1(
+ ossl_ca, ca_alias, len(ca_alias)
+ )
+ self.openssl_assert(res == 1)
+ else:
+ ossl_ca = self._cert2ossl(ca)
+ ossl_cas.append(ossl_ca)
+ res = self._lib.sk_X509_push(sk_x509, ossl_ca)
+                self.openssl_assert(res >= 1)
+
+ with self._zeroed_null_terminated_buf(password) as password_buf:
+ with self._zeroed_null_terminated_buf(name) as name_buf:
+ ossl_cert = self._cert2ossl(cert) if cert else self._ffi.NULL
+ ossl_pkey = (
+ self._key2ossl(key) if key is not None else self._ffi.NULL
+ )
+
+ p12 = self._lib.PKCS12_create(
+ password_buf,
+ name_buf,
+ ossl_pkey,
+ ossl_cert,
+ sk_x509,
+ nid_key,
+ nid_cert,
+ pkcs12_iter,
+ mac_iter,
+ 0,
+ )
+
+ if (
+ self._lib.Cryptography_HAS_PKCS12_SET_MAC
+ and mac_alg != self._ffi.NULL
+ ):
+ self._lib.PKCS12_set_mac(
+ p12,
+ password_buf,
+ -1,
+ self._ffi.NULL,
+ 0,
+ mac_iter,
+ mac_alg,
+ )
+
+ self.openssl_assert(p12 != self._ffi.NULL)
+ p12 = self._ffi.gc(p12, self._lib.PKCS12_free)
+
+ bio = self._create_mem_bio_gc()
+ res = self._lib.i2d_PKCS12_bio(bio, p12)
+ self.openssl_assert(res > 0)
+ return self._read_mem_bio(bio)
+
+ def poly1305_supported(self) -> bool:
+ if self._fips_enabled:
+ return False
+ return self._lib.Cryptography_HAS_POLY1305 == 1
+
+ def pkcs7_supported(self) -> bool:
+ return not self._lib.CRYPTOGRAPHY_IS_BORINGSSL
+
+ def load_pem_pkcs7_certificates(
+ self, data: bytes
+ ) -> typing.List[x509.Certificate]:
+ utils._check_bytes("data", data)
+ bio = self._bytes_to_bio(data)
+ p7 = self._lib.PEM_read_bio_PKCS7(
+ bio.bio, self._ffi.NULL, self._ffi.NULL, self._ffi.NULL
+ )
+ if p7 == self._ffi.NULL:
+ self._consume_errors()
+ raise ValueError("Unable to parse PKCS7 data")
+
+ p7 = self._ffi.gc(p7, self._lib.PKCS7_free)
+ return self._load_pkcs7_certificates(p7)
+
+ def load_der_pkcs7_certificates(
+ self, data: bytes
+ ) -> typing.List[x509.Certificate]:
+ utils._check_bytes("data", data)
+ bio = self._bytes_to_bio(data)
+ p7 = self._lib.d2i_PKCS7_bio(bio.bio, self._ffi.NULL)
+ if p7 == self._ffi.NULL:
+ self._consume_errors()
+ raise ValueError("Unable to parse PKCS7 data")
+
+ p7 = self._ffi.gc(p7, self._lib.PKCS7_free)
+ return self._load_pkcs7_certificates(p7)
+
+ def _load_pkcs7_certificates(self, p7) -> typing.List[x509.Certificate]:
+ nid = self._lib.OBJ_obj2nid(p7.type)
+ self.openssl_assert(nid != self._lib.NID_undef)
+ if nid != self._lib.NID_pkcs7_signed:
+ raise UnsupportedAlgorithm(
+ "Only basic signed structures are currently supported. NID"
+ " for this data was {}".format(nid),
+ _Reasons.UNSUPPORTED_SERIALIZATION,
+ )
+
+ sk_x509 = p7.d.sign.cert
+ num = self._lib.sk_X509_num(sk_x509)
+ certs = []
+ for i in range(num):
+ x509 = self._lib.sk_X509_value(sk_x509, i)
+ self.openssl_assert(x509 != self._ffi.NULL)
+ cert = self._ossl2cert(x509)
+ certs.append(cert)
+
+ return certs
+
+
+class GetCipherByName:
+ def __init__(self, fmt: str):
+ self._fmt = fmt
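+        # The format string is interpolated with the cipher and mode
+        # objects; e.g. a (hypothetical) registration of
+        # GetCipherByName("{cipher.name}-{cipher.key_size}-cbc") would
+        # resolve to "aes-128-cbc" for 128-bit AES in CBC mode.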
+
+ def __call__(self, backend: Backend, cipher: CipherAlgorithm, mode: Mode):
+ cipher_name = self._fmt.format(cipher=cipher, mode=mode).lower()
+ evp_cipher = backend._lib.EVP_get_cipherbyname(
+ cipher_name.encode("ascii")
+ )
+
+        # If the name-based lookup failed on OpenSSL 3.0+, try fetching
+        # the cipher from the loaded providers instead.
+ if (
+ evp_cipher == backend._ffi.NULL
+ and backend._lib.Cryptography_HAS_300_EVP_CIPHER
+ ):
+ evp_cipher = backend._lib.EVP_CIPHER_fetch(
+ backend._ffi.NULL,
+ cipher_name.encode("ascii"),
+ backend._ffi.NULL,
+ )
+
+ backend._consume_errors()
+ return evp_cipher
+
+
+def _get_xts_cipher(backend: Backend, cipher: AES, mode):
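+    # XTS keys are two AES keys concatenated, so the OpenSSL cipher name
+    # uses half the total key size (e.g. a 512-bit key maps to aes-256-xts).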
+ cipher_name = f"aes-{cipher.key_size // 2}-xts"
+ return backend._lib.EVP_get_cipherbyname(cipher_name.encode("ascii"))
+
+
+backend = Backend()
diff --git a/billinglayer/python/cryptography/hazmat/backends/openssl/ciphers.py b/billinglayer/python/cryptography/hazmat/backends/openssl/ciphers.py
new file mode 100644
index 0000000..bc42adb
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/backends/openssl/ciphers.py
@@ -0,0 +1,281 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import typing
+
+from cryptography.exceptions import InvalidTag, UnsupportedAlgorithm, _Reasons
+from cryptography.hazmat.primitives import ciphers
+from cryptography.hazmat.primitives.ciphers import algorithms, modes
+
+if typing.TYPE_CHECKING:
+ from cryptography.hazmat.backends.openssl.backend import Backend
+
+
+class _CipherContext:
+ _ENCRYPT = 1
+ _DECRYPT = 0
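+    # EVP_CipherUpdate takes an int length, so large payloads are
+    # processed in chunks comfortably below INT_MAX.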
+ _MAX_CHUNK_SIZE = 2**30 - 1
+
+ def __init__(self, backend: Backend, cipher, mode, operation: int) -> None:
+ self._backend = backend
+ self._cipher = cipher
+ self._mode = mode
+ self._operation = operation
+ self._tag: typing.Optional[bytes] = None
+
+ if isinstance(self._cipher, ciphers.BlockCipherAlgorithm):
+ self._block_size_bytes = self._cipher.block_size // 8
+ else:
+ self._block_size_bytes = 1
+
+ ctx = self._backend._lib.EVP_CIPHER_CTX_new()
+ ctx = self._backend._ffi.gc(
+ ctx, self._backend._lib.EVP_CIPHER_CTX_free
+ )
+
+ registry = self._backend._cipher_registry
+ try:
+ adapter = registry[type(cipher), type(mode)]
+ except KeyError:
+ raise UnsupportedAlgorithm(
+ "cipher {} in {} mode is not supported "
+ "by this backend.".format(
+ cipher.name, mode.name if mode else mode
+ ),
+ _Reasons.UNSUPPORTED_CIPHER,
+ )
+
+ evp_cipher = adapter(self._backend, cipher, mode)
+ if evp_cipher == self._backend._ffi.NULL:
+ msg = f"cipher {cipher.name} "
+ if mode is not None:
+ msg += f"in {mode.name} mode "
+ msg += (
+ "is not supported by this backend (Your version of OpenSSL "
+ "may be too old. Current version: {}.)"
+ ).format(self._backend.openssl_version_text())
+ raise UnsupportedAlgorithm(msg, _Reasons.UNSUPPORTED_CIPHER)
+
+ if isinstance(mode, modes.ModeWithInitializationVector):
+ iv_nonce = self._backend._ffi.from_buffer(
+ mode.initialization_vector
+ )
+ elif isinstance(mode, modes.ModeWithTweak):
+ iv_nonce = self._backend._ffi.from_buffer(mode.tweak)
+ elif isinstance(mode, modes.ModeWithNonce):
+ iv_nonce = self._backend._ffi.from_buffer(mode.nonce)
+ elif isinstance(cipher, algorithms.ChaCha20):
+ iv_nonce = self._backend._ffi.from_buffer(cipher.nonce)
+ else:
+ iv_nonce = self._backend._ffi.NULL
+ # begin init with cipher and operation type
+ res = self._backend._lib.EVP_CipherInit_ex(
+ ctx,
+ evp_cipher,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ operation,
+ )
+ self._backend.openssl_assert(res != 0)
+ # set the key length to handle variable key ciphers
+ res = self._backend._lib.EVP_CIPHER_CTX_set_key_length(
+ ctx, len(cipher.key)
+ )
+ self._backend.openssl_assert(res != 0)
+ if isinstance(mode, modes.GCM):
+ res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
+ ctx,
+ self._backend._lib.EVP_CTRL_AEAD_SET_IVLEN,
+ len(iv_nonce),
+ self._backend._ffi.NULL,
+ )
+ self._backend.openssl_assert(res != 0)
+ if mode.tag is not None:
+ res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
+ ctx,
+ self._backend._lib.EVP_CTRL_AEAD_SET_TAG,
+ len(mode.tag),
+ mode.tag,
+ )
+ self._backend.openssl_assert(res != 0)
+ self._tag = mode.tag
+
+ # pass key/iv
+ res = self._backend._lib.EVP_CipherInit_ex(
+ ctx,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ self._backend._ffi.from_buffer(cipher.key),
+ iv_nonce,
+ operation,
+ )
+
+ # Check for XTS mode duplicate keys error
+ errors = self._backend._consume_errors()
+ lib = self._backend._lib
+ if res == 0 and (
+ (
+ not lib.CRYPTOGRAPHY_IS_LIBRESSL
+ and errors[0]._lib_reason_match(
+ lib.ERR_LIB_EVP, lib.EVP_R_XTS_DUPLICATED_KEYS
+ )
+ )
+ or (
+ lib.Cryptography_HAS_PROVIDERS
+ and errors[0]._lib_reason_match(
+ lib.ERR_LIB_PROV, lib.PROV_R_XTS_DUPLICATED_KEYS
+ )
+ )
+ ):
+ raise ValueError("In XTS mode duplicated keys are not allowed")
+
+ self._backend.openssl_assert(res != 0, errors=errors)
+
+ # We purposely disable padding here as it's handled higher up in the
+ # API.
+ self._backend._lib.EVP_CIPHER_CTX_set_padding(ctx, 0)
+ self._ctx = ctx
+
+ def update(self, data: bytes) -> bytes:
+ buf = bytearray(len(data) + self._block_size_bytes - 1)
+ n = self.update_into(data, buf)
+ return bytes(buf[:n])
+
+ def update_into(self, data: bytes, buf: bytes) -> int:
+ total_data_len = len(data)
+ if len(buf) < (total_data_len + self._block_size_bytes - 1):
+ raise ValueError(
+ "buffer must be at least {} bytes for this "
+ "payload".format(len(data) + self._block_size_bytes - 1)
+ )
+
+ data_processed = 0
+ total_out = 0
+ outlen = self._backend._ffi.new("int *")
+ baseoutbuf = self._backend._ffi.from_buffer(buf, require_writable=True)
+ baseinbuf = self._backend._ffi.from_buffer(data)
+
+ while data_processed != total_data_len:
+ outbuf = baseoutbuf + total_out
+ inbuf = baseinbuf + data_processed
+ inlen = min(self._MAX_CHUNK_SIZE, total_data_len - data_processed)
+
+ res = self._backend._lib.EVP_CipherUpdate(
+ self._ctx, outbuf, outlen, inbuf, inlen
+ )
+ if res == 0 and isinstance(self._mode, modes.XTS):
+ self._backend._consume_errors()
+ raise ValueError(
+ "In XTS mode you must supply at least a full block in the "
+ "first update call. For AES this is 16 bytes."
+ )
+ else:
+ self._backend.openssl_assert(res != 0)
+ data_processed += inlen
+ total_out += outlen[0]
+
+ return total_out
+
+ def finalize(self) -> bytes:
+ if (
+ self._operation == self._DECRYPT
+ and isinstance(self._mode, modes.ModeWithAuthenticationTag)
+ and self.tag is None
+ ):
+ raise ValueError(
+ "Authentication tag must be provided when decrypting."
+ )
+
+ buf = self._backend._ffi.new("unsigned char[]", self._block_size_bytes)
+ outlen = self._backend._ffi.new("int *")
+ res = self._backend._lib.EVP_CipherFinal_ex(self._ctx, buf, outlen)
+ if res == 0:
+ errors = self._backend._consume_errors()
+
+ if not errors and isinstance(self._mode, modes.GCM):
+ raise InvalidTag
+
+ lib = self._backend._lib
+ self._backend.openssl_assert(
+ errors[0]._lib_reason_match(
+ lib.ERR_LIB_EVP,
+ lib.EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH,
+ )
+ or (
+ lib.Cryptography_HAS_PROVIDERS
+ and errors[0]._lib_reason_match(
+ lib.ERR_LIB_PROV,
+ lib.PROV_R_WRONG_FINAL_BLOCK_LENGTH,
+ )
+ )
+ or (
+ lib.CRYPTOGRAPHY_IS_BORINGSSL
+ and errors[0].reason
+ == lib.CIPHER_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH
+ ),
+ errors=errors,
+ )
+ raise ValueError(
+ "The length of the provided data is not a multiple of "
+ "the block length."
+ )
+
+ if (
+ isinstance(self._mode, modes.GCM)
+ and self._operation == self._ENCRYPT
+ ):
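+            # On encryption, fetch the authentication tag now that the
+            # final block has been flushed.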
+ tag_buf = self._backend._ffi.new(
+ "unsigned char[]", self._block_size_bytes
+ )
+ res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
+ self._ctx,
+ self._backend._lib.EVP_CTRL_AEAD_GET_TAG,
+ self._block_size_bytes,
+ tag_buf,
+ )
+ self._backend.openssl_assert(res != 0)
+ self._tag = self._backend._ffi.buffer(tag_buf)[:]
+
+ res = self._backend._lib.EVP_CIPHER_CTX_reset(self._ctx)
+ self._backend.openssl_assert(res == 1)
+ return self._backend._ffi.buffer(buf)[: outlen[0]]
+
+ def finalize_with_tag(self, tag: bytes) -> bytes:
+ tag_len = len(tag)
+ if tag_len < self._mode._min_tag_length:
+ raise ValueError(
+ "Authentication tag must be {} bytes or longer.".format(
+ self._mode._min_tag_length
+ )
+ )
+ elif tag_len > self._block_size_bytes:
+ raise ValueError(
+ "Authentication tag cannot be more than {} bytes.".format(
+ self._block_size_bytes
+ )
+ )
+ res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
+ self._ctx, self._backend._lib.EVP_CTRL_AEAD_SET_TAG, len(tag), tag
+ )
+ self._backend.openssl_assert(res != 0)
+ self._tag = tag
+ return self.finalize()
+
+ def authenticate_additional_data(self, data: bytes) -> None:
+ outlen = self._backend._ffi.new("int *")
+ res = self._backend._lib.EVP_CipherUpdate(
+ self._ctx,
+ self._backend._ffi.NULL,
+ outlen,
+ self._backend._ffi.from_buffer(data),
+ len(data),
+ )
+ self._backend.openssl_assert(res != 0)
+
+ @property
+ def tag(self) -> typing.Optional[bytes]:
+ return self._tag
diff --git a/billinglayer/python/cryptography/hazmat/backends/openssl/cmac.py b/billinglayer/python/cryptography/hazmat/backends/openssl/cmac.py
new file mode 100644
index 0000000..bdd7fec
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/backends/openssl/cmac.py
@@ -0,0 +1,89 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import typing
+
+from cryptography.exceptions import (
+ InvalidSignature,
+ UnsupportedAlgorithm,
+ _Reasons,
+)
+from cryptography.hazmat.primitives import constant_time
+from cryptography.hazmat.primitives.ciphers.modes import CBC
+
+if typing.TYPE_CHECKING:
+ from cryptography.hazmat.backends.openssl.backend import Backend
+ from cryptography.hazmat.primitives import ciphers
+
+
+class _CMACContext:
+ def __init__(
+ self,
+ backend: Backend,
+ algorithm: ciphers.BlockCipherAlgorithm,
+ ctx=None,
+ ) -> None:
+ if not backend.cmac_algorithm_supported(algorithm):
+ raise UnsupportedAlgorithm(
+ "This backend does not support CMAC.",
+ _Reasons.UNSUPPORTED_CIPHER,
+ )
+
+ self._backend = backend
+ self._key = algorithm.key
+ self._algorithm = algorithm
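+        # CMAC output is always one cipher block long.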
+ self._output_length = algorithm.block_size // 8
+
+ if ctx is None:
+ registry = self._backend._cipher_registry
+ adapter = registry[type(algorithm), CBC]
+
+ evp_cipher = adapter(self._backend, algorithm, CBC)
+
+ ctx = self._backend._lib.CMAC_CTX_new()
+
+ self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
+ ctx = self._backend._ffi.gc(ctx, self._backend._lib.CMAC_CTX_free)
+
+ key_ptr = self._backend._ffi.from_buffer(self._key)
+ res = self._backend._lib.CMAC_Init(
+ ctx,
+ key_ptr,
+ len(self._key),
+ evp_cipher,
+ self._backend._ffi.NULL,
+ )
+ self._backend.openssl_assert(res == 1)
+
+ self._ctx = ctx
+
+ def update(self, data: bytes) -> None:
+ res = self._backend._lib.CMAC_Update(self._ctx, data, len(data))
+ self._backend.openssl_assert(res == 1)
+
+ def finalize(self) -> bytes:
+ buf = self._backend._ffi.new("unsigned char[]", self._output_length)
+ length = self._backend._ffi.new("size_t *", self._output_length)
+ res = self._backend._lib.CMAC_Final(self._ctx, buf, length)
+ self._backend.openssl_assert(res == 1)
+
+ self._ctx = None
+
+ return self._backend._ffi.buffer(buf)[:]
+
+ def copy(self) -> _CMACContext:
+ copied_ctx = self._backend._lib.CMAC_CTX_new()
+ copied_ctx = self._backend._ffi.gc(
+ copied_ctx, self._backend._lib.CMAC_CTX_free
+ )
+ res = self._backend._lib.CMAC_CTX_copy(copied_ctx, self._ctx)
+ self._backend.openssl_assert(res == 1)
+ return _CMACContext(self._backend, self._algorithm, ctx=copied_ctx)
+
+ def verify(self, signature: bytes) -> None:
+ digest = self.finalize()
+ if not constant_time.bytes_eq(digest, signature):
+ raise InvalidSignature("Signature did not match digest.")
diff --git a/billinglayer/python/cryptography/hazmat/backends/openssl/decode_asn1.py b/billinglayer/python/cryptography/hazmat/backends/openssl/decode_asn1.py
new file mode 100644
index 0000000..bf123b6
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/backends/openssl/decode_asn1.py
@@ -0,0 +1,32 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+from cryptography import x509
+
+# CRLReason ::= ENUMERATED {
+# unspecified (0),
+# keyCompromise (1),
+# cACompromise (2),
+# affiliationChanged (3),
+# superseded (4),
+# cessationOfOperation (5),
+# certificateHold (6),
+# -- value 7 is not used
+# removeFromCRL (8),
+# privilegeWithdrawn (9),
+# aACompromise (10) }
+_CRL_ENTRY_REASON_ENUM_TO_CODE = {
+ x509.ReasonFlags.unspecified: 0,
+ x509.ReasonFlags.key_compromise: 1,
+ x509.ReasonFlags.ca_compromise: 2,
+ x509.ReasonFlags.affiliation_changed: 3,
+ x509.ReasonFlags.superseded: 4,
+ x509.ReasonFlags.cessation_of_operation: 5,
+ x509.ReasonFlags.certificate_hold: 6,
+ x509.ReasonFlags.remove_from_crl: 8,
+ x509.ReasonFlags.privilege_withdrawn: 9,
+ x509.ReasonFlags.aa_compromise: 10,
+}
diff --git a/billinglayer/python/cryptography/hazmat/backends/openssl/ec.py b/billinglayer/python/cryptography/hazmat/backends/openssl/ec.py
new file mode 100644
index 0000000..9821bd1
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/backends/openssl/ec.py
@@ -0,0 +1,328 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import typing
+
+from cryptography.exceptions import (
+ InvalidSignature,
+ UnsupportedAlgorithm,
+ _Reasons,
+)
+from cryptography.hazmat.backends.openssl.utils import (
+ _calculate_digest_and_algorithm,
+ _evp_pkey_derive,
+)
+from cryptography.hazmat.primitives import serialization
+from cryptography.hazmat.primitives.asymmetric import ec
+
+if typing.TYPE_CHECKING:
+ from cryptography.hazmat.backends.openssl.backend import Backend
+
+
+def _check_signature_algorithm(
+ signature_algorithm: ec.EllipticCurveSignatureAlgorithm,
+) -> None:
+ if not isinstance(signature_algorithm, ec.ECDSA):
+ raise UnsupportedAlgorithm(
+ "Unsupported elliptic curve signature algorithm.",
+ _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
+ )
+
+
+def _ec_key_curve_sn(backend: Backend, ec_key) -> str:
+ group = backend._lib.EC_KEY_get0_group(ec_key)
+ backend.openssl_assert(group != backend._ffi.NULL)
+
+ nid = backend._lib.EC_GROUP_get_curve_name(group)
+ # The following check is to find EC keys with unnamed curves and raise
+ # an error for now.
+ if nid == backend._lib.NID_undef:
+ raise ValueError(
+ "ECDSA keys with explicit parameters are unsupported at this time"
+ )
+
+ # This is like the above check, but it also catches the case where you
+ # explicitly encoded a curve with the same parameters as a named curve.
+ # Don't do that.
+ if (
+ not backend._lib.CRYPTOGRAPHY_IS_LIBRESSL
+ and backend._lib.EC_GROUP_get_asn1_flag(group) == 0
+ ):
+ raise ValueError(
+ "ECDSA keys with explicit parameters are unsupported at this time"
+ )
+
+ curve_name = backend._lib.OBJ_nid2sn(nid)
+ backend.openssl_assert(curve_name != backend._ffi.NULL)
+
+ sn = backend._ffi.string(curve_name).decode("ascii")
+ return sn
+
+
+def _mark_asn1_named_ec_curve(backend: Backend, ec_cdata):
+ """
+ Set the named curve flag on the EC_KEY. This causes OpenSSL to
+ serialize EC keys along with their curve OID which makes
+ deserialization easier.
+ """
+
+ backend._lib.EC_KEY_set_asn1_flag(
+ ec_cdata, backend._lib.OPENSSL_EC_NAMED_CURVE
+ )
+
+
+def _check_key_infinity(backend: Backend, ec_cdata) -> None:
+ point = backend._lib.EC_KEY_get0_public_key(ec_cdata)
+ backend.openssl_assert(point != backend._ffi.NULL)
+ group = backend._lib.EC_KEY_get0_group(ec_cdata)
+ backend.openssl_assert(group != backend._ffi.NULL)
+ if backend._lib.EC_POINT_is_at_infinity(group, point):
+ raise ValueError(
+ "Cannot load an EC public key where the point is at infinity"
+ )
+
+
+def _sn_to_elliptic_curve(backend: Backend, sn: str) -> ec.EllipticCurve:
+ try:
+ return ec._CURVE_TYPES[sn]()
+ except KeyError:
+ raise UnsupportedAlgorithm(
+ f"{sn} is not a supported elliptic curve",
+ _Reasons.UNSUPPORTED_ELLIPTIC_CURVE,
+ )
+
+
+def _ecdsa_sig_sign(
+ backend: Backend, private_key: _EllipticCurvePrivateKey, data: bytes
+) -> bytes:
+ max_size = backend._lib.ECDSA_size(private_key._ec_key)
+ backend.openssl_assert(max_size > 0)
+
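+    # ECDSA_size gives an upper bound on the DER-encoded signature length;
+    # the actual length is written back through siglen_ptr.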
+ sigbuf = backend._ffi.new("unsigned char[]", max_size)
+ siglen_ptr = backend._ffi.new("unsigned int[]", 1)
+ res = backend._lib.ECDSA_sign(
+ 0, data, len(data), sigbuf, siglen_ptr, private_key._ec_key
+ )
+ backend.openssl_assert(res == 1)
+ return backend._ffi.buffer(sigbuf)[: siglen_ptr[0]]
+
+
+def _ecdsa_sig_verify(
+ backend: Backend,
+ public_key: _EllipticCurvePublicKey,
+ signature: bytes,
+ data: bytes,
+) -> None:
+ res = backend._lib.ECDSA_verify(
+ 0, data, len(data), signature, len(signature), public_key._ec_key
+ )
+ if res != 1:
+ backend._consume_errors()
+ raise InvalidSignature
+
+
+class _EllipticCurvePrivateKey(ec.EllipticCurvePrivateKey):
+ def __init__(self, backend: Backend, ec_key_cdata, evp_pkey):
+ self._backend = backend
+ self._ec_key = ec_key_cdata
+ self._evp_pkey = evp_pkey
+
+ sn = _ec_key_curve_sn(backend, ec_key_cdata)
+ self._curve = _sn_to_elliptic_curve(backend, sn)
+ _mark_asn1_named_ec_curve(backend, ec_key_cdata)
+ _check_key_infinity(backend, ec_key_cdata)
+
+ @property
+ def curve(self) -> ec.EllipticCurve:
+ return self._curve
+
+ @property
+ def key_size(self) -> int:
+ return self.curve.key_size
+
+ def exchange(
+ self, algorithm: ec.ECDH, peer_public_key: ec.EllipticCurvePublicKey
+ ) -> bytes:
+ if not (
+ self._backend.elliptic_curve_exchange_algorithm_supported(
+ algorithm, self.curve
+ )
+ ):
+ raise UnsupportedAlgorithm(
+ "This backend does not support the ECDH algorithm.",
+ _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
+ )
+
+ if peer_public_key.curve.name != self.curve.name:
+ raise ValueError(
+ "peer_public_key and self are not on the same curve"
+ )
+
+ return _evp_pkey_derive(self._backend, self._evp_pkey, peer_public_key)
+
+ def public_key(self) -> ec.EllipticCurvePublicKey:
+ group = self._backend._lib.EC_KEY_get0_group(self._ec_key)
+ self._backend.openssl_assert(group != self._backend._ffi.NULL)
+
+ curve_nid = self._backend._lib.EC_GROUP_get_curve_name(group)
+ public_ec_key = self._backend._ec_key_new_by_curve_nid(curve_nid)
+
+ point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key)
+ self._backend.openssl_assert(point != self._backend._ffi.NULL)
+
+ res = self._backend._lib.EC_KEY_set_public_key(public_ec_key, point)
+ self._backend.openssl_assert(res == 1)
+
+ evp_pkey = self._backend._ec_cdata_to_evp_pkey(public_ec_key)
+
+ return _EllipticCurvePublicKey(self._backend, public_ec_key, evp_pkey)
+
+ def private_numbers(self) -> ec.EllipticCurvePrivateNumbers:
+ bn = self._backend._lib.EC_KEY_get0_private_key(self._ec_key)
+ private_value = self._backend._bn_to_int(bn)
+ return ec.EllipticCurvePrivateNumbers(
+ private_value=private_value,
+ public_numbers=self.public_key().public_numbers(),
+ )
+
+ def private_bytes(
+ self,
+ encoding: serialization.Encoding,
+ format: serialization.PrivateFormat,
+ encryption_algorithm: serialization.KeySerializationEncryption,
+ ) -> bytes:
+ return self._backend._private_key_bytes(
+ encoding,
+ format,
+ encryption_algorithm,
+ self,
+ self._evp_pkey,
+ self._ec_key,
+ )
+
+ def sign(
+ self,
+ data: bytes,
+ signature_algorithm: ec.EllipticCurveSignatureAlgorithm,
+ ) -> bytes:
+ _check_signature_algorithm(signature_algorithm)
+ data, _ = _calculate_digest_and_algorithm(
+ data,
+ signature_algorithm.algorithm,
+ )
+ return _ecdsa_sig_sign(self._backend, self, data)
+
+
+class _EllipticCurvePublicKey(ec.EllipticCurvePublicKey):
+ def __init__(self, backend: Backend, ec_key_cdata, evp_pkey):
+ self._backend = backend
+ self._ec_key = ec_key_cdata
+ self._evp_pkey = evp_pkey
+
+ sn = _ec_key_curve_sn(backend, ec_key_cdata)
+ self._curve = _sn_to_elliptic_curve(backend, sn)
+ _mark_asn1_named_ec_curve(backend, ec_key_cdata)
+ _check_key_infinity(backend, ec_key_cdata)
+
+ @property
+ def curve(self) -> ec.EllipticCurve:
+ return self._curve
+
+ @property
+ def key_size(self) -> int:
+ return self.curve.key_size
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, _EllipticCurvePublicKey):
+ return NotImplemented
+
+ return (
+ self._backend._lib.EVP_PKEY_cmp(self._evp_pkey, other._evp_pkey)
+ == 1
+ )
+
+ def public_numbers(self) -> ec.EllipticCurvePublicNumbers:
+ group = self._backend._lib.EC_KEY_get0_group(self._ec_key)
+ self._backend.openssl_assert(group != self._backend._ffi.NULL)
+
+ point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key)
+ self._backend.openssl_assert(point != self._backend._ffi.NULL)
+
+ with self._backend._tmp_bn_ctx() as bn_ctx:
+ bn_x = self._backend._lib.BN_CTX_get(bn_ctx)
+ bn_y = self._backend._lib.BN_CTX_get(bn_ctx)
+
+ res = self._backend._lib.EC_POINT_get_affine_coordinates(
+ group, point, bn_x, bn_y, bn_ctx
+ )
+ self._backend.openssl_assert(res == 1)
+
+ x = self._backend._bn_to_int(bn_x)
+ y = self._backend._bn_to_int(bn_y)
+
+ return ec.EllipticCurvePublicNumbers(x=x, y=y, curve=self._curve)
+
+ def _encode_point(self, format: serialization.PublicFormat) -> bytes:
+ if format is serialization.PublicFormat.CompressedPoint:
+ conversion = self._backend._lib.POINT_CONVERSION_COMPRESSED
+ else:
+ assert format is serialization.PublicFormat.UncompressedPoint
+ conversion = self._backend._lib.POINT_CONVERSION_UNCOMPRESSED
+
+ group = self._backend._lib.EC_KEY_get0_group(self._ec_key)
+ self._backend.openssl_assert(group != self._backend._ffi.NULL)
+ point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key)
+ self._backend.openssl_assert(point != self._backend._ffi.NULL)
+ with self._backend._tmp_bn_ctx() as bn_ctx:
+ buflen = self._backend._lib.EC_POINT_point2oct(
+ group, point, conversion, self._backend._ffi.NULL, 0, bn_ctx
+ )
+ self._backend.openssl_assert(buflen > 0)
+ buf = self._backend._ffi.new("char[]", buflen)
+ res = self._backend._lib.EC_POINT_point2oct(
+ group, point, conversion, buf, buflen, bn_ctx
+ )
+ self._backend.openssl_assert(buflen == res)
+
+ return self._backend._ffi.buffer(buf)[:]
+
+ def public_bytes(
+ self,
+ encoding: serialization.Encoding,
+ format: serialization.PublicFormat,
+ ) -> bytes:
+ if (
+ encoding is serialization.Encoding.X962
+ or format is serialization.PublicFormat.CompressedPoint
+ or format is serialization.PublicFormat.UncompressedPoint
+ ):
+ if encoding is not serialization.Encoding.X962 or format not in (
+ serialization.PublicFormat.CompressedPoint,
+ serialization.PublicFormat.UncompressedPoint,
+ ):
+ raise ValueError(
+ "X962 encoding must be used with CompressedPoint or "
+ "UncompressedPoint format"
+ )
+
+ return self._encode_point(format)
+ else:
+ return self._backend._public_key_bytes(
+ encoding, format, self, self._evp_pkey, None
+ )
+
+ def verify(
+ self,
+ signature: bytes,
+ data: bytes,
+ signature_algorithm: ec.EllipticCurveSignatureAlgorithm,
+ ) -> None:
+ _check_signature_algorithm(signature_algorithm)
+ data, _ = _calculate_digest_and_algorithm(
+ data,
+ signature_algorithm.algorithm,
+ )
+ _ecdsa_sig_verify(self._backend, self, signature, data)
diff --git a/billinglayer/python/cryptography/hazmat/backends/openssl/rsa.py b/billinglayer/python/cryptography/hazmat/backends/openssl/rsa.py
new file mode 100644
index 0000000..ef27d4e
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/backends/openssl/rsa.py
@@ -0,0 +1,599 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import threading
+import typing
+
+from cryptography.exceptions import (
+ InvalidSignature,
+ UnsupportedAlgorithm,
+ _Reasons,
+)
+from cryptography.hazmat.backends.openssl.utils import (
+ _calculate_digest_and_algorithm,
+)
+from cryptography.hazmat.primitives import hashes, serialization
+from cryptography.hazmat.primitives.asymmetric import utils as asym_utils
+from cryptography.hazmat.primitives.asymmetric.padding import (
+ MGF1,
+ OAEP,
+ PSS,
+ AsymmetricPadding,
+ PKCS1v15,
+ _Auto,
+ _DigestLength,
+ _MaxLength,
+ calculate_max_pss_salt_length,
+)
+from cryptography.hazmat.primitives.asymmetric.rsa import (
+ RSAPrivateKey,
+ RSAPrivateNumbers,
+ RSAPublicKey,
+ RSAPublicNumbers,
+)
+
+if typing.TYPE_CHECKING:
+ from cryptography.hazmat.backends.openssl.backend import Backend
+
+
+def _get_rsa_pss_salt_length(
+ backend: Backend,
+ pss: PSS,
+ key: typing.Union[RSAPrivateKey, RSAPublicKey],
+ hash_algorithm: hashes.HashAlgorithm,
+) -> int:
+ salt = pss._salt_length
+
+ if isinstance(salt, _MaxLength):
+ return calculate_max_pss_salt_length(key, hash_algorithm)
+ elif isinstance(salt, _DigestLength):
+ return hash_algorithm.digest_size
+ elif isinstance(salt, _Auto):
+ if isinstance(key, RSAPrivateKey):
+ raise ValueError(
+ "PSS salt length can only be set to AUTO when verifying"
+ )
+ return backend._lib.RSA_PSS_SALTLEN_AUTO
+ else:
+ return salt
+
+
+def _enc_dec_rsa(
+ backend: Backend,
+ key: typing.Union[_RSAPrivateKey, _RSAPublicKey],
+ data: bytes,
+ padding: AsymmetricPadding,
+) -> bytes:
+ if not isinstance(padding, AsymmetricPadding):
+ raise TypeError("Padding must be an instance of AsymmetricPadding.")
+
+ if isinstance(padding, PKCS1v15):
+ padding_enum = backend._lib.RSA_PKCS1_PADDING
+ elif isinstance(padding, OAEP):
+ padding_enum = backend._lib.RSA_PKCS1_OAEP_PADDING
+
+ if not isinstance(padding._mgf, MGF1):
+ raise UnsupportedAlgorithm(
+ "Only MGF1 is supported by this backend.",
+ _Reasons.UNSUPPORTED_MGF,
+ )
+
+ if not backend.rsa_padding_supported(padding):
+ raise UnsupportedAlgorithm(
+ "This combination of padding and hash algorithm is not "
+ "supported by this backend.",
+ _Reasons.UNSUPPORTED_PADDING,
+ )
+
+ else:
+ raise UnsupportedAlgorithm(
+ f"{padding.name} is not supported by this backend.",
+ _Reasons.UNSUPPORTED_PADDING,
+ )
+
+ return _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum, padding)
+
+
+def _enc_dec_rsa_pkey_ctx(
+ backend: Backend,
+ key: typing.Union[_RSAPrivateKey, _RSAPublicKey],
+ data: bytes,
+ padding_enum: int,
+ padding: AsymmetricPadding,
+) -> bytes:
+ init: typing.Callable[[typing.Any], int]
+ crypt: typing.Callable[[typing.Any, typing.Any, int, bytes, int], int]
+ if isinstance(key, _RSAPublicKey):
+ init = backend._lib.EVP_PKEY_encrypt_init
+ crypt = backend._lib.EVP_PKEY_encrypt
+ else:
+ init = backend._lib.EVP_PKEY_decrypt_init
+ crypt = backend._lib.EVP_PKEY_decrypt
+
+ pkey_ctx = backend._lib.EVP_PKEY_CTX_new(key._evp_pkey, backend._ffi.NULL)
+ backend.openssl_assert(pkey_ctx != backend._ffi.NULL)
+ pkey_ctx = backend._ffi.gc(pkey_ctx, backend._lib.EVP_PKEY_CTX_free)
+ res = init(pkey_ctx)
+ backend.openssl_assert(res == 1)
+ res = backend._lib.EVP_PKEY_CTX_set_rsa_padding(pkey_ctx, padding_enum)
+ backend.openssl_assert(res > 0)
+ buf_size = backend._lib.EVP_PKEY_size(key._evp_pkey)
+ backend.openssl_assert(buf_size > 0)
+ if isinstance(padding, OAEP):
+ mgf1_md = backend._evp_md_non_null_from_algorithm(
+ padding._mgf._algorithm
+ )
+ res = backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md(pkey_ctx, mgf1_md)
+ backend.openssl_assert(res > 0)
+ oaep_md = backend._evp_md_non_null_from_algorithm(padding._algorithm)
+ res = backend._lib.EVP_PKEY_CTX_set_rsa_oaep_md(pkey_ctx, oaep_md)
+ backend.openssl_assert(res > 0)
+
+ if (
+ isinstance(padding, OAEP)
+ and padding._label is not None
+ and len(padding._label) > 0
+ ):
+ # set0_rsa_oaep_label takes ownership of the char * so we need to
+ # copy it into some new memory
+ labelptr = backend._lib.OPENSSL_malloc(len(padding._label))
+ backend.openssl_assert(labelptr != backend._ffi.NULL)
+ backend._ffi.memmove(labelptr, padding._label, len(padding._label))
+ res = backend._lib.EVP_PKEY_CTX_set0_rsa_oaep_label(
+ pkey_ctx, labelptr, len(padding._label)
+ )
+ backend.openssl_assert(res == 1)
+
+ outlen = backend._ffi.new("size_t *", buf_size)
+ buf = backend._ffi.new("unsigned char[]", buf_size)
+ # Everything from this line onwards is written with the goal of being as
+ # constant-time as is practical given the constraints of Python and our
+ # API. See Bleichenbacher's '98 attack on RSA, and its many many variants.
+ # As such, you should not attempt to change this (particularly to "clean it
+ # up") without understanding why it was written this way (see
+ # Chesterton's Fence), and without measuring to verify you have not
+ # introduced observable time differences.
+ res = crypt(pkey_ctx, buf, outlen, data, len(data))
+ resbuf = backend._ffi.buffer(buf)[: outlen[0]]
+ backend._lib.ERR_clear_error()
+ if res <= 0:
+ raise ValueError("Encryption/decryption failed.")
+ return resbuf
+
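+ # Hedged example (commented out): the context configured above is driven by
+ # the public encrypt()/decrypt() API. Decryption failures surface as a bare
+ # ValueError with no OpenSSL detail, which is deliberate given the
+ # Bleichenbacher concerns documented above.
+ #
+ #   from cryptography.hazmat.primitives import hashes
+ #   from cryptography.hazmat.primitives.asymmetric import padding, rsa
+ #
+ #   key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
+ #   oaep = padding.OAEP(
+ #       mgf=padding.MGF1(algorithm=hashes.SHA256()),
+ #       algorithm=hashes.SHA256(),
+ #       label=None,
+ #   )
+ #   ct = key.public_key().encrypt(b"secret message", oaep)
+ #   assert key.decrypt(ct, oaep) == b"secret message"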
+
+def _rsa_sig_determine_padding(
+ backend: Backend,
+ key: typing.Union[_RSAPrivateKey, _RSAPublicKey],
+ padding: AsymmetricPadding,
+ algorithm: typing.Optional[hashes.HashAlgorithm],
+) -> int:
+ if not isinstance(padding, AsymmetricPadding):
+ raise TypeError("Expected provider of AsymmetricPadding.")
+
+ pkey_size = backend._lib.EVP_PKEY_size(key._evp_pkey)
+ backend.openssl_assert(pkey_size > 0)
+
+ if isinstance(padding, PKCS1v15):
+ # The hash algorithm is ignored for PKCS1v15 padding; it may be None.
+ padding_enum = backend._lib.RSA_PKCS1_PADDING
+ elif isinstance(padding, PSS):
+ if not isinstance(padding._mgf, MGF1):
+ raise UnsupportedAlgorithm(
+ "Only MGF1 is supported by this backend.",
+ _Reasons.UNSUPPORTED_MGF,
+ )
+
+ # PSS padding requires a hash algorithm
+ if not isinstance(algorithm, hashes.HashAlgorithm):
+ raise TypeError("Expected instance of hashes.HashAlgorithm.")
+
+ # The key size in bytes minus 2 is the largest digest PSS can
+ # accommodate (the salt length is checked later).
+ if pkey_size - algorithm.digest_size - 2 < 0:
+ raise ValueError(
+ "Digest too large for key size. Use a larger "
+ "key or different digest."
+ )
+
+ padding_enum = backend._lib.RSA_PKCS1_PSS_PADDING
+ else:
+ raise UnsupportedAlgorithm(
+ f"{padding.name} is not supported by this backend.",
+ _Reasons.UNSUPPORTED_PADDING,
+ )
+
+ return padding_enum
+
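+ # Worked check (illustrative numbers, not from the source): for a 2048-bit
+ # key EVP_PKEY_size is 256 bytes, so even SHA-512 passes the PSS test
+ # above: 256 - 64 - 2 = 190 >= 0. A 512-bit key (64 bytes) with SHA-512
+ # would fail: 64 - 64 - 2 = -2 < 0, raising the "Digest too large for key
+ # size" ValueError.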
+
+# Hash algorithm can be absent (None) to initialize the context without setting
+# any message digest algorithm. This is currently only valid for the PKCS1v15
+# padding type, where it means that the signature data is encoded/decoded
+# as provided, without being wrapped in a DigestInfo structure.
+def _rsa_sig_setup(
+ backend: Backend,
+ padding: AsymmetricPadding,
+ algorithm: typing.Optional[hashes.HashAlgorithm],
+ key: typing.Union[_RSAPublicKey, _RSAPrivateKey],
+ init_func: typing.Callable[[typing.Any], int],
+):
+ padding_enum = _rsa_sig_determine_padding(backend, key, padding, algorithm)
+ pkey_ctx = backend._lib.EVP_PKEY_CTX_new(key._evp_pkey, backend._ffi.NULL)
+ backend.openssl_assert(pkey_ctx != backend._ffi.NULL)
+ pkey_ctx = backend._ffi.gc(pkey_ctx, backend._lib.EVP_PKEY_CTX_free)
+ res = init_func(pkey_ctx)
+ if res != 1:
+ errors = backend._consume_errors()
+ raise ValueError("Unable to sign/verify with this key", errors)
+
+ if algorithm is not None:
+ evp_md = backend._evp_md_non_null_from_algorithm(algorithm)
+ res = backend._lib.EVP_PKEY_CTX_set_signature_md(pkey_ctx, evp_md)
+ if res <= 0:
+ backend._consume_errors()
+ raise UnsupportedAlgorithm(
+ "{} is not supported by this backend for RSA signing.".format(
+ algorithm.name
+ ),
+ _Reasons.UNSUPPORTED_HASH,
+ )
+ res = backend._lib.EVP_PKEY_CTX_set_rsa_padding(pkey_ctx, padding_enum)
+ if res <= 0:
+ backend._consume_errors()
+ raise UnsupportedAlgorithm(
+ "{} is not supported for the RSA signature operation.".format(
+ padding.name
+ ),
+ _Reasons.UNSUPPORTED_PADDING,
+ )
+ if isinstance(padding, PSS):
+ assert isinstance(algorithm, hashes.HashAlgorithm)
+ res = backend._lib.EVP_PKEY_CTX_set_rsa_pss_saltlen(
+ pkey_ctx,
+ _get_rsa_pss_salt_length(backend, padding, key, algorithm),
+ )
+ backend.openssl_assert(res > 0)
+
+ mgf1_md = backend._evp_md_non_null_from_algorithm(
+ padding._mgf._algorithm
+ )
+ res = backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md(pkey_ctx, mgf1_md)
+ backend.openssl_assert(res > 0)
+
+ return pkey_ctx
+
+
+def _rsa_sig_sign(
+ backend: Backend,
+ padding: AsymmetricPadding,
+ algorithm: hashes.HashAlgorithm,
+ private_key: _RSAPrivateKey,
+ data: bytes,
+) -> bytes:
+ pkey_ctx = _rsa_sig_setup(
+ backend,
+ padding,
+ algorithm,
+ private_key,
+ backend._lib.EVP_PKEY_sign_init,
+ )
+ buflen = backend._ffi.new("size_t *")
+ res = backend._lib.EVP_PKEY_sign(
+ pkey_ctx, backend._ffi.NULL, buflen, data, len(data)
+ )
+ backend.openssl_assert(res == 1)
+ buf = backend._ffi.new("unsigned char[]", buflen[0])
+ res = backend._lib.EVP_PKEY_sign(pkey_ctx, buf, buflen, data, len(data))
+ if res != 1:
+ errors = backend._consume_errors()
+ raise ValueError(
+ "Digest or salt length too long for key size. Use a larger key "
+ "or shorter salt length if you are specifying a PSS salt",
+ errors,
+ )
+
+ return backend._ffi.buffer(buf)[:]
+
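+ # Hedged sketch (commented out): the NULL-buffer call above is the standard
+ # OpenSSL size-query idiom -- call EVP_PKEY_sign once with a NULL output
+ # buffer to learn the required length, allocate, then sign for real.
+ # Through the public API the whole flow is simply:
+ #
+ #   from cryptography.hazmat.primitives import hashes
+ #   from cryptography.hazmat.primitives.asymmetric import padding, rsa
+ #
+ #   key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
+ #   sig = key.sign(
+ #       b"message",
+ #       padding.PSS(
+ #           mgf=padding.MGF1(hashes.SHA256()),
+ #           salt_length=padding.PSS.MAX_LENGTH,
+ #       ),
+ #       hashes.SHA256(),
+ #   )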
+
+def _rsa_sig_verify(
+ backend: Backend,
+ padding: AsymmetricPadding,
+ algorithm: hashes.HashAlgorithm,
+ public_key: _RSAPublicKey,
+ signature: bytes,
+ data: bytes,
+) -> None:
+ pkey_ctx = _rsa_sig_setup(
+ backend,
+ padding,
+ algorithm,
+ public_key,
+ backend._lib.EVP_PKEY_verify_init,
+ )
+ res = backend._lib.EVP_PKEY_verify(
+ pkey_ctx, signature, len(signature), data, len(data)
+ )
+ # The previous call can return negative numbers in the event of an
+ # error. This is not a signature failure but we need to fail if it
+ # occurs.
+ backend.openssl_assert(res >= 0)
+ if res == 0:
+ backend._consume_errors()
+ raise InvalidSignature
+
+
+def _rsa_sig_recover(
+ backend: Backend,
+ padding: AsymmetricPadding,
+ algorithm: typing.Optional[hashes.HashAlgorithm],
+ public_key: _RSAPublicKey,
+ signature: bytes,
+) -> bytes:
+ pkey_ctx = _rsa_sig_setup(
+ backend,
+ padding,
+ algorithm,
+ public_key,
+ backend._lib.EVP_PKEY_verify_recover_init,
+ )
+
+ # Attempt to keep the rest of the code in this function as constant-time
+ # as possible. See the comment in _enc_dec_rsa_pkey_ctx. Note that the
+ # buflen parameter is used even though its value may be undefined in the
+ # error case. Due to the tolerant nature of Python slicing this does not
+ # trigger any exceptions.
+ maxlen = backend._lib.EVP_PKEY_size(public_key._evp_pkey)
+ backend.openssl_assert(maxlen > 0)
+ buf = backend._ffi.new("unsigned char[]", maxlen)
+ buflen = backend._ffi.new("size_t *", maxlen)
+ res = backend._lib.EVP_PKEY_verify_recover(
+ pkey_ctx, buf, buflen, signature, len(signature)
+ )
+ resbuf = backend._ffi.buffer(buf)[: buflen[0]]
+ backend._lib.ERR_clear_error()
+ # Assume that all parameter errors are handled during the setup phase and
+ # any error here is due to invalid signature.
+ if res != 1:
+ raise InvalidSignature
+ return resbuf
+
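+ # Hedged sketch (commented out): signature recovery is exposed publicly as
+ # RSAPublicKey.recover_data_from_signature. Per the comment on
+ # _rsa_sig_setup, passing algorithm=None with PKCS1v15 returns the signed
+ # payload exactly as encoded (including any DigestInfo wrapper):
+ #
+ #   recovered = public_key.recover_data_from_signature(
+ #       signature, padding.PKCS1v15(), None
+ #   )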
+
+class _RSAPrivateKey(RSAPrivateKey):
+ _evp_pkey: object
+ _rsa_cdata: object
+ _key_size: int
+
+ def __init__(
+ self,
+ backend: Backend,
+ rsa_cdata,
+ evp_pkey,
+ *,
+ unsafe_skip_rsa_key_validation: bool,
+ ):
+ res: int
+ # RSA_check_key is slower in OpenSSL 3.0.0 due to improved
+ # primality checking. In normal use this is unlikely to be a problem
+ # since users don't load new keys constantly, but for TESTING we've
+ # added an init arg that allows skipping the checks. You should not
+ # use this in production code unless you understand the consequences.
+ if not unsafe_skip_rsa_key_validation:
+ res = backend._lib.RSA_check_key(rsa_cdata)
+ if res != 1:
+ errors = backend._consume_errors()
+ raise ValueError("Invalid private key", errors)
+ # A factor of 2 is prime and still passes RSA_check_key, so we
+ # also check that p and q are odd just to be safe.
+ p = backend._ffi.new("BIGNUM **")
+ q = backend._ffi.new("BIGNUM **")
+ backend._lib.RSA_get0_factors(rsa_cdata, p, q)
+ backend.openssl_assert(p[0] != backend._ffi.NULL)
+ backend.openssl_assert(q[0] != backend._ffi.NULL)
+ p_odd = backend._lib.BN_is_odd(p[0])
+ q_odd = backend._lib.BN_is_odd(q[0])
+ if p_odd != 1 or q_odd != 1:
+ errors = backend._consume_errors()
+ raise ValueError("Invalid private key", errors)
+
+ self._backend = backend
+ self._rsa_cdata = rsa_cdata
+ self._evp_pkey = evp_pkey
+ # Used for lazy blinding
+ self._blinded = False
+ self._blinding_lock = threading.Lock()
+
+ n = self._backend._ffi.new("BIGNUM **")
+ self._backend._lib.RSA_get0_key(
+ self._rsa_cdata,
+ n,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ )
+ self._backend.openssl_assert(n[0] != self._backend._ffi.NULL)
+ self._key_size = self._backend._lib.BN_num_bits(n[0])
+
+ def _enable_blinding(self) -> None:
+ # Enabling blinding on an already-blinded RSA key makes OpenSSL turn
+ # it off and back on, which is a performance hit we want to avoid.
+ if not self._blinded:
+ with self._blinding_lock:
+ self._non_threadsafe_enable_blinding()
+
+ def _non_threadsafe_enable_blinding(self) -> None:
+ # This is only a separate function to allow for testing to cover both
+ # branches. It should never be invoked except through _enable_blinding.
+ # Re-check the flag under the lock in case another thread raced past
+ # the first, unlocked check.
+ if not self._blinded:
+ res = self._backend._lib.RSA_blinding_on(
+ self._rsa_cdata, self._backend._ffi.NULL
+ )
+ self._backend.openssl_assert(res == 1)
+ self._blinded = True
+
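+ # The two methods above form a double-checked lock: a cheap unlocked read
+ # of self._blinded skips the lock on the hot path, and the flag is checked
+ # again under the lock before mutating. A minimal standalone sketch of the
+ # same pattern (illustration only, unused by this module):
+ #
+ #   import threading
+ #
+ #   class OnceFlag:
+ #       def __init__(self):
+ #           self._done = False
+ #           self._lock = threading.Lock()
+ #
+ #       def run_once(self, fn):
+ #           if not self._done:             # fast, unlocked check
+ #               with self._lock:
+ #                   if not self._done:     # re-check under the lock
+ #                       fn()
+ #                       self._done = True
+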
+ @property
+ def key_size(self) -> int:
+ return self._key_size
+
+ def decrypt(self, ciphertext: bytes, padding: AsymmetricPadding) -> bytes:
+ self._enable_blinding()
+ key_size_bytes = (self.key_size + 7) // 8
+ if key_size_bytes != len(ciphertext):
+ raise ValueError("Ciphertext length must be equal to key size.")
+
+ return _enc_dec_rsa(self._backend, self, ciphertext, padding)
+
+ def public_key(self) -> RSAPublicKey:
+ ctx = self._backend._lib.RSAPublicKey_dup(self._rsa_cdata)
+ self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
+ ctx = self._backend._ffi.gc(ctx, self._backend._lib.RSA_free)
+ evp_pkey = self._backend._rsa_cdata_to_evp_pkey(ctx)
+ return _RSAPublicKey(self._backend, ctx, evp_pkey)
+
+ def private_numbers(self) -> RSAPrivateNumbers:
+ n = self._backend._ffi.new("BIGNUM **")
+ e = self._backend._ffi.new("BIGNUM **")
+ d = self._backend._ffi.new("BIGNUM **")
+ p = self._backend._ffi.new("BIGNUM **")
+ q = self._backend._ffi.new("BIGNUM **")
+ dmp1 = self._backend._ffi.new("BIGNUM **")
+ dmq1 = self._backend._ffi.new("BIGNUM **")
+ iqmp = self._backend._ffi.new("BIGNUM **")
+ self._backend._lib.RSA_get0_key(self._rsa_cdata, n, e, d)
+ self._backend.openssl_assert(n[0] != self._backend._ffi.NULL)
+ self._backend.openssl_assert(e[0] != self._backend._ffi.NULL)
+ self._backend.openssl_assert(d[0] != self._backend._ffi.NULL)
+ self._backend._lib.RSA_get0_factors(self._rsa_cdata, p, q)
+ self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
+ self._backend.openssl_assert(q[0] != self._backend._ffi.NULL)
+ self._backend._lib.RSA_get0_crt_params(
+ self._rsa_cdata, dmp1, dmq1, iqmp
+ )
+ self._backend.openssl_assert(dmp1[0] != self._backend._ffi.NULL)
+ self._backend.openssl_assert(dmq1[0] != self._backend._ffi.NULL)
+ self._backend.openssl_assert(iqmp[0] != self._backend._ffi.NULL)
+ return RSAPrivateNumbers(
+ p=self._backend._bn_to_int(p[0]),
+ q=self._backend._bn_to_int(q[0]),
+ d=self._backend._bn_to_int(d[0]),
+ dmp1=self._backend._bn_to_int(dmp1[0]),
+ dmq1=self._backend._bn_to_int(dmq1[0]),
+ iqmp=self._backend._bn_to_int(iqmp[0]),
+ public_numbers=RSAPublicNumbers(
+ e=self._backend._bn_to_int(e[0]),
+ n=self._backend._bn_to_int(n[0]),
+ ),
+ )
+
+ def private_bytes(
+ self,
+ encoding: serialization.Encoding,
+ format: serialization.PrivateFormat,
+ encryption_algorithm: serialization.KeySerializationEncryption,
+ ) -> bytes:
+ return self._backend._private_key_bytes(
+ encoding,
+ format,
+ encryption_algorithm,
+ self,
+ self._evp_pkey,
+ self._rsa_cdata,
+ )
+
+ def sign(
+ self,
+ data: bytes,
+ padding: AsymmetricPadding,
+ algorithm: typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm],
+ ) -> bytes:
+ self._enable_blinding()
+ data, algorithm = _calculate_digest_and_algorithm(data, algorithm)
+ return _rsa_sig_sign(self._backend, padding, algorithm, self, data)
+
+
+class _RSAPublicKey(RSAPublicKey):
+ _evp_pkey: object
+ _rsa_cdata: object
+ _key_size: int
+
+ def __init__(self, backend: Backend, rsa_cdata, evp_pkey):
+ self._backend = backend
+ self._rsa_cdata = rsa_cdata
+ self._evp_pkey = evp_pkey
+
+ n = self._backend._ffi.new("BIGNUM **")
+ self._backend._lib.RSA_get0_key(
+ self._rsa_cdata,
+ n,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ )
+ self._backend.openssl_assert(n[0] != self._backend._ffi.NULL)
+ self._key_size = self._backend._lib.BN_num_bits(n[0])
+
+ @property
+ def key_size(self) -> int:
+ return self._key_size
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, _RSAPublicKey):
+ return NotImplemented
+
+ return (
+ self._backend._lib.EVP_PKEY_cmp(self._evp_pkey, other._evp_pkey)
+ == 1
+ )
+
+ def encrypt(self, plaintext: bytes, padding: AsymmetricPadding) -> bytes:
+ return _enc_dec_rsa(self._backend, self, plaintext, padding)
+
+ def public_numbers(self) -> RSAPublicNumbers:
+ n = self._backend._ffi.new("BIGNUM **")
+ e = self._backend._ffi.new("BIGNUM **")
+ self._backend._lib.RSA_get0_key(
+ self._rsa_cdata, n, e, self._backend._ffi.NULL
+ )
+ self._backend.openssl_assert(n[0] != self._backend._ffi.NULL)
+ self._backend.openssl_assert(e[0] != self._backend._ffi.NULL)
+ return RSAPublicNumbers(
+ e=self._backend._bn_to_int(e[0]),
+ n=self._backend._bn_to_int(n[0]),
+ )
+
+ def public_bytes(
+ self,
+ encoding: serialization.Encoding,
+ format: serialization.PublicFormat,
+ ) -> bytes:
+ return self._backend._public_key_bytes(
+ encoding, format, self, self._evp_pkey, self._rsa_cdata
+ )
+
+ def verify(
+ self,
+ signature: bytes,
+ data: bytes,
+ padding: AsymmetricPadding,
+ algorithm: typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm],
+ ) -> None:
+ data, algorithm = _calculate_digest_and_algorithm(data, algorithm)
+ _rsa_sig_verify(
+ self._backend, padding, algorithm, self, signature, data
+ )
+
+ def recover_data_from_signature(
+ self,
+ signature: bytes,
+ padding: AsymmetricPadding,
+ algorithm: typing.Optional[hashes.HashAlgorithm],
+ ) -> bytes:
+ if isinstance(algorithm, asym_utils.Prehashed):
+ raise TypeError(
+ "Prehashed is only supported in the sign and verify methods. "
+ "It cannot be used with recover_data_from_signature."
+ )
+ return _rsa_sig_recover(
+ self._backend, padding, algorithm, self, signature
+ )
diff --git a/billinglayer/python/cryptography/hazmat/backends/openssl/utils.py b/billinglayer/python/cryptography/hazmat/backends/openssl/utils.py
new file mode 100644
index 0000000..5b404de
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/backends/openssl/utils.py
@@ -0,0 +1,63 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import typing
+
+from cryptography.hazmat.primitives import hashes
+from cryptography.hazmat.primitives.asymmetric.utils import Prehashed
+
+if typing.TYPE_CHECKING:
+ from cryptography.hazmat.backends.openssl.backend import Backend
+
+
+def _evp_pkey_derive(backend: Backend, evp_pkey, peer_public_key) -> bytes:
+ ctx = backend._lib.EVP_PKEY_CTX_new(evp_pkey, backend._ffi.NULL)
+ backend.openssl_assert(ctx != backend._ffi.NULL)
+ ctx = backend._ffi.gc(ctx, backend._lib.EVP_PKEY_CTX_free)
+ res = backend._lib.EVP_PKEY_derive_init(ctx)
+ backend.openssl_assert(res == 1)
+
+ if backend._lib.Cryptography_HAS_EVP_PKEY_SET_PEER_EX:
+ res = backend._lib.EVP_PKEY_derive_set_peer_ex(
+ ctx, peer_public_key._evp_pkey, 0
+ )
+ else:
+ res = backend._lib.EVP_PKEY_derive_set_peer(
+ ctx, peer_public_key._evp_pkey
+ )
+ backend.openssl_assert(res == 1)
+
+ keylen = backend._ffi.new("size_t *")
+ res = backend._lib.EVP_PKEY_derive(ctx, backend._ffi.NULL, keylen)
+ backend.openssl_assert(res == 1)
+ backend.openssl_assert(keylen[0] > 0)
+ buf = backend._ffi.new("unsigned char[]", keylen[0])
+ res = backend._lib.EVP_PKEY_derive(ctx, buf, keylen)
+ if res != 1:
+ errors = backend._consume_errors()
+ raise ValueError("Error computing shared key.", errors)
+
+ return backend._ffi.buffer(buf, keylen[0])[:]
+
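+ # Hedged sketch (commented out): this helper backs the public key-exchange
+ # APIs in this backend; for example an ECDH agreement funnels through it:
+ #
+ #   from cryptography.hazmat.primitives.asymmetric import ec
+ #
+ #   priv_a = ec.generate_private_key(ec.SECP256R1())
+ #   priv_b = ec.generate_private_key(ec.SECP256R1())
+ #   shared_a = priv_a.exchange(ec.ECDH(), priv_b.public_key())
+ #   shared_b = priv_b.exchange(ec.ECDH(), priv_a.public_key())
+ #   assert shared_a == shared_b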
+
+def _calculate_digest_and_algorithm(
+ data: bytes,
+ algorithm: typing.Union[Prehashed, hashes.HashAlgorithm],
+) -> typing.Tuple[bytes, hashes.HashAlgorithm]:
+ if not isinstance(algorithm, Prehashed):
+ hash_ctx = hashes.Hash(algorithm)
+ hash_ctx.update(data)
+ data = hash_ctx.finalize()
+ else:
+ algorithm = algorithm._algorithm
+
+ if len(data) != algorithm.digest_size:
+ raise ValueError(
+ "The provided data must be the same length as the hash "
+ "algorithm's digest size."
+ )
+
+ return (data, algorithm)
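+
+ # Hedged sketch (commented out): the Prehashed branch above is what lets a
+ # caller hash once and then sign the digest directly via the public API.
+ # "private_key" below stands for any RSAPrivateKey instance.
+ #
+ #   from cryptography.hazmat.primitives import hashes
+ #   from cryptography.hazmat.primitives.asymmetric import padding, utils
+ #
+ #   h = hashes.Hash(hashes.SHA256())
+ #   h.update(b"message")
+ #   digest = h.finalize()
+ #   sig = private_key.sign(
+ #       digest, padding.PKCS1v15(), utils.Prehashed(hashes.SHA256())
+ #   )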
diff --git a/billinglayer/python/cryptography/hazmat/bindings/__init__.py b/billinglayer/python/cryptography/hazmat/bindings/__init__.py
new file mode 100644
index 0000000..b509336
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/bindings/__init__.py
@@ -0,0 +1,3 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
diff --git a/billinglayer/python/cryptography/hazmat/bindings/__pycache__/__init__.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/bindings/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..6d0ef6b
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/bindings/__pycache__/__init__.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/bindings/_rust.abi3.so b/billinglayer/python/cryptography/hazmat/bindings/_rust.abi3.so
new file mode 100755
index 0000000..b5f12d9
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/bindings/_rust.abi3.so differ
diff --git a/billinglayer/python/cryptography/hazmat/bindings/_rust/__init__.pyi b/billinglayer/python/cryptography/hazmat/bindings/_rust/__init__.pyi
new file mode 100644
index 0000000..94a37a2
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/bindings/_rust/__init__.pyi
@@ -0,0 +1,34 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import types
+import typing
+
+def check_pkcs7_padding(data: bytes) -> bool: ...
+def check_ansix923_padding(data: bytes) -> bool: ...
+
+class ObjectIdentifier:
+ def __init__(self, val: str) -> None: ...
+ @property
+ def dotted_string(self) -> str: ...
+ @property
+ def _name(self) -> str: ...
+
+T = typing.TypeVar("T")
+
+class FixedPool(typing.Generic[T]):
+ def __init__(
+ self,
+ create: typing.Callable[[], T],
+ ) -> None: ...
+ def acquire(self) -> PoolAcquisition[T]: ...
+
+class PoolAcquisition(typing.Generic[T]):
+ def __enter__(self) -> T: ...
+ def __exit__(
+ self,
+ exc_type: typing.Optional[typing.Type[BaseException]],
+ exc_value: typing.Optional[BaseException],
+ exc_tb: typing.Optional[types.TracebackType],
+ ) -> None: ...
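+
+ # Hedged usage sketch (comment only; inferred from the stubs above, not
+ # from upstream docs): FixedPool hands out pooled objects via a context
+ # manager, returning them to the pool on exit:
+ #
+ #   pool: FixedPool[list] = FixedPool(create=list)
+ #   with pool.acquire() as item:
+ #       item.append(1)  # use the pooled object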
diff --git a/billinglayer/python/cryptography/hazmat/bindings/_rust/_openssl.pyi b/billinglayer/python/cryptography/hazmat/bindings/_rust/_openssl.pyi
new file mode 100644
index 0000000..8010008
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/bindings/_rust/_openssl.pyi
@@ -0,0 +1,8 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import typing
+
+lib = typing.Any
+ffi = typing.Any
diff --git a/billinglayer/python/cryptography/hazmat/bindings/_rust/asn1.pyi b/billinglayer/python/cryptography/hazmat/bindings/_rust/asn1.pyi
new file mode 100644
index 0000000..a8369ba
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/bindings/_rust/asn1.pyi
@@ -0,0 +1,16 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import typing
+
+class TestCertificate:
+ not_after_tag: int
+ not_before_tag: int
+ issuer_value_tags: typing.List[int]
+ subject_value_tags: typing.List[int]
+
+def decode_dss_signature(signature: bytes) -> typing.Tuple[int, int]: ...
+def encode_dss_signature(r: int, s: int) -> bytes: ...
+def parse_spki_for_data(data: bytes) -> bytes: ...
+def test_parse_certificate(data: bytes) -> TestCertificate: ...
diff --git a/billinglayer/python/cryptography/hazmat/bindings/_rust/exceptions.pyi b/billinglayer/python/cryptography/hazmat/bindings/_rust/exceptions.pyi
new file mode 100644
index 0000000..09f46b1
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/bindings/_rust/exceptions.pyi
@@ -0,0 +1,17 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+class _Reasons:
+ BACKEND_MISSING_INTERFACE: _Reasons
+ UNSUPPORTED_HASH: _Reasons
+ UNSUPPORTED_CIPHER: _Reasons
+ UNSUPPORTED_PADDING: _Reasons
+ UNSUPPORTED_MGF: _Reasons
+ UNSUPPORTED_PUBLIC_KEY_ALGORITHM: _Reasons
+ UNSUPPORTED_ELLIPTIC_CURVE: _Reasons
+ UNSUPPORTED_SERIALIZATION: _Reasons
+ UNSUPPORTED_X509: _Reasons
+ UNSUPPORTED_EXCHANGE_ALGORITHM: _Reasons
+ UNSUPPORTED_DIFFIE_HELLMAN: _Reasons
+ UNSUPPORTED_MAC: _Reasons
diff --git a/billinglayer/python/cryptography/hazmat/bindings/_rust/ocsp.pyi b/billinglayer/python/cryptography/hazmat/bindings/_rust/ocsp.pyi
new file mode 100644
index 0000000..4671eb9
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/bindings/_rust/ocsp.pyi
@@ -0,0 +1,25 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import typing
+
+from cryptography.hazmat.primitives import hashes
+from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes
+from cryptography.x509.ocsp import (
+ OCSPRequest,
+ OCSPRequestBuilder,
+ OCSPResponse,
+ OCSPResponseBuilder,
+ OCSPResponseStatus,
+)
+
+def load_der_ocsp_request(data: bytes) -> OCSPRequest: ...
+def load_der_ocsp_response(data: bytes) -> OCSPResponse: ...
+def create_ocsp_request(builder: OCSPRequestBuilder) -> OCSPRequest: ...
+def create_ocsp_response(
+ status: OCSPResponseStatus,
+ builder: typing.Optional[OCSPResponseBuilder],
+ private_key: typing.Optional[PrivateKeyTypes],
+ hash_algorithm: typing.Optional[hashes.HashAlgorithm],
+) -> OCSPResponse: ...
diff --git a/billinglayer/python/cryptography/hazmat/bindings/_rust/openssl/__init__.pyi b/billinglayer/python/cryptography/hazmat/bindings/_rust/openssl/__init__.pyi
new file mode 100644
index 0000000..82f30d2
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/bindings/_rust/openssl/__init__.pyi
@@ -0,0 +1,47 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import typing
+
+from cryptography.hazmat.bindings._rust.openssl import (
+ dh,
+ dsa,
+ ed448,
+ ed25519,
+ hashes,
+ hmac,
+ kdf,
+ poly1305,
+ x448,
+ x25519,
+)
+
+__all__ = [
+ "openssl_version",
+ "raise_openssl_error",
+ "dh",
+ "dsa",
+ "hashes",
+ "hmac",
+ "kdf",
+ "ed448",
+ "ed25519",
+ "poly1305",
+ "x448",
+ "x25519",
+]
+
+def openssl_version() -> int: ...
+def raise_openssl_error() -> typing.NoReturn: ...
+def capture_error_stack() -> typing.List[OpenSSLError]: ...
+def is_fips_enabled() -> bool: ...
+
+class OpenSSLError:
+ @property
+ def lib(self) -> int: ...
+ @property
+ def reason(self) -> int: ...
+ @property
+ def reason_text(self) -> bytes: ...
+ def _lib_reason_match(self, lib: int, reason: int) -> bool: ...
diff --git a/billinglayer/python/cryptography/hazmat/bindings/_rust/openssl/dh.pyi b/billinglayer/python/cryptography/hazmat/bindings/_rust/openssl/dh.pyi
new file mode 100644
index 0000000..bfd005d
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/bindings/_rust/openssl/dh.pyi
@@ -0,0 +1,22 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from cryptography.hazmat.primitives.asymmetric import dh
+
+MIN_MODULUS_SIZE: int
+
+class DHPrivateKey: ...
+class DHPublicKey: ...
+class DHParameters: ...
+
+def generate_parameters(generator: int, key_size: int) -> dh.DHParameters: ...
+def private_key_from_ptr(ptr: int) -> dh.DHPrivateKey: ...
+def public_key_from_ptr(ptr: int) -> dh.DHPublicKey: ...
+def from_pem_parameters(data: bytes) -> dh.DHParameters: ...
+def from_der_parameters(data: bytes) -> dh.DHParameters: ...
+def from_private_numbers(numbers: dh.DHPrivateNumbers) -> dh.DHPrivateKey: ...
+def from_public_numbers(numbers: dh.DHPublicNumbers) -> dh.DHPublicKey: ...
+def from_parameter_numbers(
+ numbers: dh.DHParameterNumbers,
+) -> dh.DHParameters: ...
diff --git a/billinglayer/python/cryptography/hazmat/bindings/_rust/openssl/dsa.pyi b/billinglayer/python/cryptography/hazmat/bindings/_rust/openssl/dsa.pyi
new file mode 100644
index 0000000..5a56f25
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/bindings/_rust/openssl/dsa.pyi
@@ -0,0 +1,20 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from cryptography.hazmat.primitives.asymmetric import dsa
+
+class DSAPrivateKey: ...
+class DSAPublicKey: ...
+class DSAParameters: ...
+
+def generate_parameters(key_size: int) -> dsa.DSAParameters: ...
+def private_key_from_ptr(ptr: int) -> dsa.DSAPrivateKey: ...
+def public_key_from_ptr(ptr: int) -> dsa.DSAPublicKey: ...
+def from_private_numbers(
+ numbers: dsa.DSAPrivateNumbers,
+) -> dsa.DSAPrivateKey: ...
+def from_public_numbers(numbers: dsa.DSAPublicNumbers) -> dsa.DSAPublicKey: ...
+def from_parameter_numbers(
+ numbers: dsa.DSAParameterNumbers,
+) -> dsa.DSAParameters: ...
diff --git a/billinglayer/python/cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi b/billinglayer/python/cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi
new file mode 100644
index 0000000..c7f127f
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi
@@ -0,0 +1,14 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from cryptography.hazmat.primitives.asymmetric import ed25519
+
+class Ed25519PrivateKey: ...
+class Ed25519PublicKey: ...
+
+def generate_key() -> ed25519.Ed25519PrivateKey: ...
+def private_key_from_ptr(ptr: int) -> ed25519.Ed25519PrivateKey: ...
+def public_key_from_ptr(ptr: int) -> ed25519.Ed25519PublicKey: ...
+def from_private_bytes(data: bytes) -> ed25519.Ed25519PrivateKey: ...
+def from_public_bytes(data: bytes) -> ed25519.Ed25519PublicKey: ...
diff --git a/billinglayer/python/cryptography/hazmat/bindings/_rust/openssl/ed448.pyi b/billinglayer/python/cryptography/hazmat/bindings/_rust/openssl/ed448.pyi
new file mode 100644
index 0000000..1cf5f17
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/bindings/_rust/openssl/ed448.pyi
@@ -0,0 +1,14 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from cryptography.hazmat.primitives.asymmetric import ed448
+
+class Ed448PrivateKey: ...
+class Ed448PublicKey: ...
+
+def generate_key() -> ed448.Ed448PrivateKey: ...
+def private_key_from_ptr(ptr: int) -> ed448.Ed448PrivateKey: ...
+def public_key_from_ptr(ptr: int) -> ed448.Ed448PublicKey: ...
+def from_private_bytes(data: bytes) -> ed448.Ed448PrivateKey: ...
+def from_public_bytes(data: bytes) -> ed448.Ed448PublicKey: ...
diff --git a/billinglayer/python/cryptography/hazmat/bindings/_rust/openssl/hashes.pyi b/billinglayer/python/cryptography/hazmat/bindings/_rust/openssl/hashes.pyi
new file mode 100644
index 0000000..ca5f42a
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/bindings/_rust/openssl/hashes.pyi
@@ -0,0 +1,17 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import typing
+
+from cryptography.hazmat.primitives import hashes
+
+class Hash(hashes.HashContext):
+ def __init__(
+ self, algorithm: hashes.HashAlgorithm, backend: typing.Any = None
+ ) -> None: ...
+ @property
+ def algorithm(self) -> hashes.HashAlgorithm: ...
+ def update(self, data: bytes) -> None: ...
+ def finalize(self) -> bytes: ...
+ def copy(self) -> Hash: ...
diff --git a/billinglayer/python/cryptography/hazmat/bindings/_rust/openssl/hmac.pyi b/billinglayer/python/cryptography/hazmat/bindings/_rust/openssl/hmac.pyi
new file mode 100644
index 0000000..e38d9b5
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/bindings/_rust/openssl/hmac.pyi
@@ -0,0 +1,21 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import typing
+
+from cryptography.hazmat.primitives import hashes
+
+class HMAC(hashes.HashContext):
+ def __init__(
+ self,
+ key: bytes,
+ algorithm: hashes.HashAlgorithm,
+ backend: typing.Any = None,
+ ) -> None: ...
+ @property
+ def algorithm(self) -> hashes.HashAlgorithm: ...
+ def update(self, data: bytes) -> None: ...
+ def finalize(self) -> bytes: ...
+ def verify(self, signature: bytes) -> None: ...
+ def copy(self) -> HMAC: ...
diff --git a/billinglayer/python/cryptography/hazmat/bindings/_rust/openssl/kdf.pyi b/billinglayer/python/cryptography/hazmat/bindings/_rust/openssl/kdf.pyi
new file mode 100644
index 0000000..034a8fe
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/bindings/_rust/openssl/kdf.pyi
@@ -0,0 +1,22 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from cryptography.hazmat.primitives.hashes import HashAlgorithm
+
+def derive_pbkdf2_hmac(
+ key_material: bytes,
+ algorithm: HashAlgorithm,
+ salt: bytes,
+ iterations: int,
+ length: int,
+) -> bytes: ...
+def derive_scrypt(
+ key_material: bytes,
+ salt: bytes,
+ n: int,
+ r: int,
+ p: int,
+ max_mem: int,
+ length: int,
+) -> bytes: ...
diff --git a/billinglayer/python/cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi b/billinglayer/python/cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi
new file mode 100644
index 0000000..2e9b0a9
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi
@@ -0,0 +1,13 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+class Poly1305:
+ def __init__(self, key: bytes) -> None: ...
+ @staticmethod
+ def generate_tag(key: bytes, data: bytes) -> bytes: ...
+ @staticmethod
+ def verify_tag(key: bytes, data: bytes, tag: bytes) -> None: ...
+ def update(self, data: bytes) -> None: ...
+ def finalize(self) -> bytes: ...
+ def verify(self, tag: bytes) -> None: ...
diff --git a/billinglayer/python/cryptography/hazmat/bindings/_rust/openssl/x25519.pyi b/billinglayer/python/cryptography/hazmat/bindings/_rust/openssl/x25519.pyi
new file mode 100644
index 0000000..90f7cbd
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/bindings/_rust/openssl/x25519.pyi
@@ -0,0 +1,14 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from cryptography.hazmat.primitives.asymmetric import x25519
+
+class X25519PrivateKey: ...
+class X25519PublicKey: ...
+
+def generate_key() -> x25519.X25519PrivateKey: ...
+def private_key_from_ptr(ptr: int) -> x25519.X25519PrivateKey: ...
+def public_key_from_ptr(ptr: int) -> x25519.X25519PublicKey: ...
+def from_private_bytes(data: bytes) -> x25519.X25519PrivateKey: ...
+def from_public_bytes(data: bytes) -> x25519.X25519PublicKey: ...
diff --git a/billinglayer/python/cryptography/hazmat/bindings/_rust/openssl/x448.pyi b/billinglayer/python/cryptography/hazmat/bindings/_rust/openssl/x448.pyi
new file mode 100644
index 0000000..d326c8d
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/bindings/_rust/openssl/x448.pyi
@@ -0,0 +1,14 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from cryptography.hazmat.primitives.asymmetric import x448
+
+class X448PrivateKey: ...
+class X448PublicKey: ...
+
+def generate_key() -> x448.X448PrivateKey: ...
+def private_key_from_ptr(ptr: int) -> x448.X448PrivateKey: ...
+def public_key_from_ptr(ptr: int) -> x448.X448PublicKey: ...
+def from_private_bytes(data: bytes) -> x448.X448PrivateKey: ...
+def from_public_bytes(data: bytes) -> x448.X448PublicKey: ...
diff --git a/billinglayer/python/cryptography/hazmat/bindings/_rust/pkcs7.pyi b/billinglayer/python/cryptography/hazmat/bindings/_rust/pkcs7.pyi
new file mode 100644
index 0000000..66bd850
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/bindings/_rust/pkcs7.pyi
@@ -0,0 +1,15 @@
+import typing
+
+from cryptography import x509
+from cryptography.hazmat.primitives import serialization
+from cryptography.hazmat.primitives.serialization import pkcs7
+
+def serialize_certificates(
+ certs: typing.List[x509.Certificate],
+ encoding: serialization.Encoding,
+) -> bytes: ...
+def sign_and_serialize(
+ builder: pkcs7.PKCS7SignatureBuilder,
+ encoding: serialization.Encoding,
+ options: typing.Iterable[pkcs7.PKCS7Options],
+) -> bytes: ...
diff --git a/billinglayer/python/cryptography/hazmat/bindings/_rust/x509.pyi b/billinglayer/python/cryptography/hazmat/bindings/_rust/x509.pyi
new file mode 100644
index 0000000..24b2f5e
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/bindings/_rust/x509.pyi
@@ -0,0 +1,44 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import typing
+
+from cryptography import x509
+from cryptography.hazmat.primitives import hashes
+from cryptography.hazmat.primitives.asymmetric.padding import PSS, PKCS1v15
+from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes
+
+def load_pem_x509_certificate(data: bytes) -> x509.Certificate: ...
+def load_pem_x509_certificates(
+ data: bytes,
+) -> typing.List[x509.Certificate]: ...
+def load_der_x509_certificate(data: bytes) -> x509.Certificate: ...
+def load_pem_x509_crl(data: bytes) -> x509.CertificateRevocationList: ...
+def load_der_x509_crl(data: bytes) -> x509.CertificateRevocationList: ...
+def load_pem_x509_csr(data: bytes) -> x509.CertificateSigningRequest: ...
+def load_der_x509_csr(data: bytes) -> x509.CertificateSigningRequest: ...
+def encode_name_bytes(name: x509.Name) -> bytes: ...
+def encode_extension_value(extension: x509.ExtensionType) -> bytes: ...
+def create_x509_certificate(
+ builder: x509.CertificateBuilder,
+ private_key: PrivateKeyTypes,
+ hash_algorithm: typing.Optional[hashes.HashAlgorithm],
+ padding: typing.Optional[typing.Union[PKCS1v15, PSS]],
+) -> x509.Certificate: ...
+def create_x509_csr(
+ builder: x509.CertificateSigningRequestBuilder,
+ private_key: PrivateKeyTypes,
+ hash_algorithm: typing.Optional[hashes.HashAlgorithm],
+) -> x509.CertificateSigningRequest: ...
+def create_x509_crl(
+ builder: x509.CertificateRevocationListBuilder,
+ private_key: PrivateKeyTypes,
+ hash_algorithm: typing.Optional[hashes.HashAlgorithm],
+) -> x509.CertificateRevocationList: ...
+
+class Sct: ...
+class Certificate: ...
+class RevokedCertificate: ...
+class CertificateRevocationList: ...
+class CertificateSigningRequest: ...
diff --git a/billinglayer/python/cryptography/hazmat/bindings/openssl/__init__.py b/billinglayer/python/cryptography/hazmat/bindings/openssl/__init__.py
new file mode 100644
index 0000000..b509336
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/bindings/openssl/__init__.py
@@ -0,0 +1,3 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
diff --git a/billinglayer/python/cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..d985039
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-311.pyc
new file mode 100644
index 0000000..a0f7c39
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-311.pyc
new file mode 100644
index 0000000..0e26694
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/bindings/openssl/_conditional.py b/billinglayer/python/cryptography/hazmat/bindings/openssl/_conditional.py
new file mode 100644
index 0000000..5e8ecd0
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/bindings/openssl/_conditional.py
@@ -0,0 +1,329 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import typing
+
+
+def cryptography_has_set_cert_cb() -> typing.List[str]:
+ return [
+ "SSL_CTX_set_cert_cb",
+ "SSL_set_cert_cb",
+ ]
+
+
+def cryptography_has_ssl_st() -> typing.List[str]:
+ return [
+ "SSL_ST_BEFORE",
+ "SSL_ST_OK",
+ "SSL_ST_INIT",
+ "SSL_ST_RENEGOTIATE",
+ ]
+
+
+def cryptography_has_tls_st() -> typing.List[str]:
+ return [
+ "TLS_ST_BEFORE",
+ "TLS_ST_OK",
+ ]
+
+
+def cryptography_has_evp_pkey_dhx() -> typing.List[str]:
+ return [
+ "EVP_PKEY_DHX",
+ ]
+
+
+def cryptography_has_mem_functions() -> typing.List[str]:
+ return [
+ "Cryptography_CRYPTO_set_mem_functions",
+ ]
+
+
+def cryptography_has_x509_store_ctx_get_issuer() -> typing.List[str]:
+ return [
+ "X509_STORE_set_get_issuer",
+ ]
+
+
+def cryptography_has_ed448() -> typing.List[str]:
+ return [
+ "EVP_PKEY_ED448",
+ "NID_ED448",
+ ]
+
+
+def cryptography_has_ed25519() -> typing.List[str]:
+ return [
+ "NID_ED25519",
+ "EVP_PKEY_ED25519",
+ ]
+
+
+def cryptography_has_poly1305() -> typing.List[str]:
+ return [
+ "NID_poly1305",
+ "EVP_PKEY_POLY1305",
+ ]
+
+
+def cryptography_has_evp_digestfinal_xof() -> typing.List[str]:
+ return [
+ "EVP_DigestFinalXOF",
+ ]
+
+
+def cryptography_has_fips() -> typing.List[str]:
+ return [
+ "FIPS_mode_set",
+ "FIPS_mode",
+ ]
+
+
+def cryptography_has_ssl_sigalgs() -> typing.List[str]:
+ return [
+ "SSL_CTX_set1_sigalgs_list",
+ ]
+
+
+def cryptography_has_psk() -> typing.List[str]:
+ return [
+ "SSL_CTX_use_psk_identity_hint",
+ "SSL_CTX_set_psk_server_callback",
+ "SSL_CTX_set_psk_client_callback",
+ ]
+
+
+def cryptography_has_psk_tlsv13() -> typing.List[str]:
+ return [
+ "SSL_CTX_set_psk_find_session_callback",
+ "SSL_CTX_set_psk_use_session_callback",
+ "Cryptography_SSL_SESSION_new",
+ "SSL_CIPHER_find",
+ "SSL_SESSION_set1_master_key",
+ "SSL_SESSION_set_cipher",
+ "SSL_SESSION_set_protocol_version",
+ ]
+
+
+def cryptography_has_custom_ext() -> typing.List[str]:
+ return [
+ "SSL_CTX_add_client_custom_ext",
+ "SSL_CTX_add_server_custom_ext",
+ "SSL_extension_supported",
+ ]
+
+
+def cryptography_has_tlsv13_functions() -> typing.List[str]:
+ return [
+ "SSL_VERIFY_POST_HANDSHAKE",
+ "SSL_CTX_set_ciphersuites",
+ "SSL_verify_client_post_handshake",
+ "SSL_CTX_set_post_handshake_auth",
+ "SSL_set_post_handshake_auth",
+ "SSL_SESSION_get_max_early_data",
+ "SSL_write_early_data",
+ "SSL_read_early_data",
+ "SSL_CTX_set_max_early_data",
+ ]
+
+
+def cryptography_has_raw_key() -> typing.List[str]:
+ return [
+ "EVP_PKEY_new_raw_private_key",
+ "EVP_PKEY_new_raw_public_key",
+ "EVP_PKEY_get_raw_private_key",
+ "EVP_PKEY_get_raw_public_key",
+ ]
+
+
+def cryptography_has_engine() -> typing.List[str]:
+ return [
+ "ENGINE_by_id",
+ "ENGINE_init",
+ "ENGINE_finish",
+ "ENGINE_get_default_RAND",
+ "ENGINE_set_default_RAND",
+ "ENGINE_unregister_RAND",
+ "ENGINE_ctrl_cmd",
+ "ENGINE_free",
+ "ENGINE_get_name",
+ "ENGINE_ctrl_cmd_string",
+ "ENGINE_load_builtin_engines",
+ "ENGINE_load_private_key",
+ "ENGINE_load_public_key",
+ "SSL_CTX_set_client_cert_engine",
+ ]
+
+
+def cryptography_has_verified_chain() -> typing.List[str]:
+ return [
+ "SSL_get0_verified_chain",
+ ]
+
+
+def cryptography_has_srtp() -> typing.List[str]:
+ return [
+ "SSL_CTX_set_tlsext_use_srtp",
+ "SSL_set_tlsext_use_srtp",
+ "SSL_get_selected_srtp_profile",
+ ]
+
+
+def cryptography_has_providers() -> typing.List[str]:
+ return [
+ "OSSL_PROVIDER_load",
+ "OSSL_PROVIDER_unload",
+ "ERR_LIB_PROV",
+ "PROV_R_WRONG_FINAL_BLOCK_LENGTH",
+ "PROV_R_BAD_DECRYPT",
+ ]
+
+
+def cryptography_has_op_no_renegotiation() -> typing.List[str]:
+ return [
+ "SSL_OP_NO_RENEGOTIATION",
+ ]
+
+
+def cryptography_has_dtls_get_data_mtu() -> typing.List[str]:
+ return [
+ "DTLS_get_data_mtu",
+ ]
+
+
+def cryptography_has_300_fips() -> typing.List[str]:
+ return [
+ "EVP_default_properties_is_fips_enabled",
+ "EVP_default_properties_enable_fips",
+ ]
+
+
+def cryptography_has_ssl_cookie() -> typing.List[str]:
+ return [
+ "SSL_OP_COOKIE_EXCHANGE",
+ "DTLSv1_listen",
+ "SSL_CTX_set_cookie_generate_cb",
+ "SSL_CTX_set_cookie_verify_cb",
+ ]
+
+
+def cryptography_has_pkcs7_funcs() -> typing.List[str]:
+ return [
+ "SMIME_write_PKCS7",
+ "PEM_write_bio_PKCS7_stream",
+ "PKCS7_sign_add_signer",
+ "PKCS7_final",
+ "PKCS7_verify",
+ "SMIME_read_PKCS7",
+ "PKCS7_get0_signers",
+ ]
+
+
+def cryptography_has_bn_flags() -> typing.List[str]:
+ return [
+ "BN_FLG_CONSTTIME",
+ "BN_set_flags",
+ "BN_prime_checks_for_size",
+ ]
+
+
+def cryptography_has_evp_pkey_dh() -> typing.List[str]:
+ return [
+ "EVP_PKEY_set1_DH",
+ ]
+
+
+def cryptography_has_300_evp_cipher() -> typing.List[str]:
+ return ["EVP_CIPHER_fetch", "EVP_CIPHER_free"]
+
+
+def cryptography_has_unexpected_eof_while_reading() -> typing.List[str]:
+ return ["SSL_R_UNEXPECTED_EOF_WHILE_READING"]
+
+
+def cryptography_has_pkcs12_set_mac() -> typing.List[str]:
+ return ["PKCS12_set_mac"]
+
+
+def cryptography_has_ssl_op_ignore_unexpected_eof() -> typing.List[str]:
+ return [
+ "SSL_OP_IGNORE_UNEXPECTED_EOF",
+ ]
+
+
+def cryptography_has_get_extms_support() -> typing.List[str]:
+ return ["SSL_get_extms_support"]
+
+
+def cryptography_has_evp_pkey_set_peer_ex() -> typing.List[str]:
+ return ["EVP_PKEY_derive_set_peer_ex"]
+
+
+def cryptography_has_evp_aead() -> typing.List[str]:
+ return [
+ "EVP_aead_chacha20_poly1305",
+ "EVP_AEAD_CTX_free",
+ "EVP_AEAD_CTX_seal",
+ "EVP_AEAD_CTX_open",
+ "EVP_AEAD_max_overhead",
+ "Cryptography_EVP_AEAD_CTX_new",
+ ]
+
+
+# This is a mapping of
+# {condition: function-returning-names-dependent-on-that-condition} so we can
+# loop over them and delete unsupported names at runtime. It will be removed
+# when cffi supports #if in cdef. We use functions instead of just a dict of
+# lists so we can use coverage to measure which are used.
+CONDITIONAL_NAMES = {
+ "Cryptography_HAS_SET_CERT_CB": cryptography_has_set_cert_cb,
+ "Cryptography_HAS_SSL_ST": cryptography_has_ssl_st,
+ "Cryptography_HAS_TLS_ST": cryptography_has_tls_st,
+ "Cryptography_HAS_EVP_PKEY_DHX": cryptography_has_evp_pkey_dhx,
+ "Cryptography_HAS_MEM_FUNCTIONS": cryptography_has_mem_functions,
+ "Cryptography_HAS_X509_STORE_CTX_GET_ISSUER": (
+ cryptography_has_x509_store_ctx_get_issuer
+ ),
+ "Cryptography_HAS_ED448": cryptography_has_ed448,
+ "Cryptography_HAS_ED25519": cryptography_has_ed25519,
+ "Cryptography_HAS_POLY1305": cryptography_has_poly1305,
+ "Cryptography_HAS_FIPS": cryptography_has_fips,
+ "Cryptography_HAS_SIGALGS": cryptography_has_ssl_sigalgs,
+ "Cryptography_HAS_PSK": cryptography_has_psk,
+ "Cryptography_HAS_PSK_TLSv1_3": cryptography_has_psk_tlsv13,
+ "Cryptography_HAS_CUSTOM_EXT": cryptography_has_custom_ext,
+ "Cryptography_HAS_TLSv1_3_FUNCTIONS": cryptography_has_tlsv13_functions,
+ "Cryptography_HAS_RAW_KEY": cryptography_has_raw_key,
+ "Cryptography_HAS_EVP_DIGESTFINAL_XOF": (
+ cryptography_has_evp_digestfinal_xof
+ ),
+ "Cryptography_HAS_ENGINE": cryptography_has_engine,
+ "Cryptography_HAS_VERIFIED_CHAIN": cryptography_has_verified_chain,
+ "Cryptography_HAS_SRTP": cryptography_has_srtp,
+ "Cryptography_HAS_PROVIDERS": cryptography_has_providers,
+ "Cryptography_HAS_OP_NO_RENEGOTIATION": (
+ cryptography_has_op_no_renegotiation
+ ),
+ "Cryptography_HAS_DTLS_GET_DATA_MTU": cryptography_has_dtls_get_data_mtu,
+ "Cryptography_HAS_300_FIPS": cryptography_has_300_fips,
+ "Cryptography_HAS_SSL_COOKIE": cryptography_has_ssl_cookie,
+ "Cryptography_HAS_PKCS7_FUNCS": cryptography_has_pkcs7_funcs,
+ "Cryptography_HAS_BN_FLAGS": cryptography_has_bn_flags,
+ "Cryptography_HAS_EVP_PKEY_DH": cryptography_has_evp_pkey_dh,
+ "Cryptography_HAS_300_EVP_CIPHER": cryptography_has_300_evp_cipher,
+ "Cryptography_HAS_UNEXPECTED_EOF_WHILE_READING": (
+ cryptography_has_unexpected_eof_while_reading
+ ),
+ "Cryptography_HAS_PKCS12_SET_MAC": cryptography_has_pkcs12_set_mac,
+ "Cryptography_HAS_SSL_OP_IGNORE_UNEXPECTED_EOF": (
+ cryptography_has_ssl_op_ignore_unexpected_eof
+ ),
+ "Cryptography_HAS_GET_EXTMS_SUPPORT": cryptography_has_get_extms_support,
+ "Cryptography_HAS_EVP_PKEY_SET_PEER_EX": (
+ cryptography_has_evp_pkey_set_peer_ex
+ ),
+ "Cryptography_HAS_EVP_AEAD": (cryptography_has_evp_aead),
+}
diff --git a/billinglayer/python/cryptography/hazmat/bindings/openssl/binding.py b/billinglayer/python/cryptography/hazmat/bindings/openssl/binding.py
new file mode 100644
index 0000000..b50d631
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/bindings/openssl/binding.py
@@ -0,0 +1,179 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import os
+import sys
+import threading
+import types
+import typing
+import warnings
+
+import cryptography
+from cryptography.exceptions import InternalError
+from cryptography.hazmat.bindings._rust import _openssl, openssl
+from cryptography.hazmat.bindings.openssl._conditional import CONDITIONAL_NAMES
+
+
+def _openssl_assert(
+ lib,
+ ok: bool,
+ errors: typing.Optional[typing.List[openssl.OpenSSLError]] = None,
+) -> None:
+ if not ok:
+ if errors is None:
+ errors = openssl.capture_error_stack()
+
+ raise InternalError(
+ "Unknown OpenSSL error. This error is commonly encountered when "
+ "another library is not cleaning up the OpenSSL error stack. If "
+ "you are using cryptography with another library that uses "
+ "OpenSSL try disabling it before reporting a bug. Otherwise "
+ "please file an issue at https://github.com/pyca/cryptography/"
+ "issues with information on how to reproduce "
+ "this. ({!r})".format(errors),
+ errors,
+ )
+
+
+def _legacy_provider_error(loaded: bool) -> None:
+ if not loaded:
+ raise RuntimeError(
+ "OpenSSL 3.0's legacy provider failed to load. This is a fatal "
+ "error by default, but cryptography supports running without "
+ "legacy algorithms by setting the environment variable "
+ "CRYPTOGRAPHY_OPENSSL_NO_LEGACY. If you did not expect this error,"
+ " you have likely made a mistake with your OpenSSL configuration."
+ )
+
+
+def build_conditional_library(
+ lib: typing.Any,
+ conditional_names: typing.Dict[str, typing.Callable[[], typing.List[str]]],
+) -> typing.Any:
+ conditional_lib = types.ModuleType("lib")
+ conditional_lib._original_lib = lib # type: ignore[attr-defined]
+ excluded_names = set()
+ for condition, names_cb in conditional_names.items():
+ if not getattr(lib, condition):
+ excluded_names.update(names_cb())
+
+ for attr in dir(lib):
+ if attr not in excluded_names:
+ setattr(conditional_lib, attr, getattr(lib, attr))
+
+ return conditional_lib
+
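+ # Hedged sketch (commented out): given CONDITIONAL_NAMES from
+ # _conditional.py, the function above copies every attribute of the raw
+ # cffi lib except the names whose guard flag is false. Roughly:
+ #
+ #   lib = build_conditional_library(_openssl.lib, CONDITIONAL_NAMES)
+ #   # If lib.Cryptography_HAS_ED448 is falsy, the conditional lib ends up
+ #   # with no EVP_PKEY_ED448 attribute at all.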
+
+class Binding:
+ """
+ OpenSSL API wrapper.
+ """
+
+ lib: typing.ClassVar = None
+ ffi = _openssl.ffi
+ _lib_loaded = False
+ _init_lock = threading.Lock()
+ _legacy_provider: typing.Any = ffi.NULL
+ _legacy_provider_loaded = False
+ _default_provider: typing.Any = ffi.NULL
+
+ def __init__(self) -> None:
+ self._ensure_ffi_initialized()
+
+ def _enable_fips(self) -> None:
+ # This function enables FIPS mode for OpenSSL 3.0.0 on installs that
+ # have the FIPS provider installed properly.
+ _openssl_assert(self.lib, self.lib.CRYPTOGRAPHY_OPENSSL_300_OR_GREATER)
+ self._base_provider = self.lib.OSSL_PROVIDER_load(
+ self.ffi.NULL, b"base"
+ )
+ _openssl_assert(self.lib, self._base_provider != self.ffi.NULL)
+ self.lib._fips_provider = self.lib.OSSL_PROVIDER_load(
+ self.ffi.NULL, b"fips"
+ )
+ _openssl_assert(self.lib, self.lib._fips_provider != self.ffi.NULL)
+
+ res = self.lib.EVP_default_properties_enable_fips(self.ffi.NULL, 1)
+ _openssl_assert(self.lib, res == 1)
+
+ @classmethod
+ def _ensure_ffi_initialized(cls) -> None:
+ with cls._init_lock:
+ if not cls._lib_loaded:
+ cls.lib = build_conditional_library(
+ _openssl.lib, CONDITIONAL_NAMES
+ )
+ cls._lib_loaded = True
+ # As of OpenSSL 3.0.0 we must register a legacy cipher provider
+ # to get RC2 (needed for junk asymmetric private key
+ # serialization), RC4, Blowfish, IDEA, SEED, etc. These things
+ # are ugly legacy, but we aren't going to get rid of them
+ # any time soon.
+ if cls.lib.CRYPTOGRAPHY_OPENSSL_300_OR_GREATER:
+ if not os.environ.get("CRYPTOGRAPHY_OPENSSL_NO_LEGACY"):
+ cls._legacy_provider = cls.lib.OSSL_PROVIDER_load(
+ cls.ffi.NULL, b"legacy"
+ )
+ cls._legacy_provider_loaded = (
+ cls._legacy_provider != cls.ffi.NULL
+ )
+ _legacy_provider_error(cls._legacy_provider_loaded)
+
+ cls._default_provider = cls.lib.OSSL_PROVIDER_load(
+ cls.ffi.NULL, b"default"
+ )
+ _openssl_assert(
+ cls.lib, cls._default_provider != cls.ffi.NULL
+ )
+
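+ # Hedged note (illustration only): opting out of the legacy provider is
+ # done through the environment before import, e.g. in a shell:
+ #
+ #   CRYPTOGRAPHY_OPENSSL_NO_LEGACY=1 python -c "import cryptography"
+ #
+ # in which case the branch above never attempts to load b"legacy" and
+ # _legacy_provider_error is never consulted.
+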
+ @classmethod
+ def init_static_locks(cls) -> None:
+ cls._ensure_ffi_initialized()
+
+
+def _verify_package_version(version: str) -> None:
+ # Occasionally we run into situations where the version of the Python
+ # package does not match the version of the shared object that is loaded.
+ # This may occur in environments where multiple versions of cryptography
+ # are installed and available in the python path. To avoid errors cropping
+ # up later, this code checks that the currently imported package and the
+ # loaded shared object have the same version, and raises an ImportError
+ # if they do not.
+ so_package_version = _openssl.ffi.string(
+ _openssl.lib.CRYPTOGRAPHY_PACKAGE_VERSION
+ )
+ if version.encode("ascii") != so_package_version:
+ raise ImportError(
+ "The version of cryptography does not match the loaded "
+ "shared object. This can happen if you have multiple copies of "
+ "cryptography installed in your Python path. Please try creating "
+ "a new virtual environment to resolve this issue. "
+ "Loaded python version: {}, shared object version: {}".format(
+ version, so_package_version
+ )
+ )
+
+ _openssl_assert(
+ _openssl.lib,
+ _openssl.lib.OpenSSL_version_num() == openssl.openssl_version(),
+ )
+
+
+_verify_package_version(cryptography.__version__)
+
+Binding.init_static_locks()
+
+if (
+ sys.platform == "win32"
+ and os.environ.get("PROCESSOR_ARCHITEW6432") is not None
+):
+ warnings.warn(
+ "You are using cryptography on a 32-bit Python on a 64-bit Windows "
+ "Operating System. Cryptography will be significantly faster if you "
+ "switch to using a 64-bit Python.",
+ UserWarning,
+ stacklevel=2,
+ )
diff --git a/billinglayer/python/cryptography/hazmat/primitives/__init__.py b/billinglayer/python/cryptography/hazmat/primitives/__init__.py
new file mode 100644
index 0000000..b509336
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/__init__.py
@@ -0,0 +1,3 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
diff --git a/billinglayer/python/cryptography/hazmat/primitives/__pycache__/__init__.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..1a59cde
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/__pycache__/__init__.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-311.pyc
new file mode 100644
index 0000000..aac4fad
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-311.pyc
new file mode 100644
index 0000000..e061f96
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/__pycache__/_serialization.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/__pycache__/_serialization.cpython-311.pyc
new file mode 100644
index 0000000..010c1ef
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/__pycache__/_serialization.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/__pycache__/cmac.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/__pycache__/cmac.cpython-311.pyc
new file mode 100644
index 0000000..bb43fd6
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/__pycache__/cmac.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/__pycache__/constant_time.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/__pycache__/constant_time.cpython-311.pyc
new file mode 100644
index 0000000..32c2408
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/__pycache__/constant_time.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/__pycache__/hashes.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/__pycache__/hashes.cpython-311.pyc
new file mode 100644
index 0000000..eb71d3c
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/__pycache__/hashes.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/__pycache__/hmac.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/__pycache__/hmac.cpython-311.pyc
new file mode 100644
index 0000000..674ee64
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/__pycache__/hmac.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/__pycache__/keywrap.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/__pycache__/keywrap.cpython-311.pyc
new file mode 100644
index 0000000..5d67e36
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/__pycache__/keywrap.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/__pycache__/padding.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/__pycache__/padding.cpython-311.pyc
new file mode 100644
index 0000000..e85b7e8
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/__pycache__/padding.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/__pycache__/poly1305.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/__pycache__/poly1305.cpython-311.pyc
new file mode 100644
index 0000000..a0927fc
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/__pycache__/poly1305.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/_asymmetric.py b/billinglayer/python/cryptography/hazmat/primitives/_asymmetric.py
new file mode 100644
index 0000000..ea55ffd
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/_asymmetric.py
@@ -0,0 +1,19 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+
+# This exists to break an import cycle. It is normally accessible from the
+# asymmetric padding module.
+
+
+class AsymmetricPadding(metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def name(self) -> str:
+ """
+ A string naming this padding (e.g. "PSS", "PKCS1").
+ """
diff --git a/billinglayer/python/cryptography/hazmat/primitives/_cipheralgorithm.py b/billinglayer/python/cryptography/hazmat/primitives/_cipheralgorithm.py
new file mode 100644
index 0000000..3b880b6
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/_cipheralgorithm.py
@@ -0,0 +1,45 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+import typing
+
+# This exists to break an import cycle. It is normally accessible from the
+# ciphers module.
+
+
+class CipherAlgorithm(metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def name(self) -> str:
+ """
+ A string naming this algorithm (e.g. "AES", "Camellia").
+ """
+
+ @property
+ @abc.abstractmethod
+ def key_sizes(self) -> typing.FrozenSet[int]:
+ """
+ Valid key sizes for this algorithm in bits
+ """
+
+ @property
+ @abc.abstractmethod
+ def key_size(self) -> int:
+ """
+ The size of the key being used as an integer in bits (e.g. 128, 256).
+ """
+
+
+class BlockCipherAlgorithm(CipherAlgorithm):
+ key: bytes
+
+ @property
+ @abc.abstractmethod
+ def block_size(self) -> int:
+ """
+ The size of a block as an integer in bits (e.g. 64, 128).
+ """
diff --git a/billinglayer/python/cryptography/hazmat/primitives/_serialization.py b/billinglayer/python/cryptography/hazmat/primitives/_serialization.py
new file mode 100644
index 0000000..34f3fbc
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/_serialization.py
@@ -0,0 +1,170 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+import typing
+
+from cryptography import utils
+from cryptography.hazmat.primitives.hashes import HashAlgorithm
+
+# This exists to break an import cycle. These classes are normally accessible
+# from the serialization module.
+
+
+class PBES(utils.Enum):
+ PBESv1SHA1And3KeyTripleDESCBC = "PBESv1 using SHA1 and 3-Key TripleDES"
+ PBESv2SHA256AndAES256CBC = "PBESv2 using SHA256 PBKDF2 and AES256 CBC"
+
+
+class Encoding(utils.Enum):
+ PEM = "PEM"
+ DER = "DER"
+ OpenSSH = "OpenSSH"
+ Raw = "Raw"
+ X962 = "ANSI X9.62"
+ SMIME = "S/MIME"
+
+
+class PrivateFormat(utils.Enum):
+ PKCS8 = "PKCS8"
+ TraditionalOpenSSL = "TraditionalOpenSSL"
+ Raw = "Raw"
+ OpenSSH = "OpenSSH"
+ PKCS12 = "PKCS12"
+
+ def encryption_builder(self) -> KeySerializationEncryptionBuilder:
+ if self not in (PrivateFormat.OpenSSH, PrivateFormat.PKCS12):
+ raise ValueError(
+ "encryption_builder only supported with PrivateFormat.OpenSSH"
+ " and PrivateFormat.PKCS12"
+ )
+ return KeySerializationEncryptionBuilder(self)
+
+
+class PublicFormat(utils.Enum):
+ SubjectPublicKeyInfo = "X.509 subjectPublicKeyInfo with PKCS#1"
+ PKCS1 = "Raw PKCS#1"
+ OpenSSH = "OpenSSH"
+ Raw = "Raw"
+ CompressedPoint = "X9.62 Compressed Point"
+ UncompressedPoint = "X9.62 Uncompressed Point"
+
+
+class ParameterFormat(utils.Enum):
+ PKCS3 = "PKCS3"
+
+
+class KeySerializationEncryption(metaclass=abc.ABCMeta):
+ pass
+
+
+class BestAvailableEncryption(KeySerializationEncryption):
+ def __init__(self, password: bytes):
+ if not isinstance(password, bytes) or len(password) == 0:
+ raise ValueError("Password must be 1 or more bytes.")
+
+ self.password = password
+
+
+class NoEncryption(KeySerializationEncryption):
+ pass
+
+
+class KeySerializationEncryptionBuilder:
+ def __init__(
+ self,
+ format: PrivateFormat,
+ *,
+ _kdf_rounds: typing.Optional[int] = None,
+ _hmac_hash: typing.Optional[HashAlgorithm] = None,
+ _key_cert_algorithm: typing.Optional[PBES] = None,
+ ) -> None:
+ self._format = format
+
+ self._kdf_rounds = _kdf_rounds
+ self._hmac_hash = _hmac_hash
+ self._key_cert_algorithm = _key_cert_algorithm
+
+ def kdf_rounds(self, rounds: int) -> KeySerializationEncryptionBuilder:
+ if self._kdf_rounds is not None:
+ raise ValueError("kdf_rounds already set")
+
+ if not isinstance(rounds, int):
+ raise TypeError("kdf_rounds must be an integer")
+
+ if rounds < 1:
+ raise ValueError("kdf_rounds must be a positive integer")
+
+ return KeySerializationEncryptionBuilder(
+ self._format,
+ _kdf_rounds=rounds,
+ _hmac_hash=self._hmac_hash,
+ _key_cert_algorithm=self._key_cert_algorithm,
+ )
+
+ def hmac_hash(
+ self, algorithm: HashAlgorithm
+ ) -> KeySerializationEncryptionBuilder:
+ if self._format is not PrivateFormat.PKCS12:
+ raise TypeError(
+ "hmac_hash only supported with PrivateFormat.PKCS12"
+ )
+
+ if self._hmac_hash is not None:
+ raise ValueError("hmac_hash already set")
+ return KeySerializationEncryptionBuilder(
+ self._format,
+ _kdf_rounds=self._kdf_rounds,
+ _hmac_hash=algorithm,
+ _key_cert_algorithm=self._key_cert_algorithm,
+ )
+
+ def key_cert_algorithm(
+ self, algorithm: PBES
+ ) -> KeySerializationEncryptionBuilder:
+ if self._format is not PrivateFormat.PKCS12:
+ raise TypeError(
+ "key_cert_algorithm only supported with "
+ "PrivateFormat.PKCS12"
+ )
+ if self._key_cert_algorithm is not None:
+ raise ValueError("key_cert_algorithm already set")
+ return KeySerializationEncryptionBuilder(
+ self._format,
+ _kdf_rounds=self._kdf_rounds,
+ _hmac_hash=self._hmac_hash,
+ _key_cert_algorithm=algorithm,
+ )
+
+ def build(self, password: bytes) -> KeySerializationEncryption:
+ if not isinstance(password, bytes) or len(password) == 0:
+ raise ValueError("Password must be 1 or more bytes.")
+
+ return _KeySerializationEncryption(
+ self._format,
+ password,
+ kdf_rounds=self._kdf_rounds,
+ hmac_hash=self._hmac_hash,
+ key_cert_algorithm=self._key_cert_algorithm,
+ )
+
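+# Usage sketch (illustrative; hashes.SHA256 comes from
+# cryptography.hazmat.primitives.hashes): configuring PKCS#12 private-key
+# encryption through the builder. Each setter returns a new builder, so the
+# calls chain.
+#
+#   encryption = (
+#       PrivateFormat.PKCS12.encryption_builder()
+#       .kdf_rounds(50000)
+#       .key_cert_algorithm(PBES.PBESv2SHA256AndAES256CBC)
+#       .hmac_hash(hashes.SHA256())
+#       .build(b"my password")
+#   )
+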
+
+class _KeySerializationEncryption(KeySerializationEncryption):
+ def __init__(
+ self,
+ format: PrivateFormat,
+ password: bytes,
+ *,
+ kdf_rounds: typing.Optional[int],
+ hmac_hash: typing.Optional[HashAlgorithm],
+ key_cert_algorithm: typing.Optional[PBES],
+ ):
+ self._format = format
+ self.password = password
+
+ self._kdf_rounds = kdf_rounds
+ self._hmac_hash = hmac_hash
+ self._key_cert_algorithm = key_cert_algorithm
diff --git a/billinglayer/python/cryptography/hazmat/primitives/asymmetric/__init__.py b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/__init__.py
new file mode 100644
index 0000000..b509336
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/__init__.py
@@ -0,0 +1,3 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
diff --git a/billinglayer/python/cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..ede5f72
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-311.pyc
new file mode 100644
index 0000000..e08ec6b
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-311.pyc
new file mode 100644
index 0000000..ee12879
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-311.pyc
new file mode 100644
index 0000000..d27ca2e
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-311.pyc
new file mode 100644
index 0000000..e669f30
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-311.pyc
new file mode 100644
index 0000000..ec4d488
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-311.pyc
new file mode 100644
index 0000000..0a57226
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-311.pyc
new file mode 100644
index 0000000..90e4359
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-311.pyc
new file mode 100644
index 0000000..a9a09f0
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-311.pyc
new file mode 100644
index 0000000..ef1acfb
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-311.pyc
new file mode 100644
index 0000000..197f16e
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-311.pyc
new file mode 100644
index 0000000..d01d6c0
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/asymmetric/dh.py b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/dh.py
new file mode 100644
index 0000000..751bcc4
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/dh.py
@@ -0,0 +1,261 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+import typing
+
+from cryptography.hazmat.bindings._rust import openssl as rust_openssl
+from cryptography.hazmat.primitives import _serialization
+
+
+def generate_parameters(
+ generator: int, key_size: int, backend: typing.Any = None
+) -> DHParameters:
+ from cryptography.hazmat.backends.openssl.backend import backend as ossl
+
+ return ossl.generate_dh_parameters(generator, key_size)
+
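+# Usage sketch (illustrative): a full DH exchange. Both sides must use the
+# same parameters for the derived secrets to match.
+#
+#   parameters = generate_parameters(generator=2, key_size=2048)
+#   private_key = parameters.generate_private_key()
+#   peer_key = parameters.generate_private_key()
+#   shared = private_key.exchange(peer_key.public_key())
+#   assert shared == peer_key.exchange(private_key.public_key())
+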
+
+class DHParameterNumbers:
+ def __init__(self, p: int, g: int, q: typing.Optional[int] = None) -> None:
+ if not isinstance(p, int) or not isinstance(g, int):
+ raise TypeError("p and g must be integers")
+ if q is not None and not isinstance(q, int):
+ raise TypeError("q must be integer or None")
+
+ if g < 2:
+ raise ValueError("DH generator must be 2 or greater")
+
+ if p.bit_length() < rust_openssl.dh.MIN_MODULUS_SIZE:
+ raise ValueError(
+ f"p (modulus) must be at least "
+ f"{rust_openssl.dh.MIN_MODULUS_SIZE}-bit"
+ )
+
+ self._p = p
+ self._g = g
+ self._q = q
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, DHParameterNumbers):
+ return NotImplemented
+
+ return (
+ self._p == other._p and self._g == other._g and self._q == other._q
+ )
+
+ def parameters(self, backend: typing.Any = None) -> DHParameters:
+ from cryptography.hazmat.backends.openssl.backend import (
+ backend as ossl,
+ )
+
+ return ossl.load_dh_parameter_numbers(self)
+
+ @property
+ def p(self) -> int:
+ return self._p
+
+ @property
+ def g(self) -> int:
+ return self._g
+
+ @property
+ def q(self) -> typing.Optional[int]:
+ return self._q
+
+
+class DHPublicNumbers:
+ def __init__(self, y: int, parameter_numbers: DHParameterNumbers) -> None:
+ if not isinstance(y, int):
+ raise TypeError("y must be an integer.")
+
+ if not isinstance(parameter_numbers, DHParameterNumbers):
+ raise TypeError(
+ "parameters must be an instance of DHParameterNumbers."
+ )
+
+ self._y = y
+ self._parameter_numbers = parameter_numbers
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, DHPublicNumbers):
+ return NotImplemented
+
+ return (
+ self._y == other._y
+ and self._parameter_numbers == other._parameter_numbers
+ )
+
+ def public_key(self, backend: typing.Any = None) -> DHPublicKey:
+ from cryptography.hazmat.backends.openssl.backend import (
+ backend as ossl,
+ )
+
+ return ossl.load_dh_public_numbers(self)
+
+ @property
+ def y(self) -> int:
+ return self._y
+
+ @property
+ def parameter_numbers(self) -> DHParameterNumbers:
+ return self._parameter_numbers
+
+
+class DHPrivateNumbers:
+ def __init__(self, x: int, public_numbers: DHPublicNumbers) -> None:
+ if not isinstance(x, int):
+ raise TypeError("x must be an integer.")
+
+ if not isinstance(public_numbers, DHPublicNumbers):
+ raise TypeError(
+ "public_numbers must be an instance of " "DHPublicNumbers."
+ )
+
+ self._x = x
+ self._public_numbers = public_numbers
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, DHPrivateNumbers):
+ return NotImplemented
+
+ return (
+ self._x == other._x
+ and self._public_numbers == other._public_numbers
+ )
+
+ def private_key(self, backend: typing.Any = None) -> DHPrivateKey:
+ from cryptography.hazmat.backends.openssl.backend import (
+ backend as ossl,
+ )
+
+ return ossl.load_dh_private_numbers(self)
+
+ @property
+ def public_numbers(self) -> DHPublicNumbers:
+ return self._public_numbers
+
+ @property
+ def x(self) -> int:
+ return self._x
+
+
+class DHParameters(metaclass=abc.ABCMeta):
+ @abc.abstractmethod
+ def generate_private_key(self) -> DHPrivateKey:
+ """
+ Generates and returns a DHPrivateKey.
+ """
+
+ @abc.abstractmethod
+ def parameter_bytes(
+ self,
+ encoding: _serialization.Encoding,
+ format: _serialization.ParameterFormat,
+ ) -> bytes:
+ """
+ Returns the parameters serialized as bytes.
+ """
+
+ @abc.abstractmethod
+ def parameter_numbers(self) -> DHParameterNumbers:
+ """
+ Returns a DHParameterNumbers.
+ """
+
+
+DHParametersWithSerialization = DHParameters
+DHParameters.register(rust_openssl.dh.DHParameters)
+
+
+class DHPublicKey(metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def key_size(self) -> int:
+ """
+ The bit length of the prime modulus.
+ """
+
+ @abc.abstractmethod
+ def parameters(self) -> DHParameters:
+ """
+ The DHParameters object associated with this public key.
+ """
+
+ @abc.abstractmethod
+ def public_numbers(self) -> DHPublicNumbers:
+ """
+ Returns a DHPublicNumbers.
+ """
+
+ @abc.abstractmethod
+ def public_bytes(
+ self,
+ encoding: _serialization.Encoding,
+ format: _serialization.PublicFormat,
+ ) -> bytes:
+ """
+ Returns the key serialized as bytes.
+ """
+
+ @abc.abstractmethod
+ def __eq__(self, other: object) -> bool:
+ """
+ Checks equality.
+ """
+
+
+DHPublicKeyWithSerialization = DHPublicKey
+DHPublicKey.register(rust_openssl.dh.DHPublicKey)
+
+
+class DHPrivateKey(metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def key_size(self) -> int:
+ """
+ The bit length of the prime modulus.
+ """
+
+ @abc.abstractmethod
+ def public_key(self) -> DHPublicKey:
+ """
+ The DHPublicKey associated with this private key.
+ """
+
+ @abc.abstractmethod
+ def parameters(self) -> DHParameters:
+ """
+ The DHParameters object associated with this private key.
+ """
+
+ @abc.abstractmethod
+ def exchange(self, peer_public_key: DHPublicKey) -> bytes:
+ """
+ Given the peer's DHPublicKey, carry out the key exchange and
+ return the shared key as bytes.
+ """
+
+ @abc.abstractmethod
+ def private_numbers(self) -> DHPrivateNumbers:
+ """
+ Returns a DHPrivateNumbers.
+ """
+
+ @abc.abstractmethod
+ def private_bytes(
+ self,
+ encoding: _serialization.Encoding,
+ format: _serialization.PrivateFormat,
+ encryption_algorithm: _serialization.KeySerializationEncryption,
+ ) -> bytes:
+ """
+ Returns the key serialized as bytes.
+ """
+
+
+DHPrivateKeyWithSerialization = DHPrivateKey
+DHPrivateKey.register(rust_openssl.dh.DHPrivateKey)
diff --git a/billinglayer/python/cryptography/hazmat/primitives/asymmetric/dsa.py b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/dsa.py
new file mode 100644
index 0000000..a8c52de
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/dsa.py
@@ -0,0 +1,299 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+import typing
+
+from cryptography.hazmat.bindings._rust import openssl as rust_openssl
+from cryptography.hazmat.primitives import _serialization, hashes
+from cryptography.hazmat.primitives.asymmetric import utils as asym_utils
+
+
+class DSAParameters(metaclass=abc.ABCMeta):
+ @abc.abstractmethod
+ def generate_private_key(self) -> DSAPrivateKey:
+ """
+ Generates and returns a DSAPrivateKey.
+ """
+
+ @abc.abstractmethod
+ def parameter_numbers(self) -> DSAParameterNumbers:
+ """
+ Returns a DSAParameterNumbers.
+ """
+
+
+DSAParametersWithNumbers = DSAParameters
+DSAParameters.register(rust_openssl.dsa.DSAParameters)
+
+
+class DSAPrivateKey(metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def key_size(self) -> int:
+ """
+ The bit length of the prime modulus.
+ """
+
+ @abc.abstractmethod
+ def public_key(self) -> DSAPublicKey:
+ """
+ The DSAPublicKey associated with this private key.
+ """
+
+ @abc.abstractmethod
+ def parameters(self) -> DSAParameters:
+ """
+ The DSAParameters object associated with this private key.
+ """
+
+ @abc.abstractmethod
+ def sign(
+ self,
+ data: bytes,
+ algorithm: typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm],
+ ) -> bytes:
+ """
+ Signs the data.
+ """
+
+ @abc.abstractmethod
+ def private_numbers(self) -> DSAPrivateNumbers:
+ """
+ Returns a DSAPrivateNumbers.
+ """
+
+ @abc.abstractmethod
+ def private_bytes(
+ self,
+ encoding: _serialization.Encoding,
+ format: _serialization.PrivateFormat,
+ encryption_algorithm: _serialization.KeySerializationEncryption,
+ ) -> bytes:
+ """
+ Returns the key serialized as bytes.
+ """
+
+
+DSAPrivateKeyWithSerialization = DSAPrivateKey
+DSAPrivateKey.register(rust_openssl.dsa.DSAPrivateKey)
+
+
+class DSAPublicKey(metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def key_size(self) -> int:
+ """
+ The bit length of the prime modulus.
+ """
+
+ @abc.abstractmethod
+ def parameters(self) -> DSAParameters:
+ """
+ The DSAParameters object associated with this public key.
+ """
+
+ @abc.abstractmethod
+ def public_numbers(self) -> DSAPublicNumbers:
+ """
+ Returns a DSAPublicNumbers.
+ """
+
+ @abc.abstractmethod
+ def public_bytes(
+ self,
+ encoding: _serialization.Encoding,
+ format: _serialization.PublicFormat,
+ ) -> bytes:
+ """
+ Returns the key serialized as bytes.
+ """
+
+ @abc.abstractmethod
+ def verify(
+ self,
+ signature: bytes,
+ data: bytes,
+ algorithm: typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm],
+ ) -> None:
+ """
+ Verifies the signature of the data.
+ """
+
+ @abc.abstractmethod
+ def __eq__(self, other: object) -> bool:
+ """
+ Checks equality.
+ """
+
+
+DSAPublicKeyWithSerialization = DSAPublicKey
+DSAPublicKey.register(rust_openssl.dsa.DSAPublicKey)
+
+
+class DSAParameterNumbers:
+ def __init__(self, p: int, q: int, g: int):
+ if (
+ not isinstance(p, int)
+ or not isinstance(q, int)
+ or not isinstance(g, int)
+ ):
+ raise TypeError(
+ "DSAParameterNumbers p, q, and g arguments must be integers."
+ )
+
+ self._p = p
+ self._q = q
+ self._g = g
+
+ @property
+ def p(self) -> int:
+ return self._p
+
+ @property
+ def q(self) -> int:
+ return self._q
+
+ @property
+ def g(self) -> int:
+ return self._g
+
+ def parameters(self, backend: typing.Any = None) -> DSAParameters:
+ from cryptography.hazmat.backends.openssl.backend import (
+ backend as ossl,
+ )
+
+ return ossl.load_dsa_parameter_numbers(self)
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, DSAParameterNumbers):
+ return NotImplemented
+
+ return self.p == other.p and self.q == other.q and self.g == other.g
+
+ def __repr__(self) -> str:
+ return (
+ "".format(self=self)
+ )
+
+
+class DSAPublicNumbers:
+ def __init__(self, y: int, parameter_numbers: DSAParameterNumbers):
+ if not isinstance(y, int):
+ raise TypeError("DSAPublicNumbers y argument must be an integer.")
+
+ if not isinstance(parameter_numbers, DSAParameterNumbers):
+ raise TypeError(
+ "parameter_numbers must be a DSAParameterNumbers instance."
+ )
+
+ self._y = y
+ self._parameter_numbers = parameter_numbers
+
+ @property
+ def y(self) -> int:
+ return self._y
+
+ @property
+ def parameter_numbers(self) -> DSAParameterNumbers:
+ return self._parameter_numbers
+
+ def public_key(self, backend: typing.Any = None) -> DSAPublicKey:
+ from cryptography.hazmat.backends.openssl.backend import (
+ backend as ossl,
+ )
+
+ return ossl.load_dsa_public_numbers(self)
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, DSAPublicNumbers):
+ return NotImplemented
+
+ return (
+ self.y == other.y
+ and self.parameter_numbers == other.parameter_numbers
+ )
+
+ def __repr__(self) -> str:
+ return (
+ "".format(self=self)
+ )
+
+
+class DSAPrivateNumbers:
+ def __init__(self, x: int, public_numbers: DSAPublicNumbers):
+ if not isinstance(x, int):
+ raise TypeError("DSAPrivateNumbers x argument must be an integer.")
+
+ if not isinstance(public_numbers, DSAPublicNumbers):
+ raise TypeError(
+ "public_numbers must be a DSAPublicNumbers instance."
+ )
+ self._public_numbers = public_numbers
+ self._x = x
+
+ @property
+ def x(self) -> int:
+ return self._x
+
+ @property
+ def public_numbers(self) -> DSAPublicNumbers:
+ return self._public_numbers
+
+ def private_key(self, backend: typing.Any = None) -> DSAPrivateKey:
+ from cryptography.hazmat.backends.openssl.backend import (
+ backend as ossl,
+ )
+
+ return ossl.load_dsa_private_numbers(self)
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, DSAPrivateNumbers):
+ return NotImplemented
+
+ return (
+ self.x == other.x and self.public_numbers == other.public_numbers
+ )
+
+
+def generate_parameters(
+ key_size: int, backend: typing.Any = None
+) -> DSAParameters:
+ from cryptography.hazmat.backends.openssl.backend import backend as ossl
+
+ return ossl.generate_dsa_parameters(key_size)
+
+
+def generate_private_key(
+ key_size: int, backend: typing.Any = None
+) -> DSAPrivateKey:
+ from cryptography.hazmat.backends.openssl.backend import backend as ossl
+
+ return ossl.generate_dsa_private_key_and_parameters(key_size)
+
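+# Usage sketch (illustrative; hashes comes from
+# cryptography.hazmat.primitives):
+#
+#   private_key = generate_private_key(key_size=2048)
+#   signature = private_key.sign(b"data", hashes.SHA256())
+#   private_key.public_key().verify(signature, b"data", hashes.SHA256())
+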
+
+def _check_dsa_parameters(parameters: DSAParameterNumbers) -> None:
+ if parameters.p.bit_length() not in [1024, 2048, 3072, 4096]:
+ raise ValueError(
+ "p must be exactly 1024, 2048, 3072, or 4096 bits long"
+ )
+ if parameters.q.bit_length() not in [160, 224, 256]:
+ raise ValueError("q must be exactly 160, 224, or 256 bits long")
+
+ if not (1 < parameters.g < parameters.p):
+ raise ValueError("g, p don't satisfy 1 < g < p.")
+
+
+def _check_dsa_private_numbers(numbers: DSAPrivateNumbers) -> None:
+ parameters = numbers.public_numbers.parameter_numbers
+ _check_dsa_parameters(parameters)
+ if numbers.x <= 0 or numbers.x >= parameters.q:
+ raise ValueError("x must be > 0 and < q.")
+
+ if numbers.public_numbers.y != pow(parameters.g, numbers.x, parameters.p):
+ raise ValueError("y must be equal to (g ** x % p).")
diff --git a/billinglayer/python/cryptography/hazmat/primitives/asymmetric/ec.py b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/ec.py
new file mode 100644
index 0000000..ddfaabf
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/ec.py
@@ -0,0 +1,490 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+import typing
+
+from cryptography import utils
+from cryptography.hazmat._oid import ObjectIdentifier
+from cryptography.hazmat.primitives import _serialization, hashes
+from cryptography.hazmat.primitives.asymmetric import utils as asym_utils
+
+
+class EllipticCurveOID:
+ SECP192R1 = ObjectIdentifier("1.2.840.10045.3.1.1")
+ SECP224R1 = ObjectIdentifier("1.3.132.0.33")
+ SECP256K1 = ObjectIdentifier("1.3.132.0.10")
+ SECP256R1 = ObjectIdentifier("1.2.840.10045.3.1.7")
+ SECP384R1 = ObjectIdentifier("1.3.132.0.34")
+ SECP521R1 = ObjectIdentifier("1.3.132.0.35")
+ BRAINPOOLP256R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.7")
+ BRAINPOOLP384R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.11")
+ BRAINPOOLP512R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.13")
+ SECT163K1 = ObjectIdentifier("1.3.132.0.1")
+ SECT163R2 = ObjectIdentifier("1.3.132.0.15")
+ SECT233K1 = ObjectIdentifier("1.3.132.0.26")
+ SECT233R1 = ObjectIdentifier("1.3.132.0.27")
+ SECT283K1 = ObjectIdentifier("1.3.132.0.16")
+ SECT283R1 = ObjectIdentifier("1.3.132.0.17")
+ SECT409K1 = ObjectIdentifier("1.3.132.0.36")
+ SECT409R1 = ObjectIdentifier("1.3.132.0.37")
+ SECT571K1 = ObjectIdentifier("1.3.132.0.38")
+ SECT571R1 = ObjectIdentifier("1.3.132.0.39")
+
+
+class EllipticCurve(metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def name(self) -> str:
+ """
+ The name of the curve. e.g. secp256r1.
+ """
+
+ @property
+ @abc.abstractmethod
+ def key_size(self) -> int:
+ """
+ Bit size of a secret scalar for the curve.
+ """
+
+
+class EllipticCurveSignatureAlgorithm(metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def algorithm(
+ self,
+ ) -> typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm]:
+ """
+ The digest algorithm used with this signature.
+ """
+
+
+class EllipticCurvePrivateKey(metaclass=abc.ABCMeta):
+ @abc.abstractmethod
+ def exchange(
+ self, algorithm: ECDH, peer_public_key: EllipticCurvePublicKey
+ ) -> bytes:
+ """
+ Performs a key exchange operation using the provided algorithm
+ with the peer's public key.
+ """
+
+ @abc.abstractmethod
+ def public_key(self) -> EllipticCurvePublicKey:
+ """
+ The EllipticCurvePublicKey for this private key.
+ """
+
+ @property
+ @abc.abstractmethod
+ def curve(self) -> EllipticCurve:
+ """
+ The EllipticCurve that this key is on.
+ """
+
+ @property
+ @abc.abstractmethod
+ def key_size(self) -> int:
+ """
+ Bit size of a secret scalar for the curve.
+ """
+
+ @abc.abstractmethod
+ def sign(
+ self,
+ data: bytes,
+ signature_algorithm: EllipticCurveSignatureAlgorithm,
+ ) -> bytes:
+ """
+ Signs the data.
+ """
+
+ @abc.abstractmethod
+ def private_numbers(self) -> EllipticCurvePrivateNumbers:
+ """
+ Returns an EllipticCurvePrivateNumbers.
+ """
+
+ @abc.abstractmethod
+ def private_bytes(
+ self,
+ encoding: _serialization.Encoding,
+ format: _serialization.PrivateFormat,
+ encryption_algorithm: _serialization.KeySerializationEncryption,
+ ) -> bytes:
+ """
+ Returns the key serialized as bytes.
+ """
+
+
+EllipticCurvePrivateKeyWithSerialization = EllipticCurvePrivateKey
+
+
+class EllipticCurvePublicKey(metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def curve(self) -> EllipticCurve:
+ """
+ The EllipticCurve that this key is on.
+ """
+
+ @property
+ @abc.abstractmethod
+ def key_size(self) -> int:
+ """
+ Bit size of a secret scalar for the curve.
+ """
+
+ @abc.abstractmethod
+ def public_numbers(self) -> EllipticCurvePublicNumbers:
+ """
+ Returns an EllipticCurvePublicNumbers.
+ """
+
+ @abc.abstractmethod
+ def public_bytes(
+ self,
+ encoding: _serialization.Encoding,
+ format: _serialization.PublicFormat,
+ ) -> bytes:
+ """
+ Returns the key serialized as bytes.
+ """
+
+ @abc.abstractmethod
+ def verify(
+ self,
+ signature: bytes,
+ data: bytes,
+ signature_algorithm: EllipticCurveSignatureAlgorithm,
+ ) -> None:
+ """
+ Verifies the signature of the data.
+ """
+
+ @classmethod
+ def from_encoded_point(
+ cls, curve: EllipticCurve, data: bytes
+ ) -> EllipticCurvePublicKey:
+ utils._check_bytes("data", data)
+
+ if not isinstance(curve, EllipticCurve):
+ raise TypeError("curve must be an EllipticCurve instance")
+
+ if len(data) == 0:
+ raise ValueError("data must not be an empty byte string")
+
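+        # 0x02 and 0x03 mark X9.62 compressed points; 0x04 is uncompressed.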
+ if data[0] not in [0x02, 0x03, 0x04]:
+ raise ValueError("Unsupported elliptic curve point type")
+
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ return backend.load_elliptic_curve_public_bytes(curve, data)
+
+ @abc.abstractmethod
+ def __eq__(self, other: object) -> bool:
+ """
+ Checks equality.
+ """
+
+
+EllipticCurvePublicKeyWithSerialization = EllipticCurvePublicKey
+
+
+class SECT571R1(EllipticCurve):
+ name = "sect571r1"
+ key_size = 570
+
+
+class SECT409R1(EllipticCurve):
+ name = "sect409r1"
+ key_size = 409
+
+
+class SECT283R1(EllipticCurve):
+ name = "sect283r1"
+ key_size = 283
+
+
+class SECT233R1(EllipticCurve):
+ name = "sect233r1"
+ key_size = 233
+
+
+class SECT163R2(EllipticCurve):
+ name = "sect163r2"
+ key_size = 163
+
+
+class SECT571K1(EllipticCurve):
+ name = "sect571k1"
+ key_size = 571
+
+
+class SECT409K1(EllipticCurve):
+ name = "sect409k1"
+ key_size = 409
+
+
+class SECT283K1(EllipticCurve):
+ name = "sect283k1"
+ key_size = 283
+
+
+class SECT233K1(EllipticCurve):
+ name = "sect233k1"
+ key_size = 233
+
+
+class SECT163K1(EllipticCurve):
+ name = "sect163k1"
+ key_size = 163
+
+
+class SECP521R1(EllipticCurve):
+ name = "secp521r1"
+ key_size = 521
+
+
+class SECP384R1(EllipticCurve):
+ name = "secp384r1"
+ key_size = 384
+
+
+class SECP256R1(EllipticCurve):
+ name = "secp256r1"
+ key_size = 256
+
+
+class SECP256K1(EllipticCurve):
+ name = "secp256k1"
+ key_size = 256
+
+
+class SECP224R1(EllipticCurve):
+ name = "secp224r1"
+ key_size = 224
+
+
+class SECP192R1(EllipticCurve):
+ name = "secp192r1"
+ key_size = 192
+
+
+class BrainpoolP256R1(EllipticCurve):
+ name = "brainpoolP256r1"
+ key_size = 256
+
+
+class BrainpoolP384R1(EllipticCurve):
+ name = "brainpoolP384r1"
+ key_size = 384
+
+
+class BrainpoolP512R1(EllipticCurve):
+ name = "brainpoolP512r1"
+ key_size = 512
+
+
+_CURVE_TYPES: typing.Dict[str, typing.Type[EllipticCurve]] = {
+ "prime192v1": SECP192R1,
+ "prime256v1": SECP256R1,
+ "secp192r1": SECP192R1,
+ "secp224r1": SECP224R1,
+ "secp256r1": SECP256R1,
+ "secp384r1": SECP384R1,
+ "secp521r1": SECP521R1,
+ "secp256k1": SECP256K1,
+ "sect163k1": SECT163K1,
+ "sect233k1": SECT233K1,
+ "sect283k1": SECT283K1,
+ "sect409k1": SECT409K1,
+ "sect571k1": SECT571K1,
+ "sect163r2": SECT163R2,
+ "sect233r1": SECT233R1,
+ "sect283r1": SECT283R1,
+ "sect409r1": SECT409R1,
+ "sect571r1": SECT571R1,
+ "brainpoolP256r1": BrainpoolP256R1,
+ "brainpoolP384r1": BrainpoolP384R1,
+ "brainpoolP512r1": BrainpoolP512R1,
+}
+
+
+class ECDSA(EllipticCurveSignatureAlgorithm):
+ def __init__(
+ self,
+ algorithm: typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm],
+ ):
+ self._algorithm = algorithm
+
+ @property
+ def algorithm(
+ self,
+ ) -> typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm]:
+ return self._algorithm
+
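+# Usage sketch (illustrative): ECDSA sign/verify over secp256r1, using
+# generate_private_key defined below.
+#
+#   private_key = generate_private_key(SECP256R1())
+#   signature = private_key.sign(b"data", ECDSA(hashes.SHA256()))
+#   private_key.public_key().verify(signature, b"data", ECDSA(hashes.SHA256()))
+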
+
+def generate_private_key(
+ curve: EllipticCurve, backend: typing.Any = None
+) -> EllipticCurvePrivateKey:
+ from cryptography.hazmat.backends.openssl.backend import backend as ossl
+
+ return ossl.generate_elliptic_curve_private_key(curve)
+
+
+def derive_private_key(
+ private_value: int,
+ curve: EllipticCurve,
+ backend: typing.Any = None,
+) -> EllipticCurvePrivateKey:
+ from cryptography.hazmat.backends.openssl.backend import backend as ossl
+
+ if not isinstance(private_value, int):
+ raise TypeError("private_value must be an integer type.")
+
+ if private_value <= 0:
+ raise ValueError("private_value must be a positive integer.")
+
+ if not isinstance(curve, EllipticCurve):
+ raise TypeError("curve must provide the EllipticCurve interface.")
+
+ return ossl.derive_elliptic_curve_private_key(private_value, curve)
+
+
+class EllipticCurvePublicNumbers:
+ def __init__(self, x: int, y: int, curve: EllipticCurve):
+ if not isinstance(x, int) or not isinstance(y, int):
+ raise TypeError("x and y must be integers.")
+
+ if not isinstance(curve, EllipticCurve):
+ raise TypeError("curve must provide the EllipticCurve interface.")
+
+ self._y = y
+ self._x = x
+ self._curve = curve
+
+ def public_key(self, backend: typing.Any = None) -> EllipticCurvePublicKey:
+ from cryptography.hazmat.backends.openssl.backend import (
+ backend as ossl,
+ )
+
+ return ossl.load_elliptic_curve_public_numbers(self)
+
+ @property
+ def curve(self) -> EllipticCurve:
+ return self._curve
+
+ @property
+ def x(self) -> int:
+ return self._x
+
+ @property
+ def y(self) -> int:
+ return self._y
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, EllipticCurvePublicNumbers):
+ return NotImplemented
+
+ return (
+ self.x == other.x
+ and self.y == other.y
+ and self.curve.name == other.curve.name
+ and self.curve.key_size == other.curve.key_size
+ )
+
+ def __hash__(self) -> int:
+ return hash((self.x, self.y, self.curve.name, self.curve.key_size))
+
+ def __repr__(self) -> str:
+ return (
+ "".format(self)
+ )
+
+
+class EllipticCurvePrivateNumbers:
+ def __init__(
+ self, private_value: int, public_numbers: EllipticCurvePublicNumbers
+ ):
+ if not isinstance(private_value, int):
+ raise TypeError("private_value must be an integer.")
+
+ if not isinstance(public_numbers, EllipticCurvePublicNumbers):
+ raise TypeError(
+ "public_numbers must be an EllipticCurvePublicNumbers "
+ "instance."
+ )
+
+ self._private_value = private_value
+ self._public_numbers = public_numbers
+
+ def private_key(
+ self, backend: typing.Any = None
+ ) -> EllipticCurvePrivateKey:
+ from cryptography.hazmat.backends.openssl.backend import (
+ backend as ossl,
+ )
+
+ return ossl.load_elliptic_curve_private_numbers(self)
+
+ @property
+ def private_value(self) -> int:
+ return self._private_value
+
+ @property
+ def public_numbers(self) -> EllipticCurvePublicNumbers:
+ return self._public_numbers
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, EllipticCurvePrivateNumbers):
+ return NotImplemented
+
+ return (
+ self.private_value == other.private_value
+ and self.public_numbers == other.public_numbers
+ )
+
+ def __hash__(self) -> int:
+ return hash((self.private_value, self.public_numbers))
+
+
+class ECDH:
+ pass
+
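+# Usage sketch (illustrative): ECDH() selects the exchange algorithm. The raw
+# shared secret should normally be passed through a KDF before use as a key.
+#
+#   shared = private_key.exchange(ECDH(), peer_public_key)
+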
+
+_OID_TO_CURVE = {
+ EllipticCurveOID.SECP192R1: SECP192R1,
+ EllipticCurveOID.SECP224R1: SECP224R1,
+ EllipticCurveOID.SECP256K1: SECP256K1,
+ EllipticCurveOID.SECP256R1: SECP256R1,
+ EllipticCurveOID.SECP384R1: SECP384R1,
+ EllipticCurveOID.SECP521R1: SECP521R1,
+ EllipticCurveOID.BRAINPOOLP256R1: BrainpoolP256R1,
+ EllipticCurveOID.BRAINPOOLP384R1: BrainpoolP384R1,
+ EllipticCurveOID.BRAINPOOLP512R1: BrainpoolP512R1,
+ EllipticCurveOID.SECT163K1: SECT163K1,
+ EllipticCurveOID.SECT163R2: SECT163R2,
+ EllipticCurveOID.SECT233K1: SECT233K1,
+ EllipticCurveOID.SECT233R1: SECT233R1,
+ EllipticCurveOID.SECT283K1: SECT283K1,
+ EllipticCurveOID.SECT283R1: SECT283R1,
+ EllipticCurveOID.SECT409K1: SECT409K1,
+ EllipticCurveOID.SECT409R1: SECT409R1,
+ EllipticCurveOID.SECT571K1: SECT571K1,
+ EllipticCurveOID.SECT571R1: SECT571R1,
+}
+
+
+def get_curve_for_oid(oid: ObjectIdentifier) -> typing.Type[EllipticCurve]:
+ try:
+ return _OID_TO_CURVE[oid]
+ except KeyError:
+ raise LookupError(
+ "The provided object identifier has no matching elliptic "
+ "curve class"
+ )
diff --git a/billinglayer/python/cryptography/hazmat/primitives/asymmetric/ed25519.py b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/ed25519.py
new file mode 100644
index 0000000..f26e54d
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/ed25519.py
@@ -0,0 +1,118 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+
+from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
+from cryptography.hazmat.bindings._rust import openssl as rust_openssl
+from cryptography.hazmat.primitives import _serialization
+
+
+class Ed25519PublicKey(metaclass=abc.ABCMeta):
+ @classmethod
+ def from_public_bytes(cls, data: bytes) -> Ed25519PublicKey:
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.ed25519_supported():
+ raise UnsupportedAlgorithm(
+ "ed25519 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
+ )
+
+ return backend.ed25519_load_public_bytes(data)
+
+ @abc.abstractmethod
+ def public_bytes(
+ self,
+ encoding: _serialization.Encoding,
+ format: _serialization.PublicFormat,
+ ) -> bytes:
+ """
+ The serialized bytes of the public key.
+ """
+
+ @abc.abstractmethod
+ def public_bytes_raw(self) -> bytes:
+ """
+ The raw bytes of the public key.
+ Equivalent to public_bytes(Raw, Raw).
+ """
+
+ @abc.abstractmethod
+ def verify(self, signature: bytes, data: bytes) -> None:
+ """
+ Verify the signature.
+ """
+
+ @abc.abstractmethod
+ def __eq__(self, other: object) -> bool:
+ """
+ Checks equality.
+ """
+
+
+if hasattr(rust_openssl, "ed25519"):
+ Ed25519PublicKey.register(rust_openssl.ed25519.Ed25519PublicKey)
+
+
+class Ed25519PrivateKey(metaclass=abc.ABCMeta):
+ @classmethod
+ def generate(cls) -> Ed25519PrivateKey:
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.ed25519_supported():
+ raise UnsupportedAlgorithm(
+ "ed25519 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
+ )
+
+ return backend.ed25519_generate_key()
+
+ @classmethod
+ def from_private_bytes(cls, data: bytes) -> Ed25519PrivateKey:
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.ed25519_supported():
+ raise UnsupportedAlgorithm(
+ "ed25519 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
+ )
+
+ return backend.ed25519_load_private_bytes(data)
+
+ @abc.abstractmethod
+ def public_key(self) -> Ed25519PublicKey:
+ """
+ The Ed25519PublicKey derived from the private key.
+ """
+
+ @abc.abstractmethod
+ def private_bytes(
+ self,
+ encoding: _serialization.Encoding,
+ format: _serialization.PrivateFormat,
+ encryption_algorithm: _serialization.KeySerializationEncryption,
+ ) -> bytes:
+ """
+ The serialized bytes of the private key.
+ """
+
+ @abc.abstractmethod
+ def private_bytes_raw(self) -> bytes:
+ """
+ The raw bytes of the private key.
+ Equivalent to private_bytes(Raw, Raw, NoEncryption()).
+ """
+
+ @abc.abstractmethod
+ def sign(self, data: bytes) -> bytes:
+ """
+ Signs the data.
+ """
+
+
+if hasattr(rust_openssl, "x25519"):
+ Ed25519PrivateKey.register(rust_openssl.ed25519.Ed25519PrivateKey)
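+
+
+# Usage sketch (illustrative):
+#
+#   private_key = Ed25519PrivateKey.generate()
+#   signature = private_key.sign(b"message")
+#   # verify() raises InvalidSignature on a bad signature.
+#   private_key.public_key().verify(signature, b"message")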
diff --git a/billinglayer/python/cryptography/hazmat/primitives/asymmetric/ed448.py b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/ed448.py
new file mode 100644
index 0000000..a9a34b2
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/ed448.py
@@ -0,0 +1,117 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+
+from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
+from cryptography.hazmat.bindings._rust import openssl as rust_openssl
+from cryptography.hazmat.primitives import _serialization
+
+
+class Ed448PublicKey(metaclass=abc.ABCMeta):
+ @classmethod
+ def from_public_bytes(cls, data: bytes) -> Ed448PublicKey:
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.ed448_supported():
+ raise UnsupportedAlgorithm(
+ "ed448 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
+ )
+
+ return backend.ed448_load_public_bytes(data)
+
+ @abc.abstractmethod
+ def public_bytes(
+ self,
+ encoding: _serialization.Encoding,
+ format: _serialization.PublicFormat,
+ ) -> bytes:
+ """
+ The serialized bytes of the public key.
+ """
+
+ @abc.abstractmethod
+ def public_bytes_raw(self) -> bytes:
+ """
+ The raw bytes of the public key.
+ Equivalent to public_bytes(Raw, Raw).
+ """
+
+ @abc.abstractmethod
+ def verify(self, signature: bytes, data: bytes) -> None:
+ """
+ Verify the signature.
+ """
+
+ @abc.abstractmethod
+ def __eq__(self, other: object) -> bool:
+ """
+ Checks equality.
+ """
+
+
+if hasattr(rust_openssl, "ed448"):
+ Ed448PublicKey.register(rust_openssl.ed448.Ed448PublicKey)
+
+
+class Ed448PrivateKey(metaclass=abc.ABCMeta):
+ @classmethod
+ def generate(cls) -> Ed448PrivateKey:
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.ed448_supported():
+ raise UnsupportedAlgorithm(
+ "ed448 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
+ )
+ return backend.ed448_generate_key()
+
+ @classmethod
+ def from_private_bytes(cls, data: bytes) -> Ed448PrivateKey:
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.ed448_supported():
+ raise UnsupportedAlgorithm(
+ "ed448 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
+ )
+
+ return backend.ed448_load_private_bytes(data)
+
+ @abc.abstractmethod
+ def public_key(self) -> Ed448PublicKey:
+ """
+ The Ed448PublicKey derived from the private key.
+ """
+
+ @abc.abstractmethod
+ def sign(self, data: bytes) -> bytes:
+ """
+ Signs the data.
+ """
+
+ @abc.abstractmethod
+ def private_bytes(
+ self,
+ encoding: _serialization.Encoding,
+ format: _serialization.PrivateFormat,
+ encryption_algorithm: _serialization.KeySerializationEncryption,
+ ) -> bytes:
+ """
+ The serialized bytes of the private key.
+ """
+
+ @abc.abstractmethod
+ def private_bytes_raw(self) -> bytes:
+ """
+ The raw bytes of the private key.
+ Equivalent to private_bytes(Raw, Raw, NoEncryption()).
+ """
+
+
+if hasattr(rust_openssl, "x448"):
+ Ed448PrivateKey.register(rust_openssl.ed448.Ed448PrivateKey)
diff --git a/billinglayer/python/cryptography/hazmat/primitives/asymmetric/padding.py b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/padding.py
new file mode 100644
index 0000000..7198808
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/padding.py
@@ -0,0 +1,102 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+import typing
+
+from cryptography.hazmat.primitives import hashes
+from cryptography.hazmat.primitives._asymmetric import (
+ AsymmetricPadding as AsymmetricPadding,
+)
+from cryptography.hazmat.primitives.asymmetric import rsa
+
+
+class PKCS1v15(AsymmetricPadding):
+ name = "EMSA-PKCS1-v1_5"
+
+
+class _MaxLength:
+ "Sentinel value for `MAX_LENGTH`."
+
+
+class _Auto:
+ "Sentinel value for `AUTO`."
+
+
+class _DigestLength:
+ "Sentinel value for `DIGEST_LENGTH`."
+
+
+class PSS(AsymmetricPadding):
+ MAX_LENGTH = _MaxLength()
+ AUTO = _Auto()
+ DIGEST_LENGTH = _DigestLength()
+ name = "EMSA-PSS"
+ _salt_length: typing.Union[int, _MaxLength, _Auto, _DigestLength]
+
+ def __init__(
+ self,
+ mgf: MGF,
+ salt_length: typing.Union[int, _MaxLength, _Auto, _DigestLength],
+ ) -> None:
+ self._mgf = mgf
+
+ if not isinstance(
+ salt_length, (int, _MaxLength, _Auto, _DigestLength)
+ ):
+ raise TypeError(
+ "salt_length must be an integer, MAX_LENGTH, "
+ "DIGEST_LENGTH, or AUTO"
+ )
+
+ if isinstance(salt_length, int) and salt_length < 0:
+ raise ValueError("salt_length must be zero or greater.")
+
+ self._salt_length = salt_length
+
+
+class OAEP(AsymmetricPadding):
+ name = "EME-OAEP"
+
+ def __init__(
+ self,
+ mgf: MGF,
+ algorithm: hashes.HashAlgorithm,
+ label: typing.Optional[bytes],
+ ):
+ if not isinstance(algorithm, hashes.HashAlgorithm):
+ raise TypeError("Expected instance of hashes.HashAlgorithm.")
+
+ self._mgf = mgf
+ self._algorithm = algorithm
+ self._label = label
+
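+# Usage sketch (illustrative): OAEP encryption with an RSA public key.
+#
+#   ciphertext = public_key.encrypt(
+#       b"plaintext",
+#       OAEP(
+#           mgf=MGF1(algorithm=hashes.SHA256()),
+#           algorithm=hashes.SHA256(),
+#           label=None,
+#       ),
+#   )
+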
+
+class MGF(metaclass=abc.ABCMeta):
+ _algorithm: hashes.HashAlgorithm
+
+
+class MGF1(MGF):
+ MAX_LENGTH = _MaxLength()
+
+ def __init__(self, algorithm: hashes.HashAlgorithm):
+ if not isinstance(algorithm, hashes.HashAlgorithm):
+ raise TypeError("Expected instance of hashes.HashAlgorithm.")
+
+ self._algorithm = algorithm
+
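+# Usage sketch (illustrative): PSS signing with an RSA private key.
+#
+#   signature = private_key.sign(
+#       b"data",
+#       PSS(mgf=MGF1(hashes.SHA256()), salt_length=PSS.MAX_LENGTH),
+#       hashes.SHA256(),
+#   )
+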
+
+def calculate_max_pss_salt_length(
+ key: typing.Union[rsa.RSAPrivateKey, rsa.RSAPublicKey],
+ hash_algorithm: hashes.HashAlgorithm,
+) -> int:
+ if not isinstance(key, (rsa.RSAPrivateKey, rsa.RSAPublicKey)):
+ raise TypeError("key must be an RSA public or private key")
+ # Per RFC 3447, emBits = modBits - 1, so
+ # emLen = ceil((modBits - 1) / 8) = (key_size + 6) // 8.
+ emlen = (key.key_size + 6) // 8
+ salt_length = emlen - hash_algorithm.digest_size - 2
+ assert salt_length >= 0
+ return salt_length
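+
+
+# Worked example: for a 2048-bit key with SHA-256, emlen = (2048 + 6) // 8
+# = 256, so the maximum salt length is 256 - 32 - 2 = 222 bytes.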
diff --git a/billinglayer/python/cryptography/hazmat/primitives/asymmetric/rsa.py b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/rsa.py
new file mode 100644
index 0000000..b740f01
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/rsa.py
@@ -0,0 +1,439 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+import typing
+from math import gcd
+
+from cryptography.hazmat.primitives import _serialization, hashes
+from cryptography.hazmat.primitives._asymmetric import AsymmetricPadding
+from cryptography.hazmat.primitives.asymmetric import utils as asym_utils
+
+
+class RSAPrivateKey(metaclass=abc.ABCMeta):
+ @abc.abstractmethod
+ def decrypt(self, ciphertext: bytes, padding: AsymmetricPadding) -> bytes:
+ """
+ Decrypts the provided ciphertext.
+ """
+
+ @property
+ @abc.abstractmethod
+ def key_size(self) -> int:
+ """
+ The bit length of the public modulus.
+ """
+
+ @abc.abstractmethod
+ def public_key(self) -> RSAPublicKey:
+ """
+ The RSAPublicKey associated with this private key.
+ """
+
+ @abc.abstractmethod
+ def sign(
+ self,
+ data: bytes,
+ padding: AsymmetricPadding,
+ algorithm: typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm],
+ ) -> bytes:
+ """
+ Signs the data.
+ """
+
+ @abc.abstractmethod
+ def private_numbers(self) -> RSAPrivateNumbers:
+ """
+ Returns an RSAPrivateNumbers.
+ """
+
+ @abc.abstractmethod
+ def private_bytes(
+ self,
+ encoding: _serialization.Encoding,
+ format: _serialization.PrivateFormat,
+ encryption_algorithm: _serialization.KeySerializationEncryption,
+ ) -> bytes:
+ """
+ Returns the key serialized as bytes.
+ """
+
+
+RSAPrivateKeyWithSerialization = RSAPrivateKey
+
+
+class RSAPublicKey(metaclass=abc.ABCMeta):
+ @abc.abstractmethod
+ def encrypt(self, plaintext: bytes, padding: AsymmetricPadding) -> bytes:
+ """
+ Encrypts the given plaintext.
+ """
+
+ @property
+ @abc.abstractmethod
+ def key_size(self) -> int:
+ """
+ The bit length of the public modulus.
+ """
+
+ @abc.abstractmethod
+ def public_numbers(self) -> RSAPublicNumbers:
+ """
+ Returns an RSAPublicNumbers.
+ """
+
+ @abc.abstractmethod
+ def public_bytes(
+ self,
+ encoding: _serialization.Encoding,
+ format: _serialization.PublicFormat,
+ ) -> bytes:
+ """
+ Returns the key serialized as bytes.
+ """
+
+ @abc.abstractmethod
+ def verify(
+ self,
+ signature: bytes,
+ data: bytes,
+ padding: AsymmetricPadding,
+ algorithm: typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm],
+ ) -> None:
+ """
+ Verifies the signature of the data.
+ """
+
+ @abc.abstractmethod
+ def recover_data_from_signature(
+ self,
+ signature: bytes,
+ padding: AsymmetricPadding,
+ algorithm: typing.Optional[hashes.HashAlgorithm],
+ ) -> bytes:
+ """
+ Recovers the original data from the signature.
+ """
+
+ @abc.abstractmethod
+ def __eq__(self, other: object) -> bool:
+ """
+ Checks equality.
+ """
+
+
+RSAPublicKeyWithSerialization = RSAPublicKey
+
+
+def generate_private_key(
+ public_exponent: int,
+ key_size: int,
+ backend: typing.Any = None,
+) -> RSAPrivateKey:
+ from cryptography.hazmat.backends.openssl.backend import backend as ossl
+
+ _verify_rsa_parameters(public_exponent, key_size)
+ return ossl.generate_rsa_private_key(public_exponent, key_size)
+
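+# Usage sketch (illustrative):
+#
+#   private_key = generate_private_key(public_exponent=65537, key_size=2048)
+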
+
+def _verify_rsa_parameters(public_exponent: int, key_size: int) -> None:
+ if public_exponent not in (3, 65537):
+ raise ValueError(
+ "public_exponent must be either 3 (for legacy compatibility) or "
+ "65537. Almost everyone should choose 65537 here!"
+ )
+
+ if key_size < 512:
+ raise ValueError("key_size must be at least 512-bits.")
+
+
+def _check_private_key_components(
+ p: int,
+ q: int,
+ private_exponent: int,
+ dmp1: int,
+ dmq1: int,
+ iqmp: int,
+ public_exponent: int,
+ modulus: int,
+) -> None:
+ if modulus < 3:
+ raise ValueError("modulus must be >= 3.")
+
+ if p >= modulus:
+ raise ValueError("p must be < modulus.")
+
+ if q >= modulus:
+ raise ValueError("q must be < modulus.")
+
+ if dmp1 >= modulus:
+ raise ValueError("dmp1 must be < modulus.")
+
+ if dmq1 >= modulus:
+ raise ValueError("dmq1 must be < modulus.")
+
+ if iqmp >= modulus:
+ raise ValueError("iqmp must be < modulus.")
+
+ if private_exponent >= modulus:
+ raise ValueError("private_exponent must be < modulus.")
+
+ if public_exponent < 3 or public_exponent >= modulus:
+ raise ValueError("public_exponent must be >= 3 and < modulus.")
+
+ if public_exponent & 1 == 0:
+ raise ValueError("public_exponent must be odd.")
+
+ if dmp1 & 1 == 0:
+ raise ValueError("dmp1 must be odd.")
+
+ if dmq1 & 1 == 0:
+ raise ValueError("dmq1 must be odd.")
+
+ if p * q != modulus:
+ raise ValueError("p*q must equal modulus.")
+
+
+def _check_public_key_components(e: int, n: int) -> None:
+ if n < 3:
+ raise ValueError("n must be >= 3.")
+
+ if e < 3 or e >= n:
+ raise ValueError("e must be >= 3 and < n.")
+
+ if e & 1 == 0:
+ raise ValueError("e must be odd.")
+
+
+def _modinv(e: int, m: int) -> int:
+ """
+ Modular Multiplicative Inverse. Returns x such that: (x*e) mod m == 1
+ """
+ x1, x2 = 1, 0
+ a, b = e, m
+ while b > 0:
+ q, r = divmod(a, b)
+ xn = x1 - q * x2
+ a, b, x1, x2 = b, r, x2, xn
+ return x1 % m
+
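+# Worked example: _modinv(3, 7) == 5, since (5 * 3) % 7 == 15 % 7 == 1.
+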
+
+def rsa_crt_iqmp(p: int, q: int) -> int:
+ """
+ Compute the CRT (q ** -1) % p value from RSA primes p and q.
+ """
+ return _modinv(q, p)
+
+
+def rsa_crt_dmp1(private_exponent: int, p: int) -> int:
+ """
+ Compute the CRT private_exponent % (p - 1) value from the RSA
+ private_exponent (d) and p.
+ """
+ return private_exponent % (p - 1)
+
+
+def rsa_crt_dmq1(private_exponent: int, q: int) -> int:
+ """
+ Compute the CRT private_exponent % (q - 1) value from the RSA
+ private_exponent (d) and q.
+ """
+ return private_exponent % (q - 1)
+
+
+# Controls the number of iterations rsa_recover_prime_factors will perform
+# to obtain the prime factors. Each iteration increments by 2 so the actual
+# maximum attempts is half this number.
+_MAX_RECOVERY_ATTEMPTS = 1000
+
+
+def rsa_recover_prime_factors(
+ n: int, e: int, d: int
+) -> typing.Tuple[int, int]:
+ """
+ Compute factors p and q from the private exponent d. We assume that n has
+ no more than two factors. This function is adapted from code in PyCrypto.
+ """
+ # See 8.2.2(i) in Handbook of Applied Cryptography.
+ ktot = d * e - 1
+ # The quantity d*e-1 is a multiple of phi(n), even,
+ # and can be represented as t*2^s.
+ t = ktot
+ while t % 2 == 0:
+ t = t // 2
+ # Cycle through all multiplicative inverses in Zn.
+ # The algorithm is non-deterministic, but there is a 50% chance
+ # any candidate a leads to successful factoring.
+ # See "Digitalized Signatures and Public Key Functions as Intractable
+ # as Factorization", M. Rabin, 1979
+ spotted = False
+ a = 2
+ while not spotted and a < _MAX_RECOVERY_ATTEMPTS:
+ k = t
+ # Cycle through all values a^{t*2^i}=a^k
+ while k < ktot:
+ cand = pow(a, k, n)
+ # Check if a^k is a non-trivial root of unity (mod n)
+ if cand != 1 and cand != (n - 1) and pow(cand, 2, n) == 1:
+ # We have found a number such that (cand-1)(cand+1)=0 (mod n).
+ # Either of the terms divides n.
+ p = gcd(cand + 1, n)
+ spotted = True
+ break
+ k *= 2
+ # This value was not any good... let's try another!
+ a += 2
+ if not spotted:
+ raise ValueError("Unable to compute factors p and q from exponent d.")
+ # Found!
+ q, r = divmod(n, p)
+ assert r == 0
+ p, q = sorted((p, q), reverse=True)
+ return (p, q)
+
+
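For the same toy key, the recovery routine above finds the factors after only a few candidate values of a. A sketch (the search is probabilistic, but for a valid key it succeeds essentially always):

```python
# Sketch with the textbook toy key n = 61 * 53; real keys are 2048+ bits.
from cryptography.hazmat.primitives.asymmetric import rsa

p, q = rsa.rsa_recover_prime_factors(n=3233, e=17, d=2753)
assert (p, q) == (61, 53)   # returned sorted so that p > q
```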
+class RSAPrivateNumbers:
+ def __init__(
+ self,
+ p: int,
+ q: int,
+ d: int,
+ dmp1: int,
+ dmq1: int,
+ iqmp: int,
+ public_numbers: RSAPublicNumbers,
+ ):
+ if (
+ not isinstance(p, int)
+ or not isinstance(q, int)
+ or not isinstance(d, int)
+ or not isinstance(dmp1, int)
+ or not isinstance(dmq1, int)
+ or not isinstance(iqmp, int)
+ ):
+ raise TypeError(
+ "RSAPrivateNumbers p, q, d, dmp1, dmq1, iqmp arguments must"
+ " all be an integers."
+ )
+
+ if not isinstance(public_numbers, RSAPublicNumbers):
+ raise TypeError(
+ "RSAPrivateNumbers public_numbers must be an RSAPublicNumbers"
+ " instance."
+ )
+
+ self._p = p
+ self._q = q
+ self._d = d
+ self._dmp1 = dmp1
+ self._dmq1 = dmq1
+ self._iqmp = iqmp
+ self._public_numbers = public_numbers
+
+ @property
+ def p(self) -> int:
+ return self._p
+
+ @property
+ def q(self) -> int:
+ return self._q
+
+ @property
+ def d(self) -> int:
+ return self._d
+
+ @property
+ def dmp1(self) -> int:
+ return self._dmp1
+
+ @property
+ def dmq1(self) -> int:
+ return self._dmq1
+
+ @property
+ def iqmp(self) -> int:
+ return self._iqmp
+
+ @property
+ def public_numbers(self) -> RSAPublicNumbers:
+ return self._public_numbers
+
+ def private_key(
+ self,
+ backend: typing.Any = None,
+ *,
+ unsafe_skip_rsa_key_validation: bool = False,
+ ) -> RSAPrivateKey:
+ from cryptography.hazmat.backends.openssl.backend import (
+ backend as ossl,
+ )
+
+ return ossl.load_rsa_private_numbers(
+ self, unsafe_skip_rsa_key_validation
+ )
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, RSAPrivateNumbers):
+ return NotImplemented
+
+ return (
+ self.p == other.p
+ and self.q == other.q
+ and self.d == other.d
+ and self.dmp1 == other.dmp1
+ and self.dmq1 == other.dmq1
+ and self.iqmp == other.iqmp
+ and self.public_numbers == other.public_numbers
+ )
+
+ def __hash__(self) -> int:
+ return hash(
+ (
+ self.p,
+ self.q,
+ self.d,
+ self.dmp1,
+ self.dmq1,
+ self.iqmp,
+ self.public_numbers,
+ )
+ )
+
+
+class RSAPublicNumbers:
+ def __init__(self, e: int, n: int):
+ if not isinstance(e, int) or not isinstance(n, int):
+ raise TypeError("RSAPublicNumbers arguments must be integers.")
+
+ self._e = e
+ self._n = n
+
+ @property
+ def e(self) -> int:
+ return self._e
+
+ @property
+ def n(self) -> int:
+ return self._n
+
+ def public_key(self, backend: typing.Any = None) -> RSAPublicKey:
+ from cryptography.hazmat.backends.openssl.backend import (
+ backend as ossl,
+ )
+
+ return ossl.load_rsa_public_numbers(self)
+
+ def __repr__(self) -> str:
+ return "".format(self)
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, RSAPublicNumbers):
+ return NotImplemented
+
+ return self.e == other.e and self.n == other.n
+
+ def __hash__(self) -> int:
+ return hash((self.e, self.n))
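The two number classes round-trip cleanly with a generated key. A minimal sketch, assuming the vendored tree is on sys.path so it imports as `cryptography`:

```python
from cryptography.hazmat.primitives.asymmetric import rsa

key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
priv = key.private_numbers()       # RSAPrivateNumbers
pub = priv.public_numbers          # the embedded RSAPublicNumbers
restored = priv.private_key()      # back to a usable RSAPrivateKey
assert restored.public_key().public_numbers() == pub
```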
diff --git a/billinglayer/python/cryptography/hazmat/primitives/asymmetric/types.py b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/types.py
new file mode 100644
index 0000000..1fe4eaf
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/types.py
@@ -0,0 +1,111 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import typing
+
+from cryptography import utils
+from cryptography.hazmat.primitives.asymmetric import (
+ dh,
+ dsa,
+ ec,
+ ed448,
+ ed25519,
+ rsa,
+ x448,
+ x25519,
+)
+
+# Every asymmetric key type
+PublicKeyTypes = typing.Union[
+ dh.DHPublicKey,
+ dsa.DSAPublicKey,
+ rsa.RSAPublicKey,
+ ec.EllipticCurvePublicKey,
+ ed25519.Ed25519PublicKey,
+ ed448.Ed448PublicKey,
+ x25519.X25519PublicKey,
+ x448.X448PublicKey,
+]
+PUBLIC_KEY_TYPES = PublicKeyTypes
+utils.deprecated(
+ PUBLIC_KEY_TYPES,
+ __name__,
+ "Use PublicKeyTypes instead",
+ utils.DeprecatedIn40,
+ name="PUBLIC_KEY_TYPES",
+)
+# Every asymmetric key type
+PrivateKeyTypes = typing.Union[
+ dh.DHPrivateKey,
+ ed25519.Ed25519PrivateKey,
+ ed448.Ed448PrivateKey,
+ rsa.RSAPrivateKey,
+ dsa.DSAPrivateKey,
+ ec.EllipticCurvePrivateKey,
+ x25519.X25519PrivateKey,
+ x448.X448PrivateKey,
+]
+PRIVATE_KEY_TYPES = PrivateKeyTypes
+utils.deprecated(
+ PRIVATE_KEY_TYPES,
+ __name__,
+ "Use PrivateKeyTypes instead",
+ utils.DeprecatedIn40,
+ name="PRIVATE_KEY_TYPES",
+)
+# Just the key types we allow to be used for x509 signing. This mirrors
+# the certificate public key types
+CertificateIssuerPrivateKeyTypes = typing.Union[
+ ed25519.Ed25519PrivateKey,
+ ed448.Ed448PrivateKey,
+ rsa.RSAPrivateKey,
+ dsa.DSAPrivateKey,
+ ec.EllipticCurvePrivateKey,
+]
+CERTIFICATE_PRIVATE_KEY_TYPES = CertificateIssuerPrivateKeyTypes
+utils.deprecated(
+ CERTIFICATE_PRIVATE_KEY_TYPES,
+ __name__,
+ "Use CertificateIssuerPrivateKeyTypes instead",
+ utils.DeprecatedIn40,
+ name="CERTIFICATE_PRIVATE_KEY_TYPES",
+)
+# Just the key types we allow to be used for x509 signing. This mirrors
+# the certificate private key types
+CertificateIssuerPublicKeyTypes = typing.Union[
+ dsa.DSAPublicKey,
+ rsa.RSAPublicKey,
+ ec.EllipticCurvePublicKey,
+ ed25519.Ed25519PublicKey,
+ ed448.Ed448PublicKey,
+]
+CERTIFICATE_ISSUER_PUBLIC_KEY_TYPES = CertificateIssuerPublicKeyTypes
+utils.deprecated(
+ CERTIFICATE_ISSUER_PUBLIC_KEY_TYPES,
+ __name__,
+ "Use CertificateIssuerPublicKeyTypes instead",
+ utils.DeprecatedIn40,
+ name="CERTIFICATE_ISSUER_PUBLIC_KEY_TYPES",
+)
+# This type removes DHPublicKey. x448/x25519 can be a public key
+# but cannot be used in signing so they are allowed here.
+CertificatePublicKeyTypes = typing.Union[
+ dsa.DSAPublicKey,
+ rsa.RSAPublicKey,
+ ec.EllipticCurvePublicKey,
+ ed25519.Ed25519PublicKey,
+ ed448.Ed448PublicKey,
+ x25519.X25519PublicKey,
+ x448.X448PublicKey,
+]
+CERTIFICATE_PUBLIC_KEY_TYPES = CertificatePublicKeyTypes
+utils.deprecated(
+ CERTIFICATE_PUBLIC_KEY_TYPES,
+ __name__,
+ "Use CertificatePublicKeyTypes instead",
+ utils.DeprecatedIn40,
+ name="CERTIFICATE_PUBLIC_KEY_TYPES",
+)
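These unions are plain typing aliases; the ALL_CAPS names now emit a deprecation warning on access. A sketch of the intended use in annotations (`key_size_of` is a hypothetical helper, not part of the library):

```python
import typing

from cryptography.hazmat.primitives.asymmetric.types import PublicKeyTypes


def key_size_of(key: PublicKeyTypes) -> typing.Optional[int]:
    # Ed25519/Ed448/X25519/X448 keys expose no key_size attribute.
    return getattr(key, "key_size", None)
```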
diff --git a/billinglayer/python/cryptography/hazmat/primitives/asymmetric/utils.py b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/utils.py
new file mode 100644
index 0000000..826b956
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/utils.py
@@ -0,0 +1,24 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+from cryptography.hazmat.bindings._rust import asn1
+from cryptography.hazmat.primitives import hashes
+
+decode_dss_signature = asn1.decode_dss_signature
+encode_dss_signature = asn1.encode_dss_signature
+
+
+class Prehashed:
+ def __init__(self, algorithm: hashes.HashAlgorithm):
+ if not isinstance(algorithm, hashes.HashAlgorithm):
+ raise TypeError("Expected instance of HashAlgorithm.")
+
+ self._algorithm = algorithm
+ self._digest_size = algorithm.digest_size
+
+ @property
+ def digest_size(self) -> int:
+ return self._digest_size
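Prehashed marks a signature argument as an already-computed digest, so the signer does not hash again. A minimal sketch with RSA-PSS:

```python
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding, rsa, utils

key = rsa.generate_private_key(public_exponent=65537, key_size=2048)

h = hashes.Hash(hashes.SHA256())
h.update(b"data hashed elsewhere, perhaps on another machine")
digest = h.finalize()

signature = key.sign(
    digest,                              # the digest, not the message
    padding.PSS(
        mgf=padding.MGF1(hashes.SHA256()),
        salt_length=padding.PSS.MAX_LENGTH,
    ),
    utils.Prehashed(hashes.SHA256()),    # tells sign() the input is a digest
)
```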
diff --git a/billinglayer/python/cryptography/hazmat/primitives/asymmetric/x25519.py b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/x25519.py
new file mode 100644
index 0000000..699054c
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/x25519.py
@@ -0,0 +1,113 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+
+from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
+from cryptography.hazmat.bindings._rust import openssl as rust_openssl
+from cryptography.hazmat.primitives import _serialization
+
+
+class X25519PublicKey(metaclass=abc.ABCMeta):
+ @classmethod
+ def from_public_bytes(cls, data: bytes) -> X25519PublicKey:
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.x25519_supported():
+ raise UnsupportedAlgorithm(
+ "X25519 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
+ )
+
+ return backend.x25519_load_public_bytes(data)
+
+ @abc.abstractmethod
+ def public_bytes(
+ self,
+ encoding: _serialization.Encoding,
+ format: _serialization.PublicFormat,
+ ) -> bytes:
+ """
+ The serialized bytes of the public key.
+ """
+
+ @abc.abstractmethod
+ def public_bytes_raw(self) -> bytes:
+ """
+ The raw bytes of the public key.
+ Equivalent to public_bytes(Raw, Raw).
+ """
+
+ @abc.abstractmethod
+ def __eq__(self, other: object) -> bool:
+ """
+ Checks equality.
+ """
+
+
+# For LibreSSL
+if hasattr(rust_openssl, "x25519"):
+ X25519PublicKey.register(rust_openssl.x25519.X25519PublicKey)
+
+
+class X25519PrivateKey(metaclass=abc.ABCMeta):
+ @classmethod
+ def generate(cls) -> X25519PrivateKey:
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.x25519_supported():
+ raise UnsupportedAlgorithm(
+ "X25519 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
+ )
+ return backend.x25519_generate_key()
+
+ @classmethod
+ def from_private_bytes(cls, data: bytes) -> X25519PrivateKey:
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.x25519_supported():
+ raise UnsupportedAlgorithm(
+ "X25519 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
+ )
+
+ return backend.x25519_load_private_bytes(data)
+
+ @abc.abstractmethod
+ def public_key(self) -> X25519PublicKey:
+ """
+ Returns the public key associated with this private key
+ """
+
+ @abc.abstractmethod
+ def private_bytes(
+ self,
+ encoding: _serialization.Encoding,
+ format: _serialization.PrivateFormat,
+ encryption_algorithm: _serialization.KeySerializationEncryption,
+ ) -> bytes:
+ """
+ The serialized bytes of the private key.
+ """
+
+ @abc.abstractmethod
+ def private_bytes_raw(self) -> bytes:
+ """
+ The raw bytes of the private key.
+ Equivalent to private_bytes(Raw, Raw, NoEncryption()).
+ """
+
+ @abc.abstractmethod
+ def exchange(self, peer_public_key: X25519PublicKey) -> bytes:
+ """
+ Performs a key exchange operation using the provided peer's public key.
+ """
+
+
+# For LibreSSL
+if hasattr(rust_openssl, "x25519"):
+ X25519PrivateKey.register(rust_openssl.x25519.X25519PrivateKey)
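A sketch of the Diffie-Hellman exchange these classes exist for:

```python
from cryptography.hazmat.primitives.asymmetric.x25519 import X25519PrivateKey

alice = X25519PrivateKey.generate()
bob = X25519PrivateKey.generate()

# Each side combines its own private key with the peer's public key.
assert alice.exchange(bob.public_key()) == bob.exchange(alice.public_key())
# The 32-byte shared secret should still be run through a KDF before use.
```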
diff --git a/billinglayer/python/cryptography/hazmat/primitives/asymmetric/x448.py b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/x448.py
new file mode 100644
index 0000000..abf7848
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/asymmetric/x448.py
@@ -0,0 +1,111 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+
+from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
+from cryptography.hazmat.bindings._rust import openssl as rust_openssl
+from cryptography.hazmat.primitives import _serialization
+
+
+class X448PublicKey(metaclass=abc.ABCMeta):
+ @classmethod
+ def from_public_bytes(cls, data: bytes) -> X448PublicKey:
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.x448_supported():
+ raise UnsupportedAlgorithm(
+ "X448 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
+ )
+
+ return backend.x448_load_public_bytes(data)
+
+ @abc.abstractmethod
+ def public_bytes(
+ self,
+ encoding: _serialization.Encoding,
+ format: _serialization.PublicFormat,
+ ) -> bytes:
+ """
+ The serialized bytes of the public key.
+ """
+
+ @abc.abstractmethod
+ def public_bytes_raw(self) -> bytes:
+ """
+ The raw bytes of the public key.
+ Equivalent to public_bytes(Raw, Raw).
+ """
+
+ @abc.abstractmethod
+ def __eq__(self, other: object) -> bool:
+ """
+ Checks equality.
+ """
+
+
+if hasattr(rust_openssl, "x448"):
+ X448PublicKey.register(rust_openssl.x448.X448PublicKey)
+
+
+class X448PrivateKey(metaclass=abc.ABCMeta):
+ @classmethod
+ def generate(cls) -> X448PrivateKey:
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.x448_supported():
+ raise UnsupportedAlgorithm(
+ "X448 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
+ )
+ return backend.x448_generate_key()
+
+ @classmethod
+ def from_private_bytes(cls, data: bytes) -> X448PrivateKey:
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.x448_supported():
+ raise UnsupportedAlgorithm(
+ "X448 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
+ )
+
+ return backend.x448_load_private_bytes(data)
+
+ @abc.abstractmethod
+ def public_key(self) -> X448PublicKey:
+ """
+ Returns the public key associated with this private key
+ """
+
+ @abc.abstractmethod
+ def private_bytes(
+ self,
+ encoding: _serialization.Encoding,
+ format: _serialization.PrivateFormat,
+ encryption_algorithm: _serialization.KeySerializationEncryption,
+ ) -> bytes:
+ """
+ The serialized bytes of the private key.
+ """
+
+ @abc.abstractmethod
+ def private_bytes_raw(self) -> bytes:
+ """
+ The raw bytes of the private key.
+ Equivalent to private_bytes(Raw, Raw, NoEncryption()).
+ """
+
+ @abc.abstractmethod
+ def exchange(self, peer_public_key: X448PublicKey) -> bytes:
+ """
+ Performs a key exchange operation using the provided peer's public key.
+ """
+
+
+if hasattr(rust_openssl, "x448"):
+ X448PrivateKey.register(rust_openssl.x448.X448PrivateKey)
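X448 mirrors X25519 with 56-byte keys. A sketch of the raw-bytes round trip defined above:

```python
from cryptography.hazmat.primitives.asymmetric.x448 import X448PrivateKey

key = X448PrivateKey.generate()
raw = key.private_bytes_raw()                  # 56 bytes, no encoding
clone = X448PrivateKey.from_private_bytes(raw)
assert (
    clone.public_key().public_bytes_raw()
    == key.public_key().public_bytes_raw()
)
```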
diff --git a/billinglayer/python/cryptography/hazmat/primitives/ciphers/__init__.py b/billinglayer/python/cryptography/hazmat/primitives/ciphers/__init__.py
new file mode 100644
index 0000000..cc88fbf
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/ciphers/__init__.py
@@ -0,0 +1,27 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+from cryptography.hazmat.primitives._cipheralgorithm import (
+ BlockCipherAlgorithm,
+ CipherAlgorithm,
+)
+from cryptography.hazmat.primitives.ciphers.base import (
+ AEADCipherContext,
+ AEADDecryptionContext,
+ AEADEncryptionContext,
+ Cipher,
+ CipherContext,
+)
+
+__all__ = [
+ "Cipher",
+ "CipherAlgorithm",
+ "BlockCipherAlgorithm",
+ "CipherContext",
+ "AEADCipherContext",
+ "AEADDecryptionContext",
+ "AEADEncryptionContext",
+]
diff --git a/billinglayer/python/cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..b958ad0
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-311.pyc
new file mode 100644
index 0000000..c0a7b5b
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-311.pyc
new file mode 100644
index 0000000..511cf81
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-311.pyc
new file mode 100644
index 0000000..8910d4a
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-311.pyc
new file mode 100644
index 0000000..a2e7eae
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/ciphers/aead.py b/billinglayer/python/cryptography/hazmat/primitives/ciphers/aead.py
new file mode 100644
index 0000000..957b2d2
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/ciphers/aead.py
@@ -0,0 +1,378 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import os
+import typing
+
+from cryptography import exceptions, utils
+from cryptography.hazmat.backends.openssl import aead
+from cryptography.hazmat.backends.openssl.backend import backend
+from cryptography.hazmat.bindings._rust import FixedPool
+
+
+class ChaCha20Poly1305:
+ _MAX_SIZE = 2**31 - 1
+
+ def __init__(self, key: bytes):
+ if not backend.aead_cipher_supported(self):
+ raise exceptions.UnsupportedAlgorithm(
+ "ChaCha20Poly1305 is not supported by this version of OpenSSL",
+ exceptions._Reasons.UNSUPPORTED_CIPHER,
+ )
+ utils._check_byteslike("key", key)
+
+ if len(key) != 32:
+ raise ValueError("ChaCha20Poly1305 key must be 32 bytes.")
+
+ self._key = key
+ self._pool = FixedPool(self._create_fn)
+
+ @classmethod
+ def generate_key(cls) -> bytes:
+ return os.urandom(32)
+
+ def _create_fn(self):
+ return aead._aead_create_ctx(backend, self, self._key)
+
+ def encrypt(
+ self,
+ nonce: bytes,
+ data: bytes,
+ associated_data: typing.Optional[bytes],
+ ) -> bytes:
+ if associated_data is None:
+ associated_data = b""
+
+ if len(data) > self._MAX_SIZE or len(associated_data) > self._MAX_SIZE:
+ # This is OverflowError to match what cffi would raise
+ raise OverflowError(
+ "Data or associated data too long. Max 2**31 - 1 bytes"
+ )
+
+ self._check_params(nonce, data, associated_data)
+ with self._pool.acquire() as ctx:
+ return aead._encrypt(
+ backend, self, nonce, data, [associated_data], 16, ctx
+ )
+
+ def decrypt(
+ self,
+ nonce: bytes,
+ data: bytes,
+ associated_data: typing.Optional[bytes],
+ ) -> bytes:
+ if associated_data is None:
+ associated_data = b""
+
+ self._check_params(nonce, data, associated_data)
+ with self._pool.acquire() as ctx:
+ return aead._decrypt(
+ backend, self, nonce, data, [associated_data], 16, ctx
+ )
+
+ def _check_params(
+ self,
+ nonce: bytes,
+ data: bytes,
+ associated_data: bytes,
+ ) -> None:
+ utils._check_byteslike("nonce", nonce)
+ utils._check_byteslike("data", data)
+ utils._check_byteslike("associated_data", associated_data)
+ if len(nonce) != 12:
+ raise ValueError("Nonce must be 12 bytes")
+
+
+class AESCCM:
+ _MAX_SIZE = 2**31 - 1
+
+ def __init__(self, key: bytes, tag_length: int = 16):
+ utils._check_byteslike("key", key)
+ if len(key) not in (16, 24, 32):
+ raise ValueError("AESCCM key must be 128, 192, or 256 bits.")
+
+ self._key = key
+ if not isinstance(tag_length, int):
+ raise TypeError("tag_length must be an integer")
+
+ if tag_length not in (4, 6, 8, 10, 12, 14, 16):
+ raise ValueError("Invalid tag_length")
+
+ self._tag_length = tag_length
+
+ if not backend.aead_cipher_supported(self):
+ raise exceptions.UnsupportedAlgorithm(
+ "AESCCM is not supported by this version of OpenSSL",
+ exceptions._Reasons.UNSUPPORTED_CIPHER,
+ )
+
+ @classmethod
+ def generate_key(cls, bit_length: int) -> bytes:
+ if not isinstance(bit_length, int):
+ raise TypeError("bit_length must be an integer")
+
+ if bit_length not in (128, 192, 256):
+ raise ValueError("bit_length must be 128, 192, or 256")
+
+ return os.urandom(bit_length // 8)
+
+ def encrypt(
+ self,
+ nonce: bytes,
+ data: bytes,
+ associated_data: typing.Optional[bytes],
+ ) -> bytes:
+ if associated_data is None:
+ associated_data = b""
+
+ if len(data) > self._MAX_SIZE or len(associated_data) > self._MAX_SIZE:
+ # This is OverflowError to match what cffi would raise
+ raise OverflowError(
+ "Data or associated data too long. Max 2**31 - 1 bytes"
+ )
+
+ self._check_params(nonce, data, associated_data)
+ self._validate_lengths(nonce, len(data))
+ return aead._encrypt(
+ backend, self, nonce, data, [associated_data], self._tag_length
+ )
+
+ def decrypt(
+ self,
+ nonce: bytes,
+ data: bytes,
+ associated_data: typing.Optional[bytes],
+ ) -> bytes:
+ if associated_data is None:
+ associated_data = b""
+
+ self._check_params(nonce, data, associated_data)
+ return aead._decrypt(
+ backend, self, nonce, data, [associated_data], self._tag_length
+ )
+
+ def _validate_lengths(self, nonce: bytes, data_len: int) -> None:
+ # For information about computing this, see
+ # https://tools.ietf.org/html/rfc3610#section-2.1
+ l_val = 15 - len(nonce)
+ if 2 ** (8 * l_val) < data_len:
+ raise ValueError("Data too long for nonce")
+
+ def _check_params(
+ self, nonce: bytes, data: bytes, associated_data: bytes
+ ) -> None:
+ utils._check_byteslike("nonce", nonce)
+ utils._check_byteslike("data", data)
+ utils._check_byteslike("associated_data", associated_data)
+ if not 7 <= len(nonce) <= 13:
+ raise ValueError("Nonce must be between 7 and 13 bytes")
+
+
+class AESGCM:
+ _MAX_SIZE = 2**31 - 1
+
+ def __init__(self, key: bytes):
+ utils._check_byteslike("key", key)
+ if len(key) not in (16, 24, 32):
+ raise ValueError("AESGCM key must be 128, 192, or 256 bits.")
+
+ self._key = key
+
+ @classmethod
+ def generate_key(cls, bit_length: int) -> bytes:
+ if not isinstance(bit_length, int):
+ raise TypeError("bit_length must be an integer")
+
+ if bit_length not in (128, 192, 256):
+ raise ValueError("bit_length must be 128, 192, or 256")
+
+ return os.urandom(bit_length // 8)
+
+ def encrypt(
+ self,
+ nonce: bytes,
+ data: bytes,
+ associated_data: typing.Optional[bytes],
+ ) -> bytes:
+ if associated_data is None:
+ associated_data = b""
+
+ if len(data) > self._MAX_SIZE or len(associated_data) > self._MAX_SIZE:
+ # This is OverflowError to match what cffi would raise
+ raise OverflowError(
+ "Data or associated data too long. Max 2**31 - 1 bytes"
+ )
+
+ self._check_params(nonce, data, associated_data)
+ return aead._encrypt(backend, self, nonce, data, [associated_data], 16)
+
+ def decrypt(
+ self,
+ nonce: bytes,
+ data: bytes,
+ associated_data: typing.Optional[bytes],
+ ) -> bytes:
+ if associated_data is None:
+ associated_data = b""
+
+ self._check_params(nonce, data, associated_data)
+ return aead._decrypt(backend, self, nonce, data, [associated_data], 16)
+
+ def _check_params(
+ self,
+ nonce: bytes,
+ data: bytes,
+ associated_data: bytes,
+ ) -> None:
+ utils._check_byteslike("nonce", nonce)
+ utils._check_byteslike("data", data)
+ utils._check_byteslike("associated_data", associated_data)
+ if len(nonce) < 8 or len(nonce) > 128:
+ raise ValueError("Nonce must be between 8 and 128 bytes")
+
+
+class AESOCB3:
+ _MAX_SIZE = 2**31 - 1
+
+ def __init__(self, key: bytes):
+ utils._check_byteslike("key", key)
+ if len(key) not in (16, 24, 32):
+ raise ValueError("AESOCB3 key must be 128, 192, or 256 bits.")
+
+ self._key = key
+
+ if not backend.aead_cipher_supported(self):
+ raise exceptions.UnsupportedAlgorithm(
+ "OCB3 is not supported by this version of OpenSSL",
+ exceptions._Reasons.UNSUPPORTED_CIPHER,
+ )
+
+ @classmethod
+ def generate_key(cls, bit_length: int) -> bytes:
+ if not isinstance(bit_length, int):
+ raise TypeError("bit_length must be an integer")
+
+ if bit_length not in (128, 192, 256):
+ raise ValueError("bit_length must be 128, 192, or 256")
+
+ return os.urandom(bit_length // 8)
+
+ def encrypt(
+ self,
+ nonce: bytes,
+ data: bytes,
+ associated_data: typing.Optional[bytes],
+ ) -> bytes:
+ if associated_data is None:
+ associated_data = b""
+
+ if len(data) > self._MAX_SIZE or len(associated_data) > self._MAX_SIZE:
+ # This is OverflowError to match what cffi would raise
+ raise OverflowError(
+ "Data or associated data too long. Max 2**31 - 1 bytes"
+ )
+
+ self._check_params(nonce, data, associated_data)
+ return aead._encrypt(backend, self, nonce, data, [associated_data], 16)
+
+ def decrypt(
+ self,
+ nonce: bytes,
+ data: bytes,
+ associated_data: typing.Optional[bytes],
+ ) -> bytes:
+ if associated_data is None:
+ associated_data = b""
+
+ self._check_params(nonce, data, associated_data)
+ return aead._decrypt(backend, self, nonce, data, [associated_data], 16)
+
+ def _check_params(
+ self,
+ nonce: bytes,
+ data: bytes,
+ associated_data: bytes,
+ ) -> None:
+ utils._check_byteslike("nonce", nonce)
+ utils._check_byteslike("data", data)
+ utils._check_byteslike("associated_data", associated_data)
+ if len(nonce) < 12 or len(nonce) > 15:
+ raise ValueError("Nonce must be between 12 and 15 bytes")
+
+
+class AESSIV:
+ _MAX_SIZE = 2**31 - 1
+
+ def __init__(self, key: bytes):
+ utils._check_byteslike("key", key)
+ if len(key) not in (32, 48, 64):
+ raise ValueError("AESSIV key must be 256, 384, or 512 bits.")
+
+ self._key = key
+
+ if not backend.aead_cipher_supported(self):
+ raise exceptions.UnsupportedAlgorithm(
+ "AES-SIV is not supported by this version of OpenSSL",
+ exceptions._Reasons.UNSUPPORTED_CIPHER,
+ )
+
+ @classmethod
+ def generate_key(cls, bit_length: int) -> bytes:
+ if not isinstance(bit_length, int):
+ raise TypeError("bit_length must be an integer")
+
+ if bit_length not in (256, 384, 512):
+ raise ValueError("bit_length must be 256, 384, or 512")
+
+ return os.urandom(bit_length // 8)
+
+ def encrypt(
+ self,
+ data: bytes,
+ associated_data: typing.Optional[typing.List[bytes]],
+ ) -> bytes:
+ if associated_data is None:
+ associated_data = []
+
+ self._check_params(data, associated_data)
+
+ if len(data) > self._MAX_SIZE or any(
+ len(ad) > self._MAX_SIZE for ad in associated_data
+ ):
+ # This is OverflowError to match what cffi would raise
+ raise OverflowError(
+ "Data or associated data too long. Max 2**31 - 1 bytes"
+ )
+
+ return aead._encrypt(backend, self, b"", data, associated_data, 16)
+
+ def decrypt(
+ self,
+ data: bytes,
+ associated_data: typing.Optional[typing.List[bytes]],
+ ) -> bytes:
+ if associated_data is None:
+ associated_data = []
+
+ self._check_params(data, associated_data)
+
+ return aead._decrypt(backend, self, b"", data, associated_data, 16)
+
+ def _check_params(
+ self,
+ data: bytes,
+ associated_data: typing.List[bytes],
+ ) -> None:
+ utils._check_byteslike("data", data)
+ if len(data) == 0:
+ raise ValueError("data must not be zero length")
+
+ if not isinstance(associated_data, list):
+ raise TypeError(
+ "associated_data must be a list of bytes-like objects or None"
+ )
+ for x in associated_data:
+ utils._check_byteslike("associated_data elements", x)
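All five AEAD classes share the same one-shot shape; a sketch with AESGCM:

```python
import os

from cryptography.hazmat.primitives.ciphers.aead import AESGCM

key = AESGCM.generate_key(bit_length=256)
aesgcm = AESGCM(key)
nonce = os.urandom(12)            # 96 bits is the conventional GCM nonce

ct = aesgcm.encrypt(nonce, b"secret payload", b"public header")
assert aesgcm.decrypt(nonce, ct, b"public header") == b"secret payload"
# Tampering with nonce, ciphertext, or AAD raises InvalidTag on decrypt.
```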
diff --git a/billinglayer/python/cryptography/hazmat/primitives/ciphers/algorithms.py b/billinglayer/python/cryptography/hazmat/primitives/ciphers/algorithms.py
new file mode 100644
index 0000000..4bfc5d8
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/ciphers/algorithms.py
@@ -0,0 +1,228 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+from cryptography import utils
+from cryptography.hazmat.primitives.ciphers import (
+ BlockCipherAlgorithm,
+ CipherAlgorithm,
+)
+
+
+def _verify_key_size(algorithm: CipherAlgorithm, key: bytes) -> bytes:
+ # Verify that the key is a bytes-like object
+ utils._check_byteslike("key", key)
+
+ # Verify that the key size matches the expected key size
+ if len(key) * 8 not in algorithm.key_sizes:
+ raise ValueError(
+ "Invalid key size ({}) for {}.".format(
+ len(key) * 8, algorithm.name
+ )
+ )
+ return key
+
+
+class AES(BlockCipherAlgorithm):
+ name = "AES"
+ block_size = 128
+ # 512 added to support AES-256-XTS, which uses 512-bit keys
+ key_sizes = frozenset([128, 192, 256, 512])
+
+ def __init__(self, key: bytes):
+ self.key = _verify_key_size(self, key)
+
+ @property
+ def key_size(self) -> int:
+ return len(self.key) * 8
+
+
+class AES128(BlockCipherAlgorithm):
+ name = "AES"
+ block_size = 128
+ key_sizes = frozenset([128])
+ key_size = 128
+
+ def __init__(self, key: bytes):
+ self.key = _verify_key_size(self, key)
+
+
+class AES256(BlockCipherAlgorithm):
+ name = "AES"
+ block_size = 128
+ key_sizes = frozenset([256])
+ key_size = 256
+
+ def __init__(self, key: bytes):
+ self.key = _verify_key_size(self, key)
+
+
+class Camellia(BlockCipherAlgorithm):
+ name = "camellia"
+ block_size = 128
+ key_sizes = frozenset([128, 192, 256])
+
+ def __init__(self, key: bytes):
+ self.key = _verify_key_size(self, key)
+
+ @property
+ def key_size(self) -> int:
+ return len(self.key) * 8
+
+
+class TripleDES(BlockCipherAlgorithm):
+ name = "3DES"
+ block_size = 64
+ key_sizes = frozenset([64, 128, 192])
+
+ def __init__(self, key: bytes):
+ if len(key) == 8:
+ key += key + key
+ elif len(key) == 16:
+ key += key[:8]
+ self.key = _verify_key_size(self, key)
+
+ @property
+ def key_size(self) -> int:
+ return len(self.key) * 8
+
+
+class Blowfish(BlockCipherAlgorithm):
+ name = "Blowfish"
+ block_size = 64
+ key_sizes = frozenset(range(32, 449, 8))
+
+ def __init__(self, key: bytes):
+ self.key = _verify_key_size(self, key)
+
+ @property
+ def key_size(self) -> int:
+ return len(self.key) * 8
+
+
+_BlowfishInternal = Blowfish
+utils.deprecated(
+ Blowfish,
+ __name__,
+ "Blowfish has been deprecated",
+ utils.DeprecatedIn37,
+ name="Blowfish",
+)
+
+
+class CAST5(BlockCipherAlgorithm):
+ name = "CAST5"
+ block_size = 64
+ key_sizes = frozenset(range(40, 129, 8))
+
+ def __init__(self, key: bytes):
+ self.key = _verify_key_size(self, key)
+
+ @property
+ def key_size(self) -> int:
+ return len(self.key) * 8
+
+
+_CAST5Internal = CAST5
+utils.deprecated(
+ CAST5,
+ __name__,
+ "CAST5 has been deprecated",
+ utils.DeprecatedIn37,
+ name="CAST5",
+)
+
+
+class ARC4(CipherAlgorithm):
+ name = "RC4"
+ key_sizes = frozenset([40, 56, 64, 80, 128, 160, 192, 256])
+
+ def __init__(self, key: bytes):
+ self.key = _verify_key_size(self, key)
+
+ @property
+ def key_size(self) -> int:
+ return len(self.key) * 8
+
+
+class IDEA(BlockCipherAlgorithm):
+ name = "IDEA"
+ block_size = 64
+ key_sizes = frozenset([128])
+
+ def __init__(self, key: bytes):
+ self.key = _verify_key_size(self, key)
+
+ @property
+ def key_size(self) -> int:
+ return len(self.key) * 8
+
+
+_IDEAInternal = IDEA
+utils.deprecated(
+ IDEA,
+ __name__,
+ "IDEA has been deprecated",
+ utils.DeprecatedIn37,
+ name="IDEA",
+)
+
+
+class SEED(BlockCipherAlgorithm):
+ name = "SEED"
+ block_size = 128
+ key_sizes = frozenset([128])
+
+ def __init__(self, key: bytes):
+ self.key = _verify_key_size(self, key)
+
+ @property
+ def key_size(self) -> int:
+ return len(self.key) * 8
+
+
+_SEEDInternal = SEED
+utils.deprecated(
+ SEED,
+ __name__,
+ "SEED has been deprecated",
+ utils.DeprecatedIn37,
+ name="SEED",
+)
+
+
+class ChaCha20(CipherAlgorithm):
+ name = "ChaCha20"
+ key_sizes = frozenset([256])
+
+ def __init__(self, key: bytes, nonce: bytes):
+ self.key = _verify_key_size(self, key)
+ utils._check_byteslike("nonce", nonce)
+
+ if len(nonce) != 16:
+ raise ValueError("nonce must be 128-bits (16 bytes)")
+
+ self._nonce = nonce
+
+ @property
+ def nonce(self) -> bytes:
+ return self._nonce
+
+ @property
+ def key_size(self) -> int:
+ return len(self.key) * 8
+
+
+class SM4(BlockCipherAlgorithm):
+ name = "SM4"
+ block_size = 128
+ key_sizes = frozenset([128])
+
+ def __init__(self, key: bytes):
+ self.key = _verify_key_size(self, key)
+
+ @property
+ def key_size(self) -> int:
+ return len(self.key) * 8
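A sketch of how _verify_key_size plays out in practice, including TripleDES's silent expansion of 8- and 16-byte keys:

```python
from cryptography.hazmat.primitives.ciphers import algorithms

assert algorithms.AES(b"\x00" * 16).key_size == 128
assert algorithms.TripleDES(b"\x01" * 8).key_size == 192  # k1 = k2 = k3

try:
    algorithms.AES(b"\x00" * 15)
except ValueError as exc:
    print(exc)  # Invalid key size (120) for AES.
```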
diff --git a/billinglayer/python/cryptography/hazmat/primitives/ciphers/base.py b/billinglayer/python/cryptography/hazmat/primitives/ciphers/base.py
new file mode 100644
index 0000000..38a2ebb
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/ciphers/base.py
@@ -0,0 +1,269 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+import typing
+
+from cryptography.exceptions import (
+ AlreadyFinalized,
+ AlreadyUpdated,
+ NotYetFinalized,
+)
+from cryptography.hazmat.primitives._cipheralgorithm import CipherAlgorithm
+from cryptography.hazmat.primitives.ciphers import modes
+
+if typing.TYPE_CHECKING:
+ from cryptography.hazmat.backends.openssl.ciphers import (
+ _CipherContext as _BackendCipherContext,
+ )
+
+
+class CipherContext(metaclass=abc.ABCMeta):
+ @abc.abstractmethod
+ def update(self, data: bytes) -> bytes:
+ """
+ Processes the provided bytes through the cipher and returns the results
+ as bytes.
+ """
+
+ @abc.abstractmethod
+ def update_into(self, data: bytes, buf: bytes) -> int:
+ """
+ Processes the provided bytes and writes the resulting data into the
+ provided buffer. Returns the number of bytes written.
+ """
+
+ @abc.abstractmethod
+ def finalize(self) -> bytes:
+ """
+ Returns the results of processing the final block as bytes.
+ """
+
+
+class AEADCipherContext(CipherContext, metaclass=abc.ABCMeta):
+ @abc.abstractmethod
+ def authenticate_additional_data(self, data: bytes) -> None:
+ """
+ Authenticates the provided bytes.
+ """
+
+
+class AEADDecryptionContext(AEADCipherContext, metaclass=abc.ABCMeta):
+ @abc.abstractmethod
+ def finalize_with_tag(self, tag: bytes) -> bytes:
+ """
+ Returns the results of processing the final block as bytes and allows
+ delayed passing of the authentication tag.
+ """
+
+
+class AEADEncryptionContext(AEADCipherContext, metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def tag(self) -> bytes:
+ """
+ Returns tag bytes. This is only available after encryption is
+ finalized.
+ """
+
+
+Mode = typing.TypeVar(
+ "Mode", bound=typing.Optional[modes.Mode], covariant=True
+)
+
+
+class Cipher(typing.Generic[Mode]):
+ def __init__(
+ self,
+ algorithm: CipherAlgorithm,
+ mode: Mode,
+ backend: typing.Any = None,
+ ) -> None:
+ if not isinstance(algorithm, CipherAlgorithm):
+ raise TypeError("Expected interface of CipherAlgorithm.")
+
+ if mode is not None:
+ # mypy needs this assert to narrow the type from our generic
+ # type. Maybe it won't need this some time in the future.
+ assert isinstance(mode, modes.Mode)
+ mode.validate_for_algorithm(algorithm)
+
+ self.algorithm = algorithm
+ self.mode = mode
+
+ @typing.overload
+ def encryptor(
+ self: Cipher[modes.ModeWithAuthenticationTag],
+ ) -> AEADEncryptionContext:
+ ...
+
+ @typing.overload
+ def encryptor(
+ self: _CIPHER_TYPE,
+ ) -> CipherContext:
+ ...
+
+ def encryptor(self):
+ if isinstance(self.mode, modes.ModeWithAuthenticationTag):
+ if self.mode.tag is not None:
+ raise ValueError(
+ "Authentication tag must be None when encrypting."
+ )
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ ctx = backend.create_symmetric_encryption_ctx(
+ self.algorithm, self.mode
+ )
+ return self._wrap_ctx(ctx, encrypt=True)
+
+ @typing.overload
+ def decryptor(
+ self: Cipher[modes.ModeWithAuthenticationTag],
+ ) -> AEADDecryptionContext:
+ ...
+
+ @typing.overload
+ def decryptor(
+ self: _CIPHER_TYPE,
+ ) -> CipherContext:
+ ...
+
+ def decryptor(self):
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ ctx = backend.create_symmetric_decryption_ctx(
+ self.algorithm, self.mode
+ )
+ return self._wrap_ctx(ctx, encrypt=False)
+
+ def _wrap_ctx(
+ self, ctx: _BackendCipherContext, encrypt: bool
+ ) -> typing.Union[
+ AEADEncryptionContext, AEADDecryptionContext, CipherContext
+ ]:
+ if isinstance(self.mode, modes.ModeWithAuthenticationTag):
+ if encrypt:
+ return _AEADEncryptionContext(ctx)
+ else:
+ return _AEADDecryptionContext(ctx)
+ else:
+ return _CipherContext(ctx)
+
+
+_CIPHER_TYPE = Cipher[
+ typing.Union[
+ modes.ModeWithNonce,
+ modes.ModeWithTweak,
+ None,
+ modes.ECB,
+ modes.ModeWithInitializationVector,
+ ]
+]
+
+
+class _CipherContext(CipherContext):
+ _ctx: typing.Optional[_BackendCipherContext]
+
+ def __init__(self, ctx: _BackendCipherContext) -> None:
+ self._ctx = ctx
+
+ def update(self, data: bytes) -> bytes:
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+ return self._ctx.update(data)
+
+ def update_into(self, data: bytes, buf: bytes) -> int:
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+ return self._ctx.update_into(data, buf)
+
+ def finalize(self) -> bytes:
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+ data = self._ctx.finalize()
+ self._ctx = None
+ return data
+
+
+class _AEADCipherContext(AEADCipherContext):
+ _ctx: typing.Optional[_BackendCipherContext]
+ _tag: typing.Optional[bytes]
+
+ def __init__(self, ctx: _BackendCipherContext) -> None:
+ self._ctx = ctx
+ self._bytes_processed = 0
+ self._aad_bytes_processed = 0
+ self._tag = None
+ self._updated = False
+
+ def _check_limit(self, data_size: int) -> None:
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+ self._updated = True
+ self._bytes_processed += data_size
+ if self._bytes_processed > self._ctx._mode._MAX_ENCRYPTED_BYTES:
+ raise ValueError(
+ "{} has a maximum encrypted byte limit of {}".format(
+ self._ctx._mode.name, self._ctx._mode._MAX_ENCRYPTED_BYTES
+ )
+ )
+
+ def update(self, data: bytes) -> bytes:
+ self._check_limit(len(data))
+ # mypy needs this assert even though _check_limit already checked
+ assert self._ctx is not None
+ return self._ctx.update(data)
+
+ def update_into(self, data: bytes, buf: bytes) -> int:
+ self._check_limit(len(data))
+ # mypy needs this assert even though _check_limit already checked
+ assert self._ctx is not None
+ return self._ctx.update_into(data, buf)
+
+ def finalize(self) -> bytes:
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+ data = self._ctx.finalize()
+ self._tag = self._ctx.tag
+ self._ctx = None
+ return data
+
+ def authenticate_additional_data(self, data: bytes) -> None:
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+ if self._updated:
+ raise AlreadyUpdated("Update has been called on this context.")
+
+ self._aad_bytes_processed += len(data)
+ if self._aad_bytes_processed > self._ctx._mode._MAX_AAD_BYTES:
+ raise ValueError(
+ "{} has a maximum AAD byte limit of {}".format(
+ self._ctx._mode.name, self._ctx._mode._MAX_AAD_BYTES
+ )
+ )
+
+ self._ctx.authenticate_additional_data(data)
+
+
+class _AEADDecryptionContext(_AEADCipherContext, AEADDecryptionContext):
+ def finalize_with_tag(self, tag: bytes) -> bytes:
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+ data = self._ctx.finalize_with_tag(tag)
+ self._tag = self._ctx.tag
+ self._ctx = None
+ return data
+
+
+class _AEADEncryptionContext(_AEADCipherContext, AEADEncryptionContext):
+ @property
+ def tag(self) -> bytes:
+ if self._ctx is not None:
+ raise NotYetFinalized(
+ "You must finalize encryption before " "getting the tag."
+ )
+ assert self._tag is not None
+ return self._tag
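A sketch of the plain (non-AEAD) context flow; CBC with exactly one full block, so no padding layer is needed here:

```python
import os

from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

key, iv = os.urandom(32), os.urandom(16)
cipher = Cipher(algorithms.AES(key), modes.CBC(iv))

enc = cipher.encryptor()                      # a CipherContext
ct = enc.update(b"sixteen byte blk") + enc.finalize()

dec = cipher.decryptor()
assert dec.update(ct) + dec.finalize() == b"sixteen byte blk"
```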
diff --git a/billinglayer/python/cryptography/hazmat/primitives/ciphers/modes.py b/billinglayer/python/cryptography/hazmat/primitives/ciphers/modes.py
new file mode 100644
index 0000000..d8ea188
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/ciphers/modes.py
@@ -0,0 +1,274 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+import typing
+
+from cryptography import utils
+from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
+from cryptography.hazmat.primitives._cipheralgorithm import (
+ BlockCipherAlgorithm,
+ CipherAlgorithm,
+)
+from cryptography.hazmat.primitives.ciphers import algorithms
+
+
+class Mode(metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def name(self) -> str:
+ """
+ A string naming this mode (e.g. "ECB", "CBC").
+ """
+
+ @abc.abstractmethod
+ def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None:
+ """
+ Checks that all the necessary invariants of this (mode, algorithm)
+ combination are met.
+ """
+
+
+class ModeWithInitializationVector(Mode, metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def initialization_vector(self) -> bytes:
+ """
+ The value of the initialization vector for this mode as bytes.
+ """
+
+
+class ModeWithTweak(Mode, metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def tweak(self) -> bytes:
+ """
+ The value of the tweak for this mode as bytes.
+ """
+
+
+class ModeWithNonce(Mode, metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def nonce(self) -> bytes:
+ """
+ The value of the nonce for this mode as bytes.
+ """
+
+
+class ModeWithAuthenticationTag(Mode, metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def tag(self) -> typing.Optional[bytes]:
+ """
+ The value of the tag supplied to the constructor of this mode.
+ """
+
+
+def _check_aes_key_length(self: Mode, algorithm: CipherAlgorithm) -> None:
+ if algorithm.key_size > 256 and algorithm.name == "AES":
+ raise ValueError(
+ "Only 128, 192, and 256 bit keys are allowed for this AES mode"
+ )
+
+
+def _check_iv_length(
+ self: ModeWithInitializationVector, algorithm: BlockCipherAlgorithm
+) -> None:
+ if len(self.initialization_vector) * 8 != algorithm.block_size:
+ raise ValueError(
+ "Invalid IV size ({}) for {}.".format(
+ len(self.initialization_vector), self.name
+ )
+ )
+
+
+def _check_nonce_length(
+ nonce: bytes, name: str, algorithm: CipherAlgorithm
+) -> None:
+ if not isinstance(algorithm, BlockCipherAlgorithm):
+ raise UnsupportedAlgorithm(
+ f"{name} requires a block cipher algorithm",
+ _Reasons.UNSUPPORTED_CIPHER,
+ )
+ if len(nonce) * 8 != algorithm.block_size:
+ raise ValueError(f"Invalid nonce size ({len(nonce)}) for {name}.")
+
+
+def _check_iv_and_key_length(
+ self: ModeWithInitializationVector, algorithm: CipherAlgorithm
+) -> None:
+ if not isinstance(algorithm, BlockCipherAlgorithm):
+ raise UnsupportedAlgorithm(
+ f"{self} requires a block cipher algorithm",
+ _Reasons.UNSUPPORTED_CIPHER,
+ )
+ _check_aes_key_length(self, algorithm)
+ _check_iv_length(self, algorithm)
+
+
+class CBC(ModeWithInitializationVector):
+ name = "CBC"
+
+ def __init__(self, initialization_vector: bytes):
+ utils._check_byteslike("initialization_vector", initialization_vector)
+ self._initialization_vector = initialization_vector
+
+ @property
+ def initialization_vector(self) -> bytes:
+ return self._initialization_vector
+
+ validate_for_algorithm = _check_iv_and_key_length
+
+
+class XTS(ModeWithTweak):
+ name = "XTS"
+
+ def __init__(self, tweak: bytes):
+ utils._check_byteslike("tweak", tweak)
+
+ if len(tweak) != 16:
+ raise ValueError("tweak must be 128-bits (16 bytes)")
+
+ self._tweak = tweak
+
+ @property
+ def tweak(self) -> bytes:
+ return self._tweak
+
+ def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None:
+ if isinstance(algorithm, (algorithms.AES128, algorithms.AES256)):
+ raise TypeError(
+ "The AES128 and AES256 classes do not support XTS, please use "
+ "the standard AES class instead."
+ )
+
+ if algorithm.key_size not in (256, 512):
+ raise ValueError(
+ "The XTS specification requires a 256-bit key for AES-128-XTS"
+ " and 512-bit key for AES-256-XTS"
+ )
+
+
+class ECB(Mode):
+ name = "ECB"
+
+ validate_for_algorithm = _check_aes_key_length
+
+
+class OFB(ModeWithInitializationVector):
+ name = "OFB"
+
+ def __init__(self, initialization_vector: bytes):
+ utils._check_byteslike("initialization_vector", initialization_vector)
+ self._initialization_vector = initialization_vector
+
+ @property
+ def initialization_vector(self) -> bytes:
+ return self._initialization_vector
+
+ validate_for_algorithm = _check_iv_and_key_length
+
+
+class CFB(ModeWithInitializationVector):
+ name = "CFB"
+
+ def __init__(self, initialization_vector: bytes):
+ utils._check_byteslike("initialization_vector", initialization_vector)
+ self._initialization_vector = initialization_vector
+
+ @property
+ def initialization_vector(self) -> bytes:
+ return self._initialization_vector
+
+ validate_for_algorithm = _check_iv_and_key_length
+
+
+class CFB8(ModeWithInitializationVector):
+ name = "CFB8"
+
+ def __init__(self, initialization_vector: bytes):
+ utils._check_byteslike("initialization_vector", initialization_vector)
+ self._initialization_vector = initialization_vector
+
+ @property
+ def initialization_vector(self) -> bytes:
+ return self._initialization_vector
+
+ validate_for_algorithm = _check_iv_and_key_length
+
+
+class CTR(ModeWithNonce):
+ name = "CTR"
+
+ def __init__(self, nonce: bytes):
+ utils._check_byteslike("nonce", nonce)
+ self._nonce = nonce
+
+ @property
+ def nonce(self) -> bytes:
+ return self._nonce
+
+ def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None:
+ _check_aes_key_length(self, algorithm)
+ _check_nonce_length(self.nonce, self.name, algorithm)
+
+
+class GCM(ModeWithInitializationVector, ModeWithAuthenticationTag):
+ name = "GCM"
+ _MAX_ENCRYPTED_BYTES = (2**39 - 256) // 8
+ _MAX_AAD_BYTES = (2**64) // 8
+
+ def __init__(
+ self,
+ initialization_vector: bytes,
+ tag: typing.Optional[bytes] = None,
+ min_tag_length: int = 16,
+ ):
+ # OpenSSL 3.0.0 constrains GCM IVs to [64, 1024] bits inclusive
+ # This is a sane limit anyway so we'll enforce it here.
+ utils._check_byteslike("initialization_vector", initialization_vector)
+ if len(initialization_vector) < 8 or len(initialization_vector) > 128:
+ raise ValueError(
+ "initialization_vector must be between 8 and 128 bytes (64 "
+ "and 1024 bits)."
+ )
+ self._initialization_vector = initialization_vector
+ if tag is not None:
+ utils._check_bytes("tag", tag)
+ if min_tag_length < 4:
+ raise ValueError("min_tag_length must be >= 4")
+ if len(tag) < min_tag_length:
+ raise ValueError(
+ "Authentication tag must be {} bytes or longer.".format(
+ min_tag_length
+ )
+ )
+ self._tag = tag
+ self._min_tag_length = min_tag_length
+
+ @property
+ def tag(self) -> typing.Optional[bytes]:
+ return self._tag
+
+ @property
+ def initialization_vector(self) -> bytes:
+ return self._initialization_vector
+
+ def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None:
+ _check_aes_key_length(self, algorithm)
+ if not isinstance(algorithm, BlockCipherAlgorithm):
+ raise UnsupportedAlgorithm(
+ "GCM requires a block cipher algorithm",
+ _Reasons.UNSUPPORTED_CIPHER,
+ )
+ block_size_bytes = algorithm.block_size // 8
+ if self._tag is not None and len(self._tag) > block_size_bytes:
+ raise ValueError(
+ "Authentication tag cannot be more than {} bytes.".format(
+ block_size_bytes
+ )
+ )
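Constructing GCM without a tag enables the deferred finalize_with_tag path on decryption; a sketch:

```python
import os

from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

key, iv = os.urandom(32), os.urandom(12)

enc = Cipher(algorithms.AES(key), modes.GCM(iv)).encryptor()
enc.authenticate_additional_data(b"header")
ct = enc.update(b"secret") + enc.finalize()
tag = enc.tag                              # only available after finalize()

dec = Cipher(algorithms.AES(key), modes.GCM(iv)).decryptor()
dec.authenticate_additional_data(b"header")
assert dec.update(ct) + dec.finalize_with_tag(tag) == b"secret"
```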
diff --git a/billinglayer/python/cryptography/hazmat/primitives/cmac.py b/billinglayer/python/cryptography/hazmat/primitives/cmac.py
new file mode 100644
index 0000000..8aa1d79
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/cmac.py
@@ -0,0 +1,65 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import typing
+
+from cryptography import utils
+from cryptography.exceptions import AlreadyFinalized
+from cryptography.hazmat.primitives import ciphers
+
+if typing.TYPE_CHECKING:
+ from cryptography.hazmat.backends.openssl.cmac import _CMACContext
+
+
+class CMAC:
+ _ctx: typing.Optional[_CMACContext]
+ _algorithm: ciphers.BlockCipherAlgorithm
+
+ def __init__(
+ self,
+ algorithm: ciphers.BlockCipherAlgorithm,
+ backend: typing.Any = None,
+ ctx: typing.Optional[_CMACContext] = None,
+ ) -> None:
+ if not isinstance(algorithm, ciphers.BlockCipherAlgorithm):
+ raise TypeError("Expected instance of BlockCipherAlgorithm.")
+ self._algorithm = algorithm
+
+ if ctx is None:
+ from cryptography.hazmat.backends.openssl.backend import (
+ backend as ossl,
+ )
+
+ self._ctx = ossl.create_cmac_ctx(self._algorithm)
+ else:
+ self._ctx = ctx
+
+ def update(self, data: bytes) -> None:
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+
+ utils._check_bytes("data", data)
+ self._ctx.update(data)
+
+ def finalize(self) -> bytes:
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+ digest = self._ctx.finalize()
+ self._ctx = None
+ return digest
+
+ def verify(self, signature: bytes) -> None:
+ utils._check_bytes("signature", signature)
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+
+ ctx, self._ctx = self._ctx, None
+ ctx.verify(signature)
+
+ def copy(self) -> CMAC:
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+ return CMAC(self._algorithm, ctx=self._ctx.copy())
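A sketch of the generate/verify pair:

```python
import os

from cryptography.hazmat.primitives import cmac
from cryptography.hazmat.primitives.ciphers import algorithms

key = os.urandom(16)

c = cmac.CMAC(algorithms.AES(key))
c.update(b"message to authenticate")
tag = c.finalize()

v = cmac.CMAC(algorithms.AES(key))
v.update(b"message to authenticate")
v.verify(tag)   # returns None on success, raises InvalidSignature otherwise
```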
diff --git a/billinglayer/python/cryptography/hazmat/primitives/constant_time.py b/billinglayer/python/cryptography/hazmat/primitives/constant_time.py
new file mode 100644
index 0000000..3975c71
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/constant_time.py
@@ -0,0 +1,14 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import hmac
+
+
+def bytes_eq(a: bytes, b: bytes) -> bool:
+ if not isinstance(a, bytes) or not isinstance(b, bytes):
+ raise TypeError("a and b must be bytes.")
+
+ return hmac.compare_digest(a, b)
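bytes_eq in one line: a timing-safe comparison for MACs and other secrets:

```python
from cryptography.hazmat.primitives import constant_time

assert constant_time.bytes_eq(b"\x01\x02", b"\x01\x02")
assert not constant_time.bytes_eq(b"\x01\x02", b"\x01\x03")
```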
diff --git a/billinglayer/python/cryptography/hazmat/primitives/hashes.py b/billinglayer/python/cryptography/hazmat/primitives/hashes.py
new file mode 100644
index 0000000..b6a7ff1
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/hashes.py
@@ -0,0 +1,243 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+import typing
+
+from cryptography.hazmat.bindings._rust import openssl as rust_openssl
+
+__all__ = [
+ "HashAlgorithm",
+ "HashContext",
+ "Hash",
+ "ExtendableOutputFunction",
+ "SHA1",
+ "SHA512_224",
+ "SHA512_256",
+ "SHA224",
+ "SHA256",
+ "SHA384",
+ "SHA512",
+ "SHA3_224",
+ "SHA3_256",
+ "SHA3_384",
+ "SHA3_512",
+ "SHAKE128",
+ "SHAKE256",
+ "MD5",
+ "BLAKE2b",
+ "BLAKE2s",
+ "SM3",
+]
+
+
+class HashAlgorithm(metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def name(self) -> str:
+ """
+ A string naming this algorithm (e.g. "sha256", "md5").
+ """
+
+ @property
+ @abc.abstractmethod
+ def digest_size(self) -> int:
+ """
+ The size of the resulting digest in bytes.
+ """
+
+ @property
+ @abc.abstractmethod
+ def block_size(self) -> typing.Optional[int]:
+ """
+ The internal block size of the hash function, or None if the hash
+ function does not use blocks internally (e.g. SHA3).
+ """
+
+
+class HashContext(metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def algorithm(self) -> HashAlgorithm:
+ """
+ A HashAlgorithm that will be used by this context.
+ """
+
+ @abc.abstractmethod
+ def update(self, data: bytes) -> None:
+ """
+ Processes the provided bytes through the hash.
+ """
+
+ @abc.abstractmethod
+ def finalize(self) -> bytes:
+ """
+ Finalizes the hash context and returns the hash digest as bytes.
+ """
+
+ @abc.abstractmethod
+ def copy(self) -> HashContext:
+ """
+ Return a HashContext that is a copy of the current context.
+ """
+
+
+Hash = rust_openssl.hashes.Hash
+HashContext.register(Hash)
+
+
+class ExtendableOutputFunction(metaclass=abc.ABCMeta):
+ """
+ An interface for extendable output functions.
+ """
+
+
+class SHA1(HashAlgorithm):
+ name = "sha1"
+ digest_size = 20
+ block_size = 64
+
+
+class SHA512_224(HashAlgorithm): # noqa: N801
+ name = "sha512-224"
+ digest_size = 28
+ block_size = 128
+
+
+class SHA512_256(HashAlgorithm): # noqa: N801
+ name = "sha512-256"
+ digest_size = 32
+ block_size = 128
+
+
+class SHA224(HashAlgorithm):
+ name = "sha224"
+ digest_size = 28
+ block_size = 64
+
+
+class SHA256(HashAlgorithm):
+ name = "sha256"
+ digest_size = 32
+ block_size = 64
+
+
+class SHA384(HashAlgorithm):
+ name = "sha384"
+ digest_size = 48
+ block_size = 128
+
+
+class SHA512(HashAlgorithm):
+ name = "sha512"
+ digest_size = 64
+ block_size = 128
+
+
+class SHA3_224(HashAlgorithm): # noqa: N801
+ name = "sha3-224"
+ digest_size = 28
+ block_size = None
+
+
+class SHA3_256(HashAlgorithm): # noqa: N801
+ name = "sha3-256"
+ digest_size = 32
+ block_size = None
+
+
+class SHA3_384(HashAlgorithm): # noqa: N801
+ name = "sha3-384"
+ digest_size = 48
+ block_size = None
+
+
+class SHA3_512(HashAlgorithm): # noqa: N801
+ name = "sha3-512"
+ digest_size = 64
+ block_size = None
+
+
+class SHAKE128(HashAlgorithm, ExtendableOutputFunction):
+ name = "shake128"
+ block_size = None
+
+ def __init__(self, digest_size: int):
+ if not isinstance(digest_size, int):
+ raise TypeError("digest_size must be an integer")
+
+ if digest_size < 1:
+ raise ValueError("digest_size must be a positive integer")
+
+ self._digest_size = digest_size
+
+ @property
+ def digest_size(self) -> int:
+ return self._digest_size
+
+
+class SHAKE256(HashAlgorithm, ExtendableOutputFunction):
+ name = "shake256"
+ block_size = None
+
+ def __init__(self, digest_size: int):
+ if not isinstance(digest_size, int):
+ raise TypeError("digest_size must be an integer")
+
+ if digest_size < 1:
+ raise ValueError("digest_size must be a positive integer")
+
+ self._digest_size = digest_size
+
+ @property
+ def digest_size(self) -> int:
+ return self._digest_size
+
+
+class MD5(HashAlgorithm):
+ name = "md5"
+ digest_size = 16
+ block_size = 64
+
+
+class BLAKE2b(HashAlgorithm):
+ name = "blake2b"
+ _max_digest_size = 64
+ _min_digest_size = 1
+ block_size = 128
+
+ def __init__(self, digest_size: int):
+ if digest_size != 64:
+ raise ValueError("Digest size must be 64")
+
+ self._digest_size = digest_size
+
+ @property
+ def digest_size(self) -> int:
+ return self._digest_size
+
+
+class BLAKE2s(HashAlgorithm):
+ name = "blake2s"
+ block_size = 64
+ _max_digest_size = 32
+ _min_digest_size = 1
+
+ def __init__(self, digest_size: int):
+ if digest_size != 32:
+ raise ValueError("Digest size must be 32")
+
+ self._digest_size = digest_size
+
+ @property
+ def digest_size(self) -> int:
+ return self._digest_size
+
+
+class SM3(HashAlgorithm):
+ name = "sm3"
+ digest_size = 32
+ block_size = 64
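A sketch of the Hash context in use, including an extendable-output function where the caller picks the digest size:

```python
from cryptography.hazmat.primitives import hashes

h = hashes.Hash(hashes.SHA256())
h.update(b"abc")
assert len(h.finalize()) == hashes.SHA256.digest_size  # 32 bytes

xof = hashes.Hash(hashes.SHAKE256(digest_size=64))     # XOF: size chosen here
xof.update(b"abc")
assert len(xof.finalize()) == 64
```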
diff --git a/billinglayer/python/cryptography/hazmat/primitives/hmac.py b/billinglayer/python/cryptography/hazmat/primitives/hmac.py
new file mode 100644
index 0000000..a9442d5
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/hmac.py
@@ -0,0 +1,13 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+from cryptography.hazmat.bindings._rust import openssl as rust_openssl
+from cryptography.hazmat.primitives import hashes
+
+__all__ = ["HMAC"]
+
+HMAC = rust_openssl.hmac.HMAC
+hashes.HashContext.register(HMAC)
diff --git a/billinglayer/python/cryptography/hazmat/primitives/kdf/__init__.py b/billinglayer/python/cryptography/hazmat/primitives/kdf/__init__.py
new file mode 100644
index 0000000..79bb459
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/kdf/__init__.py
@@ -0,0 +1,23 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+
+
+class KeyDerivationFunction(metaclass=abc.ABCMeta):
+ @abc.abstractmethod
+ def derive(self, key_material: bytes) -> bytes:
+ """
+ Deterministically generates and returns a new key based on the existing
+ key material.
+ """
+
+ @abc.abstractmethod
+ def verify(self, key_material: bytes, expected_key: bytes) -> None:
+ """
+ Checks whether the key generated by the key material matches the
+ expected derived key. Raises an exception if they do not match.
+ """
diff --git a/billinglayer/python/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..7eb3b69
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-311.pyc
new file mode 100644
index 0000000..14bfc96
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-311.pyc
new file mode 100644
index 0000000..e00178f
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-311.pyc
new file mode 100644
index 0000000..6a9c0ff
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-311.pyc
new file mode 100644
index 0000000..c232dda
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-311.pyc
new file mode 100644
index 0000000..80ff4bb
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-311.pyc
new file mode 100644
index 0000000..a99ac91
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/kdf/concatkdf.py b/billinglayer/python/cryptography/hazmat/primitives/kdf/concatkdf.py
new file mode 100644
index 0000000..d5ea58a
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/kdf/concatkdf.py
@@ -0,0 +1,124 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import typing
+
+from cryptography import utils
+from cryptography.exceptions import AlreadyFinalized, InvalidKey
+from cryptography.hazmat.primitives import constant_time, hashes, hmac
+from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
+
+
+def _int_to_u32be(n: int) -> bytes:
+ return n.to_bytes(length=4, byteorder="big")
+
+
+def _common_args_checks(
+ algorithm: hashes.HashAlgorithm,
+ length: int,
+ otherinfo: typing.Optional[bytes],
+) -> None:
+ max_length = algorithm.digest_size * (2**32 - 1)
+ if length > max_length:
+ raise ValueError(f"Cannot derive keys larger than {max_length} bits.")
+ if otherinfo is not None:
+ utils._check_bytes("otherinfo", otherinfo)
+
+
+def _concatkdf_derive(
+ key_material: bytes,
+ length: int,
+ auxfn: typing.Callable[[], hashes.HashContext],
+ otherinfo: bytes,
+) -> bytes:
+ utils._check_byteslike("key_material", key_material)
+ output = [b""]
+ outlen = 0
+ counter = 1
+
+ while length > outlen:
+ h = auxfn()
+ h.update(_int_to_u32be(counter))
+ h.update(key_material)
+ h.update(otherinfo)
+ output.append(h.finalize())
+ outlen += len(output[-1])
+ counter += 1
+
+ return b"".join(output)[:length]
+
+
+class ConcatKDFHash(KeyDerivationFunction):
+ def __init__(
+ self,
+ algorithm: hashes.HashAlgorithm,
+ length: int,
+ otherinfo: typing.Optional[bytes],
+ backend: typing.Any = None,
+ ):
+ _common_args_checks(algorithm, length, otherinfo)
+ self._algorithm = algorithm
+ self._length = length
+ self._otherinfo: bytes = otherinfo if otherinfo is not None else b""
+
+ self._used = False
+
+ def _hash(self) -> hashes.Hash:
+ return hashes.Hash(self._algorithm)
+
+ def derive(self, key_material: bytes) -> bytes:
+ if self._used:
+ raise AlreadyFinalized
+ self._used = True
+ return _concatkdf_derive(
+ key_material, self._length, self._hash, self._otherinfo
+ )
+
+ def verify(self, key_material: bytes, expected_key: bytes) -> None:
+ if not constant_time.bytes_eq(self.derive(key_material), expected_key):
+ raise InvalidKey
+
+
+class ConcatKDFHMAC(KeyDerivationFunction):
+ def __init__(
+ self,
+ algorithm: hashes.HashAlgorithm,
+ length: int,
+ salt: typing.Optional[bytes],
+ otherinfo: typing.Optional[bytes],
+ backend: typing.Any = None,
+ ):
+ _common_args_checks(algorithm, length, otherinfo)
+ self._algorithm = algorithm
+ self._length = length
+ self._otherinfo: bytes = otherinfo if otherinfo is not None else b""
+
+ if algorithm.block_size is None:
+ raise TypeError(f"{algorithm.name} is unsupported for ConcatKDF")
+
+ if salt is None:
+ salt = b"\x00" * algorithm.block_size
+ else:
+ utils._check_bytes("salt", salt)
+
+ self._salt = salt
+
+ self._used = False
+
+ def _hmac(self) -> hmac.HMAC:
+ return hmac.HMAC(self._salt, self._algorithm)
+
+ def derive(self, key_material: bytes) -> bytes:
+ if self._used:
+ raise AlreadyFinalized
+ self._used = True
+ return _concatkdf_derive(
+ key_material, self._length, self._hmac, self._otherinfo
+ )
+
+ def verify(self, key_material: bytes, expected_key: bytes) -> None:
+ if not constant_time.bytes_eq(self.derive(key_material), expected_key):
+ raise InvalidKey
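+
+
+# Illustrative usage (editor's sketch; the otherinfo value is a
+# placeholder):
+#
+#   ckdf = ConcatKDFHash(hashes.SHA256(), length=32, otherinfo=b"context")
+#   key = ckdf.derive(b"input key material")
+#
+# ConcatKDFHMAC is used the same way but also takes a salt, which defaults
+# to a block of zero bytes when None is given.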
diff --git a/billinglayer/python/cryptography/hazmat/primitives/kdf/hkdf.py b/billinglayer/python/cryptography/hazmat/primitives/kdf/hkdf.py
new file mode 100644
index 0000000..d476894
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/kdf/hkdf.py
@@ -0,0 +1,101 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import typing
+
+from cryptography import utils
+from cryptography.exceptions import AlreadyFinalized, InvalidKey
+from cryptography.hazmat.primitives import constant_time, hashes, hmac
+from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
+
+
+class HKDF(KeyDerivationFunction):
+ def __init__(
+ self,
+ algorithm: hashes.HashAlgorithm,
+ length: int,
+ salt: typing.Optional[bytes],
+ info: typing.Optional[bytes],
+ backend: typing.Any = None,
+ ):
+ self._algorithm = algorithm
+
+ if salt is None:
+ salt = b"\x00" * self._algorithm.digest_size
+ else:
+ utils._check_bytes("salt", salt)
+
+ self._salt = salt
+
+ self._hkdf_expand = HKDFExpand(self._algorithm, length, info)
+
+ def _extract(self, key_material: bytes) -> bytes:
+ h = hmac.HMAC(self._salt, self._algorithm)
+ h.update(key_material)
+ return h.finalize()
+
+ def derive(self, key_material: bytes) -> bytes:
+ utils._check_byteslike("key_material", key_material)
+ return self._hkdf_expand.derive(self._extract(key_material))
+
+ def verify(self, key_material: bytes, expected_key: bytes) -> None:
+ if not constant_time.bytes_eq(self.derive(key_material), expected_key):
+ raise InvalidKey
+
+
+class HKDFExpand(KeyDerivationFunction):
+ def __init__(
+ self,
+ algorithm: hashes.HashAlgorithm,
+ length: int,
+ info: typing.Optional[bytes],
+ backend: typing.Any = None,
+ ):
+ self._algorithm = algorithm
+
+ max_length = 255 * algorithm.digest_size
+
+ if length > max_length:
+ raise ValueError(
+ f"Cannot derive keys larger than {max_length} octets."
+ )
+
+ self._length = length
+
+ if info is None:
+ info = b""
+ else:
+ utils._check_bytes("info", info)
+
+ self._info = info
+
+ self._used = False
+
+ def _expand(self, key_material: bytes) -> bytes:
+ output = [b""]
+ counter = 1
+
+ while self._algorithm.digest_size * (len(output) - 1) < self._length:
+ h = hmac.HMAC(key_material, self._algorithm)
+ h.update(output[-1])
+ h.update(self._info)
+ h.update(bytes([counter]))
+ output.append(h.finalize())
+ counter += 1
+
+ return b"".join(output)[: self._length]
+
+ def derive(self, key_material: bytes) -> bytes:
+ utils._check_byteslike("key_material", key_material)
+ if self._used:
+ raise AlreadyFinalized
+
+ self._used = True
+ return self._expand(key_material)
+
+ def verify(self, key_material: bytes, expected_key: bytes) -> None:
+ if not constant_time.bytes_eq(self.derive(key_material), expected_key):
+ raise InvalidKey
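+
+
+# Illustrative usage (editor's sketch): HKDF is extract-then-expand per
+# RFC 5869; HKDFExpand alone skips the extract step for callers that
+# already hold a uniformly random PRK.
+#
+#   hkdf = HKDF(hashes.SHA256(), length=32, salt=None, info=b"app-context")
+#   key = hkdf.derive(b"input key material")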
diff --git a/billinglayer/python/cryptography/hazmat/primitives/kdf/kbkdf.py b/billinglayer/python/cryptography/hazmat/primitives/kdf/kbkdf.py
new file mode 100644
index 0000000..9677638
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/kdf/kbkdf.py
@@ -0,0 +1,299 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import typing
+
+from cryptography import utils
+from cryptography.exceptions import (
+ AlreadyFinalized,
+ InvalidKey,
+ UnsupportedAlgorithm,
+ _Reasons,
+)
+from cryptography.hazmat.primitives import (
+ ciphers,
+ cmac,
+ constant_time,
+ hashes,
+ hmac,
+)
+from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
+
+
+class Mode(utils.Enum):
+ CounterMode = "ctr"
+
+
+class CounterLocation(utils.Enum):
+ BeforeFixed = "before_fixed"
+ AfterFixed = "after_fixed"
+ MiddleFixed = "middle_fixed"
+
+
+class _KBKDFDeriver:
+ def __init__(
+ self,
+ prf: typing.Callable,
+ mode: Mode,
+ length: int,
+ rlen: int,
+ llen: typing.Optional[int],
+ location: CounterLocation,
+ break_location: typing.Optional[int],
+ label: typing.Optional[bytes],
+ context: typing.Optional[bytes],
+ fixed: typing.Optional[bytes],
+ ):
+ assert callable(prf)
+
+ if not isinstance(mode, Mode):
+ raise TypeError("mode must be of type Mode")
+
+ if not isinstance(location, CounterLocation):
+ raise TypeError("location must be of type CounterLocation")
+
+ if break_location is None and location is CounterLocation.MiddleFixed:
+ raise ValueError("Please specify a break_location")
+
+ if (
+ break_location is not None
+ and location != CounterLocation.MiddleFixed
+ ):
+ raise ValueError(
+ "break_location is ignored when location is not"
+ " CounterLocation.MiddleFixed"
+ )
+
+ if break_location is not None and not isinstance(break_location, int):
+ raise TypeError("break_location must be an integer")
+
+ if break_location is not None and break_location < 0:
+ raise ValueError("break_location must be a positive integer")
+
+ if (label or context) and fixed:
+ raise ValueError(
+ "When supplying fixed data, " "label and context are ignored."
+ )
+
+ if rlen is None or not self._valid_byte_length(rlen):
+ raise ValueError("rlen must be between 1 and 4")
+
+ if llen is None and fixed is None:
+ raise ValueError("Please specify an llen")
+
+ if llen is not None and not isinstance(llen, int):
+ raise TypeError("llen must be an integer")
+
+ if label is None:
+ label = b""
+
+ if context is None:
+ context = b""
+
+ utils._check_bytes("label", label)
+ utils._check_bytes("context", context)
+ self._prf = prf
+ self._mode = mode
+ self._length = length
+ self._rlen = rlen
+ self._llen = llen
+ self._location = location
+ self._break_location = break_location
+ self._label = label
+ self._context = context
+ self._used = False
+ self._fixed_data = fixed
+
+ @staticmethod
+ def _valid_byte_length(value: int) -> bool:
+ if not isinstance(value, int):
+ raise TypeError("value must be of type int")
+
+ value_bin = utils.int_to_bytes(1, value)
+ if not 1 <= len(value_bin) <= 4:
+ return False
+ return True
+
+ def derive(self, key_material: bytes, prf_output_size: int) -> bytes:
+ if self._used:
+ raise AlreadyFinalized
+
+ utils._check_byteslike("key_material", key_material)
+ self._used = True
+
+ # inverse floor division (equivalent to ceiling)
+ rounds = -(-self._length // prf_output_size)
+
+ output = [b""]
+
+ # For counter mode, the number of iterations shall not be
+        # larger than 2^r-1, where r <= 32 is the binary length of the counter.
+ # This ensures that the counter values used as an input to the
+ # PRF will not repeat during a particular call to the KDF function.
+ r_bin = utils.int_to_bytes(1, self._rlen)
+ if rounds > pow(2, len(r_bin) * 8) - 1:
+ raise ValueError("There are too many iterations.")
+
+ fixed = self._generate_fixed_input()
+
+ if self._location == CounterLocation.BeforeFixed:
+ data_before_ctr = b""
+ data_after_ctr = fixed
+ elif self._location == CounterLocation.AfterFixed:
+ data_before_ctr = fixed
+ data_after_ctr = b""
+ else:
+ if isinstance(
+ self._break_location, int
+ ) and self._break_location > len(fixed):
+ raise ValueError("break_location offset > len(fixed)")
+ data_before_ctr = fixed[: self._break_location]
+ data_after_ctr = fixed[self._break_location :]
+
+ for i in range(1, rounds + 1):
+ h = self._prf(key_material)
+
+ counter = utils.int_to_bytes(i, self._rlen)
+ input_data = data_before_ctr + counter + data_after_ctr
+
+ h.update(input_data)
+
+ output.append(h.finalize())
+
+ return b"".join(output)[: self._length]
+
+ def _generate_fixed_input(self) -> bytes:
+ if self._fixed_data and isinstance(self._fixed_data, bytes):
+ return self._fixed_data
+
+ l_val = utils.int_to_bytes(self._length * 8, self._llen)
+
+ return b"".join([self._label, b"\x00", self._context, l_val])
+
+
+class KBKDFHMAC(KeyDerivationFunction):
+ def __init__(
+ self,
+ algorithm: hashes.HashAlgorithm,
+ mode: Mode,
+ length: int,
+ rlen: int,
+ llen: typing.Optional[int],
+ location: CounterLocation,
+ label: typing.Optional[bytes],
+ context: typing.Optional[bytes],
+ fixed: typing.Optional[bytes],
+ backend: typing.Any = None,
+ *,
+ break_location: typing.Optional[int] = None,
+ ):
+ if not isinstance(algorithm, hashes.HashAlgorithm):
+ raise UnsupportedAlgorithm(
+ "Algorithm supplied is not a supported hash algorithm.",
+ _Reasons.UNSUPPORTED_HASH,
+ )
+
+ from cryptography.hazmat.backends.openssl.backend import (
+ backend as ossl,
+ )
+
+ if not ossl.hmac_supported(algorithm):
+ raise UnsupportedAlgorithm(
+ "Algorithm supplied is not a supported hmac algorithm.",
+ _Reasons.UNSUPPORTED_HASH,
+ )
+
+ self._algorithm = algorithm
+
+ self._deriver = _KBKDFDeriver(
+ self._prf,
+ mode,
+ length,
+ rlen,
+ llen,
+ location,
+ break_location,
+ label,
+ context,
+ fixed,
+ )
+
+ def _prf(self, key_material: bytes) -> hmac.HMAC:
+ return hmac.HMAC(key_material, self._algorithm)
+
+ def derive(self, key_material: bytes) -> bytes:
+ return self._deriver.derive(key_material, self._algorithm.digest_size)
+
+ def verify(self, key_material: bytes, expected_key: bytes) -> None:
+ if not constant_time.bytes_eq(self.derive(key_material), expected_key):
+ raise InvalidKey
+
+
+class KBKDFCMAC(KeyDerivationFunction):
+ def __init__(
+ self,
+ algorithm,
+ mode: Mode,
+ length: int,
+ rlen: int,
+ llen: typing.Optional[int],
+ location: CounterLocation,
+ label: typing.Optional[bytes],
+ context: typing.Optional[bytes],
+ fixed: typing.Optional[bytes],
+ backend: typing.Any = None,
+ *,
+ break_location: typing.Optional[int] = None,
+ ):
+ if not issubclass(
+ algorithm, ciphers.BlockCipherAlgorithm
+ ) or not issubclass(algorithm, ciphers.CipherAlgorithm):
+ raise UnsupportedAlgorithm(
+ "Algorithm supplied is not a supported cipher algorithm.",
+ _Reasons.UNSUPPORTED_CIPHER,
+ )
+
+ self._algorithm = algorithm
+ self._cipher: typing.Optional[ciphers.BlockCipherAlgorithm] = None
+
+ self._deriver = _KBKDFDeriver(
+ self._prf,
+ mode,
+ length,
+ rlen,
+ llen,
+ location,
+ break_location,
+ label,
+ context,
+ fixed,
+ )
+
+ def _prf(self, _: bytes) -> cmac.CMAC:
+ assert self._cipher is not None
+
+ return cmac.CMAC(self._cipher)
+
+ def derive(self, key_material: bytes) -> bytes:
+ self._cipher = self._algorithm(key_material)
+
+ assert self._cipher is not None
+
+ from cryptography.hazmat.backends.openssl.backend import (
+ backend as ossl,
+ )
+
+ if not ossl.cmac_algorithm_supported(self._cipher):
+ raise UnsupportedAlgorithm(
+ "Algorithm supplied is not a supported cipher algorithm.",
+ _Reasons.UNSUPPORTED_CIPHER,
+ )
+
+ return self._deriver.derive(key_material, self._cipher.block_size // 8)
+
+ def verify(self, key_material: bytes, expected_key: bytes) -> None:
+ if not constant_time.bytes_eq(self.derive(key_material), expected_key):
+ raise InvalidKey
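+
+
+# Illustrative usage (editor's sketch; label and context are placeholder
+# values):
+#
+#   kdf = KBKDFHMAC(hashes.SHA256(), Mode.CounterMode, length=32, rlen=4,
+#                   llen=4, location=CounterLocation.BeforeFixed,
+#                   label=b"label", context=b"context", fixed=None)
+#   key = kdf.derive(b"input key material")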
diff --git a/billinglayer/python/cryptography/hazmat/primitives/kdf/pbkdf2.py b/billinglayer/python/cryptography/hazmat/primitives/kdf/pbkdf2.py
new file mode 100644
index 0000000..623e1ca
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/kdf/pbkdf2.py
@@ -0,0 +1,64 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import typing
+
+from cryptography import utils
+from cryptography.exceptions import (
+ AlreadyFinalized,
+ InvalidKey,
+ UnsupportedAlgorithm,
+ _Reasons,
+)
+from cryptography.hazmat.bindings._rust import openssl as rust_openssl
+from cryptography.hazmat.primitives import constant_time, hashes
+from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
+
+
+class PBKDF2HMAC(KeyDerivationFunction):
+ def __init__(
+ self,
+ algorithm: hashes.HashAlgorithm,
+ length: int,
+ salt: bytes,
+ iterations: int,
+ backend: typing.Any = None,
+ ):
+ from cryptography.hazmat.backends.openssl.backend import (
+ backend as ossl,
+ )
+
+ if not ossl.pbkdf2_hmac_supported(algorithm):
+ raise UnsupportedAlgorithm(
+ "{} is not supported for PBKDF2 by this backend.".format(
+ algorithm.name
+ ),
+ _Reasons.UNSUPPORTED_HASH,
+ )
+ self._used = False
+ self._algorithm = algorithm
+ self._length = length
+ utils._check_bytes("salt", salt)
+ self._salt = salt
+ self._iterations = iterations
+
+ def derive(self, key_material: bytes) -> bytes:
+ if self._used:
+ raise AlreadyFinalized("PBKDF2 instances can only be used once.")
+ self._used = True
+
+ return rust_openssl.kdf.derive_pbkdf2_hmac(
+ key_material,
+ self._algorithm,
+ self._salt,
+ self._iterations,
+ self._length,
+ )
+
+ def verify(self, key_material: bytes, expected_key: bytes) -> None:
+ derived_key = self.derive(key_material)
+ if not constant_time.bytes_eq(derived_key, expected_key):
+ raise InvalidKey("Keys do not match.")
diff --git a/billinglayer/python/cryptography/hazmat/primitives/kdf/scrypt.py b/billinglayer/python/cryptography/hazmat/primitives/kdf/scrypt.py
new file mode 100644
index 0000000..05a4f67
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/kdf/scrypt.py
@@ -0,0 +1,80 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import sys
+import typing
+
+from cryptography import utils
+from cryptography.exceptions import (
+ AlreadyFinalized,
+ InvalidKey,
+ UnsupportedAlgorithm,
+)
+from cryptography.hazmat.bindings._rust import openssl as rust_openssl
+from cryptography.hazmat.primitives import constant_time
+from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
+
+# This is used by the scrypt tests to skip tests that require more memory
+# than the MEM_LIMIT
+_MEM_LIMIT = sys.maxsize // 2
+
+
+class Scrypt(KeyDerivationFunction):
+ def __init__(
+ self,
+ salt: bytes,
+ length: int,
+ n: int,
+ r: int,
+ p: int,
+ backend: typing.Any = None,
+ ):
+ from cryptography.hazmat.backends.openssl.backend import (
+ backend as ossl,
+ )
+
+ if not ossl.scrypt_supported():
+ raise UnsupportedAlgorithm(
+ "This version of OpenSSL does not support scrypt"
+ )
+ self._length = length
+ utils._check_bytes("salt", salt)
+ if n < 2 or (n & (n - 1)) != 0:
+ raise ValueError("n must be greater than 1 and be a power of 2.")
+
+ if r < 1:
+ raise ValueError("r must be greater than or equal to 1.")
+
+ if p < 1:
+ raise ValueError("p must be greater than or equal to 1.")
+
+ self._used = False
+ self._salt = salt
+ self._n = n
+ self._r = r
+ self._p = p
+
+ def derive(self, key_material: bytes) -> bytes:
+ if self._used:
+ raise AlreadyFinalized("Scrypt instances can only be used once.")
+ self._used = True
+
+ utils._check_byteslike("key_material", key_material)
+
+ return rust_openssl.kdf.derive_scrypt(
+ key_material,
+ self._salt,
+ self._n,
+ self._r,
+ self._p,
+ _MEM_LIMIT,
+ self._length,
+ )
+
+ def verify(self, key_material: bytes, expected_key: bytes) -> None:
+ derived_key = self.derive(key_material)
+ if not constant_time.bytes_eq(derived_key, expected_key):
+ raise InvalidKey("Keys do not match.")
diff --git a/billinglayer/python/cryptography/hazmat/primitives/kdf/x963kdf.py b/billinglayer/python/cryptography/hazmat/primitives/kdf/x963kdf.py
new file mode 100644
index 0000000..17acc51
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/kdf/x963kdf.py
@@ -0,0 +1,61 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import typing
+
+from cryptography import utils
+from cryptography.exceptions import AlreadyFinalized, InvalidKey
+from cryptography.hazmat.primitives import constant_time, hashes
+from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
+
+
+def _int_to_u32be(n: int) -> bytes:
+ return n.to_bytes(length=4, byteorder="big")
+
+
+class X963KDF(KeyDerivationFunction):
+ def __init__(
+ self,
+ algorithm: hashes.HashAlgorithm,
+ length: int,
+ sharedinfo: typing.Optional[bytes],
+ backend: typing.Any = None,
+ ):
+ max_len = algorithm.digest_size * (2**32 - 1)
+ if length > max_len:
+ raise ValueError(f"Cannot derive keys larger than {max_len} bits.")
+ if sharedinfo is not None:
+ utils._check_bytes("sharedinfo", sharedinfo)
+
+ self._algorithm = algorithm
+ self._length = length
+ self._sharedinfo = sharedinfo
+ self._used = False
+
+ def derive(self, key_material: bytes) -> bytes:
+ if self._used:
+ raise AlreadyFinalized
+ self._used = True
+ utils._check_byteslike("key_material", key_material)
+ output = [b""]
+ outlen = 0
+ counter = 1
+
+ while self._length > outlen:
+ h = hashes.Hash(self._algorithm)
+ h.update(key_material)
+ h.update(_int_to_u32be(counter))
+ if self._sharedinfo is not None:
+ h.update(self._sharedinfo)
+ output.append(h.finalize())
+ outlen += len(output[-1])
+ counter += 1
+
+ return b"".join(output)[: self._length]
+
+ def verify(self, key_material: bytes, expected_key: bytes) -> None:
+ if not constant_time.bytes_eq(self.derive(key_material), expected_key):
+ raise InvalidKey
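+
+
+# Illustrative usage (editor's sketch; sharedinfo is a placeholder):
+#
+#   xkdf = X963KDF(hashes.SHA256(), length=32, sharedinfo=b"shared info")
+#   key = xkdf.derive(b"input key material")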
diff --git a/billinglayer/python/cryptography/hazmat/primitives/keywrap.py b/billinglayer/python/cryptography/hazmat/primitives/keywrap.py
new file mode 100644
index 0000000..59b0326
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/keywrap.py
@@ -0,0 +1,177 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import typing
+
+from cryptography.hazmat.primitives.ciphers import Cipher
+from cryptography.hazmat.primitives.ciphers.algorithms import AES
+from cryptography.hazmat.primitives.ciphers.modes import ECB
+from cryptography.hazmat.primitives.constant_time import bytes_eq
+
+
+def _wrap_core(
+ wrapping_key: bytes,
+ a: bytes,
+ r: typing.List[bytes],
+) -> bytes:
+ # RFC 3394 Key Wrap - 2.2.1 (index method)
+ encryptor = Cipher(AES(wrapping_key), ECB()).encryptor()
+ n = len(r)
+ for j in range(6):
+ for i in range(n):
+ # every encryption operation is a discrete 16 byte chunk (because
+ # AES has a 128-bit block size) and since we're using ECB it is
+ # safe to reuse the encryptor for the entire operation
+ b = encryptor.update(a + r[i])
+ a = (
+ int.from_bytes(b[:8], byteorder="big") ^ ((n * j) + i + 1)
+ ).to_bytes(length=8, byteorder="big")
+ r[i] = b[-8:]
+
+ assert encryptor.finalize() == b""
+
+ return a + b"".join(r)
+
+
+def aes_key_wrap(
+ wrapping_key: bytes,
+ key_to_wrap: bytes,
+ backend: typing.Any = None,
+) -> bytes:
+ if len(wrapping_key) not in [16, 24, 32]:
+ raise ValueError("The wrapping key must be a valid AES key length")
+
+ if len(key_to_wrap) < 16:
+ raise ValueError("The key to wrap must be at least 16 bytes")
+
+ if len(key_to_wrap) % 8 != 0:
+ raise ValueError("The key to wrap must be a multiple of 8 bytes")
+
+ a = b"\xa6\xa6\xa6\xa6\xa6\xa6\xa6\xa6"
+ r = [key_to_wrap[i : i + 8] for i in range(0, len(key_to_wrap), 8)]
+ return _wrap_core(wrapping_key, a, r)
+
+
+def _unwrap_core(
+ wrapping_key: bytes,
+ a: bytes,
+ r: typing.List[bytes],
+) -> typing.Tuple[bytes, typing.List[bytes]]:
+ # Implement RFC 3394 Key Unwrap - 2.2.2 (index method)
+ decryptor = Cipher(AES(wrapping_key), ECB()).decryptor()
+ n = len(r)
+ for j in reversed(range(6)):
+ for i in reversed(range(n)):
+ atr = (
+ int.from_bytes(a, byteorder="big") ^ ((n * j) + i + 1)
+ ).to_bytes(length=8, byteorder="big") + r[i]
+ # every decryption operation is a discrete 16 byte chunk so
+ # it is safe to reuse the decryptor for the entire operation
+ b = decryptor.update(atr)
+ a = b[:8]
+ r[i] = b[-8:]
+
+ assert decryptor.finalize() == b""
+ return a, r
+
+
+def aes_key_wrap_with_padding(
+ wrapping_key: bytes,
+ key_to_wrap: bytes,
+ backend: typing.Any = None,
+) -> bytes:
+ if len(wrapping_key) not in [16, 24, 32]:
+ raise ValueError("The wrapping key must be a valid AES key length")
+
+ aiv = b"\xA6\x59\x59\xA6" + len(key_to_wrap).to_bytes(
+ length=4, byteorder="big"
+ )
+ # pad the key to wrap if necessary
+ pad = (8 - (len(key_to_wrap) % 8)) % 8
+ key_to_wrap = key_to_wrap + b"\x00" * pad
+ if len(key_to_wrap) == 8:
+ # RFC 5649 - 4.1 - exactly 8 octets after padding
+ encryptor = Cipher(AES(wrapping_key), ECB()).encryptor()
+ b = encryptor.update(aiv + key_to_wrap)
+ assert encryptor.finalize() == b""
+ return b
+ else:
+ r = [key_to_wrap[i : i + 8] for i in range(0, len(key_to_wrap), 8)]
+ return _wrap_core(wrapping_key, aiv, r)
+
+
+def aes_key_unwrap_with_padding(
+ wrapping_key: bytes,
+ wrapped_key: bytes,
+ backend: typing.Any = None,
+) -> bytes:
+ if len(wrapped_key) < 16:
+ raise InvalidUnwrap("Must be at least 16 bytes")
+
+ if len(wrapping_key) not in [16, 24, 32]:
+ raise ValueError("The wrapping key must be a valid AES key length")
+
+ if len(wrapped_key) == 16:
+ # RFC 5649 - 4.2 - exactly two 64-bit blocks
+ decryptor = Cipher(AES(wrapping_key), ECB()).decryptor()
+ out = decryptor.update(wrapped_key)
+ assert decryptor.finalize() == b""
+ a = out[:8]
+ data = out[8:]
+ n = 1
+ else:
+ r = [wrapped_key[i : i + 8] for i in range(0, len(wrapped_key), 8)]
+ encrypted_aiv = r.pop(0)
+ n = len(r)
+ a, r = _unwrap_core(wrapping_key, encrypted_aiv, r)
+ data = b"".join(r)
+
+ # 1) Check that MSB(32,A) = A65959A6.
+ # 2) Check that 8*(n-1) < LSB(32,A) <= 8*n. If so, let
+ # MLI = LSB(32,A).
+ # 3) Let b = (8*n)-MLI, and then check that the rightmost b octets of
+ # the output data are zero.
+ mli = int.from_bytes(a[4:], byteorder="big")
+ b = (8 * n) - mli
+ if (
+ not bytes_eq(a[:4], b"\xa6\x59\x59\xa6")
+ or not 8 * (n - 1) < mli <= 8 * n
+ or (b != 0 and not bytes_eq(data[-b:], b"\x00" * b))
+ ):
+ raise InvalidUnwrap()
+
+ if b == 0:
+ return data
+ else:
+ return data[:-b]
+
+
+def aes_key_unwrap(
+ wrapping_key: bytes,
+ wrapped_key: bytes,
+ backend: typing.Any = None,
+) -> bytes:
+ if len(wrapped_key) < 24:
+ raise InvalidUnwrap("Must be at least 24 bytes")
+
+ if len(wrapped_key) % 8 != 0:
+ raise InvalidUnwrap("The wrapped key must be a multiple of 8 bytes")
+
+ if len(wrapping_key) not in [16, 24, 32]:
+ raise ValueError("The wrapping key must be a valid AES key length")
+
+ aiv = b"\xa6\xa6\xa6\xa6\xa6\xa6\xa6\xa6"
+ r = [wrapped_key[i : i + 8] for i in range(0, len(wrapped_key), 8)]
+ a = r.pop(0)
+ a, r = _unwrap_core(wrapping_key, a, r)
+ if not bytes_eq(a, aiv):
+ raise InvalidUnwrap()
+
+ return b"".join(r)
+
+
+class InvalidUnwrap(Exception):
+ pass
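+
+
+# Illustrative round trip (editor's sketch): RFC 3394 wrapping prepends a
+# 64-bit integrity check value, so the output is 8 bytes longer than the
+# input.
+#
+#   kek = os.urandom(16)
+#   wrapped = aes_key_wrap(kek, os.urandom(32))      # 40 bytes
+#   unwrapped = aes_key_unwrap(kek, wrapped)         # original 32 bytes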
diff --git a/billinglayer/python/cryptography/hazmat/primitives/padding.py b/billinglayer/python/cryptography/hazmat/primitives/padding.py
new file mode 100644
index 0000000..fde3094
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/padding.py
@@ -0,0 +1,225 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+import typing
+
+from cryptography import utils
+from cryptography.exceptions import AlreadyFinalized
+from cryptography.hazmat.bindings._rust import (
+ check_ansix923_padding,
+ check_pkcs7_padding,
+)
+
+
+class PaddingContext(metaclass=abc.ABCMeta):
+ @abc.abstractmethod
+ def update(self, data: bytes) -> bytes:
+ """
+ Pads the provided bytes and returns any available data as bytes.
+ """
+
+ @abc.abstractmethod
+ def finalize(self) -> bytes:
+ """
+ Finalize the padding, returns bytes.
+ """
+
+
+def _byte_padding_check(block_size: int) -> None:
+ if not (0 <= block_size <= 2040):
+ raise ValueError("block_size must be in range(0, 2041).")
+
+ if block_size % 8 != 0:
+ raise ValueError("block_size must be a multiple of 8.")
+
+
+def _byte_padding_update(
+ buffer_: typing.Optional[bytes], data: bytes, block_size: int
+) -> typing.Tuple[bytes, bytes]:
+ if buffer_ is None:
+ raise AlreadyFinalized("Context was already finalized.")
+
+ utils._check_byteslike("data", data)
+
+ buffer_ += bytes(data)
+
+ finished_blocks = len(buffer_) // (block_size // 8)
+
+ result = buffer_[: finished_blocks * (block_size // 8)]
+ buffer_ = buffer_[finished_blocks * (block_size // 8) :]
+
+ return buffer_, result
+
+
+def _byte_padding_pad(
+ buffer_: typing.Optional[bytes],
+ block_size: int,
+ paddingfn: typing.Callable[[int], bytes],
+) -> bytes:
+ if buffer_ is None:
+ raise AlreadyFinalized("Context was already finalized.")
+
+ pad_size = block_size // 8 - len(buffer_)
+ return buffer_ + paddingfn(pad_size)
+
+
+def _byte_unpadding_update(
+ buffer_: typing.Optional[bytes], data: bytes, block_size: int
+) -> typing.Tuple[bytes, bytes]:
+ if buffer_ is None:
+ raise AlreadyFinalized("Context was already finalized.")
+
+ utils._check_byteslike("data", data)
+
+ buffer_ += bytes(data)
+
+ finished_blocks = max(len(buffer_) // (block_size // 8) - 1, 0)
+
+ result = buffer_[: finished_blocks * (block_size // 8)]
+ buffer_ = buffer_[finished_blocks * (block_size // 8) :]
+
+ return buffer_, result
+
+
+def _byte_unpadding_check(
+ buffer_: typing.Optional[bytes],
+ block_size: int,
+ checkfn: typing.Callable[[bytes], int],
+) -> bytes:
+ if buffer_ is None:
+ raise AlreadyFinalized("Context was already finalized.")
+
+ if len(buffer_) != block_size // 8:
+ raise ValueError("Invalid padding bytes.")
+
+ valid = checkfn(buffer_)
+
+ if not valid:
+ raise ValueError("Invalid padding bytes.")
+
+ pad_size = buffer_[-1]
+ return buffer_[:-pad_size]
+
+
+class PKCS7:
+ def __init__(self, block_size: int):
+ _byte_padding_check(block_size)
+ self.block_size = block_size
+
+ def padder(self) -> PaddingContext:
+ return _PKCS7PaddingContext(self.block_size)
+
+ def unpadder(self) -> PaddingContext:
+ return _PKCS7UnpaddingContext(self.block_size)
+
+
+class _PKCS7PaddingContext(PaddingContext):
+ _buffer: typing.Optional[bytes]
+
+ def __init__(self, block_size: int):
+ self.block_size = block_size
+ # TODO: more copies than necessary, we should use zero-buffer (#193)
+ self._buffer = b""
+
+ def update(self, data: bytes) -> bytes:
+ self._buffer, result = _byte_padding_update(
+ self._buffer, data, self.block_size
+ )
+ return result
+
+ def _padding(self, size: int) -> bytes:
+ return bytes([size]) * size
+
+ def finalize(self) -> bytes:
+ result = _byte_padding_pad(
+ self._buffer, self.block_size, self._padding
+ )
+ self._buffer = None
+ return result
+
+
+class _PKCS7UnpaddingContext(PaddingContext):
+ _buffer: typing.Optional[bytes]
+
+ def __init__(self, block_size: int):
+ self.block_size = block_size
+ # TODO: more copies than necessary, we should use zero-buffer (#193)
+ self._buffer = b""
+
+ def update(self, data: bytes) -> bytes:
+ self._buffer, result = _byte_unpadding_update(
+ self._buffer, data, self.block_size
+ )
+ return result
+
+ def finalize(self) -> bytes:
+ result = _byte_unpadding_check(
+ self._buffer, self.block_size, check_pkcs7_padding
+ )
+ self._buffer = None
+ return result
+
+
+class ANSIX923:
+ def __init__(self, block_size: int):
+ _byte_padding_check(block_size)
+ self.block_size = block_size
+
+ def padder(self) -> PaddingContext:
+ return _ANSIX923PaddingContext(self.block_size)
+
+ def unpadder(self) -> PaddingContext:
+ return _ANSIX923UnpaddingContext(self.block_size)
+
+
+class _ANSIX923PaddingContext(PaddingContext):
+ _buffer: typing.Optional[bytes]
+
+ def __init__(self, block_size: int):
+ self.block_size = block_size
+ # TODO: more copies than necessary, we should use zero-buffer (#193)
+ self._buffer = b""
+
+ def update(self, data: bytes) -> bytes:
+ self._buffer, result = _byte_padding_update(
+ self._buffer, data, self.block_size
+ )
+ return result
+
+ def _padding(self, size: int) -> bytes:
+ return bytes([0]) * (size - 1) + bytes([size])
+
+ def finalize(self) -> bytes:
+ result = _byte_padding_pad(
+ self._buffer, self.block_size, self._padding
+ )
+ self._buffer = None
+ return result
+
+
+class _ANSIX923UnpaddingContext(PaddingContext):
+ _buffer: typing.Optional[bytes]
+
+ def __init__(self, block_size: int):
+ self.block_size = block_size
+ # TODO: more copies than necessary, we should use zero-buffer (#193)
+ self._buffer = b""
+
+ def update(self, data: bytes) -> bytes:
+ self._buffer, result = _byte_unpadding_update(
+ self._buffer, data, self.block_size
+ )
+ return result
+
+ def finalize(self) -> bytes:
+ result = _byte_unpadding_check(
+ self._buffer,
+ self.block_size,
+ check_ansix923_padding,
+ )
+ self._buffer = None
+ return result
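+
+
+# Illustrative usage (editor's sketch): block_size is given in bits, and
+# PKCS7 padding always adds at least one byte, so block-aligned input
+# still grows by a full block and must be unpadded.
+#
+#   padder = PKCS7(128).padder()
+#   padded = padder.update(b"exactly sixteen!") + padder.finalize()
+#   unpadder = PKCS7(128).unpadder()
+#   data = unpadder.update(padded) + unpadder.finalize()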
diff --git a/billinglayer/python/cryptography/hazmat/primitives/poly1305.py b/billinglayer/python/cryptography/hazmat/primitives/poly1305.py
new file mode 100644
index 0000000..7f5a77a
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/poly1305.py
@@ -0,0 +1,11 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+from cryptography.hazmat.bindings._rust import openssl as rust_openssl
+
+__all__ = ["Poly1305"]
+
+Poly1305 = rust_openssl.poly1305.Poly1305
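+
+
+# Illustrative usage (editor's sketch; the exact binding surface lives in
+# the Rust extension, so treat this as an assumption): Poly1305 is a
+# one-time authenticator and the 32-byte key must never be reused across
+# messages.
+#
+#   p = Poly1305(key)
+#   p.update(b"message")
+#   tag = p.finalize()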
diff --git a/billinglayer/python/cryptography/hazmat/primitives/serialization/__init__.py b/billinglayer/python/cryptography/hazmat/primitives/serialization/__init__.py
new file mode 100644
index 0000000..b6c9a5c
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/serialization/__init__.py
@@ -0,0 +1,63 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+from cryptography.hazmat.primitives._serialization import (
+ BestAvailableEncryption,
+ Encoding,
+ KeySerializationEncryption,
+ NoEncryption,
+ ParameterFormat,
+ PrivateFormat,
+ PublicFormat,
+ _KeySerializationEncryption,
+)
+from cryptography.hazmat.primitives.serialization.base import (
+ load_der_parameters,
+ load_der_private_key,
+ load_der_public_key,
+ load_pem_parameters,
+ load_pem_private_key,
+ load_pem_public_key,
+)
+from cryptography.hazmat.primitives.serialization.ssh import (
+ SSHCertificate,
+ SSHCertificateBuilder,
+ SSHCertificateType,
+ SSHCertPrivateKeyTypes,
+ SSHCertPublicKeyTypes,
+ SSHPrivateKeyTypes,
+ SSHPublicKeyTypes,
+ load_ssh_private_key,
+ load_ssh_public_identity,
+ load_ssh_public_key,
+)
+
+__all__ = [
+ "load_der_parameters",
+ "load_der_private_key",
+ "load_der_public_key",
+ "load_pem_parameters",
+ "load_pem_private_key",
+ "load_pem_public_key",
+ "load_ssh_private_key",
+ "load_ssh_public_identity",
+ "load_ssh_public_key",
+ "Encoding",
+ "PrivateFormat",
+ "PublicFormat",
+ "ParameterFormat",
+ "KeySerializationEncryption",
+ "BestAvailableEncryption",
+ "NoEncryption",
+ "_KeySerializationEncryption",
+ "SSHCertificateBuilder",
+ "SSHCertificate",
+ "SSHCertificateType",
+ "SSHCertPublicKeyTypes",
+ "SSHCertPrivateKeyTypes",
+ "SSHPrivateKeyTypes",
+ "SSHPublicKeyTypes",
+]
diff --git a/billinglayer/python/cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..774a7ac
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-311.pyc
new file mode 100644
index 0000000..d8ac2b2
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-311.pyc
new file mode 100644
index 0000000..4003b12
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-311.pyc
new file mode 100644
index 0000000..f55a23c
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-311.pyc
new file mode 100644
index 0000000..5f23e8c
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/serialization/base.py b/billinglayer/python/cryptography/hazmat/primitives/serialization/base.py
new file mode 100644
index 0000000..18a96cc
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/serialization/base.py
@@ -0,0 +1,73 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import typing
+
+from cryptography.hazmat.primitives.asymmetric import dh
+from cryptography.hazmat.primitives.asymmetric.types import (
+ PrivateKeyTypes,
+ PublicKeyTypes,
+)
+
+
+def load_pem_private_key(
+ data: bytes,
+ password: typing.Optional[bytes],
+ backend: typing.Any = None,
+ *,
+ unsafe_skip_rsa_key_validation: bool = False,
+) -> PrivateKeyTypes:
+ from cryptography.hazmat.backends.openssl.backend import backend as ossl
+
+ return ossl.load_pem_private_key(
+ data, password, unsafe_skip_rsa_key_validation
+ )
+
+
+def load_pem_public_key(
+ data: bytes, backend: typing.Any = None
+) -> PublicKeyTypes:
+ from cryptography.hazmat.backends.openssl.backend import backend as ossl
+
+ return ossl.load_pem_public_key(data)
+
+
+def load_pem_parameters(
+ data: bytes, backend: typing.Any = None
+) -> dh.DHParameters:
+ from cryptography.hazmat.backends.openssl.backend import backend as ossl
+
+ return ossl.load_pem_parameters(data)
+
+
+def load_der_private_key(
+ data: bytes,
+ password: typing.Optional[bytes],
+ backend: typing.Any = None,
+ *,
+ unsafe_skip_rsa_key_validation: bool = False,
+) -> PrivateKeyTypes:
+ from cryptography.hazmat.backends.openssl.backend import backend as ossl
+
+ return ossl.load_der_private_key(
+ data, password, unsafe_skip_rsa_key_validation
+ )
+
+
+def load_der_public_key(
+ data: bytes, backend: typing.Any = None
+) -> PublicKeyTypes:
+ from cryptography.hazmat.backends.openssl.backend import backend as ossl
+
+ return ossl.load_der_public_key(data)
+
+
+def load_der_parameters(
+ data: bytes, backend: typing.Any = None
+) -> dh.DHParameters:
+ from cryptography.hazmat.backends.openssl.backend import backend as ossl
+
+ return ossl.load_der_parameters(data)
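+
+
+# Illustrative usage (editor's sketch; pem_data and public_pem_data are
+# placeholders for the caller's bytes):
+#
+#   private_key = load_pem_private_key(pem_data, password=None)
+#   public_key = load_pem_public_key(public_pem_data)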
diff --git a/billinglayer/python/cryptography/hazmat/primitives/serialization/pkcs12.py b/billinglayer/python/cryptography/hazmat/primitives/serialization/pkcs12.py
new file mode 100644
index 0000000..27133a3
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/serialization/pkcs12.py
@@ -0,0 +1,229 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import typing
+
+from cryptography import x509
+from cryptography.hazmat.primitives import serialization
+from cryptography.hazmat.primitives._serialization import PBES as PBES
+from cryptography.hazmat.primitives.asymmetric import (
+ dsa,
+ ec,
+ ed448,
+ ed25519,
+ rsa,
+)
+from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes
+
+__all__ = [
+ "PBES",
+ "PKCS12PrivateKeyTypes",
+ "PKCS12Certificate",
+ "PKCS12KeyAndCertificates",
+ "load_key_and_certificates",
+ "load_pkcs12",
+ "serialize_key_and_certificates",
+]
+
+PKCS12PrivateKeyTypes = typing.Union[
+ rsa.RSAPrivateKey,
+ dsa.DSAPrivateKey,
+ ec.EllipticCurvePrivateKey,
+ ed25519.Ed25519PrivateKey,
+ ed448.Ed448PrivateKey,
+]
+
+
+class PKCS12Certificate:
+ def __init__(
+ self,
+ cert: x509.Certificate,
+ friendly_name: typing.Optional[bytes],
+ ):
+ if not isinstance(cert, x509.Certificate):
+ raise TypeError("Expecting x509.Certificate object")
+ if friendly_name is not None and not isinstance(friendly_name, bytes):
+ raise TypeError("friendly_name must be bytes or None")
+ self._cert = cert
+ self._friendly_name = friendly_name
+
+ @property
+ def friendly_name(self) -> typing.Optional[bytes]:
+ return self._friendly_name
+
+ @property
+ def certificate(self) -> x509.Certificate:
+ return self._cert
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, PKCS12Certificate):
+ return NotImplemented
+
+ return (
+ self.certificate == other.certificate
+ and self.friendly_name == other.friendly_name
+ )
+
+ def __hash__(self) -> int:
+ return hash((self.certificate, self.friendly_name))
+
+ def __repr__(self) -> str:
+ return "".format(
+ self.certificate, self.friendly_name
+ )
+
+
+class PKCS12KeyAndCertificates:
+ def __init__(
+ self,
+ key: typing.Optional[PrivateKeyTypes],
+ cert: typing.Optional[PKCS12Certificate],
+ additional_certs: typing.List[PKCS12Certificate],
+ ):
+ if key is not None and not isinstance(
+ key,
+ (
+ rsa.RSAPrivateKey,
+ dsa.DSAPrivateKey,
+ ec.EllipticCurvePrivateKey,
+ ed25519.Ed25519PrivateKey,
+ ed448.Ed448PrivateKey,
+ ),
+ ):
+ raise TypeError(
+ "Key must be RSA, DSA, EllipticCurve, ED25519, or ED448"
+ " private key, or None."
+ )
+ if cert is not None and not isinstance(cert, PKCS12Certificate):
+ raise TypeError("cert must be a PKCS12Certificate object or None")
+ if not all(
+ isinstance(add_cert, PKCS12Certificate)
+ for add_cert in additional_certs
+ ):
+ raise TypeError(
+ "all values in additional_certs must be PKCS12Certificate"
+ " objects"
+ )
+ self._key = key
+ self._cert = cert
+ self._additional_certs = additional_certs
+
+ @property
+ def key(self) -> typing.Optional[PrivateKeyTypes]:
+ return self._key
+
+ @property
+ def cert(self) -> typing.Optional[PKCS12Certificate]:
+ return self._cert
+
+ @property
+ def additional_certs(self) -> typing.List[PKCS12Certificate]:
+ return self._additional_certs
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, PKCS12KeyAndCertificates):
+ return NotImplemented
+
+ return (
+ self.key == other.key
+ and self.cert == other.cert
+ and self.additional_certs == other.additional_certs
+ )
+
+ def __hash__(self) -> int:
+ return hash((self.key, self.cert, tuple(self.additional_certs)))
+
+ def __repr__(self) -> str:
+        fmt = (
+            "<PKCS12KeyAndCertificates(key={}, cert={}, "
+            "additional_certs={})>"
+        )
+ return fmt.format(self.key, self.cert, self.additional_certs)
+
+
+def load_key_and_certificates(
+ data: bytes,
+ password: typing.Optional[bytes],
+ backend: typing.Any = None,
+) -> typing.Tuple[
+ typing.Optional[PrivateKeyTypes],
+ typing.Optional[x509.Certificate],
+ typing.List[x509.Certificate],
+]:
+ from cryptography.hazmat.backends.openssl.backend import backend as ossl
+
+ return ossl.load_key_and_certificates_from_pkcs12(data, password)
+
+
+def load_pkcs12(
+ data: bytes,
+ password: typing.Optional[bytes],
+ backend: typing.Any = None,
+) -> PKCS12KeyAndCertificates:
+ from cryptography.hazmat.backends.openssl.backend import backend as ossl
+
+ return ossl.load_pkcs12(data, password)
+
+
+_PKCS12CATypes = typing.Union[
+ x509.Certificate,
+ PKCS12Certificate,
+]
+
+
+def serialize_key_and_certificates(
+ name: typing.Optional[bytes],
+ key: typing.Optional[PKCS12PrivateKeyTypes],
+ cert: typing.Optional[x509.Certificate],
+ cas: typing.Optional[typing.Iterable[_PKCS12CATypes]],
+ encryption_algorithm: serialization.KeySerializationEncryption,
+) -> bytes:
+ if key is not None and not isinstance(
+ key,
+ (
+ rsa.RSAPrivateKey,
+ dsa.DSAPrivateKey,
+ ec.EllipticCurvePrivateKey,
+ ed25519.Ed25519PrivateKey,
+ ed448.Ed448PrivateKey,
+ ),
+ ):
+ raise TypeError(
+ "Key must be RSA, DSA, EllipticCurve, ED25519, or ED448"
+ " private key, or None."
+ )
+ if cert is not None and not isinstance(cert, x509.Certificate):
+ raise TypeError("cert must be a certificate or None")
+
+ if cas is not None:
+ cas = list(cas)
+ if not all(
+ isinstance(
+ val,
+ (
+ x509.Certificate,
+ PKCS12Certificate,
+ ),
+ )
+ for val in cas
+ ):
+ raise TypeError("all values in cas must be certificates")
+
+ if not isinstance(
+ encryption_algorithm, serialization.KeySerializationEncryption
+ ):
+ raise TypeError(
+ "Key encryption algorithm must be a "
+ "KeySerializationEncryption instance"
+ )
+
+ if key is None and cert is None and not cas:
+ raise ValueError("You must supply at least one of key, cert, or cas")
+
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ return backend.serialize_key_and_certificates_to_pkcs12(
+ name, key, cert, cas, encryption_algorithm
+ )
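+
+
+# Illustrative usage (editor's sketch; key and cert are placeholders for
+# objects loaded elsewhere):
+#
+#   p12 = serialize_key_and_certificates(
+#       b"friendly-name", key, cert, cas=None,
+#       encryption_algorithm=serialization.BestAvailableEncryption(b"pw"),
+#   )
+#   key2, cert2, extra = load_key_and_certificates(p12, b"pw")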
diff --git a/billinglayer/python/cryptography/hazmat/primitives/serialization/pkcs7.py b/billinglayer/python/cryptography/hazmat/primitives/serialization/pkcs7.py
new file mode 100644
index 0000000..9998bca
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/serialization/pkcs7.py
@@ -0,0 +1,235 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import email.base64mime
+import email.generator
+import email.message
+import email.policy
+import io
+import typing
+
+from cryptography import utils, x509
+from cryptography.hazmat.bindings._rust import pkcs7 as rust_pkcs7
+from cryptography.hazmat.primitives import hashes, serialization
+from cryptography.hazmat.primitives.asymmetric import ec, rsa
+from cryptography.utils import _check_byteslike
+
+
+def load_pem_pkcs7_certificates(data: bytes) -> typing.List[x509.Certificate]:
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ return backend.load_pem_pkcs7_certificates(data)
+
+
+def load_der_pkcs7_certificates(data: bytes) -> typing.List[x509.Certificate]:
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ return backend.load_der_pkcs7_certificates(data)
+
+
+def serialize_certificates(
+ certs: typing.List[x509.Certificate],
+ encoding: serialization.Encoding,
+) -> bytes:
+ return rust_pkcs7.serialize_certificates(certs, encoding)
+
+
+PKCS7HashTypes = typing.Union[
+ hashes.SHA224,
+ hashes.SHA256,
+ hashes.SHA384,
+ hashes.SHA512,
+]
+
+PKCS7PrivateKeyTypes = typing.Union[
+ rsa.RSAPrivateKey, ec.EllipticCurvePrivateKey
+]
+
+
+class PKCS7Options(utils.Enum):
+ Text = "Add text/plain MIME type"
+ Binary = "Don't translate input data into canonical MIME format"
+ DetachedSignature = "Don't embed data in the PKCS7 structure"
+ NoCapabilities = "Don't embed SMIME capabilities"
+ NoAttributes = "Don't embed authenticatedAttributes"
+ NoCerts = "Don't embed signer certificate"
+
+
+class PKCS7SignatureBuilder:
+ def __init__(
+ self,
+ data: typing.Optional[bytes] = None,
+ signers: typing.List[
+ typing.Tuple[
+ x509.Certificate,
+ PKCS7PrivateKeyTypes,
+ PKCS7HashTypes,
+ ]
+ ] = [],
+ additional_certs: typing.List[x509.Certificate] = [],
+ ):
+ self._data = data
+ self._signers = signers
+ self._additional_certs = additional_certs
+
+ def set_data(self, data: bytes) -> PKCS7SignatureBuilder:
+ _check_byteslike("data", data)
+ if self._data is not None:
+ raise ValueError("data may only be set once")
+
+ return PKCS7SignatureBuilder(data, self._signers)
+
+ def add_signer(
+ self,
+ certificate: x509.Certificate,
+ private_key: PKCS7PrivateKeyTypes,
+ hash_algorithm: PKCS7HashTypes,
+ ) -> PKCS7SignatureBuilder:
+ if not isinstance(
+ hash_algorithm,
+ (
+ hashes.SHA224,
+ hashes.SHA256,
+ hashes.SHA384,
+ hashes.SHA512,
+ ),
+ ):
+ raise TypeError(
+ "hash_algorithm must be one of hashes.SHA224, "
+ "SHA256, SHA384, or SHA512"
+ )
+ if not isinstance(certificate, x509.Certificate):
+ raise TypeError("certificate must be a x509.Certificate")
+
+ if not isinstance(
+ private_key, (rsa.RSAPrivateKey, ec.EllipticCurvePrivateKey)
+ ):
+ raise TypeError("Only RSA & EC keys are supported at this time.")
+
+ return PKCS7SignatureBuilder(
+ self._data,
+ self._signers + [(certificate, private_key, hash_algorithm)],
+ )
+
+ def add_certificate(
+ self, certificate: x509.Certificate
+ ) -> PKCS7SignatureBuilder:
+ if not isinstance(certificate, x509.Certificate):
+ raise TypeError("certificate must be a x509.Certificate")
+
+ return PKCS7SignatureBuilder(
+ self._data, self._signers, self._additional_certs + [certificate]
+ )
+
+ def sign(
+ self,
+ encoding: serialization.Encoding,
+ options: typing.Iterable[PKCS7Options],
+ backend: typing.Any = None,
+ ) -> bytes:
+ if len(self._signers) == 0:
+ raise ValueError("Must have at least one signer")
+ if self._data is None:
+ raise ValueError("You must add data to sign")
+ options = list(options)
+ if not all(isinstance(x, PKCS7Options) for x in options):
+ raise ValueError("options must be from the PKCS7Options enum")
+ if encoding not in (
+ serialization.Encoding.PEM,
+ serialization.Encoding.DER,
+ serialization.Encoding.SMIME,
+ ):
+ raise ValueError(
+ "Must be PEM, DER, or SMIME from the Encoding enum"
+ )
+
+ # Text is a meaningless option unless it is accompanied by
+ # DetachedSignature
+ if (
+ PKCS7Options.Text in options
+ and PKCS7Options.DetachedSignature not in options
+ ):
+ raise ValueError(
+ "When passing the Text option you must also pass "
+ "DetachedSignature"
+ )
+
+ if PKCS7Options.Text in options and encoding in (
+ serialization.Encoding.DER,
+ serialization.Encoding.PEM,
+ ):
+ raise ValueError(
+ "The Text option is only available for SMIME serialization"
+ )
+
+ # No attributes implies no capabilities so we'll error if you try to
+ # pass both.
+ if (
+ PKCS7Options.NoAttributes in options
+ and PKCS7Options.NoCapabilities in options
+ ):
+ raise ValueError(
+ "NoAttributes is a superset of NoCapabilities. Do not pass "
+ "both values."
+ )
+
+ return rust_pkcs7.sign_and_serialize(self, encoding, options)
+
+
+def _smime_encode(
+ data: bytes, signature: bytes, micalg: str, text_mode: bool
+) -> bytes:
+ # This function works pretty hard to replicate what OpenSSL does
+ # precisely. For good and for ill.
+
+ m = email.message.Message()
+ m.add_header("MIME-Version", "1.0")
+ m.add_header(
+ "Content-Type",
+ "multipart/signed",
+ protocol="application/x-pkcs7-signature",
+ micalg=micalg,
+ )
+
+ m.preamble = "This is an S/MIME signed message\n"
+
+ msg_part = OpenSSLMimePart()
+ msg_part.set_payload(data)
+ if text_mode:
+ msg_part.add_header("Content-Type", "text/plain")
+ m.attach(msg_part)
+
+ sig_part = email.message.MIMEPart()
+ sig_part.add_header(
+ "Content-Type", "application/x-pkcs7-signature", name="smime.p7s"
+ )
+ sig_part.add_header("Content-Transfer-Encoding", "base64")
+ sig_part.add_header(
+ "Content-Disposition", "attachment", filename="smime.p7s"
+ )
+ sig_part.set_payload(
+ email.base64mime.body_encode(signature, maxlinelen=65)
+ )
+ del sig_part["MIME-Version"]
+ m.attach(sig_part)
+
+ fp = io.BytesIO()
+ g = email.generator.BytesGenerator(
+ fp,
+ maxheaderlen=0,
+ mangle_from_=False,
+ policy=m.policy.clone(linesep="\r\n"),
+ )
+ g.flatten(m)
+ return fp.getvalue()
+
+
+class OpenSSLMimePart(email.message.MIMEPart):
+ # A MIMEPart subclass that replicates OpenSSL's behavior of not including
+ # a newline if there are no headers.
+ def _write_headers(self, generator) -> None:
+ if list(self.raw_items()):
+ generator._write_headers(self)
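+
+
+# Illustrative usage (editor's sketch; cert and private_key are
+# placeholders for objects loaded elsewhere):
+#
+#   sig = (
+#       PKCS7SignatureBuilder()
+#       .set_data(b"data to sign")
+#       .add_signer(cert, private_key, hashes.SHA256())
+#       .sign(serialization.Encoding.DER, [])
+#   )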
diff --git a/billinglayer/python/cryptography/hazmat/primitives/serialization/ssh.py b/billinglayer/python/cryptography/hazmat/primitives/serialization/ssh.py
new file mode 100644
index 0000000..35e53c1
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/serialization/ssh.py
@@ -0,0 +1,1534 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import binascii
+import enum
+import os
+import re
+import typing
+import warnings
+from base64 import encodebytes as _base64_encode
+from dataclasses import dataclass
+
+from cryptography import utils
+from cryptography.exceptions import UnsupportedAlgorithm
+from cryptography.hazmat.primitives import hashes
+from cryptography.hazmat.primitives.asymmetric import (
+ dsa,
+ ec,
+ ed25519,
+ padding,
+ rsa,
+)
+from cryptography.hazmat.primitives.asymmetric import utils as asym_utils
+from cryptography.hazmat.primitives.ciphers import (
+ AEADDecryptionContext,
+ Cipher,
+ algorithms,
+ modes,
+)
+from cryptography.hazmat.primitives.serialization import (
+ Encoding,
+ KeySerializationEncryption,
+ NoEncryption,
+ PrivateFormat,
+ PublicFormat,
+ _KeySerializationEncryption,
+)
+
+try:
+ from bcrypt import kdf as _bcrypt_kdf
+
+ _bcrypt_supported = True
+except ImportError:
+ _bcrypt_supported = False
+
+ def _bcrypt_kdf(
+ password: bytes,
+ salt: bytes,
+ desired_key_bytes: int,
+ rounds: int,
+ ignore_few_rounds: bool = False,
+ ) -> bytes:
+ raise UnsupportedAlgorithm("Need bcrypt module")
+
+
+_SSH_ED25519 = b"ssh-ed25519"
+_SSH_RSA = b"ssh-rsa"
+_SSH_DSA = b"ssh-dss"
+_ECDSA_NISTP256 = b"ecdsa-sha2-nistp256"
+_ECDSA_NISTP384 = b"ecdsa-sha2-nistp384"
+_ECDSA_NISTP521 = b"ecdsa-sha2-nistp521"
+_CERT_SUFFIX = b"-cert-v01@openssh.com"
+
+# These are not key types, only algorithms, so they cannot appear
+# as a public key type
+_SSH_RSA_SHA256 = b"rsa-sha2-256"
+_SSH_RSA_SHA512 = b"rsa-sha2-512"
+
+_SSH_PUBKEY_RC = re.compile(rb"\A(\S+)[ \t]+(\S+)")
+_SK_MAGIC = b"openssh-key-v1\0"
+_SK_START = b"-----BEGIN OPENSSH PRIVATE KEY-----"
+_SK_END = b"-----END OPENSSH PRIVATE KEY-----"
+_BCRYPT = b"bcrypt"
+_NONE = b"none"
+_DEFAULT_CIPHER = b"aes256-ctr"
+_DEFAULT_ROUNDS = 16
+
+# re is the only way to work on bytes-like data
+_PEM_RC = re.compile(_SK_START + b"(.*?)" + _SK_END, re.DOTALL)
+
+# padding for max blocksize
+_PADDING = memoryview(bytearray(range(1, 1 + 16)))
+
+
+@dataclass
+class _SSHCipher:
+ alg: typing.Type[algorithms.AES]
+ key_len: int
+ mode: typing.Union[
+ typing.Type[modes.CTR],
+ typing.Type[modes.CBC],
+ typing.Type[modes.GCM],
+ ]
+ block_len: int
+ iv_len: int
+ tag_len: typing.Optional[int]
+ is_aead: bool
+
+
+# ciphers that are actually used in key wrapping
+_SSH_CIPHERS: typing.Dict[bytes, _SSHCipher] = {
+ b"aes256-ctr": _SSHCipher(
+ alg=algorithms.AES,
+ key_len=32,
+ mode=modes.CTR,
+ block_len=16,
+ iv_len=16,
+ tag_len=None,
+ is_aead=False,
+ ),
+ b"aes256-cbc": _SSHCipher(
+ alg=algorithms.AES,
+ key_len=32,
+ mode=modes.CBC,
+ block_len=16,
+ iv_len=16,
+ tag_len=None,
+ is_aead=False,
+ ),
+ b"aes256-gcm@openssh.com": _SSHCipher(
+ alg=algorithms.AES,
+ key_len=32,
+ mode=modes.GCM,
+ block_len=16,
+ iv_len=12,
+ tag_len=16,
+ is_aead=True,
+ ),
+}
+
+# map local curve name to key type
+_ECDSA_KEY_TYPE = {
+ "secp256r1": _ECDSA_NISTP256,
+ "secp384r1": _ECDSA_NISTP384,
+ "secp521r1": _ECDSA_NISTP521,
+}
+
+
+def _get_ssh_key_type(
+ key: typing.Union[SSHPrivateKeyTypes, SSHPublicKeyTypes]
+) -> bytes:
+ if isinstance(key, ec.EllipticCurvePrivateKey):
+ key_type = _ecdsa_key_type(key.public_key())
+ elif isinstance(key, ec.EllipticCurvePublicKey):
+ key_type = _ecdsa_key_type(key)
+ elif isinstance(key, (rsa.RSAPrivateKey, rsa.RSAPublicKey)):
+ key_type = _SSH_RSA
+ elif isinstance(key, (dsa.DSAPrivateKey, dsa.DSAPublicKey)):
+ key_type = _SSH_DSA
+ elif isinstance(
+ key, (ed25519.Ed25519PrivateKey, ed25519.Ed25519PublicKey)
+ ):
+ key_type = _SSH_ED25519
+ else:
+ raise ValueError("Unsupported key type")
+
+ return key_type
+
+
+def _ecdsa_key_type(public_key: ec.EllipticCurvePublicKey) -> bytes:
+ """Return SSH key_type and curve_name for private key."""
+ curve = public_key.curve
+ if curve.name not in _ECDSA_KEY_TYPE:
+ raise ValueError(
+ f"Unsupported curve for ssh private key: {curve.name!r}"
+ )
+ return _ECDSA_KEY_TYPE[curve.name]
+
+
+def _ssh_pem_encode(
+ data: bytes,
+ prefix: bytes = _SK_START + b"\n",
+ suffix: bytes = _SK_END + b"\n",
+) -> bytes:
+ return b"".join([prefix, _base64_encode(data), suffix])
+
+
+def _check_block_size(data: bytes, block_len: int) -> None:
+ """Require data to be full blocks"""
+ if not data or len(data) % block_len != 0:
+ raise ValueError("Corrupt data: missing padding")
+
+
+def _check_empty(data: bytes) -> None:
+ """All data should have been parsed."""
+ if data:
+ raise ValueError("Corrupt data: unparsed data")
+
+
+def _init_cipher(
+ ciphername: bytes,
+ password: typing.Optional[bytes],
+ salt: bytes,
+ rounds: int,
+) -> Cipher[typing.Union[modes.CBC, modes.CTR, modes.GCM]]:
+ """Generate key + iv and return cipher."""
+ if not password:
+ raise ValueError("Key is password-protected.")
+
+ ciph = _SSH_CIPHERS[ciphername]
+ seed = _bcrypt_kdf(
+ password, salt, ciph.key_len + ciph.iv_len, rounds, True
+ )
+ return Cipher(
+ ciph.alg(seed[: ciph.key_len]),
+ ciph.mode(seed[ciph.key_len :]),
+ )
+
+
+def _get_u32(data: memoryview) -> typing.Tuple[int, memoryview]:
+ """Uint32"""
+ if len(data) < 4:
+ raise ValueError("Invalid data")
+ return int.from_bytes(data[:4], byteorder="big"), data[4:]
+
+
+def _get_u64(data: memoryview) -> typing.Tuple[int, memoryview]:
+ """Uint64"""
+ if len(data) < 8:
+ raise ValueError("Invalid data")
+ return int.from_bytes(data[:8], byteorder="big"), data[8:]
+
+
+def _get_sshstr(data: memoryview) -> typing.Tuple[memoryview, memoryview]:
+ """Bytes with u32 length prefix"""
+ n, data = _get_u32(data)
+ if n > len(data):
+ raise ValueError("Invalid data")
+ return data[:n], data[n:]
+
+
+def _get_mpint(data: memoryview) -> typing.Tuple[int, memoryview]:
+ """Big integer."""
+ val, data = _get_sshstr(data)
+ if val and val[0] > 0x7F:
+ raise ValueError("Invalid data")
+ return int.from_bytes(val, "big"), data
+
+
+def _to_mpint(val: int) -> bytes:
+ """Storage format for signed bigint."""
+ if val < 0:
+ raise ValueError("negative mpint not allowed")
+ if not val:
+ return b""
+ nbytes = (val.bit_length() + 8) // 8
+ return utils.int_to_bytes(val, nbytes)
+
+
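+def _mpint_selfcheck() -> None:
+    # Hedged sketch, not called by the library: mpints are big-endian with
+    # a leading zero byte whenever the top bit would otherwise be set.
+    assert _to_mpint(0x7F) == b"\x7f"
+    assert _to_mpint(0x80) == b"\x00\x80"
+    val, rest = _get_mpint(memoryview(b"\x00\x00\x00\x02\x00\x80"))
+    assert val == 0x80 and not rest
+
+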
+class _FragList:
+ """Build recursive structure without data copy."""
+
+ flist: typing.List[bytes]
+
+ def __init__(
+ self, init: typing.Optional[typing.List[bytes]] = None
+ ) -> None:
+ self.flist = []
+ if init:
+ self.flist.extend(init)
+
+ def put_raw(self, val: bytes) -> None:
+ """Add plain bytes"""
+ self.flist.append(val)
+
+ def put_u32(self, val: int) -> None:
+ """Big-endian uint32"""
+ self.flist.append(val.to_bytes(length=4, byteorder="big"))
+
+ def put_u64(self, val: int) -> None:
+ """Big-endian uint64"""
+ self.flist.append(val.to_bytes(length=8, byteorder="big"))
+
+ def put_sshstr(self, val: typing.Union[bytes, _FragList]) -> None:
+ """Bytes prefixed with u32 length"""
+ if isinstance(val, (bytes, memoryview, bytearray)):
+ self.put_u32(len(val))
+ self.flist.append(val)
+ else:
+ self.put_u32(val.size())
+ self.flist.extend(val.flist)
+
+ def put_mpint(self, val: int) -> None:
+ """Big-endian bigint prefixed with u32 length"""
+ self.put_sshstr(_to_mpint(val))
+
+ def size(self) -> int:
+ """Current number of bytes"""
+ return sum(map(len, self.flist))
+
+ def render(self, dstbuf: memoryview, pos: int = 0) -> int:
+ """Write into bytearray"""
+ for frag in self.flist:
+ flen = len(frag)
+ start, pos = pos, pos + flen
+ dstbuf[start:pos] = frag
+ return pos
+
+ def tobytes(self) -> bytes:
+ """Return as bytes"""
+ buf = memoryview(bytearray(self.size()))
+ self.render(buf)
+ return buf.tobytes()
+
+
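+def _fraglist_selfcheck() -> None:
+    # Hedged sketch, not called by the library: fragments are collected
+    # without copying and rendered in a single allocation by tobytes().
+    f = _FragList()
+    f.put_u32(7)
+    f.put_sshstr(b"abc")
+    assert f.size() == 11
+    assert f.tobytes() == b"\x00\x00\x00\x07\x00\x00\x00\x03abc"
+
+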
+class _SSHFormatRSA:
+ """Format for RSA keys.
+
+ Public:
+ mpint e, n
+ Private:
+ mpint n, e, d, iqmp, p, q
+ """
+
+ def get_public(self, data: memoryview):
+ """RSA public fields"""
+ e, data = _get_mpint(data)
+ n, data = _get_mpint(data)
+ return (e, n), data
+
+ def load_public(
+ self, data: memoryview
+ ) -> typing.Tuple[rsa.RSAPublicKey, memoryview]:
+ """Make RSA public key from data."""
+ (e, n), data = self.get_public(data)
+ public_numbers = rsa.RSAPublicNumbers(e, n)
+ public_key = public_numbers.public_key()
+ return public_key, data
+
+ def load_private(
+ self, data: memoryview, pubfields
+ ) -> typing.Tuple[rsa.RSAPrivateKey, memoryview]:
+ """Make RSA private key from data."""
+ n, data = _get_mpint(data)
+ e, data = _get_mpint(data)
+ d, data = _get_mpint(data)
+ iqmp, data = _get_mpint(data)
+ p, data = _get_mpint(data)
+ q, data = _get_mpint(data)
+
+ if (e, n) != pubfields:
+ raise ValueError("Corrupt data: rsa field mismatch")
+ dmp1 = rsa.rsa_crt_dmp1(d, p)
+ dmq1 = rsa.rsa_crt_dmq1(d, q)
+ public_numbers = rsa.RSAPublicNumbers(e, n)
+ private_numbers = rsa.RSAPrivateNumbers(
+ p, q, d, dmp1, dmq1, iqmp, public_numbers
+ )
+ private_key = private_numbers.private_key()
+ return private_key, data
+
+ def encode_public(
+ self, public_key: rsa.RSAPublicKey, f_pub: _FragList
+ ) -> None:
+ """Write RSA public key"""
+ pubn = public_key.public_numbers()
+ f_pub.put_mpint(pubn.e)
+ f_pub.put_mpint(pubn.n)
+
+ def encode_private(
+ self, private_key: rsa.RSAPrivateKey, f_priv: _FragList
+ ) -> None:
+ """Write RSA private key"""
+ private_numbers = private_key.private_numbers()
+ public_numbers = private_numbers.public_numbers
+
+ f_priv.put_mpint(public_numbers.n)
+ f_priv.put_mpint(public_numbers.e)
+
+ f_priv.put_mpint(private_numbers.d)
+ f_priv.put_mpint(private_numbers.iqmp)
+ f_priv.put_mpint(private_numbers.p)
+ f_priv.put_mpint(private_numbers.q)
+
+
+class _SSHFormatDSA:
+ """Format for DSA keys.
+
+ Public:
+ mpint p, q, g, y
+ Private:
+ mpint p, q, g, y, x
+ """
+
+ def get_public(
+ self, data: memoryview
+ ) -> typing.Tuple[typing.Tuple, memoryview]:
+ """DSA public fields"""
+ p, data = _get_mpint(data)
+ q, data = _get_mpint(data)
+ g, data = _get_mpint(data)
+ y, data = _get_mpint(data)
+ return (p, q, g, y), data
+
+ def load_public(
+ self, data: memoryview
+ ) -> typing.Tuple[dsa.DSAPublicKey, memoryview]:
+ """Make DSA public key from data."""
+ (p, q, g, y), data = self.get_public(data)
+ parameter_numbers = dsa.DSAParameterNumbers(p, q, g)
+ public_numbers = dsa.DSAPublicNumbers(y, parameter_numbers)
+ self._validate(public_numbers)
+ public_key = public_numbers.public_key()
+ return public_key, data
+
+ def load_private(
+ self, data: memoryview, pubfields
+ ) -> typing.Tuple[dsa.DSAPrivateKey, memoryview]:
+ """Make DSA private key from data."""
+ (p, q, g, y), data = self.get_public(data)
+ x, data = _get_mpint(data)
+
+ if (p, q, g, y) != pubfields:
+ raise ValueError("Corrupt data: dsa field mismatch")
+ parameter_numbers = dsa.DSAParameterNumbers(p, q, g)
+ public_numbers = dsa.DSAPublicNumbers(y, parameter_numbers)
+ self._validate(public_numbers)
+ private_numbers = dsa.DSAPrivateNumbers(x, public_numbers)
+ private_key = private_numbers.private_key()
+ return private_key, data
+
+ def encode_public(
+ self, public_key: dsa.DSAPublicKey, f_pub: _FragList
+ ) -> None:
+ """Write DSA public key"""
+ public_numbers = public_key.public_numbers()
+ parameter_numbers = public_numbers.parameter_numbers
+ self._validate(public_numbers)
+
+ f_pub.put_mpint(parameter_numbers.p)
+ f_pub.put_mpint(parameter_numbers.q)
+ f_pub.put_mpint(parameter_numbers.g)
+ f_pub.put_mpint(public_numbers.y)
+
+ def encode_private(
+ self, private_key: dsa.DSAPrivateKey, f_priv: _FragList
+ ) -> None:
+ """Write DSA private key"""
+ self.encode_public(private_key.public_key(), f_priv)
+ f_priv.put_mpint(private_key.private_numbers().x)
+
+ def _validate(self, public_numbers: dsa.DSAPublicNumbers) -> None:
+ parameter_numbers = public_numbers.parameter_numbers
+ if parameter_numbers.p.bit_length() != 1024:
+ raise ValueError("SSH supports only 1024 bit DSA keys")
+
+
+class _SSHFormatECDSA:
+ """Format for ECDSA keys.
+
+ Public:
+ str curve
+ bytes point
+ Private:
+ str curve
+ bytes point
+ mpint secret
+ """
+
+ def __init__(self, ssh_curve_name: bytes, curve: ec.EllipticCurve):
+ self.ssh_curve_name = ssh_curve_name
+ self.curve = curve
+
+ def get_public(
+ self, data: memoryview
+ ) -> typing.Tuple[typing.Tuple, memoryview]:
+ """ECDSA public fields"""
+ curve, data = _get_sshstr(data)
+ point, data = _get_sshstr(data)
+ if curve != self.ssh_curve_name:
+ raise ValueError("Curve name mismatch")
+ if point[0] != 4:
+ raise NotImplementedError("Need uncompressed point")
+ return (curve, point), data
+
+ def load_public(
+ self, data: memoryview
+ ) -> typing.Tuple[ec.EllipticCurvePublicKey, memoryview]:
+ """Make ECDSA public key from data."""
+ (curve_name, point), data = self.get_public(data)
+ public_key = ec.EllipticCurvePublicKey.from_encoded_point(
+ self.curve, point.tobytes()
+ )
+ return public_key, data
+
+ def load_private(
+ self, data: memoryview, pubfields
+ ) -> typing.Tuple[ec.EllipticCurvePrivateKey, memoryview]:
+ """Make ECDSA private key from data."""
+ (curve_name, point), data = self.get_public(data)
+ secret, data = _get_mpint(data)
+
+ if (curve_name, point) != pubfields:
+ raise ValueError("Corrupt data: ecdsa field mismatch")
+ private_key = ec.derive_private_key(secret, self.curve)
+ return private_key, data
+
+ def encode_public(
+ self, public_key: ec.EllipticCurvePublicKey, f_pub: _FragList
+ ) -> None:
+ """Write ECDSA public key"""
+ point = public_key.public_bytes(
+ Encoding.X962, PublicFormat.UncompressedPoint
+ )
+ f_pub.put_sshstr(self.ssh_curve_name)
+ f_pub.put_sshstr(point)
+
+ def encode_private(
+ self, private_key: ec.EllipticCurvePrivateKey, f_priv: _FragList
+ ) -> None:
+ """Write ECDSA private key"""
+ public_key = private_key.public_key()
+ private_numbers = private_key.private_numbers()
+
+ self.encode_public(public_key, f_priv)
+ f_priv.put_mpint(private_numbers.private_value)
+
+
+class _SSHFormatEd25519:
+ """Format for Ed25519 keys.
+
+ Public:
+ bytes point
+ Private:
+ bytes point
+ bytes secret_and_point
+ """
+
+ def get_public(
+ self, data: memoryview
+ ) -> typing.Tuple[typing.Tuple, memoryview]:
+ """Ed25519 public fields"""
+ point, data = _get_sshstr(data)
+ return (point,), data
+
+ def load_public(
+ self, data: memoryview
+ ) -> typing.Tuple[ed25519.Ed25519PublicKey, memoryview]:
+ """Make Ed25519 public key from data."""
+ (point,), data = self.get_public(data)
+ public_key = ed25519.Ed25519PublicKey.from_public_bytes(
+ point.tobytes()
+ )
+ return public_key, data
+
+ def load_private(
+ self, data: memoryview, pubfields
+ ) -> typing.Tuple[ed25519.Ed25519PrivateKey, memoryview]:
+ """Make Ed25519 private key from data."""
+ (point,), data = self.get_public(data)
+ keypair, data = _get_sshstr(data)
+
+ secret = keypair[:32]
+ point2 = keypair[32:]
+ if point != point2 or (point,) != pubfields:
+ raise ValueError("Corrupt data: ed25519 field mismatch")
+ private_key = ed25519.Ed25519PrivateKey.from_private_bytes(secret)
+ return private_key, data
+
+ def encode_public(
+ self, public_key: ed25519.Ed25519PublicKey, f_pub: _FragList
+ ) -> None:
+ """Write Ed25519 public key"""
+ raw_public_key = public_key.public_bytes(
+ Encoding.Raw, PublicFormat.Raw
+ )
+ f_pub.put_sshstr(raw_public_key)
+
+ def encode_private(
+ self, private_key: ed25519.Ed25519PrivateKey, f_priv: _FragList
+ ) -> None:
+ """Write Ed25519 private key"""
+ public_key = private_key.public_key()
+ raw_private_key = private_key.private_bytes(
+ Encoding.Raw, PrivateFormat.Raw, NoEncryption()
+ )
+ raw_public_key = public_key.public_bytes(
+ Encoding.Raw, PublicFormat.Raw
+ )
+ f_keypair = _FragList([raw_private_key, raw_public_key])
+
+ self.encode_public(public_key, f_priv)
+ f_priv.put_sshstr(f_keypair)
+
+
+_KEY_FORMATS = {
+ _SSH_RSA: _SSHFormatRSA(),
+ _SSH_DSA: _SSHFormatDSA(),
+ _SSH_ED25519: _SSHFormatEd25519(),
+ _ECDSA_NISTP256: _SSHFormatECDSA(b"nistp256", ec.SECP256R1()),
+ _ECDSA_NISTP384: _SSHFormatECDSA(b"nistp384", ec.SECP384R1()),
+ _ECDSA_NISTP521: _SSHFormatECDSA(b"nistp521", ec.SECP521R1()),
+}
+
+
+def _lookup_kformat(key_type: bytes):
+ """Return valid format or throw error"""
+ if not isinstance(key_type, bytes):
+ key_type = memoryview(key_type).tobytes()
+ if key_type in _KEY_FORMATS:
+ return _KEY_FORMATS[key_type]
+ raise UnsupportedAlgorithm(f"Unsupported key type: {key_type!r}")
+
+
+SSHPrivateKeyTypes = typing.Union[
+ ec.EllipticCurvePrivateKey,
+ rsa.RSAPrivateKey,
+ dsa.DSAPrivateKey,
+ ed25519.Ed25519PrivateKey,
+]
+
+
+def load_ssh_private_key(
+ data: bytes,
+ password: typing.Optional[bytes],
+ backend: typing.Any = None,
+) -> SSHPrivateKeyTypes:
+ """Load private key from OpenSSH custom encoding."""
+ utils._check_byteslike("data", data)
+ if password is not None:
+ utils._check_bytes("password", password)
+
+ m = _PEM_RC.search(data)
+ if not m:
+ raise ValueError("Not OpenSSH private key format")
+ p1 = m.start(1)
+ p2 = m.end(1)
+ data = binascii.a2b_base64(memoryview(data)[p1:p2])
+ if not data.startswith(_SK_MAGIC):
+ raise ValueError("Not OpenSSH private key format")
+ data = memoryview(data)[len(_SK_MAGIC) :]
+
+ # parse header
+ ciphername, data = _get_sshstr(data)
+ kdfname, data = _get_sshstr(data)
+ kdfoptions, data = _get_sshstr(data)
+ nkeys, data = _get_u32(data)
+ if nkeys != 1:
+ raise ValueError("Only one key supported")
+
+ # load public key data
+ pubdata, data = _get_sshstr(data)
+ pub_key_type, pubdata = _get_sshstr(pubdata)
+ kformat = _lookup_kformat(pub_key_type)
+ pubfields, pubdata = kformat.get_public(pubdata)
+ _check_empty(pubdata)
+
+ if (ciphername, kdfname) != (_NONE, _NONE):
+ ciphername_bytes = ciphername.tobytes()
+ if ciphername_bytes not in _SSH_CIPHERS:
+ raise UnsupportedAlgorithm(
+ f"Unsupported cipher: {ciphername_bytes!r}"
+ )
+ if kdfname != _BCRYPT:
+ raise UnsupportedAlgorithm(f"Unsupported KDF: {kdfname!r}")
+ blklen = _SSH_CIPHERS[ciphername_bytes].block_len
+ tag_len = _SSH_CIPHERS[ciphername_bytes].tag_len
+ # load secret data
+ edata, data = _get_sshstr(data)
+ # see https://bugzilla.mindrot.org/show_bug.cgi?id=3553 for
+ # information about how OpenSSH handles AEAD tags
+ if _SSH_CIPHERS[ciphername_bytes].is_aead:
+ tag = bytes(data)
+ if len(tag) != tag_len:
+ raise ValueError("Corrupt data: invalid tag length for cipher")
+ else:
+ _check_empty(data)
+ _check_block_size(edata, blklen)
+ salt, kbuf = _get_sshstr(kdfoptions)
+ rounds, kbuf = _get_u32(kbuf)
+ _check_empty(kbuf)
+ ciph = _init_cipher(ciphername_bytes, password, salt.tobytes(), rounds)
+ dec = ciph.decryptor()
+ edata = memoryview(dec.update(edata))
+ if _SSH_CIPHERS[ciphername_bytes].is_aead:
+ assert isinstance(dec, AEADDecryptionContext)
+ _check_empty(dec.finalize_with_tag(tag))
+ else:
+ # _check_block_size requires data to be a full block so there
+ # should be no output from finalize
+ _check_empty(dec.finalize())
+ else:
+ # load secret data
+ edata, data = _get_sshstr(data)
+ _check_empty(data)
+ blklen = 8
+ _check_block_size(edata, blklen)
+ ck1, edata = _get_u32(edata)
+ ck2, edata = _get_u32(edata)
+ if ck1 != ck2:
+ raise ValueError("Corrupt data: broken checksum")
+
+ # load per-key struct
+ key_type, edata = _get_sshstr(edata)
+ if key_type != pub_key_type:
+ raise ValueError("Corrupt data: key type mismatch")
+ private_key, edata = kformat.load_private(edata, pubfields)
+ comment, edata = _get_sshstr(edata)
+
+ # Yes, SSH does the padding check *after* all other parsing is done;
+ # we need to follow suit, as it writes zero-byte padding too.
+ if edata != _PADDING[: len(edata)]:
+ raise ValueError("Corrupt data: invalid padding")
+
+ if isinstance(private_key, dsa.DSAPrivateKey):
+ warnings.warn(
+ "SSH DSA keys are deprecated and will be removed in a future "
+ "release.",
+ utils.DeprecatedIn40,
+ stacklevel=2,
+ )
+
+ return private_key
+
+
+def _serialize_ssh_private_key(
+ private_key: SSHPrivateKeyTypes,
+ password: bytes,
+ encryption_algorithm: KeySerializationEncryption,
+) -> bytes:
+ """Serialize private key with OpenSSH custom encoding."""
+ utils._check_bytes("password", password)
+ if isinstance(private_key, dsa.DSAPrivateKey):
+ warnings.warn(
+ "SSH DSA key support is deprecated and will be "
+ "removed in a future release",
+ utils.DeprecatedIn40,
+ stacklevel=4,
+ )
+
+ key_type = _get_ssh_key_type(private_key)
+ kformat = _lookup_kformat(key_type)
+
+ # setup parameters
+ f_kdfoptions = _FragList()
+ if password:
+ ciphername = _DEFAULT_CIPHER
+ blklen = _SSH_CIPHERS[ciphername].block_len
+ kdfname = _BCRYPT
+ rounds = _DEFAULT_ROUNDS
+ if (
+ isinstance(encryption_algorithm, _KeySerializationEncryption)
+ and encryption_algorithm._kdf_rounds is not None
+ ):
+ rounds = encryption_algorithm._kdf_rounds
+ salt = os.urandom(16)
+ f_kdfoptions.put_sshstr(salt)
+ f_kdfoptions.put_u32(rounds)
+ ciph = _init_cipher(ciphername, password, salt, rounds)
+ else:
+ ciphername = kdfname = _NONE
+ blklen = 8
+ ciph = None
+ nkeys = 1
+ checkval = os.urandom(4)
+ comment = b""
+
+ # encode public and private parts together
+ f_public_key = _FragList()
+ f_public_key.put_sshstr(key_type)
+ kformat.encode_public(private_key.public_key(), f_public_key)
+
+ f_secrets = _FragList([checkval, checkval])
+ f_secrets.put_sshstr(key_type)
+ kformat.encode_private(private_key, f_secrets)
+ f_secrets.put_sshstr(comment)
+ f_secrets.put_raw(_PADDING[: blklen - (f_secrets.size() % blklen)])
+
+ # top-level structure
+ f_main = _FragList()
+ f_main.put_raw(_SK_MAGIC)
+ f_main.put_sshstr(ciphername)
+ f_main.put_sshstr(kdfname)
+ f_main.put_sshstr(f_kdfoptions)
+ f_main.put_u32(nkeys)
+ f_main.put_sshstr(f_public_key)
+ f_main.put_sshstr(f_secrets)
+
+ # copy result into a bytearray
+ slen = f_secrets.size()
+ mlen = f_main.size()
+ buf = memoryview(bytearray(mlen + blklen))
+ f_main.render(buf)
+ ofs = mlen - slen
+
+ # encrypt in-place
+ if ciph is not None:
+ ciph.encryptor().update_into(buf[ofs:mlen], buf[ofs:])
+
+ return _ssh_pem_encode(buf[:mlen])
+
+
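+def _openssh_private_key_selfcheck() -> None:
+    # Hedged sketch, not called by the library: round-trip a fresh Ed25519
+    # key through the OpenSSH encoding implemented above.
+    key = ed25519.Ed25519PrivateKey.generate()
+    blob = key.private_bytes(
+        Encoding.PEM, PrivateFormat.OpenSSH, NoEncryption()
+    )
+    loaded = load_ssh_private_key(blob, password=None)
+    assert isinstance(loaded, ed25519.Ed25519PrivateKey)
+
+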
+SSHPublicKeyTypes = typing.Union[
+ ec.EllipticCurvePublicKey,
+ rsa.RSAPublicKey,
+ dsa.DSAPublicKey,
+ ed25519.Ed25519PublicKey,
+]
+
+SSHCertPublicKeyTypes = typing.Union[
+ ec.EllipticCurvePublicKey,
+ rsa.RSAPublicKey,
+ ed25519.Ed25519PublicKey,
+]
+
+
+class SSHCertificateType(enum.Enum):
+ USER = 1
+ HOST = 2
+
+
+class SSHCertificate:
+ def __init__(
+ self,
+ _nonce: memoryview,
+ _public_key: SSHPublicKeyTypes,
+ _serial: int,
+ _cctype: int,
+ _key_id: memoryview,
+ _valid_principals: typing.List[bytes],
+ _valid_after: int,
+ _valid_before: int,
+ _critical_options: typing.Dict[bytes, bytes],
+ _extensions: typing.Dict[bytes, bytes],
+ _sig_type: memoryview,
+ _sig_key: memoryview,
+ _inner_sig_type: memoryview,
+ _signature: memoryview,
+ _tbs_cert_body: memoryview,
+ _cert_key_type: bytes,
+ _cert_body: memoryview,
+ ):
+ self._nonce = _nonce
+ self._public_key = _public_key
+ self._serial = _serial
+ try:
+ self._type = SSHCertificateType(_cctype)
+ except ValueError:
+ raise ValueError("Invalid certificate type")
+ self._key_id = _key_id
+ self._valid_principals = _valid_principals
+ self._valid_after = _valid_after
+ self._valid_before = _valid_before
+ self._critical_options = _critical_options
+ self._extensions = _extensions
+ self._sig_type = _sig_type
+ self._sig_key = _sig_key
+ self._inner_sig_type = _inner_sig_type
+ self._signature = _signature
+ self._cert_key_type = _cert_key_type
+ self._cert_body = _cert_body
+ self._tbs_cert_body = _tbs_cert_body
+
+ @property
+ def nonce(self) -> bytes:
+ return bytes(self._nonce)
+
+ def public_key(self) -> SSHCertPublicKeyTypes:
+ # make mypy happy until we remove DSA support entirely; at that
+ # point the underlying union won't contain a disallowed type
+ return typing.cast(SSHCertPublicKeyTypes, self._public_key)
+
+ @property
+ def serial(self) -> int:
+ return self._serial
+
+ @property
+ def type(self) -> SSHCertificateType:
+ return self._type
+
+ @property
+ def key_id(self) -> bytes:
+ return bytes(self._key_id)
+
+ @property
+ def valid_principals(self) -> typing.List[bytes]:
+ return self._valid_principals
+
+ @property
+ def valid_before(self) -> int:
+ return self._valid_before
+
+ @property
+ def valid_after(self) -> int:
+ return self._valid_after
+
+ @property
+ def critical_options(self) -> typing.Dict[bytes, bytes]:
+ return self._critical_options
+
+ @property
+ def extensions(self) -> typing.Dict[bytes, bytes]:
+ return self._extensions
+
+ def signature_key(self) -> SSHCertPublicKeyTypes:
+ sigformat = _lookup_kformat(self._sig_type)
+ signature_key, sigkey_rest = sigformat.load_public(self._sig_key)
+ _check_empty(sigkey_rest)
+ return signature_key
+
+ def public_bytes(self) -> bytes:
+ return (
+ bytes(self._cert_key_type)
+ + b" "
+ + binascii.b2a_base64(bytes(self._cert_body), newline=False)
+ )
+
+ def verify_cert_signature(self) -> None:
+ signature_key = self.signature_key()
+ if isinstance(signature_key, ed25519.Ed25519PublicKey):
+ signature_key.verify(
+ bytes(self._signature), bytes(self._tbs_cert_body)
+ )
+ elif isinstance(signature_key, ec.EllipticCurvePublicKey):
+ # The signature is encoded as a pair of big-endian integers
+ r, data = _get_mpint(self._signature)
+ s, data = _get_mpint(data)
+ _check_empty(data)
+ computed_sig = asym_utils.encode_dss_signature(r, s)
+ hash_alg = _get_ec_hash_alg(signature_key.curve)
+ signature_key.verify(
+ computed_sig, bytes(self._tbs_cert_body), ec.ECDSA(hash_alg)
+ )
+ else:
+ assert isinstance(signature_key, rsa.RSAPublicKey)
+ if self._inner_sig_type == _SSH_RSA:
+ hash_alg = hashes.SHA1()
+ elif self._inner_sig_type == _SSH_RSA_SHA256:
+ hash_alg = hashes.SHA256()
+ else:
+ assert self._inner_sig_type == _SSH_RSA_SHA512
+ hash_alg = hashes.SHA512()
+ signature_key.verify(
+ bytes(self._signature),
+ bytes(self._tbs_cert_body),
+ padding.PKCS1v15(),
+ hash_alg,
+ )
+
+
+def _get_ec_hash_alg(curve: ec.EllipticCurve) -> hashes.HashAlgorithm:
+ if isinstance(curve, ec.SECP256R1):
+ return hashes.SHA256()
+ elif isinstance(curve, ec.SECP384R1):
+ return hashes.SHA384()
+ else:
+ assert isinstance(curve, ec.SECP521R1)
+ return hashes.SHA512()
+
+
+def _load_ssh_public_identity(
+ data: bytes,
+ _legacy_dsa_allowed=False,
+) -> typing.Union[SSHCertificate, SSHPublicKeyTypes]:
+ utils._check_byteslike("data", data)
+
+ m = _SSH_PUBKEY_RC.match(data)
+ if not m:
+ raise ValueError("Invalid line format")
+ key_type = orig_key_type = m.group(1)
+ key_body = m.group(2)
+ with_cert = False
+ if key_type.endswith(_CERT_SUFFIX):
+ with_cert = True
+ key_type = key_type[: -len(_CERT_SUFFIX)]
+ if key_type == _SSH_DSA and not _legacy_dsa_allowed:
+ raise UnsupportedAlgorithm(
+ "DSA keys aren't supported in SSH certificates"
+ )
+ kformat = _lookup_kformat(key_type)
+
+ try:
+ rest = memoryview(binascii.a2b_base64(key_body))
+ except (TypeError, binascii.Error):
+ raise ValueError("Invalid format")
+
+ if with_cert:
+ cert_body = rest
+ inner_key_type, rest = _get_sshstr(rest)
+ if inner_key_type != orig_key_type:
+ raise ValueError("Invalid key format")
+ if with_cert:
+ nonce, rest = _get_sshstr(rest)
+ public_key, rest = kformat.load_public(rest)
+ if with_cert:
+ serial, rest = _get_u64(rest)
+ cctype, rest = _get_u32(rest)
+ key_id, rest = _get_sshstr(rest)
+ principals, rest = _get_sshstr(rest)
+ valid_principals = []
+ while principals:
+ principal, principals = _get_sshstr(principals)
+ valid_principals.append(bytes(principal))
+ valid_after, rest = _get_u64(rest)
+ valid_before, rest = _get_u64(rest)
+ crit_options, rest = _get_sshstr(rest)
+ critical_options = _parse_exts_opts(crit_options)
+ exts, rest = _get_sshstr(rest)
+ extensions = _parse_exts_opts(exts)
+ # Get the reserved field, which is unused.
+ _, rest = _get_sshstr(rest)
+ sig_key_raw, rest = _get_sshstr(rest)
+ sig_type, sig_key = _get_sshstr(sig_key_raw)
+ if sig_type == _SSH_DSA and not _legacy_dsa_allowed:
+ raise UnsupportedAlgorithm(
+ "DSA signatures aren't supported in SSH certificates"
+ )
+ # Get the entire cert body minus the trailing signature
+ tbs_cert_body = cert_body[: -len(rest)]
+ signature_raw, rest = _get_sshstr(rest)
+ _check_empty(rest)
+ inner_sig_type, sig_rest = _get_sshstr(signature_raw)
+ # RSA certs can have multiple algorithm types
+ if (
+ sig_type == _SSH_RSA
+ and inner_sig_type
+ not in [_SSH_RSA_SHA256, _SSH_RSA_SHA512, _SSH_RSA]
+ ) or (sig_type != _SSH_RSA and inner_sig_type != sig_type):
+ raise ValueError("Signature key type does not match")
+ signature, sig_rest = _get_sshstr(sig_rest)
+ _check_empty(sig_rest)
+ return SSHCertificate(
+ nonce,
+ public_key,
+ serial,
+ cctype,
+ key_id,
+ valid_principals,
+ valid_after,
+ valid_before,
+ critical_options,
+ extensions,
+ sig_type,
+ sig_key,
+ inner_sig_type,
+ signature,
+ tbs_cert_body,
+ orig_key_type,
+ cert_body,
+ )
+ else:
+ _check_empty(rest)
+ return public_key
+
+
+def load_ssh_public_identity(
+ data: bytes,
+) -> typing.Union[SSHCertificate, SSHPublicKeyTypes]:
+ return _load_ssh_public_identity(data)
+
+
+def _parse_exts_opts(exts_opts: memoryview) -> typing.Dict[bytes, bytes]:
+ result: typing.Dict[bytes, bytes] = {}
+ last_name = None
+ while exts_opts:
+ name, exts_opts = _get_sshstr(exts_opts)
+ bname: bytes = bytes(name)
+ if bname in result:
+ raise ValueError("Duplicate name")
+ if last_name is not None and bname < last_name:
+ raise ValueError("Fields not lexically sorted")
+ value, exts_opts = _get_sshstr(exts_opts)
+ if len(value) > 0:
+ try:
+ value, extra = _get_sshstr(value)
+ except ValueError:
+ warnings.warn(
+ "This certificate has an incorrect encoding for critical "
+ "options or extensions. This will be an exception in "
+ "cryptography 42",
+ utils.DeprecatedIn41,
+ stacklevel=4,
+ )
+ else:
+ if len(extra) > 0:
+ raise ValueError("Unexpected extra data after value")
+ result[bname] = bytes(value)
+ last_name = bname
+ return result
+
+
+def load_ssh_public_key(
+ data: bytes, backend: typing.Any = None
+) -> SSHPublicKeyTypes:
+ cert_or_key = _load_ssh_public_identity(data, _legacy_dsa_allowed=True)
+ public_key: SSHPublicKeyTypes
+ if isinstance(cert_or_key, SSHCertificate):
+ public_key = cert_or_key.public_key()
+ else:
+ public_key = cert_or_key
+
+ if isinstance(public_key, dsa.DSAPublicKey):
+ warnings.warn(
+ "SSH DSA keys are deprecated and will be removed in a future "
+ "release.",
+ utils.DeprecatedIn40,
+ stacklevel=2,
+ )
+ return public_key
+
+
+def serialize_ssh_public_key(public_key: SSHPublicKeyTypes) -> bytes:
+ """One-line public key format for OpenSSH"""
+ if isinstance(public_key, dsa.DSAPublicKey):
+ warnings.warn(
+ "SSH DSA key support is deprecated and will be "
+ "removed in a future release",
+ utils.DeprecatedIn40,
+ stacklevel=4,
+ )
+ key_type = _get_ssh_key_type(public_key)
+ kformat = _lookup_kformat(key_type)
+
+ f_pub = _FragList()
+ f_pub.put_sshstr(key_type)
+ kformat.encode_public(public_key, f_pub)
+
+ pub = binascii.b2a_base64(f_pub.tobytes()).strip()
+ return b"".join([key_type, b" ", pub])
+
+
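+def _ssh_public_key_selfcheck() -> None:
+    # Hedged sketch, not called by the library: one-line OpenSSH public
+    # key round-trip for a fresh P-256 key.
+    public_key = ec.generate_private_key(ec.SECP256R1()).public_key()
+    line = serialize_ssh_public_key(public_key)
+    assert line.startswith(b"ecdsa-sha2-nistp256 ")
+    loaded = load_ssh_public_key(line)
+    assert isinstance(loaded, ec.EllipticCurvePublicKey)
+
+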
+SSHCertPrivateKeyTypes = typing.Union[
+ ec.EllipticCurvePrivateKey,
+ rsa.RSAPrivateKey,
+ ed25519.Ed25519PrivateKey,
+]
+
+
+# This is an undocumented limit enforced in the openssh codebase for sshd and
+# ssh-keygen, but it is undefined in the ssh certificates spec.
+_SSHKEY_CERT_MAX_PRINCIPALS = 256
+
+
+class SSHCertificateBuilder:
+ def __init__(
+ self,
+ _public_key: typing.Optional[SSHCertPublicKeyTypes] = None,
+ _serial: typing.Optional[int] = None,
+ _type: typing.Optional[SSHCertificateType] = None,
+ _key_id: typing.Optional[bytes] = None,
+ _valid_principals: typing.List[bytes] = [],
+ _valid_for_all_principals: bool = False,
+ _valid_before: typing.Optional[int] = None,
+ _valid_after: typing.Optional[int] = None,
+ _critical_options: typing.List[typing.Tuple[bytes, bytes]] = [],
+ _extensions: typing.List[typing.Tuple[bytes, bytes]] = [],
+ ):
+ self._public_key = _public_key
+ self._serial = _serial
+ self._type = _type
+ self._key_id = _key_id
+ self._valid_principals = _valid_principals
+ self._valid_for_all_principals = _valid_for_all_principals
+ self._valid_before = _valid_before
+ self._valid_after = _valid_after
+ self._critical_options = _critical_options
+ self._extensions = _extensions
+
+ def public_key(
+ self, public_key: SSHCertPublicKeyTypes
+ ) -> SSHCertificateBuilder:
+ if not isinstance(
+ public_key,
+ (
+ ec.EllipticCurvePublicKey,
+ rsa.RSAPublicKey,
+ ed25519.Ed25519PublicKey,
+ ),
+ ):
+ raise TypeError("Unsupported key type")
+ if self._public_key is not None:
+ raise ValueError("public_key already set")
+
+ return SSHCertificateBuilder(
+ _public_key=public_key,
+ _serial=self._serial,
+ _type=self._type,
+ _key_id=self._key_id,
+ _valid_principals=self._valid_principals,
+ _valid_for_all_principals=self._valid_for_all_principals,
+ _valid_before=self._valid_before,
+ _valid_after=self._valid_after,
+ _critical_options=self._critical_options,
+ _extensions=self._extensions,
+ )
+
+ def serial(self, serial: int) -> SSHCertificateBuilder:
+ if not isinstance(serial, int):
+ raise TypeError("serial must be an integer")
+ if not 0 <= serial < 2**64:
+ raise ValueError("serial must be between 0 and 2**64")
+ if self._serial is not None:
+ raise ValueError("serial already set")
+
+ return SSHCertificateBuilder(
+ _public_key=self._public_key,
+ _serial=serial,
+ _type=self._type,
+ _key_id=self._key_id,
+ _valid_principals=self._valid_principals,
+ _valid_for_all_principals=self._valid_for_all_principals,
+ _valid_before=self._valid_before,
+ _valid_after=self._valid_after,
+ _critical_options=self._critical_options,
+ _extensions=self._extensions,
+ )
+
+ def type(self, type: SSHCertificateType) -> SSHCertificateBuilder:
+ if not isinstance(type, SSHCertificateType):
+ raise TypeError("type must be an SSHCertificateType")
+ if self._type is not None:
+ raise ValueError("type already set")
+
+ return SSHCertificateBuilder(
+ _public_key=self._public_key,
+ _serial=self._serial,
+ _type=type,
+ _key_id=self._key_id,
+ _valid_principals=self._valid_principals,
+ _valid_for_all_principals=self._valid_for_all_principals,
+ _valid_before=self._valid_before,
+ _valid_after=self._valid_after,
+ _critical_options=self._critical_options,
+ _extensions=self._extensions,
+ )
+
+ def key_id(self, key_id: bytes) -> SSHCertificateBuilder:
+ if not isinstance(key_id, bytes):
+ raise TypeError("key_id must be bytes")
+ if self._key_id is not None:
+ raise ValueError("key_id already set")
+
+ return SSHCertificateBuilder(
+ _public_key=self._public_key,
+ _serial=self._serial,
+ _type=self._type,
+ _key_id=key_id,
+ _valid_principals=self._valid_principals,
+ _valid_for_all_principals=self._valid_for_all_principals,
+ _valid_before=self._valid_before,
+ _valid_after=self._valid_after,
+ _critical_options=self._critical_options,
+ _extensions=self._extensions,
+ )
+
+ def valid_principals(
+ self, valid_principals: typing.List[bytes]
+ ) -> SSHCertificateBuilder:
+ if self._valid_for_all_principals:
+ raise ValueError(
+ "Principals can't be set because the cert is valid "
+ "for all principals"
+ )
+ if (
+ not all(isinstance(x, bytes) for x in valid_principals)
+ or not valid_principals
+ ):
+ raise TypeError(
+ "principals must be a list of bytes and can't be empty"
+ )
+ if self._valid_principals:
+ raise ValueError("valid_principals already set")
+
+ if len(valid_principals) > _SSHKEY_CERT_MAX_PRINCIPALS:
+ raise ValueError(
+ "Reached or exceeded the maximum number of valid_principals"
+ )
+
+ return SSHCertificateBuilder(
+ _public_key=self._public_key,
+ _serial=self._serial,
+ _type=self._type,
+ _key_id=self._key_id,
+ _valid_principals=valid_principals,
+ _valid_for_all_principals=self._valid_for_all_principals,
+ _valid_before=self._valid_before,
+ _valid_after=self._valid_after,
+ _critical_options=self._critical_options,
+ _extensions=self._extensions,
+ )
+
+ def valid_for_all_principals(self):
+ if self._valid_principals:
+ raise ValueError(
+ "valid_principals already set, can't set "
+ "valid_for_all_principals"
+ )
+ if self._valid_for_all_principals:
+ raise ValueError("valid_for_all_principals already set")
+
+ return SSHCertificateBuilder(
+ _public_key=self._public_key,
+ _serial=self._serial,
+ _type=self._type,
+ _key_id=self._key_id,
+ _valid_principals=self._valid_principals,
+ _valid_for_all_principals=True,
+ _valid_before=self._valid_before,
+ _valid_after=self._valid_after,
+ _critical_options=self._critical_options,
+ _extensions=self._extensions,
+ )
+
+ def valid_before(
+ self, valid_before: typing.Union[int, float]
+ ) -> SSHCertificateBuilder:
+ if not isinstance(valid_before, (int, float)):
+ raise TypeError("valid_before must be an int or float")
+ valid_before = int(valid_before)
+ if valid_before < 0 or valid_before >= 2**64:
+ raise ValueError("valid_before must [0, 2**64)")
+ if self._valid_before is not None:
+ raise ValueError("valid_before already set")
+
+ return SSHCertificateBuilder(
+ _public_key=self._public_key,
+ _serial=self._serial,
+ _type=self._type,
+ _key_id=self._key_id,
+ _valid_principals=self._valid_principals,
+ _valid_for_all_principals=self._valid_for_all_principals,
+ _valid_before=valid_before,
+ _valid_after=self._valid_after,
+ _critical_options=self._critical_options,
+ _extensions=self._extensions,
+ )
+
+ def valid_after(
+ self, valid_after: typing.Union[int, float]
+ ) -> SSHCertificateBuilder:
+ if not isinstance(valid_after, (int, float)):
+ raise TypeError("valid_after must be an int or float")
+ valid_after = int(valid_after)
+ if valid_after < 0 or valid_after >= 2**64:
+ raise ValueError("valid_after must [0, 2**64)")
+ if self._valid_after is not None:
+ raise ValueError("valid_after already set")
+
+ return SSHCertificateBuilder(
+ _public_key=self._public_key,
+ _serial=self._serial,
+ _type=self._type,
+ _key_id=self._key_id,
+ _valid_principals=self._valid_principals,
+ _valid_for_all_principals=self._valid_for_all_principals,
+ _valid_before=self._valid_before,
+ _valid_after=valid_after,
+ _critical_options=self._critical_options,
+ _extensions=self._extensions,
+ )
+
+ def add_critical_option(
+ self, name: bytes, value: bytes
+ ) -> SSHCertificateBuilder:
+ if not isinstance(name, bytes) or not isinstance(value, bytes):
+ raise TypeError("name and value must be bytes")
+ # This is O(n**2)
+ if name in [name for name, _ in self._critical_options]:
+ raise ValueError("Duplicate critical option name")
+
+ return SSHCertificateBuilder(
+ _public_key=self._public_key,
+ _serial=self._serial,
+ _type=self._type,
+ _key_id=self._key_id,
+ _valid_principals=self._valid_principals,
+ _valid_for_all_principals=self._valid_for_all_principals,
+ _valid_before=self._valid_before,
+ _valid_after=self._valid_after,
+ _critical_options=self._critical_options + [(name, value)],
+ _extensions=self._extensions,
+ )
+
+ def add_extension(
+ self, name: bytes, value: bytes
+ ) -> SSHCertificateBuilder:
+ if not isinstance(name, bytes) or not isinstance(value, bytes):
+ raise TypeError("name and value must be bytes")
+ # This is O(n**2)
+ if name in [name for name, _ in self._extensions]:
+ raise ValueError("Duplicate extension name")
+
+ return SSHCertificateBuilder(
+ _public_key=self._public_key,
+ _serial=self._serial,
+ _type=self._type,
+ _key_id=self._key_id,
+ _valid_principals=self._valid_principals,
+ _valid_for_all_principals=self._valid_for_all_principals,
+ _valid_before=self._valid_before,
+ _valid_after=self._valid_after,
+ _critical_options=self._critical_options,
+ _extensions=self._extensions + [(name, value)],
+ )
+
+ def sign(self, private_key: SSHCertPrivateKeyTypes) -> SSHCertificate:
+ if not isinstance(
+ private_key,
+ (
+ ec.EllipticCurvePrivateKey,
+ rsa.RSAPrivateKey,
+ ed25519.Ed25519PrivateKey,
+ ),
+ ):
+ raise TypeError("Unsupported private key type")
+
+ if self._public_key is None:
+ raise ValueError("public_key must be set")
+
+ # Not required
+ serial = 0 if self._serial is None else self._serial
+
+ if self._type is None:
+ raise ValueError("type must be set")
+
+ # Not required
+ key_id = b"" if self._key_id is None else self._key_id
+
+ # A zero length list is valid, but means the certificate
+ # is valid for any principal of the specified type. We require
+ # the user to explicitly set valid_for_all_principals to get
+ # that behavior.
+ if not self._valid_principals and not self._valid_for_all_principals:
+ raise ValueError(
+ "valid_principals must be set if valid_for_all_principals "
+ "is False"
+ )
+
+ if self._valid_before is None:
+ raise ValueError("valid_before must be set")
+
+ if self._valid_after is None:
+ raise ValueError("valid_after must be set")
+
+ if self._valid_after > self._valid_before:
+ raise ValueError("valid_after must be earlier than valid_before")
+
+ # lexically sort our byte strings
+ self._critical_options.sort(key=lambda x: x[0])
+ self._extensions.sort(key=lambda x: x[0])
+
+ key_type = _get_ssh_key_type(self._public_key)
+ cert_prefix = key_type + _CERT_SUFFIX
+
+ # Marshal the bytes to be signed
+ nonce = os.urandom(32)
+ kformat = _lookup_kformat(key_type)
+ f = _FragList()
+ f.put_sshstr(cert_prefix)
+ f.put_sshstr(nonce)
+ kformat.encode_public(self._public_key, f)
+ f.put_u64(serial)
+ f.put_u32(self._type.value)
+ f.put_sshstr(key_id)
+ fprincipals = _FragList()
+ for p in self._valid_principals:
+ fprincipals.put_sshstr(p)
+ f.put_sshstr(fprincipals.tobytes())
+ f.put_u64(self._valid_after)
+ f.put_u64(self._valid_before)
+ fcrit = _FragList()
+ for name, value in self._critical_options:
+ fcrit.put_sshstr(name)
+ if len(value) > 0:
+ foptval = _FragList()
+ foptval.put_sshstr(value)
+ fcrit.put_sshstr(foptval.tobytes())
+ else:
+ fcrit.put_sshstr(value)
+ f.put_sshstr(fcrit.tobytes())
+ fext = _FragList()
+ for name, value in self._extensions:
+ fext.put_sshstr(name)
+ if len(value) > 0:
+ fextval = _FragList()
+ fextval.put_sshstr(value)
+ fext.put_sshstr(fextval.tobytes())
+ else:
+ fext.put_sshstr(value)
+ f.put_sshstr(fext.tobytes())
+ f.put_sshstr(b"") # RESERVED FIELD
+ # encode CA public key
+ ca_type = _get_ssh_key_type(private_key)
+ caformat = _lookup_kformat(ca_type)
+ caf = _FragList()
+ caf.put_sshstr(ca_type)
+ caformat.encode_public(private_key.public_key(), caf)
+ f.put_sshstr(caf.tobytes())
+ # Signatures are computed according to the rules for the CA's
+ # public key (RFC 4253 section 6.6 for ssh-rsa, RFC 5656 for
+ # ECDSA, and RFC 8032 for Ed25519).
+ if isinstance(private_key, ed25519.Ed25519PrivateKey):
+ signature = private_key.sign(f.tobytes())
+ fsig = _FragList()
+ fsig.put_sshstr(ca_type)
+ fsig.put_sshstr(signature)
+ f.put_sshstr(fsig.tobytes())
+ elif isinstance(private_key, ec.EllipticCurvePrivateKey):
+ hash_alg = _get_ec_hash_alg(private_key.curve)
+ signature = private_key.sign(f.tobytes(), ec.ECDSA(hash_alg))
+ r, s = asym_utils.decode_dss_signature(signature)
+ fsig = _FragList()
+ fsig.put_sshstr(ca_type)
+ fsigblob = _FragList()
+ fsigblob.put_mpint(r)
+ fsigblob.put_mpint(s)
+ fsig.put_sshstr(fsigblob.tobytes())
+ f.put_sshstr(fsig.tobytes())
+
+ else:
+ assert isinstance(private_key, rsa.RSAPrivateKey)
+ # Just like Golang, we're going to use SHA512 for RSA
+ # https://cs.opensource.google/go/x/crypto/+/refs/tags/v0.4.0:ssh/certs.go;l=445
+ # RFC 8332 defines SHA256 and 512 as options
+ fsig = _FragList()
+ fsig.put_sshstr(_SSH_RSA_SHA512)
+ signature = private_key.sign(
+ f.tobytes(), padding.PKCS1v15(), hashes.SHA512()
+ )
+ fsig.put_sshstr(signature)
+ f.put_sshstr(fsig.tobytes())
+
+ cert_data = binascii.b2a_base64(f.tobytes()).strip()
+ # load_ssh_public_identity returns a union, but this is
+ # guaranteed to be an SSHCertificate, so we cast to make
+ # mypy happy.
+ return typing.cast(
+ SSHCertificate,
+ load_ssh_public_identity(b"".join([cert_prefix, b" ", cert_data])),
+ )
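+
+
+if __name__ == "__main__":
+    # Hedged sketch, not part of the library: issue and verify a minimal
+    # user certificate with a fresh Ed25519 CA and subject key.
+    import time
+
+    ca = ed25519.Ed25519PrivateKey.generate()
+    subject = ed25519.Ed25519PrivateKey.generate()
+    now = int(time.time())
+    cert = (
+        SSHCertificateBuilder()
+        .public_key(subject.public_key())
+        .type(SSHCertificateType.USER)
+        .valid_principals([b"alice"])
+        .valid_after(now)
+        .valid_before(now + 3600)
+        .sign(ca)
+    )
+    cert.verify_cert_signature()  # raises on a bad signature
+    assert cert.public_bytes().startswith(b"ssh-ed25519-cert-v01@openssh.com")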
diff --git a/billinglayer/python/cryptography/hazmat/primitives/twofactor/__init__.py b/billinglayer/python/cryptography/hazmat/primitives/twofactor/__init__.py
new file mode 100644
index 0000000..c1af423
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/twofactor/__init__.py
@@ -0,0 +1,9 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+
+class InvalidToken(Exception):
+ pass
diff --git a/billinglayer/python/cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..92a3399
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-311.pyc
new file mode 100644
index 0000000..65bbc66
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-311.pyc b/billinglayer/python/cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-311.pyc
new file mode 100644
index 0000000..f6204f2
Binary files /dev/null and b/billinglayer/python/cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/hazmat/primitives/twofactor/hotp.py b/billinglayer/python/cryptography/hazmat/primitives/twofactor/hotp.py
new file mode 100644
index 0000000..2067108
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/twofactor/hotp.py
@@ -0,0 +1,92 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import base64
+import typing
+from urllib.parse import quote, urlencode
+
+from cryptography.hazmat.primitives import constant_time, hmac
+from cryptography.hazmat.primitives.hashes import SHA1, SHA256, SHA512
+from cryptography.hazmat.primitives.twofactor import InvalidToken
+
+HOTPHashTypes = typing.Union[SHA1, SHA256, SHA512]
+
+
+def _generate_uri(
+ hotp: HOTP,
+ type_name: str,
+ account_name: str,
+ issuer: typing.Optional[str],
+ extra_parameters: typing.List[typing.Tuple[str, int]],
+) -> str:
+ parameters = [
+ ("digits", hotp._length),
+ ("secret", base64.b32encode(hotp._key)),
+ ("algorithm", hotp._algorithm.name.upper()),
+ ]
+
+ if issuer is not None:
+ parameters.append(("issuer", issuer))
+
+ parameters.extend(extra_parameters)
+
+ label = (
+ f"{quote(issuer)}:{quote(account_name)}"
+ if issuer
+ else quote(account_name)
+ )
+ return f"otpauth://{type_name}/{label}?{urlencode(parameters)}"
+
+
+class HOTP:
+ def __init__(
+ self,
+ key: bytes,
+ length: int,
+ algorithm: HOTPHashTypes,
+ backend: typing.Any = None,
+ enforce_key_length: bool = True,
+ ) -> None:
+ if len(key) < 16 and enforce_key_length is True:
+ raise ValueError("Key length has to be at least 128 bits.")
+
+ if not isinstance(length, int):
+ raise TypeError("Length parameter must be an integer type.")
+
+ if length < 6 or length > 8:
+ raise ValueError("Length of HOTP has to be between 6 and 8.")
+
+ if not isinstance(algorithm, (SHA1, SHA256, SHA512)):
+ raise TypeError("Algorithm must be SHA1, SHA256 or SHA512.")
+
+ self._key = key
+ self._length = length
+ self._algorithm = algorithm
+
+ def generate(self, counter: int) -> bytes:
+ truncated_value = self._dynamic_truncate(counter)
+ hotp = truncated_value % (10**self._length)
+ return "{0:0{1}}".format(hotp, self._length).encode()
+
+ def verify(self, hotp: bytes, counter: int) -> None:
+ if not constant_time.bytes_eq(self.generate(counter), hotp):
+ raise InvalidToken("Supplied HOTP value does not match.")
+
+ def _dynamic_truncate(self, counter: int) -> int:
+ ctx = hmac.HMAC(self._key, self._algorithm)
+ ctx.update(counter.to_bytes(length=8, byteorder="big"))
+ hmac_value = ctx.finalize()
+
+ offset = hmac_value[len(hmac_value) - 1] & 0b1111
+ p = hmac_value[offset : offset + 4]
+ return int.from_bytes(p, byteorder="big") & 0x7FFFFFFF
+
+ def get_provisioning_uri(
+ self, account_name: str, counter: int, issuer: typing.Optional[str]
+ ) -> str:
+ return _generate_uri(
+ self, "hotp", account_name, issuer, [("counter", int(counter))]
+ )
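+
+
+if __name__ == "__main__":
+    # Hedged self-check, not part of the library: RFC 4226 Appendix D test
+    # vectors for the 20-byte ASCII key "12345678901234567890".
+    otp = HOTP(b"12345678901234567890", 6, SHA1())
+    assert otp.generate(0) == b"755224"
+    otp.verify(b"287082", 1)  # raises InvalidToken on mismatch
+    uri = otp.get_provisioning_uri("alice", 0, "Example")
+    assert uri.startswith("otpauth://hotp/Example:alice?")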
diff --git a/billinglayer/python/cryptography/hazmat/primitives/twofactor/totp.py b/billinglayer/python/cryptography/hazmat/primitives/twofactor/totp.py
new file mode 100644
index 0000000..daddcea
--- /dev/null
+++ b/billinglayer/python/cryptography/hazmat/primitives/twofactor/totp.py
@@ -0,0 +1,50 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import typing
+
+from cryptography.hazmat.primitives import constant_time
+from cryptography.hazmat.primitives.twofactor import InvalidToken
+from cryptography.hazmat.primitives.twofactor.hotp import (
+ HOTP,
+ HOTPHashTypes,
+ _generate_uri,
+)
+
+
+class TOTP:
+ def __init__(
+ self,
+ key: bytes,
+ length: int,
+ algorithm: HOTPHashTypes,
+ time_step: int,
+ backend: typing.Any = None,
+ enforce_key_length: bool = True,
+ ):
+ self._time_step = time_step
+ self._hotp = HOTP(
+ key, length, algorithm, enforce_key_length=enforce_key_length
+ )
+
+ def generate(self, time: typing.Union[int, float]) -> bytes:
+ counter = int(time / self._time_step)
+ return self._hotp.generate(counter)
+
+ def verify(self, totp: bytes, time: int) -> None:
+ if not constant_time.bytes_eq(self.generate(time), totp):
+ raise InvalidToken("Supplied TOTP value does not match.")
+
+ def get_provisioning_uri(
+ self, account_name: str, issuer: typing.Optional[str]
+ ) -> str:
+ return _generate_uri(
+ self._hotp,
+ "totp",
+ account_name,
+ issuer,
+ [("period", int(self._time_step))],
+ )
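+
+
+if __name__ == "__main__":
+    # Hedged self-check, not part of the library: RFC 6238 Appendix B
+    # SHA-1 vector -- 8 digits, a 30-second step, and T=59 give 94287082.
+    from cryptography.hazmat.primitives.hashes import SHA1
+
+    totp = TOTP(b"12345678901234567890", 8, SHA1(), 30)
+    assert totp.generate(59) == b"94287082"
+    totp.verify(b"94287082", 59)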
diff --git a/billinglayer/python/cryptography/py.typed b/billinglayer/python/cryptography/py.typed
new file mode 100644
index 0000000..e69de29
diff --git a/billinglayer/python/cryptography/utils.py b/billinglayer/python/cryptography/utils.py
new file mode 100644
index 0000000..7191681
--- /dev/null
+++ b/billinglayer/python/cryptography/utils.py
@@ -0,0 +1,130 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import enum
+import sys
+import types
+import typing
+import warnings
+
+
+# We use a UserWarning subclass, instead of DeprecationWarning, because CPython
+# decided deprecation warnings should be invisible by default.
+class CryptographyDeprecationWarning(UserWarning):
+ pass
+
+
+# Several APIs were deprecated with no specific end-of-life date because of the
+# ubiquity of their use. They should not be removed until we agree on when that
+# cycle ends.
+DeprecatedIn36 = CryptographyDeprecationWarning
+DeprecatedIn37 = CryptographyDeprecationWarning
+DeprecatedIn40 = CryptographyDeprecationWarning
+DeprecatedIn41 = CryptographyDeprecationWarning
+
+
+def _check_bytes(name: str, value: bytes) -> None:
+ if not isinstance(value, bytes):
+ raise TypeError(f"{name} must be bytes")
+
+
+def _check_byteslike(name: str, value: bytes) -> None:
+ try:
+ memoryview(value)
+ except TypeError:
+ raise TypeError(f"{name} must be bytes-like")
+
+
+def int_to_bytes(integer: int, length: typing.Optional[int] = None) -> bytes:
+ return integer.to_bytes(
+ length or (integer.bit_length() + 7) // 8 or 1, "big"
+ )
+
+
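+def _int_to_bytes_selfcheck() -> None:
+    # Hedged sketch, not called by the library: length defaults to the
+    # minimal big-endian width, and zero encodes as a single NUL byte.
+    assert int_to_bytes(0) == b"\x00"
+    assert int_to_bytes(255) == b"\xff"
+    assert int_to_bytes(256) == b"\x01\x00"
+    assert int_to_bytes(1, 4) == b"\x00\x00\x00\x01"
+
+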
+def _extract_buffer_length(obj: typing.Any) -> typing.Tuple[typing.Any, int]:
+ from cryptography.hazmat.bindings._rust import _openssl
+
+ buf = _openssl.ffi.from_buffer(obj)
+ return buf, int(_openssl.ffi.cast("uintptr_t", buf))
+
+
+class InterfaceNotImplemented(Exception):
+ pass
+
+
+class _DeprecatedValue:
+ def __init__(self, value: object, message: str, warning_class):
+ self.value = value
+ self.message = message
+ self.warning_class = warning_class
+
+
+class _ModuleWithDeprecations(types.ModuleType):
+ def __init__(self, module: types.ModuleType):
+ super().__init__(module.__name__)
+ self.__dict__["_module"] = module
+
+ def __getattr__(self, attr: str) -> object:
+ obj = getattr(self._module, attr)
+ if isinstance(obj, _DeprecatedValue):
+ warnings.warn(obj.message, obj.warning_class, stacklevel=2)
+ obj = obj.value
+ return obj
+
+ def __setattr__(self, attr: str, value: object) -> None:
+ setattr(self._module, attr, value)
+
+ def __delattr__(self, attr: str) -> None:
+ obj = getattr(self._module, attr)
+ if isinstance(obj, _DeprecatedValue):
+ warnings.warn(obj.message, obj.warning_class, stacklevel=2)
+
+ delattr(self._module, attr)
+
+ def __dir__(self) -> typing.Sequence[str]:
+ return ["_module"] + dir(self._module)
+
+
+def deprecated(
+ value: object,
+ module_name: str,
+ message: str,
+ warning_class: typing.Type[Warning],
+ name: typing.Optional[str] = None,
+) -> _DeprecatedValue:
+ module = sys.modules[module_name]
+ if not isinstance(module, _ModuleWithDeprecations):
+ sys.modules[module_name] = module = _ModuleWithDeprecations(module)
+ dv = _DeprecatedValue(value, message, warning_class)
+ # Maintain backwards compatibility with `name is None` for pyOpenSSL.
+ if name is not None:
+ setattr(module, name, dv)
+ return dv
+
+
+def cached_property(func: typing.Callable) -> property:
+ cached_name = f"_cached_{func}"
+ sentinel = object()
+
+ def inner(instance: object):
+ cache = getattr(instance, cached_name, sentinel)
+ if cache is not sentinel:
+ return cache
+ result = func(instance)
+ setattr(instance, cached_name, result)
+ return result
+
+ return property(inner)
+
+
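+def _cached_property_selfcheck() -> None:
+    # Hedged sketch, not called by the library: the wrapped function runs
+    # once per instance; later reads return the cached attribute.
+    calls = []
+
+    class Demo:
+        @cached_property
+        def value(self):
+            calls.append(1)
+            return 42
+
+    d = Demo()
+    assert d.value == 42 and d.value == 42
+    assert len(calls) == 1
+
+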
+# Python 3.10 changed the representation of enums. We pin the object and
+# string representations to the well-defined Python 3.9 behavior.
+class Enum(enum.Enum):
+ def __repr__(self) -> str:
+ return f"<{self.__class__.__name__}.{self._name_}: {self._value_!r}>"
+
+ def __str__(self) -> str:
+ return f"{self.__class__.__name__}.{self._name_}"
diff --git a/billinglayer/python/cryptography/x509/__init__.py b/billinglayer/python/cryptography/x509/__init__.py
new file mode 100644
index 0000000..d77694a
--- /dev/null
+++ b/billinglayer/python/cryptography/x509/__init__.py
@@ -0,0 +1,255 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+from cryptography.x509 import certificate_transparency
+from cryptography.x509.base import (
+ Attribute,
+ AttributeNotFound,
+ Attributes,
+ Certificate,
+ CertificateBuilder,
+ CertificateRevocationList,
+ CertificateRevocationListBuilder,
+ CertificateSigningRequest,
+ CertificateSigningRequestBuilder,
+ InvalidVersion,
+ RevokedCertificate,
+ RevokedCertificateBuilder,
+ Version,
+ load_der_x509_certificate,
+ load_der_x509_crl,
+ load_der_x509_csr,
+ load_pem_x509_certificate,
+ load_pem_x509_certificates,
+ load_pem_x509_crl,
+ load_pem_x509_csr,
+ random_serial_number,
+)
+from cryptography.x509.extensions import (
+ AccessDescription,
+ AuthorityInformationAccess,
+ AuthorityKeyIdentifier,
+ BasicConstraints,
+ CertificateIssuer,
+ CertificatePolicies,
+ CRLDistributionPoints,
+ CRLNumber,
+ CRLReason,
+ DeltaCRLIndicator,
+ DistributionPoint,
+ DuplicateExtension,
+ ExtendedKeyUsage,
+ Extension,
+ ExtensionNotFound,
+ Extensions,
+ ExtensionType,
+ FreshestCRL,
+ GeneralNames,
+ InhibitAnyPolicy,
+ InvalidityDate,
+ IssuerAlternativeName,
+ IssuingDistributionPoint,
+ KeyUsage,
+ MSCertificateTemplate,
+ NameConstraints,
+ NoticeReference,
+ OCSPAcceptableResponses,
+ OCSPNoCheck,
+ OCSPNonce,
+ PolicyConstraints,
+ PolicyInformation,
+ PrecertificateSignedCertificateTimestamps,
+ PrecertPoison,
+ ReasonFlags,
+ SignedCertificateTimestamps,
+ SubjectAlternativeName,
+ SubjectInformationAccess,
+ SubjectKeyIdentifier,
+ TLSFeature,
+ TLSFeatureType,
+ UnrecognizedExtension,
+ UserNotice,
+)
+from cryptography.x509.general_name import (
+ DirectoryName,
+ DNSName,
+ GeneralName,
+ IPAddress,
+ OtherName,
+ RegisteredID,
+ RFC822Name,
+ UniformResourceIdentifier,
+ UnsupportedGeneralNameType,
+)
+from cryptography.x509.name import (
+ Name,
+ NameAttribute,
+ RelativeDistinguishedName,
+)
+from cryptography.x509.oid import (
+ AuthorityInformationAccessOID,
+ CertificatePoliciesOID,
+ CRLEntryExtensionOID,
+ ExtendedKeyUsageOID,
+ ExtensionOID,
+ NameOID,
+ ObjectIdentifier,
+ SignatureAlgorithmOID,
+)
+
+OID_AUTHORITY_INFORMATION_ACCESS = ExtensionOID.AUTHORITY_INFORMATION_ACCESS
+OID_AUTHORITY_KEY_IDENTIFIER = ExtensionOID.AUTHORITY_KEY_IDENTIFIER
+OID_BASIC_CONSTRAINTS = ExtensionOID.BASIC_CONSTRAINTS
+OID_CERTIFICATE_POLICIES = ExtensionOID.CERTIFICATE_POLICIES
+OID_CRL_DISTRIBUTION_POINTS = ExtensionOID.CRL_DISTRIBUTION_POINTS
+OID_EXTENDED_KEY_USAGE = ExtensionOID.EXTENDED_KEY_USAGE
+OID_FRESHEST_CRL = ExtensionOID.FRESHEST_CRL
+OID_INHIBIT_ANY_POLICY = ExtensionOID.INHIBIT_ANY_POLICY
+OID_ISSUER_ALTERNATIVE_NAME = ExtensionOID.ISSUER_ALTERNATIVE_NAME
+OID_KEY_USAGE = ExtensionOID.KEY_USAGE
+OID_NAME_CONSTRAINTS = ExtensionOID.NAME_CONSTRAINTS
+OID_OCSP_NO_CHECK = ExtensionOID.OCSP_NO_CHECK
+OID_POLICY_CONSTRAINTS = ExtensionOID.POLICY_CONSTRAINTS
+OID_POLICY_MAPPINGS = ExtensionOID.POLICY_MAPPINGS
+OID_SUBJECT_ALTERNATIVE_NAME = ExtensionOID.SUBJECT_ALTERNATIVE_NAME
+OID_SUBJECT_DIRECTORY_ATTRIBUTES = ExtensionOID.SUBJECT_DIRECTORY_ATTRIBUTES
+OID_SUBJECT_INFORMATION_ACCESS = ExtensionOID.SUBJECT_INFORMATION_ACCESS
+OID_SUBJECT_KEY_IDENTIFIER = ExtensionOID.SUBJECT_KEY_IDENTIFIER
+
+OID_DSA_WITH_SHA1 = SignatureAlgorithmOID.DSA_WITH_SHA1
+OID_DSA_WITH_SHA224 = SignatureAlgorithmOID.DSA_WITH_SHA224
+OID_DSA_WITH_SHA256 = SignatureAlgorithmOID.DSA_WITH_SHA256
+OID_ECDSA_WITH_SHA1 = SignatureAlgorithmOID.ECDSA_WITH_SHA1
+OID_ECDSA_WITH_SHA224 = SignatureAlgorithmOID.ECDSA_WITH_SHA224
+OID_ECDSA_WITH_SHA256 = SignatureAlgorithmOID.ECDSA_WITH_SHA256
+OID_ECDSA_WITH_SHA384 = SignatureAlgorithmOID.ECDSA_WITH_SHA384
+OID_ECDSA_WITH_SHA512 = SignatureAlgorithmOID.ECDSA_WITH_SHA512
+OID_RSA_WITH_MD5 = SignatureAlgorithmOID.RSA_WITH_MD5
+OID_RSA_WITH_SHA1 = SignatureAlgorithmOID.RSA_WITH_SHA1
+OID_RSA_WITH_SHA224 = SignatureAlgorithmOID.RSA_WITH_SHA224
+OID_RSA_WITH_SHA256 = SignatureAlgorithmOID.RSA_WITH_SHA256
+OID_RSA_WITH_SHA384 = SignatureAlgorithmOID.RSA_WITH_SHA384
+OID_RSA_WITH_SHA512 = SignatureAlgorithmOID.RSA_WITH_SHA512
+OID_RSASSA_PSS = SignatureAlgorithmOID.RSASSA_PSS
+
+OID_COMMON_NAME = NameOID.COMMON_NAME
+OID_COUNTRY_NAME = NameOID.COUNTRY_NAME
+OID_DOMAIN_COMPONENT = NameOID.DOMAIN_COMPONENT
+OID_DN_QUALIFIER = NameOID.DN_QUALIFIER
+OID_EMAIL_ADDRESS = NameOID.EMAIL_ADDRESS
+OID_GENERATION_QUALIFIER = NameOID.GENERATION_QUALIFIER
+OID_GIVEN_NAME = NameOID.GIVEN_NAME
+OID_LOCALITY_NAME = NameOID.LOCALITY_NAME
+OID_ORGANIZATIONAL_UNIT_NAME = NameOID.ORGANIZATIONAL_UNIT_NAME
+OID_ORGANIZATION_NAME = NameOID.ORGANIZATION_NAME
+OID_PSEUDONYM = NameOID.PSEUDONYM
+OID_SERIAL_NUMBER = NameOID.SERIAL_NUMBER
+OID_STATE_OR_PROVINCE_NAME = NameOID.STATE_OR_PROVINCE_NAME
+OID_SURNAME = NameOID.SURNAME
+OID_TITLE = NameOID.TITLE
+
+OID_CLIENT_AUTH = ExtendedKeyUsageOID.CLIENT_AUTH
+OID_CODE_SIGNING = ExtendedKeyUsageOID.CODE_SIGNING
+OID_EMAIL_PROTECTION = ExtendedKeyUsageOID.EMAIL_PROTECTION
+OID_OCSP_SIGNING = ExtendedKeyUsageOID.OCSP_SIGNING
+OID_SERVER_AUTH = ExtendedKeyUsageOID.SERVER_AUTH
+OID_TIME_STAMPING = ExtendedKeyUsageOID.TIME_STAMPING
+
+OID_ANY_POLICY = CertificatePoliciesOID.ANY_POLICY
+OID_CPS_QUALIFIER = CertificatePoliciesOID.CPS_QUALIFIER
+OID_CPS_USER_NOTICE = CertificatePoliciesOID.CPS_USER_NOTICE
+
+OID_CERTIFICATE_ISSUER = CRLEntryExtensionOID.CERTIFICATE_ISSUER
+OID_CRL_REASON = CRLEntryExtensionOID.CRL_REASON
+OID_INVALIDITY_DATE = CRLEntryExtensionOID.INVALIDITY_DATE
+
+OID_CA_ISSUERS = AuthorityInformationAccessOID.CA_ISSUERS
+OID_OCSP = AuthorityInformationAccessOID.OCSP
+
+__all__ = [
+ "certificate_transparency",
+ "load_pem_x509_certificate",
+ "load_pem_x509_certificates",
+ "load_der_x509_certificate",
+ "load_pem_x509_csr",
+ "load_der_x509_csr",
+ "load_pem_x509_crl",
+ "load_der_x509_crl",
+ "random_serial_number",
+ "Attribute",
+ "AttributeNotFound",
+ "Attributes",
+ "InvalidVersion",
+ "DeltaCRLIndicator",
+ "DuplicateExtension",
+ "ExtensionNotFound",
+ "UnsupportedGeneralNameType",
+ "NameAttribute",
+ "Name",
+ "RelativeDistinguishedName",
+ "ObjectIdentifier",
+ "ExtensionType",
+ "Extensions",
+ "Extension",
+ "ExtendedKeyUsage",
+ "FreshestCRL",
+ "IssuingDistributionPoint",
+ "TLSFeature",
+ "TLSFeatureType",
+ "OCSPAcceptableResponses",
+ "OCSPNoCheck",
+ "BasicConstraints",
+ "CRLNumber",
+ "KeyUsage",
+ "AuthorityInformationAccess",
+ "SubjectInformationAccess",
+ "AccessDescription",
+ "CertificatePolicies",
+ "PolicyInformation",
+ "UserNotice",
+ "NoticeReference",
+ "SubjectKeyIdentifier",
+ "NameConstraints",
+ "CRLDistributionPoints",
+ "DistributionPoint",
+ "ReasonFlags",
+ "InhibitAnyPolicy",
+ "SubjectAlternativeName",
+ "IssuerAlternativeName",
+ "AuthorityKeyIdentifier",
+ "GeneralNames",
+ "GeneralName",
+ "RFC822Name",
+ "DNSName",
+ "UniformResourceIdentifier",
+ "RegisteredID",
+ "DirectoryName",
+ "IPAddress",
+ "OtherName",
+ "Certificate",
+ "CertificateRevocationList",
+ "CertificateRevocationListBuilder",
+ "CertificateSigningRequest",
+ "RevokedCertificate",
+ "RevokedCertificateBuilder",
+ "CertificateSigningRequestBuilder",
+ "CertificateBuilder",
+ "Version",
+ "OID_CA_ISSUERS",
+ "OID_OCSP",
+ "CertificateIssuer",
+ "CRLReason",
+ "InvalidityDate",
+ "UnrecognizedExtension",
+ "PolicyConstraints",
+ "PrecertificateSignedCertificateTimestamps",
+ "PrecertPoison",
+ "OCSPNonce",
+ "SignedCertificateTimestamps",
+ "SignatureAlgorithmOID",
+ "NameOID",
+ "MSCertificateTemplate",
+]
diff --git a/billinglayer/python/cryptography/x509/__pycache__/__init__.cpython-311.pyc b/billinglayer/python/cryptography/x509/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..c0d1de4
Binary files /dev/null and b/billinglayer/python/cryptography/x509/__pycache__/__init__.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/x509/__pycache__/base.cpython-311.pyc b/billinglayer/python/cryptography/x509/__pycache__/base.cpython-311.pyc
new file mode 100644
index 0000000..69ca8ab
Binary files /dev/null and b/billinglayer/python/cryptography/x509/__pycache__/base.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/x509/__pycache__/certificate_transparency.cpython-311.pyc b/billinglayer/python/cryptography/x509/__pycache__/certificate_transparency.cpython-311.pyc
new file mode 100644
index 0000000..b1e8141
Binary files /dev/null and b/billinglayer/python/cryptography/x509/__pycache__/certificate_transparency.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/x509/__pycache__/extensions.cpython-311.pyc b/billinglayer/python/cryptography/x509/__pycache__/extensions.cpython-311.pyc
new file mode 100644
index 0000000..571e3b5
Binary files /dev/null and b/billinglayer/python/cryptography/x509/__pycache__/extensions.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/x509/__pycache__/general_name.cpython-311.pyc b/billinglayer/python/cryptography/x509/__pycache__/general_name.cpython-311.pyc
new file mode 100644
index 0000000..57d9f7e
Binary files /dev/null and b/billinglayer/python/cryptography/x509/__pycache__/general_name.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/x509/__pycache__/name.cpython-311.pyc b/billinglayer/python/cryptography/x509/__pycache__/name.cpython-311.pyc
new file mode 100644
index 0000000..b75d69b
Binary files /dev/null and b/billinglayer/python/cryptography/x509/__pycache__/name.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/x509/__pycache__/ocsp.cpython-311.pyc b/billinglayer/python/cryptography/x509/__pycache__/ocsp.cpython-311.pyc
new file mode 100644
index 0000000..6e81399
Binary files /dev/null and b/billinglayer/python/cryptography/x509/__pycache__/ocsp.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/x509/__pycache__/oid.cpython-311.pyc b/billinglayer/python/cryptography/x509/__pycache__/oid.cpython-311.pyc
new file mode 100644
index 0000000..df28d31
Binary files /dev/null and b/billinglayer/python/cryptography/x509/__pycache__/oid.cpython-311.pyc differ
diff --git a/billinglayer/python/cryptography/x509/base.py b/billinglayer/python/cryptography/x509/base.py
new file mode 100644
index 0000000..576385e
--- /dev/null
+++ b/billinglayer/python/cryptography/x509/base.py
@@ -0,0 +1,1173 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+import datetime
+import os
+import typing
+
+from cryptography import utils
+from cryptography.hazmat.bindings._rust import x509 as rust_x509
+from cryptography.hazmat.primitives import hashes, serialization
+from cryptography.hazmat.primitives.asymmetric import (
+ dsa,
+ ec,
+ ed448,
+ ed25519,
+ padding,
+ rsa,
+ x448,
+ x25519,
+)
+from cryptography.hazmat.primitives.asymmetric.types import (
+ CertificateIssuerPrivateKeyTypes,
+ CertificateIssuerPublicKeyTypes,
+ CertificatePublicKeyTypes,
+)
+from cryptography.x509.extensions import (
+ Extension,
+ Extensions,
+ ExtensionType,
+ _make_sequence_methods,
+)
+from cryptography.x509.name import Name, _ASN1Type
+from cryptography.x509.oid import ObjectIdentifier
+
+_EARLIEST_UTC_TIME = datetime.datetime(1950, 1, 1)
+
+# This must be kept in sync with sign.rs's list of allowable types in
+# identify_hash_type
+_AllowedHashTypes = typing.Union[
+ hashes.SHA224,
+ hashes.SHA256,
+ hashes.SHA384,
+ hashes.SHA512,
+ hashes.SHA3_224,
+ hashes.SHA3_256,
+ hashes.SHA3_384,
+ hashes.SHA3_512,
+]
+
+
+class AttributeNotFound(Exception):
+ def __init__(self, msg: str, oid: ObjectIdentifier) -> None:
+ super().__init__(msg)
+ self.oid = oid
+
+
+def _reject_duplicate_extension(
+ extension: Extension[ExtensionType],
+ extensions: typing.List[Extension[ExtensionType]],
+) -> None:
+ # This is quadratic in the number of extensions
+ for e in extensions:
+ if e.oid == extension.oid:
+ raise ValueError("This extension has already been set.")
+
+
+def _reject_duplicate_attribute(
+ oid: ObjectIdentifier,
+ attributes: typing.List[
+ typing.Tuple[ObjectIdentifier, bytes, typing.Optional[int]]
+ ],
+) -> None:
+ # This is quadratic in the number of attributes
+ for attr_oid, _, _ in attributes:
+ if attr_oid == oid:
+ raise ValueError("This attribute has already been set.")
+
+
+def _convert_to_naive_utc_time(time: datetime.datetime) -> datetime.datetime:
+ """Normalizes a datetime to a naive datetime in UTC.
+
+ time -- datetime to normalize. Assumed to be in UTC if not timezone
+ aware.
+ """
+ if time.tzinfo is not None:
+ offset = time.utcoffset()
+ offset = offset if offset else datetime.timedelta()
+ return time.replace(tzinfo=None) - offset
+ else:
+ return time
+
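+# A minimal sketch of the normalization above (datetimes are illustrative):
+#   tz = datetime.timezone(datetime.timedelta(hours=2))
+#   aware = datetime.datetime(2023, 1, 1, 12, 0, tzinfo=tz)
+#   _convert_to_naive_utc_time(aware)  # -> datetime.datetime(2023, 1, 1, 10, 0)
+#   naive = datetime.datetime(2023, 1, 1, 12, 0)
+#   _convert_to_naive_utc_time(naive)  # returned as-is; assumed to be UTC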
+
+class Attribute:
+ def __init__(
+ self,
+ oid: ObjectIdentifier,
+ value: bytes,
+ _type: int = _ASN1Type.UTF8String.value,
+ ) -> None:
+ self._oid = oid
+ self._value = value
+ self._type = _type
+
+ @property
+ def oid(self) -> ObjectIdentifier:
+ return self._oid
+
+ @property
+ def value(self) -> bytes:
+ return self._value
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, Attribute):
+ return NotImplemented
+
+ return (
+ self.oid == other.oid
+ and self.value == other.value
+ and self._type == other._type
+ )
+
+ def __hash__(self) -> int:
+ return hash((self.oid, self.value, self._type))
+
+
+class Attributes:
+ def __init__(
+ self,
+ attributes: typing.Iterable[Attribute],
+ ) -> None:
+ self._attributes = list(attributes)
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_attributes")
+
+ def __repr__(self) -> str:
+ return f""
+
+ def get_attribute_for_oid(self, oid: ObjectIdentifier) -> Attribute:
+ for attr in self:
+ if attr.oid == oid:
+ return attr
+
+ raise AttributeNotFound(f"No {oid} attribute was found", oid)
+
+
+class Version(utils.Enum):
+ v1 = 0
+ v3 = 2
+
+
+class InvalidVersion(Exception):
+ def __init__(self, msg: str, parsed_version: int) -> None:
+ super().__init__(msg)
+ self.parsed_version = parsed_version
+
+
+class Certificate(metaclass=abc.ABCMeta):
+ @abc.abstractmethod
+ def fingerprint(self, algorithm: hashes.HashAlgorithm) -> bytes:
+ """
+ Returns bytes using digest passed.
+ """
+
+ @property
+ @abc.abstractmethod
+ def serial_number(self) -> int:
+ """
+ Returns certificate serial number
+ """
+
+ @property
+ @abc.abstractmethod
+ def version(self) -> Version:
+ """
+ Returns the certificate version
+ """
+
+ @abc.abstractmethod
+ def public_key(self) -> CertificatePublicKeyTypes:
+ """
+ Returns the public key
+ """
+
+ @property
+ @abc.abstractmethod
+ def not_valid_before(self) -> datetime.datetime:
+ """
+ Not before time (represented as UTC datetime)
+ """
+
+ @property
+ @abc.abstractmethod
+ def not_valid_after(self) -> datetime.datetime:
+ """
+ Not after time (represented as UTC datetime)
+ """
+
+ @property
+ @abc.abstractmethod
+ def issuer(self) -> Name:
+ """
+ Returns the issuer name object.
+ """
+
+ @property
+ @abc.abstractmethod
+ def subject(self) -> Name:
+ """
+ Returns the subject name object.
+ """
+
+ @property
+ @abc.abstractmethod
+ def signature_hash_algorithm(
+ self,
+ ) -> typing.Optional[hashes.HashAlgorithm]:
+ """
+ Returns a HashAlgorithm corresponding to the type of the digest signed
+ in the certificate.
+ """
+
+ @property
+ @abc.abstractmethod
+ def signature_algorithm_oid(self) -> ObjectIdentifier:
+ """
+ Returns the ObjectIdentifier of the signature algorithm.
+ """
+
+ @property
+ @abc.abstractmethod
+ def signature_algorithm_parameters(
+ self,
+ ) -> typing.Union[None, padding.PSS, padding.PKCS1v15, ec.ECDSA]:
+ """
+ Returns the signature algorithm parameters.
+ """
+
+ @property
+ @abc.abstractmethod
+ def extensions(self) -> Extensions:
+ """
+ Returns an Extensions object.
+ """
+
+ @property
+ @abc.abstractmethod
+ def signature(self) -> bytes:
+ """
+ Returns the signature bytes.
+ """
+
+ @property
+ @abc.abstractmethod
+ def tbs_certificate_bytes(self) -> bytes:
+ """
+ Returns the tbsCertificate payload bytes as defined in RFC 5280.
+ """
+
+ @property
+ @abc.abstractmethod
+ def tbs_precertificate_bytes(self) -> bytes:
+ """
+ Returns the tbsCertificate payload bytes with the SCT list extension
+ stripped.
+ """
+
+ @abc.abstractmethod
+ def __eq__(self, other: object) -> bool:
+ """
+ Checks equality.
+ """
+
+ @abc.abstractmethod
+ def __hash__(self) -> int:
+ """
+ Computes a hash.
+ """
+
+ @abc.abstractmethod
+ def public_bytes(self, encoding: serialization.Encoding) -> bytes:
+ """
+ Serializes the certificate to PEM or DER format.
+ """
+
+ @abc.abstractmethod
+ def verify_directly_issued_by(self, issuer: Certificate) -> None:
+ """
+ This method verifies that certificate issuer name matches the
+ issuer subject name and that the certificate is signed by the
+ issuer's private key. No other validation is performed.
+ """
+
+
+# Runtime isinstance checks need this since the rust class is not a subclass.
+Certificate.register(rust_x509.Certificate)
+
+
+class RevokedCertificate(metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def serial_number(self) -> int:
+ """
+ Returns the serial number of the revoked certificate.
+ """
+
+ @property
+ @abc.abstractmethod
+ def revocation_date(self) -> datetime.datetime:
+ """
+ Returns the date of when this certificate was revoked.
+ """
+
+ @property
+ @abc.abstractmethod
+ def extensions(self) -> Extensions:
+ """
+ Returns an Extensions object containing a list of Revoked extensions.
+ """
+
+
+# Runtime isinstance checks need this since the rust class is not a subclass.
+RevokedCertificate.register(rust_x509.RevokedCertificate)
+
+
+class _RawRevokedCertificate(RevokedCertificate):
+ def __init__(
+ self,
+ serial_number: int,
+ revocation_date: datetime.datetime,
+ extensions: Extensions,
+ ):
+ self._serial_number = serial_number
+ self._revocation_date = revocation_date
+ self._extensions = extensions
+
+ @property
+ def serial_number(self) -> int:
+ return self._serial_number
+
+ @property
+ def revocation_date(self) -> datetime.datetime:
+ return self._revocation_date
+
+ @property
+ def extensions(self) -> Extensions:
+ return self._extensions
+
+
+class CertificateRevocationList(metaclass=abc.ABCMeta):
+ @abc.abstractmethod
+ def public_bytes(self, encoding: serialization.Encoding) -> bytes:
+ """
+ Serializes the CRL to PEM or DER format.
+ """
+
+ @abc.abstractmethod
+ def fingerprint(self, algorithm: hashes.HashAlgorithm) -> bytes:
+ """
+ Returns bytes using digest passed.
+ """
+
+ @abc.abstractmethod
+ def get_revoked_certificate_by_serial_number(
+ self, serial_number: int
+ ) -> typing.Optional[RevokedCertificate]:
+ """
+ Returns an instance of RevokedCertificate or None if the serial_number
+ is not in the CRL.
+ """
+
+ @property
+ @abc.abstractmethod
+ def signature_hash_algorithm(
+ self,
+ ) -> typing.Optional[hashes.HashAlgorithm]:
+ """
+ Returns a HashAlgorithm corresponding to the type of the digest signed
+ in the certificate.
+ """
+
+ @property
+ @abc.abstractmethod
+ def signature_algorithm_oid(self) -> ObjectIdentifier:
+ """
+ Returns the ObjectIdentifier of the signature algorithm.
+ """
+
+ @property
+ @abc.abstractmethod
+ def issuer(self) -> Name:
+ """
+ Returns the X509Name with the issuer of this CRL.
+ """
+
+ @property
+ @abc.abstractmethod
+ def next_update(self) -> typing.Optional[datetime.datetime]:
+ """
+ Returns the date of next update for this CRL.
+ """
+
+ @property
+ @abc.abstractmethod
+ def last_update(self) -> datetime.datetime:
+ """
+ Returns the date of last update for this CRL.
+ """
+
+ @property
+ @abc.abstractmethod
+ def extensions(self) -> Extensions:
+ """
+ Returns an Extensions object containing a list of CRL extensions.
+ """
+
+ @property
+ @abc.abstractmethod
+ def signature(self) -> bytes:
+ """
+ Returns the signature bytes.
+ """
+
+ @property
+ @abc.abstractmethod
+ def tbs_certlist_bytes(self) -> bytes:
+ """
+ Returns the tbsCertList payload bytes as defined in RFC 5280.
+ """
+
+ @abc.abstractmethod
+ def __eq__(self, other: object) -> bool:
+ """
+ Checks equality.
+ """
+
+ @abc.abstractmethod
+ def __len__(self) -> int:
+ """
+ Number of revoked certificates in the CRL.
+ """
+
+ @typing.overload
+ def __getitem__(self, idx: int) -> RevokedCertificate:
+ ...
+
+ @typing.overload
+ def __getitem__(self, idx: slice) -> typing.List[RevokedCertificate]:
+ ...
+
+ @abc.abstractmethod
+ def __getitem__(
+ self, idx: typing.Union[int, slice]
+ ) -> typing.Union[RevokedCertificate, typing.List[RevokedCertificate]]:
+ """
+ Returns a revoked certificate (or slice of revoked certificates).
+ """
+
+ @abc.abstractmethod
+ def __iter__(self) -> typing.Iterator[RevokedCertificate]:
+ """
+ Iterator over the revoked certificates
+ """
+
+ @abc.abstractmethod
+ def is_signature_valid(
+ self, public_key: CertificateIssuerPublicKeyTypes
+ ) -> bool:
+ """
+ Verifies signature of revocation list against given public key.
+ """
+
+
+CertificateRevocationList.register(rust_x509.CertificateRevocationList)
+
+
+class CertificateSigningRequest(metaclass=abc.ABCMeta):
+ @abc.abstractmethod
+ def __eq__(self, other: object) -> bool:
+ """
+ Checks equality.
+ """
+
+ @abc.abstractmethod
+ def __hash__(self) -> int:
+ """
+ Computes a hash.
+ """
+
+ @abc.abstractmethod
+ def public_key(self) -> CertificatePublicKeyTypes:
+ """
+ Returns the public key
+ """
+
+ @property
+ @abc.abstractmethod
+ def subject(self) -> Name:
+ """
+ Returns the subject name object.
+ """
+
+ @property
+ @abc.abstractmethod
+ def signature_hash_algorithm(
+ self,
+ ) -> typing.Optional[hashes.HashAlgorithm]:
+ """
+ Returns a HashAlgorithm corresponding to the type of the digest signed
+ in the certificate.
+ """
+
+ @property
+ @abc.abstractmethod
+ def signature_algorithm_oid(self) -> ObjectIdentifier:
+ """
+ Returns the ObjectIdentifier of the signature algorithm.
+ """
+
+ @property
+ @abc.abstractmethod
+ def extensions(self) -> Extensions:
+ """
+ Returns the extensions in the signing request.
+ """
+
+ @property
+ @abc.abstractmethod
+ def attributes(self) -> Attributes:
+ """
+ Returns an Attributes object.
+ """
+
+ @abc.abstractmethod
+ def public_bytes(self, encoding: serialization.Encoding) -> bytes:
+ """
+ Encodes the request to PEM or DER format.
+ """
+
+ @property
+ @abc.abstractmethod
+ def signature(self) -> bytes:
+ """
+ Returns the signature bytes.
+ """
+
+ @property
+ @abc.abstractmethod
+ def tbs_certrequest_bytes(self) -> bytes:
+ """
+ Returns the PKCS#10 CertificationRequestInfo bytes as defined in RFC
+ 2986.
+ """
+
+ @property
+ @abc.abstractmethod
+ def is_signature_valid(self) -> bool:
+ """
+ Verifies signature of signing request.
+ """
+
+ @abc.abstractmethod
+ def get_attribute_for_oid(self, oid: ObjectIdentifier) -> bytes:
+ """
+ Get the attribute value for a given OID.
+ """
+
+
+# Runtime isinstance checks need this since the rust class is not a subclass.
+CertificateSigningRequest.register(rust_x509.CertificateSigningRequest)
+
+
+# Backend argument preserved for API compatibility, but ignored.
+def load_pem_x509_certificate(
+ data: bytes, backend: typing.Any = None
+) -> Certificate:
+ return rust_x509.load_pem_x509_certificate(data)
+
+
+def load_pem_x509_certificates(data: bytes) -> typing.List[Certificate]:
+ return rust_x509.load_pem_x509_certificates(data)
+
+
+# Backend argument preserved for API compatibility, but ignored.
+def load_der_x509_certificate(
+ data: bytes, backend: typing.Any = None
+) -> Certificate:
+ return rust_x509.load_der_x509_certificate(data)
+
+
+# Backend argument preserved for API compatibility, but ignored.
+def load_pem_x509_csr(
+ data: bytes, backend: typing.Any = None
+) -> CertificateSigningRequest:
+ return rust_x509.load_pem_x509_csr(data)
+
+
+# Backend argument preserved for API compatibility, but ignored.
+def load_der_x509_csr(
+ data: bytes, backend: typing.Any = None
+) -> CertificateSigningRequest:
+ return rust_x509.load_der_x509_csr(data)
+
+
+# Backend argument preserved for API compatibility, but ignored.
+def load_pem_x509_crl(
+ data: bytes, backend: typing.Any = None
+) -> CertificateRevocationList:
+ return rust_x509.load_pem_x509_crl(data)
+
+
+# Backend argument preserved for API compatibility, but ignored.
+def load_der_x509_crl(
+ data: bytes, backend: typing.Any = None
+) -> CertificateRevocationList:
+ return rust_x509.load_der_x509_crl(data)
+
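+# Usage sketch for the loaders above (the file name is hypothetical):
+#   with open("cert.pem", "rb") as f:
+#       cert = load_pem_x509_certificate(f.read())
+#   cert.serial_number, cert.not_valid_after  # parsed certificate fields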
+
+class CertificateSigningRequestBuilder:
+ def __init__(
+ self,
+ subject_name: typing.Optional[Name] = None,
+ extensions: typing.List[Extension[ExtensionType]] = [],
+ attributes: typing.List[
+ typing.Tuple[ObjectIdentifier, bytes, typing.Optional[int]]
+ ] = [],
+ ):
+ """
+ Creates an empty X.509 certificate request (v1).
+ """
+ self._subject_name = subject_name
+ self._extensions = extensions
+ self._attributes = attributes
+
+ def subject_name(self, name: Name) -> CertificateSigningRequestBuilder:
+ """
+ Sets the certificate requestor's distinguished name.
+ """
+ if not isinstance(name, Name):
+ raise TypeError("Expecting x509.Name object.")
+ if self._subject_name is not None:
+ raise ValueError("The subject name may only be set once.")
+ return CertificateSigningRequestBuilder(
+ name, self._extensions, self._attributes
+ )
+
+ def add_extension(
+ self, extval: ExtensionType, critical: bool
+ ) -> CertificateSigningRequestBuilder:
+ """
+ Adds an X.509 extension to the certificate request.
+ """
+ if not isinstance(extval, ExtensionType):
+ raise TypeError("extension must be an ExtensionType")
+
+ extension = Extension(extval.oid, critical, extval)
+ _reject_duplicate_extension(extension, self._extensions)
+
+ return CertificateSigningRequestBuilder(
+ self._subject_name,
+ self._extensions + [extension],
+ self._attributes,
+ )
+
+ def add_attribute(
+ self,
+ oid: ObjectIdentifier,
+ value: bytes,
+ *,
+ _tag: typing.Optional[_ASN1Type] = None,
+ ) -> CertificateSigningRequestBuilder:
+ """
+ Adds an X.509 attribute with an OID and associated value.
+ """
+ if not isinstance(oid, ObjectIdentifier):
+ raise TypeError("oid must be an ObjectIdentifier")
+
+ if not isinstance(value, bytes):
+ raise TypeError("value must be bytes")
+
+ if _tag is not None and not isinstance(_tag, _ASN1Type):
+ raise TypeError("tag must be _ASN1Type")
+
+ _reject_duplicate_attribute(oid, self._attributes)
+
+ if _tag is not None:
+ tag = _tag.value
+ else:
+ tag = None
+
+ return CertificateSigningRequestBuilder(
+ self._subject_name,
+ self._extensions,
+ self._attributes + [(oid, value, tag)],
+ )
+
+ def sign(
+ self,
+ private_key: CertificateIssuerPrivateKeyTypes,
+ algorithm: typing.Optional[_AllowedHashTypes],
+ backend: typing.Any = None,
+ ) -> CertificateSigningRequest:
+ """
+ Signs the request using the requestor's private key.
+ """
+ if self._subject_name is None:
+ raise ValueError("A CertificateSigningRequest must have a subject")
+ return rust_x509.create_x509_csr(self, private_key, algorithm)
+
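+# Sketch of the builder pattern above: each setter returns a new builder, so
+# calls chain and partially-built state is never mutated. The key and subject
+# are illustrative (NameAttribute/NameOID come from cryptography.x509):
+#   key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
+#   csr = (
+#       CertificateSigningRequestBuilder()
+#       .subject_name(Name([NameAttribute(NameOID.COMMON_NAME, "example.com")]))
+#       .sign(key, hashes.SHA256())
+#   )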
+
+class CertificateBuilder:
+ _extensions: typing.List[Extension[ExtensionType]]
+
+ def __init__(
+ self,
+ issuer_name: typing.Optional[Name] = None,
+ subject_name: typing.Optional[Name] = None,
+ public_key: typing.Optional[CertificatePublicKeyTypes] = None,
+ serial_number: typing.Optional[int] = None,
+ not_valid_before: typing.Optional[datetime.datetime] = None,
+ not_valid_after: typing.Optional[datetime.datetime] = None,
+ extensions: typing.List[Extension[ExtensionType]] = [],
+ ) -> None:
+ self._version = Version.v3
+ self._issuer_name = issuer_name
+ self._subject_name = subject_name
+ self._public_key = public_key
+ self._serial_number = serial_number
+ self._not_valid_before = not_valid_before
+ self._not_valid_after = not_valid_after
+ self._extensions = extensions
+
+ def issuer_name(self, name: Name) -> CertificateBuilder:
+ """
+ Sets the CA's distinguished name.
+ """
+ if not isinstance(name, Name):
+ raise TypeError("Expecting x509.Name object.")
+ if self._issuer_name is not None:
+ raise ValueError("The issuer name may only be set once.")
+ return CertificateBuilder(
+ name,
+ self._subject_name,
+ self._public_key,
+ self._serial_number,
+ self._not_valid_before,
+ self._not_valid_after,
+ self._extensions,
+ )
+
+ def subject_name(self, name: Name) -> CertificateBuilder:
+ """
+ Sets the requestor's distinguished name.
+ """
+ if not isinstance(name, Name):
+ raise TypeError("Expecting x509.Name object.")
+ if self._subject_name is not None:
+ raise ValueError("The subject name may only be set once.")
+ return CertificateBuilder(
+ self._issuer_name,
+ name,
+ self._public_key,
+ self._serial_number,
+ self._not_valid_before,
+ self._not_valid_after,
+ self._extensions,
+ )
+
+ def public_key(
+ self,
+ key: CertificatePublicKeyTypes,
+ ) -> CertificateBuilder:
+ """
+ Sets the requestor's public key (as found in the signing request).
+ """
+ if not isinstance(
+ key,
+ (
+ dsa.DSAPublicKey,
+ rsa.RSAPublicKey,
+ ec.EllipticCurvePublicKey,
+ ed25519.Ed25519PublicKey,
+ ed448.Ed448PublicKey,
+ x25519.X25519PublicKey,
+ x448.X448PublicKey,
+ ),
+ ):
+ raise TypeError(
+ "Expecting one of DSAPublicKey, RSAPublicKey,"
+ " EllipticCurvePublicKey, Ed25519PublicKey,"
+ " Ed448PublicKey, X25519PublicKey, or "
+ "X448PublicKey."
+ )
+ if self._public_key is not None:
+ raise ValueError("The public key may only be set once.")
+ return CertificateBuilder(
+ self._issuer_name,
+ self._subject_name,
+ key,
+ self._serial_number,
+ self._not_valid_before,
+ self._not_valid_after,
+ self._extensions,
+ )
+
+ def serial_number(self, number: int) -> CertificateBuilder:
+ """
+ Sets the certificate serial number.
+ """
+ if not isinstance(number, int):
+ raise TypeError("Serial number must be of integral type.")
+ if self._serial_number is not None:
+ raise ValueError("The serial number may only be set once.")
+ if number <= 0:
+ raise ValueError("The serial number should be positive.")
+
+ # ASN.1 integers are always signed, so most significant bit must be
+ # zero.
+ if number.bit_length() >= 160: # As defined in RFC 5280
+ raise ValueError(
+ "The serial number should not be more than 159 " "bits."
+ )
+ return CertificateBuilder(
+ self._issuer_name,
+ self._subject_name,
+ self._public_key,
+ number,
+ self._not_valid_before,
+ self._not_valid_after,
+ self._extensions,
+ )
+
+ def not_valid_before(self, time: datetime.datetime) -> CertificateBuilder:
+ """
+ Sets the certificate activation time.
+ """
+ if not isinstance(time, datetime.datetime):
+ raise TypeError("Expecting datetime object.")
+ if self._not_valid_before is not None:
+ raise ValueError("The not valid before may only be set once.")
+ time = _convert_to_naive_utc_time(time)
+ if time < _EARLIEST_UTC_TIME:
+ raise ValueError(
+ "The not valid before date must be on or after"
+ " 1950 January 1)."
+ )
+ if self._not_valid_after is not None and time > self._not_valid_after:
+ raise ValueError(
+ "The not valid before date must be before the not valid after "
+ "date."
+ )
+ return CertificateBuilder(
+ self._issuer_name,
+ self._subject_name,
+ self._public_key,
+ self._serial_number,
+ time,
+ self._not_valid_after,
+ self._extensions,
+ )
+
+ def not_valid_after(self, time: datetime.datetime) -> CertificateBuilder:
+ """
+ Sets the certificate expiration time.
+ """
+ if not isinstance(time, datetime.datetime):
+ raise TypeError("Expecting datetime object.")
+ if self._not_valid_after is not None:
+ raise ValueError("The not valid after may only be set once.")
+ time = _convert_to_naive_utc_time(time)
+ if time < _EARLIEST_UTC_TIME:
+ raise ValueError(
+ "The not valid after date must be on or after"
+ " 1950 January 1."
+ )
+ if (
+ self._not_valid_before is not None
+ and time < self._not_valid_before
+ ):
+ raise ValueError(
+ "The not valid after date must be after the not valid before "
+ "date."
+ )
+ return CertificateBuilder(
+ self._issuer_name,
+ self._subject_name,
+ self._public_key,
+ self._serial_number,
+ self._not_valid_before,
+ time,
+ self._extensions,
+ )
+
+ def add_extension(
+ self, extval: ExtensionType, critical: bool
+ ) -> CertificateBuilder:
+ """
+ Adds an X.509 extension to the certificate.
+ """
+ if not isinstance(extval, ExtensionType):
+ raise TypeError("extension must be an ExtensionType")
+
+ extension = Extension(extval.oid, critical, extval)
+ _reject_duplicate_extension(extension, self._extensions)
+
+ return CertificateBuilder(
+ self._issuer_name,
+ self._subject_name,
+ self._public_key,
+ self._serial_number,
+ self._not_valid_before,
+ self._not_valid_after,
+ self._extensions + [extension],
+ )
+
+ def sign(
+ self,
+ private_key: CertificateIssuerPrivateKeyTypes,
+ algorithm: typing.Optional[_AllowedHashTypes],
+ backend: typing.Any = None,
+ *,
+ rsa_padding: typing.Optional[
+ typing.Union[padding.PSS, padding.PKCS1v15]
+ ] = None,
+ ) -> Certificate:
+ """
+ Signs the certificate using the CA's private key.
+ """
+ if self._subject_name is None:
+ raise ValueError("A certificate must have a subject name")
+
+ if self._issuer_name is None:
+ raise ValueError("A certificate must have an issuer name")
+
+ if self._serial_number is None:
+ raise ValueError("A certificate must have a serial number")
+
+ if self._not_valid_before is None:
+ raise ValueError("A certificate must have a not valid before time")
+
+ if self._not_valid_after is None:
+ raise ValueError("A certificate must have a not valid after time")
+
+ if self._public_key is None:
+ raise ValueError("A certificate must have a public key")
+
+ if rsa_padding is not None:
+ if not isinstance(rsa_padding, (padding.PSS, padding.PKCS1v15)):
+ raise TypeError("Padding must be PSS or PKCS1v15")
+ if not isinstance(private_key, rsa.RSAPrivateKey):
+ raise TypeError("Padding is only supported for RSA keys")
+
+ return rust_x509.create_x509_certificate(
+ self, private_key, algorithm, rsa_padding
+ )
+
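+# Self-signed certificate sketch, under the same illustrative assumptions as
+# the CSR example above (reusing `key`):
+#   name = Name([NameAttribute(NameOID.COMMON_NAME, "example.com")])
+#   now = datetime.datetime.now(datetime.timezone.utc)
+#   cert = (
+#       CertificateBuilder()
+#       .subject_name(name)
+#       .issuer_name(name)
+#       .public_key(key.public_key())
+#       .serial_number(random_serial_number())
+#       .not_valid_before(now)
+#       .not_valid_after(now + datetime.timedelta(days=30))
+#       .sign(key, hashes.SHA256())
+#   )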
+
+class CertificateRevocationListBuilder:
+ _extensions: typing.List[Extension[ExtensionType]]
+ _revoked_certificates: typing.List[RevokedCertificate]
+
+ def __init__(
+ self,
+ issuer_name: typing.Optional[Name] = None,
+ last_update: typing.Optional[datetime.datetime] = None,
+ next_update: typing.Optional[datetime.datetime] = None,
+ extensions: typing.List[Extension[ExtensionType]] = [],
+ revoked_certificates: typing.List[RevokedCertificate] = [],
+ ):
+ self._issuer_name = issuer_name
+ self._last_update = last_update
+ self._next_update = next_update
+ self._extensions = extensions
+ self._revoked_certificates = revoked_certificates
+
+ def issuer_name(
+ self, issuer_name: Name
+ ) -> CertificateRevocationListBuilder:
+ if not isinstance(issuer_name, Name):
+ raise TypeError("Expecting x509.Name object.")
+ if self._issuer_name is not None:
+ raise ValueError("The issuer name may only be set once.")
+ return CertificateRevocationListBuilder(
+ issuer_name,
+ self._last_update,
+ self._next_update,
+ self._extensions,
+ self._revoked_certificates,
+ )
+
+ def last_update(
+ self, last_update: datetime.datetime
+ ) -> CertificateRevocationListBuilder:
+ if not isinstance(last_update, datetime.datetime):
+ raise TypeError("Expecting datetime object.")
+ if self._last_update is not None:
+ raise ValueError("Last update may only be set once.")
+ last_update = _convert_to_naive_utc_time(last_update)
+ if last_update < _EARLIEST_UTC_TIME:
+ raise ValueError(
+ "The last update date must be on or after" " 1950 January 1."
+ )
+ if self._next_update is not None and last_update > self._next_update:
+ raise ValueError(
+ "The last update date must be before the next update date."
+ )
+ return CertificateRevocationListBuilder(
+ self._issuer_name,
+ last_update,
+ self._next_update,
+ self._extensions,
+ self._revoked_certificates,
+ )
+
+ def next_update(
+ self, next_update: datetime.datetime
+ ) -> CertificateRevocationListBuilder:
+ if not isinstance(next_update, datetime.datetime):
+ raise TypeError("Expecting datetime object.")
+ if self._next_update is not None:
+ raise ValueError("Last update may only be set once.")
+ next_update = _convert_to_naive_utc_time(next_update)
+ if next_update < _EARLIEST_UTC_TIME:
+ raise ValueError(
+ "The last update date must be on or after" " 1950 January 1."
+ )
+ if self._last_update is not None and next_update < self._last_update:
+ raise ValueError(
+ "The next update date must be after the last update date."
+ )
+ return CertificateRevocationListBuilder(
+ self._issuer_name,
+ self._last_update,
+ next_update,
+ self._extensions,
+ self._revoked_certificates,
+ )
+
+ def add_extension(
+ self, extval: ExtensionType, critical: bool
+ ) -> CertificateRevocationListBuilder:
+ """
+ Adds an X.509 extension to the certificate revocation list.
+ """
+ if not isinstance(extval, ExtensionType):
+ raise TypeError("extension must be an ExtensionType")
+
+ extension = Extension(extval.oid, critical, extval)
+ _reject_duplicate_extension(extension, self._extensions)
+ return CertificateRevocationListBuilder(
+ self._issuer_name,
+ self._last_update,
+ self._next_update,
+ self._extensions + [extension],
+ self._revoked_certificates,
+ )
+
+ def add_revoked_certificate(
+ self, revoked_certificate: RevokedCertificate
+ ) -> CertificateRevocationListBuilder:
+ """
+ Adds a revoked certificate to the CRL.
+ """
+ if not isinstance(revoked_certificate, RevokedCertificate):
+ raise TypeError("Must be an instance of RevokedCertificate")
+
+ return CertificateRevocationListBuilder(
+ self._issuer_name,
+ self._last_update,
+ self._next_update,
+ self._extensions,
+ self._revoked_certificates + [revoked_certificate],
+ )
+
+ def sign(
+ self,
+ private_key: CertificateIssuerPrivateKeyTypes,
+ algorithm: typing.Optional[_AllowedHashTypes],
+ backend: typing.Any = None,
+ ) -> CertificateRevocationList:
+ if self._issuer_name is None:
+ raise ValueError("A CRL must have an issuer name")
+
+ if self._last_update is None:
+ raise ValueError("A CRL must have a last update time")
+
+ if self._next_update is None:
+ raise ValueError("A CRL must have a next update time")
+
+ return rust_x509.create_x509_crl(self, private_key, algorithm)
+
+
+class RevokedCertificateBuilder:
+ def __init__(
+ self,
+ serial_number: typing.Optional[int] = None,
+ revocation_date: typing.Optional[datetime.datetime] = None,
+ extensions: typing.List[Extension[ExtensionType]] = [],
+ ):
+ self._serial_number = serial_number
+ self._revocation_date = revocation_date
+ self._extensions = extensions
+
+ def serial_number(self, number: int) -> RevokedCertificateBuilder:
+ if not isinstance(number, int):
+ raise TypeError("Serial number must be of integral type.")
+ if self._serial_number is not None:
+ raise ValueError("The serial number may only be set once.")
+ if number <= 0:
+ raise ValueError("The serial number should be positive")
+
+ # ASN.1 integers are always signed, so most significant bit must be
+ # zero.
+ if number.bit_length() >= 160: # As defined in RFC 5280
+ raise ValueError(
+ "The serial number should not be more than 159 " "bits."
+ )
+ return RevokedCertificateBuilder(
+ number, self._revocation_date, self._extensions
+ )
+
+ def revocation_date(
+ self, time: datetime.datetime
+ ) -> RevokedCertificateBuilder:
+ if not isinstance(time, datetime.datetime):
+ raise TypeError("Expecting datetime object.")
+ if self._revocation_date is not None:
+ raise ValueError("The revocation date may only be set once.")
+ time = _convert_to_naive_utc_time(time)
+ if time < _EARLIEST_UTC_TIME:
+ raise ValueError(
+ "The revocation date must be on or after" " 1950 January 1."
+ )
+ return RevokedCertificateBuilder(
+ self._serial_number, time, self._extensions
+ )
+
+ def add_extension(
+ self, extval: ExtensionType, critical: bool
+ ) -> RevokedCertificateBuilder:
+ if not isinstance(extval, ExtensionType):
+ raise TypeError("extension must be an ExtensionType")
+
+ extension = Extension(extval.oid, critical, extval)
+ _reject_duplicate_extension(extension, self._extensions)
+ return RevokedCertificateBuilder(
+ self._serial_number,
+ self._revocation_date,
+ self._extensions + [extension],
+ )
+
+ def build(self, backend: typing.Any = None) -> RevokedCertificate:
+ if self._serial_number is None:
+ raise ValueError("A revoked certificate must have a serial number")
+ if self._revocation_date is None:
+ raise ValueError(
+ "A revoked certificate must have a revocation date"
+ )
+ return _RawRevokedCertificate(
+ self._serial_number,
+ self._revocation_date,
+ Extensions(self._extensions),
+ )
+
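+# CRL sketch combining the two builders above (reuses the illustrative
+# `cert`, `name`, `now`, and `key` from the certificate example):
+#   revoked = (
+#       RevokedCertificateBuilder()
+#       .serial_number(cert.serial_number)
+#       .revocation_date(datetime.datetime.now(datetime.timezone.utc))
+#       .build()
+#   )
+#   crl = (
+#       CertificateRevocationListBuilder()
+#       .issuer_name(name)
+#       .last_update(now)
+#       .next_update(now + datetime.timedelta(days=1))
+#       .add_revoked_certificate(revoked)
+#       .sign(key, hashes.SHA256())
+#   )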
+
+def random_serial_number() -> int:
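+ # 20 random bytes give 160 bits; shifting right by one clears the top bit
+ # so the value fits in 159 bits and always encodes as a positive ASN.1
+ # INTEGER, matching the RFC 5280 bound the builders above enforce.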
+ return int.from_bytes(os.urandom(20), "big") >> 1
diff --git a/billinglayer/python/cryptography/x509/certificate_transparency.py b/billinglayer/python/cryptography/x509/certificate_transparency.py
new file mode 100644
index 0000000..73647ee
--- /dev/null
+++ b/billinglayer/python/cryptography/x509/certificate_transparency.py
@@ -0,0 +1,97 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+import datetime
+
+from cryptography import utils
+from cryptography.hazmat.bindings._rust import x509 as rust_x509
+from cryptography.hazmat.primitives.hashes import HashAlgorithm
+
+
+class LogEntryType(utils.Enum):
+ X509_CERTIFICATE = 0
+ PRE_CERTIFICATE = 1
+
+
+class Version(utils.Enum):
+ v1 = 0
+
+
+class SignatureAlgorithm(utils.Enum):
+ """
+ Signature algorithms that are valid for SCTs.
+
+ These are exactly the same as SignatureAlgorithm in RFC 5246 (TLS 1.2).
+
+ See: <https://datatracker.ietf.org/doc/html/rfc5246#section-7.4.1.4.1>
+ """
+
+ ANONYMOUS = 0
+ RSA = 1
+ DSA = 2
+ ECDSA = 3
+
+
+class SignedCertificateTimestamp(metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def version(self) -> Version:
+ """
+ Returns the SCT version.
+ """
+
+ @property
+ @abc.abstractmethod
+ def log_id(self) -> bytes:
+ """
+ Returns an identifier indicating which log this SCT is for.
+ """
+
+ @property
+ @abc.abstractmethod
+ def timestamp(self) -> datetime.datetime:
+ """
+ Returns the timestamp for this SCT.
+ """
+
+ @property
+ @abc.abstractmethod
+ def entry_type(self) -> LogEntryType:
+ """
+ Returns whether this is an SCT for a certificate or pre-certificate.
+ """
+
+ @property
+ @abc.abstractmethod
+ def signature_hash_algorithm(self) -> HashAlgorithm:
+ """
+ Returns the hash algorithm used for the SCT's signature.
+ """
+
+ @property
+ @abc.abstractmethod
+ def signature_algorithm(self) -> SignatureAlgorithm:
+ """
+ Returns the signing algorithm used for the SCT's signature.
+ """
+
+ @property
+ @abc.abstractmethod
+ def signature(self) -> bytes:
+ """
+ Returns the signature for this SCT.
+ """
+
+ @property
+ @abc.abstractmethod
+ def extension_bytes(self) -> bytes:
+ """
+ Returns the raw bytes of any extensions for this SCT.
+ """
+
+
+SignedCertificateTimestamp.register(rust_x509.Sct)
diff --git a/billinglayer/python/cryptography/x509/extensions.py b/billinglayer/python/cryptography/x509/extensions.py
new file mode 100644
index 0000000..ac99592
--- /dev/null
+++ b/billinglayer/python/cryptography/x509/extensions.py
@@ -0,0 +1,2215 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+import datetime
+import hashlib
+import ipaddress
+import typing
+
+from cryptography import utils
+from cryptography.hazmat.bindings._rust import asn1
+from cryptography.hazmat.bindings._rust import x509 as rust_x509
+from cryptography.hazmat.primitives import constant_time, serialization
+from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePublicKey
+from cryptography.hazmat.primitives.asymmetric.rsa import RSAPublicKey
+from cryptography.hazmat.primitives.asymmetric.types import (
+ CertificateIssuerPublicKeyTypes,
+ CertificatePublicKeyTypes,
+)
+from cryptography.x509.certificate_transparency import (
+ SignedCertificateTimestamp,
+)
+from cryptography.x509.general_name import (
+ DirectoryName,
+ DNSName,
+ GeneralName,
+ IPAddress,
+ OtherName,
+ RegisteredID,
+ RFC822Name,
+ UniformResourceIdentifier,
+ _IPAddressTypes,
+)
+from cryptography.x509.name import Name, RelativeDistinguishedName
+from cryptography.x509.oid import (
+ CRLEntryExtensionOID,
+ ExtensionOID,
+ ObjectIdentifier,
+ OCSPExtensionOID,
+)
+
+ExtensionTypeVar = typing.TypeVar(
+ "ExtensionTypeVar", bound="ExtensionType", covariant=True
+)
+
+
+def _key_identifier_from_public_key(
+ public_key: CertificatePublicKeyTypes,
+) -> bytes:
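+ # SHA-1 over the raw subjectPublicKey bit-string contents: the key
+ # identifier construction from RFC 5280, section 4.2.1.2 (method 1).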
+ if isinstance(public_key, RSAPublicKey):
+ data = public_key.public_bytes(
+ serialization.Encoding.DER,
+ serialization.PublicFormat.PKCS1,
+ )
+ elif isinstance(public_key, EllipticCurvePublicKey):
+ data = public_key.public_bytes(
+ serialization.Encoding.X962,
+ serialization.PublicFormat.UncompressedPoint,
+ )
+ else:
+ # This is a very slow way to do this.
+ serialized = public_key.public_bytes(
+ serialization.Encoding.DER,
+ serialization.PublicFormat.SubjectPublicKeyInfo,
+ )
+ data = asn1.parse_spki_for_data(serialized)
+
+ return hashlib.sha1(data).digest()
+
+
+def _make_sequence_methods(field_name: str):
+ def len_method(self) -> int:
+ return len(getattr(self, field_name))
+
+ def iter_method(self):
+ return iter(getattr(self, field_name))
+
+ def getitem_method(self, idx):
+ return getattr(self, field_name)[idx]
+
+ return len_method, iter_method, getitem_method
+
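+# The factory above lets a class expose list-like behavior over a single
+# attribute via assignment at class-body level, as used throughout this file:
+#   __len__, __iter__, __getitem__ = _make_sequence_methods("_extensions")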
+
+class DuplicateExtension(Exception):
+ def __init__(self, msg: str, oid: ObjectIdentifier) -> None:
+ super().__init__(msg)
+ self.oid = oid
+
+
+class ExtensionNotFound(Exception):
+ def __init__(self, msg: str, oid: ObjectIdentifier) -> None:
+ super().__init__(msg)
+ self.oid = oid
+
+
+class ExtensionType(metaclass=abc.ABCMeta):
+ oid: typing.ClassVar[ObjectIdentifier]
+
+ def public_bytes(self) -> bytes:
+ """
+ Serializes the extension type to DER.
+ """
+ raise NotImplementedError(
+ "public_bytes is not implemented for extension type {!r}".format(
+ self
+ )
+ )
+
+
+class Extensions:
+ def __init__(
+ self, extensions: typing.Iterable[Extension[ExtensionType]]
+ ) -> None:
+ self._extensions = list(extensions)
+
+ def get_extension_for_oid(
+ self, oid: ObjectIdentifier
+ ) -> Extension[ExtensionType]:
+ for ext in self:
+ if ext.oid == oid:
+ return ext
+
+ raise ExtensionNotFound(f"No {oid} extension was found", oid)
+
+ def get_extension_for_class(
+ self, extclass: typing.Type[ExtensionTypeVar]
+ ) -> Extension[ExtensionTypeVar]:
+ if extclass is UnrecognizedExtension:
+ raise TypeError(
+ "UnrecognizedExtension can't be used with "
+ "get_extension_for_class because more than one instance of the"
+ " class may be present."
+ )
+
+ for ext in self:
+ if isinstance(ext.value, extclass):
+ return ext
+
+ raise ExtensionNotFound(
+ f"No {extclass} extension was found", extclass.oid
+ )
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_extensions")
+
+ def __repr__(self) -> str:
+ return f""
+
+
+class CRLNumber(ExtensionType):
+ oid = ExtensionOID.CRL_NUMBER
+
+ def __init__(self, crl_number: int) -> None:
+ if not isinstance(crl_number, int):
+ raise TypeError("crl_number must be an integer")
+
+ self._crl_number = crl_number
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, CRLNumber):
+ return NotImplemented
+
+ return self.crl_number == other.crl_number
+
+ def __hash__(self) -> int:
+ return hash(self.crl_number)
+
+ def __repr__(self) -> str:
+ return f""
+
+ @property
+ def crl_number(self) -> int:
+ return self._crl_number
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class AuthorityKeyIdentifier(ExtensionType):
+ oid = ExtensionOID.AUTHORITY_KEY_IDENTIFIER
+
+ def __init__(
+ self,
+ key_identifier: typing.Optional[bytes],
+ authority_cert_issuer: typing.Optional[typing.Iterable[GeneralName]],
+ authority_cert_serial_number: typing.Optional[int],
+ ) -> None:
+ if (authority_cert_issuer is None) != (
+ authority_cert_serial_number is None
+ ):
+ raise ValueError(
+ "authority_cert_issuer and authority_cert_serial_number "
+ "must both be present or both None"
+ )
+
+ if authority_cert_issuer is not None:
+ authority_cert_issuer = list(authority_cert_issuer)
+ if not all(
+ isinstance(x, GeneralName) for x in authority_cert_issuer
+ ):
+ raise TypeError(
+ "authority_cert_issuer must be a list of GeneralName "
+ "objects"
+ )
+
+ if authority_cert_serial_number is not None and not isinstance(
+ authority_cert_serial_number, int
+ ):
+ raise TypeError("authority_cert_serial_number must be an integer")
+
+ self._key_identifier = key_identifier
+ self._authority_cert_issuer = authority_cert_issuer
+ self._authority_cert_serial_number = authority_cert_serial_number
+
+ # This takes a subset of CertificatePublicKeyTypes because an issuer
+ # cannot have an X25519/X448 key. This introduces some unfortunate
+ # asymmetry that requires typing users to explicitly
+ # narrow their type, but we should make this accurate and not just
+ # convenient.
+ @classmethod
+ def from_issuer_public_key(
+ cls, public_key: CertificateIssuerPublicKeyTypes
+ ) -> AuthorityKeyIdentifier:
+ digest = _key_identifier_from_public_key(public_key)
+ return cls(
+ key_identifier=digest,
+ authority_cert_issuer=None,
+ authority_cert_serial_number=None,
+ )
+
+ @classmethod
+ def from_issuer_subject_key_identifier(
+ cls, ski: SubjectKeyIdentifier
+ ) -> AuthorityKeyIdentifier:
+ return cls(
+ key_identifier=ski.digest,
+ authority_cert_issuer=None,
+ authority_cert_serial_number=None,
+ )
+
+ def __repr__(self) -> str:
+ return (
+ "<AuthorityKeyIdentifier(key_identifier={0.key_identifier!r}, "
+ "authority_cert_issuer={0.authority_cert_issuer}, "
+ "authority_cert_serial_number={0.authority_cert_serial_number}"
+ ")>".format(self)
+ )
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, AuthorityKeyIdentifier):
+ return NotImplemented
+
+ return (
+ self.key_identifier == other.key_identifier
+ and self.authority_cert_issuer == other.authority_cert_issuer
+ and self.authority_cert_serial_number
+ == other.authority_cert_serial_number
+ )
+
+ def __hash__(self) -> int:
+ if self.authority_cert_issuer is None:
+ aci = None
+ else:
+ aci = tuple(self.authority_cert_issuer)
+ return hash(
+ (self.key_identifier, aci, self.authority_cert_serial_number)
+ )
+
+ @property
+ def key_identifier(self) -> typing.Optional[bytes]:
+ return self._key_identifier
+
+ @property
+ def authority_cert_issuer(
+ self,
+ ) -> typing.Optional[typing.List[GeneralName]]:
+ return self._authority_cert_issuer
+
+ @property
+ def authority_cert_serial_number(self) -> typing.Optional[int]:
+ return self._authority_cert_serial_number
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class SubjectKeyIdentifier(ExtensionType):
+ oid = ExtensionOID.SUBJECT_KEY_IDENTIFIER
+
+ def __init__(self, digest: bytes) -> None:
+ self._digest = digest
+
+ @classmethod
+ def from_public_key(
+ cls, public_key: CertificatePublicKeyTypes
+ ) -> SubjectKeyIdentifier:
+ return cls(_key_identifier_from_public_key(public_key))
+
+ @property
+ def digest(self) -> bytes:
+ return self._digest
+
+ @property
+ def key_identifier(self) -> bytes:
+ return self._digest
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, SubjectKeyIdentifier):
+ return NotImplemented
+
+ return constant_time.bytes_eq(self.digest, other.digest)
+
+ def __hash__(self) -> int:
+ return hash(self.digest)
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class AuthorityInformationAccess(ExtensionType):
+ oid = ExtensionOID.AUTHORITY_INFORMATION_ACCESS
+
+ def __init__(
+ self, descriptions: typing.Iterable[AccessDescription]
+ ) -> None:
+ descriptions = list(descriptions)
+ if not all(isinstance(x, AccessDescription) for x in descriptions):
+ raise TypeError(
+ "Every item in the descriptions list must be an "
+ "AccessDescription"
+ )
+
+ self._descriptions = descriptions
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_descriptions")
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, AuthorityInformationAccess):
+ return NotImplemented
+
+ return self._descriptions == other._descriptions
+
+ def __hash__(self) -> int:
+ return hash(tuple(self._descriptions))
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class SubjectInformationAccess(ExtensionType):
+ oid = ExtensionOID.SUBJECT_INFORMATION_ACCESS
+
+ def __init__(
+ self, descriptions: typing.Iterable[AccessDescription]
+ ) -> None:
+ descriptions = list(descriptions)
+ if not all(isinstance(x, AccessDescription) for x in descriptions):
+ raise TypeError(
+ "Every item in the descriptions list must be an "
+ "AccessDescription"
+ )
+
+ self._descriptions = descriptions
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_descriptions")
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, SubjectInformationAccess):
+ return NotImplemented
+
+ return self._descriptions == other._descriptions
+
+ def __hash__(self) -> int:
+ return hash(tuple(self._descriptions))
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class AccessDescription:
+ def __init__(
+ self, access_method: ObjectIdentifier, access_location: GeneralName
+ ) -> None:
+ if not isinstance(access_method, ObjectIdentifier):
+ raise TypeError("access_method must be an ObjectIdentifier")
+
+ if not isinstance(access_location, GeneralName):
+ raise TypeError("access_location must be a GeneralName")
+
+ self._access_method = access_method
+ self._access_location = access_location
+
+ def __repr__(self) -> str:
+ return (
+ "<AccessDescription(access_method={0.access_method}, "
+ "access_location={0.access_location})>".format(self)
+ )
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, AccessDescription):
+ return NotImplemented
+
+ return (
+ self.access_method == other.access_method
+ and self.access_location == other.access_location
+ )
+
+ def __hash__(self) -> int:
+ return hash((self.access_method, self.access_location))
+
+ @property
+ def access_method(self) -> ObjectIdentifier:
+ return self._access_method
+
+ @property
+ def access_location(self) -> GeneralName:
+ return self._access_location
+
+
+class BasicConstraints(ExtensionType):
+ oid = ExtensionOID.BASIC_CONSTRAINTS
+
+ def __init__(self, ca: bool, path_length: typing.Optional[int]) -> None:
+ if not isinstance(ca, bool):
+ raise TypeError("ca must be a boolean value")
+
+ if path_length is not None and not ca:
+ raise ValueError("path_length must be None when ca is False")
+
+ if path_length is not None and (
+ not isinstance(path_length, int) or path_length < 0
+ ):
+ raise TypeError(
+ "path_length must be a non-negative integer or None"
+ )
+
+ self._ca = ca
+ self._path_length = path_length
+
+ @property
+ def ca(self) -> bool:
+ return self._ca
+
+ @property
+ def path_length(self) -> typing.Optional[int]:
+ return self._path_length
+
+ def __repr__(self) -> str:
+ return (
+ "<BasicConstraints(ca={0.ca}, path_length={0.path_length})>"
+ ).format(self)
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, BasicConstraints):
+ return NotImplemented
+
+ return self.ca == other.ca and self.path_length == other.path_length
+
+ def __hash__(self) -> int:
+ return hash((self.ca, self.path_length))
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class DeltaCRLIndicator(ExtensionType):
+ oid = ExtensionOID.DELTA_CRL_INDICATOR
+
+ def __init__(self, crl_number: int) -> None:
+ if not isinstance(crl_number, int):
+ raise TypeError("crl_number must be an integer")
+
+ self._crl_number = crl_number
+
+ @property
+ def crl_number(self) -> int:
+ return self._crl_number
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, DeltaCRLIndicator):
+ return NotImplemented
+
+ return self.crl_number == other.crl_number
+
+ def __hash__(self) -> int:
+ return hash(self.crl_number)
+
+ def __repr__(self) -> str:
+ return f""
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class CRLDistributionPoints(ExtensionType):
+ oid = ExtensionOID.CRL_DISTRIBUTION_POINTS
+
+ def __init__(
+ self, distribution_points: typing.Iterable[DistributionPoint]
+ ) -> None:
+ distribution_points = list(distribution_points)
+ if not all(
+ isinstance(x, DistributionPoint) for x in distribution_points
+ ):
+ raise TypeError(
+ "distribution_points must be a list of DistributionPoint "
+ "objects"
+ )
+
+ self._distribution_points = distribution_points
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods(
+ "_distribution_points"
+ )
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, CRLDistributionPoints):
+ return NotImplemented
+
+ return self._distribution_points == other._distribution_points
+
+ def __hash__(self) -> int:
+ return hash(tuple(self._distribution_points))
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class FreshestCRL(ExtensionType):
+ oid = ExtensionOID.FRESHEST_CRL
+
+ def __init__(
+ self, distribution_points: typing.Iterable[DistributionPoint]
+ ) -> None:
+ distribution_points = list(distribution_points)
+ if not all(
+ isinstance(x, DistributionPoint) for x in distribution_points
+ ):
+ raise TypeError(
+ "distribution_points must be a list of DistributionPoint "
+ "objects"
+ )
+
+ self._distribution_points = distribution_points
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods(
+ "_distribution_points"
+ )
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, FreshestCRL):
+ return NotImplemented
+
+ return self._distribution_points == other._distribution_points
+
+ def __hash__(self) -> int:
+ return hash(tuple(self._distribution_points))
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class DistributionPoint:
+ def __init__(
+ self,
+ full_name: typing.Optional[typing.Iterable[GeneralName]],
+ relative_name: typing.Optional[RelativeDistinguishedName],
+ reasons: typing.Optional[typing.FrozenSet[ReasonFlags]],
+ crl_issuer: typing.Optional[typing.Iterable[GeneralName]],
+ ) -> None:
+ if full_name and relative_name:
+ raise ValueError(
+ "You cannot provide both full_name and relative_name, at "
+ "least one must be None."
+ )
+ if not full_name and not relative_name and not crl_issuer:
+ raise ValueError(
+ "Either full_name, relative_name or crl_issuer must be "
+ "provided."
+ )
+
+ if full_name is not None:
+ full_name = list(full_name)
+ if not all(isinstance(x, GeneralName) for x in full_name):
+ raise TypeError(
+ "full_name must be a list of GeneralName objects"
+ )
+
+ if relative_name:
+ if not isinstance(relative_name, RelativeDistinguishedName):
+ raise TypeError(
+ "relative_name must be a RelativeDistinguishedName"
+ )
+
+ if crl_issuer is not None:
+ crl_issuer = list(crl_issuer)
+ if not all(isinstance(x, GeneralName) for x in crl_issuer):
+ raise TypeError(
+ "crl_issuer must be None or a list of general names"
+ )
+
+ if reasons and (
+ not isinstance(reasons, frozenset)
+ or not all(isinstance(x, ReasonFlags) for x in reasons)
+ ):
+ raise TypeError("reasons must be None or frozenset of ReasonFlags")
+
+ if reasons and (
+ ReasonFlags.unspecified in reasons
+ or ReasonFlags.remove_from_crl in reasons
+ ):
+ raise ValueError(
+ "unspecified and remove_from_crl are not valid reasons in a "
+ "DistributionPoint"
+ )
+
+ self._full_name = full_name
+ self._relative_name = relative_name
+ self._reasons = reasons
+ self._crl_issuer = crl_issuer
+
+ def __repr__(self) -> str:
+ return (
+ "".format(self)
+ )
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, DistributionPoint):
+ return NotImplemented
+
+ return (
+ self.full_name == other.full_name
+ and self.relative_name == other.relative_name
+ and self.reasons == other.reasons
+ and self.crl_issuer == other.crl_issuer
+ )
+
+ def __hash__(self) -> int:
+ if self.full_name is not None:
+ fn: typing.Optional[typing.Tuple[GeneralName, ...]] = tuple(
+ self.full_name
+ )
+ else:
+ fn = None
+
+ if self.crl_issuer is not None:
+ crl_issuer: typing.Optional[
+ typing.Tuple[GeneralName, ...]
+ ] = tuple(self.crl_issuer)
+ else:
+ crl_issuer = None
+
+ return hash((fn, self.relative_name, self.reasons, crl_issuer))
+
+ @property
+ def full_name(self) -> typing.Optional[typing.List[GeneralName]]:
+ return self._full_name
+
+ @property
+ def relative_name(self) -> typing.Optional[RelativeDistinguishedName]:
+ return self._relative_name
+
+ @property
+ def reasons(self) -> typing.Optional[typing.FrozenSet[ReasonFlags]]:
+ return self._reasons
+
+ @property
+ def crl_issuer(self) -> typing.Optional[typing.List[GeneralName]]:
+ return self._crl_issuer
+
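+# A minimal usage sketch (the URL below is hypothetical): the common shape
+# is a single full_name URI. full_name and relative_name are mutually
+# exclusive, and at least one of full_name, relative_name, or crl_issuer
+# is required.
+#
+#     dp = DistributionPoint(
+#         full_name=[UniformResourceIdentifier("http://crl.example.com/ca.crl")],
+#         relative_name=None,
+#         reasons=None,
+#         crl_issuer=None,
+#     )
+#     ext = CRLDistributionPoints([dp])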
+
+class ReasonFlags(utils.Enum):
+ unspecified = "unspecified"
+ key_compromise = "keyCompromise"
+ ca_compromise = "cACompromise"
+ affiliation_changed = "affiliationChanged"
+ superseded = "superseded"
+ cessation_of_operation = "cessationOfOperation"
+ certificate_hold = "certificateHold"
+ privilege_withdrawn = "privilegeWithdrawn"
+ aa_compromise = "aACompromise"
+ remove_from_crl = "removeFromCRL"
+
+
+# These are distribution point bit string mappings. Not to be confused with
+# CRLReason reason flags bit string mappings.
+# ReasonFlags ::= BIT STRING {
+# unused (0),
+# keyCompromise (1),
+# cACompromise (2),
+# affiliationChanged (3),
+# superseded (4),
+# cessationOfOperation (5),
+# certificateHold (6),
+# privilegeWithdrawn (7),
+# aACompromise (8) }
+_REASON_BIT_MAPPING = {
+ 1: ReasonFlags.key_compromise,
+ 2: ReasonFlags.ca_compromise,
+ 3: ReasonFlags.affiliation_changed,
+ 4: ReasonFlags.superseded,
+ 5: ReasonFlags.cessation_of_operation,
+ 6: ReasonFlags.certificate_hold,
+ 7: ReasonFlags.privilege_withdrawn,
+ 8: ReasonFlags.aa_compromise,
+}
+
+_CRLREASONFLAGS = {
+ ReasonFlags.key_compromise: 1,
+ ReasonFlags.ca_compromise: 2,
+ ReasonFlags.affiliation_changed: 3,
+ ReasonFlags.superseded: 4,
+ ReasonFlags.cessation_of_operation: 5,
+ ReasonFlags.certificate_hold: 6,
+ ReasonFlags.privilege_withdrawn: 7,
+ ReasonFlags.aa_compromise: 8,
+}
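+
+# An illustrative sketch of how the table above is consumed, assuming a DER
+# decoder that yields set bit positions: bit 0 ("unused") deliberately has
+# no entry, so only bits 1-8 translate to ReasonFlags members.
+#
+#     set_bits = [1, 3]  # keyCompromise, affiliationChanged
+#     flags = frozenset(_REASON_BIT_MAPPING[b] for b in set_bits)
+#     assert ReasonFlags.key_compromise in flags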
+
+
+class PolicyConstraints(ExtensionType):
+ oid = ExtensionOID.POLICY_CONSTRAINTS
+
+ def __init__(
+ self,
+ require_explicit_policy: typing.Optional[int],
+ inhibit_policy_mapping: typing.Optional[int],
+ ) -> None:
+ if require_explicit_policy is not None and not isinstance(
+ require_explicit_policy, int
+ ):
+ raise TypeError(
+ "require_explicit_policy must be a non-negative integer or "
+ "None"
+ )
+
+ if inhibit_policy_mapping is not None and not isinstance(
+ inhibit_policy_mapping, int
+ ):
+ raise TypeError(
+ "inhibit_policy_mapping must be a non-negative integer or None"
+ )
+
+ if inhibit_policy_mapping is None and require_explicit_policy is None:
+ raise ValueError(
+ "At least one of require_explicit_policy and "
+ "inhibit_policy_mapping must not be None"
+ )
+
+ self._require_explicit_policy = require_explicit_policy
+ self._inhibit_policy_mapping = inhibit_policy_mapping
+
+ def __repr__(self) -> str:
+ return (
+ "".format(self)
+ )
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, PolicyConstraints):
+ return NotImplemented
+
+ return (
+ self.require_explicit_policy == other.require_explicit_policy
+ and self.inhibit_policy_mapping == other.inhibit_policy_mapping
+ )
+
+ def __hash__(self) -> int:
+ return hash(
+ (self.require_explicit_policy, self.inhibit_policy_mapping)
+ )
+
+ @property
+ def require_explicit_policy(self) -> typing.Optional[int]:
+ return self._require_explicit_policy
+
+ @property
+ def inhibit_policy_mapping(self) -> typing.Optional[int]:
+ return self._inhibit_policy_mapping
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
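+# A quick sketch: at least one field must be non-None. Setting
+# require_explicit_policy=0 demands an explicit policy in every certificate
+# from this point down the chain.
+#
+#     pc = PolicyConstraints(
+#         require_explicit_policy=0, inhibit_policy_mapping=None
+#     )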
+
+class CertificatePolicies(ExtensionType):
+ oid = ExtensionOID.CERTIFICATE_POLICIES
+
+ def __init__(self, policies: typing.Iterable[PolicyInformation]) -> None:
+ policies = list(policies)
+ if not all(isinstance(x, PolicyInformation) for x in policies):
+ raise TypeError(
+ "Every item in the policies list must be a "
+ "PolicyInformation"
+ )
+
+ self._policies = policies
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_policies")
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, CertificatePolicies):
+ return NotImplemented
+
+ return self._policies == other._policies
+
+ def __hash__(self) -> int:
+ return hash(tuple(self._policies))
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class PolicyInformation:
+ def __init__(
+ self,
+ policy_identifier: ObjectIdentifier,
+ policy_qualifiers: typing.Optional[
+ typing.Iterable[typing.Union[str, UserNotice]]
+ ],
+ ) -> None:
+ if not isinstance(policy_identifier, ObjectIdentifier):
+ raise TypeError("policy_identifier must be an ObjectIdentifier")
+
+ self._policy_identifier = policy_identifier
+
+ if policy_qualifiers is not None:
+ policy_qualifiers = list(policy_qualifiers)
+ if not all(
+ isinstance(x, (str, UserNotice)) for x in policy_qualifiers
+ ):
+ raise TypeError(
+ "policy_qualifiers must be a list of strings and/or "
+ "UserNotice objects or None"
+ )
+
+ self._policy_qualifiers = policy_qualifiers
+
+ def __repr__(self) -> str:
+ return (
+ "".format(self)
+ )
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, PolicyInformation):
+ return NotImplemented
+
+ return (
+ self.policy_identifier == other.policy_identifier
+ and self.policy_qualifiers == other.policy_qualifiers
+ )
+
+ def __hash__(self) -> int:
+ if self.policy_qualifiers is not None:
+ pq: typing.Optional[
+ typing.Tuple[typing.Union[str, UserNotice], ...]
+ ] = tuple(self.policy_qualifiers)
+ else:
+ pq = None
+
+ return hash((self.policy_identifier, pq))
+
+ @property
+ def policy_identifier(self) -> ObjectIdentifier:
+ return self._policy_identifier
+
+ @property
+ def policy_qualifiers(
+ self,
+ ) -> typing.Optional[typing.List[typing.Union[str, UserNotice]]]:
+ return self._policy_qualifiers
+
+
+class UserNotice:
+ def __init__(
+ self,
+ notice_reference: typing.Optional[NoticeReference],
+ explicit_text: typing.Optional[str],
+ ) -> None:
+ if notice_reference and not isinstance(
+ notice_reference, NoticeReference
+ ):
+ raise TypeError(
+ "notice_reference must be None or a NoticeReference"
+ )
+
+ self._notice_reference = notice_reference
+ self._explicit_text = explicit_text
+
+ def __repr__(self) -> str:
+ return (
+ "".format(self)
+ )
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, UserNotice):
+ return NotImplemented
+
+ return (
+ self.notice_reference == other.notice_reference
+ and self.explicit_text == other.explicit_text
+ )
+
+ def __hash__(self) -> int:
+ return hash((self.notice_reference, self.explicit_text))
+
+ @property
+ def notice_reference(self) -> typing.Optional[NoticeReference]:
+ return self._notice_reference
+
+ @property
+ def explicit_text(self) -> typing.Optional[str]:
+ return self._explicit_text
+
+
+class NoticeReference:
+ def __init__(
+ self,
+ organization: typing.Optional[str],
+ notice_numbers: typing.Iterable[int],
+ ) -> None:
+ self._organization = organization
+ notice_numbers = list(notice_numbers)
+ if not all(isinstance(x, int) for x in notice_numbers):
+ raise TypeError("notice_numbers must be a list of integers")
+
+ self._notice_numbers = notice_numbers
+
+ def __repr__(self) -> str:
+ return (
+ "".format(self)
+ )
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, NoticeReference):
+ return NotImplemented
+
+ return (
+ self.organization == other.organization
+ and self.notice_numbers == other.notice_numbers
+ )
+
+ def __hash__(self) -> int:
+ return hash((self.organization, tuple(self.notice_numbers)))
+
+ @property
+ def organization(self) -> typing.Optional[str]:
+ return self._organization
+
+ @property
+ def notice_numbers(self) -> typing.List[int]:
+ return self._notice_numbers
+
+
+class ExtendedKeyUsage(ExtensionType):
+ oid = ExtensionOID.EXTENDED_KEY_USAGE
+
+ def __init__(self, usages: typing.Iterable[ObjectIdentifier]) -> None:
+ usages = list(usages)
+ if not all(isinstance(x, ObjectIdentifier) for x in usages):
+ raise TypeError(
+ "Every item in the usages list must be an ObjectIdentifier"
+ )
+
+ self._usages = usages
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_usages")
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, ExtendedKeyUsage):
+ return NotImplemented
+
+ return self._usages == other._usages
+
+ def __hash__(self) -> int:
+ return hash(tuple(self._usages))
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class OCSPNoCheck(ExtensionType):
+ oid = ExtensionOID.OCSP_NO_CHECK
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, OCSPNoCheck):
+ return NotImplemented
+
+ return True
+
+ def __hash__(self) -> int:
+ return hash(OCSPNoCheck)
+
+ def __repr__(self) -> str:
+ return ""
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class PrecertPoison(ExtensionType):
+ oid = ExtensionOID.PRECERT_POISON
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, PrecertPoison):
+ return NotImplemented
+
+ return True
+
+ def __hash__(self) -> int:
+ return hash(PrecertPoison)
+
+ def __repr__(self) -> str:
+ return ""
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class TLSFeature(ExtensionType):
+ oid = ExtensionOID.TLS_FEATURE
+
+ def __init__(self, features: typing.Iterable[TLSFeatureType]) -> None:
+ features = list(features)
+ if (
+ not all(isinstance(x, TLSFeatureType) for x in features)
+ or len(features) == 0
+ ):
+ raise TypeError(
+ "features must be a list of elements from the TLSFeatureType "
+ "enum"
+ )
+
+ self._features = features
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_features")
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, TLSFeature):
+ return NotImplemented
+
+ return self._features == other._features
+
+ def __hash__(self) -> int:
+ return hash(tuple(self._features))
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class TLSFeatureType(utils.Enum):
+ # status_request is defined in RFC 6066 and is used for what is commonly
+ # called OCSP Must-Staple when present in the TLS Feature extension in an
+ # X.509 certificate.
+ status_request = 5
+ # status_request_v2 is defined in RFC 6961 and allows multiple OCSP
+ # responses to be provided. It is not currently in use by clients or
+ # servers.
+ status_request_v2 = 17
+
+
+_TLS_FEATURE_TYPE_TO_ENUM = {x.value: x for x in TLSFeatureType}
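+
+# A usage sketch: the extension commonly called "OCSP Must-Staple" is a
+# TLSFeature containing only status_request (value 5).
+#
+#     must_staple = TLSFeature([TLSFeatureType.status_request])
+#     assert _TLS_FEATURE_TYPE_TO_ENUM[5] is TLSFeatureType.status_request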
+
+
+class InhibitAnyPolicy(ExtensionType):
+ oid = ExtensionOID.INHIBIT_ANY_POLICY
+
+ def __init__(self, skip_certs: int) -> None:
+ if not isinstance(skip_certs, int):
+ raise TypeError("skip_certs must be an integer")
+
+ if skip_certs < 0:
+ raise ValueError("skip_certs must be a non-negative integer")
+
+ self._skip_certs = skip_certs
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, InhibitAnyPolicy):
+ return NotImplemented
+
+ return self.skip_certs == other.skip_certs
+
+ def __hash__(self) -> int:
+ return hash(self.skip_certs)
+
+ @property
+ def skip_certs(self) -> int:
+ return self._skip_certs
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class KeyUsage(ExtensionType):
+ oid = ExtensionOID.KEY_USAGE
+
+ def __init__(
+ self,
+ digital_signature: bool,
+ content_commitment: bool,
+ key_encipherment: bool,
+ data_encipherment: bool,
+ key_agreement: bool,
+ key_cert_sign: bool,
+ crl_sign: bool,
+ encipher_only: bool,
+ decipher_only: bool,
+ ) -> None:
+ if not key_agreement and (encipher_only or decipher_only):
+ raise ValueError(
+ "encipher_only and decipher_only can only be true when "
+ "key_agreement is true"
+ )
+
+ self._digital_signature = digital_signature
+ self._content_commitment = content_commitment
+ self._key_encipherment = key_encipherment
+ self._data_encipherment = data_encipherment
+ self._key_agreement = key_agreement
+ self._key_cert_sign = key_cert_sign
+ self._crl_sign = crl_sign
+ self._encipher_only = encipher_only
+ self._decipher_only = decipher_only
+
+ @property
+ def digital_signature(self) -> bool:
+ return self._digital_signature
+
+ @property
+ def content_commitment(self) -> bool:
+ return self._content_commitment
+
+ @property
+ def key_encipherment(self) -> bool:
+ return self._key_encipherment
+
+ @property
+ def data_encipherment(self) -> bool:
+ return self._data_encipherment
+
+ @property
+ def key_agreement(self) -> bool:
+ return self._key_agreement
+
+ @property
+ def key_cert_sign(self) -> bool:
+ return self._key_cert_sign
+
+ @property
+ def crl_sign(self) -> bool:
+ return self._crl_sign
+
+ @property
+ def encipher_only(self) -> bool:
+ if not self.key_agreement:
+ raise ValueError(
+ "encipher_only is undefined unless key_agreement is true"
+ )
+ else:
+ return self._encipher_only
+
+ @property
+ def decipher_only(self) -> bool:
+ if not self.key_agreement:
+ raise ValueError(
+ "decipher_only is undefined unless key_agreement is true"
+ )
+ else:
+ return self._decipher_only
+
+ def __repr__(self) -> str:
+ try:
+ encipher_only = self.encipher_only
+ decipher_only = self.decipher_only
+ except ValueError:
+ # Users found None confusing because even though encipher/decipher
+ # have no meaning unless key_agreement is true, to construct an
+ # instance of the class you still need to pass False.
+ encipher_only = False
+ decipher_only = False
+
+ return (
+ ""
+ ).format(self, encipher_only, decipher_only)
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, KeyUsage):
+ return NotImplemented
+
+ return (
+ self.digital_signature == other.digital_signature
+ and self.content_commitment == other.content_commitment
+ and self.key_encipherment == other.key_encipherment
+ and self.data_encipherment == other.data_encipherment
+ and self.key_agreement == other.key_agreement
+ and self.key_cert_sign == other.key_cert_sign
+ and self.crl_sign == other.crl_sign
+ and self._encipher_only == other._encipher_only
+ and self._decipher_only == other._decipher_only
+ )
+
+ def __hash__(self) -> int:
+ return hash(
+ (
+ self.digital_signature,
+ self.content_commitment,
+ self.key_encipherment,
+ self.data_encipherment,
+ self.key_agreement,
+ self.key_cert_sign,
+ self.crl_sign,
+ self._encipher_only,
+ self._decipher_only,
+ )
+ )
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
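+# A usage sketch for a typical TLS server certificate (the flag choices
+# here are illustrative, not a recommendation): encipher_only and
+# decipher_only must be False whenever key_agreement is False, and reading
+# them in that state raises ValueError.
+#
+#     usage = KeyUsage(
+#         digital_signature=True,
+#         content_commitment=False,
+#         key_encipherment=True,
+#         data_encipherment=False,
+#         key_agreement=False,
+#         key_cert_sign=False,
+#         crl_sign=False,
+#         encipher_only=False,
+#         decipher_only=False,
+#     )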
+
+class NameConstraints(ExtensionType):
+ oid = ExtensionOID.NAME_CONSTRAINTS
+
+ def __init__(
+ self,
+ permitted_subtrees: typing.Optional[typing.Iterable[GeneralName]],
+ excluded_subtrees: typing.Optional[typing.Iterable[GeneralName]],
+ ) -> None:
+ if permitted_subtrees is not None:
+ permitted_subtrees = list(permitted_subtrees)
+ if not permitted_subtrees:
+ raise ValueError(
+ "permitted_subtrees must be a non-empty list or None"
+ )
+ if not all(isinstance(x, GeneralName) for x in permitted_subtrees):
+ raise TypeError(
+ "permitted_subtrees must be a list of GeneralName objects "
+ "or None"
+ )
+
+ self._validate_tree(permitted_subtrees)
+
+ if excluded_subtrees is not None:
+ excluded_subtrees = list(excluded_subtrees)
+ if not excluded_subtrees:
+ raise ValueError(
+ "excluded_subtrees must be a non-empty list or None"
+ )
+ if not all(isinstance(x, GeneralName) for x in excluded_subtrees):
+ raise TypeError(
+ "excluded_subtrees must be a list of GeneralName objects "
+ "or None"
+ )
+
+ self._validate_tree(excluded_subtrees)
+
+ if permitted_subtrees is None and excluded_subtrees is None:
+ raise ValueError(
+ "At least one of permitted_subtrees and excluded_subtrees "
+ "must not be None"
+ )
+
+ self._permitted_subtrees = permitted_subtrees
+ self._excluded_subtrees = excluded_subtrees
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, NameConstraints):
+ return NotImplemented
+
+ return (
+ self.excluded_subtrees == other.excluded_subtrees
+ and self.permitted_subtrees == other.permitted_subtrees
+ )
+
+ def _validate_tree(self, tree: typing.Iterable[GeneralName]) -> None:
+ self._validate_ip_name(tree)
+ self._validate_dns_name(tree)
+
+ def _validate_ip_name(self, tree: typing.Iterable[GeneralName]) -> None:
+ if any(
+ isinstance(name, IPAddress)
+ and not isinstance(
+ name.value, (ipaddress.IPv4Network, ipaddress.IPv6Network)
+ )
+ for name in tree
+ ):
+ raise TypeError(
+ "IPAddress name constraints must be an IPv4Network or"
+ " IPv6Network object"
+ )
+
+ def _validate_dns_name(self, tree: typing.Iterable[GeneralName]) -> None:
+ if any(
+ isinstance(name, DNSName) and "*" in name.value for name in tree
+ ):
+ raise ValueError(
+ "DNSName name constraints must not contain the '*' wildcard"
+ " character"
+ )
+
+ def __repr__(self) -> str:
+ return (
+ "".format(self)
+ )
+
+ def __hash__(self) -> int:
+ if self.permitted_subtrees is not None:
+ ps: typing.Optional[typing.Tuple[GeneralName, ...]] = tuple(
+ self.permitted_subtrees
+ )
+ else:
+ ps = None
+
+ if self.excluded_subtrees is not None:
+ es: typing.Optional[typing.Tuple[GeneralName, ...]] = tuple(
+ self.excluded_subtrees
+ )
+ else:
+ es = None
+
+ return hash((ps, es))
+
+ @property
+ def permitted_subtrees(
+ self,
+ ) -> typing.Optional[typing.List[GeneralName]]:
+ return self._permitted_subtrees
+
+ @property
+ def excluded_subtrees(
+ self,
+ ) -> typing.Optional[typing.List[GeneralName]]:
+ return self._excluded_subtrees
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
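+# A sketch (example.com and the 192.0.2.0/24 documentation range are
+# placeholders): permit one DNS subtree and exclude an IP network. DNSName
+# entries may not contain '*', and IPAddress entries must be network
+# objects, not plain addresses.
+#
+#     nc = NameConstraints(
+#         permitted_subtrees=[DNSName("example.com")],
+#         excluded_subtrees=[IPAddress(ipaddress.ip_network("192.0.2.0/24"))],
+#     )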
+
+class Extension(typing.Generic[ExtensionTypeVar]):
+ def __init__(
+ self, oid: ObjectIdentifier, critical: bool, value: ExtensionTypeVar
+ ) -> None:
+ if not isinstance(oid, ObjectIdentifier):
+ raise TypeError(
+ "oid argument must be an ObjectIdentifier instance."
+ )
+
+ if not isinstance(critical, bool):
+ raise TypeError("critical must be a boolean value")
+
+ self._oid = oid
+ self._critical = critical
+ self._value = value
+
+ @property
+ def oid(self) -> ObjectIdentifier:
+ return self._oid
+
+ @property
+ def critical(self) -> bool:
+ return self._critical
+
+ @property
+ def value(self) -> ExtensionTypeVar:
+ return self._value
+
+ def __repr__(self) -> str:
+ return (
+ ""
+ ).format(self)
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, Extension):
+ return NotImplemented
+
+ return (
+ self.oid == other.oid
+ and self.critical == other.critical
+ and self.value == other.value
+ )
+
+ def __hash__(self) -> int:
+ return hash((self.oid, self.critical, self.value))
+
+
+class GeneralNames:
+ def __init__(self, general_names: typing.Iterable[GeneralName]) -> None:
+ general_names = list(general_names)
+ if not all(isinstance(x, GeneralName) for x in general_names):
+ raise TypeError(
+ "Every item in the general_names list must be an "
+ "object conforming to the GeneralName interface"
+ )
+
+ self._general_names = general_names
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names")
+
+ @typing.overload
+ def get_values_for_type(
+ self,
+ type: typing.Union[
+ typing.Type[DNSName],
+ typing.Type[UniformResourceIdentifier],
+ typing.Type[RFC822Name],
+ ],
+ ) -> typing.List[str]:
+ ...
+
+ @typing.overload
+ def get_values_for_type(
+ self,
+ type: typing.Type[DirectoryName],
+ ) -> typing.List[Name]:
+ ...
+
+ @typing.overload
+ def get_values_for_type(
+ self,
+ type: typing.Type[RegisteredID],
+ ) -> typing.List[ObjectIdentifier]:
+ ...
+
+ @typing.overload
+ def get_values_for_type(
+ self, type: typing.Type[IPAddress]
+ ) -> typing.List[_IPAddressTypes]:
+ ...
+
+ @typing.overload
+ def get_values_for_type(
+ self, type: typing.Type[OtherName]
+ ) -> typing.List[OtherName]:
+ ...
+
+ def get_values_for_type(
+ self,
+ type: typing.Union[
+ typing.Type[DNSName],
+ typing.Type[DirectoryName],
+ typing.Type[IPAddress],
+ typing.Type[OtherName],
+ typing.Type[RFC822Name],
+ typing.Type[RegisteredID],
+ typing.Type[UniformResourceIdentifier],
+ ],
+ ) -> typing.Union[
+ typing.List[_IPAddressTypes],
+ typing.List[str],
+ typing.List[OtherName],
+ typing.List[Name],
+ typing.List[ObjectIdentifier],
+ ]:
+ # Return the value of each GeneralName, except for OtherName
+ # instances, which we return whole because they carry two important
+ # properties, not just one value.
+ objs = (i for i in self if isinstance(i, type))
+ if type != OtherName:
+ return [i.value for i in objs]
+ return list(objs)
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, GeneralNames):
+ return NotImplemented
+
+ return self._general_names == other._general_names
+
+ def __hash__(self) -> int:
+ return hash(tuple(self._general_names))
+
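+# A sketch of get_values_for_type: every matching name is unwrapped to its
+# .value, except OtherName, which is returned whole.
+#
+#     names = GeneralNames([DNSName("example.com"), DNSName("www.example.com")])
+#     assert names.get_values_for_type(DNSName) == [
+#         "example.com", "www.example.com"
+#     ]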
+
+class SubjectAlternativeName(ExtensionType):
+ oid = ExtensionOID.SUBJECT_ALTERNATIVE_NAME
+
+ def __init__(self, general_names: typing.Iterable[GeneralName]) -> None:
+ self._general_names = GeneralNames(general_names)
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names")
+
+ @typing.overload
+ def get_values_for_type(
+ self,
+ type: typing.Union[
+ typing.Type[DNSName],
+ typing.Type[UniformResourceIdentifier],
+ typing.Type[RFC822Name],
+ ],
+ ) -> typing.List[str]:
+ ...
+
+ @typing.overload
+ def get_values_for_type(
+ self,
+ type: typing.Type[DirectoryName],
+ ) -> typing.List[Name]:
+ ...
+
+ @typing.overload
+ def get_values_for_type(
+ self,
+ type: typing.Type[RegisteredID],
+ ) -> typing.List[ObjectIdentifier]:
+ ...
+
+ @typing.overload
+ def get_values_for_type(
+ self, type: typing.Type[IPAddress]
+ ) -> typing.List[_IPAddressTypes]:
+ ...
+
+ @typing.overload
+ def get_values_for_type(
+ self, type: typing.Type[OtherName]
+ ) -> typing.List[OtherName]:
+ ...
+
+ def get_values_for_type(
+ self,
+ type: typing.Union[
+ typing.Type[DNSName],
+ typing.Type[DirectoryName],
+ typing.Type[IPAddress],
+ typing.Type[OtherName],
+ typing.Type[RFC822Name],
+ typing.Type[RegisteredID],
+ typing.Type[UniformResourceIdentifier],
+ ],
+ ) -> typing.Union[
+ typing.List[_IPAddressTypes],
+ typing.List[str],
+ typing.List[OtherName],
+ typing.List[Name],
+ typing.List[ObjectIdentifier],
+ ]:
+ return self._general_names.get_values_for_type(type)
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, SubjectAlternativeName):
+ return NotImplemented
+
+ return self._general_names == other._general_names
+
+ def __hash__(self) -> int:
+ return hash(self._general_names)
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class IssuerAlternativeName(ExtensionType):
+ oid = ExtensionOID.ISSUER_ALTERNATIVE_NAME
+
+ def __init__(self, general_names: typing.Iterable[GeneralName]) -> None:
+ self._general_names = GeneralNames(general_names)
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names")
+
+ @typing.overload
+ def get_values_for_type(
+ self,
+ type: typing.Union[
+ typing.Type[DNSName],
+ typing.Type[UniformResourceIdentifier],
+ typing.Type[RFC822Name],
+ ],
+ ) -> typing.List[str]:
+ ...
+
+ @typing.overload
+ def get_values_for_type(
+ self,
+ type: typing.Type[DirectoryName],
+ ) -> typing.List[Name]:
+ ...
+
+ @typing.overload
+ def get_values_for_type(
+ self,
+ type: typing.Type[RegisteredID],
+ ) -> typing.List[ObjectIdentifier]:
+ ...
+
+ @typing.overload
+ def get_values_for_type(
+ self, type: typing.Type[IPAddress]
+ ) -> typing.List[_IPAddressTypes]:
+ ...
+
+ @typing.overload
+ def get_values_for_type(
+ self, type: typing.Type[OtherName]
+ ) -> typing.List[OtherName]:
+ ...
+
+ def get_values_for_type(
+ self,
+ type: typing.Union[
+ typing.Type[DNSName],
+ typing.Type[DirectoryName],
+ typing.Type[IPAddress],
+ typing.Type[OtherName],
+ typing.Type[RFC822Name],
+ typing.Type[RegisteredID],
+ typing.Type[UniformResourceIdentifier],
+ ],
+ ) -> typing.Union[
+ typing.List[_IPAddressTypes],
+ typing.List[str],
+ typing.List[OtherName],
+ typing.List[Name],
+ typing.List[ObjectIdentifier],
+ ]:
+ return self._general_names.get_values_for_type(type)
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, IssuerAlternativeName):
+ return NotImplemented
+
+ return self._general_names == other._general_names
+
+ def __hash__(self) -> int:
+ return hash(self._general_names)
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class CertificateIssuer(ExtensionType):
+ oid = CRLEntryExtensionOID.CERTIFICATE_ISSUER
+
+ def __init__(self, general_names: typing.Iterable[GeneralName]) -> None:
+ self._general_names = GeneralNames(general_names)
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names")
+
+ @typing.overload
+ def get_values_for_type(
+ self,
+ type: typing.Union[
+ typing.Type[DNSName],
+ typing.Type[UniformResourceIdentifier],
+ typing.Type[RFC822Name],
+ ],
+ ) -> typing.List[str]:
+ ...
+
+ @typing.overload
+ def get_values_for_type(
+ self,
+ type: typing.Type[DirectoryName],
+ ) -> typing.List[Name]:
+ ...
+
+ @typing.overload
+ def get_values_for_type(
+ self,
+ type: typing.Type[RegisteredID],
+ ) -> typing.List[ObjectIdentifier]:
+ ...
+
+ @typing.overload
+ def get_values_for_type(
+ self, type: typing.Type[IPAddress]
+ ) -> typing.List[_IPAddressTypes]:
+ ...
+
+ @typing.overload
+ def get_values_for_type(
+ self, type: typing.Type[OtherName]
+ ) -> typing.List[OtherName]:
+ ...
+
+ def get_values_for_type(
+ self,
+ type: typing.Union[
+ typing.Type[DNSName],
+ typing.Type[DirectoryName],
+ typing.Type[IPAddress],
+ typing.Type[OtherName],
+ typing.Type[RFC822Name],
+ typing.Type[RegisteredID],
+ typing.Type[UniformResourceIdentifier],
+ ],
+ ) -> typing.Union[
+ typing.List[_IPAddressTypes],
+ typing.List[str],
+ typing.List[OtherName],
+ typing.List[Name],
+ typing.List[ObjectIdentifier],
+ ]:
+ return self._general_names.get_values_for_type(type)
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, CertificateIssuer):
+ return NotImplemented
+
+ return self._general_names == other._general_names
+
+ def __hash__(self) -> int:
+ return hash(self._general_names)
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class CRLReason(ExtensionType):
+ oid = CRLEntryExtensionOID.CRL_REASON
+
+ def __init__(self, reason: ReasonFlags) -> None:
+ if not isinstance(reason, ReasonFlags):
+ raise TypeError("reason must be an element from ReasonFlags")
+
+ self._reason = reason
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, CRLReason):
+ return NotImplemented
+
+ return self.reason == other.reason
+
+ def __hash__(self) -> int:
+ return hash(self.reason)
+
+ @property
+ def reason(self) -> ReasonFlags:
+ return self._reason
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class InvalidityDate(ExtensionType):
+ oid = CRLEntryExtensionOID.INVALIDITY_DATE
+
+ def __init__(self, invalidity_date: datetime.datetime) -> None:
+ if not isinstance(invalidity_date, datetime.datetime):
+ raise TypeError("invalidity_date must be a datetime.datetime")
+
+ self._invalidity_date = invalidity_date
+
+ def __repr__(self) -> str:
+ return "".format(
+ self._invalidity_date
+ )
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, InvalidityDate):
+ return NotImplemented
+
+ return self.invalidity_date == other.invalidity_date
+
+ def __hash__(self) -> int:
+ return hash(self.invalidity_date)
+
+ @property
+ def invalidity_date(self) -> datetime.datetime:
+ return self._invalidity_date
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class PrecertificateSignedCertificateTimestamps(ExtensionType):
+ oid = ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS
+
+ def __init__(
+ self,
+ signed_certificate_timestamps: typing.Iterable[
+ SignedCertificateTimestamp
+ ],
+ ) -> None:
+ signed_certificate_timestamps = list(signed_certificate_timestamps)
+ if not all(
+ isinstance(sct, SignedCertificateTimestamp)
+ for sct in signed_certificate_timestamps
+ ):
+ raise TypeError(
+ "Every item in the signed_certificate_timestamps list must be "
+ "a SignedCertificateTimestamp"
+ )
+ self._signed_certificate_timestamps = signed_certificate_timestamps
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods(
+ "_signed_certificate_timestamps"
+ )
+
+ def __repr__(self) -> str:
+ return "".format(
+ list(self)
+ )
+
+ def __hash__(self) -> int:
+ return hash(tuple(self._signed_certificate_timestamps))
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, PrecertificateSignedCertificateTimestamps):
+ return NotImplemented
+
+ return (
+ self._signed_certificate_timestamps
+ == other._signed_certificate_timestamps
+ )
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class SignedCertificateTimestamps(ExtensionType):
+ oid = ExtensionOID.SIGNED_CERTIFICATE_TIMESTAMPS
+
+ def __init__(
+ self,
+ signed_certificate_timestamps: typing.Iterable[
+ SignedCertificateTimestamp
+ ],
+ ) -> None:
+ signed_certificate_timestamps = list(signed_certificate_timestamps)
+ if not all(
+ isinstance(sct, SignedCertificateTimestamp)
+ for sct in signed_certificate_timestamps
+ ):
+ raise TypeError(
+ "Every item in the signed_certificate_timestamps list must be "
+ "a SignedCertificateTimestamp"
+ )
+ self._signed_certificate_timestamps = signed_certificate_timestamps
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods(
+ "_signed_certificate_timestamps"
+ )
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __hash__(self) -> int:
+ return hash(tuple(self._signed_certificate_timestamps))
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, SignedCertificateTimestamps):
+ return NotImplemented
+
+ return (
+ self._signed_certificate_timestamps
+ == other._signed_certificate_timestamps
+ )
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class OCSPNonce(ExtensionType):
+ oid = OCSPExtensionOID.NONCE
+
+ def __init__(self, nonce: bytes) -> None:
+ if not isinstance(nonce, bytes):
+ raise TypeError("nonce must be bytes")
+
+ self._nonce = nonce
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, OCSPNonce):
+ return NotImplemented
+
+ return self.nonce == other.nonce
+
+ def __hash__(self) -> int:
+ return hash(self.nonce)
+
+ def __repr__(self) -> str:
+ return f""
+
+ @property
+ def nonce(self) -> bytes:
+ return self._nonce
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class OCSPAcceptableResponses(ExtensionType):
+ oid = OCSPExtensionOID.ACCEPTABLE_RESPONSES
+
+ def __init__(self, responses: typing.Iterable[ObjectIdentifier]) -> None:
+ responses = list(responses)
+ if any(not isinstance(r, ObjectIdentifier) for r in responses):
+ raise TypeError("All responses must be ObjectIdentifiers")
+
+ self._responses = responses
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, OCSPAcceptableResponses):
+ return NotImplemented
+
+ return self._responses == other._responses
+
+ def __hash__(self) -> int:
+ return hash(tuple(self._responses))
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __iter__(self) -> typing.Iterator[ObjectIdentifier]:
+ return iter(self._responses)
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class IssuingDistributionPoint(ExtensionType):
+ oid = ExtensionOID.ISSUING_DISTRIBUTION_POINT
+
+ def __init__(
+ self,
+ full_name: typing.Optional[typing.Iterable[GeneralName]],
+ relative_name: typing.Optional[RelativeDistinguishedName],
+ only_contains_user_certs: bool,
+ only_contains_ca_certs: bool,
+ only_some_reasons: typing.Optional[typing.FrozenSet[ReasonFlags]],
+ indirect_crl: bool,
+ only_contains_attribute_certs: bool,
+ ) -> None:
+ if full_name is not None:
+ full_name = list(full_name)
+
+ if only_some_reasons and (
+ not isinstance(only_some_reasons, frozenset)
+ or not all(isinstance(x, ReasonFlags) for x in only_some_reasons)
+ ):
+ raise TypeError(
+ "only_some_reasons must be None or frozenset of ReasonFlags"
+ )
+
+ if only_some_reasons and (
+ ReasonFlags.unspecified in only_some_reasons
+ or ReasonFlags.remove_from_crl in only_some_reasons
+ ):
+ raise ValueError(
+ "unspecified and remove_from_crl are not valid reasons in an "
+ "IssuingDistributionPoint"
+ )
+
+ if not (
+ isinstance(only_contains_user_certs, bool)
+ and isinstance(only_contains_ca_certs, bool)
+ and isinstance(indirect_crl, bool)
+ and isinstance(only_contains_attribute_certs, bool)
+ ):
+ raise TypeError(
+ "only_contains_user_certs, only_contains_ca_certs, "
+ "indirect_crl and only_contains_attribute_certs "
+ "must all be boolean."
+ )
+
+ crl_constraints = [
+ only_contains_user_certs,
+ only_contains_ca_certs,
+ indirect_crl,
+ only_contains_attribute_certs,
+ ]
+
+ if len([x for x in crl_constraints if x]) > 1:
+ raise ValueError(
+ "Only one of the following can be set to True: "
+ "only_contains_user_certs, only_contains_ca_certs, "
+ "indirect_crl, only_contains_attribute_certs"
+ )
+
+ if not any(
+ [
+ only_contains_user_certs,
+ only_contains_ca_certs,
+ indirect_crl,
+ only_contains_attribute_certs,
+ full_name,
+ relative_name,
+ only_some_reasons,
+ ]
+ ):
+ raise ValueError(
+ "Cannot create empty extension: "
+ "if only_contains_user_certs, only_contains_ca_certs, "
+ "indirect_crl, and only_contains_attribute_certs are all False"
+ ", then either full_name, relative_name, or only_some_reasons "
+ "must have a value."
+ )
+
+ self._only_contains_user_certs = only_contains_user_certs
+ self._only_contains_ca_certs = only_contains_ca_certs
+ self._indirect_crl = indirect_crl
+ self._only_contains_attribute_certs = only_contains_attribute_certs
+ self._only_some_reasons = only_some_reasons
+ self._full_name = full_name
+ self._relative_name = relative_name
+
+ def __repr__(self) -> str:
+ return (
+ "".format(self)
+ )
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, IssuingDistributionPoint):
+ return NotImplemented
+
+ return (
+ self.full_name == other.full_name
+ and self.relative_name == other.relative_name
+ and self.only_contains_user_certs == other.only_contains_user_certs
+ and self.only_contains_ca_certs == other.only_contains_ca_certs
+ and self.only_some_reasons == other.only_some_reasons
+ and self.indirect_crl == other.indirect_crl
+ and self.only_contains_attribute_certs
+ == other.only_contains_attribute_certs
+ )
+
+ def __hash__(self) -> int:
+ return hash(
+ (
+ self.full_name,
+ self.relative_name,
+ self.only_contains_user_certs,
+ self.only_contains_ca_certs,
+ self.only_some_reasons,
+ self.indirect_crl,
+ self.only_contains_attribute_certs,
+ )
+ )
+
+ @property
+ def full_name(self) -> typing.Optional[typing.List[GeneralName]]:
+ return self._full_name
+
+ @property
+ def relative_name(self) -> typing.Optional[RelativeDistinguishedName]:
+ return self._relative_name
+
+ @property
+ def only_contains_user_certs(self) -> bool:
+ return self._only_contains_user_certs
+
+ @property
+ def only_contains_ca_certs(self) -> bool:
+ return self._only_contains_ca_certs
+
+ @property
+ def only_some_reasons(
+ self,
+ ) -> typing.Optional[typing.FrozenSet[ReasonFlags]]:
+ return self._only_some_reasons
+
+ @property
+ def indirect_crl(self) -> bool:
+ return self._indirect_crl
+
+ @property
+ def only_contains_attribute_certs(self) -> bool:
+ return self._only_contains_attribute_certs
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
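+# A sketch: at most one of the four boolean scope flags may be True, and at
+# least one field overall must be set. For example, a CRL covering only
+# end-entity certificates:
+#
+#     idp = IssuingDistributionPoint(
+#         full_name=None,
+#         relative_name=None,
+#         only_contains_user_certs=True,
+#         only_contains_ca_certs=False,
+#         only_some_reasons=None,
+#         indirect_crl=False,
+#         only_contains_attribute_certs=False,
+#     )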
+
+class MSCertificateTemplate(ExtensionType):
+ oid = ExtensionOID.MS_CERTIFICATE_TEMPLATE
+
+ def __init__(
+ self,
+ template_id: ObjectIdentifier,
+ major_version: typing.Optional[int],
+ minor_version: typing.Optional[int],
+ ) -> None:
+ if not isinstance(template_id, ObjectIdentifier):
+ raise TypeError("oid must be an ObjectIdentifier")
+ self._template_id = template_id
+ if (
+ major_version is not None and not isinstance(major_version, int)
+ ) or (
+ minor_version is not None and not isinstance(minor_version, int)
+ ):
+ raise TypeError(
+ "major_version and minor_version must be integers or None"
+ )
+ self._major_version = major_version
+ self._minor_version = minor_version
+
+ @property
+ def template_id(self) -> ObjectIdentifier:
+ return self._template_id
+
+ @property
+ def major_version(self) -> typing.Optional[int]:
+ return self._major_version
+
+ @property
+ def minor_version(self) -> typing.Optional[int]:
+ return self._minor_version
+
+ def __repr__(self) -> str:
+ return (
+ f""
+ )
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, MSCertificateTemplate):
+ return NotImplemented
+
+ return (
+ self.template_id == other.template_id
+ and self.major_version == other.major_version
+ and self.minor_version == other.minor_version
+ )
+
+ def __hash__(self) -> int:
+ return hash((self.template_id, self.major_version, self.minor_version))
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class UnrecognizedExtension(ExtensionType):
+ def __init__(self, oid: ObjectIdentifier, value: bytes) -> None:
+ if not isinstance(oid, ObjectIdentifier):
+ raise TypeError("oid must be an ObjectIdentifier")
+ self._oid = oid
+ self._value = value
+
+ @property
+ def oid(self) -> ObjectIdentifier: # type: ignore[override]
+ return self._oid
+
+ @property
+ def value(self) -> bytes:
+ return self._value
+
+ def __repr__(self) -> str:
+ return (
+ "".format(self)
+ )
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, UnrecognizedExtension):
+ return NotImplemented
+
+ return self.oid == other.oid and self.value == other.value
+
+ def __hash__(self) -> int:
+ return hash((self.oid, self.value))
+
+ def public_bytes(self) -> bytes:
+ return self.value
diff --git a/billinglayer/python/cryptography/x509/general_name.py b/billinglayer/python/cryptography/x509/general_name.py
new file mode 100644
index 0000000..79271af
--- /dev/null
+++ b/billinglayer/python/cryptography/x509/general_name.py
@@ -0,0 +1,283 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+import ipaddress
+import typing
+from email.utils import parseaddr
+
+from cryptography.x509.name import Name
+from cryptography.x509.oid import ObjectIdentifier
+
+_IPAddressTypes = typing.Union[
+ ipaddress.IPv4Address,
+ ipaddress.IPv6Address,
+ ipaddress.IPv4Network,
+ ipaddress.IPv6Network,
+]
+
+
+class UnsupportedGeneralNameType(Exception):
+ pass
+
+
+class GeneralName(metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def value(self) -> typing.Any:
+ """
+ Return the value of the object
+ """
+
+
+class RFC822Name(GeneralName):
+ def __init__(self, value: str) -> None:
+ if isinstance(value, str):
+ try:
+ value.encode("ascii")
+ except UnicodeEncodeError:
+ raise ValueError(
+ "RFC822Name values should be passed as an A-label string. "
+ "This means unicode characters should be encoded via "
+ "a library like idna."
+ )
+ else:
+ raise TypeError("value must be string")
+
+ name, address = parseaddr(value)
+ if name or not address:
+ # parseaddr has found a name (e.g. Name <email>) or the entire
+ # value is an empty string.
+ raise ValueError("Invalid rfc822name value")
+
+ self._value = value
+
+ @property
+ def value(self) -> str:
+ return self._value
+
+ @classmethod
+ def _init_without_validation(cls, value: str) -> RFC822Name:
+ instance = cls.__new__(cls)
+ instance._value = value
+ return instance
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, RFC822Name):
+ return NotImplemented
+
+ return self.value == other.value
+
+ def __hash__(self) -> int:
+ return hash(self.value)
+
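+# A sketch of the validation above: only a bare, ASCII address is accepted.
+#
+#     RFC822Name("user@example.com")         # ok
+#     RFC822Name("Name <user@example.com>")  # ValueError: display name found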
+
+class DNSName(GeneralName):
+ def __init__(self, value: str) -> None:
+ if isinstance(value, str):
+ try:
+ value.encode("ascii")
+ except UnicodeEncodeError:
+ raise ValueError(
+ "DNSName values should be passed as an A-label string. "
+ "This means unicode characters should be encoded via "
+ "a library like idna."
+ )
+ else:
+ raise TypeError("value must be string")
+
+ self._value = value
+
+ @property
+ def value(self) -> str:
+ return self._value
+
+ @classmethod
+ def _init_without_validation(cls, value: str) -> DNSName:
+ instance = cls.__new__(cls)
+ instance._value = value
+ return instance
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, DNSName):
+ return NotImplemented
+
+ return self.value == other.value
+
+ def __hash__(self) -> int:
+ return hash(self.value)
+
+
+class UniformResourceIdentifier(GeneralName):
+ def __init__(self, value: str) -> None:
+ if isinstance(value, str):
+ try:
+ value.encode("ascii")
+ except UnicodeEncodeError:
+ raise ValueError(
+ "URI values should be passed as an A-label string. "
+ "This means unicode characters should be encoded via "
+ "a library like idna."
+ )
+ else:
+ raise TypeError("value must be string")
+
+ self._value = value
+
+ @property
+ def value(self) -> str:
+ return self._value
+
+ @classmethod
+ def _init_without_validation(cls, value: str) -> UniformResourceIdentifier:
+ instance = cls.__new__(cls)
+ instance._value = value
+ return instance
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, UniformResourceIdentifier):
+ return NotImplemented
+
+ return self.value == other.value
+
+ def __hash__(self) -> int:
+ return hash(self.value)
+
+
+class DirectoryName(GeneralName):
+ def __init__(self, value: Name) -> None:
+ if not isinstance(value, Name):
+ raise TypeError("value must be a Name")
+
+ self._value = value
+
+ @property
+ def value(self) -> Name:
+ return self._value
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, DirectoryName):
+ return NotImplemented
+
+ return self.value == other.value
+
+ def __hash__(self) -> int:
+ return hash(self.value)
+
+
+class RegisteredID(GeneralName):
+ def __init__(self, value: ObjectIdentifier) -> None:
+ if not isinstance(value, ObjectIdentifier):
+ raise TypeError("value must be an ObjectIdentifier")
+
+ self._value = value
+
+ @property
+ def value(self) -> ObjectIdentifier:
+ return self._value
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, RegisteredID):
+ return NotImplemented
+
+ return self.value == other.value
+
+ def __hash__(self) -> int:
+ return hash(self.value)
+
+
+class IPAddress(GeneralName):
+ def __init__(self, value: _IPAddressTypes) -> None:
+ if not isinstance(
+ value,
+ (
+ ipaddress.IPv4Address,
+ ipaddress.IPv6Address,
+ ipaddress.IPv4Network,
+ ipaddress.IPv6Network,
+ ),
+ ):
+ raise TypeError(
+ "value must be an instance of ipaddress.IPv4Address, "
+ "ipaddress.IPv6Address, ipaddress.IPv4Network, or "
+ "ipaddress.IPv6Network"
+ )
+
+ self._value = value
+
+ @property
+ def value(self) -> _IPAddressTypes:
+ return self._value
+
+ def _packed(self) -> bytes:
+ if isinstance(
+ self.value, (ipaddress.IPv4Address, ipaddress.IPv6Address)
+ ):
+ return self.value.packed
+ else:
+ return (
+ self.value.network_address.packed + self.value.netmask.packed
+ )
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, IPAddress):
+ return NotImplemented
+
+ return self.value == other.value
+
+ def __hash__(self) -> int:
+ return hash(self.value)
+
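+# A sketch of _packed() (a private helper): plain addresses pack to 4 or 16
+# bytes, while networks pack to the network address followed by the netmask.
+#
+#     ip = IPAddress(ipaddress.ip_address("192.0.2.1"))
+#     assert ip._packed() == b"\xc0\x00\x02\x01"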
+
+class OtherName(GeneralName):
+ def __init__(self, type_id: ObjectIdentifier, value: bytes) -> None:
+ if not isinstance(type_id, ObjectIdentifier):
+ raise TypeError("type_id must be an ObjectIdentifier")
+ if not isinstance(value, bytes):
+ raise TypeError("value must be a binary string")
+
+ self._type_id = type_id
+ self._value = value
+
+ @property
+ def type_id(self) -> ObjectIdentifier:
+ return self._type_id
+
+ @property
+ def value(self) -> bytes:
+ return self._value
+
+ def __repr__(self) -> str:
+ return "".format(
+ self.type_id, self.value
+ )
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, OtherName):
+ return NotImplemented
+
+ return self.type_id == other.type_id and self.value == other.value
+
+ def __hash__(self) -> int:
+ return hash((self.type_id, self.value))
diff --git a/billinglayer/python/cryptography/x509/name.py b/billinglayer/python/cryptography/x509/name.py
new file mode 100644
index 0000000..ff98e87
--- /dev/null
+++ b/billinglayer/python/cryptography/x509/name.py
@@ -0,0 +1,462 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import binascii
+import re
+import sys
+import typing
+import warnings
+
+from cryptography import utils
+from cryptography.hazmat.bindings._rust import x509 as rust_x509
+from cryptography.x509.oid import NameOID, ObjectIdentifier
+
+
+class _ASN1Type(utils.Enum):
+ BitString = 3
+ OctetString = 4
+ UTF8String = 12
+ NumericString = 18
+ PrintableString = 19
+ T61String = 20
+ IA5String = 22
+ UTCTime = 23
+ GeneralizedTime = 24
+ VisibleString = 26
+ UniversalString = 28
+ BMPString = 30
+
+
+_ASN1_TYPE_TO_ENUM = {i.value: i for i in _ASN1Type}
+_NAMEOID_DEFAULT_TYPE: typing.Dict[ObjectIdentifier, _ASN1Type] = {
+ NameOID.COUNTRY_NAME: _ASN1Type.PrintableString,
+ NameOID.JURISDICTION_COUNTRY_NAME: _ASN1Type.PrintableString,
+ NameOID.SERIAL_NUMBER: _ASN1Type.PrintableString,
+ NameOID.DN_QUALIFIER: _ASN1Type.PrintableString,
+ NameOID.EMAIL_ADDRESS: _ASN1Type.IA5String,
+ NameOID.DOMAIN_COMPONENT: _ASN1Type.IA5String,
+}
+
+# Type alias
+_OidNameMap = typing.Mapping[ObjectIdentifier, str]
+_NameOidMap = typing.Mapping[str, ObjectIdentifier]
+
+#: Short attribute names from RFC 4514:
+#: https://tools.ietf.org/html/rfc4514#page-7
+_NAMEOID_TO_NAME: _OidNameMap = {
+ NameOID.COMMON_NAME: "CN",
+ NameOID.LOCALITY_NAME: "L",
+ NameOID.STATE_OR_PROVINCE_NAME: "ST",
+ NameOID.ORGANIZATION_NAME: "O",
+ NameOID.ORGANIZATIONAL_UNIT_NAME: "OU",
+ NameOID.COUNTRY_NAME: "C",
+ NameOID.STREET_ADDRESS: "STREET",
+ NameOID.DOMAIN_COMPONENT: "DC",
+ NameOID.USER_ID: "UID",
+}
+_NAME_TO_NAMEOID = {v: k for k, v in _NAMEOID_TO_NAME.items()}
+
+
+def _escape_dn_value(val: typing.Union[str, bytes]) -> str:
+ """Escape special characters in RFC4514 Distinguished Name value."""
+
+ if not val:
+ return ""
+
+ # RFC 4514 Section 2.4 defines the value as being the # (U+0023) character
+ # followed by the hexadecimal encoding of the octets.
+ if isinstance(val, bytes):
+ return "#" + binascii.hexlify(val).decode("utf8")
+
+ # See https://tools.ietf.org/html/rfc4514#section-2.4
+ val = val.replace("\\", "\\\\")
+ val = val.replace('"', '\\"')
+ val = val.replace("+", "\\+")
+ val = val.replace(",", "\\,")
+ val = val.replace(";", "\\;")
+ val = val.replace("<", "\\<")
+ val = val.replace(">", "\\>")
+ val = val.replace("\0", "\\00")
+
+ if val[0] in ("#", " "):
+ val = "\\" + val
+ if val[-1] == " ":
+ val = val[:-1] + "\\ "
+
+ return val
+
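+# A couple of illustrative cases for the escaping rules above:
+#
+#     assert _escape_dn_value("a,b") == "a\\,b"        # ',' is backslash-escaped
+#     assert _escape_dn_value(b"\x00\x01") == "#0001"  # bytes become '#' + hex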
+
+def _unescape_dn_value(val: str) -> str:
+ if not val:
+ return ""
+
+ # See https://tools.ietf.org/html/rfc4514#section-3
+
+ # special = escaped / SPACE / SHARP / EQUALS
+ # escaped = DQUOTE / PLUS / COMMA / SEMI / LANGLE / RANGLE
+ def sub(m):
+ val = m.group(1)
+ # Regular escape
+ if len(val) == 1:
+ return val
+ # Hex-value escape
+ return chr(int(val, 16))
+
+ return _RFC4514NameParser._PAIR_RE.sub(sub, val)
+
+
+class NameAttribute:
+ def __init__(
+ self,
+ oid: ObjectIdentifier,
+ value: typing.Union[str, bytes],
+ _type: typing.Optional[_ASN1Type] = None,
+ *,
+ _validate: bool = True,
+ ) -> None:
+ if not isinstance(oid, ObjectIdentifier):
+ raise TypeError(
+ "oid argument must be an ObjectIdentifier instance."
+ )
+ if _type == _ASN1Type.BitString:
+ if oid != NameOID.X500_UNIQUE_IDENTIFIER:
+ raise TypeError(
+ "oid must be X500_UNIQUE_IDENTIFIER for BitString type."
+ )
+ if not isinstance(value, bytes):
+ raise TypeError("value must be bytes for BitString")
+ else:
+ if not isinstance(value, str):
+ raise TypeError("value argument must be a str")
+
+ if (
+ oid == NameOID.COUNTRY_NAME
+ or oid == NameOID.JURISDICTION_COUNTRY_NAME
+ ):
+ assert isinstance(value, str)
+ c_len = len(value.encode("utf8"))
+ if c_len != 2 and _validate is True:
+ raise ValueError(
+ "Country name must be a 2 character country code"
+ )
+ elif c_len != 2:
+ warnings.warn(
+ "Country names should be two characters, but the "
+ "attribute is {} characters in length.".format(c_len),
+ stacklevel=2,
+ )
+
+ # The appropriate ASN1 string type varies by OID and is defined across
+ # multiple RFCs including 2459, 3280, and 5280. In general UTF8String
+ # is preferred (2459), but 3280 and 5280 specify several OIDs with
+ # alternate types. This means when we see the sentinel value we need
+ # to look up whether the OID has a non-UTF8 type. If it does, set it
+ # to that. Otherwise, UTF8!
+ if _type is None:
+ _type = _NAMEOID_DEFAULT_TYPE.get(oid, _ASN1Type.UTF8String)
+
+ if not isinstance(_type, _ASN1Type):
+ raise TypeError("_type must be from the _ASN1Type enum")
+
+ self._oid = oid
+ self._value = value
+ self._type = _type
+
+ @property
+ def oid(self) -> ObjectIdentifier:
+ return self._oid
+
+ @property
+ def value(self) -> typing.Union[str, bytes]:
+ return self._value
+
+ @property
+ def rfc4514_attribute_name(self) -> str:
+ """
+ The short attribute name (for example "CN") if available,
+ otherwise the OID dotted string.
+ """
+ return _NAMEOID_TO_NAME.get(self.oid, self.oid.dotted_string)
+
+ def rfc4514_string(
+ self, attr_name_overrides: typing.Optional[_OidNameMap] = None
+ ) -> str:
+ """
+ Format as RFC4514 Distinguished Name string.
+
+ Use short attribute name if available, otherwise fall back to OID
+ dotted string.
+ """
+ attr_name = (
+ attr_name_overrides.get(self.oid) if attr_name_overrides else None
+ )
+ if attr_name is None:
+ attr_name = self.rfc4514_attribute_name
+
+ return f"{attr_name}={_escape_dn_value(self.value)}"
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, NameAttribute):
+ return NotImplemented
+
+ return self.oid == other.oid and self.value == other.value
+
+ def __hash__(self) -> int:
+ return hash((self.oid, self.value))
+
+ def __repr__(self) -> str:
+ return "".format(self)
+
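+# A sketch of the default-type lookup described above: country names fall
+# back to PrintableString, and anything unlisted defaults to UTF8String
+# (note this peeks at the private _type attribute for illustration).
+#
+#     attr = NameAttribute(NameOID.COUNTRY_NAME, "US")
+#     assert attr._type is _ASN1Type.PrintableString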
+
+class RelativeDistinguishedName:
+ def __init__(self, attributes: typing.Iterable[NameAttribute]):
+ attributes = list(attributes)
+ if not attributes:
+ raise ValueError("a relative distinguished name cannot be empty")
+ if not all(isinstance(x, NameAttribute) for x in attributes):
+ raise TypeError("attributes must be an iterable of NameAttribute")
+
+ # Keep list and frozenset to preserve attribute order where it matters
+ self._attributes = attributes
+ self._attribute_set = frozenset(attributes)
+
+ if len(self._attribute_set) != len(attributes):
+ raise ValueError("duplicate attributes are not allowed")
+
+ def get_attributes_for_oid(
+ self, oid: ObjectIdentifier
+ ) -> typing.List[NameAttribute]:
+ return [i for i in self if i.oid == oid]
+
+ def rfc4514_string(
+ self, attr_name_overrides: typing.Optional[_OidNameMap] = None
+ ) -> str:
+ """
+ Format as RFC4514 Distinguished Name string.
+
+ Within each RDN, attributes are joined by '+', although that is rarely
+ used in certificates.
+ """
+ return "+".join(
+ attr.rfc4514_string(attr_name_overrides)
+ for attr in self._attributes
+ )
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, RelativeDistinguishedName):
+ return NotImplemented
+
+ return self._attribute_set == other._attribute_set
+
+ def __hash__(self) -> int:
+ return hash(self._attribute_set)
+
+ def __iter__(self) -> typing.Iterator[NameAttribute]:
+ return iter(self._attributes)
+
+ def __len__(self) -> int:
+ return len(self._attributes)
+
+ def __repr__(self) -> str:
+ return f""
+
+
+class Name:
+ @typing.overload
+ def __init__(self, attributes: typing.Iterable[NameAttribute]) -> None:
+ ...
+
+ @typing.overload
+ def __init__(
+ self, attributes: typing.Iterable[RelativeDistinguishedName]
+ ) -> None:
+ ...
+
+ def __init__(
+ self,
+ attributes: typing.Iterable[
+ typing.Union[NameAttribute, RelativeDistinguishedName]
+ ],
+ ) -> None:
+ attributes = list(attributes)
+ if all(isinstance(x, NameAttribute) for x in attributes):
+ self._attributes = [
+ RelativeDistinguishedName([typing.cast(NameAttribute, x)])
+ for x in attributes
+ ]
+ elif all(isinstance(x, RelativeDistinguishedName) for x in attributes):
+ self._attributes = typing.cast(
+ typing.List[RelativeDistinguishedName], attributes
+ )
+ else:
+ raise TypeError(
+ "attributes must be a list of NameAttribute"
+ " or a list RelativeDistinguishedName"
+ )
+
+ @classmethod
+ def from_rfc4514_string(
+ cls,
+ data: str,
+ attr_name_overrides: typing.Optional[_NameOidMap] = None,
+ ) -> Name:
+ return _RFC4514NameParser(data, attr_name_overrides or {}).parse()
+
+ def rfc4514_string(
+ self, attr_name_overrides: typing.Optional[_OidNameMap] = None
+ ) -> str:
+ """
+ Format as RFC4514 Distinguished Name string.
+ For example 'CN=foobar.com,O=Foo Corp,C=US'
+
+ An X.509 name is a two-level structure: a list of sets of attributes.
+ Each list element is separated by ',' and within each list element, set
+ elements are separated by '+'. The latter is almost never used in
+ real world certificates. According to RFC4514 section 2.1 the
+ RDNSequence must be reversed when converting to string representation.
+ """
+ return ",".join(
+ attr.rfc4514_string(attr_name_overrides)
+ for attr in reversed(self._attributes)
+ )
+
+ def get_attributes_for_oid(
+ self, oid: ObjectIdentifier
+ ) -> typing.List[NameAttribute]:
+ return [i for i in self if i.oid == oid]
+
+ @property
+ def rdns(self) -> typing.List[RelativeDistinguishedName]:
+ return self._attributes
+
+ def public_bytes(self, backend: typing.Any = None) -> bytes:
+ return rust_x509.encode_name_bytes(self)
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, Name):
+ return NotImplemented
+
+ return self._attributes == other._attributes
+
+ def __hash__(self) -> int:
+ # TODO: this is relatively expensive, if this looks like a bottleneck
+ # for you, consider optimizing!
+ return hash(tuple(self._attributes))
+
+ def __iter__(self) -> typing.Iterator[NameAttribute]:
+ for rdn in self._attributes:
+ for ava in rdn:
+ yield ava
+
+ def __len__(self) -> int:
+ return sum(len(rdn) for rdn in self._attributes)
+
+ def __repr__(self) -> str:
+ rdns = ",".join(attr.rfc4514_string() for attr in self._attributes)
+ return f""
+
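+# A sketch of the RFC 4514 reversal: the last attribute passed to Name()
+# appears first in the string form.
+#
+#     name = Name([
+#         NameAttribute(NameOID.COUNTRY_NAME, "US"),
+#         NameAttribute(NameOID.COMMON_NAME, "example.com"),
+#     ])
+#     assert name.rfc4514_string() == "CN=example.com,C=US"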
+
+class _RFC4514NameParser:
+ _OID_RE = re.compile(r"(0|([1-9]\d*))(\.(0|([1-9]\d*)))+")
+ _DESCR_RE = re.compile(r"[a-zA-Z][a-zA-Z\d-]*")
+
+ _PAIR = r"\\([\\ #=\"\+,;<>]|[\da-zA-Z]{2})"
+ _PAIR_RE = re.compile(_PAIR)
+ _LUTF1 = r"[\x01-\x1f\x21\x24-\x2A\x2D-\x3A\x3D\x3F-\x5B\x5D-\x7F]"
+ _SUTF1 = r"[\x01-\x21\x23-\x2A\x2D-\x3A\x3D\x3F-\x5B\x5D-\x7F]"
+ _TUTF1 = r"[\x01-\x1F\x21\x23-\x2A\x2D-\x3A\x3D\x3F-\x5B\x5D-\x7F]"
+ _UTFMB = rf"[\x80-{chr(sys.maxunicode)}]"
+ _LEADCHAR = rf"{_LUTF1}|{_UTFMB}"
+ _STRINGCHAR = rf"{_SUTF1}|{_UTFMB}"
+ _TRAILCHAR = rf"{_TUTF1}|{_UTFMB}"
+ _STRING_RE = re.compile(
+ rf"""
+ (
+ ({_LEADCHAR}|{_PAIR})
+ (
+ ({_STRINGCHAR}|{_PAIR})*
+ ({_TRAILCHAR}|{_PAIR})
+ )?
+ )?
+ """,
+ re.VERBOSE,
+ )
+ _HEXSTRING_RE = re.compile(r"#([\da-zA-Z]{2})+")
+
+ def __init__(self, data: str, attr_name_overrides: _NameOidMap) -> None:
+ self._data = data
+ self._idx = 0
+
+ self._attr_name_overrides = attr_name_overrides
+
+ def _has_data(self) -> bool:
+ return self._idx < len(self._data)
+
+ def _peek(self) -> typing.Optional[str]:
+ if self._has_data():
+ return self._data[self._idx]
+ return None
+
+ def _read_char(self, ch: str) -> None:
+ if self._peek() != ch:
+ raise ValueError
+ self._idx += 1
+
+ def _read_re(self, pat: typing.Pattern) -> str:
+ match = pat.match(self._data, pos=self._idx)
+ if match is None:
+ raise ValueError
+ val = match.group()
+ self._idx += len(val)
+ return val
+
+ def parse(self) -> Name:
+ """
+ Parses the `data` string and converts it to a Name.
+
+ According to RFC4514 section 2.1 the RDNSequence must be
+ reversed when converting to string representation. So, when
+ we parse it, we need to reverse again to get the RDNs on the
+ correct order.
+ """
+ rdns = [self._parse_rdn()]
+
+ while self._has_data():
+ self._read_char(",")
+ rdns.append(self._parse_rdn())
+
+ return Name(reversed(rdns))
+
+ def _parse_rdn(self) -> RelativeDistinguishedName:
+ nas = [self._parse_na()]
+ while self._peek() == "+":
+ self._read_char("+")
+ nas.append(self._parse_na())
+
+ return RelativeDistinguishedName(nas)
+
+ def _parse_na(self) -> NameAttribute:
+ try:
+ oid_value = self._read_re(self._OID_RE)
+ except ValueError:
+ name = self._read_re(self._DESCR_RE)
+ oid = self._attr_name_overrides.get(
+ name, _NAME_TO_NAMEOID.get(name)
+ )
+ if oid is None:
+ raise ValueError
+ else:
+ oid = ObjectIdentifier(oid_value)
+
+ self._read_char("=")
+ if self._peek() == "#":
+ value = self._read_re(self._HEXSTRING_RE)
+ value = binascii.unhexlify(value[1:]).decode()
+ else:
+ raw_value = self._read_re(self._STRING_RE)
+ value = _unescape_dn_value(raw_value)
+
+ return NameAttribute(oid, value)
diff --git a/billinglayer/python/cryptography/x509/ocsp.py b/billinglayer/python/cryptography/x509/ocsp.py
new file mode 100644
index 0000000..7054795
--- /dev/null
+++ b/billinglayer/python/cryptography/x509/ocsp.py
@@ -0,0 +1,622 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+import datetime
+import typing
+
+from cryptography import utils, x509
+from cryptography.hazmat.bindings._rust import ocsp
+from cryptography.hazmat.primitives import hashes, serialization
+from cryptography.hazmat.primitives.asymmetric.types import (
+ CertificateIssuerPrivateKeyTypes,
+)
+from cryptography.x509.base import (
+ _EARLIEST_UTC_TIME,
+ _convert_to_naive_utc_time,
+ _reject_duplicate_extension,
+)
+
+
+class OCSPResponderEncoding(utils.Enum):
+ HASH = "By Hash"
+ NAME = "By Name"
+
+
+class OCSPResponseStatus(utils.Enum):
+ SUCCESSFUL = 0
+ MALFORMED_REQUEST = 1
+ INTERNAL_ERROR = 2
+ TRY_LATER = 3
+ SIG_REQUIRED = 5
+ UNAUTHORIZED = 6
+
+
+_ALLOWED_HASHES = (
+ hashes.SHA1,
+ hashes.SHA224,
+ hashes.SHA256,
+ hashes.SHA384,
+ hashes.SHA512,
+)
+
+
+def _verify_algorithm(algorithm: hashes.HashAlgorithm) -> None:
+ if not isinstance(algorithm, _ALLOWED_HASHES):
+ raise ValueError(
+ "Algorithm must be SHA1, SHA224, SHA256, SHA384, or SHA512"
+ )
+
+
+class OCSPCertStatus(utils.Enum):
+ GOOD = 0
+ REVOKED = 1
+ UNKNOWN = 2
+
+
+class _SingleResponse:
+ def __init__(
+ self,
+ cert: x509.Certificate,
+ issuer: x509.Certificate,
+ algorithm: hashes.HashAlgorithm,
+ cert_status: OCSPCertStatus,
+ this_update: datetime.datetime,
+ next_update: typing.Optional[datetime.datetime],
+ revocation_time: typing.Optional[datetime.datetime],
+ revocation_reason: typing.Optional[x509.ReasonFlags],
+ ):
+ if not isinstance(cert, x509.Certificate) or not isinstance(
+ issuer, x509.Certificate
+ ):
+ raise TypeError("cert and issuer must be a Certificate")
+
+ _verify_algorithm(algorithm)
+ if not isinstance(this_update, datetime.datetime):
+ raise TypeError("this_update must be a datetime object")
+ if next_update is not None and not isinstance(
+ next_update, datetime.datetime
+ ):
+ raise TypeError("next_update must be a datetime object or None")
+
+ self._cert = cert
+ self._issuer = issuer
+ self._algorithm = algorithm
+ self._this_update = this_update
+ self._next_update = next_update
+
+ if not isinstance(cert_status, OCSPCertStatus):
+ raise TypeError(
+ "cert_status must be an item from the OCSPCertStatus enum"
+ )
+ if cert_status is not OCSPCertStatus.REVOKED:
+ if revocation_time is not None:
+ raise ValueError(
+ "revocation_time can only be provided if the certificate "
+ "is revoked"
+ )
+ if revocation_reason is not None:
+ raise ValueError(
+ "revocation_reason can only be provided if the certificate"
+ " is revoked"
+ )
+ else:
+ if not isinstance(revocation_time, datetime.datetime):
+ raise TypeError("revocation_time must be a datetime object")
+
+ revocation_time = _convert_to_naive_utc_time(revocation_time)
+ if revocation_time < _EARLIEST_UTC_TIME:
+ raise ValueError(
+ "The revocation_time must be on or after"
+ " 1950 January 1."
+ )
+
+ if revocation_reason is not None and not isinstance(
+ revocation_reason, x509.ReasonFlags
+ ):
+ raise TypeError(
+ "revocation_reason must be an item from the ReasonFlags "
+ "enum or None"
+ )
+
+ self._cert_status = cert_status
+ self._revocation_time = revocation_time
+ self._revocation_reason = revocation_reason
+
+
+class OCSPRequest(metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def issuer_key_hash(self) -> bytes:
+ """
+ The hash of the issuer public key
+ """
+
+ @property
+ @abc.abstractmethod
+ def issuer_name_hash(self) -> bytes:
+ """
+ The hash of the issuer name
+ """
+
+ @property
+ @abc.abstractmethod
+ def hash_algorithm(self) -> hashes.HashAlgorithm:
+ """
+ The hash algorithm used in the issuer name and key hashes
+ """
+
+ @property
+ @abc.abstractmethod
+ def serial_number(self) -> int:
+ """
+ The serial number of the cert whose status is being checked
+ """
+
+ @abc.abstractmethod
+ def public_bytes(self, encoding: serialization.Encoding) -> bytes:
+ """
+ Serializes the request to DER
+ """
+
+ @property
+ @abc.abstractmethod
+ def extensions(self) -> x509.Extensions:
+ """
+ The list of request extensions. Not single request extensions.
+ """
+
+
+class OCSPSingleResponse(metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def certificate_status(self) -> OCSPCertStatus:
+ """
+ The status of the certificate (an element from the OCSPCertStatus enum)
+ """
+
+ @property
+ @abc.abstractmethod
+ def revocation_time(self) -> typing.Optional[datetime.datetime]:
+ """
+ The date of when the certificate was revoked or None if not
+ revoked.
+ """
+
+ @property
+ @abc.abstractmethod
+ def revocation_reason(self) -> typing.Optional[x509.ReasonFlags]:
+ """
+ The reason the certificate was revoked or None if not specified or
+ not revoked.
+ """
+
+ @property
+ @abc.abstractmethod
+ def this_update(self) -> datetime.datetime:
+ """
+ The most recent time at which the status being indicated is known by
+ the responder to have been correct
+ """
+
+ @property
+ @abc.abstractmethod
+ def next_update(self) -> typing.Optional[datetime.datetime]:
+ """
+ The time when newer information will be available
+ """
+
+ @property
+ @abc.abstractmethod
+ def issuer_key_hash(self) -> bytes:
+ """
+ The hash of the issuer public key
+ """
+
+ @property
+ @abc.abstractmethod
+ def issuer_name_hash(self) -> bytes:
+ """
+ The hash of the issuer name
+ """
+
+ @property
+ @abc.abstractmethod
+ def hash_algorithm(self) -> hashes.HashAlgorithm:
+ """
+ The hash algorithm used in the issuer name and key hashes
+ """
+
+ @property
+ @abc.abstractmethod
+ def serial_number(self) -> int:
+ """
+ The serial number of the cert whose status is being checked
+ """
+
+
+class OCSPResponse(metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def responses(self) -> typing.Iterator[OCSPSingleResponse]:
+ """
+ An iterator over the individual SINGLERESP structures in the
+ response
+ """
+
+ @property
+ @abc.abstractmethod
+ def response_status(self) -> OCSPResponseStatus:
+ """
+ The status of the response. This is a value from the OCSPResponseStatus
+ enumeration
+ """
+
+ @property
+ @abc.abstractmethod
+ def signature_algorithm_oid(self) -> x509.ObjectIdentifier:
+ """
+ The ObjectIdentifier of the signature algorithm
+ """
+
+ @property
+ @abc.abstractmethod
+ def signature_hash_algorithm(
+ self,
+ ) -> typing.Optional[hashes.HashAlgorithm]:
+ """
+ Returns a HashAlgorithm corresponding to the type of the digest signed
+ """
+
+ @property
+ @abc.abstractmethod
+ def signature(self) -> bytes:
+ """
+ The signature bytes
+ """
+
+ @property
+ @abc.abstractmethod
+ def tbs_response_bytes(self) -> bytes:
+ """
+ The tbsResponseData bytes
+ """
+
+ @property
+ @abc.abstractmethod
+ def certificates(self) -> typing.List[x509.Certificate]:
+ """
+ A list of certificates used to help build a chain to verify the OCSP
+ response. This situation occurs when the OCSP responder uses a delegate
+ certificate.
+ """
+
+ @property
+ @abc.abstractmethod
+ def responder_key_hash(self) -> typing.Optional[bytes]:
+ """
+ The responder's key hash or None
+ """
+
+ @property
+ @abc.abstractmethod
+ def responder_name(self) -> typing.Optional[x509.Name]:
+ """
+ The responder's Name or None
+ """
+
+ @property
+ @abc.abstractmethod
+ def produced_at(self) -> datetime.datetime:
+ """
+ The time the response was produced
+ """
+
+ @property
+ @abc.abstractmethod
+ def certificate_status(self) -> OCSPCertStatus:
+ """
+ The status of the certificate (an element from the OCSPCertStatus enum)
+ """
+
+ @property
+ @abc.abstractmethod
+ def revocation_time(self) -> typing.Optional[datetime.datetime]:
+ """
+ The date of when the certificate was revoked or None if not
+ revoked.
+ """
+
+ @property
+ @abc.abstractmethod
+ def revocation_reason(self) -> typing.Optional[x509.ReasonFlags]:
+ """
+ The reason the certificate was revoked or None if not specified or
+ not revoked.
+ """
+
+ @property
+ @abc.abstractmethod
+ def this_update(self) -> datetime.datetime:
+ """
+ The most recent time at which the status being indicated is known by
+ the responder to have been correct
+ """
+
+ @property
+ @abc.abstractmethod
+ def next_update(self) -> typing.Optional[datetime.datetime]:
+ """
+ The time when newer information will be available
+ """
+
+ @property
+ @abc.abstractmethod
+ def issuer_key_hash(self) -> bytes:
+ """
+ The hash of the issuer public key
+ """
+
+ @property
+ @abc.abstractmethod
+ def issuer_name_hash(self) -> bytes:
+ """
+ The hash of the issuer name
+ """
+
+ @property
+ @abc.abstractmethod
+ def hash_algorithm(self) -> hashes.HashAlgorithm:
+ """
+ The hash algorithm used in the issuer name and key hashes
+ """
+
+ @property
+ @abc.abstractmethod
+ def serial_number(self) -> int:
+ """
+ The serial number of the cert whose status is being checked
+ """
+
+ @property
+ @abc.abstractmethod
+ def extensions(self) -> x509.Extensions:
+ """
+ The list of response extensions. Not single response extensions.
+ """
+
+ @property
+ @abc.abstractmethod
+ def single_extensions(self) -> x509.Extensions:
+ """
+ The list of single response extensions. Not response extensions.
+ """
+
+ @abc.abstractmethod
+ def public_bytes(self, encoding: serialization.Encoding) -> bytes:
+ """
+ Serializes the response to DER
+ """
+
+
+class OCSPRequestBuilder:
+ def __init__(
+ self,
+ request: typing.Optional[
+ typing.Tuple[
+ x509.Certificate, x509.Certificate, hashes.HashAlgorithm
+ ]
+ ] = None,
+ request_hash: typing.Optional[
+ typing.Tuple[bytes, bytes, int, hashes.HashAlgorithm]
+ ] = None,
+ extensions: typing.List[x509.Extension[x509.ExtensionType]] = [],
+ ) -> None:
+ self._request = request
+ self._request_hash = request_hash
+ self._extensions = extensions
+
+ def add_certificate(
+ self,
+ cert: x509.Certificate,
+ issuer: x509.Certificate,
+ algorithm: hashes.HashAlgorithm,
+ ) -> OCSPRequestBuilder:
+ if self._request is not None or self._request_hash is not None:
+ raise ValueError("Only one certificate can be added to a request")
+
+ _verify_algorithm(algorithm)
+ if not isinstance(cert, x509.Certificate) or not isinstance(
+ issuer, x509.Certificate
+ ):
+ raise TypeError("cert and issuer must be a Certificate")
+
+ return OCSPRequestBuilder(
+ (cert, issuer, algorithm), self._request_hash, self._extensions
+ )
+
+ def add_certificate_by_hash(
+ self,
+ issuer_name_hash: bytes,
+ issuer_key_hash: bytes,
+ serial_number: int,
+ algorithm: hashes.HashAlgorithm,
+ ) -> OCSPRequestBuilder:
+ if self._request is not None or self._request_hash is not None:
+ raise ValueError("Only one certificate can be added to a request")
+
+ if not isinstance(serial_number, int):
+ raise TypeError("serial_number must be an integer")
+
+ _verify_algorithm(algorithm)
+ utils._check_bytes("issuer_name_hash", issuer_name_hash)
+ utils._check_bytes("issuer_key_hash", issuer_key_hash)
+ if algorithm.digest_size != len(
+ issuer_name_hash
+ ) or algorithm.digest_size != len(issuer_key_hash):
+ raise ValueError(
+ "issuer_name_hash and issuer_key_hash must be the same length "
+ "as the digest size of the algorithm"
+ )
+
+ return OCSPRequestBuilder(
+ self._request,
+ (issuer_name_hash, issuer_key_hash, serial_number, algorithm),
+ self._extensions,
+ )
+
+ def add_extension(
+ self, extval: x509.ExtensionType, critical: bool
+ ) -> OCSPRequestBuilder:
+ if not isinstance(extval, x509.ExtensionType):
+ raise TypeError("extension must be an ExtensionType")
+
+ extension = x509.Extension(extval.oid, critical, extval)
+ _reject_duplicate_extension(extension, self._extensions)
+
+ return OCSPRequestBuilder(
+ self._request, self._request_hash, self._extensions + [extension]
+ )
+
+ def build(self) -> OCSPRequest:
+ if self._request is None and self._request_hash is None:
+ raise ValueError("You must add a certificate before building")
+
+ return ocsp.create_ocsp_request(self)
+
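+# A minimal usage sketch (hypothetical `cert` and `issuer` Certificate
+# objects assumed):
+#
+#     builder = OCSPRequestBuilder().add_certificate(
+#         cert, issuer, hashes.SHA256()
+#     )
+#     request = builder.build()
+#     der_bytes = request.public_bytes(serialization.Encoding.DER)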
+
+class OCSPResponseBuilder:
+ def __init__(
+ self,
+ response: typing.Optional[_SingleResponse] = None,
+ responder_id: typing.Optional[
+ typing.Tuple[x509.Certificate, OCSPResponderEncoding]
+ ] = None,
+ certs: typing.Optional[typing.List[x509.Certificate]] = None,
+ extensions: typing.List[x509.Extension[x509.ExtensionType]] = [],
+ ):
+ self._response = response
+ self._responder_id = responder_id
+ self._certs = certs
+ self._extensions = extensions
+
+ def add_response(
+ self,
+ cert: x509.Certificate,
+ issuer: x509.Certificate,
+ algorithm: hashes.HashAlgorithm,
+ cert_status: OCSPCertStatus,
+ this_update: datetime.datetime,
+ next_update: typing.Optional[datetime.datetime],
+ revocation_time: typing.Optional[datetime.datetime],
+ revocation_reason: typing.Optional[x509.ReasonFlags],
+ ) -> OCSPResponseBuilder:
+ if self._response is not None:
+ raise ValueError("Only one response per OCSPResponse.")
+
+ singleresp = _SingleResponse(
+ cert,
+ issuer,
+ algorithm,
+ cert_status,
+ this_update,
+ next_update,
+ revocation_time,
+ revocation_reason,
+ )
+ return OCSPResponseBuilder(
+ singleresp,
+ self._responder_id,
+ self._certs,
+ self._extensions,
+ )
+
+ def responder_id(
+ self, encoding: OCSPResponderEncoding, responder_cert: x509.Certificate
+ ) -> OCSPResponseBuilder:
+ if self._responder_id is not None:
+ raise ValueError("responder_id can only be set once")
+ if not isinstance(responder_cert, x509.Certificate):
+ raise TypeError("responder_cert must be a Certificate")
+ if not isinstance(encoding, OCSPResponderEncoding):
+ raise TypeError(
+ "encoding must be an element from OCSPResponderEncoding"
+ )
+
+ return OCSPResponseBuilder(
+ self._response,
+ (responder_cert, encoding),
+ self._certs,
+ self._extensions,
+ )
+
+ def certificates(
+ self, certs: typing.Iterable[x509.Certificate]
+ ) -> OCSPResponseBuilder:
+ if self._certs is not None:
+ raise ValueError("certificates may only be set once")
+ certs = list(certs)
+ if len(certs) == 0:
+ raise ValueError("certs must not be an empty list")
+ if not all(isinstance(x, x509.Certificate) for x in certs):
+ raise TypeError("certs must be a list of Certificates")
+ return OCSPResponseBuilder(
+ self._response,
+ self._responder_id,
+ certs,
+ self._extensions,
+ )
+
+ def add_extension(
+ self, extval: x509.ExtensionType, critical: bool
+ ) -> OCSPResponseBuilder:
+ if not isinstance(extval, x509.ExtensionType):
+ raise TypeError("extension must be an ExtensionType")
+
+ extension = x509.Extension(extval.oid, critical, extval)
+ _reject_duplicate_extension(extension, self._extensions)
+
+ return OCSPResponseBuilder(
+ self._response,
+ self._responder_id,
+ self._certs,
+ self._extensions + [extension],
+ )
+
+ def sign(
+ self,
+ private_key: CertificateIssuerPrivateKeyTypes,
+ algorithm: typing.Optional[hashes.HashAlgorithm],
+ ) -> OCSPResponse:
+ if self._response is None:
+ raise ValueError("You must add a response before signing")
+ if self._responder_id is None:
+ raise ValueError("You must add a responder_id before signing")
+
+ return ocsp.create_ocsp_response(
+ OCSPResponseStatus.SUCCESSFUL, self, private_key, algorithm
+ )
+
+ @classmethod
+ def build_unsuccessful(
+ cls, response_status: OCSPResponseStatus
+ ) -> OCSPResponse:
+ if not isinstance(response_status, OCSPResponseStatus):
+ raise TypeError(
+ "response_status must be an item from OCSPResponseStatus"
+ )
+ if response_status is OCSPResponseStatus.SUCCESSFUL:
+ raise ValueError("response_status cannot be SUCCESSFUL")
+
+ return ocsp.create_ocsp_response(response_status, None, None, None)
+
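+# A minimal sketch of producing a signed response (hypothetical `cert`,
+# `issuer`, `responder_cert`, `responder_key`, and datetime values assumed):
+#
+#     builder = (
+#         OCSPResponseBuilder()
+#         .add_response(
+#             cert, issuer, hashes.SHA256(), OCSPCertStatus.GOOD,
+#             this_update, next_update, None, None,
+#         )
+#         .responder_id(OCSPResponderEncoding.HASH, responder_cert)
+#     )
+#     response = builder.sign(responder_key, hashes.SHA256())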
+
+def load_der_ocsp_request(data: bytes) -> OCSPRequest:
+ return ocsp.load_der_ocsp_request(data)
+
+
+def load_der_ocsp_response(data: bytes) -> OCSPResponse:
+ return ocsp.load_der_ocsp_response(data)
diff --git a/billinglayer/python/cryptography/x509/oid.py b/billinglayer/python/cryptography/x509/oid.py
new file mode 100644
index 0000000..cda50cc
--- /dev/null
+++ b/billinglayer/python/cryptography/x509/oid.py
@@ -0,0 +1,33 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+from cryptography.hazmat._oid import (
+ AttributeOID,
+ AuthorityInformationAccessOID,
+ CertificatePoliciesOID,
+ CRLEntryExtensionOID,
+ ExtendedKeyUsageOID,
+ ExtensionOID,
+ NameOID,
+ ObjectIdentifier,
+ OCSPExtensionOID,
+ SignatureAlgorithmOID,
+ SubjectInformationAccessOID,
+)
+
+__all__ = [
+ "AttributeOID",
+ "AuthorityInformationAccessOID",
+ "CRLEntryExtensionOID",
+ "CertificatePoliciesOID",
+ "ExtendedKeyUsageOID",
+ "ExtensionOID",
+ "NameOID",
+ "OCSPExtensionOID",
+ "ObjectIdentifier",
+ "SignatureAlgorithmOID",
+ "SubjectInformationAccessOID",
+]
diff --git a/billinglayer/python/idna-3.4.dist-info/INSTALLER b/billinglayer/python/idna-3.4.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/billinglayer/python/idna-3.4.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/billinglayer/python/idna-3.4.dist-info/LICENSE.md b/billinglayer/python/idna-3.4.dist-info/LICENSE.md
new file mode 100644
index 0000000..b6f8732
--- /dev/null
+++ b/billinglayer/python/idna-3.4.dist-info/LICENSE.md
@@ -0,0 +1,29 @@
+BSD 3-Clause License
+
+Copyright (c) 2013-2021, Kim Davies
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+3. Neither the name of the copyright holder nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/billinglayer/python/idna-3.4.dist-info/METADATA b/billinglayer/python/idna-3.4.dist-info/METADATA
new file mode 100644
index 0000000..07f6193
--- /dev/null
+++ b/billinglayer/python/idna-3.4.dist-info/METADATA
@@ -0,0 +1,242 @@
+Metadata-Version: 2.1
+Name: idna
+Version: 3.4
+Summary: Internationalized Domain Names in Applications (IDNA)
+Author-email: Kim Davies
+Requires-Python: >=3.5
+Description-Content-Type: text/x-rst
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: System Administrators
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Internet :: Name Service (DNS)
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Utilities
+Project-URL: Changelog, https://github.com/kjd/idna/blob/master/HISTORY.rst
+Project-URL: Issue tracker, https://github.com/kjd/idna/issues
+Project-URL: Source, https://github.com/kjd/idna
+
+Internationalized Domain Names in Applications (IDNA)
+=====================================================
+
+Support for the Internationalized Domain Names in
+Applications (IDNA) protocol as specified in `RFC 5891
+<https://tools.ietf.org/html/rfc5891>`_. This is the latest version of
+the protocol and is sometimes referred to as “IDNA 2008”.
+
+This library also provides support for Unicode Technical
+Standard 46, `Unicode IDNA Compatibility Processing
+<https://unicode.org/reports/tr46/>`_.
+
+This acts as a suitable replacement for the “encodings.idna”
+module that comes with the Python standard library, but which
+only supports the older superseded IDNA specification (`RFC 3490
+<https://tools.ietf.org/html/rfc3490>`_).
+
+Basic functions are simply executed:
+
+.. code-block:: pycon
+
+ >>> import idna
+ >>> idna.encode('ドメイン.テスト')
+ b'xn--eckwd4c7c.xn--zckzah'
+ >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah'))
+ ドメイン.テスト
+
+
+Installation
+------------
+
+This package is available for installation from PyPI:
+
+.. code-block:: bash
+
+ $ python3 -m pip install idna
+
+
+Usage
+-----
+
+For typical usage, the ``encode`` and ``decode`` functions will take a
+domain name argument and perform a conversion to A-labels or U-labels
+respectively.
+
+.. code-block:: pycon
+
+ >>> import idna
+ >>> idna.encode('ドメイン.テスト')
+ b'xn--eckwd4c7c.xn--zckzah'
+ >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah'))
+ ドメイン.テスト
+
+You may use the codec encoding and decoding methods using the
+``idna.codec`` module:
+
+.. code-block:: pycon
+
+ >>> import idna.codec
+ >>> print('домен.испытание'.encode('idna'))
+ b'xn--d1acufc.xn--80akhbyknj4f'
+ >>> print(b'xn--d1acufc.xn--80akhbyknj4f'.decode('idna'))
+ домен.испытание
+
+Conversions can be applied at a per-label basis using the ``ulabel`` or
+``alabel`` functions if necessary:
+
+.. code-block:: pycon
+
+ >>> idna.alabel('测试')
+ b'xn--0zwm56d'
+
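+The ``ulabel`` function performs the inverse conversion:
+
+.. code-block:: pycon
+
+ >>> idna.ulabel(b'xn--0zwm56d')
+ '测试'
+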
+Compatibility Mapping (UTS #46)
++++++++++++++++++++++++++++++++
+
+As described in `RFC 5895 <https://tools.ietf.org/html/rfc5895>`_, the
+IDNA specification does not normalize input from different potential
+ways a user may input a domain name. This functionality, known as
+a “mapping”, is considered by the specification to be a local
+user-interface issue distinct from IDNA conversion functionality.
+
+This library provides one such mapping, that was developed by the
+Unicode Consortium. Known as `Unicode IDNA Compatibility Processing
+<https://unicode.org/reports/tr46/>`_, it provides for both a regular
+mapping for typical applications, as well as a transitional mapping to
+help migrate from older IDNA 2003 applications.
+
+For example, “Königsgäßchen” is not a permissible label as *LATIN
+CAPITAL LETTER K* is not allowed (nor are capital letters in general).
+UTS 46 will convert this into lower case prior to applying the IDNA
+conversion.
+
+.. code-block:: pycon
+
+ >>> import idna
+ >>> idna.encode('Königsgäßchen')
+ Traceback (most recent call last):
+ ...
+ idna.core.InvalidCodepoint: Codepoint U+004B at position 1 of 'Königsgäßchen' not allowed
+ >>> idna.encode('Königsgäßchen', uts46=True)
+ b'xn--knigsgchen-b4a3dun'
+ >>> print(idna.decode('xn--knigsgchen-b4a3dun'))
+ königsgäßchen
+
+Transitional processing provides conversions to help transition from
+the older 2003 standard to the current standard. For example, in the
+original IDNA specification, the *LATIN SMALL LETTER SHARP S* (ß) was
+converted into two *LATIN SMALL LETTER S* (ss), whereas in the current
+IDNA specification this conversion is not performed.
+
+.. code-block:: pycon
+
+ >>> idna.encode('Königsgäßchen', uts46=True, transitional=True)
+ b'xn--knigsgsschen-lcb0w'
+
+Implementors should use transitional processing with caution, only in
+rare cases where conversion from legacy labels to current labels must be
+performed (i.e. IDNA implementations that pre-date 2008). For typical
+applications that just need to convert labels, transitional processing
+is unlikely to be beneficial and could produce unexpected incompatible
+results.
+
+``encodings.idna`` Compatibility
+++++++++++++++++++++++++++++++++
+
+Function calls from the Python built-in ``encodings.idna`` module are
+mapped to their IDNA 2008 equivalents using the ``idna.compat`` module.
+Simply substitute the ``import`` clause in your code to refer to the new
+module name.
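+
+For example, the mapped functions mirror the conversions shown earlier:
+
+.. code-block:: pycon
+
+ >>> import idna.compat
+ >>> idna.compat.ToASCII('ドメイン.テスト')
+ b'xn--eckwd4c7c.xn--zckzah'
+ >>> idna.compat.ToUnicode(b'xn--eckwd4c7c.xn--zckzah')
+ 'ドメイン.テスト'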
+
+Exceptions
+----------
+
+All errors raised during the conversion following the specification
+should raise an exception derived from the ``idna.IDNAError`` base
+class.
+
+More specific exceptions may be generated: ``idna.IDNABidiError``
+when the error reflects an illegal combination of left-to-right and
+right-to-left characters in a label; ``idna.InvalidCodepoint`` when
+a specific codepoint is an illegal character in an IDN label (i.e.
+INVALID); and ``idna.InvalidCodepointContext`` when the codepoint is
+illegal based on its positional context (i.e. it is CONTEXTO or CONTEXTJ
+but the contextual requirements are not satisfied).
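+
+For example, any of these can be caught via the common base class:
+
+.. code-block:: pycon
+
+ >>> import idna
+ >>> try:
+ ...     idna.encode('Königsgäßchen')
+ ... except idna.IDNAError as exc:
+ ...     print(type(exc).__name__)
+ InvalidCodepoint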
+
+Building and Diagnostics
+------------------------
+
+The IDNA and UTS 46 functionality relies upon pre-calculated lookup
+tables for performance. These tables are derived from computing against
+eligibility criteria in the respective standards. These tables are
+computed using the command-line script ``tools/idna-data``.
+
+This tool will fetch relevant codepoint data from the Unicode repository
+and perform the required calculations to identify eligibility. There are
+three main modes:
+
+* ``idna-data make-libdata``. Generates ``idnadata.py`` and
+ ``uts46data.py``, the pre-calculated lookup tables used for IDNA and
+ UTS 46 conversions. Implementors who wish to track this library against
+ a different Unicode version may use this tool to manually generate a
+ different version of the ``idnadata.py`` and ``uts46data.py`` files.
+
+* ``idna-data make-table``. Generates a table of the IDNA disposition
+ (e.g. PVALID, CONTEXTJ, CONTEXTO) in the format found in Appendix
+ B.1 of RFC 5892 and the pre-computed tables published by IANA.
+
+* ``idna-data U+0061``. Prints debugging output on the various
+ properties associated with an individual Unicode codepoint (in this
+ case, U+0061) that are used to assess the IDNA and UTS 46 status of a
+ codepoint. This is helpful in debugging or analysis.
+
+The tool accepts a number of arguments, described using ``idna-data
+-h``. Most notably, the ``--version`` argument allows the specification
+of the version of Unicode to use in computing the table data. For
+example, ``idna-data --version 9.0.0 make-libdata`` will generate
+library data against Unicode 9.0.0.
+
+
+Additional Notes
+----------------
+
+* **Packages**. The latest tagged release version is published in the
+ `Python Package Index <https://pypi.org/project/idna/>`_.
+
+* **Version support**. This library supports Python 3.5 and higher.
+ As this library serves as a low-level toolkit for a variety of
+ applications, many of which strive for broad compatibility with older
+ Python versions, there is no rush to remove older interpreter support.
+ Removing support for older versions requires strong justification, such
+ as the maintenance burden becoming too high.
+
+* **Python 2**. Python 2 is supported by version 2.x of this library.
+ While active development of the version 2.x series has ended, notable
+ issues being corrected may be backported to 2.x. Use "idna<3" in your
+ requirements file if you need this library for a Python 2 application.
+
+* **Testing**. The library has a test suite based on each rule of the
+ IDNA specification, as well as tests that are provided as part of the
+ Unicode Technical Standard 46, `Unicode IDNA Compatibility Processing
+ <https://unicode.org/reports/tr46/>`_.
+
+* **Emoji**. It is an occasional request to support emoji domains in
+ this library. Encoding of symbols like emoji is expressly prohibited by
+ the technical standard IDNA 2008, and emoji domains are broadly phased
+ out across the domain industry due to associated security risks. For
+ now, applications that need to support these non-compliant labels may
+ consider trying the encode/decode operation in this library first, and
+ then falling back to `encodings.idna`. See `the GitHub project
+ <https://github.com/kjd/idna>`_ for more discussion.
+
diff --git a/billinglayer/python/idna-3.4.dist-info/RECORD b/billinglayer/python/idna-3.4.dist-info/RECORD
new file mode 100644
index 0000000..5da1ae1
--- /dev/null
+++ b/billinglayer/python/idna-3.4.dist-info/RECORD
@@ -0,0 +1,22 @@
+idna-3.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+idna-3.4.dist-info/LICENSE.md,sha256=otbk2UC9JNvnuWRc3hmpeSzFHbeuDVrNMBrIYMqj6DY,1523
+idna-3.4.dist-info/METADATA,sha256=8aLSf9MFS7oB26pZh2hprg7eJp0UJSc-3rpf_evp4DA,9830
+idna-3.4.dist-info/RECORD,,
+idna-3.4.dist-info/WHEEL,sha256=4TfKIB_xu-04bc2iKz6_zFt-gEFEEDU_31HGhqzOCE8,81
+idna/__init__.py,sha256=KJQN1eQBr8iIK5SKrJ47lXvxG0BJ7Lm38W4zT0v_8lk,849
+idna/__pycache__/__init__.cpython-311.pyc,,
+idna/__pycache__/codec.cpython-311.pyc,,
+idna/__pycache__/compat.cpython-311.pyc,,
+idna/__pycache__/core.cpython-311.pyc,,
+idna/__pycache__/idnadata.cpython-311.pyc,,
+idna/__pycache__/intranges.cpython-311.pyc,,
+idna/__pycache__/package_data.cpython-311.pyc,,
+idna/__pycache__/uts46data.cpython-311.pyc,,
+idna/codec.py,sha256=6ly5odKfqrytKT9_7UrlGklHnf1DSK2r9C6cSM4sa28,3374
+idna/compat.py,sha256=0_sOEUMT4CVw9doD3vyRhX80X19PwqFoUBs7gWsFME4,321
+idna/core.py,sha256=1JxchwKzkxBSn7R_oCE12oBu3eVux0VzdxolmIad24M,12950
+idna/idnadata.py,sha256=xUjqKqiJV8Ho_XzBpAtv5JFoVPSupK-SUXvtjygUHqw,44375
+idna/intranges.py,sha256=YBr4fRYuWH7kTKS2tXlFjM24ZF1Pdvcir-aywniInqg,1881
+idna/package_data.py,sha256=C_jHJzmX8PI4xq0jpzmcTMxpb5lDsq4o5VyxQzlVrZE,21
+idna/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+idna/uts46data.py,sha256=zvjZU24s58_uAS850Mcd0NnD0X7_gCMAMjzWNIeUJdc,206539
diff --git a/billinglayer/python/idna-3.4.dist-info/WHEEL b/billinglayer/python/idna-3.4.dist-info/WHEEL
new file mode 100644
index 0000000..668ba4d
--- /dev/null
+++ b/billinglayer/python/idna-3.4.dist-info/WHEEL
@@ -0,0 +1,4 @@
+Wheel-Version: 1.0
+Generator: flit 3.7.1
+Root-Is-Purelib: true
+Tag: py3-none-any
diff --git a/billinglayer/python/idna/__init__.py b/billinglayer/python/idna/__init__.py
new file mode 100644
index 0000000..a40eeaf
--- /dev/null
+++ b/billinglayer/python/idna/__init__.py
@@ -0,0 +1,44 @@
+from .package_data import __version__
+from .core import (
+ IDNABidiError,
+ IDNAError,
+ InvalidCodepoint,
+ InvalidCodepointContext,
+ alabel,
+ check_bidi,
+ check_hyphen_ok,
+ check_initial_combiner,
+ check_label,
+ check_nfc,
+ decode,
+ encode,
+ ulabel,
+ uts46_remap,
+ valid_contextj,
+ valid_contexto,
+ valid_label_length,
+ valid_string_length,
+)
+from .intranges import intranges_contain
+
+__all__ = [
+ "IDNABidiError",
+ "IDNAError",
+ "InvalidCodepoint",
+ "InvalidCodepointContext",
+ "alabel",
+ "check_bidi",
+ "check_hyphen_ok",
+ "check_initial_combiner",
+ "check_label",
+ "check_nfc",
+ "decode",
+ "encode",
+ "intranges_contain",
+ "ulabel",
+ "uts46_remap",
+ "valid_contextj",
+ "valid_contexto",
+ "valid_label_length",
+ "valid_string_length",
+]
diff --git a/billinglayer/python/idna/__pycache__/__init__.cpython-311.pyc b/billinglayer/python/idna/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..1e3e2e1
Binary files /dev/null and b/billinglayer/python/idna/__pycache__/__init__.cpython-311.pyc differ
diff --git a/billinglayer/python/idna/__pycache__/codec.cpython-311.pyc b/billinglayer/python/idna/__pycache__/codec.cpython-311.pyc
new file mode 100644
index 0000000..c65aab9
Binary files /dev/null and b/billinglayer/python/idna/__pycache__/codec.cpython-311.pyc differ
diff --git a/billinglayer/python/idna/__pycache__/compat.cpython-311.pyc b/billinglayer/python/idna/__pycache__/compat.cpython-311.pyc
new file mode 100644
index 0000000..68ee71b
Binary files /dev/null and b/billinglayer/python/idna/__pycache__/compat.cpython-311.pyc differ
diff --git a/billinglayer/python/idna/__pycache__/core.cpython-311.pyc b/billinglayer/python/idna/__pycache__/core.cpython-311.pyc
new file mode 100644
index 0000000..1bd0f9d
Binary files /dev/null and b/billinglayer/python/idna/__pycache__/core.cpython-311.pyc differ
diff --git a/billinglayer/python/idna/__pycache__/idnadata.cpython-311.pyc b/billinglayer/python/idna/__pycache__/idnadata.cpython-311.pyc
new file mode 100644
index 0000000..dd314e7
Binary files /dev/null and b/billinglayer/python/idna/__pycache__/idnadata.cpython-311.pyc differ
diff --git a/billinglayer/python/idna/__pycache__/intranges.cpython-311.pyc b/billinglayer/python/idna/__pycache__/intranges.cpython-311.pyc
new file mode 100644
index 0000000..7536a50
Binary files /dev/null and b/billinglayer/python/idna/__pycache__/intranges.cpython-311.pyc differ
diff --git a/billinglayer/python/idna/__pycache__/package_data.cpython-311.pyc b/billinglayer/python/idna/__pycache__/package_data.cpython-311.pyc
new file mode 100644
index 0000000..a6f1dd9
Binary files /dev/null and b/billinglayer/python/idna/__pycache__/package_data.cpython-311.pyc differ
diff --git a/billinglayer/python/idna/__pycache__/uts46data.cpython-311.pyc b/billinglayer/python/idna/__pycache__/uts46data.cpython-311.pyc
new file mode 100644
index 0000000..6cb226b
Binary files /dev/null and b/billinglayer/python/idna/__pycache__/uts46data.cpython-311.pyc differ
diff --git a/billinglayer/python/idna/codec.py b/billinglayer/python/idna/codec.py
new file mode 100644
index 0000000..1ca9ba6
--- /dev/null
+++ b/billinglayer/python/idna/codec.py
@@ -0,0 +1,112 @@
+from .core import encode, decode, alabel, ulabel, IDNAError
+import codecs
+import re
+from typing import Optional, Tuple, Union
+
+_unicode_dots_re = re.compile('[\u002e\u3002\uff0e\uff61]')
+
+class Codec(codecs.Codec):
+
+ def encode(self, data: str, errors: str = 'strict') -> Tuple[bytes, int]:
+ if errors != 'strict':
+ raise IDNAError('Unsupported error handling \"{}\"'.format(errors))
+
+ if not data:
+ return b"", 0
+
+ return encode(data), len(data)
+
+ def decode(self, data: bytes, errors: str = 'strict') -> Tuple[str, int]:
+ if errors != 'strict':
+ raise IDNAError('Unsupported error handling \"{}\"'.format(errors))
+
+ if not data:
+ return '', 0
+
+ return decode(data), len(data)
+
+class IncrementalEncoder(codecs.BufferedIncrementalEncoder):
+ def _buffer_encode(self, data: str, errors: str, final: bool) -> Tuple[bytes, int]: # type: ignore
+ if errors != 'strict':
+ raise IDNAError('Unsupported error handling \"{}\"'.format(errors))
+
+ if not data:
+ return b'', 0
+
+ labels = _unicode_dots_re.split(data)
+ trailing_dot = b''
+ if labels:
+ if not labels[-1]:
+ trailing_dot = b'.'
+ del labels[-1]
+ elif not final:
+ # Keep potentially unfinished label until the next call
+ del labels[-1]
+ if labels:
+ trailing_dot = b'.'
+
+ result = []
+ size = 0
+ for label in labels:
+ result.append(alabel(label))
+ if size:
+ size += 1
+ size += len(label)
+
+ # Join with U+002E; alabel() returns bytes, so the joined result
+ # and the trailing dot must be bytes as well
+ result_bytes = b'.'.join(result) + trailing_dot
+ size += len(trailing_dot)
+ return result_bytes, size
+
+class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
+ def _buffer_decode(self, data: Union[str, bytes], errors: str, final: bool) -> Tuple[str, int]: # type: ignore
+ if errors != 'strict':
+ raise IDNAError('Unsupported error handling \"{}\"'.format(errors))
+
+ if not data:
+ return ('', 0)
+
+ # The incremental decoder is fed bytes; normalize to str before
+ # splitting on the IDNA dot separators
+ if not isinstance(data, str):
+ data = str(data, 'ascii')
+
+ labels = _unicode_dots_re.split(data)
+ trailing_dot = ''
+ if labels:
+ if not labels[-1]:
+ trailing_dot = '.'
+ del labels[-1]
+ elif not final:
+ # Keep potentially unfinished label until the next call
+ del labels[-1]
+ if labels:
+ trailing_dot = '.'
+
+ result = []
+ size = 0
+ for label in labels:
+ result.append(ulabel(label))
+ if size:
+ size += 1
+ size += len(label)
+
+ result_str = '.'.join(result) + trailing_dot
+ size += len(trailing_dot)
+ return (result_str, size)
+
+
+class StreamWriter(Codec, codecs.StreamWriter):
+ pass
+
+
+class StreamReader(Codec, codecs.StreamReader):
+ pass
+
+
+def getregentry() -> codecs.CodecInfo:
+ # Compatibility as a search_function for codecs.register()
+ return codecs.CodecInfo(
+ name='idna',
+ encode=Codec().encode, # type: ignore
+ decode=Codec().decode, # type: ignore
+ incrementalencoder=IncrementalEncoder,
+ incrementaldecoder=IncrementalDecoder,
+ streamwriter=StreamWriter,
+ streamreader=StreamReader,
+ )
diff --git a/billinglayer/python/idna/compat.py b/billinglayer/python/idna/compat.py
new file mode 100644
index 0000000..786e6bd
--- /dev/null
+++ b/billinglayer/python/idna/compat.py
@@ -0,0 +1,13 @@
+from .core import *
+from .codec import *
+from typing import Any, Union
+
+def ToASCII(label: str) -> bytes:
+ return encode(label)
+
+def ToUnicode(label: Union[bytes, bytearray]) -> str:
+ return decode(label)
+
+def nameprep(s: Any) -> None:
+ raise NotImplementedError('IDNA 2008 does not utilise nameprep protocol')
+
diff --git a/billinglayer/python/idna/core.py b/billinglayer/python/idna/core.py
new file mode 100644
index 0000000..4f30037
--- /dev/null
+++ b/billinglayer/python/idna/core.py
@@ -0,0 +1,400 @@
+from . import idnadata
+import bisect
+import unicodedata
+import re
+from typing import Union, Optional
+from .intranges import intranges_contain
+
+_virama_combining_class = 9
+_alabel_prefix = b'xn--'
+_unicode_dots_re = re.compile('[\u002e\u3002\uff0e\uff61]')
+
+class IDNAError(UnicodeError):
+ """ Base exception for all IDNA-encoding related problems """
+ pass
+
+
+class IDNABidiError(IDNAError):
+ """ Exception when bidirectional requirements are not satisfied """
+ pass
+
+
+class InvalidCodepoint(IDNAError):
+ """ Exception when a disallowed or unallocated codepoint is used """
+ pass
+
+
+class InvalidCodepointContext(IDNAError):
+ """ Exception when the codepoint is not valid in the context it is used """
+ pass
+
+
+def _combining_class(cp: int) -> int:
+ v = unicodedata.combining(chr(cp))
+ if v == 0:
+ if not unicodedata.name(chr(cp)):
+ raise ValueError('Unknown character in unicodedata')
+ return v
+
+def _is_script(cp: str, script: str) -> bool:
+ return intranges_contain(ord(cp), idnadata.scripts[script])
+
+def _punycode(s: str) -> bytes:
+ return s.encode('punycode')
+
+def _unot(s: int) -> str:
+ return 'U+{:04X}'.format(s)
+
+
+def valid_label_length(label: Union[bytes, str]) -> bool:
+ if len(label) > 63:
+ return False
+ return True
+
+
+def valid_string_length(label: Union[bytes, str], trailing_dot: bool) -> bool:
+ if len(label) > (254 if trailing_dot else 253):
+ return False
+ return True
+
+
+def check_bidi(label: str, check_ltr: bool = False) -> bool:
+ # Bidi rules should only be applied if string contains RTL characters
+ bidi_label = False
+ for (idx, cp) in enumerate(label, 1):
+ direction = unicodedata.bidirectional(cp)
+ if direction == '':
+ # String likely comes from a newer version of Unicode
+ raise IDNABidiError('Unknown directionality in label {} at position {}'.format(repr(label), idx))
+ if direction in ['R', 'AL', 'AN']:
+ bidi_label = True
+ if not bidi_label and not check_ltr:
+ return True
+
+ # Bidi rule 1
+ direction = unicodedata.bidirectional(label[0])
+ if direction in ['R', 'AL']:
+ rtl = True
+ elif direction == 'L':
+ rtl = False
+ else:
+ raise IDNABidiError('First codepoint in label {} must be directionality L, R or AL'.format(repr(label)))
+
+ valid_ending = False
+ number_type = None # type: Optional[str]
+ for (idx, cp) in enumerate(label, 1):
+ direction = unicodedata.bidirectional(cp)
+
+ if rtl:
+ # Bidi rule 2
+ if not direction in ['R', 'AL', 'AN', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']:
+ raise IDNABidiError('Invalid direction for codepoint at position {} in a right-to-left label'.format(idx))
+ # Bidi rule 3
+ if direction in ['R', 'AL', 'EN', 'AN']:
+ valid_ending = True
+ elif direction != 'NSM':
+ valid_ending = False
+ # Bidi rule 4
+ if direction in ['AN', 'EN']:
+ if not number_type:
+ number_type = direction
+ else:
+ if number_type != direction:
+ raise IDNABidiError('Can not mix numeral types in a right-to-left label')
+ else:
+ # Bidi rule 5
+ if not direction in ['L', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']:
+ raise IDNABidiError('Invalid direction for codepoint at position {} in a left-to-right label'.format(idx))
+ # Bidi rule 6
+ if direction in ['L', 'EN']:
+ valid_ending = True
+ elif direction != 'NSM':
+ valid_ending = False
+
+ if not valid_ending:
+ raise IDNABidiError('Label ends with illegal codepoint directionality')
+
+ return True
+
+
+def check_initial_combiner(label: str) -> bool:
+ if unicodedata.category(label[0])[0] == 'M':
+ raise IDNAError('Label begins with an illegal combining character')
+ return True
+
+
+def check_hyphen_ok(label: str) -> bool:
+ if label[2:4] == '--':
+ raise IDNAError('Label has disallowed hyphens in 3rd and 4th position')
+ if label[0] == '-' or label[-1] == '-':
+ raise IDNAError('Label must not start or end with a hyphen')
+ return True
+
+
+def check_nfc(label: str) -> None:
+ if unicodedata.normalize('NFC', label) != label:
+ raise IDNAError('Label must be in Normalization Form C')
+
+
+def valid_contextj(label: str, pos: int) -> bool:
+ cp_value = ord(label[pos])
+
+ if cp_value == 0x200c:
+
+ if pos > 0:
+ if _combining_class(ord(label[pos - 1])) == _virama_combining_class:
+ return True
+
+ ok = False
+ for i in range(pos-1, -1, -1):
+ joining_type = idnadata.joining_types.get(ord(label[i]))
+ if joining_type == ord('T'):
+ continue
+ if joining_type in [ord('L'), ord('D')]:
+ ok = True
+ break
+
+ if not ok:
+ return False
+
+ ok = False
+ for i in range(pos+1, len(label)):
+ joining_type = idnadata.joining_types.get(ord(label[i]))
+ if joining_type == ord('T'):
+ continue
+ if joining_type in [ord('R'), ord('D')]:
+ ok = True
+ break
+ return ok
+
+ if cp_value == 0x200d:
+
+ if pos > 0:
+ if _combining_class(ord(label[pos - 1])) == _virama_combining_class:
+ return True
+ return False
+
+ else:
+
+ return False
+
+
+def valid_contexto(label: str, pos: int, exception: bool = False) -> bool:
+ cp_value = ord(label[pos])
+
+ if cp_value == 0x00b7:
+ if 0 < pos < len(label)-1:
+ if ord(label[pos - 1]) == 0x006c and ord(label[pos + 1]) == 0x006c:
+ return True
+ return False
+
+ elif cp_value == 0x0375:
+ if pos < len(label)-1 and len(label) > 1:
+ return _is_script(label[pos + 1], 'Greek')
+ return False
+
+ elif cp_value == 0x05f3 or cp_value == 0x05f4:
+ if pos > 0:
+ return _is_script(label[pos - 1], 'Hebrew')
+ return False
+
+ elif cp_value == 0x30fb:
+ for cp in label:
+ if cp == '\u30fb':
+ continue
+ if _is_script(cp, 'Hiragana') or _is_script(cp, 'Katakana') or _is_script(cp, 'Han'):
+ return True
+ return False
+
+ elif 0x660 <= cp_value <= 0x669:
+ for cp in label:
+ if 0x6f0 <= ord(cp) <= 0x06f9:
+ return False
+ return True
+
+ elif 0x6f0 <= cp_value <= 0x6f9:
+ for cp in label:
+ if 0x660 <= ord(cp) <= 0x0669:
+ return False
+ return True
+
+ return False
+
+
+def check_label(label: Union[str, bytes, bytearray]) -> None:
+ if isinstance(label, (bytes, bytearray)):
+ label = label.decode('utf-8')
+ if len(label) == 0:
+ raise IDNAError('Empty Label')
+
+ check_nfc(label)
+ check_hyphen_ok(label)
+ check_initial_combiner(label)
+
+ for (pos, cp) in enumerate(label):
+ cp_value = ord(cp)
+ if intranges_contain(cp_value, idnadata.codepoint_classes['PVALID']):
+ continue
+ elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTJ']):
+ try:
+ if not valid_contextj(label, pos):
+ raise InvalidCodepointContext('Joiner {} not allowed at position {} in {}'.format(
+ _unot(cp_value), pos+1, repr(label)))
+ except ValueError:
+ raise IDNAError('Unknown codepoint adjacent to joiner {} at position {} in {}'.format(
+ _unot(cp_value), pos+1, repr(label)))
+ elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTO']):
+ if not valid_contexto(label, pos):
+ raise InvalidCodepointContext('Codepoint {} not allowed at position {} in {}'.format(_unot(cp_value), pos+1, repr(label)))
+ else:
+ raise InvalidCodepoint('Codepoint {} at position {} of {} not allowed'.format(_unot(cp_value), pos+1, repr(label)))
+
+ check_bidi(label)
+
+
+def alabel(label: str) -> bytes:
+ try:
+ label_bytes = label.encode('ascii')
+ ulabel(label_bytes)
+ if not valid_label_length(label_bytes):
+ raise IDNAError('Label too long')
+ return label_bytes
+ except UnicodeEncodeError:
+ pass
+
+ if not label:
+ raise IDNAError('No Input')
+
+ label = str(label)
+ check_label(label)
+ label_bytes = _punycode(label)
+ label_bytes = _alabel_prefix + label_bytes
+
+ if not valid_label_length(label_bytes):
+ raise IDNAError('Label too long')
+
+ return label_bytes
+
+
+def ulabel(label: Union[str, bytes, bytearray]) -> str:
+ if not isinstance(label, (bytes, bytearray)):
+ try:
+ label_bytes = label.encode('ascii')
+ except UnicodeEncodeError:
+ check_label(label)
+ return label
+ else:
+ label_bytes = label
+
+ label_bytes = label_bytes.lower()
+ if label_bytes.startswith(_alabel_prefix):
+ label_bytes = label_bytes[len(_alabel_prefix):]
+ if not label_bytes:
+ raise IDNAError('Malformed A-label, no Punycode eligible content found')
+ if label_bytes.decode('ascii')[-1] == '-':
+ raise IDNAError('A-label must not end with a hyphen')
+ else:
+ check_label(label_bytes)
+ return label_bytes.decode('ascii')
+
+ try:
+ label = label_bytes.decode('punycode')
+ except UnicodeError:
+ raise IDNAError('Invalid A-label')
+ check_label(label)
+ return label
+
+
+def uts46_remap(domain: str, std3_rules: bool = True, transitional: bool = False) -> str:
+ """Re-map the characters in the string according to UTS46 processing."""
+ from .uts46data import uts46data
+ output = ''
+
+ for pos, char in enumerate(domain):
+ code_point = ord(char)
+ try:
+ uts46row = uts46data[code_point if code_point < 256 else
+ bisect.bisect_left(uts46data, (code_point, 'Z')) - 1]
+ status = uts46row[1]
+ replacement = None # type: Optional[str]
+ if len(uts46row) == 3:
+ replacement = uts46row[2] # type: ignore
+ if (status == 'V' or
+ (status == 'D' and not transitional) or
+ (status == '3' and not std3_rules and replacement is None)):
+ output += char
+ elif replacement is not None and (status == 'M' or
+ (status == '3' and not std3_rules) or
+ (status == 'D' and transitional)):
+ output += replacement
+ elif status != 'I':
+ raise IndexError()
+ except IndexError:
+ raise InvalidCodepoint(
+ 'Codepoint {} not allowed at position {} in {}'.format(
+ _unot(code_point), pos + 1, repr(domain)))
+
+ return unicodedata.normalize('NFC', output)
+
+
+def encode(s: Union[str, bytes, bytearray], strict: bool = False, uts46: bool = False, std3_rules: bool = False, transitional: bool = False) -> bytes:
+ if isinstance(s, (bytes, bytearray)):
+ try:
+ s = s.decode('ascii')
+ except UnicodeDecodeError:
+ raise IDNAError('should pass a unicode string to the function rather than a byte string.')
+ if uts46:
+ s = uts46_remap(s, std3_rules, transitional)
+ trailing_dot = False
+ result = []
+ if strict:
+ labels = s.split('.')
+ else:
+ labels = _unicode_dots_re.split(s)
+ if not labels or labels == ['']:
+ raise IDNAError('Empty domain')
+ if labels[-1] == '':
+ del labels[-1]
+ trailing_dot = True
+ for label in labels:
+ s = alabel(label)
+ if s:
+ result.append(s)
+ else:
+ raise IDNAError('Empty label')
+ if trailing_dot:
+ result.append(b'')
+ s = b'.'.join(result)
+ if not valid_string_length(s, trailing_dot):
+ raise IDNAError('Domain too long')
+ return s
+
+
+def decode(s: Union[str, bytes, bytearray], strict: bool = False, uts46: bool = False, std3_rules: bool = False) -> str:
+ try:
+ if isinstance(s, (bytes, bytearray)):
+ s = s.decode('ascii')
+ except UnicodeDecodeError:
+ raise IDNAError('Invalid ASCII in A-label')
+ if uts46:
+ s = uts46_remap(s, std3_rules, False)
+ trailing_dot = False
+ result = []
+ if not strict:
+ labels = _unicode_dots_re.split(s)
+ else:
+ labels = s.split('.')
+ if not labels or labels == ['']:
+ raise IDNAError('Empty domain')
+ if not labels[-1]:
+ del labels[-1]
+ trailing_dot = True
+ for label in labels:
+ s = ulabel(label)
+ if s:
+ result.append(s)
+ else:
+ raise IDNAError('Empty label')
+ if trailing_dot:
+ result.append('')
+ return '.'.join(result)
diff --git a/billinglayer/python/idna/idnadata.py b/billinglayer/python/idna/idnadata.py
new file mode 100644
index 0000000..67db462
--- /dev/null
+++ b/billinglayer/python/idna/idnadata.py
@@ -0,0 +1,2151 @@
+# This file is automatically generated by tools/idna-data
+
+__version__ = '15.0.0'
+scripts = {
+ 'Greek': (
+ 0x37000000374,
+ 0x37500000378,
+ 0x37a0000037e,
+ 0x37f00000380,
+ 0x38400000385,
+ 0x38600000387,
+ 0x3880000038b,
+ 0x38c0000038d,
+ 0x38e000003a2,
+ 0x3a3000003e2,
+ 0x3f000000400,
+ 0x1d2600001d2b,
+ 0x1d5d00001d62,
+ 0x1d6600001d6b,
+ 0x1dbf00001dc0,
+ 0x1f0000001f16,
+ 0x1f1800001f1e,
+ 0x1f2000001f46,
+ 0x1f4800001f4e,
+ 0x1f5000001f58,
+ 0x1f5900001f5a,
+ 0x1f5b00001f5c,
+ 0x1f5d00001f5e,
+ 0x1f5f00001f7e,
+ 0x1f8000001fb5,
+ 0x1fb600001fc5,
+ 0x1fc600001fd4,
+ 0x1fd600001fdc,
+ 0x1fdd00001ff0,
+ 0x1ff200001ff5,
+ 0x1ff600001fff,
+ 0x212600002127,
+ 0xab650000ab66,
+ 0x101400001018f,
+ 0x101a0000101a1,
+ 0x1d2000001d246,
+ ),
+ 'Han': (
+ 0x2e8000002e9a,
+ 0x2e9b00002ef4,
+ 0x2f0000002fd6,
+ 0x300500003006,
+ 0x300700003008,
+ 0x30210000302a,
+ 0x30380000303c,
+ 0x340000004dc0,
+ 0x4e000000a000,
+ 0xf9000000fa6e,
+ 0xfa700000fada,
+ 0x16fe200016fe4,
+ 0x16ff000016ff2,
+ 0x200000002a6e0,
+ 0x2a7000002b73a,
+ 0x2b7400002b81e,
+ 0x2b8200002cea2,
+ 0x2ceb00002ebe1,
+ 0x2f8000002fa1e,
+ 0x300000003134b,
+ 0x31350000323b0,
+ ),
+ 'Hebrew': (
+ 0x591000005c8,
+ 0x5d0000005eb,
+ 0x5ef000005f5,
+ 0xfb1d0000fb37,
+ 0xfb380000fb3d,
+ 0xfb3e0000fb3f,
+ 0xfb400000fb42,
+ 0xfb430000fb45,
+ 0xfb460000fb50,
+ ),
+ 'Hiragana': (
+ 0x304100003097,
+ 0x309d000030a0,
+ 0x1b0010001b120,
+ 0x1b1320001b133,
+ 0x1b1500001b153,
+ 0x1f2000001f201,
+ ),
+ 'Katakana': (
+ 0x30a1000030fb,
+ 0x30fd00003100,
+ 0x31f000003200,
+ 0x32d0000032ff,
+ 0x330000003358,
+ 0xff660000ff70,
+ 0xff710000ff9e,
+ 0x1aff00001aff4,
+ 0x1aff50001affc,
+ 0x1affd0001afff,
+ 0x1b0000001b001,
+ 0x1b1200001b123,
+ 0x1b1550001b156,
+ 0x1b1640001b168,
+ ),
+}
+joining_types = {
+ 0x600: 85,
+ 0x601: 85,
+ 0x602: 85,
+ 0x603: 85,
+ 0x604: 85,
+ 0x605: 85,
+ 0x608: 85,
+ 0x60b: 85,
+ 0x620: 68,
+ 0x621: 85,
+ 0x622: 82,
+ 0x623: 82,
+ 0x624: 82,
+ 0x625: 82,
+ 0x626: 68,
+ 0x627: 82,
+ 0x628: 68,
+ 0x629: 82,
+ 0x62a: 68,
+ 0x62b: 68,
+ 0x62c: 68,
+ 0x62d: 68,
+ 0x62e: 68,
+ 0x62f: 82,
+ 0x630: 82,
+ 0x631: 82,
+ 0x632: 82,
+ 0x633: 68,
+ 0x634: 68,
+ 0x635: 68,
+ 0x636: 68,
+ 0x637: 68,
+ 0x638: 68,
+ 0x639: 68,
+ 0x63a: 68,
+ 0x63b: 68,
+ 0x63c: 68,
+ 0x63d: 68,
+ 0x63e: 68,
+ 0x63f: 68,
+ 0x640: 67,
+ 0x641: 68,
+ 0x642: 68,
+ 0x643: 68,
+ 0x644: 68,
+ 0x645: 68,
+ 0x646: 68,
+ 0x647: 68,
+ 0x648: 82,
+ 0x649: 68,
+ 0x64a: 68,
+ 0x66e: 68,
+ 0x66f: 68,
+ 0x671: 82,
+ 0x672: 82,
+ 0x673: 82,
+ 0x674: 85,
+ 0x675: 82,
+ 0x676: 82,
+ 0x677: 82,
+ 0x678: 68,
+ 0x679: 68,
+ 0x67a: 68,
+ 0x67b: 68,
+ 0x67c: 68,
+ 0x67d: 68,
+ 0x67e: 68,
+ 0x67f: 68,
+ 0x680: 68,
+ 0x681: 68,
+ 0x682: 68,
+ 0x683: 68,
+ 0x684: 68,
+ 0x685: 68,
+ 0x686: 68,
+ 0x687: 68,
+ 0x688: 82,
+ 0x689: 82,
+ 0x68a: 82,
+ 0x68b: 82,
+ 0x68c: 82,
+ 0x68d: 82,
+ 0x68e: 82,
+ 0x68f: 82,
+ 0x690: 82,
+ 0x691: 82,
+ 0x692: 82,
+ 0x693: 82,
+ 0x694: 82,
+ 0x695: 82,
+ 0x696: 82,
+ 0x697: 82,
+ 0x698: 82,
+ 0x699: 82,
+ 0x69a: 68,
+ 0x69b: 68,
+ 0x69c: 68,
+ 0x69d: 68,
+ 0x69e: 68,
+ 0x69f: 68,
+ 0x6a0: 68,
+ 0x6a1: 68,
+ 0x6a2: 68,
+ 0x6a3: 68,
+ 0x6a4: 68,
+ 0x6a5: 68,
+ 0x6a6: 68,
+ 0x6a7: 68,
+ 0x6a8: 68,
+ 0x6a9: 68,
+ 0x6aa: 68,
+ 0x6ab: 68,
+ 0x6ac: 68,
+ 0x6ad: 68,
+ 0x6ae: 68,
+ 0x6af: 68,
+ 0x6b0: 68,
+ 0x6b1: 68,
+ 0x6b2: 68,
+ 0x6b3: 68,
+ 0x6b4: 68,
+ 0x6b5: 68,
+ 0x6b6: 68,
+ 0x6b7: 68,
+ 0x6b8: 68,
+ 0x6b9: 68,
+ 0x6ba: 68,
+ 0x6bb: 68,
+ 0x6bc: 68,
+ 0x6bd: 68,
+ 0x6be: 68,
+ 0x6bf: 68,
+ 0x6c0: 82,
+ 0x6c1: 68,
+ 0x6c2: 68,
+ 0x6c3: 82,
+ 0x6c4: 82,
+ 0x6c5: 82,
+ 0x6c6: 82,
+ 0x6c7: 82,
+ 0x6c8: 82,
+ 0x6c9: 82,
+ 0x6ca: 82,
+ 0x6cb: 82,
+ 0x6cc: 68,
+ 0x6cd: 82,
+ 0x6ce: 68,
+ 0x6cf: 82,
+ 0x6d0: 68,
+ 0x6d1: 68,
+ 0x6d2: 82,
+ 0x6d3: 82,
+ 0x6d5: 82,
+ 0x6dd: 85,
+ 0x6ee: 82,
+ 0x6ef: 82,
+ 0x6fa: 68,
+ 0x6fb: 68,
+ 0x6fc: 68,
+ 0x6ff: 68,
+ 0x70f: 84,
+ 0x710: 82,
+ 0x712: 68,
+ 0x713: 68,
+ 0x714: 68,
+ 0x715: 82,
+ 0x716: 82,
+ 0x717: 82,
+ 0x718: 82,
+ 0x719: 82,
+ 0x71a: 68,
+ 0x71b: 68,
+ 0x71c: 68,
+ 0x71d: 68,
+ 0x71e: 82,
+ 0x71f: 68,
+ 0x720: 68,
+ 0x721: 68,
+ 0x722: 68,
+ 0x723: 68,
+ 0x724: 68,
+ 0x725: 68,
+ 0x726: 68,
+ 0x727: 68,
+ 0x728: 82,
+ 0x729: 68,
+ 0x72a: 82,
+ 0x72b: 68,
+ 0x72c: 82,
+ 0x72d: 68,
+ 0x72e: 68,
+ 0x72f: 82,
+ 0x74d: 82,
+ 0x74e: 68,
+ 0x74f: 68,
+ 0x750: 68,
+ 0x751: 68,
+ 0x752: 68,
+ 0x753: 68,
+ 0x754: 68,
+ 0x755: 68,
+ 0x756: 68,
+ 0x757: 68,
+ 0x758: 68,
+ 0x759: 82,
+ 0x75a: 82,
+ 0x75b: 82,
+ 0x75c: 68,
+ 0x75d: 68,
+ 0x75e: 68,
+ 0x75f: 68,
+ 0x760: 68,
+ 0x761: 68,
+ 0x762: 68,
+ 0x763: 68,
+ 0x764: 68,
+ 0x765: 68,
+ 0x766: 68,
+ 0x767: 68,
+ 0x768: 68,
+ 0x769: 68,
+ 0x76a: 68,
+ 0x76b: 82,
+ 0x76c: 82,
+ 0x76d: 68,
+ 0x76e: 68,
+ 0x76f: 68,
+ 0x770: 68,
+ 0x771: 82,
+ 0x772: 68,
+ 0x773: 82,
+ 0x774: 82,
+ 0x775: 68,
+ 0x776: 68,
+ 0x777: 68,
+ 0x778: 82,
+ 0x779: 82,
+ 0x77a: 68,
+ 0x77b: 68,
+ 0x77c: 68,
+ 0x77d: 68,
+ 0x77e: 68,
+ 0x77f: 68,
+ 0x7ca: 68,
+ 0x7cb: 68,
+ 0x7cc: 68,
+ 0x7cd: 68,
+ 0x7ce: 68,
+ 0x7cf: 68,
+ 0x7d0: 68,
+ 0x7d1: 68,
+ 0x7d2: 68,
+ 0x7d3: 68,
+ 0x7d4: 68,
+ 0x7d5: 68,
+ 0x7d6: 68,
+ 0x7d7: 68,
+ 0x7d8: 68,
+ 0x7d9: 68,
+ 0x7da: 68,
+ 0x7db: 68,
+ 0x7dc: 68,
+ 0x7dd: 68,
+ 0x7de: 68,
+ 0x7df: 68,
+ 0x7e0: 68,
+ 0x7e1: 68,
+ 0x7e2: 68,
+ 0x7e3: 68,
+ 0x7e4: 68,
+ 0x7e5: 68,
+ 0x7e6: 68,
+ 0x7e7: 68,
+ 0x7e8: 68,
+ 0x7e9: 68,
+ 0x7ea: 68,
+ 0x7fa: 67,
+ 0x840: 82,
+ 0x841: 68,
+ 0x842: 68,
+ 0x843: 68,
+ 0x844: 68,
+ 0x845: 68,
+ 0x846: 82,
+ 0x847: 82,
+ 0x848: 68,
+ 0x849: 82,
+ 0x84a: 68,
+ 0x84b: 68,
+ 0x84c: 68,
+ 0x84d: 68,
+ 0x84e: 68,
+ 0x84f: 68,
+ 0x850: 68,
+ 0x851: 68,
+ 0x852: 68,
+ 0x853: 68,
+ 0x854: 82,
+ 0x855: 68,
+ 0x856: 82,
+ 0x857: 82,
+ 0x858: 82,
+ 0x860: 68,
+ 0x861: 85,
+ 0x862: 68,
+ 0x863: 68,
+ 0x864: 68,
+ 0x865: 68,
+ 0x866: 85,
+ 0x867: 82,
+ 0x868: 68,
+ 0x869: 82,
+ 0x86a: 82,
+ 0x870: 82,
+ 0x871: 82,
+ 0x872: 82,
+ 0x873: 82,
+ 0x874: 82,
+ 0x875: 82,
+ 0x876: 82,
+ 0x877: 82,
+ 0x878: 82,
+ 0x879: 82,
+ 0x87a: 82,
+ 0x87b: 82,
+ 0x87c: 82,
+ 0x87d: 82,
+ 0x87e: 82,
+ 0x87f: 82,
+ 0x880: 82,
+ 0x881: 82,
+ 0x882: 82,
+ 0x883: 67,
+ 0x884: 67,
+ 0x885: 67,
+ 0x886: 68,
+ 0x887: 85,
+ 0x888: 85,
+ 0x889: 68,
+ 0x88a: 68,
+ 0x88b: 68,
+ 0x88c: 68,
+ 0x88d: 68,
+ 0x88e: 82,
+ 0x890: 85,
+ 0x891: 85,
+ 0x8a0: 68,
+ 0x8a1: 68,
+ 0x8a2: 68,
+ 0x8a3: 68,
+ 0x8a4: 68,
+ 0x8a5: 68,
+ 0x8a6: 68,
+ 0x8a7: 68,
+ 0x8a8: 68,
+ 0x8a9: 68,
+ 0x8aa: 82,
+ 0x8ab: 82,
+ 0x8ac: 82,
+ 0x8ad: 85,
+ 0x8ae: 82,
+ 0x8af: 68,
+ 0x8b0: 68,
+ 0x8b1: 82,
+ 0x8b2: 82,
+ 0x8b3: 68,
+ 0x8b4: 68,
+ 0x8b5: 68,
+ 0x8b6: 68,
+ 0x8b7: 68,
+ 0x8b8: 68,
+ 0x8b9: 82,
+ 0x8ba: 68,
+ 0x8bb: 68,
+ 0x8bc: 68,
+ 0x8bd: 68,
+ 0x8be: 68,
+ 0x8bf: 68,
+ 0x8c0: 68,
+ 0x8c1: 68,
+ 0x8c2: 68,
+ 0x8c3: 68,
+ 0x8c4: 68,
+ 0x8c5: 68,
+ 0x8c6: 68,
+ 0x8c7: 68,
+ 0x8c8: 68,
+ 0x8e2: 85,
+ 0x1806: 85,
+ 0x1807: 68,
+ 0x180a: 67,
+ 0x180e: 85,
+ 0x1820: 68,
+ 0x1821: 68,
+ 0x1822: 68,
+ 0x1823: 68,
+ 0x1824: 68,
+ 0x1825: 68,
+ 0x1826: 68,
+ 0x1827: 68,
+ 0x1828: 68,
+ 0x1829: 68,
+ 0x182a: 68,
+ 0x182b: 68,
+ 0x182c: 68,
+ 0x182d: 68,
+ 0x182e: 68,
+ 0x182f: 68,
+ 0x1830: 68,
+ 0x1831: 68,
+ 0x1832: 68,
+ 0x1833: 68,
+ 0x1834: 68,
+ 0x1835: 68,
+ 0x1836: 68,
+ 0x1837: 68,
+ 0x1838: 68,
+ 0x1839: 68,
+ 0x183a: 68,
+ 0x183b: 68,
+ 0x183c: 68,
+ 0x183d: 68,
+ 0x183e: 68,
+ 0x183f: 68,
+ 0x1840: 68,
+ 0x1841: 68,
+ 0x1842: 68,
+ 0x1843: 68,
+ 0x1844: 68,
+ 0x1845: 68,
+ 0x1846: 68,
+ 0x1847: 68,
+ 0x1848: 68,
+ 0x1849: 68,
+ 0x184a: 68,
+ 0x184b: 68,
+ 0x184c: 68,
+ 0x184d: 68,
+ 0x184e: 68,
+ 0x184f: 68,
+ 0x1850: 68,
+ 0x1851: 68,
+ 0x1852: 68,
+ 0x1853: 68,
+ 0x1854: 68,
+ 0x1855: 68,
+ 0x1856: 68,
+ 0x1857: 68,
+ 0x1858: 68,
+ 0x1859: 68,
+ 0x185a: 68,
+ 0x185b: 68,
+ 0x185c: 68,
+ 0x185d: 68,
+ 0x185e: 68,
+ 0x185f: 68,
+ 0x1860: 68,
+ 0x1861: 68,
+ 0x1862: 68,
+ 0x1863: 68,
+ 0x1864: 68,
+ 0x1865: 68,
+ 0x1866: 68,
+ 0x1867: 68,
+ 0x1868: 68,
+ 0x1869: 68,
+ 0x186a: 68,
+ 0x186b: 68,
+ 0x186c: 68,
+ 0x186d: 68,
+ 0x186e: 68,
+ 0x186f: 68,
+ 0x1870: 68,
+ 0x1871: 68,
+ 0x1872: 68,
+ 0x1873: 68,
+ 0x1874: 68,
+ 0x1875: 68,
+ 0x1876: 68,
+ 0x1877: 68,
+ 0x1878: 68,
+ 0x1880: 85,
+ 0x1881: 85,
+ 0x1882: 85,
+ 0x1883: 85,
+ 0x1884: 85,
+ 0x1885: 84,
+ 0x1886: 84,
+ 0x1887: 68,
+ 0x1888: 68,
+ 0x1889: 68,
+ 0x188a: 68,
+ 0x188b: 68,
+ 0x188c: 68,
+ 0x188d: 68,
+ 0x188e: 68,
+ 0x188f: 68,
+ 0x1890: 68,
+ 0x1891: 68,
+ 0x1892: 68,
+ 0x1893: 68,
+ 0x1894: 68,
+ 0x1895: 68,
+ 0x1896: 68,
+ 0x1897: 68,
+ 0x1898: 68,
+ 0x1899: 68,
+ 0x189a: 68,
+ 0x189b: 68,
+ 0x189c: 68,
+ 0x189d: 68,
+ 0x189e: 68,
+ 0x189f: 68,
+ 0x18a0: 68,
+ 0x18a1: 68,
+ 0x18a2: 68,
+ 0x18a3: 68,
+ 0x18a4: 68,
+ 0x18a5: 68,
+ 0x18a6: 68,
+ 0x18a7: 68,
+ 0x18a8: 68,
+ 0x18aa: 68,
+ 0x200c: 85,
+ 0x200d: 67,
+ 0x202f: 85,
+ 0x2066: 85,
+ 0x2067: 85,
+ 0x2068: 85,
+ 0x2069: 85,
+ 0xa840: 68,
+ 0xa841: 68,
+ 0xa842: 68,
+ 0xa843: 68,
+ 0xa844: 68,
+ 0xa845: 68,
+ 0xa846: 68,
+ 0xa847: 68,
+ 0xa848: 68,
+ 0xa849: 68,
+ 0xa84a: 68,
+ 0xa84b: 68,
+ 0xa84c: 68,
+ 0xa84d: 68,
+ 0xa84e: 68,
+ 0xa84f: 68,
+ 0xa850: 68,
+ 0xa851: 68,
+ 0xa852: 68,
+ 0xa853: 68,
+ 0xa854: 68,
+ 0xa855: 68,
+ 0xa856: 68,
+ 0xa857: 68,
+ 0xa858: 68,
+ 0xa859: 68,
+ 0xa85a: 68,
+ 0xa85b: 68,
+ 0xa85c: 68,
+ 0xa85d: 68,
+ 0xa85e: 68,
+ 0xa85f: 68,
+ 0xa860: 68,
+ 0xa861: 68,
+ 0xa862: 68,
+ 0xa863: 68,
+ 0xa864: 68,
+ 0xa865: 68,
+ 0xa866: 68,
+ 0xa867: 68,
+ 0xa868: 68,
+ 0xa869: 68,
+ 0xa86a: 68,
+ 0xa86b: 68,
+ 0xa86c: 68,
+ 0xa86d: 68,
+ 0xa86e: 68,
+ 0xa86f: 68,
+ 0xa870: 68,
+ 0xa871: 68,
+ 0xa872: 76,
+ 0xa873: 85,
+ 0x10ac0: 68,
+ 0x10ac1: 68,
+ 0x10ac2: 68,
+ 0x10ac3: 68,
+ 0x10ac4: 68,
+ 0x10ac5: 82,
+ 0x10ac6: 85,
+ 0x10ac7: 82,
+ 0x10ac8: 85,
+ 0x10ac9: 82,
+ 0x10aca: 82,
+ 0x10acb: 85,
+ 0x10acc: 85,
+ 0x10acd: 76,
+ 0x10ace: 82,
+ 0x10acf: 82,
+ 0x10ad0: 82,
+ 0x10ad1: 82,
+ 0x10ad2: 82,
+ 0x10ad3: 68,
+ 0x10ad4: 68,
+ 0x10ad5: 68,
+ 0x10ad6: 68,
+ 0x10ad7: 76,
+ 0x10ad8: 68,
+ 0x10ad9: 68,
+ 0x10ada: 68,
+ 0x10adb: 68,
+ 0x10adc: 68,
+ 0x10add: 82,
+ 0x10ade: 68,
+ 0x10adf: 68,
+ 0x10ae0: 68,
+ 0x10ae1: 82,
+ 0x10ae2: 85,
+ 0x10ae3: 85,
+ 0x10ae4: 82,
+ 0x10aeb: 68,
+ 0x10aec: 68,
+ 0x10aed: 68,
+ 0x10aee: 68,
+ 0x10aef: 82,
+ 0x10b80: 68,
+ 0x10b81: 82,
+ 0x10b82: 68,
+ 0x10b83: 82,
+ 0x10b84: 82,
+ 0x10b85: 82,
+ 0x10b86: 68,
+ 0x10b87: 68,
+ 0x10b88: 68,
+ 0x10b89: 82,
+ 0x10b8a: 68,
+ 0x10b8b: 68,
+ 0x10b8c: 82,
+ 0x10b8d: 68,
+ 0x10b8e: 82,
+ 0x10b8f: 82,
+ 0x10b90: 68,
+ 0x10b91: 82,
+ 0x10ba9: 82,
+ 0x10baa: 82,
+ 0x10bab: 82,
+ 0x10bac: 82,
+ 0x10bad: 68,
+ 0x10bae: 68,
+ 0x10baf: 85,
+ 0x10d00: 76,
+ 0x10d01: 68,
+ 0x10d02: 68,
+ 0x10d03: 68,
+ 0x10d04: 68,
+ 0x10d05: 68,
+ 0x10d06: 68,
+ 0x10d07: 68,
+ 0x10d08: 68,
+ 0x10d09: 68,
+ 0x10d0a: 68,
+ 0x10d0b: 68,
+ 0x10d0c: 68,
+ 0x10d0d: 68,
+ 0x10d0e: 68,
+ 0x10d0f: 68,
+ 0x10d10: 68,
+ 0x10d11: 68,
+ 0x10d12: 68,
+ 0x10d13: 68,
+ 0x10d14: 68,
+ 0x10d15: 68,
+ 0x10d16: 68,
+ 0x10d17: 68,
+ 0x10d18: 68,
+ 0x10d19: 68,
+ 0x10d1a: 68,
+ 0x10d1b: 68,
+ 0x10d1c: 68,
+ 0x10d1d: 68,
+ 0x10d1e: 68,
+ 0x10d1f: 68,
+ 0x10d20: 68,
+ 0x10d21: 68,
+ 0x10d22: 82,
+ 0x10d23: 68,
+ 0x10f30: 68,
+ 0x10f31: 68,
+ 0x10f32: 68,
+ 0x10f33: 82,
+ 0x10f34: 68,
+ 0x10f35: 68,
+ 0x10f36: 68,
+ 0x10f37: 68,
+ 0x10f38: 68,
+ 0x10f39: 68,
+ 0x10f3a: 68,
+ 0x10f3b: 68,
+ 0x10f3c: 68,
+ 0x10f3d: 68,
+ 0x10f3e: 68,
+ 0x10f3f: 68,
+ 0x10f40: 68,
+ 0x10f41: 68,
+ 0x10f42: 68,
+ 0x10f43: 68,
+ 0x10f44: 68,
+ 0x10f45: 85,
+ 0x10f51: 68,
+ 0x10f52: 68,
+ 0x10f53: 68,
+ 0x10f54: 82,
+ 0x10f70: 68,
+ 0x10f71: 68,
+ 0x10f72: 68,
+ 0x10f73: 68,
+ 0x10f74: 82,
+ 0x10f75: 82,
+ 0x10f76: 68,
+ 0x10f77: 68,
+ 0x10f78: 68,
+ 0x10f79: 68,
+ 0x10f7a: 68,
+ 0x10f7b: 68,
+ 0x10f7c: 68,
+ 0x10f7d: 68,
+ 0x10f7e: 68,
+ 0x10f7f: 68,
+ 0x10f80: 68,
+ 0x10f81: 68,
+ 0x10fb0: 68,
+ 0x10fb1: 85,
+ 0x10fb2: 68,
+ 0x10fb3: 68,
+ 0x10fb4: 82,
+ 0x10fb5: 82,
+ 0x10fb6: 82,
+ 0x10fb7: 85,
+ 0x10fb8: 68,
+ 0x10fb9: 82,
+ 0x10fba: 82,
+ 0x10fbb: 68,
+ 0x10fbc: 68,
+ 0x10fbd: 82,
+ 0x10fbe: 68,
+ 0x10fbf: 68,
+ 0x10fc0: 85,
+ 0x10fc1: 68,
+ 0x10fc2: 82,
+ 0x10fc3: 82,
+ 0x10fc4: 68,
+ 0x10fc5: 85,
+ 0x10fc6: 85,
+ 0x10fc7: 85,
+ 0x10fc8: 85,
+ 0x10fc9: 82,
+ 0x10fca: 68,
+ 0x10fcb: 76,
+ 0x110bd: 85,
+ 0x110cd: 85,
+ 0x1e900: 68,
+ 0x1e901: 68,
+ 0x1e902: 68,
+ 0x1e903: 68,
+ 0x1e904: 68,
+ 0x1e905: 68,
+ 0x1e906: 68,
+ 0x1e907: 68,
+ 0x1e908: 68,
+ 0x1e909: 68,
+ 0x1e90a: 68,
+ 0x1e90b: 68,
+ 0x1e90c: 68,
+ 0x1e90d: 68,
+ 0x1e90e: 68,
+ 0x1e90f: 68,
+ 0x1e910: 68,
+ 0x1e911: 68,
+ 0x1e912: 68,
+ 0x1e913: 68,
+ 0x1e914: 68,
+ 0x1e915: 68,
+ 0x1e916: 68,
+ 0x1e917: 68,
+ 0x1e918: 68,
+ 0x1e919: 68,
+ 0x1e91a: 68,
+ 0x1e91b: 68,
+ 0x1e91c: 68,
+ 0x1e91d: 68,
+ 0x1e91e: 68,
+ 0x1e91f: 68,
+ 0x1e920: 68,
+ 0x1e921: 68,
+ 0x1e922: 68,
+ 0x1e923: 68,
+ 0x1e924: 68,
+ 0x1e925: 68,
+ 0x1e926: 68,
+ 0x1e927: 68,
+ 0x1e928: 68,
+ 0x1e929: 68,
+ 0x1e92a: 68,
+ 0x1e92b: 68,
+ 0x1e92c: 68,
+ 0x1e92d: 68,
+ 0x1e92e: 68,
+ 0x1e92f: 68,
+ 0x1e930: 68,
+ 0x1e931: 68,
+ 0x1e932: 68,
+ 0x1e933: 68,
+ 0x1e934: 68,
+ 0x1e935: 68,
+ 0x1e936: 68,
+ 0x1e937: 68,
+ 0x1e938: 68,
+ 0x1e939: 68,
+ 0x1e93a: 68,
+ 0x1e93b: 68,
+ 0x1e93c: 68,
+ 0x1e93d: 68,
+ 0x1e93e: 68,
+ 0x1e93f: 68,
+ 0x1e940: 68,
+ 0x1e941: 68,
+ 0x1e942: 68,
+ 0x1e943: 68,
+ 0x1e94b: 84,
+}
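+
+# The joining_types values above are the ord() codes of the Unicode
+# joining-type letters: 68 = 'D' (dual-joining), 82 = 'R' (right-joining),
+# 76 = 'L' (left-joining), 84 = 'T' (transparent), 67 = 'C' (join-causing),
+# 85 = 'U' (non-joining). The codepoint_classes values below use the
+# intranges encoding from idna.intranges: each integer packs a half-open
+# range as (start << 32) | end and matches codepoints start <= cp < end;
+# for example, 0x300000003a covers the ASCII digits '0'-'9'.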
+codepoint_classes = {
+ 'PVALID': (
+ 0x2d0000002e,
+ 0x300000003a,
+ 0x610000007b,
+ 0xdf000000f7,
+ 0xf800000100,
+ 0x10100000102,
+ 0x10300000104,
+ 0x10500000106,
+ 0x10700000108,
+ 0x1090000010a,
+ 0x10b0000010c,
+ 0x10d0000010e,
+ 0x10f00000110,
+ 0x11100000112,
+ 0x11300000114,
+ 0x11500000116,
+ 0x11700000118,
+ 0x1190000011a,
+ 0x11b0000011c,
+ 0x11d0000011e,
+ 0x11f00000120,
+ 0x12100000122,
+ 0x12300000124,
+ 0x12500000126,
+ 0x12700000128,
+ 0x1290000012a,
+ 0x12b0000012c,
+ 0x12d0000012e,
+ 0x12f00000130,
+ 0x13100000132,
+ 0x13500000136,
+ 0x13700000139,
+ 0x13a0000013b,
+ 0x13c0000013d,
+ 0x13e0000013f,
+ 0x14200000143,
+ 0x14400000145,
+ 0x14600000147,
+ 0x14800000149,
+ 0x14b0000014c,
+ 0x14d0000014e,
+ 0x14f00000150,
+ 0x15100000152,
+ 0x15300000154,
+ 0x15500000156,
+ 0x15700000158,
+ 0x1590000015a,
+ 0x15b0000015c,
+ 0x15d0000015e,
+ 0x15f00000160,
+ 0x16100000162,
+ 0x16300000164,
+ 0x16500000166,
+ 0x16700000168,
+ 0x1690000016a,
+ 0x16b0000016c,
+ 0x16d0000016e,
+ 0x16f00000170,
+ 0x17100000172,
+ 0x17300000174,
+ 0x17500000176,
+ 0x17700000178,
+ 0x17a0000017b,
+ 0x17c0000017d,
+ 0x17e0000017f,
+ 0x18000000181,
+ 0x18300000184,
+ 0x18500000186,
+ 0x18800000189,
+ 0x18c0000018e,
+ 0x19200000193,
+ 0x19500000196,
+ 0x1990000019c,
+ 0x19e0000019f,
+ 0x1a1000001a2,
+ 0x1a3000001a4,
+ 0x1a5000001a6,
+ 0x1a8000001a9,
+ 0x1aa000001ac,
+ 0x1ad000001ae,
+ 0x1b0000001b1,
+ 0x1b4000001b5,
+ 0x1b6000001b7,
+ 0x1b9000001bc,
+ 0x1bd000001c4,
+ 0x1ce000001cf,
+ 0x1d0000001d1,
+ 0x1d2000001d3,
+ 0x1d4000001d5,
+ 0x1d6000001d7,
+ 0x1d8000001d9,
+ 0x1da000001db,
+ 0x1dc000001de,
+ 0x1df000001e0,
+ 0x1e1000001e2,
+ 0x1e3000001e4,
+ 0x1e5000001e6,
+ 0x1e7000001e8,
+ 0x1e9000001ea,
+ 0x1eb000001ec,
+ 0x1ed000001ee,
+ 0x1ef000001f1,
+ 0x1f5000001f6,
+ 0x1f9000001fa,
+ 0x1fb000001fc,
+ 0x1fd000001fe,
+ 0x1ff00000200,
+ 0x20100000202,
+ 0x20300000204,
+ 0x20500000206,
+ 0x20700000208,
+ 0x2090000020a,
+ 0x20b0000020c,
+ 0x20d0000020e,
+ 0x20f00000210,
+ 0x21100000212,
+ 0x21300000214,
+ 0x21500000216,
+ 0x21700000218,
+ 0x2190000021a,
+ 0x21b0000021c,
+ 0x21d0000021e,
+ 0x21f00000220,
+ 0x22100000222,
+ 0x22300000224,
+ 0x22500000226,
+ 0x22700000228,
+ 0x2290000022a,
+ 0x22b0000022c,
+ 0x22d0000022e,
+ 0x22f00000230,
+ 0x23100000232,
+ 0x2330000023a,
+ 0x23c0000023d,
+ 0x23f00000241,
+ 0x24200000243,
+ 0x24700000248,
+ 0x2490000024a,
+ 0x24b0000024c,
+ 0x24d0000024e,
+ 0x24f000002b0,
+ 0x2b9000002c2,
+ 0x2c6000002d2,
+ 0x2ec000002ed,
+ 0x2ee000002ef,
+ 0x30000000340,
+ 0x34200000343,
+ 0x3460000034f,
+ 0x35000000370,
+ 0x37100000372,
+ 0x37300000374,
+ 0x37700000378,
+ 0x37b0000037e,
+ 0x39000000391,
+ 0x3ac000003cf,
+ 0x3d7000003d8,
+ 0x3d9000003da,
+ 0x3db000003dc,
+ 0x3dd000003de,
+ 0x3df000003e0,
+ 0x3e1000003e2,
+ 0x3e3000003e4,
+ 0x3e5000003e6,
+ 0x3e7000003e8,
+ 0x3e9000003ea,
+ 0x3eb000003ec,
+ 0x3ed000003ee,
+ 0x3ef000003f0,
+ 0x3f3000003f4,
+ 0x3f8000003f9,
+ 0x3fb000003fd,
+ 0x43000000460,
+ 0x46100000462,
+ 0x46300000464,
+ 0x46500000466,
+ 0x46700000468,
+ 0x4690000046a,
+ 0x46b0000046c,
+ 0x46d0000046e,
+ 0x46f00000470,
+ 0x47100000472,
+ 0x47300000474,
+ 0x47500000476,
+ 0x47700000478,
+ 0x4790000047a,
+ 0x47b0000047c,
+ 0x47d0000047e,
+ 0x47f00000480,
+ 0x48100000482,
+ 0x48300000488,
+ 0x48b0000048c,
+ 0x48d0000048e,
+ 0x48f00000490,
+ 0x49100000492,
+ 0x49300000494,
+ 0x49500000496,
+ 0x49700000498,
+ 0x4990000049a,
+ 0x49b0000049c,
+ 0x49d0000049e,
+ 0x49f000004a0,
+ 0x4a1000004a2,
+ 0x4a3000004a4,
+ 0x4a5000004a6,
+ 0x4a7000004a8,
+ 0x4a9000004aa,
+ 0x4ab000004ac,
+ 0x4ad000004ae,
+ 0x4af000004b0,
+ 0x4b1000004b2,
+ 0x4b3000004b4,
+ 0x4b5000004b6,
+ 0x4b7000004b8,
+ 0x4b9000004ba,
+ 0x4bb000004bc,
+ 0x4bd000004be,
+ 0x4bf000004c0,
+ 0x4c2000004c3,
+ 0x4c4000004c5,
+ 0x4c6000004c7,
+ 0x4c8000004c9,
+ 0x4ca000004cb,
+ 0x4cc000004cd,
+ 0x4ce000004d0,
+ 0x4d1000004d2,
+ 0x4d3000004d4,
+ 0x4d5000004d6,
+ 0x4d7000004d8,
+ 0x4d9000004da,
+ 0x4db000004dc,
+ 0x4dd000004de,
+ 0x4df000004e0,
+ 0x4e1000004e2,
+ 0x4e3000004e4,
+ 0x4e5000004e6,
+ 0x4e7000004e8,
+ 0x4e9000004ea,
+ 0x4eb000004ec,
+ 0x4ed000004ee,
+ 0x4ef000004f0,
+ 0x4f1000004f2,
+ 0x4f3000004f4,
+ 0x4f5000004f6,
+ 0x4f7000004f8,
+ 0x4f9000004fa,
+ 0x4fb000004fc,
+ 0x4fd000004fe,
+ 0x4ff00000500,
+ 0x50100000502,
+ 0x50300000504,
+ 0x50500000506,
+ 0x50700000508,
+ 0x5090000050a,
+ 0x50b0000050c,
+ 0x50d0000050e,
+ 0x50f00000510,
+ 0x51100000512,
+ 0x51300000514,
+ 0x51500000516,
+ 0x51700000518,
+ 0x5190000051a,
+ 0x51b0000051c,
+ 0x51d0000051e,
+ 0x51f00000520,
+ 0x52100000522,
+ 0x52300000524,
+ 0x52500000526,
+ 0x52700000528,
+ 0x5290000052a,
+ 0x52b0000052c,
+ 0x52d0000052e,
+ 0x52f00000530,
+ 0x5590000055a,
+ 0x56000000587,
+ 0x58800000589,
+ 0x591000005be,
+ 0x5bf000005c0,
+ 0x5c1000005c3,
+ 0x5c4000005c6,
+ 0x5c7000005c8,
+ 0x5d0000005eb,
+ 0x5ef000005f3,
+ 0x6100000061b,
+ 0x62000000640,
+ 0x64100000660,
+ 0x66e00000675,
+ 0x679000006d4,
+ 0x6d5000006dd,
+ 0x6df000006e9,
+ 0x6ea000006f0,
+ 0x6fa00000700,
+ 0x7100000074b,
+ 0x74d000007b2,
+ 0x7c0000007f6,
+ 0x7fd000007fe,
+ 0x8000000082e,
+ 0x8400000085c,
+ 0x8600000086b,
+ 0x87000000888,
+ 0x8890000088f,
+ 0x898000008e2,
+ 0x8e300000958,
+ 0x96000000964,
+ 0x96600000970,
+ 0x97100000984,
+ 0x9850000098d,
+ 0x98f00000991,
+ 0x993000009a9,
+ 0x9aa000009b1,
+ 0x9b2000009b3,
+ 0x9b6000009ba,
+ 0x9bc000009c5,
+ 0x9c7000009c9,
+ 0x9cb000009cf,
+ 0x9d7000009d8,
+ 0x9e0000009e4,
+ 0x9e6000009f2,
+ 0x9fc000009fd,
+ 0x9fe000009ff,
+ 0xa0100000a04,
+ 0xa0500000a0b,
+ 0xa0f00000a11,
+ 0xa1300000a29,
+ 0xa2a00000a31,
+ 0xa3200000a33,
+ 0xa3500000a36,
+ 0xa3800000a3a,
+ 0xa3c00000a3d,
+ 0xa3e00000a43,
+ 0xa4700000a49,
+ 0xa4b00000a4e,
+ 0xa5100000a52,
+ 0xa5c00000a5d,
+ 0xa6600000a76,
+ 0xa8100000a84,
+ 0xa8500000a8e,
+ 0xa8f00000a92,
+ 0xa9300000aa9,
+ 0xaaa00000ab1,
+ 0xab200000ab4,
+ 0xab500000aba,
+ 0xabc00000ac6,
+ 0xac700000aca,
+ 0xacb00000ace,
+ 0xad000000ad1,
+ 0xae000000ae4,
+ 0xae600000af0,
+ 0xaf900000b00,
+ 0xb0100000b04,
+ 0xb0500000b0d,
+ 0xb0f00000b11,
+ 0xb1300000b29,
+ 0xb2a00000b31,
+ 0xb3200000b34,
+ 0xb3500000b3a,
+ 0xb3c00000b45,
+ 0xb4700000b49,
+ 0xb4b00000b4e,
+ 0xb5500000b58,
+ 0xb5f00000b64,
+ 0xb6600000b70,
+ 0xb7100000b72,
+ 0xb8200000b84,
+ 0xb8500000b8b,
+ 0xb8e00000b91,
+ 0xb9200000b96,
+ 0xb9900000b9b,
+ 0xb9c00000b9d,
+ 0xb9e00000ba0,
+ 0xba300000ba5,
+ 0xba800000bab,
+ 0xbae00000bba,
+ 0xbbe00000bc3,
+ 0xbc600000bc9,
+ 0xbca00000bce,
+ 0xbd000000bd1,
+ 0xbd700000bd8,
+ 0xbe600000bf0,
+ 0xc0000000c0d,
+ 0xc0e00000c11,
+ 0xc1200000c29,
+ 0xc2a00000c3a,
+ 0xc3c00000c45,
+ 0xc4600000c49,
+ 0xc4a00000c4e,
+ 0xc5500000c57,
+ 0xc5800000c5b,
+ 0xc5d00000c5e,
+ 0xc6000000c64,
+ 0xc6600000c70,
+ 0xc8000000c84,
+ 0xc8500000c8d,
+ 0xc8e00000c91,
+ 0xc9200000ca9,
+ 0xcaa00000cb4,
+ 0xcb500000cba,
+ 0xcbc00000cc5,
+ 0xcc600000cc9,
+ 0xcca00000cce,
+ 0xcd500000cd7,
+ 0xcdd00000cdf,
+ 0xce000000ce4,
+ 0xce600000cf0,
+ 0xcf100000cf4,
+ 0xd0000000d0d,
+ 0xd0e00000d11,
+ 0xd1200000d45,
+ 0xd4600000d49,
+ 0xd4a00000d4f,
+ 0xd5400000d58,
+ 0xd5f00000d64,
+ 0xd6600000d70,
+ 0xd7a00000d80,
+ 0xd8100000d84,
+ 0xd8500000d97,
+ 0xd9a00000db2,
+ 0xdb300000dbc,
+ 0xdbd00000dbe,
+ 0xdc000000dc7,
+ 0xdca00000dcb,
+ 0xdcf00000dd5,
+ 0xdd600000dd7,
+ 0xdd800000de0,
+ 0xde600000df0,
+ 0xdf200000df4,
+ 0xe0100000e33,
+ 0xe3400000e3b,
+ 0xe4000000e4f,
+ 0xe5000000e5a,
+ 0xe8100000e83,
+ 0xe8400000e85,
+ 0xe8600000e8b,
+ 0xe8c00000ea4,
+ 0xea500000ea6,
+ 0xea700000eb3,
+ 0xeb400000ebe,
+ 0xec000000ec5,
+ 0xec600000ec7,
+ 0xec800000ecf,
+ 0xed000000eda,
+ 0xede00000ee0,
+ 0xf0000000f01,
+ 0xf0b00000f0c,
+ 0xf1800000f1a,
+ 0xf2000000f2a,
+ 0xf3500000f36,
+ 0xf3700000f38,
+ 0xf3900000f3a,
+ 0xf3e00000f43,
+ 0xf4400000f48,
+ 0xf4900000f4d,
+ 0xf4e00000f52,
+ 0xf5300000f57,
+ 0xf5800000f5c,
+ 0xf5d00000f69,
+ 0xf6a00000f6d,
+ 0xf7100000f73,
+ 0xf7400000f75,
+ 0xf7a00000f81,
+ 0xf8200000f85,
+ 0xf8600000f93,
+ 0xf9400000f98,
+ 0xf9900000f9d,
+ 0xf9e00000fa2,
+ 0xfa300000fa7,
+ 0xfa800000fac,
+ 0xfad00000fb9,
+ 0xfba00000fbd,
+ 0xfc600000fc7,
+ 0x10000000104a,
+ 0x10500000109e,
+ 0x10d0000010fb,
+ 0x10fd00001100,
+ 0x120000001249,
+ 0x124a0000124e,
+ 0x125000001257,
+ 0x125800001259,
+ 0x125a0000125e,
+ 0x126000001289,
+ 0x128a0000128e,
+ 0x1290000012b1,
+ 0x12b2000012b6,
+ 0x12b8000012bf,
+ 0x12c0000012c1,
+ 0x12c2000012c6,
+ 0x12c8000012d7,
+ 0x12d800001311,
+ 0x131200001316,
+ 0x13180000135b,
+ 0x135d00001360,
+ 0x138000001390,
+ 0x13a0000013f6,
+ 0x14010000166d,
+ 0x166f00001680,
+ 0x16810000169b,
+ 0x16a0000016eb,
+ 0x16f1000016f9,
+ 0x170000001716,
+ 0x171f00001735,
+ 0x174000001754,
+ 0x17600000176d,
+ 0x176e00001771,
+ 0x177200001774,
+ 0x1780000017b4,
+ 0x17b6000017d4,
+ 0x17d7000017d8,
+ 0x17dc000017de,
+ 0x17e0000017ea,
+ 0x18100000181a,
+ 0x182000001879,
+ 0x1880000018ab,
+ 0x18b0000018f6,
+ 0x19000000191f,
+ 0x19200000192c,
+ 0x19300000193c,
+ 0x19460000196e,
+ 0x197000001975,
+ 0x1980000019ac,
+ 0x19b0000019ca,
+ 0x19d0000019da,
+ 0x1a0000001a1c,
+ 0x1a2000001a5f,
+ 0x1a6000001a7d,
+ 0x1a7f00001a8a,
+ 0x1a9000001a9a,
+ 0x1aa700001aa8,
+ 0x1ab000001abe,
+ 0x1abf00001acf,
+ 0x1b0000001b4d,
+ 0x1b5000001b5a,
+ 0x1b6b00001b74,
+ 0x1b8000001bf4,
+ 0x1c0000001c38,
+ 0x1c4000001c4a,
+ 0x1c4d00001c7e,
+ 0x1cd000001cd3,
+ 0x1cd400001cfb,
+ 0x1d0000001d2c,
+ 0x1d2f00001d30,
+ 0x1d3b00001d3c,
+ 0x1d4e00001d4f,
+ 0x1d6b00001d78,
+ 0x1d7900001d9b,
+ 0x1dc000001e00,
+ 0x1e0100001e02,
+ 0x1e0300001e04,
+ 0x1e0500001e06,
+ 0x1e0700001e08,
+ 0x1e0900001e0a,
+ 0x1e0b00001e0c,
+ 0x1e0d00001e0e,
+ 0x1e0f00001e10,
+ 0x1e1100001e12,
+ 0x1e1300001e14,
+ 0x1e1500001e16,
+ 0x1e1700001e18,
+ 0x1e1900001e1a,
+ 0x1e1b00001e1c,
+ 0x1e1d00001e1e,
+ 0x1e1f00001e20,
+ 0x1e2100001e22,
+ 0x1e2300001e24,
+ 0x1e2500001e26,
+ 0x1e2700001e28,
+ 0x1e2900001e2a,
+ 0x1e2b00001e2c,
+ 0x1e2d00001e2e,
+ 0x1e2f00001e30,
+ 0x1e3100001e32,
+ 0x1e3300001e34,
+ 0x1e3500001e36,
+ 0x1e3700001e38,
+ 0x1e3900001e3a,
+ 0x1e3b00001e3c,
+ 0x1e3d00001e3e,
+ 0x1e3f00001e40,
+ 0x1e4100001e42,
+ 0x1e4300001e44,
+ 0x1e4500001e46,
+ 0x1e4700001e48,
+ 0x1e4900001e4a,
+ 0x1e4b00001e4c,
+ 0x1e4d00001e4e,
+ 0x1e4f00001e50,
+ 0x1e5100001e52,
+ 0x1e5300001e54,
+ 0x1e5500001e56,
+ 0x1e5700001e58,
+ 0x1e5900001e5a,
+ 0x1e5b00001e5c,
+ 0x1e5d00001e5e,
+ 0x1e5f00001e60,
+ 0x1e6100001e62,
+ 0x1e6300001e64,
+ 0x1e6500001e66,
+ 0x1e6700001e68,
+ 0x1e6900001e6a,
+ 0x1e6b00001e6c,
+ 0x1e6d00001e6e,
+ 0x1e6f00001e70,
+ 0x1e7100001e72,
+ 0x1e7300001e74,
+ 0x1e7500001e76,
+ 0x1e7700001e78,
+ 0x1e7900001e7a,
+ 0x1e7b00001e7c,
+ 0x1e7d00001e7e,
+ 0x1e7f00001e80,
+ 0x1e8100001e82,
+ 0x1e8300001e84,
+ 0x1e8500001e86,
+ 0x1e8700001e88,
+ 0x1e8900001e8a,
+ 0x1e8b00001e8c,
+ 0x1e8d00001e8e,
+ 0x1e8f00001e90,
+ 0x1e9100001e92,
+ 0x1e9300001e94,
+ 0x1e9500001e9a,
+ 0x1e9c00001e9e,
+ 0x1e9f00001ea0,
+ 0x1ea100001ea2,
+ 0x1ea300001ea4,
+ 0x1ea500001ea6,
+ 0x1ea700001ea8,
+ 0x1ea900001eaa,
+ 0x1eab00001eac,
+ 0x1ead00001eae,
+ 0x1eaf00001eb0,
+ 0x1eb100001eb2,
+ 0x1eb300001eb4,
+ 0x1eb500001eb6,
+ 0x1eb700001eb8,
+ 0x1eb900001eba,
+ 0x1ebb00001ebc,
+ 0x1ebd00001ebe,
+ 0x1ebf00001ec0,
+ 0x1ec100001ec2,
+ 0x1ec300001ec4,
+ 0x1ec500001ec6,
+ 0x1ec700001ec8,
+ 0x1ec900001eca,
+ 0x1ecb00001ecc,
+ 0x1ecd00001ece,
+ 0x1ecf00001ed0,
+ 0x1ed100001ed2,
+ 0x1ed300001ed4,
+ 0x1ed500001ed6,
+ 0x1ed700001ed8,
+ 0x1ed900001eda,
+ 0x1edb00001edc,
+ 0x1edd00001ede,
+ 0x1edf00001ee0,
+ 0x1ee100001ee2,
+ 0x1ee300001ee4,
+ 0x1ee500001ee6,
+ 0x1ee700001ee8,
+ 0x1ee900001eea,
+ 0x1eeb00001eec,
+ 0x1eed00001eee,
+ 0x1eef00001ef0,
+ 0x1ef100001ef2,
+ 0x1ef300001ef4,
+ 0x1ef500001ef6,
+ 0x1ef700001ef8,
+ 0x1ef900001efa,
+ 0x1efb00001efc,
+ 0x1efd00001efe,
+ 0x1eff00001f08,
+ 0x1f1000001f16,
+ 0x1f2000001f28,
+ 0x1f3000001f38,
+ 0x1f4000001f46,
+ 0x1f5000001f58,
+ 0x1f6000001f68,
+ 0x1f7000001f71,
+ 0x1f7200001f73,
+ 0x1f7400001f75,
+ 0x1f7600001f77,
+ 0x1f7800001f79,
+ 0x1f7a00001f7b,
+ 0x1f7c00001f7d,
+ 0x1fb000001fb2,
+ 0x1fb600001fb7,
+ 0x1fc600001fc7,
+ 0x1fd000001fd3,
+ 0x1fd600001fd8,
+ 0x1fe000001fe3,
+ 0x1fe400001fe8,
+ 0x1ff600001ff7,
+ 0x214e0000214f,
+ 0x218400002185,
+ 0x2c3000002c60,
+ 0x2c6100002c62,
+ 0x2c6500002c67,
+ 0x2c6800002c69,
+ 0x2c6a00002c6b,
+ 0x2c6c00002c6d,
+ 0x2c7100002c72,
+ 0x2c7300002c75,
+ 0x2c7600002c7c,
+ 0x2c8100002c82,
+ 0x2c8300002c84,
+ 0x2c8500002c86,
+ 0x2c8700002c88,
+ 0x2c8900002c8a,
+ 0x2c8b00002c8c,
+ 0x2c8d00002c8e,
+ 0x2c8f00002c90,
+ 0x2c9100002c92,
+ 0x2c9300002c94,
+ 0x2c9500002c96,
+ 0x2c9700002c98,
+ 0x2c9900002c9a,
+ 0x2c9b00002c9c,
+ 0x2c9d00002c9e,
+ 0x2c9f00002ca0,
+ 0x2ca100002ca2,
+ 0x2ca300002ca4,
+ 0x2ca500002ca6,
+ 0x2ca700002ca8,
+ 0x2ca900002caa,
+ 0x2cab00002cac,
+ 0x2cad00002cae,
+ 0x2caf00002cb0,
+ 0x2cb100002cb2,
+ 0x2cb300002cb4,
+ 0x2cb500002cb6,
+ 0x2cb700002cb8,
+ 0x2cb900002cba,
+ 0x2cbb00002cbc,
+ 0x2cbd00002cbe,
+ 0x2cbf00002cc0,
+ 0x2cc100002cc2,
+ 0x2cc300002cc4,
+ 0x2cc500002cc6,
+ 0x2cc700002cc8,
+ 0x2cc900002cca,
+ 0x2ccb00002ccc,
+ 0x2ccd00002cce,
+ 0x2ccf00002cd0,
+ 0x2cd100002cd2,
+ 0x2cd300002cd4,
+ 0x2cd500002cd6,
+ 0x2cd700002cd8,
+ 0x2cd900002cda,
+ 0x2cdb00002cdc,
+ 0x2cdd00002cde,
+ 0x2cdf00002ce0,
+ 0x2ce100002ce2,
+ 0x2ce300002ce5,
+ 0x2cec00002ced,
+ 0x2cee00002cf2,
+ 0x2cf300002cf4,
+ 0x2d0000002d26,
+ 0x2d2700002d28,
+ 0x2d2d00002d2e,
+ 0x2d3000002d68,
+ 0x2d7f00002d97,
+ 0x2da000002da7,
+ 0x2da800002daf,
+ 0x2db000002db7,
+ 0x2db800002dbf,
+ 0x2dc000002dc7,
+ 0x2dc800002dcf,
+ 0x2dd000002dd7,
+ 0x2dd800002ddf,
+ 0x2de000002e00,
+ 0x2e2f00002e30,
+ 0x300500003008,
+ 0x302a0000302e,
+ 0x303c0000303d,
+ 0x304100003097,
+ 0x30990000309b,
+ 0x309d0000309f,
+ 0x30a1000030fb,
+ 0x30fc000030ff,
+ 0x310500003130,
+ 0x31a0000031c0,
+ 0x31f000003200,
+ 0x340000004dc0,
+ 0x4e000000a48d,
+ 0xa4d00000a4fe,
+ 0xa5000000a60d,
+ 0xa6100000a62c,
+ 0xa6410000a642,
+ 0xa6430000a644,
+ 0xa6450000a646,
+ 0xa6470000a648,
+ 0xa6490000a64a,
+ 0xa64b0000a64c,
+ 0xa64d0000a64e,
+ 0xa64f0000a650,
+ 0xa6510000a652,
+ 0xa6530000a654,
+ 0xa6550000a656,
+ 0xa6570000a658,
+ 0xa6590000a65a,
+ 0xa65b0000a65c,
+ 0xa65d0000a65e,
+ 0xa65f0000a660,
+ 0xa6610000a662,
+ 0xa6630000a664,
+ 0xa6650000a666,
+ 0xa6670000a668,
+ 0xa6690000a66a,
+ 0xa66b0000a66c,
+ 0xa66d0000a670,
+ 0xa6740000a67e,
+ 0xa67f0000a680,
+ 0xa6810000a682,
+ 0xa6830000a684,
+ 0xa6850000a686,
+ 0xa6870000a688,
+ 0xa6890000a68a,
+ 0xa68b0000a68c,
+ 0xa68d0000a68e,
+ 0xa68f0000a690,
+ 0xa6910000a692,
+ 0xa6930000a694,
+ 0xa6950000a696,
+ 0xa6970000a698,
+ 0xa6990000a69a,
+ 0xa69b0000a69c,
+ 0xa69e0000a6e6,
+ 0xa6f00000a6f2,
+ 0xa7170000a720,
+ 0xa7230000a724,
+ 0xa7250000a726,
+ 0xa7270000a728,
+ 0xa7290000a72a,
+ 0xa72b0000a72c,
+ 0xa72d0000a72e,
+ 0xa72f0000a732,
+ 0xa7330000a734,
+ 0xa7350000a736,
+ 0xa7370000a738,
+ 0xa7390000a73a,
+ 0xa73b0000a73c,
+ 0xa73d0000a73e,
+ 0xa73f0000a740,
+ 0xa7410000a742,
+ 0xa7430000a744,
+ 0xa7450000a746,
+ 0xa7470000a748,
+ 0xa7490000a74a,
+ 0xa74b0000a74c,
+ 0xa74d0000a74e,
+ 0xa74f0000a750,
+ 0xa7510000a752,
+ 0xa7530000a754,
+ 0xa7550000a756,
+ 0xa7570000a758,
+ 0xa7590000a75a,
+ 0xa75b0000a75c,
+ 0xa75d0000a75e,
+ 0xa75f0000a760,
+ 0xa7610000a762,
+ 0xa7630000a764,
+ 0xa7650000a766,
+ 0xa7670000a768,
+ 0xa7690000a76a,
+ 0xa76b0000a76c,
+ 0xa76d0000a76e,
+ 0xa76f0000a770,
+ 0xa7710000a779,
+ 0xa77a0000a77b,
+ 0xa77c0000a77d,
+ 0xa77f0000a780,
+ 0xa7810000a782,
+ 0xa7830000a784,
+ 0xa7850000a786,
+ 0xa7870000a789,
+ 0xa78c0000a78d,
+ 0xa78e0000a790,
+ 0xa7910000a792,
+ 0xa7930000a796,
+ 0xa7970000a798,
+ 0xa7990000a79a,
+ 0xa79b0000a79c,
+ 0xa79d0000a79e,
+ 0xa79f0000a7a0,
+ 0xa7a10000a7a2,
+ 0xa7a30000a7a4,
+ 0xa7a50000a7a6,
+ 0xa7a70000a7a8,
+ 0xa7a90000a7aa,
+ 0xa7af0000a7b0,
+ 0xa7b50000a7b6,
+ 0xa7b70000a7b8,
+ 0xa7b90000a7ba,
+ 0xa7bb0000a7bc,
+ 0xa7bd0000a7be,
+ 0xa7bf0000a7c0,
+ 0xa7c10000a7c2,
+ 0xa7c30000a7c4,
+ 0xa7c80000a7c9,
+ 0xa7ca0000a7cb,
+ 0xa7d10000a7d2,
+ 0xa7d30000a7d4,
+ 0xa7d50000a7d6,
+ 0xa7d70000a7d8,
+ 0xa7d90000a7da,
+ 0xa7f20000a7f5,
+ 0xa7f60000a7f8,
+ 0xa7fa0000a828,
+ 0xa82c0000a82d,
+ 0xa8400000a874,
+ 0xa8800000a8c6,
+ 0xa8d00000a8da,
+ 0xa8e00000a8f8,
+ 0xa8fb0000a8fc,
+ 0xa8fd0000a92e,
+ 0xa9300000a954,
+ 0xa9800000a9c1,
+ 0xa9cf0000a9da,
+ 0xa9e00000a9ff,
+ 0xaa000000aa37,
+ 0xaa400000aa4e,
+ 0xaa500000aa5a,
+ 0xaa600000aa77,
+ 0xaa7a0000aac3,
+ 0xaadb0000aade,
+ 0xaae00000aaf0,
+ 0xaaf20000aaf7,
+ 0xab010000ab07,
+ 0xab090000ab0f,
+ 0xab110000ab17,
+ 0xab200000ab27,
+ 0xab280000ab2f,
+ 0xab300000ab5b,
+ 0xab600000ab69,
+ 0xabc00000abeb,
+ 0xabec0000abee,
+ 0xabf00000abfa,
+ 0xac000000d7a4,
+ 0xfa0e0000fa10,
+ 0xfa110000fa12,
+ 0xfa130000fa15,
+ 0xfa1f0000fa20,
+ 0xfa210000fa22,
+ 0xfa230000fa25,
+ 0xfa270000fa2a,
+ 0xfb1e0000fb1f,
+ 0xfe200000fe30,
+ 0xfe730000fe74,
+ 0x100000001000c,
+ 0x1000d00010027,
+ 0x100280001003b,
+ 0x1003c0001003e,
+ 0x1003f0001004e,
+ 0x100500001005e,
+ 0x10080000100fb,
+ 0x101fd000101fe,
+ 0x102800001029d,
+ 0x102a0000102d1,
+ 0x102e0000102e1,
+ 0x1030000010320,
+ 0x1032d00010341,
+ 0x103420001034a,
+ 0x103500001037b,
+ 0x103800001039e,
+ 0x103a0000103c4,
+ 0x103c8000103d0,
+ 0x104280001049e,
+ 0x104a0000104aa,
+ 0x104d8000104fc,
+ 0x1050000010528,
+ 0x1053000010564,
+ 0x10597000105a2,
+ 0x105a3000105b2,
+ 0x105b3000105ba,
+ 0x105bb000105bd,
+ 0x1060000010737,
+ 0x1074000010756,
+ 0x1076000010768,
+ 0x1078000010786,
+ 0x10787000107b1,
+ 0x107b2000107bb,
+ 0x1080000010806,
+ 0x1080800010809,
+ 0x1080a00010836,
+ 0x1083700010839,
+ 0x1083c0001083d,
+ 0x1083f00010856,
+ 0x1086000010877,
+ 0x108800001089f,
+ 0x108e0000108f3,
+ 0x108f4000108f6,
+ 0x1090000010916,
+ 0x109200001093a,
+ 0x10980000109b8,
+ 0x109be000109c0,
+ 0x10a0000010a04,
+ 0x10a0500010a07,
+ 0x10a0c00010a14,
+ 0x10a1500010a18,
+ 0x10a1900010a36,
+ 0x10a3800010a3b,
+ 0x10a3f00010a40,
+ 0x10a6000010a7d,
+ 0x10a8000010a9d,
+ 0x10ac000010ac8,
+ 0x10ac900010ae7,
+ 0x10b0000010b36,
+ 0x10b4000010b56,
+ 0x10b6000010b73,
+ 0x10b8000010b92,
+ 0x10c0000010c49,
+ 0x10cc000010cf3,
+ 0x10d0000010d28,
+ 0x10d3000010d3a,
+ 0x10e8000010eaa,
+ 0x10eab00010ead,
+ 0x10eb000010eb2,
+ 0x10efd00010f1d,
+ 0x10f2700010f28,
+ 0x10f3000010f51,
+ 0x10f7000010f86,
+ 0x10fb000010fc5,
+ 0x10fe000010ff7,
+ 0x1100000011047,
+ 0x1106600011076,
+ 0x1107f000110bb,
+ 0x110c2000110c3,
+ 0x110d0000110e9,
+ 0x110f0000110fa,
+ 0x1110000011135,
+ 0x1113600011140,
+ 0x1114400011148,
+ 0x1115000011174,
+ 0x1117600011177,
+ 0x11180000111c5,
+ 0x111c9000111cd,
+ 0x111ce000111db,
+ 0x111dc000111dd,
+ 0x1120000011212,
+ 0x1121300011238,
+ 0x1123e00011242,
+ 0x1128000011287,
+ 0x1128800011289,
+ 0x1128a0001128e,
+ 0x1128f0001129e,
+ 0x1129f000112a9,
+ 0x112b0000112eb,
+ 0x112f0000112fa,
+ 0x1130000011304,
+ 0x113050001130d,
+ 0x1130f00011311,
+ 0x1131300011329,
+ 0x1132a00011331,
+ 0x1133200011334,
+ 0x113350001133a,
+ 0x1133b00011345,
+ 0x1134700011349,
+ 0x1134b0001134e,
+ 0x1135000011351,
+ 0x1135700011358,
+ 0x1135d00011364,
+ 0x113660001136d,
+ 0x1137000011375,
+ 0x114000001144b,
+ 0x114500001145a,
+ 0x1145e00011462,
+ 0x11480000114c6,
+ 0x114c7000114c8,
+ 0x114d0000114da,
+ 0x11580000115b6,
+ 0x115b8000115c1,
+ 0x115d8000115de,
+ 0x1160000011641,
+ 0x1164400011645,
+ 0x116500001165a,
+ 0x11680000116b9,
+ 0x116c0000116ca,
+ 0x117000001171b,
+ 0x1171d0001172c,
+ 0x117300001173a,
+ 0x1174000011747,
+ 0x118000001183b,
+ 0x118c0000118ea,
+ 0x118ff00011907,
+ 0x119090001190a,
+ 0x1190c00011914,
+ 0x1191500011917,
+ 0x1191800011936,
+ 0x1193700011939,
+ 0x1193b00011944,
+ 0x119500001195a,
+ 0x119a0000119a8,
+ 0x119aa000119d8,
+ 0x119da000119e2,
+ 0x119e3000119e5,
+ 0x11a0000011a3f,
+ 0x11a4700011a48,
+ 0x11a5000011a9a,
+ 0x11a9d00011a9e,
+ 0x11ab000011af9,
+ 0x11c0000011c09,
+ 0x11c0a00011c37,
+ 0x11c3800011c41,
+ 0x11c5000011c5a,
+ 0x11c7200011c90,
+ 0x11c9200011ca8,
+ 0x11ca900011cb7,
+ 0x11d0000011d07,
+ 0x11d0800011d0a,
+ 0x11d0b00011d37,
+ 0x11d3a00011d3b,
+ 0x11d3c00011d3e,
+ 0x11d3f00011d48,
+ 0x11d5000011d5a,
+ 0x11d6000011d66,
+ 0x11d6700011d69,
+ 0x11d6a00011d8f,
+ 0x11d9000011d92,
+ 0x11d9300011d99,
+ 0x11da000011daa,
+ 0x11ee000011ef7,
+ 0x11f0000011f11,
+ 0x11f1200011f3b,
+ 0x11f3e00011f43,
+ 0x11f5000011f5a,
+ 0x11fb000011fb1,
+ 0x120000001239a,
+ 0x1248000012544,
+ 0x12f9000012ff1,
+ 0x1300000013430,
+ 0x1344000013456,
+ 0x1440000014647,
+ 0x1680000016a39,
+ 0x16a4000016a5f,
+ 0x16a6000016a6a,
+ 0x16a7000016abf,
+ 0x16ac000016aca,
+ 0x16ad000016aee,
+ 0x16af000016af5,
+ 0x16b0000016b37,
+ 0x16b4000016b44,
+ 0x16b5000016b5a,
+ 0x16b6300016b78,
+ 0x16b7d00016b90,
+ 0x16e6000016e80,
+ 0x16f0000016f4b,
+ 0x16f4f00016f88,
+ 0x16f8f00016fa0,
+ 0x16fe000016fe2,
+ 0x16fe300016fe5,
+ 0x16ff000016ff2,
+ 0x17000000187f8,
+ 0x1880000018cd6,
+ 0x18d0000018d09,
+ 0x1aff00001aff4,
+ 0x1aff50001affc,
+ 0x1affd0001afff,
+ 0x1b0000001b123,
+ 0x1b1320001b133,
+ 0x1b1500001b153,
+ 0x1b1550001b156,
+ 0x1b1640001b168,
+ 0x1b1700001b2fc,
+ 0x1bc000001bc6b,
+ 0x1bc700001bc7d,
+ 0x1bc800001bc89,
+ 0x1bc900001bc9a,
+ 0x1bc9d0001bc9f,
+ 0x1cf000001cf2e,
+ 0x1cf300001cf47,
+ 0x1da000001da37,
+ 0x1da3b0001da6d,
+ 0x1da750001da76,
+ 0x1da840001da85,
+ 0x1da9b0001daa0,
+ 0x1daa10001dab0,
+ 0x1df000001df1f,
+ 0x1df250001df2b,
+ 0x1e0000001e007,
+ 0x1e0080001e019,
+ 0x1e01b0001e022,
+ 0x1e0230001e025,
+ 0x1e0260001e02b,
+ 0x1e0300001e06e,
+ 0x1e08f0001e090,
+ 0x1e1000001e12d,
+ 0x1e1300001e13e,
+ 0x1e1400001e14a,
+ 0x1e14e0001e14f,
+ 0x1e2900001e2af,
+ 0x1e2c00001e2fa,
+ 0x1e4d00001e4fa,
+ 0x1e7e00001e7e7,
+ 0x1e7e80001e7ec,
+ 0x1e7ed0001e7ef,
+ 0x1e7f00001e7ff,
+ 0x1e8000001e8c5,
+ 0x1e8d00001e8d7,
+ 0x1e9220001e94c,
+ 0x1e9500001e95a,
+ 0x200000002a6e0,
+ 0x2a7000002b73a,
+ 0x2b7400002b81e,
+ 0x2b8200002cea2,
+ 0x2ceb00002ebe1,
+ 0x300000003134b,
+ 0x31350000323b0,
+ ),
+ 'CONTEXTJ': (
+ 0x200c0000200e,
+ ),
+ 'CONTEXTO': (
+ 0xb7000000b8,
+ 0x37500000376,
+ 0x5f3000005f5,
+ 0x6600000066a,
+ 0x6f0000006fa,
+ 0x30fb000030fc,
+ ),
+}
diff --git a/billinglayer/python/idna/intranges.py b/billinglayer/python/idna/intranges.py
new file mode 100644
index 0000000..6a43b04
--- /dev/null
+++ b/billinglayer/python/idna/intranges.py
@@ -0,0 +1,54 @@
+"""
+Given a list of integers, made up of (hopefully) a small number of long runs
+of consecutive integers, compute a representation of the form
+((start1, end1), (start2, end2) ...). Then answer the question "was x present
+in the original list?" in time O(log(# runs)).
+"""
+
+import bisect
+from typing import List, Tuple
+
+def intranges_from_list(list_: List[int]) -> Tuple[int, ...]:
+ """Represent a list of integers as a sequence of ranges:
+ ((start_0, end_0), (start_1, end_1), ...), such that the original
+ integers are exactly those x such that start_i <= x < end_i for some i.
+
+ Ranges are encoded as single integers (start << 32 | end), not as tuples.
+ """
+
+ sorted_list = sorted(list_)
+ ranges = []
+ last_write = -1
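+ # walk the sorted values; each time a run of consecutive integers ends
+ # (or the input is exhausted), emit one encoded range covering the run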
+ for i in range(len(sorted_list)):
+ if i+1 < len(sorted_list):
+ if sorted_list[i] == sorted_list[i+1]-1:
+ continue
+ current_range = sorted_list[last_write+1:i+1]
+ ranges.append(_encode_range(current_range[0], current_range[-1] + 1))
+ last_write = i
+
+ return tuple(ranges)
+
+def _encode_range(start: int, end: int) -> int:
+ return (start << 32) | end
+
+def _decode_range(r: int) -> Tuple[int, int]:
+ return (r >> 32), (r & ((1 << 32) - 1))
+
+
+def intranges_contain(int_: int, ranges: Tuple[int, ...]) -> bool:
+ """Determine if `int_` falls into one of the ranges in `ranges`."""
+ tuple_ = _encode_range(int_, 0)
+ pos = bisect.bisect_left(ranges, tuple_)
+ # we could be immediately after an encoded range (start, end)
+ # that contains int_, i.e. start < int_ < end
+ if pos > 0:
+ left, right = _decode_range(ranges[pos-1])
+ if left <= int_ < right:
+ return True
+ # or the range at pos could start exactly at int_
+ if pos < len(ranges):
+ left, _ = _decode_range(ranges[pos])
+ if left == int_:
+ return True
+ return False
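+
+# A doctest-style sketch of the intended use: encode the runs once, then
+# answer membership queries in O(log(# runs)) via bisect:
+#
+# >>> ranges = intranges_from_list([1, 2, 3, 7, 8, 30])
+# >>> [_decode_range(r) for r in ranges]
+# [(1, 4), (7, 9), (30, 31)]
+# >>> intranges_contain(2, ranges)
+# True
+# >>> intranges_contain(4, ranges)
+# False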
diff --git a/billinglayer/python/idna/package_data.py b/billinglayer/python/idna/package_data.py
new file mode 100644
index 0000000..8501893
--- /dev/null
+++ b/billinglayer/python/idna/package_data.py
@@ -0,0 +1,2 @@
+__version__ = '3.4'
+
diff --git a/billinglayer/python/idna/py.typed b/billinglayer/python/idna/py.typed
new file mode 100644
index 0000000..e69de29
diff --git a/billinglayer/python/idna/uts46data.py b/billinglayer/python/idna/uts46data.py
new file mode 100644
index 0000000..186796c
--- /dev/null
+++ b/billinglayer/python/idna/uts46data.py
@@ -0,0 +1,8600 @@
+# This file is automatically generated by tools/idna-data
+# vim: set fileencoding=utf-8 :
+
+from typing import List, Tuple, Union
+
+
+"""IDNA Mapping Table from UTS46."""
+
+
+__version__ = '15.0.0'
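+
+# Each entry below is (codepoint, status) or (codepoint, status, mapping);
+# an entry applies to every codepoint from its first element up to, but
+# not including, the first element of the next entry. Statuses follow
+# UTS #46: 'V' valid, 'M' mapped (to the third element), 'D' deviation,
+# 'I' ignored, 'X' disallowed, '3' disallowed under STD3 rules. A minimal
+# lookup sketch, assuming `uts46data` is the tuple the generated file
+# builds at the bottom by concatenating all _seg_N() lists:
+#
+# import bisect
+# def _lookup(cp, table):
+#     # last entry whose start codepoint is <= cp
+#     idx = bisect.bisect_right([row[0] for row in table], cp) - 1
+#     return table[idx]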
+def _seg_0() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x0, '3'),
+ (0x1, '3'),
+ (0x2, '3'),
+ (0x3, '3'),
+ (0x4, '3'),
+ (0x5, '3'),
+ (0x6, '3'),
+ (0x7, '3'),
+ (0x8, '3'),
+ (0x9, '3'),
+ (0xA, '3'),
+ (0xB, '3'),
+ (0xC, '3'),
+ (0xD, '3'),
+ (0xE, '3'),
+ (0xF, '3'),
+ (0x10, '3'),
+ (0x11, '3'),
+ (0x12, '3'),
+ (0x13, '3'),
+ (0x14, '3'),
+ (0x15, '3'),
+ (0x16, '3'),
+ (0x17, '3'),
+ (0x18, '3'),
+ (0x19, '3'),
+ (0x1A, '3'),
+ (0x1B, '3'),
+ (0x1C, '3'),
+ (0x1D, '3'),
+ (0x1E, '3'),
+ (0x1F, '3'),
+ (0x20, '3'),
+ (0x21, '3'),
+ (0x22, '3'),
+ (0x23, '3'),
+ (0x24, '3'),
+ (0x25, '3'),
+ (0x26, '3'),
+ (0x27, '3'),
+ (0x28, '3'),
+ (0x29, '3'),
+ (0x2A, '3'),
+ (0x2B, '3'),
+ (0x2C, '3'),
+ (0x2D, 'V'),
+ (0x2E, 'V'),
+ (0x2F, '3'),
+ (0x30, 'V'),
+ (0x31, 'V'),
+ (0x32, 'V'),
+ (0x33, 'V'),
+ (0x34, 'V'),
+ (0x35, 'V'),
+ (0x36, 'V'),
+ (0x37, 'V'),
+ (0x38, 'V'),
+ (0x39, 'V'),
+ (0x3A, '3'),
+ (0x3B, '3'),
+ (0x3C, '3'),
+ (0x3D, '3'),
+ (0x3E, '3'),
+ (0x3F, '3'),
+ (0x40, '3'),
+ (0x41, 'M', 'a'),
+ (0x42, 'M', 'b'),
+ (0x43, 'M', 'c'),
+ (0x44, 'M', 'd'),
+ (0x45, 'M', 'e'),
+ (0x46, 'M', 'f'),
+ (0x47, 'M', 'g'),
+ (0x48, 'M', 'h'),
+ (0x49, 'M', 'i'),
+ (0x4A, 'M', 'j'),
+ (0x4B, 'M', 'k'),
+ (0x4C, 'M', 'l'),
+ (0x4D, 'M', 'm'),
+ (0x4E, 'M', 'n'),
+ (0x4F, 'M', 'o'),
+ (0x50, 'M', 'p'),
+ (0x51, 'M', 'q'),
+ (0x52, 'M', 'r'),
+ (0x53, 'M', 's'),
+ (0x54, 'M', 't'),
+ (0x55, 'M', 'u'),
+ (0x56, 'M', 'v'),
+ (0x57, 'M', 'w'),
+ (0x58, 'M', 'x'),
+ (0x59, 'M', 'y'),
+ (0x5A, 'M', 'z'),
+ (0x5B, '3'),
+ (0x5C, '3'),
+ (0x5D, '3'),
+ (0x5E, '3'),
+ (0x5F, '3'),
+ (0x60, '3'),
+ (0x61, 'V'),
+ (0x62, 'V'),
+ (0x63, 'V'),
+ ]
+
+def _seg_1() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x64, 'V'),
+ (0x65, 'V'),
+ (0x66, 'V'),
+ (0x67, 'V'),
+ (0x68, 'V'),
+ (0x69, 'V'),
+ (0x6A, 'V'),
+ (0x6B, 'V'),
+ (0x6C, 'V'),
+ (0x6D, 'V'),
+ (0x6E, 'V'),
+ (0x6F, 'V'),
+ (0x70, 'V'),
+ (0x71, 'V'),
+ (0x72, 'V'),
+ (0x73, 'V'),
+ (0x74, 'V'),
+ (0x75, 'V'),
+ (0x76, 'V'),
+ (0x77, 'V'),
+ (0x78, 'V'),
+ (0x79, 'V'),
+ (0x7A, 'V'),
+ (0x7B, '3'),
+ (0x7C, '3'),
+ (0x7D, '3'),
+ (0x7E, '3'),
+ (0x7F, '3'),
+ (0x80, 'X'),
+ (0x81, 'X'),
+ (0x82, 'X'),
+ (0x83, 'X'),
+ (0x84, 'X'),
+ (0x85, 'X'),
+ (0x86, 'X'),
+ (0x87, 'X'),
+ (0x88, 'X'),
+ (0x89, 'X'),
+ (0x8A, 'X'),
+ (0x8B, 'X'),
+ (0x8C, 'X'),
+ (0x8D, 'X'),
+ (0x8E, 'X'),
+ (0x8F, 'X'),
+ (0x90, 'X'),
+ (0x91, 'X'),
+ (0x92, 'X'),
+ (0x93, 'X'),
+ (0x94, 'X'),
+ (0x95, 'X'),
+ (0x96, 'X'),
+ (0x97, 'X'),
+ (0x98, 'X'),
+ (0x99, 'X'),
+ (0x9A, 'X'),
+ (0x9B, 'X'),
+ (0x9C, 'X'),
+ (0x9D, 'X'),
+ (0x9E, 'X'),
+ (0x9F, 'X'),
+ (0xA0, '3', ' '),
+ (0xA1, 'V'),
+ (0xA2, 'V'),
+ (0xA3, 'V'),
+ (0xA4, 'V'),
+ (0xA5, 'V'),
+ (0xA6, 'V'),
+ (0xA7, 'V'),
+ (0xA8, '3', ' ̈'),
+ (0xA9, 'V'),
+ (0xAA, 'M', 'a'),
+ (0xAB, 'V'),
+ (0xAC, 'V'),
+ (0xAD, 'I'),
+ (0xAE, 'V'),
+ (0xAF, '3', ' ̄'),
+ (0xB0, 'V'),
+ (0xB1, 'V'),
+ (0xB2, 'M', '2'),
+ (0xB3, 'M', '3'),
+ (0xB4, '3', ' ́'),
+ (0xB5, 'M', 'μ'),
+ (0xB6, 'V'),
+ (0xB7, 'V'),
+ (0xB8, '3', ' ̧'),
+ (0xB9, 'M', '1'),
+ (0xBA, 'M', 'o'),
+ (0xBB, 'V'),
+ (0xBC, 'M', '1⁄4'),
+ (0xBD, 'M', '1⁄2'),
+ (0xBE, 'M', '3⁄4'),
+ (0xBF, 'V'),
+ (0xC0, 'M', 'à'),
+ (0xC1, 'M', 'á'),
+ (0xC2, 'M', 'â'),
+ (0xC3, 'M', 'ã'),
+ (0xC4, 'M', 'ä'),
+ (0xC5, 'M', 'å'),
+ (0xC6, 'M', 'æ'),
+ (0xC7, 'M', 'ç'),
+ ]
+
+def _seg_2() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xC8, 'M', 'è'),
+ (0xC9, 'M', 'é'),
+ (0xCA, 'M', 'ê'),
+ (0xCB, 'M', 'ë'),
+ (0xCC, 'M', 'ì'),
+ (0xCD, 'M', 'í'),
+ (0xCE, 'M', 'î'),
+ (0xCF, 'M', 'ï'),
+ (0xD0, 'M', 'ð'),
+ (0xD1, 'M', 'ñ'),
+ (0xD2, 'M', 'ò'),
+ (0xD3, 'M', 'ó'),
+ (0xD4, 'M', 'ô'),
+ (0xD5, 'M', 'õ'),
+ (0xD6, 'M', 'ö'),
+ (0xD7, 'V'),
+ (0xD8, 'M', 'ø'),
+ (0xD9, 'M', 'ù'),
+ (0xDA, 'M', 'ú'),
+ (0xDB, 'M', 'û'),
+ (0xDC, 'M', 'ü'),
+ (0xDD, 'M', 'ý'),
+ (0xDE, 'M', 'þ'),
+ (0xDF, 'D', 'ss'),
+ (0xE0, 'V'),
+ (0xE1, 'V'),
+ (0xE2, 'V'),
+ (0xE3, 'V'),
+ (0xE4, 'V'),
+ (0xE5, 'V'),
+ (0xE6, 'V'),
+ (0xE7, 'V'),
+ (0xE8, 'V'),
+ (0xE9, 'V'),
+ (0xEA, 'V'),
+ (0xEB, 'V'),
+ (0xEC, 'V'),
+ (0xED, 'V'),
+ (0xEE, 'V'),
+ (0xEF, 'V'),
+ (0xF0, 'V'),
+ (0xF1, 'V'),
+ (0xF2, 'V'),
+ (0xF3, 'V'),
+ (0xF4, 'V'),
+ (0xF5, 'V'),
+ (0xF6, 'V'),
+ (0xF7, 'V'),
+ (0xF8, 'V'),
+ (0xF9, 'V'),
+ (0xFA, 'V'),
+ (0xFB, 'V'),
+ (0xFC, 'V'),
+ (0xFD, 'V'),
+ (0xFE, 'V'),
+ (0xFF, 'V'),
+ (0x100, 'M', 'ā'),
+ (0x101, 'V'),
+ (0x102, 'M', 'ă'),
+ (0x103, 'V'),
+ (0x104, 'M', 'ą'),
+ (0x105, 'V'),
+ (0x106, 'M', 'ć'),
+ (0x107, 'V'),
+ (0x108, 'M', 'ĉ'),
+ (0x109, 'V'),
+ (0x10A, 'M', 'ċ'),
+ (0x10B, 'V'),
+ (0x10C, 'M', 'č'),
+ (0x10D, 'V'),
+ (0x10E, 'M', 'ď'),
+ (0x10F, 'V'),
+ (0x110, 'M', 'đ'),
+ (0x111, 'V'),
+ (0x112, 'M', 'ē'),
+ (0x113, 'V'),
+ (0x114, 'M', 'ĕ'),
+ (0x115, 'V'),
+ (0x116, 'M', 'ė'),
+ (0x117, 'V'),
+ (0x118, 'M', 'ę'),
+ (0x119, 'V'),
+ (0x11A, 'M', 'ě'),
+ (0x11B, 'V'),
+ (0x11C, 'M', 'ĝ'),
+ (0x11D, 'V'),
+ (0x11E, 'M', 'ğ'),
+ (0x11F, 'V'),
+ (0x120, 'M', 'ġ'),
+ (0x121, 'V'),
+ (0x122, 'M', 'ģ'),
+ (0x123, 'V'),
+ (0x124, 'M', 'ĥ'),
+ (0x125, 'V'),
+ (0x126, 'M', 'ħ'),
+ (0x127, 'V'),
+ (0x128, 'M', 'ĩ'),
+ (0x129, 'V'),
+ (0x12A, 'M', 'ī'),
+ (0x12B, 'V'),
+ ]
+
+def _seg_3() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x12C, 'M', 'ĭ'),
+ (0x12D, 'V'),
+ (0x12E, 'M', 'į'),
+ (0x12F, 'V'),
+ (0x130, 'M', 'i̇'),
+ (0x131, 'V'),
+ (0x132, 'M', 'ij'),
+ (0x134, 'M', 'ĵ'),
+ (0x135, 'V'),
+ (0x136, 'M', 'ķ'),
+ (0x137, 'V'),
+ (0x139, 'M', 'ĺ'),
+ (0x13A, 'V'),
+ (0x13B, 'M', 'ļ'),
+ (0x13C, 'V'),
+ (0x13D, 'M', 'ľ'),
+ (0x13E, 'V'),
+ (0x13F, 'M', 'l·'),
+ (0x141, 'M', 'ł'),
+ (0x142, 'V'),
+ (0x143, 'M', 'ń'),
+ (0x144, 'V'),
+ (0x145, 'M', 'ņ'),
+ (0x146, 'V'),
+ (0x147, 'M', 'ň'),
+ (0x148, 'V'),
+ (0x149, 'M', 'ʼn'),
+ (0x14A, 'M', 'ŋ'),
+ (0x14B, 'V'),
+ (0x14C, 'M', 'ō'),
+ (0x14D, 'V'),
+ (0x14E, 'M', 'ŏ'),
+ (0x14F, 'V'),
+ (0x150, 'M', 'ő'),
+ (0x151, 'V'),
+ (0x152, 'M', 'œ'),
+ (0x153, 'V'),
+ (0x154, 'M', 'ŕ'),
+ (0x155, 'V'),
+ (0x156, 'M', 'ŗ'),
+ (0x157, 'V'),
+ (0x158, 'M', 'ř'),
+ (0x159, 'V'),
+ (0x15A, 'M', 'ś'),
+ (0x15B, 'V'),
+ (0x15C, 'M', 'ŝ'),
+ (0x15D, 'V'),
+ (0x15E, 'M', 'ş'),
+ (0x15F, 'V'),
+ (0x160, 'M', 'š'),
+ (0x161, 'V'),
+ (0x162, 'M', 'ţ'),
+ (0x163, 'V'),
+ (0x164, 'M', 'ť'),
+ (0x165, 'V'),
+ (0x166, 'M', 'ŧ'),
+ (0x167, 'V'),
+ (0x168, 'M', 'ũ'),
+ (0x169, 'V'),
+ (0x16A, 'M', 'ū'),
+ (0x16B, 'V'),
+ (0x16C, 'M', 'ŭ'),
+ (0x16D, 'V'),
+ (0x16E, 'M', 'ů'),
+ (0x16F, 'V'),
+ (0x170, 'M', 'ű'),
+ (0x171, 'V'),
+ (0x172, 'M', 'ų'),
+ (0x173, 'V'),
+ (0x174, 'M', 'ŵ'),
+ (0x175, 'V'),
+ (0x176, 'M', 'ŷ'),
+ (0x177, 'V'),
+ (0x178, 'M', 'ÿ'),
+ (0x179, 'M', 'ź'),
+ (0x17A, 'V'),
+ (0x17B, 'M', 'ż'),
+ (0x17C, 'V'),
+ (0x17D, 'M', 'ž'),
+ (0x17E, 'V'),
+ (0x17F, 'M', 's'),
+ (0x180, 'V'),
+ (0x181, 'M', 'ɓ'),
+ (0x182, 'M', 'ƃ'),
+ (0x183, 'V'),
+ (0x184, 'M', 'ƅ'),
+ (0x185, 'V'),
+ (0x186, 'M', 'ɔ'),
+ (0x187, 'M', 'ƈ'),
+ (0x188, 'V'),
+ (0x189, 'M', 'ɖ'),
+ (0x18A, 'M', 'ɗ'),
+ (0x18B, 'M', 'ƌ'),
+ (0x18C, 'V'),
+ (0x18E, 'M', 'ǝ'),
+ (0x18F, 'M', 'ə'),
+ (0x190, 'M', 'ɛ'),
+ (0x191, 'M', 'ƒ'),
+ (0x192, 'V'),
+ (0x193, 'M', 'ɠ'),
+ ]
+
+def _seg_4() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x194, 'M', 'ɣ'),
+ (0x195, 'V'),
+ (0x196, 'M', 'ɩ'),
+ (0x197, 'M', 'ɨ'),
+ (0x198, 'M', 'ƙ'),
+ (0x199, 'V'),
+ (0x19C, 'M', 'ɯ'),
+ (0x19D, 'M', 'ɲ'),
+ (0x19E, 'V'),
+ (0x19F, 'M', 'ɵ'),
+ (0x1A0, 'M', 'ơ'),
+ (0x1A1, 'V'),
+ (0x1A2, 'M', 'ƣ'),
+ (0x1A3, 'V'),
+ (0x1A4, 'M', 'ƥ'),
+ (0x1A5, 'V'),
+ (0x1A6, 'M', 'ʀ'),
+ (0x1A7, 'M', 'ƨ'),
+ (0x1A8, 'V'),
+ (0x1A9, 'M', 'ʃ'),
+ (0x1AA, 'V'),
+ (0x1AC, 'M', 'ƭ'),
+ (0x1AD, 'V'),
+ (0x1AE, 'M', 'ʈ'),
+ (0x1AF, 'M', 'ư'),
+ (0x1B0, 'V'),
+ (0x1B1, 'M', 'ʊ'),
+ (0x1B2, 'M', 'ʋ'),
+ (0x1B3, 'M', 'ƴ'),
+ (0x1B4, 'V'),
+ (0x1B5, 'M', 'ƶ'),
+ (0x1B6, 'V'),
+ (0x1B7, 'M', 'ʒ'),
+ (0x1B8, 'M', 'ƹ'),
+ (0x1B9, 'V'),
+ (0x1BC, 'M', 'ƽ'),
+ (0x1BD, 'V'),
+ (0x1C4, 'M', 'dž'),
+ (0x1C7, 'M', 'lj'),
+ (0x1CA, 'M', 'nj'),
+ (0x1CD, 'M', 'ǎ'),
+ (0x1CE, 'V'),
+ (0x1CF, 'M', 'ǐ'),
+ (0x1D0, 'V'),
+ (0x1D1, 'M', 'ǒ'),
+ (0x1D2, 'V'),
+ (0x1D3, 'M', 'ǔ'),
+ (0x1D4, 'V'),
+ (0x1D5, 'M', 'ǖ'),
+ (0x1D6, 'V'),
+ (0x1D7, 'M', 'ǘ'),
+ (0x1D8, 'V'),
+ (0x1D9, 'M', 'ǚ'),
+ (0x1DA, 'V'),
+ (0x1DB, 'M', 'ǜ'),
+ (0x1DC, 'V'),
+ (0x1DE, 'M', 'ǟ'),
+ (0x1DF, 'V'),
+ (0x1E0, 'M', 'ǡ'),
+ (0x1E1, 'V'),
+ (0x1E2, 'M', 'ǣ'),
+ (0x1E3, 'V'),
+ (0x1E4, 'M', 'ǥ'),
+ (0x1E5, 'V'),
+ (0x1E6, 'M', 'ǧ'),
+ (0x1E7, 'V'),
+ (0x1E8, 'M', 'ǩ'),
+ (0x1E9, 'V'),
+ (0x1EA, 'M', 'ǫ'),
+ (0x1EB, 'V'),
+ (0x1EC, 'M', 'ǭ'),
+ (0x1ED, 'V'),
+ (0x1EE, 'M', 'ǯ'),
+ (0x1EF, 'V'),
+ (0x1F1, 'M', 'dz'),
+ (0x1F4, 'M', 'ǵ'),
+ (0x1F5, 'V'),
+ (0x1F6, 'M', 'ƕ'),
+ (0x1F7, 'M', 'ƿ'),
+ (0x1F8, 'M', 'ǹ'),
+ (0x1F9, 'V'),
+ (0x1FA, 'M', 'ǻ'),
+ (0x1FB, 'V'),
+ (0x1FC, 'M', 'ǽ'),
+ (0x1FD, 'V'),
+ (0x1FE, 'M', 'ǿ'),
+ (0x1FF, 'V'),
+ (0x200, 'M', 'ȁ'),
+ (0x201, 'V'),
+ (0x202, 'M', 'ȃ'),
+ (0x203, 'V'),
+ (0x204, 'M', 'ȅ'),
+ (0x205, 'V'),
+ (0x206, 'M', 'ȇ'),
+ (0x207, 'V'),
+ (0x208, 'M', 'ȉ'),
+ (0x209, 'V'),
+ (0x20A, 'M', 'ȋ'),
+ (0x20B, 'V'),
+ (0x20C, 'M', 'ȍ'),
+ ]
+
+def _seg_5() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x20D, 'V'),
+ (0x20E, 'M', 'ȏ'),
+ (0x20F, 'V'),
+ (0x210, 'M', 'ȑ'),
+ (0x211, 'V'),
+ (0x212, 'M', 'ȓ'),
+ (0x213, 'V'),
+ (0x214, 'M', 'ȕ'),
+ (0x215, 'V'),
+ (0x216, 'M', 'ȗ'),
+ (0x217, 'V'),
+ (0x218, 'M', 'ș'),
+ (0x219, 'V'),
+ (0x21A, 'M', 'ț'),
+ (0x21B, 'V'),
+ (0x21C, 'M', 'ȝ'),
+ (0x21D, 'V'),
+ (0x21E, 'M', 'ȟ'),
+ (0x21F, 'V'),
+ (0x220, 'M', 'ƞ'),
+ (0x221, 'V'),
+ (0x222, 'M', 'ȣ'),
+ (0x223, 'V'),
+ (0x224, 'M', 'ȥ'),
+ (0x225, 'V'),
+ (0x226, 'M', 'ȧ'),
+ (0x227, 'V'),
+ (0x228, 'M', 'ȩ'),
+ (0x229, 'V'),
+ (0x22A, 'M', 'ȫ'),
+ (0x22B, 'V'),
+ (0x22C, 'M', 'ȭ'),
+ (0x22D, 'V'),
+ (0x22E, 'M', 'ȯ'),
+ (0x22F, 'V'),
+ (0x230, 'M', 'ȱ'),
+ (0x231, 'V'),
+ (0x232, 'M', 'ȳ'),
+ (0x233, 'V'),
+ (0x23A, 'M', 'ⱥ'),
+ (0x23B, 'M', 'ȼ'),
+ (0x23C, 'V'),
+ (0x23D, 'M', 'ƚ'),
+ (0x23E, 'M', 'ⱦ'),
+ (0x23F, 'V'),
+ (0x241, 'M', 'ɂ'),
+ (0x242, 'V'),
+ (0x243, 'M', 'ƀ'),
+ (0x244, 'M', 'ʉ'),
+ (0x245, 'M', 'ʌ'),
+ (0x246, 'M', 'ɇ'),
+ (0x247, 'V'),
+ (0x248, 'M', 'ɉ'),
+ (0x249, 'V'),
+ (0x24A, 'M', 'ɋ'),
+ (0x24B, 'V'),
+ (0x24C, 'M', 'ɍ'),
+ (0x24D, 'V'),
+ (0x24E, 'M', 'ɏ'),
+ (0x24F, 'V'),
+ (0x2B0, 'M', 'h'),
+ (0x2B1, 'M', 'ɦ'),
+ (0x2B2, 'M', 'j'),
+ (0x2B3, 'M', 'r'),
+ (0x2B4, 'M', 'ɹ'),
+ (0x2B5, 'M', 'ɻ'),
+ (0x2B6, 'M', 'ʁ'),
+ (0x2B7, 'M', 'w'),
+ (0x2B8, 'M', 'y'),
+ (0x2B9, 'V'),
+ (0x2D8, '3', ' ̆'),
+ (0x2D9, '3', ' ̇'),
+ (0x2DA, '3', ' ̊'),
+ (0x2DB, '3', ' ̨'),
+ (0x2DC, '3', ' ̃'),
+ (0x2DD, '3', ' ̋'),
+ (0x2DE, 'V'),
+ (0x2E0, 'M', 'ɣ'),
+ (0x2E1, 'M', 'l'),
+ (0x2E2, 'M', 's'),
+ (0x2E3, 'M', 'x'),
+ (0x2E4, 'M', 'ʕ'),
+ (0x2E5, 'V'),
+ (0x340, 'M', '̀'),
+ (0x341, 'M', '́'),
+ (0x342, 'V'),
+ (0x343, 'M', '̓'),
+ (0x344, 'M', '̈́'),
+ (0x345, 'M', 'ι'),
+ (0x346, 'V'),
+ (0x34F, 'I'),
+ (0x350, 'V'),
+ (0x370, 'M', 'ͱ'),
+ (0x371, 'V'),
+ (0x372, 'M', 'ͳ'),
+ (0x373, 'V'),
+ (0x374, 'M', 'ʹ'),
+ (0x375, 'V'),
+ (0x376, 'M', 'ͷ'),
+ (0x377, 'V'),
+ ]
+
+def _seg_6() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x378, 'X'),
+ (0x37A, '3', ' ι'),
+ (0x37B, 'V'),
+ (0x37E, '3', ';'),
+ (0x37F, 'M', 'ϳ'),
+ (0x380, 'X'),
+ (0x384, '3', ' ́'),
+ (0x385, '3', ' ̈́'),
+ (0x386, 'M', 'ά'),
+ (0x387, 'M', '·'),
+ (0x388, 'M', 'έ'),
+ (0x389, 'M', 'ή'),
+ (0x38A, 'M', 'ί'),
+ (0x38B, 'X'),
+ (0x38C, 'M', 'ό'),
+ (0x38D, 'X'),
+ (0x38E, 'M', 'ύ'),
+ (0x38F, 'M', 'ώ'),
+ (0x390, 'V'),
+ (0x391, 'M', 'α'),
+ (0x392, 'M', 'β'),
+ (0x393, 'M', 'γ'),
+ (0x394, 'M', 'δ'),
+ (0x395, 'M', 'ε'),
+ (0x396, 'M', 'ζ'),
+ (0x397, 'M', 'η'),
+ (0x398, 'M', 'θ'),
+ (0x399, 'M', 'ι'),
+ (0x39A, 'M', 'κ'),
+ (0x39B, 'M', 'λ'),
+ (0x39C, 'M', 'μ'),
+ (0x39D, 'M', 'ν'),
+ (0x39E, 'M', 'ξ'),
+ (0x39F, 'M', 'ο'),
+ (0x3A0, 'M', 'π'),
+ (0x3A1, 'M', 'ρ'),
+ (0x3A2, 'X'),
+ (0x3A3, 'M', 'σ'),
+ (0x3A4, 'M', 'τ'),
+ (0x3A5, 'M', 'υ'),
+ (0x3A6, 'M', 'φ'),
+ (0x3A7, 'M', 'χ'),
+ (0x3A8, 'M', 'ψ'),
+ (0x3A9, 'M', 'ω'),
+ (0x3AA, 'M', 'ϊ'),
+ (0x3AB, 'M', 'ϋ'),
+ (0x3AC, 'V'),
+ (0x3C2, 'D', 'σ'),
+ (0x3C3, 'V'),
+ (0x3CF, 'M', 'ϗ'),
+ (0x3D0, 'M', 'β'),
+ (0x3D1, 'M', 'θ'),
+ (0x3D2, 'M', 'υ'),
+ (0x3D3, 'M', 'ύ'),
+ (0x3D4, 'M', 'ϋ'),
+ (0x3D5, 'M', 'φ'),
+ (0x3D6, 'M', 'π'),
+ (0x3D7, 'V'),
+ (0x3D8, 'M', 'ϙ'),
+ (0x3D9, 'V'),
+ (0x3DA, 'M', 'ϛ'),
+ (0x3DB, 'V'),
+ (0x3DC, 'M', 'ϝ'),
+ (0x3DD, 'V'),
+ (0x3DE, 'M', 'ϟ'),
+ (0x3DF, 'V'),
+ (0x3E0, 'M', 'ϡ'),
+ (0x3E1, 'V'),
+ (0x3E2, 'M', 'ϣ'),
+ (0x3E3, 'V'),
+ (0x3E4, 'M', 'ϥ'),
+ (0x3E5, 'V'),
+ (0x3E6, 'M', 'ϧ'),
+ (0x3E7, 'V'),
+ (0x3E8, 'M', 'ϩ'),
+ (0x3E9, 'V'),
+ (0x3EA, 'M', 'ϫ'),
+ (0x3EB, 'V'),
+ (0x3EC, 'M', 'ϭ'),
+ (0x3ED, 'V'),
+ (0x3EE, 'M', 'ϯ'),
+ (0x3EF, 'V'),
+ (0x3F0, 'M', 'κ'),
+ (0x3F1, 'M', 'ρ'),
+ (0x3F2, 'M', 'σ'),
+ (0x3F3, 'V'),
+ (0x3F4, 'M', 'θ'),
+ (0x3F5, 'M', 'ε'),
+ (0x3F6, 'V'),
+ (0x3F7, 'M', 'ϸ'),
+ (0x3F8, 'V'),
+ (0x3F9, 'M', 'σ'),
+ (0x3FA, 'M', 'ϻ'),
+ (0x3FB, 'V'),
+ (0x3FD, 'M', 'ͻ'),
+ (0x3FE, 'M', 'ͼ'),
+ (0x3FF, 'M', 'ͽ'),
+ (0x400, 'M', 'ѐ'),
+ (0x401, 'M', 'ё'),
+ (0x402, 'M', 'ђ'),
+ ]
+
+def _seg_7() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x403, 'M', 'ѓ'),
+ (0x404, 'M', 'є'),
+ (0x405, 'M', 'ѕ'),
+ (0x406, 'M', 'і'),
+ (0x407, 'M', 'ї'),
+ (0x408, 'M', 'ј'),
+ (0x409, 'M', 'љ'),
+ (0x40A, 'M', 'њ'),
+ (0x40B, 'M', 'ћ'),
+ (0x40C, 'M', 'ќ'),
+ (0x40D, 'M', 'ѝ'),
+ (0x40E, 'M', 'ў'),
+ (0x40F, 'M', 'џ'),
+ (0x410, 'M', 'а'),
+ (0x411, 'M', 'б'),
+ (0x412, 'M', 'в'),
+ (0x413, 'M', 'г'),
+ (0x414, 'M', 'д'),
+ (0x415, 'M', 'е'),
+ (0x416, 'M', 'ж'),
+ (0x417, 'M', 'з'),
+ (0x418, 'M', 'и'),
+ (0x419, 'M', 'й'),
+ (0x41A, 'M', 'к'),
+ (0x41B, 'M', 'л'),
+ (0x41C, 'M', 'м'),
+ (0x41D, 'M', 'н'),
+ (0x41E, 'M', 'о'),
+ (0x41F, 'M', 'п'),
+ (0x420, 'M', 'р'),
+ (0x421, 'M', 'с'),
+ (0x422, 'M', 'т'),
+ (0x423, 'M', 'у'),
+ (0x424, 'M', 'ф'),
+ (0x425, 'M', 'х'),
+ (0x426, 'M', 'ц'),
+ (0x427, 'M', 'ч'),
+ (0x428, 'M', 'ш'),
+ (0x429, 'M', 'щ'),
+ (0x42A, 'M', 'ъ'),
+ (0x42B, 'M', 'ы'),
+ (0x42C, 'M', 'ь'),
+ (0x42D, 'M', 'э'),
+ (0x42E, 'M', 'ю'),
+ (0x42F, 'M', 'я'),
+ (0x430, 'V'),
+ (0x460, 'M', 'ѡ'),
+ (0x461, 'V'),
+ (0x462, 'M', 'ѣ'),
+ (0x463, 'V'),
+ (0x464, 'M', 'ѥ'),
+ (0x465, 'V'),
+ (0x466, 'M', 'ѧ'),
+ (0x467, 'V'),
+ (0x468, 'M', 'ѩ'),
+ (0x469, 'V'),
+ (0x46A, 'M', 'ѫ'),
+ (0x46B, 'V'),
+ (0x46C, 'M', 'ѭ'),
+ (0x46D, 'V'),
+ (0x46E, 'M', 'ѯ'),
+ (0x46F, 'V'),
+ (0x470, 'M', 'ѱ'),
+ (0x471, 'V'),
+ (0x472, 'M', 'ѳ'),
+ (0x473, 'V'),
+ (0x474, 'M', 'ѵ'),
+ (0x475, 'V'),
+ (0x476, 'M', 'ѷ'),
+ (0x477, 'V'),
+ (0x478, 'M', 'ѹ'),
+ (0x479, 'V'),
+ (0x47A, 'M', 'ѻ'),
+ (0x47B, 'V'),
+ (0x47C, 'M', 'ѽ'),
+ (0x47D, 'V'),
+ (0x47E, 'M', 'ѿ'),
+ (0x47F, 'V'),
+ (0x480, 'M', 'ҁ'),
+ (0x481, 'V'),
+ (0x48A, 'M', 'ҋ'),
+ (0x48B, 'V'),
+ (0x48C, 'M', 'ҍ'),
+ (0x48D, 'V'),
+ (0x48E, 'M', 'ҏ'),
+ (0x48F, 'V'),
+ (0x490, 'M', 'ґ'),
+ (0x491, 'V'),
+ (0x492, 'M', 'ғ'),
+ (0x493, 'V'),
+ (0x494, 'M', 'ҕ'),
+ (0x495, 'V'),
+ (0x496, 'M', 'җ'),
+ (0x497, 'V'),
+ (0x498, 'M', 'ҙ'),
+ (0x499, 'V'),
+ (0x49A, 'M', 'қ'),
+ (0x49B, 'V'),
+ (0x49C, 'M', 'ҝ'),
+ (0x49D, 'V'),
+ ]
+
+def _seg_8() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x49E, 'M', 'ҟ'),
+ (0x49F, 'V'),
+ (0x4A0, 'M', 'ҡ'),
+ (0x4A1, 'V'),
+ (0x4A2, 'M', 'ң'),
+ (0x4A3, 'V'),
+ (0x4A4, 'M', 'ҥ'),
+ (0x4A5, 'V'),
+ (0x4A6, 'M', 'ҧ'),
+ (0x4A7, 'V'),
+ (0x4A8, 'M', 'ҩ'),
+ (0x4A9, 'V'),
+ (0x4AA, 'M', 'ҫ'),
+ (0x4AB, 'V'),
+ (0x4AC, 'M', 'ҭ'),
+ (0x4AD, 'V'),
+ (0x4AE, 'M', 'ү'),
+ (0x4AF, 'V'),
+ (0x4B0, 'M', 'ұ'),
+ (0x4B1, 'V'),
+ (0x4B2, 'M', 'ҳ'),
+ (0x4B3, 'V'),
+ (0x4B4, 'M', 'ҵ'),
+ (0x4B5, 'V'),
+ (0x4B6, 'M', 'ҷ'),
+ (0x4B7, 'V'),
+ (0x4B8, 'M', 'ҹ'),
+ (0x4B9, 'V'),
+ (0x4BA, 'M', 'һ'),
+ (0x4BB, 'V'),
+ (0x4BC, 'M', 'ҽ'),
+ (0x4BD, 'V'),
+ (0x4BE, 'M', 'ҿ'),
+ (0x4BF, 'V'),
+ (0x4C0, 'X'),
+ (0x4C1, 'M', 'ӂ'),
+ (0x4C2, 'V'),
+ (0x4C3, 'M', 'ӄ'),
+ (0x4C4, 'V'),
+ (0x4C5, 'M', 'ӆ'),
+ (0x4C6, 'V'),
+ (0x4C7, 'M', 'ӈ'),
+ (0x4C8, 'V'),
+ (0x4C9, 'M', 'ӊ'),
+ (0x4CA, 'V'),
+ (0x4CB, 'M', 'ӌ'),
+ (0x4CC, 'V'),
+ (0x4CD, 'M', 'ӎ'),
+ (0x4CE, 'V'),
+ (0x4D0, 'M', 'ӑ'),
+ (0x4D1, 'V'),
+ (0x4D2, 'M', 'ӓ'),
+ (0x4D3, 'V'),
+ (0x4D4, 'M', 'ӕ'),
+ (0x4D5, 'V'),
+ (0x4D6, 'M', 'ӗ'),
+ (0x4D7, 'V'),
+ (0x4D8, 'M', 'ә'),
+ (0x4D9, 'V'),
+ (0x4DA, 'M', 'ӛ'),
+ (0x4DB, 'V'),
+ (0x4DC, 'M', 'ӝ'),
+ (0x4DD, 'V'),
+ (0x4DE, 'M', 'ӟ'),
+ (0x4DF, 'V'),
+ (0x4E0, 'M', 'ӡ'),
+ (0x4E1, 'V'),
+ (0x4E2, 'M', 'ӣ'),
+ (0x4E3, 'V'),
+ (0x4E4, 'M', 'ӥ'),
+ (0x4E5, 'V'),
+ (0x4E6, 'M', 'ӧ'),
+ (0x4E7, 'V'),
+ (0x4E8, 'M', 'ө'),
+ (0x4E9, 'V'),
+ (0x4EA, 'M', 'ӫ'),
+ (0x4EB, 'V'),
+ (0x4EC, 'M', 'ӭ'),
+ (0x4ED, 'V'),
+ (0x4EE, 'M', 'ӯ'),
+ (0x4EF, 'V'),
+ (0x4F0, 'M', 'ӱ'),
+ (0x4F1, 'V'),
+ (0x4F2, 'M', 'ӳ'),
+ (0x4F3, 'V'),
+ (0x4F4, 'M', 'ӵ'),
+ (0x4F5, 'V'),
+ (0x4F6, 'M', 'ӷ'),
+ (0x4F7, 'V'),
+ (0x4F8, 'M', 'ӹ'),
+ (0x4F9, 'V'),
+ (0x4FA, 'M', 'ӻ'),
+ (0x4FB, 'V'),
+ (0x4FC, 'M', 'ӽ'),
+ (0x4FD, 'V'),
+ (0x4FE, 'M', 'ӿ'),
+ (0x4FF, 'V'),
+ (0x500, 'M', 'ԁ'),
+ (0x501, 'V'),
+ (0x502, 'M', 'ԃ'),
+ ]
+
+def _seg_9() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x503, 'V'),
+ (0x504, 'M', 'ԅ'),
+ (0x505, 'V'),
+ (0x506, 'M', 'ԇ'),
+ (0x507, 'V'),
+ (0x508, 'M', 'ԉ'),
+ (0x509, 'V'),
+ (0x50A, 'M', 'ԋ'),
+ (0x50B, 'V'),
+ (0x50C, 'M', 'ԍ'),
+ (0x50D, 'V'),
+ (0x50E, 'M', 'ԏ'),
+ (0x50F, 'V'),
+ (0x510, 'M', 'ԑ'),
+ (0x511, 'V'),
+ (0x512, 'M', 'ԓ'),
+ (0x513, 'V'),
+ (0x514, 'M', 'ԕ'),
+ (0x515, 'V'),
+ (0x516, 'M', 'ԗ'),
+ (0x517, 'V'),
+ (0x518, 'M', 'ԙ'),
+ (0x519, 'V'),
+ (0x51A, 'M', 'ԛ'),
+ (0x51B, 'V'),
+ (0x51C, 'M', 'ԝ'),
+ (0x51D, 'V'),
+ (0x51E, 'M', 'ԟ'),
+ (0x51F, 'V'),
+ (0x520, 'M', 'ԡ'),
+ (0x521, 'V'),
+ (0x522, 'M', 'ԣ'),
+ (0x523, 'V'),
+ (0x524, 'M', 'ԥ'),
+ (0x525, 'V'),
+ (0x526, 'M', 'ԧ'),
+ (0x527, 'V'),
+ (0x528, 'M', 'ԩ'),
+ (0x529, 'V'),
+ (0x52A, 'M', 'ԫ'),
+ (0x52B, 'V'),
+ (0x52C, 'M', 'ԭ'),
+ (0x52D, 'V'),
+ (0x52E, 'M', 'ԯ'),
+ (0x52F, 'V'),
+ (0x530, 'X'),
+ (0x531, 'M', 'ա'),
+ (0x532, 'M', 'բ'),
+ (0x533, 'M', 'գ'),
+ (0x534, 'M', 'դ'),
+ (0x535, 'M', 'ե'),
+ (0x536, 'M', 'զ'),
+ (0x537, 'M', 'է'),
+ (0x538, 'M', 'ը'),
+ (0x539, 'M', 'թ'),
+ (0x53A, 'M', 'ժ'),
+ (0x53B, 'M', 'ի'),
+ (0x53C, 'M', 'լ'),
+ (0x53D, 'M', 'խ'),
+ (0x53E, 'M', 'ծ'),
+ (0x53F, 'M', 'կ'),
+ (0x540, 'M', 'հ'),
+ (0x541, 'M', 'ձ'),
+ (0x542, 'M', 'ղ'),
+ (0x543, 'M', 'ճ'),
+ (0x544, 'M', 'մ'),
+ (0x545, 'M', 'յ'),
+ (0x546, 'M', 'ն'),
+ (0x547, 'M', 'շ'),
+ (0x548, 'M', 'ո'),
+ (0x549, 'M', 'չ'),
+ (0x54A, 'M', 'պ'),
+ (0x54B, 'M', 'ջ'),
+ (0x54C, 'M', 'ռ'),
+ (0x54D, 'M', 'ս'),
+ (0x54E, 'M', 'վ'),
+ (0x54F, 'M', 'տ'),
+ (0x550, 'M', 'ր'),
+ (0x551, 'M', 'ց'),
+ (0x552, 'M', 'ւ'),
+ (0x553, 'M', 'փ'),
+ (0x554, 'M', 'ք'),
+ (0x555, 'M', 'օ'),
+ (0x556, 'M', 'ֆ'),
+ (0x557, 'X'),
+ (0x559, 'V'),
+ (0x587, 'M', 'եւ'),
+ (0x588, 'V'),
+ (0x58B, 'X'),
+ (0x58D, 'V'),
+ (0x590, 'X'),
+ (0x591, 'V'),
+ (0x5C8, 'X'),
+ (0x5D0, 'V'),
+ (0x5EB, 'X'),
+ (0x5EF, 'V'),
+ (0x5F5, 'X'),
+ (0x606, 'V'),
+ (0x61C, 'X'),
+ (0x61D, 'V'),
+ ]
+
+def _seg_10() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x675, 'M', 'اٴ'),
+ (0x676, 'M', 'وٴ'),
+ (0x677, 'M', 'ۇٴ'),
+ (0x678, 'M', 'يٴ'),
+ (0x679, 'V'),
+ (0x6DD, 'X'),
+ (0x6DE, 'V'),
+ (0x70E, 'X'),
+ (0x710, 'V'),
+ (0x74B, 'X'),
+ (0x74D, 'V'),
+ (0x7B2, 'X'),
+ (0x7C0, 'V'),
+ (0x7FB, 'X'),
+ (0x7FD, 'V'),
+ (0x82E, 'X'),
+ (0x830, 'V'),
+ (0x83F, 'X'),
+ (0x840, 'V'),
+ (0x85C, 'X'),
+ (0x85E, 'V'),
+ (0x85F, 'X'),
+ (0x860, 'V'),
+ (0x86B, 'X'),
+ (0x870, 'V'),
+ (0x88F, 'X'),
+ (0x898, 'V'),
+ (0x8E2, 'X'),
+ (0x8E3, 'V'),
+ (0x958, 'M', 'क़'),
+ (0x959, 'M', 'ख़'),
+ (0x95A, 'M', 'ग़'),
+ (0x95B, 'M', 'ज़'),
+ (0x95C, 'M', 'ड़'),
+ (0x95D, 'M', 'ढ़'),
+ (0x95E, 'M', 'फ़'),
+ (0x95F, 'M', 'य़'),
+ (0x960, 'V'),
+ (0x984, 'X'),
+ (0x985, 'V'),
+ (0x98D, 'X'),
+ (0x98F, 'V'),
+ (0x991, 'X'),
+ (0x993, 'V'),
+ (0x9A9, 'X'),
+ (0x9AA, 'V'),
+ (0x9B1, 'X'),
+ (0x9B2, 'V'),
+ (0x9B3, 'X'),
+ (0x9B6, 'V'),
+ (0x9BA, 'X'),
+ (0x9BC, 'V'),
+ (0x9C5, 'X'),
+ (0x9C7, 'V'),
+ (0x9C9, 'X'),
+ (0x9CB, 'V'),
+ (0x9CF, 'X'),
+ (0x9D7, 'V'),
+ (0x9D8, 'X'),
+ (0x9DC, 'M', 'ড়'),
+ (0x9DD, 'M', 'ঢ়'),
+ (0x9DE, 'X'),
+ (0x9DF, 'M', 'য়'),
+ (0x9E0, 'V'),
+ (0x9E4, 'X'),
+ (0x9E6, 'V'),
+ (0x9FF, 'X'),
+ (0xA01, 'V'),
+ (0xA04, 'X'),
+ (0xA05, 'V'),
+ (0xA0B, 'X'),
+ (0xA0F, 'V'),
+ (0xA11, 'X'),
+ (0xA13, 'V'),
+ (0xA29, 'X'),
+ (0xA2A, 'V'),
+ (0xA31, 'X'),
+ (0xA32, 'V'),
+ (0xA33, 'M', 'ਲ਼'),
+ (0xA34, 'X'),
+ (0xA35, 'V'),
+ (0xA36, 'M', 'ਸ਼'),
+ (0xA37, 'X'),
+ (0xA38, 'V'),
+ (0xA3A, 'X'),
+ (0xA3C, 'V'),
+ (0xA3D, 'X'),
+ (0xA3E, 'V'),
+ (0xA43, 'X'),
+ (0xA47, 'V'),
+ (0xA49, 'X'),
+ (0xA4B, 'V'),
+ (0xA4E, 'X'),
+ (0xA51, 'V'),
+ (0xA52, 'X'),
+ (0xA59, 'M', 'ਖ਼'),
+ (0xA5A, 'M', 'ਗ਼'),
+ (0xA5B, 'M', 'ਜ਼'),
+ (0xA5C, 'V'),
+ (0xA5D, 'X'),
+ ]
+
+def _seg_11() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xA5E, 'M', 'ਫ਼'),
+ (0xA5F, 'X'),
+ (0xA66, 'V'),
+ (0xA77, 'X'),
+ (0xA81, 'V'),
+ (0xA84, 'X'),
+ (0xA85, 'V'),
+ (0xA8E, 'X'),
+ (0xA8F, 'V'),
+ (0xA92, 'X'),
+ (0xA93, 'V'),
+ (0xAA9, 'X'),
+ (0xAAA, 'V'),
+ (0xAB1, 'X'),
+ (0xAB2, 'V'),
+ (0xAB4, 'X'),
+ (0xAB5, 'V'),
+ (0xABA, 'X'),
+ (0xABC, 'V'),
+ (0xAC6, 'X'),
+ (0xAC7, 'V'),
+ (0xACA, 'X'),
+ (0xACB, 'V'),
+ (0xACE, 'X'),
+ (0xAD0, 'V'),
+ (0xAD1, 'X'),
+ (0xAE0, 'V'),
+ (0xAE4, 'X'),
+ (0xAE6, 'V'),
+ (0xAF2, 'X'),
+ (0xAF9, 'V'),
+ (0xB00, 'X'),
+ (0xB01, 'V'),
+ (0xB04, 'X'),
+ (0xB05, 'V'),
+ (0xB0D, 'X'),
+ (0xB0F, 'V'),
+ (0xB11, 'X'),
+ (0xB13, 'V'),
+ (0xB29, 'X'),
+ (0xB2A, 'V'),
+ (0xB31, 'X'),
+ (0xB32, 'V'),
+ (0xB34, 'X'),
+ (0xB35, 'V'),
+ (0xB3A, 'X'),
+ (0xB3C, 'V'),
+ (0xB45, 'X'),
+ (0xB47, 'V'),
+ (0xB49, 'X'),
+ (0xB4B, 'V'),
+ (0xB4E, 'X'),
+ (0xB55, 'V'),
+ (0xB58, 'X'),
+ (0xB5C, 'M', 'ଡ଼'),
+ (0xB5D, 'M', 'ଢ଼'),
+ (0xB5E, 'X'),
+ (0xB5F, 'V'),
+ (0xB64, 'X'),
+ (0xB66, 'V'),
+ (0xB78, 'X'),
+ (0xB82, 'V'),
+ (0xB84, 'X'),
+ (0xB85, 'V'),
+ (0xB8B, 'X'),
+ (0xB8E, 'V'),
+ (0xB91, 'X'),
+ (0xB92, 'V'),
+ (0xB96, 'X'),
+ (0xB99, 'V'),
+ (0xB9B, 'X'),
+ (0xB9C, 'V'),
+ (0xB9D, 'X'),
+ (0xB9E, 'V'),
+ (0xBA0, 'X'),
+ (0xBA3, 'V'),
+ (0xBA5, 'X'),
+ (0xBA8, 'V'),
+ (0xBAB, 'X'),
+ (0xBAE, 'V'),
+ (0xBBA, 'X'),
+ (0xBBE, 'V'),
+ (0xBC3, 'X'),
+ (0xBC6, 'V'),
+ (0xBC9, 'X'),
+ (0xBCA, 'V'),
+ (0xBCE, 'X'),
+ (0xBD0, 'V'),
+ (0xBD1, 'X'),
+ (0xBD7, 'V'),
+ (0xBD8, 'X'),
+ (0xBE6, 'V'),
+ (0xBFB, 'X'),
+ (0xC00, 'V'),
+ (0xC0D, 'X'),
+ (0xC0E, 'V'),
+ (0xC11, 'X'),
+ (0xC12, 'V'),
+ (0xC29, 'X'),
+ (0xC2A, 'V'),
+ ]
+
+def _seg_12() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xC3A, 'X'),
+ (0xC3C, 'V'),
+ (0xC45, 'X'),
+ (0xC46, 'V'),
+ (0xC49, 'X'),
+ (0xC4A, 'V'),
+ (0xC4E, 'X'),
+ (0xC55, 'V'),
+ (0xC57, 'X'),
+ (0xC58, 'V'),
+ (0xC5B, 'X'),
+ (0xC5D, 'V'),
+ (0xC5E, 'X'),
+ (0xC60, 'V'),
+ (0xC64, 'X'),
+ (0xC66, 'V'),
+ (0xC70, 'X'),
+ (0xC77, 'V'),
+ (0xC8D, 'X'),
+ (0xC8E, 'V'),
+ (0xC91, 'X'),
+ (0xC92, 'V'),
+ (0xCA9, 'X'),
+ (0xCAA, 'V'),
+ (0xCB4, 'X'),
+ (0xCB5, 'V'),
+ (0xCBA, 'X'),
+ (0xCBC, 'V'),
+ (0xCC5, 'X'),
+ (0xCC6, 'V'),
+ (0xCC9, 'X'),
+ (0xCCA, 'V'),
+ (0xCCE, 'X'),
+ (0xCD5, 'V'),
+ (0xCD7, 'X'),
+ (0xCDD, 'V'),
+ (0xCDF, 'X'),
+ (0xCE0, 'V'),
+ (0xCE4, 'X'),
+ (0xCE6, 'V'),
+ (0xCF0, 'X'),
+ (0xCF1, 'V'),
+ (0xCF4, 'X'),
+ (0xD00, 'V'),
+ (0xD0D, 'X'),
+ (0xD0E, 'V'),
+ (0xD11, 'X'),
+ (0xD12, 'V'),
+ (0xD45, 'X'),
+ (0xD46, 'V'),
+ (0xD49, 'X'),
+ (0xD4A, 'V'),
+ (0xD50, 'X'),
+ (0xD54, 'V'),
+ (0xD64, 'X'),
+ (0xD66, 'V'),
+ (0xD80, 'X'),
+ (0xD81, 'V'),
+ (0xD84, 'X'),
+ (0xD85, 'V'),
+ (0xD97, 'X'),
+ (0xD9A, 'V'),
+ (0xDB2, 'X'),
+ (0xDB3, 'V'),
+ (0xDBC, 'X'),
+ (0xDBD, 'V'),
+ (0xDBE, 'X'),
+ (0xDC0, 'V'),
+ (0xDC7, 'X'),
+ (0xDCA, 'V'),
+ (0xDCB, 'X'),
+ (0xDCF, 'V'),
+ (0xDD5, 'X'),
+ (0xDD6, 'V'),
+ (0xDD7, 'X'),
+ (0xDD8, 'V'),
+ (0xDE0, 'X'),
+ (0xDE6, 'V'),
+ (0xDF0, 'X'),
+ (0xDF2, 'V'),
+ (0xDF5, 'X'),
+ (0xE01, 'V'),
+ (0xE33, 'M', 'ํา'),
+ (0xE34, 'V'),
+ (0xE3B, 'X'),
+ (0xE3F, 'V'),
+ (0xE5C, 'X'),
+ (0xE81, 'V'),
+ (0xE83, 'X'),
+ (0xE84, 'V'),
+ (0xE85, 'X'),
+ (0xE86, 'V'),
+ (0xE8B, 'X'),
+ (0xE8C, 'V'),
+ (0xEA4, 'X'),
+ (0xEA5, 'V'),
+ (0xEA6, 'X'),
+ (0xEA7, 'V'),
+ (0xEB3, 'M', 'ໍາ'),
+ (0xEB4, 'V'),
+ ]
+
+def _seg_13() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xEBE, 'X'),
+ (0xEC0, 'V'),
+ (0xEC5, 'X'),
+ (0xEC6, 'V'),
+ (0xEC7, 'X'),
+ (0xEC8, 'V'),
+ (0xECF, 'X'),
+ (0xED0, 'V'),
+ (0xEDA, 'X'),
+ (0xEDC, 'M', 'ຫນ'),
+ (0xEDD, 'M', 'ຫມ'),
+ (0xEDE, 'V'),
+ (0xEE0, 'X'),
+ (0xF00, 'V'),
+ (0xF0C, 'M', '་'),
+ (0xF0D, 'V'),
+ (0xF43, 'M', 'གྷ'),
+ (0xF44, 'V'),
+ (0xF48, 'X'),
+ (0xF49, 'V'),
+ (0xF4D, 'M', 'ཌྷ'),
+ (0xF4E, 'V'),
+ (0xF52, 'M', 'དྷ'),
+ (0xF53, 'V'),
+ (0xF57, 'M', 'བྷ'),
+ (0xF58, 'V'),
+ (0xF5C, 'M', 'ཛྷ'),
+ (0xF5D, 'V'),
+ (0xF69, 'M', 'ཀྵ'),
+ (0xF6A, 'V'),
+ (0xF6D, 'X'),
+ (0xF71, 'V'),
+ (0xF73, 'M', 'ཱི'),
+ (0xF74, 'V'),
+ (0xF75, 'M', 'ཱུ'),
+ (0xF76, 'M', 'ྲྀ'),
+ (0xF77, 'M', 'ྲཱྀ'),
+ (0xF78, 'M', 'ླྀ'),
+ (0xF79, 'M', 'ླཱྀ'),
+ (0xF7A, 'V'),
+ (0xF81, 'M', 'ཱྀ'),
+ (0xF82, 'V'),
+ (0xF93, 'M', 'ྒྷ'),
+ (0xF94, 'V'),
+ (0xF98, 'X'),
+ (0xF99, 'V'),
+ (0xF9D, 'M', 'ྜྷ'),
+ (0xF9E, 'V'),
+ (0xFA2, 'M', 'ྡྷ'),
+ (0xFA3, 'V'),
+ (0xFA7, 'M', 'ྦྷ'),
+ (0xFA8, 'V'),
+ (0xFAC, 'M', 'ྫྷ'),
+ (0xFAD, 'V'),
+ (0xFB9, 'M', 'ྐྵ'),
+ (0xFBA, 'V'),
+ (0xFBD, 'X'),
+ (0xFBE, 'V'),
+ (0xFCD, 'X'),
+ (0xFCE, 'V'),
+ (0xFDB, 'X'),
+ (0x1000, 'V'),
+ (0x10A0, 'X'),
+ (0x10C7, 'M', 'ⴧ'),
+ (0x10C8, 'X'),
+ (0x10CD, 'M', 'ⴭ'),
+ (0x10CE, 'X'),
+ (0x10D0, 'V'),
+ (0x10FC, 'M', 'ნ'),
+ (0x10FD, 'V'),
+ (0x115F, 'X'),
+ (0x1161, 'V'),
+ (0x1249, 'X'),
+ (0x124A, 'V'),
+ (0x124E, 'X'),
+ (0x1250, 'V'),
+ (0x1257, 'X'),
+ (0x1258, 'V'),
+ (0x1259, 'X'),
+ (0x125A, 'V'),
+ (0x125E, 'X'),
+ (0x1260, 'V'),
+ (0x1289, 'X'),
+ (0x128A, 'V'),
+ (0x128E, 'X'),
+ (0x1290, 'V'),
+ (0x12B1, 'X'),
+ (0x12B2, 'V'),
+ (0x12B6, 'X'),
+ (0x12B8, 'V'),
+ (0x12BF, 'X'),
+ (0x12C0, 'V'),
+ (0x12C1, 'X'),
+ (0x12C2, 'V'),
+ (0x12C6, 'X'),
+ (0x12C8, 'V'),
+ (0x12D7, 'X'),
+ (0x12D8, 'V'),
+ (0x1311, 'X'),
+ (0x1312, 'V'),
+ ]
+
+def _seg_14() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1316, 'X'),
+ (0x1318, 'V'),
+ (0x135B, 'X'),
+ (0x135D, 'V'),
+ (0x137D, 'X'),
+ (0x1380, 'V'),
+ (0x139A, 'X'),
+ (0x13A0, 'V'),
+ (0x13F6, 'X'),
+ (0x13F8, 'M', 'Ᏸ'),
+ (0x13F9, 'M', 'Ᏹ'),
+ (0x13FA, 'M', 'Ᏺ'),
+ (0x13FB, 'M', 'Ᏻ'),
+ (0x13FC, 'M', 'Ᏼ'),
+ (0x13FD, 'M', 'Ᏽ'),
+ (0x13FE, 'X'),
+ (0x1400, 'V'),
+ (0x1680, 'X'),
+ (0x1681, 'V'),
+ (0x169D, 'X'),
+ (0x16A0, 'V'),
+ (0x16F9, 'X'),
+ (0x1700, 'V'),
+ (0x1716, 'X'),
+ (0x171F, 'V'),
+ (0x1737, 'X'),
+ (0x1740, 'V'),
+ (0x1754, 'X'),
+ (0x1760, 'V'),
+ (0x176D, 'X'),
+ (0x176E, 'V'),
+ (0x1771, 'X'),
+ (0x1772, 'V'),
+ (0x1774, 'X'),
+ (0x1780, 'V'),
+ (0x17B4, 'X'),
+ (0x17B6, 'V'),
+ (0x17DE, 'X'),
+ (0x17E0, 'V'),
+ (0x17EA, 'X'),
+ (0x17F0, 'V'),
+ (0x17FA, 'X'),
+ (0x1800, 'V'),
+ (0x1806, 'X'),
+ (0x1807, 'V'),
+ (0x180B, 'I'),
+ (0x180E, 'X'),
+ (0x180F, 'I'),
+ (0x1810, 'V'),
+ (0x181A, 'X'),
+ (0x1820, 'V'),
+ (0x1879, 'X'),
+ (0x1880, 'V'),
+ (0x18AB, 'X'),
+ (0x18B0, 'V'),
+ (0x18F6, 'X'),
+ (0x1900, 'V'),
+ (0x191F, 'X'),
+ (0x1920, 'V'),
+ (0x192C, 'X'),
+ (0x1930, 'V'),
+ (0x193C, 'X'),
+ (0x1940, 'V'),
+ (0x1941, 'X'),
+ (0x1944, 'V'),
+ (0x196E, 'X'),
+ (0x1970, 'V'),
+ (0x1975, 'X'),
+ (0x1980, 'V'),
+ (0x19AC, 'X'),
+ (0x19B0, 'V'),
+ (0x19CA, 'X'),
+ (0x19D0, 'V'),
+ (0x19DB, 'X'),
+ (0x19DE, 'V'),
+ (0x1A1C, 'X'),
+ (0x1A1E, 'V'),
+ (0x1A5F, 'X'),
+ (0x1A60, 'V'),
+ (0x1A7D, 'X'),
+ (0x1A7F, 'V'),
+ (0x1A8A, 'X'),
+ (0x1A90, 'V'),
+ (0x1A9A, 'X'),
+ (0x1AA0, 'V'),
+ (0x1AAE, 'X'),
+ (0x1AB0, 'V'),
+ (0x1ACF, 'X'),
+ (0x1B00, 'V'),
+ (0x1B4D, 'X'),
+ (0x1B50, 'V'),
+ (0x1B7F, 'X'),
+ (0x1B80, 'V'),
+ (0x1BF4, 'X'),
+ (0x1BFC, 'V'),
+ (0x1C38, 'X'),
+ (0x1C3B, 'V'),
+ (0x1C4A, 'X'),
+ (0x1C4D, 'V'),
+ (0x1C80, 'M', 'в'),
+ ]
+
+def _seg_15() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1C81, 'M', 'д'),
+ (0x1C82, 'M', 'о'),
+ (0x1C83, 'M', 'с'),
+ (0x1C84, 'M', 'т'),
+ (0x1C86, 'M', 'ъ'),
+ (0x1C87, 'M', 'ѣ'),
+ (0x1C88, 'M', 'ꙋ'),
+ (0x1C89, 'X'),
+ (0x1C90, 'M', 'ა'),
+ (0x1C91, 'M', 'ბ'),
+ (0x1C92, 'M', 'გ'),
+ (0x1C93, 'M', 'დ'),
+ (0x1C94, 'M', 'ე'),
+ (0x1C95, 'M', 'ვ'),
+ (0x1C96, 'M', 'ზ'),
+ (0x1C97, 'M', 'თ'),
+ (0x1C98, 'M', 'ი'),
+ (0x1C99, 'M', 'კ'),
+ (0x1C9A, 'M', 'ლ'),
+ (0x1C9B, 'M', 'მ'),
+ (0x1C9C, 'M', 'ნ'),
+ (0x1C9D, 'M', 'ო'),
+ (0x1C9E, 'M', 'პ'),
+ (0x1C9F, 'M', 'ჟ'),
+ (0x1CA0, 'M', 'რ'),
+ (0x1CA1, 'M', 'ს'),
+ (0x1CA2, 'M', 'ტ'),
+ (0x1CA3, 'M', 'უ'),
+ (0x1CA4, 'M', 'ფ'),
+ (0x1CA5, 'M', 'ქ'),
+ (0x1CA6, 'M', 'ღ'),
+ (0x1CA7, 'M', 'ყ'),
+ (0x1CA8, 'M', 'შ'),
+ (0x1CA9, 'M', 'ჩ'),
+ (0x1CAA, 'M', 'ც'),
+ (0x1CAB, 'M', 'ძ'),
+ (0x1CAC, 'M', 'წ'),
+ (0x1CAD, 'M', 'ჭ'),
+ (0x1CAE, 'M', 'ხ'),
+ (0x1CAF, 'M', 'ჯ'),
+ (0x1CB0, 'M', 'ჰ'),
+ (0x1CB1, 'M', 'ჱ'),
+ (0x1CB2, 'M', 'ჲ'),
+ (0x1CB3, 'M', 'ჳ'),
+ (0x1CB4, 'M', 'ჴ'),
+ (0x1CB5, 'M', 'ჵ'),
+ (0x1CB6, 'M', 'ჶ'),
+ (0x1CB7, 'M', 'ჷ'),
+ (0x1CB8, 'M', 'ჸ'),
+ (0x1CB9, 'M', 'ჹ'),
+ (0x1CBA, 'M', 'ჺ'),
+ (0x1CBB, 'X'),
+ (0x1CBD, 'M', 'ჽ'),
+ (0x1CBE, 'M', 'ჾ'),
+ (0x1CBF, 'M', 'ჿ'),
+ (0x1CC0, 'V'),
+ (0x1CC8, 'X'),
+ (0x1CD0, 'V'),
+ (0x1CFB, 'X'),
+ (0x1D00, 'V'),
+ (0x1D2C, 'M', 'a'),
+ (0x1D2D, 'M', 'æ'),
+ (0x1D2E, 'M', 'b'),
+ (0x1D2F, 'V'),
+ (0x1D30, 'M', 'd'),
+ (0x1D31, 'M', 'e'),
+ (0x1D32, 'M', 'ǝ'),
+ (0x1D33, 'M', 'g'),
+ (0x1D34, 'M', 'h'),
+ (0x1D35, 'M', 'i'),
+ (0x1D36, 'M', 'j'),
+ (0x1D37, 'M', 'k'),
+ (0x1D38, 'M', 'l'),
+ (0x1D39, 'M', 'm'),
+ (0x1D3A, 'M', 'n'),
+ (0x1D3B, 'V'),
+ (0x1D3C, 'M', 'o'),
+ (0x1D3D, 'M', 'ȣ'),
+ (0x1D3E, 'M', 'p'),
+ (0x1D3F, 'M', 'r'),
+ (0x1D40, 'M', 't'),
+ (0x1D41, 'M', 'u'),
+ (0x1D42, 'M', 'w'),
+ (0x1D43, 'M', 'a'),
+ (0x1D44, 'M', 'ɐ'),
+ (0x1D45, 'M', 'ɑ'),
+ (0x1D46, 'M', 'ᴂ'),
+ (0x1D47, 'M', 'b'),
+ (0x1D48, 'M', 'd'),
+ (0x1D49, 'M', 'e'),
+ (0x1D4A, 'M', 'ə'),
+ (0x1D4B, 'M', 'ɛ'),
+ (0x1D4C, 'M', 'ɜ'),
+ (0x1D4D, 'M', 'g'),
+ (0x1D4E, 'V'),
+ (0x1D4F, 'M', 'k'),
+ (0x1D50, 'M', 'm'),
+ (0x1D51, 'M', 'ŋ'),
+ (0x1D52, 'M', 'o'),
+ (0x1D53, 'M', 'ɔ'),
+ ]
+
+def _seg_16() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1D54, 'M', 'ᴖ'),
+ (0x1D55, 'M', 'ᴗ'),
+ (0x1D56, 'M', 'p'),
+ (0x1D57, 'M', 't'),
+ (0x1D58, 'M', 'u'),
+ (0x1D59, 'M', 'ᴝ'),
+ (0x1D5A, 'M', 'ɯ'),
+ (0x1D5B, 'M', 'v'),
+ (0x1D5C, 'M', 'ᴥ'),
+ (0x1D5D, 'M', 'β'),
+ (0x1D5E, 'M', 'γ'),
+ (0x1D5F, 'M', 'δ'),
+ (0x1D60, 'M', 'φ'),
+ (0x1D61, 'M', 'χ'),
+ (0x1D62, 'M', 'i'),
+ (0x1D63, 'M', 'r'),
+ (0x1D64, 'M', 'u'),
+ (0x1D65, 'M', 'v'),
+ (0x1D66, 'M', 'β'),
+ (0x1D67, 'M', 'γ'),
+ (0x1D68, 'M', 'ρ'),
+ (0x1D69, 'M', 'φ'),
+ (0x1D6A, 'M', 'χ'),
+ (0x1D6B, 'V'),
+ (0x1D78, 'M', 'н'),
+ (0x1D79, 'V'),
+ (0x1D9B, 'M', 'ɒ'),
+ (0x1D9C, 'M', 'c'),
+ (0x1D9D, 'M', 'ɕ'),
+ (0x1D9E, 'M', 'ð'),
+ (0x1D9F, 'M', 'ɜ'),
+ (0x1DA0, 'M', 'f'),
+ (0x1DA1, 'M', 'ɟ'),
+ (0x1DA2, 'M', 'ɡ'),
+ (0x1DA3, 'M', 'ɥ'),
+ (0x1DA4, 'M', 'ɨ'),
+ (0x1DA5, 'M', 'ɩ'),
+ (0x1DA6, 'M', 'ɪ'),
+ (0x1DA7, 'M', 'ᵻ'),
+ (0x1DA8, 'M', 'ʝ'),
+ (0x1DA9, 'M', 'ɭ'),
+ (0x1DAA, 'M', 'ᶅ'),
+ (0x1DAB, 'M', 'ʟ'),
+ (0x1DAC, 'M', 'ɱ'),
+ (0x1DAD, 'M', 'ɰ'),
+ (0x1DAE, 'M', 'ɲ'),
+ (0x1DAF, 'M', 'ɳ'),
+ (0x1DB0, 'M', 'ɴ'),
+ (0x1DB1, 'M', 'ɵ'),
+ (0x1DB2, 'M', 'ɸ'),
+ (0x1DB3, 'M', 'ʂ'),
+ (0x1DB4, 'M', 'ʃ'),
+ (0x1DB5, 'M', 'ƫ'),
+ (0x1DB6, 'M', 'ʉ'),
+ (0x1DB7, 'M', 'ʊ'),
+ (0x1DB8, 'M', 'ᴜ'),
+ (0x1DB9, 'M', 'ʋ'),
+ (0x1DBA, 'M', 'ʌ'),
+ (0x1DBB, 'M', 'z'),
+ (0x1DBC, 'M', 'ʐ'),
+ (0x1DBD, 'M', 'ʑ'),
+ (0x1DBE, 'M', 'ʒ'),
+ (0x1DBF, 'M', 'θ'),
+ (0x1DC0, 'V'),
+ (0x1E00, 'M', 'ḁ'),
+ (0x1E01, 'V'),
+ (0x1E02, 'M', 'ḃ'),
+ (0x1E03, 'V'),
+ (0x1E04, 'M', 'ḅ'),
+ (0x1E05, 'V'),
+ (0x1E06, 'M', 'ḇ'),
+ (0x1E07, 'V'),
+ (0x1E08, 'M', 'ḉ'),
+ (0x1E09, 'V'),
+ (0x1E0A, 'M', 'ḋ'),
+ (0x1E0B, 'V'),
+ (0x1E0C, 'M', 'ḍ'),
+ (0x1E0D, 'V'),
+ (0x1E0E, 'M', 'ḏ'),
+ (0x1E0F, 'V'),
+ (0x1E10, 'M', 'ḑ'),
+ (0x1E11, 'V'),
+ (0x1E12, 'M', 'ḓ'),
+ (0x1E13, 'V'),
+ (0x1E14, 'M', 'ḕ'),
+ (0x1E15, 'V'),
+ (0x1E16, 'M', 'ḗ'),
+ (0x1E17, 'V'),
+ (0x1E18, 'M', 'ḙ'),
+ (0x1E19, 'V'),
+ (0x1E1A, 'M', 'ḛ'),
+ (0x1E1B, 'V'),
+ (0x1E1C, 'M', 'ḝ'),
+ (0x1E1D, 'V'),
+ (0x1E1E, 'M', 'ḟ'),
+ (0x1E1F, 'V'),
+ (0x1E20, 'M', 'ḡ'),
+ (0x1E21, 'V'),
+ (0x1E22, 'M', 'ḣ'),
+ (0x1E23, 'V'),
+ ]
+
+def _seg_17() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1E24, 'M', 'ḥ'),
+ (0x1E25, 'V'),
+ (0x1E26, 'M', 'ḧ'),
+ (0x1E27, 'V'),
+ (0x1E28, 'M', 'ḩ'),
+ (0x1E29, 'V'),
+ (0x1E2A, 'M', 'ḫ'),
+ (0x1E2B, 'V'),
+ (0x1E2C, 'M', 'ḭ'),
+ (0x1E2D, 'V'),
+ (0x1E2E, 'M', 'ḯ'),
+ (0x1E2F, 'V'),
+ (0x1E30, 'M', 'ḱ'),
+ (0x1E31, 'V'),
+ (0x1E32, 'M', 'ḳ'),
+ (0x1E33, 'V'),
+ (0x1E34, 'M', 'ḵ'),
+ (0x1E35, 'V'),
+ (0x1E36, 'M', 'ḷ'),
+ (0x1E37, 'V'),
+ (0x1E38, 'M', 'ḹ'),
+ (0x1E39, 'V'),
+ (0x1E3A, 'M', 'ḻ'),
+ (0x1E3B, 'V'),
+ (0x1E3C, 'M', 'ḽ'),
+ (0x1E3D, 'V'),
+ (0x1E3E, 'M', 'ḿ'),
+ (0x1E3F, 'V'),
+ (0x1E40, 'M', 'ṁ'),
+ (0x1E41, 'V'),
+ (0x1E42, 'M', 'ṃ'),
+ (0x1E43, 'V'),
+ (0x1E44, 'M', 'ṅ'),
+ (0x1E45, 'V'),
+ (0x1E46, 'M', 'ṇ'),
+ (0x1E47, 'V'),
+ (0x1E48, 'M', 'ṉ'),
+ (0x1E49, 'V'),
+ (0x1E4A, 'M', 'ṋ'),
+ (0x1E4B, 'V'),
+ (0x1E4C, 'M', 'ṍ'),
+ (0x1E4D, 'V'),
+ (0x1E4E, 'M', 'ṏ'),
+ (0x1E4F, 'V'),
+ (0x1E50, 'M', 'ṑ'),
+ (0x1E51, 'V'),
+ (0x1E52, 'M', 'ṓ'),
+ (0x1E53, 'V'),
+ (0x1E54, 'M', 'ṕ'),
+ (0x1E55, 'V'),
+ (0x1E56, 'M', 'ṗ'),
+ (0x1E57, 'V'),
+ (0x1E58, 'M', 'ṙ'),
+ (0x1E59, 'V'),
+ (0x1E5A, 'M', 'ṛ'),
+ (0x1E5B, 'V'),
+ (0x1E5C, 'M', 'ṝ'),
+ (0x1E5D, 'V'),
+ (0x1E5E, 'M', 'ṟ'),
+ (0x1E5F, 'V'),
+ (0x1E60, 'M', 'ṡ'),
+ (0x1E61, 'V'),
+ (0x1E62, 'M', 'ṣ'),
+ (0x1E63, 'V'),
+ (0x1E64, 'M', 'ṥ'),
+ (0x1E65, 'V'),
+ (0x1E66, 'M', 'ṧ'),
+ (0x1E67, 'V'),
+ (0x1E68, 'M', 'ṩ'),
+ (0x1E69, 'V'),
+ (0x1E6A, 'M', 'ṫ'),
+ (0x1E6B, 'V'),
+ (0x1E6C, 'M', 'ṭ'),
+ (0x1E6D, 'V'),
+ (0x1E6E, 'M', 'ṯ'),
+ (0x1E6F, 'V'),
+ (0x1E70, 'M', 'ṱ'),
+ (0x1E71, 'V'),
+ (0x1E72, 'M', 'ṳ'),
+ (0x1E73, 'V'),
+ (0x1E74, 'M', 'ṵ'),
+ (0x1E75, 'V'),
+ (0x1E76, 'M', 'ṷ'),
+ (0x1E77, 'V'),
+ (0x1E78, 'M', 'ṹ'),
+ (0x1E79, 'V'),
+ (0x1E7A, 'M', 'ṻ'),
+ (0x1E7B, 'V'),
+ (0x1E7C, 'M', 'ṽ'),
+ (0x1E7D, 'V'),
+ (0x1E7E, 'M', 'ṿ'),
+ (0x1E7F, 'V'),
+ (0x1E80, 'M', 'ẁ'),
+ (0x1E81, 'V'),
+ (0x1E82, 'M', 'ẃ'),
+ (0x1E83, 'V'),
+ (0x1E84, 'M', 'ẅ'),
+ (0x1E85, 'V'),
+ (0x1E86, 'M', 'ẇ'),
+ (0x1E87, 'V'),
+ ]
+
+def _seg_18() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1E88, 'M', 'ẉ'),
+ (0x1E89, 'V'),
+ (0x1E8A, 'M', 'ẋ'),
+ (0x1E8B, 'V'),
+ (0x1E8C, 'M', 'ẍ'),
+ (0x1E8D, 'V'),
+ (0x1E8E, 'M', 'ẏ'),
+ (0x1E8F, 'V'),
+ (0x1E90, 'M', 'ẑ'),
+ (0x1E91, 'V'),
+ (0x1E92, 'M', 'ẓ'),
+ (0x1E93, 'V'),
+ (0x1E94, 'M', 'ẕ'),
+ (0x1E95, 'V'),
+ (0x1E9A, 'M', 'aʾ'),
+ (0x1E9B, 'M', 'ṡ'),
+ (0x1E9C, 'V'),
+ (0x1E9E, 'M', 'ss'),
+ (0x1E9F, 'V'),
+ (0x1EA0, 'M', 'ạ'),
+ (0x1EA1, 'V'),
+ (0x1EA2, 'M', 'ả'),
+ (0x1EA3, 'V'),
+ (0x1EA4, 'M', 'ấ'),
+ (0x1EA5, 'V'),
+ (0x1EA6, 'M', 'ầ'),
+ (0x1EA7, 'V'),
+ (0x1EA8, 'M', 'ẩ'),
+ (0x1EA9, 'V'),
+ (0x1EAA, 'M', 'ẫ'),
+ (0x1EAB, 'V'),
+ (0x1EAC, 'M', 'ậ'),
+ (0x1EAD, 'V'),
+ (0x1EAE, 'M', 'ắ'),
+ (0x1EAF, 'V'),
+ (0x1EB0, 'M', 'ằ'),
+ (0x1EB1, 'V'),
+ (0x1EB2, 'M', 'ẳ'),
+ (0x1EB3, 'V'),
+ (0x1EB4, 'M', 'ẵ'),
+ (0x1EB5, 'V'),
+ (0x1EB6, 'M', 'ặ'),
+ (0x1EB7, 'V'),
+ (0x1EB8, 'M', 'ẹ'),
+ (0x1EB9, 'V'),
+ (0x1EBA, 'M', 'ẻ'),
+ (0x1EBB, 'V'),
+ (0x1EBC, 'M', 'ẽ'),
+ (0x1EBD, 'V'),
+ (0x1EBE, 'M', 'ế'),
+ (0x1EBF, 'V'),
+ (0x1EC0, 'M', 'ề'),
+ (0x1EC1, 'V'),
+ (0x1EC2, 'M', 'ể'),
+ (0x1EC3, 'V'),
+ (0x1EC4, 'M', 'ễ'),
+ (0x1EC5, 'V'),
+ (0x1EC6, 'M', 'ệ'),
+ (0x1EC7, 'V'),
+ (0x1EC8, 'M', 'ỉ'),
+ (0x1EC9, 'V'),
+ (0x1ECA, 'M', 'ị'),
+ (0x1ECB, 'V'),
+ (0x1ECC, 'M', 'ọ'),
+ (0x1ECD, 'V'),
+ (0x1ECE, 'M', 'ỏ'),
+ (0x1ECF, 'V'),
+ (0x1ED0, 'M', 'ố'),
+ (0x1ED1, 'V'),
+ (0x1ED2, 'M', 'ồ'),
+ (0x1ED3, 'V'),
+ (0x1ED4, 'M', 'ổ'),
+ (0x1ED5, 'V'),
+ (0x1ED6, 'M', 'ỗ'),
+ (0x1ED7, 'V'),
+ (0x1ED8, 'M', 'ộ'),
+ (0x1ED9, 'V'),
+ (0x1EDA, 'M', 'ớ'),
+ (0x1EDB, 'V'),
+ (0x1EDC, 'M', 'ờ'),
+ (0x1EDD, 'V'),
+ (0x1EDE, 'M', 'ở'),
+ (0x1EDF, 'V'),
+ (0x1EE0, 'M', 'ỡ'),
+ (0x1EE1, 'V'),
+ (0x1EE2, 'M', 'ợ'),
+ (0x1EE3, 'V'),
+ (0x1EE4, 'M', 'ụ'),
+ (0x1EE5, 'V'),
+ (0x1EE6, 'M', 'ủ'),
+ (0x1EE7, 'V'),
+ (0x1EE8, 'M', 'ứ'),
+ (0x1EE9, 'V'),
+ (0x1EEA, 'M', 'ừ'),
+ (0x1EEB, 'V'),
+ (0x1EEC, 'M', 'ử'),
+ (0x1EED, 'V'),
+ (0x1EEE, 'M', 'ữ'),
+ (0x1EEF, 'V'),
+ (0x1EF0, 'M', 'ự'),
+ ]
+
+def _seg_19() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1EF1, 'V'),
+ (0x1EF2, 'M', 'ỳ'),
+ (0x1EF3, 'V'),
+ (0x1EF4, 'M', 'ỵ'),
+ (0x1EF5, 'V'),
+ (0x1EF6, 'M', 'ỷ'),
+ (0x1EF7, 'V'),
+ (0x1EF8, 'M', 'ỹ'),
+ (0x1EF9, 'V'),
+ (0x1EFA, 'M', 'ỻ'),
+ (0x1EFB, 'V'),
+ (0x1EFC, 'M', 'ỽ'),
+ (0x1EFD, 'V'),
+ (0x1EFE, 'M', 'ỿ'),
+ (0x1EFF, 'V'),
+ (0x1F08, 'M', 'ἀ'),
+ (0x1F09, 'M', 'ἁ'),
+ (0x1F0A, 'M', 'ἂ'),
+ (0x1F0B, 'M', 'ἃ'),
+ (0x1F0C, 'M', 'ἄ'),
+ (0x1F0D, 'M', 'ἅ'),
+ (0x1F0E, 'M', 'ἆ'),
+ (0x1F0F, 'M', 'ἇ'),
+ (0x1F10, 'V'),
+ (0x1F16, 'X'),
+ (0x1F18, 'M', 'ἐ'),
+ (0x1F19, 'M', 'ἑ'),
+ (0x1F1A, 'M', 'ἒ'),
+ (0x1F1B, 'M', 'ἓ'),
+ (0x1F1C, 'M', 'ἔ'),
+ (0x1F1D, 'M', 'ἕ'),
+ (0x1F1E, 'X'),
+ (0x1F20, 'V'),
+ (0x1F28, 'M', 'ἠ'),
+ (0x1F29, 'M', 'ἡ'),
+ (0x1F2A, 'M', 'ἢ'),
+ (0x1F2B, 'M', 'ἣ'),
+ (0x1F2C, 'M', 'ἤ'),
+ (0x1F2D, 'M', 'ἥ'),
+ (0x1F2E, 'M', 'ἦ'),
+ (0x1F2F, 'M', 'ἧ'),
+ (0x1F30, 'V'),
+ (0x1F38, 'M', 'ἰ'),
+ (0x1F39, 'M', 'ἱ'),
+ (0x1F3A, 'M', 'ἲ'),
+ (0x1F3B, 'M', 'ἳ'),
+ (0x1F3C, 'M', 'ἴ'),
+ (0x1F3D, 'M', 'ἵ'),
+ (0x1F3E, 'M', 'ἶ'),
+ (0x1F3F, 'M', 'ἷ'),
+ (0x1F40, 'V'),
+ (0x1F46, 'X'),
+ (0x1F48, 'M', 'ὀ'),
+ (0x1F49, 'M', 'ὁ'),
+ (0x1F4A, 'M', 'ὂ'),
+ (0x1F4B, 'M', 'ὃ'),
+ (0x1F4C, 'M', 'ὄ'),
+ (0x1F4D, 'M', 'ὅ'),
+ (0x1F4E, 'X'),
+ (0x1F50, 'V'),
+ (0x1F58, 'X'),
+ (0x1F59, 'M', 'ὑ'),
+ (0x1F5A, 'X'),
+ (0x1F5B, 'M', 'ὓ'),
+ (0x1F5C, 'X'),
+ (0x1F5D, 'M', 'ὕ'),
+ (0x1F5E, 'X'),
+ (0x1F5F, 'M', 'ὗ'),
+ (0x1F60, 'V'),
+ (0x1F68, 'M', 'ὠ'),
+ (0x1F69, 'M', 'ὡ'),
+ (0x1F6A, 'M', 'ὢ'),
+ (0x1F6B, 'M', 'ὣ'),
+ (0x1F6C, 'M', 'ὤ'),
+ (0x1F6D, 'M', 'ὥ'),
+ (0x1F6E, 'M', 'ὦ'),
+ (0x1F6F, 'M', 'ὧ'),
+ (0x1F70, 'V'),
+ (0x1F71, 'M', 'ά'),
+ (0x1F72, 'V'),
+ (0x1F73, 'M', 'έ'),
+ (0x1F74, 'V'),
+ (0x1F75, 'M', 'ή'),
+ (0x1F76, 'V'),
+ (0x1F77, 'M', 'ί'),
+ (0x1F78, 'V'),
+ (0x1F79, 'M', 'ό'),
+ (0x1F7A, 'V'),
+ (0x1F7B, 'M', 'ύ'),
+ (0x1F7C, 'V'),
+ (0x1F7D, 'M', 'ώ'),
+ (0x1F7E, 'X'),
+ (0x1F80, 'M', 'ἀι'),
+ (0x1F81, 'M', 'ἁι'),
+ (0x1F82, 'M', 'ἂι'),
+ (0x1F83, 'M', 'ἃι'),
+ (0x1F84, 'M', 'ἄι'),
+ (0x1F85, 'M', 'ἅι'),
+ (0x1F86, 'M', 'ἆι'),
+ (0x1F87, 'M', 'ἇι'),
+ ]
+
+def _seg_20() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1F88, 'M', 'ἀι'),
+ (0x1F89, 'M', 'ἁι'),
+ (0x1F8A, 'M', 'ἂι'),
+ (0x1F8B, 'M', 'ἃι'),
+ (0x1F8C, 'M', 'ἄι'),
+ (0x1F8D, 'M', 'ἅι'),
+ (0x1F8E, 'M', 'ἆι'),
+ (0x1F8F, 'M', 'ἇι'),
+ (0x1F90, 'M', 'ἠι'),
+ (0x1F91, 'M', 'ἡι'),
+ (0x1F92, 'M', 'ἢι'),
+ (0x1F93, 'M', 'ἣι'),
+ (0x1F94, 'M', 'ἤι'),
+ (0x1F95, 'M', 'ἥι'),
+ (0x1F96, 'M', 'ἦι'),
+ (0x1F97, 'M', 'ἧι'),
+ (0x1F98, 'M', 'ἠι'),
+ (0x1F99, 'M', 'ἡι'),
+ (0x1F9A, 'M', 'ἢι'),
+ (0x1F9B, 'M', 'ἣι'),
+ (0x1F9C, 'M', 'ἤι'),
+ (0x1F9D, 'M', 'ἥι'),
+ (0x1F9E, 'M', 'ἦι'),
+ (0x1F9F, 'M', 'ἧι'),
+ (0x1FA0, 'M', 'ὠι'),
+ (0x1FA1, 'M', 'ὡι'),
+ (0x1FA2, 'M', 'ὢι'),
+ (0x1FA3, 'M', 'ὣι'),
+ (0x1FA4, 'M', 'ὤι'),
+ (0x1FA5, 'M', 'ὥι'),
+ (0x1FA6, 'M', 'ὦι'),
+ (0x1FA7, 'M', 'ὧι'),
+ (0x1FA8, 'M', 'ὠι'),
+ (0x1FA9, 'M', 'ὡι'),
+ (0x1FAA, 'M', 'ὢι'),
+ (0x1FAB, 'M', 'ὣι'),
+ (0x1FAC, 'M', 'ὤι'),
+ (0x1FAD, 'M', 'ὥι'),
+ (0x1FAE, 'M', 'ὦι'),
+ (0x1FAF, 'M', 'ὧι'),
+ (0x1FB0, 'V'),
+ (0x1FB2, 'M', 'ὰι'),
+ (0x1FB3, 'M', 'αι'),
+ (0x1FB4, 'M', 'άι'),
+ (0x1FB5, 'X'),
+ (0x1FB6, 'V'),
+ (0x1FB7, 'M', 'ᾶι'),
+ (0x1FB8, 'M', 'ᾰ'),
+ (0x1FB9, 'M', 'ᾱ'),
+ (0x1FBA, 'M', 'ὰ'),
+ (0x1FBB, 'M', 'ά'),
+ (0x1FBC, 'M', 'αι'),
+ (0x1FBD, '3', ' ̓'),
+ (0x1FBE, 'M', 'ι'),
+ (0x1FBF, '3', ' ̓'),
+ (0x1FC0, '3', ' ͂'),
+ (0x1FC1, '3', ' ̈͂'),
+ (0x1FC2, 'M', 'ὴι'),
+ (0x1FC3, 'M', 'ηι'),
+ (0x1FC4, 'M', 'ήι'),
+ (0x1FC5, 'X'),
+ (0x1FC6, 'V'),
+ (0x1FC7, 'M', 'ῆι'),
+ (0x1FC8, 'M', 'ὲ'),
+ (0x1FC9, 'M', 'έ'),
+ (0x1FCA, 'M', 'ὴ'),
+ (0x1FCB, 'M', 'ή'),
+ (0x1FCC, 'M', 'ηι'),
+ (0x1FCD, '3', ' ̓̀'),
+ (0x1FCE, '3', ' ̓́'),
+ (0x1FCF, '3', ' ̓͂'),
+ (0x1FD0, 'V'),
+ (0x1FD3, 'M', 'ΐ'),
+ (0x1FD4, 'X'),
+ (0x1FD6, 'V'),
+ (0x1FD8, 'M', 'ῐ'),
+ (0x1FD9, 'M', 'ῑ'),
+ (0x1FDA, 'M', 'ὶ'),
+ (0x1FDB, 'M', 'ί'),
+ (0x1FDC, 'X'),
+ (0x1FDD, '3', ' ̔̀'),
+ (0x1FDE, '3', ' ̔́'),
+ (0x1FDF, '3', ' ̔͂'),
+ (0x1FE0, 'V'),
+ (0x1FE3, 'M', 'ΰ'),
+ (0x1FE4, 'V'),
+ (0x1FE8, 'M', 'ῠ'),
+ (0x1FE9, 'M', 'ῡ'),
+ (0x1FEA, 'M', 'ὺ'),
+ (0x1FEB, 'M', 'ύ'),
+ (0x1FEC, 'M', 'ῥ'),
+ (0x1FED, '3', ' ̈̀'),
+ (0x1FEE, '3', ' ̈́'),
+ (0x1FEF, '3', '`'),
+ (0x1FF0, 'X'),
+ (0x1FF2, 'M', 'ὼι'),
+ (0x1FF3, 'M', 'ωι'),
+ (0x1FF4, 'M', 'ώι'),
+ (0x1FF5, 'X'),
+ (0x1FF6, 'V'),
+ ]
+
+def _seg_21() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1FF7, 'M', 'ῶι'),
+ (0x1FF8, 'M', 'ὸ'),
+ (0x1FF9, 'M', 'ό'),
+ (0x1FFA, 'M', 'ὼ'),
+ (0x1FFB, 'M', 'ώ'),
+ (0x1FFC, 'M', 'ωι'),
+ (0x1FFD, '3', ' ́'),
+ (0x1FFE, '3', ' ̔'),
+ (0x1FFF, 'X'),
+ (0x2000, '3', ' '),
+ (0x200B, 'I'),
+ (0x200C, 'D', ''),
+ (0x200E, 'X'),
+ (0x2010, 'V'),
+ (0x2011, 'M', '‐'),
+ (0x2012, 'V'),
+ (0x2017, '3', ' ̳'),
+ (0x2018, 'V'),
+ (0x2024, 'X'),
+ (0x2027, 'V'),
+ (0x2028, 'X'),
+ (0x202F, '3', ' '),
+ (0x2030, 'V'),
+ (0x2033, 'M', '′′'),
+ (0x2034, 'M', '′′′'),
+ (0x2035, 'V'),
+ (0x2036, 'M', '‵‵'),
+ (0x2037, 'M', '‵‵‵'),
+ (0x2038, 'V'),
+ (0x203C, '3', '!!'),
+ (0x203D, 'V'),
+ (0x203E, '3', ' ̅'),
+ (0x203F, 'V'),
+ (0x2047, '3', '??'),
+ (0x2048, '3', '?!'),
+ (0x2049, '3', '!?'),
+ (0x204A, 'V'),
+ (0x2057, 'M', '′′′′'),
+ (0x2058, 'V'),
+ (0x205F, '3', ' '),
+ (0x2060, 'I'),
+ (0x2061, 'X'),
+ (0x2064, 'I'),
+ (0x2065, 'X'),
+ (0x2070, 'M', '0'),
+ (0x2071, 'M', 'i'),
+ (0x2072, 'X'),
+ (0x2074, 'M', '4'),
+ (0x2075, 'M', '5'),
+ (0x2076, 'M', '6'),
+ (0x2077, 'M', '7'),
+ (0x2078, 'M', '8'),
+ (0x2079, 'M', '9'),
+ (0x207A, '3', '+'),
+ (0x207B, 'M', '−'),
+ (0x207C, '3', '='),
+ (0x207D, '3', '('),
+ (0x207E, '3', ')'),
+ (0x207F, 'M', 'n'),
+ (0x2080, 'M', '0'),
+ (0x2081, 'M', '1'),
+ (0x2082, 'M', '2'),
+ (0x2083, 'M', '3'),
+ (0x2084, 'M', '4'),
+ (0x2085, 'M', '5'),
+ (0x2086, 'M', '6'),
+ (0x2087, 'M', '7'),
+ (0x2088, 'M', '8'),
+ (0x2089, 'M', '9'),
+ (0x208A, '3', '+'),
+ (0x208B, 'M', '−'),
+ (0x208C, '3', '='),
+ (0x208D, '3', '('),
+ (0x208E, '3', ')'),
+ (0x208F, 'X'),
+ (0x2090, 'M', 'a'),
+ (0x2091, 'M', 'e'),
+ (0x2092, 'M', 'o'),
+ (0x2093, 'M', 'x'),
+ (0x2094, 'M', 'ə'),
+ (0x2095, 'M', 'h'),
+ (0x2096, 'M', 'k'),
+ (0x2097, 'M', 'l'),
+ (0x2098, 'M', 'm'),
+ (0x2099, 'M', 'n'),
+ (0x209A, 'M', 'p'),
+ (0x209B, 'M', 's'),
+ (0x209C, 'M', 't'),
+ (0x209D, 'X'),
+ (0x20A0, 'V'),
+ (0x20A8, 'M', 'rs'),
+ (0x20A9, 'V'),
+ (0x20C1, 'X'),
+ (0x20D0, 'V'),
+ (0x20F1, 'X'),
+ (0x2100, '3', 'a/c'),
+ (0x2101, '3', 'a/s'),
+ (0x2102, 'M', 'c'),
+ (0x2103, 'M', '°c'),
+ (0x2104, 'V'),
+ ]
+
+def _seg_22() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x2105, '3', 'c/o'),
+ (0x2106, '3', 'c/u'),
+ (0x2107, 'M', 'ɛ'),
+ (0x2108, 'V'),
+ (0x2109, 'M', '°f'),
+ (0x210A, 'M', 'g'),
+ (0x210B, 'M', 'h'),
+ (0x210F, 'M', 'ħ'),
+ (0x2110, 'M', 'i'),
+ (0x2112, 'M', 'l'),
+ (0x2114, 'V'),
+ (0x2115, 'M', 'n'),
+ (0x2116, 'M', 'no'),
+ (0x2117, 'V'),
+ (0x2119, 'M', 'p'),
+ (0x211A, 'M', 'q'),
+ (0x211B, 'M', 'r'),
+ (0x211E, 'V'),
+ (0x2120, 'M', 'sm'),
+ (0x2121, 'M', 'tel'),
+ (0x2122, 'M', 'tm'),
+ (0x2123, 'V'),
+ (0x2124, 'M', 'z'),
+ (0x2125, 'V'),
+ (0x2126, 'M', 'ω'),
+ (0x2127, 'V'),
+ (0x2128, 'M', 'z'),
+ (0x2129, 'V'),
+ (0x212A, 'M', 'k'),
+ (0x212B, 'M', 'å'),
+ (0x212C, 'M', 'b'),
+ (0x212D, 'M', 'c'),
+ (0x212E, 'V'),
+ (0x212F, 'M', 'e'),
+ (0x2131, 'M', 'f'),
+ (0x2132, 'X'),
+ (0x2133, 'M', 'm'),
+ (0x2134, 'M', 'o'),
+ (0x2135, 'M', 'א'),
+ (0x2136, 'M', 'ב'),
+ (0x2137, 'M', 'ג'),
+ (0x2138, 'M', 'ד'),
+ (0x2139, 'M', 'i'),
+ (0x213A, 'V'),
+ (0x213B, 'M', 'fax'),
+ (0x213C, 'M', 'π'),
+ (0x213D, 'M', 'γ'),
+ (0x213F, 'M', 'π'),
+ (0x2140, 'M', '∑'),
+ (0x2141, 'V'),
+ (0x2145, 'M', 'd'),
+ (0x2147, 'M', 'e'),
+ (0x2148, 'M', 'i'),
+ (0x2149, 'M', 'j'),
+ (0x214A, 'V'),
+ (0x2150, 'M', '1⁄7'),
+ (0x2151, 'M', '1⁄9'),
+ (0x2152, 'M', '1⁄10'),
+ (0x2153, 'M', '1⁄3'),
+ (0x2154, 'M', '2⁄3'),
+ (0x2155, 'M', '1⁄5'),
+ (0x2156, 'M', '2⁄5'),
+ (0x2157, 'M', '3⁄5'),
+ (0x2158, 'M', '4⁄5'),
+ (0x2159, 'M', '1⁄6'),
+ (0x215A, 'M', '5⁄6'),
+ (0x215B, 'M', '1⁄8'),
+ (0x215C, 'M', '3⁄8'),
+ (0x215D, 'M', '5⁄8'),
+ (0x215E, 'M', '7⁄8'),
+ (0x215F, 'M', '1⁄'),
+ (0x2160, 'M', 'i'),
+ (0x2161, 'M', 'ii'),
+ (0x2162, 'M', 'iii'),
+ (0x2163, 'M', 'iv'),
+ (0x2164, 'M', 'v'),
+ (0x2165, 'M', 'vi'),
+ (0x2166, 'M', 'vii'),
+ (0x2167, 'M', 'viii'),
+ (0x2168, 'M', 'ix'),
+ (0x2169, 'M', 'x'),
+ (0x216A, 'M', 'xi'),
+ (0x216B, 'M', 'xii'),
+ (0x216C, 'M', 'l'),
+ (0x216D, 'M', 'c'),
+ (0x216E, 'M', 'd'),
+ (0x216F, 'M', 'm'),
+ (0x2170, 'M', 'i'),
+ (0x2171, 'M', 'ii'),
+ (0x2172, 'M', 'iii'),
+ (0x2173, 'M', 'iv'),
+ (0x2174, 'M', 'v'),
+ (0x2175, 'M', 'vi'),
+ (0x2176, 'M', 'vii'),
+ (0x2177, 'M', 'viii'),
+ (0x2178, 'M', 'ix'),
+ (0x2179, 'M', 'x'),
+ (0x217A, 'M', 'xi'),
+ (0x217B, 'M', 'xii'),
+ (0x217C, 'M', 'l'),
+ ]
+
+def _seg_23() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x217D, 'M', 'c'),
+ (0x217E, 'M', 'd'),
+ (0x217F, 'M', 'm'),
+ (0x2180, 'V'),
+ (0x2183, 'X'),
+ (0x2184, 'V'),
+ (0x2189, 'M', '0⁄3'),
+ (0x218A, 'V'),
+ (0x218C, 'X'),
+ (0x2190, 'V'),
+ (0x222C, 'M', '∫∫'),
+ (0x222D, 'M', '∫∫∫'),
+ (0x222E, 'V'),
+ (0x222F, 'M', '∮∮'),
+ (0x2230, 'M', '∮∮∮'),
+ (0x2231, 'V'),
+ (0x2260, '3'),
+ (0x2261, 'V'),
+ (0x226E, '3'),
+ (0x2270, 'V'),
+ (0x2329, 'M', '〈'),
+ (0x232A, 'M', '〉'),
+ (0x232B, 'V'),
+ (0x2427, 'X'),
+ (0x2440, 'V'),
+ (0x244B, 'X'),
+ (0x2460, 'M', '1'),
+ (0x2461, 'M', '2'),
+ (0x2462, 'M', '3'),
+ (0x2463, 'M', '4'),
+ (0x2464, 'M', '5'),
+ (0x2465, 'M', '6'),
+ (0x2466, 'M', '7'),
+ (0x2467, 'M', '8'),
+ (0x2468, 'M', '9'),
+ (0x2469, 'M', '10'),
+ (0x246A, 'M', '11'),
+ (0x246B, 'M', '12'),
+ (0x246C, 'M', '13'),
+ (0x246D, 'M', '14'),
+ (0x246E, 'M', '15'),
+ (0x246F, 'M', '16'),
+ (0x2470, 'M', '17'),
+ (0x2471, 'M', '18'),
+ (0x2472, 'M', '19'),
+ (0x2473, 'M', '20'),
+ (0x2474, '3', '(1)'),
+ (0x2475, '3', '(2)'),
+ (0x2476, '3', '(3)'),
+ (0x2477, '3', '(4)'),
+ (0x2478, '3', '(5)'),
+ (0x2479, '3', '(6)'),
+ (0x247A, '3', '(7)'),
+ (0x247B, '3', '(8)'),
+ (0x247C, '3', '(9)'),
+ (0x247D, '3', '(10)'),
+ (0x247E, '3', '(11)'),
+ (0x247F, '3', '(12)'),
+ (0x2480, '3', '(13)'),
+ (0x2481, '3', '(14)'),
+ (0x2482, '3', '(15)'),
+ (0x2483, '3', '(16)'),
+ (0x2484, '3', '(17)'),
+ (0x2485, '3', '(18)'),
+ (0x2486, '3', '(19)'),
+ (0x2487, '3', '(20)'),
+ (0x2488, 'X'),
+ (0x249C, '3', '(a)'),
+ (0x249D, '3', '(b)'),
+ (0x249E, '3', '(c)'),
+ (0x249F, '3', '(d)'),
+ (0x24A0, '3', '(e)'),
+ (0x24A1, '3', '(f)'),
+ (0x24A2, '3', '(g)'),
+ (0x24A3, '3', '(h)'),
+ (0x24A4, '3', '(i)'),
+ (0x24A5, '3', '(j)'),
+ (0x24A6, '3', '(k)'),
+ (0x24A7, '3', '(l)'),
+ (0x24A8, '3', '(m)'),
+ (0x24A9, '3', '(n)'),
+ (0x24AA, '3', '(o)'),
+ (0x24AB, '3', '(p)'),
+ (0x24AC, '3', '(q)'),
+ (0x24AD, '3', '(r)'),
+ (0x24AE, '3', '(s)'),
+ (0x24AF, '3', '(t)'),
+ (0x24B0, '3', '(u)'),
+ (0x24B1, '3', '(v)'),
+ (0x24B2, '3', '(w)'),
+ (0x24B3, '3', '(x)'),
+ (0x24B4, '3', '(y)'),
+ (0x24B5, '3', '(z)'),
+ (0x24B6, 'M', 'a'),
+ (0x24B7, 'M', 'b'),
+ (0x24B8, 'M', 'c'),
+ (0x24B9, 'M', 'd'),
+ (0x24BA, 'M', 'e'),
+ (0x24BB, 'M', 'f'),
+ (0x24BC, 'M', 'g'),
+ ]
+
+def _seg_24() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x24BD, 'M', 'h'),
+ (0x24BE, 'M', 'i'),
+ (0x24BF, 'M', 'j'),
+ (0x24C0, 'M', 'k'),
+ (0x24C1, 'M', 'l'),
+ (0x24C2, 'M', 'm'),
+ (0x24C3, 'M', 'n'),
+ (0x24C4, 'M', 'o'),
+ (0x24C5, 'M', 'p'),
+ (0x24C6, 'M', 'q'),
+ (0x24C7, 'M', 'r'),
+ (0x24C8, 'M', 's'),
+ (0x24C9, 'M', 't'),
+ (0x24CA, 'M', 'u'),
+ (0x24CB, 'M', 'v'),
+ (0x24CC, 'M', 'w'),
+ (0x24CD, 'M', 'x'),
+ (0x24CE, 'M', 'y'),
+ (0x24CF, 'M', 'z'),
+ (0x24D0, 'M', 'a'),
+ (0x24D1, 'M', 'b'),
+ (0x24D2, 'M', 'c'),
+ (0x24D3, 'M', 'd'),
+ (0x24D4, 'M', 'e'),
+ (0x24D5, 'M', 'f'),
+ (0x24D6, 'M', 'g'),
+ (0x24D7, 'M', 'h'),
+ (0x24D8, 'M', 'i'),
+ (0x24D9, 'M', 'j'),
+ (0x24DA, 'M', 'k'),
+ (0x24DB, 'M', 'l'),
+ (0x24DC, 'M', 'm'),
+ (0x24DD, 'M', 'n'),
+ (0x24DE, 'M', 'o'),
+ (0x24DF, 'M', 'p'),
+ (0x24E0, 'M', 'q'),
+ (0x24E1, 'M', 'r'),
+ (0x24E2, 'M', 's'),
+ (0x24E3, 'M', 't'),
+ (0x24E4, 'M', 'u'),
+ (0x24E5, 'M', 'v'),
+ (0x24E6, 'M', 'w'),
+ (0x24E7, 'M', 'x'),
+ (0x24E8, 'M', 'y'),
+ (0x24E9, 'M', 'z'),
+ (0x24EA, 'M', '0'),
+ (0x24EB, 'V'),
+ (0x2A0C, 'M', '∫∫∫∫'),
+ (0x2A0D, 'V'),
+ (0x2A74, '3', '::='),
+ (0x2A75, '3', '=='),
+ (0x2A76, '3', '==='),
+ (0x2A77, 'V'),
+ (0x2ADC, 'M', '⫝̸'),
+ (0x2ADD, 'V'),
+ (0x2B74, 'X'),
+ (0x2B76, 'V'),
+ (0x2B96, 'X'),
+ (0x2B97, 'V'),
+ (0x2C00, 'M', 'ⰰ'),
+ (0x2C01, 'M', 'ⰱ'),
+ (0x2C02, 'M', 'ⰲ'),
+ (0x2C03, 'M', 'ⰳ'),
+ (0x2C04, 'M', 'ⰴ'),
+ (0x2C05, 'M', 'ⰵ'),
+ (0x2C06, 'M', 'ⰶ'),
+ (0x2C07, 'M', 'ⰷ'),
+ (0x2C08, 'M', 'ⰸ'),
+ (0x2C09, 'M', 'ⰹ'),
+ (0x2C0A, 'M', 'ⰺ'),
+ (0x2C0B, 'M', 'ⰻ'),
+ (0x2C0C, 'M', 'ⰼ'),
+ (0x2C0D, 'M', 'ⰽ'),
+ (0x2C0E, 'M', 'ⰾ'),
+ (0x2C0F, 'M', 'ⰿ'),
+ (0x2C10, 'M', 'ⱀ'),
+ (0x2C11, 'M', 'ⱁ'),
+ (0x2C12, 'M', 'ⱂ'),
+ (0x2C13, 'M', 'ⱃ'),
+ (0x2C14, 'M', 'ⱄ'),
+ (0x2C15, 'M', 'ⱅ'),
+ (0x2C16, 'M', 'ⱆ'),
+ (0x2C17, 'M', 'ⱇ'),
+ (0x2C18, 'M', 'ⱈ'),
+ (0x2C19, 'M', 'ⱉ'),
+ (0x2C1A, 'M', 'ⱊ'),
+ (0x2C1B, 'M', 'ⱋ'),
+ (0x2C1C, 'M', 'ⱌ'),
+ (0x2C1D, 'M', 'ⱍ'),
+ (0x2C1E, 'M', 'ⱎ'),
+ (0x2C1F, 'M', 'ⱏ'),
+ (0x2C20, 'M', 'ⱐ'),
+ (0x2C21, 'M', 'ⱑ'),
+ (0x2C22, 'M', 'ⱒ'),
+ (0x2C23, 'M', 'ⱓ'),
+ (0x2C24, 'M', 'ⱔ'),
+ (0x2C25, 'M', 'ⱕ'),
+ (0x2C26, 'M', 'ⱖ'),
+ (0x2C27, 'M', 'ⱗ'),
+ (0x2C28, 'M', 'ⱘ'),
+ ]
+
+def _seg_25() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x2C29, 'M', 'ⱙ'),
+ (0x2C2A, 'M', 'ⱚ'),
+ (0x2C2B, 'M', 'ⱛ'),
+ (0x2C2C, 'M', 'ⱜ'),
+ (0x2C2D, 'M', 'ⱝ'),
+ (0x2C2E, 'M', 'ⱞ'),
+ (0x2C2F, 'M', 'ⱟ'),
+ (0x2C30, 'V'),
+ (0x2C60, 'M', 'ⱡ'),
+ (0x2C61, 'V'),
+ (0x2C62, 'M', 'ɫ'),
+ (0x2C63, 'M', 'ᵽ'),
+ (0x2C64, 'M', 'ɽ'),
+ (0x2C65, 'V'),
+ (0x2C67, 'M', 'ⱨ'),
+ (0x2C68, 'V'),
+ (0x2C69, 'M', 'ⱪ'),
+ (0x2C6A, 'V'),
+ (0x2C6B, 'M', 'ⱬ'),
+ (0x2C6C, 'V'),
+ (0x2C6D, 'M', 'ɑ'),
+ (0x2C6E, 'M', 'ɱ'),
+ (0x2C6F, 'M', 'ɐ'),
+ (0x2C70, 'M', 'ɒ'),
+ (0x2C71, 'V'),
+ (0x2C72, 'M', 'ⱳ'),
+ (0x2C73, 'V'),
+ (0x2C75, 'M', 'ⱶ'),
+ (0x2C76, 'V'),
+ (0x2C7C, 'M', 'j'),
+ (0x2C7D, 'M', 'v'),
+ (0x2C7E, 'M', 'ȿ'),
+ (0x2C7F, 'M', 'ɀ'),
+ (0x2C80, 'M', 'ⲁ'),
+ (0x2C81, 'V'),
+ (0x2C82, 'M', 'ⲃ'),
+ (0x2C83, 'V'),
+ (0x2C84, 'M', 'ⲅ'),
+ (0x2C85, 'V'),
+ (0x2C86, 'M', 'ⲇ'),
+ (0x2C87, 'V'),
+ (0x2C88, 'M', 'ⲉ'),
+ (0x2C89, 'V'),
+ (0x2C8A, 'M', 'ⲋ'),
+ (0x2C8B, 'V'),
+ (0x2C8C, 'M', 'ⲍ'),
+ (0x2C8D, 'V'),
+ (0x2C8E, 'M', 'ⲏ'),
+ (0x2C8F, 'V'),
+ (0x2C90, 'M', 'ⲑ'),
+ (0x2C91, 'V'),
+ (0x2C92, 'M', 'ⲓ'),
+ (0x2C93, 'V'),
+ (0x2C94, 'M', 'ⲕ'),
+ (0x2C95, 'V'),
+ (0x2C96, 'M', 'ⲗ'),
+ (0x2C97, 'V'),
+ (0x2C98, 'M', 'ⲙ'),
+ (0x2C99, 'V'),
+ (0x2C9A, 'M', 'ⲛ'),
+ (0x2C9B, 'V'),
+ (0x2C9C, 'M', 'ⲝ'),
+ (0x2C9D, 'V'),
+ (0x2C9E, 'M', 'ⲟ'),
+ (0x2C9F, 'V'),
+ (0x2CA0, 'M', 'ⲡ'),
+ (0x2CA1, 'V'),
+ (0x2CA2, 'M', 'ⲣ'),
+ (0x2CA3, 'V'),
+ (0x2CA4, 'M', 'ⲥ'),
+ (0x2CA5, 'V'),
+ (0x2CA6, 'M', 'ⲧ'),
+ (0x2CA7, 'V'),
+ (0x2CA8, 'M', 'ⲩ'),
+ (0x2CA9, 'V'),
+ (0x2CAA, 'M', 'ⲫ'),
+ (0x2CAB, 'V'),
+ (0x2CAC, 'M', 'ⲭ'),
+ (0x2CAD, 'V'),
+ (0x2CAE, 'M', 'ⲯ'),
+ (0x2CAF, 'V'),
+ (0x2CB0, 'M', 'ⲱ'),
+ (0x2CB1, 'V'),
+ (0x2CB2, 'M', 'ⲳ'),
+ (0x2CB3, 'V'),
+ (0x2CB4, 'M', 'ⲵ'),
+ (0x2CB5, 'V'),
+ (0x2CB6, 'M', 'ⲷ'),
+ (0x2CB7, 'V'),
+ (0x2CB8, 'M', 'ⲹ'),
+ (0x2CB9, 'V'),
+ (0x2CBA, 'M', 'ⲻ'),
+ (0x2CBB, 'V'),
+ (0x2CBC, 'M', 'ⲽ'),
+ (0x2CBD, 'V'),
+ (0x2CBE, 'M', 'ⲿ'),
+ (0x2CBF, 'V'),
+ (0x2CC0, 'M', 'ⳁ'),
+ (0x2CC1, 'V'),
+ (0x2CC2, 'M', 'ⳃ'),
+ ]
+
+def _seg_26() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x2CC3, 'V'),
+ (0x2CC4, 'M', 'ⳅ'),
+ (0x2CC5, 'V'),
+ (0x2CC6, 'M', 'ⳇ'),
+ (0x2CC7, 'V'),
+ (0x2CC8, 'M', 'ⳉ'),
+ (0x2CC9, 'V'),
+ (0x2CCA, 'M', 'ⳋ'),
+ (0x2CCB, 'V'),
+ (0x2CCC, 'M', 'ⳍ'),
+ (0x2CCD, 'V'),
+ (0x2CCE, 'M', 'ⳏ'),
+ (0x2CCF, 'V'),
+ (0x2CD0, 'M', 'ⳑ'),
+ (0x2CD1, 'V'),
+ (0x2CD2, 'M', 'ⳓ'),
+ (0x2CD3, 'V'),
+ (0x2CD4, 'M', 'ⳕ'),
+ (0x2CD5, 'V'),
+ (0x2CD6, 'M', 'ⳗ'),
+ (0x2CD7, 'V'),
+ (0x2CD8, 'M', 'ⳙ'),
+ (0x2CD9, 'V'),
+ (0x2CDA, 'M', 'ⳛ'),
+ (0x2CDB, 'V'),
+ (0x2CDC, 'M', 'ⳝ'),
+ (0x2CDD, 'V'),
+ (0x2CDE, 'M', 'ⳟ'),
+ (0x2CDF, 'V'),
+ (0x2CE0, 'M', 'ⳡ'),
+ (0x2CE1, 'V'),
+ (0x2CE2, 'M', 'ⳣ'),
+ (0x2CE3, 'V'),
+ (0x2CEB, 'M', 'ⳬ'),
+ (0x2CEC, 'V'),
+ (0x2CED, 'M', 'ⳮ'),
+ (0x2CEE, 'V'),
+ (0x2CF2, 'M', 'ⳳ'),
+ (0x2CF3, 'V'),
+ (0x2CF4, 'X'),
+ (0x2CF9, 'V'),
+ (0x2D26, 'X'),
+ (0x2D27, 'V'),
+ (0x2D28, 'X'),
+ (0x2D2D, 'V'),
+ (0x2D2E, 'X'),
+ (0x2D30, 'V'),
+ (0x2D68, 'X'),
+ (0x2D6F, 'M', 'ⵡ'),
+ (0x2D70, 'V'),
+ (0x2D71, 'X'),
+ (0x2D7F, 'V'),
+ (0x2D97, 'X'),
+ (0x2DA0, 'V'),
+ (0x2DA7, 'X'),
+ (0x2DA8, 'V'),
+ (0x2DAF, 'X'),
+ (0x2DB0, 'V'),
+ (0x2DB7, 'X'),
+ (0x2DB8, 'V'),
+ (0x2DBF, 'X'),
+ (0x2DC0, 'V'),
+ (0x2DC7, 'X'),
+ (0x2DC8, 'V'),
+ (0x2DCF, 'X'),
+ (0x2DD0, 'V'),
+ (0x2DD7, 'X'),
+ (0x2DD8, 'V'),
+ (0x2DDF, 'X'),
+ (0x2DE0, 'V'),
+ (0x2E5E, 'X'),
+ (0x2E80, 'V'),
+ (0x2E9A, 'X'),
+ (0x2E9B, 'V'),
+ (0x2E9F, 'M', '母'),
+ (0x2EA0, 'V'),
+ (0x2EF3, 'M', '龟'),
+ (0x2EF4, 'X'),
+ (0x2F00, 'M', '一'),
+ (0x2F01, 'M', '丨'),
+ (0x2F02, 'M', '丶'),
+ (0x2F03, 'M', '丿'),
+ (0x2F04, 'M', '乙'),
+ (0x2F05, 'M', '亅'),
+ (0x2F06, 'M', '二'),
+ (0x2F07, 'M', '亠'),
+ (0x2F08, 'M', '人'),
+ (0x2F09, 'M', '儿'),
+ (0x2F0A, 'M', '入'),
+ (0x2F0B, 'M', '八'),
+ (0x2F0C, 'M', '冂'),
+ (0x2F0D, 'M', '冖'),
+ (0x2F0E, 'M', '冫'),
+ (0x2F0F, 'M', '几'),
+ (0x2F10, 'M', '凵'),
+ (0x2F11, 'M', '刀'),
+ (0x2F12, 'M', '力'),
+ (0x2F13, 'M', '勹'),
+ (0x2F14, 'M', '匕'),
+ (0x2F15, 'M', '匚'),
+ ]
+
+def _seg_27() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x2F16, 'M', '匸'),
+ (0x2F17, 'M', '十'),
+ (0x2F18, 'M', '卜'),
+ (0x2F19, 'M', '卩'),
+ (0x2F1A, 'M', '厂'),
+ (0x2F1B, 'M', '厶'),
+ (0x2F1C, 'M', '又'),
+ (0x2F1D, 'M', '口'),
+ (0x2F1E, 'M', '囗'),
+ (0x2F1F, 'M', '土'),
+ (0x2F20, 'M', '士'),
+ (0x2F21, 'M', '夂'),
+ (0x2F22, 'M', '夊'),
+ (0x2F23, 'M', '夕'),
+ (0x2F24, 'M', '大'),
+ (0x2F25, 'M', '女'),
+ (0x2F26, 'M', '子'),
+ (0x2F27, 'M', '宀'),
+ (0x2F28, 'M', '寸'),
+ (0x2F29, 'M', '小'),
+ (0x2F2A, 'M', '尢'),
+ (0x2F2B, 'M', '尸'),
+ (0x2F2C, 'M', '屮'),
+ (0x2F2D, 'M', '山'),
+ (0x2F2E, 'M', '巛'),
+ (0x2F2F, 'M', '工'),
+ (0x2F30, 'M', '己'),
+ (0x2F31, 'M', '巾'),
+ (0x2F32, 'M', '干'),
+ (0x2F33, 'M', '幺'),
+ (0x2F34, 'M', '广'),
+ (0x2F35, 'M', '廴'),
+ (0x2F36, 'M', '廾'),
+ (0x2F37, 'M', '弋'),
+ (0x2F38, 'M', '弓'),
+ (0x2F39, 'M', '彐'),
+ (0x2F3A, 'M', '彡'),
+ (0x2F3B, 'M', '彳'),
+ (0x2F3C, 'M', '心'),
+ (0x2F3D, 'M', '戈'),
+ (0x2F3E, 'M', '戶'),
+ (0x2F3F, 'M', '手'),
+ (0x2F40, 'M', '支'),
+ (0x2F41, 'M', '攴'),
+ (0x2F42, 'M', '文'),
+ (0x2F43, 'M', '斗'),
+ (0x2F44, 'M', '斤'),
+ (0x2F45, 'M', '方'),
+ (0x2F46, 'M', '无'),
+ (0x2F47, 'M', '日'),
+ (0x2F48, 'M', '曰'),
+ (0x2F49, 'M', '月'),
+ (0x2F4A, 'M', '木'),
+ (0x2F4B, 'M', '欠'),
+ (0x2F4C, 'M', '止'),
+ (0x2F4D, 'M', '歹'),
+ (0x2F4E, 'M', '殳'),
+ (0x2F4F, 'M', '毋'),
+ (0x2F50, 'M', '比'),
+ (0x2F51, 'M', '毛'),
+ (0x2F52, 'M', '氏'),
+ (0x2F53, 'M', '气'),
+ (0x2F54, 'M', '水'),
+ (0x2F55, 'M', '火'),
+ (0x2F56, 'M', '爪'),
+ (0x2F57, 'M', '父'),
+ (0x2F58, 'M', '爻'),
+ (0x2F59, 'M', '爿'),
+ (0x2F5A, 'M', '片'),
+ (0x2F5B, 'M', '牙'),
+ (0x2F5C, 'M', '牛'),
+ (0x2F5D, 'M', '犬'),
+ (0x2F5E, 'M', '玄'),
+ (0x2F5F, 'M', '玉'),
+ (0x2F60, 'M', '瓜'),
+ (0x2F61, 'M', '瓦'),
+ (0x2F62, 'M', '甘'),
+ (0x2F63, 'M', '生'),
+ (0x2F64, 'M', '用'),
+ (0x2F65, 'M', '田'),
+ (0x2F66, 'M', '疋'),
+ (0x2F67, 'M', '疒'),
+ (0x2F68, 'M', '癶'),
+ (0x2F69, 'M', '白'),
+ (0x2F6A, 'M', '皮'),
+ (0x2F6B, 'M', '皿'),
+ (0x2F6C, 'M', '目'),
+ (0x2F6D, 'M', '矛'),
+ (0x2F6E, 'M', '矢'),
+ (0x2F6F, 'M', '石'),
+ (0x2F70, 'M', '示'),
+ (0x2F71, 'M', '禸'),
+ (0x2F72, 'M', '禾'),
+ (0x2F73, 'M', '穴'),
+ (0x2F74, 'M', '立'),
+ (0x2F75, 'M', '竹'),
+ (0x2F76, 'M', '米'),
+ (0x2F77, 'M', '糸'),
+ (0x2F78, 'M', '缶'),
+ (0x2F79, 'M', '网'),
+ ]
+
+def _seg_28() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x2F7A, 'M', '羊'),
+ (0x2F7B, 'M', '羽'),
+ (0x2F7C, 'M', '老'),
+ (0x2F7D, 'M', '而'),
+ (0x2F7E, 'M', '耒'),
+ (0x2F7F, 'M', '耳'),
+ (0x2F80, 'M', '聿'),
+ (0x2F81, 'M', '肉'),
+ (0x2F82, 'M', '臣'),
+ (0x2F83, 'M', '自'),
+ (0x2F84, 'M', '至'),
+ (0x2F85, 'M', '臼'),
+ (0x2F86, 'M', '舌'),
+ (0x2F87, 'M', '舛'),
+ (0x2F88, 'M', '舟'),
+ (0x2F89, 'M', '艮'),
+ (0x2F8A, 'M', '色'),
+ (0x2F8B, 'M', '艸'),
+ (0x2F8C, 'M', '虍'),
+ (0x2F8D, 'M', '虫'),
+ (0x2F8E, 'M', '血'),
+ (0x2F8F, 'M', '行'),
+ (0x2F90, 'M', '衣'),
+ (0x2F91, 'M', '襾'),
+ (0x2F92, 'M', '見'),
+ (0x2F93, 'M', '角'),
+ (0x2F94, 'M', '言'),
+ (0x2F95, 'M', '谷'),
+ (0x2F96, 'M', '豆'),
+ (0x2F97, 'M', '豕'),
+ (0x2F98, 'M', '豸'),
+ (0x2F99, 'M', '貝'),
+ (0x2F9A, 'M', '赤'),
+ (0x2F9B, 'M', '走'),
+ (0x2F9C, 'M', '足'),
+ (0x2F9D, 'M', '身'),
+ (0x2F9E, 'M', '車'),
+ (0x2F9F, 'M', '辛'),
+ (0x2FA0, 'M', '辰'),
+ (0x2FA1, 'M', '辵'),
+ (0x2FA2, 'M', '邑'),
+ (0x2FA3, 'M', '酉'),
+ (0x2FA4, 'M', '釆'),
+ (0x2FA5, 'M', '里'),
+ (0x2FA6, 'M', '金'),
+ (0x2FA7, 'M', '長'),
+ (0x2FA8, 'M', '門'),
+ (0x2FA9, 'M', '阜'),
+ (0x2FAA, 'M', '隶'),
+ (0x2FAB, 'M', '隹'),
+ (0x2FAC, 'M', '雨'),
+ (0x2FAD, 'M', '靑'),
+ (0x2FAE, 'M', '非'),
+ (0x2FAF, 'M', '面'),
+ (0x2FB0, 'M', '革'),
+ (0x2FB1, 'M', '韋'),
+ (0x2FB2, 'M', '韭'),
+ (0x2FB3, 'M', '音'),
+ (0x2FB4, 'M', '頁'),
+ (0x2FB5, 'M', '風'),
+ (0x2FB6, 'M', '飛'),
+ (0x2FB7, 'M', '食'),
+ (0x2FB8, 'M', '首'),
+ (0x2FB9, 'M', '香'),
+ (0x2FBA, 'M', '馬'),
+ (0x2FBB, 'M', '骨'),
+ (0x2FBC, 'M', '高'),
+ (0x2FBD, 'M', '髟'),
+ (0x2FBE, 'M', '鬥'),
+ (0x2FBF, 'M', '鬯'),
+ (0x2FC0, 'M', '鬲'),
+ (0x2FC1, 'M', '鬼'),
+ (0x2FC2, 'M', '魚'),
+ (0x2FC3, 'M', '鳥'),
+ (0x2FC4, 'M', '鹵'),
+ (0x2FC5, 'M', '鹿'),
+ (0x2FC6, 'M', '麥'),
+ (0x2FC7, 'M', '麻'),
+ (0x2FC8, 'M', '黃'),
+ (0x2FC9, 'M', '黍'),
+ (0x2FCA, 'M', '黑'),
+ (0x2FCB, 'M', '黹'),
+ (0x2FCC, 'M', '黽'),
+ (0x2FCD, 'M', '鼎'),
+ (0x2FCE, 'M', '鼓'),
+ (0x2FCF, 'M', '鼠'),
+ (0x2FD0, 'M', '鼻'),
+ (0x2FD1, 'M', '齊'),
+ (0x2FD2, 'M', '齒'),
+ (0x2FD3, 'M', '龍'),
+ (0x2FD4, 'M', '龜'),
+ (0x2FD5, 'M', '龠'),
+ (0x2FD6, 'X'),
+ (0x3000, '3', ' '),
+ (0x3001, 'V'),
+ (0x3002, 'M', '.'),
+ (0x3003, 'V'),
+ (0x3036, 'M', '〒'),
+ (0x3037, 'V'),
+ (0x3038, 'M', '十'),
+ ]
+
+def _seg_29() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x3039, 'M', '卄'),
+ (0x303A, 'M', '卅'),
+ (0x303B, 'V'),
+ (0x3040, 'X'),
+ (0x3041, 'V'),
+ (0x3097, 'X'),
+ (0x3099, 'V'),
+ (0x309B, '3', ' ゙'),
+ (0x309C, '3', ' ゚'),
+ (0x309D, 'V'),
+ (0x309F, 'M', 'より'),
+ (0x30A0, 'V'),
+ (0x30FF, 'M', 'コト'),
+ (0x3100, 'X'),
+ (0x3105, 'V'),
+ (0x3130, 'X'),
+ (0x3131, 'M', 'ᄀ'),
+ (0x3132, 'M', 'ᄁ'),
+ (0x3133, 'M', 'ᆪ'),
+ (0x3134, 'M', 'ᄂ'),
+ (0x3135, 'M', 'ᆬ'),
+ (0x3136, 'M', 'ᆭ'),
+ (0x3137, 'M', 'ᄃ'),
+ (0x3138, 'M', 'ᄄ'),
+ (0x3139, 'M', 'ᄅ'),
+ (0x313A, 'M', 'ᆰ'),
+ (0x313B, 'M', 'ᆱ'),
+ (0x313C, 'M', 'ᆲ'),
+ (0x313D, 'M', 'ᆳ'),
+ (0x313E, 'M', 'ᆴ'),
+ (0x313F, 'M', 'ᆵ'),
+ (0x3140, 'M', 'ᄚ'),
+ (0x3141, 'M', 'ᄆ'),
+ (0x3142, 'M', 'ᄇ'),
+ (0x3143, 'M', 'ᄈ'),
+ (0x3144, 'M', 'ᄡ'),
+ (0x3145, 'M', 'ᄉ'),
+ (0x3146, 'M', 'ᄊ'),
+ (0x3147, 'M', 'ᄋ'),
+ (0x3148, 'M', 'ᄌ'),
+ (0x3149, 'M', 'ᄍ'),
+ (0x314A, 'M', 'ᄎ'),
+ (0x314B, 'M', 'ᄏ'),
+ (0x314C, 'M', 'ᄐ'),
+ (0x314D, 'M', 'ᄑ'),
+ (0x314E, 'M', 'ᄒ'),
+ (0x314F, 'M', 'ᅡ'),
+ (0x3150, 'M', 'ᅢ'),
+ (0x3151, 'M', 'ᅣ'),
+ (0x3152, 'M', 'ᅤ'),
+ (0x3153, 'M', 'ᅥ'),
+ (0x3154, 'M', 'ᅦ'),
+ (0x3155, 'M', 'ᅧ'),
+ (0x3156, 'M', 'ᅨ'),
+ (0x3157, 'M', 'ᅩ'),
+ (0x3158, 'M', 'ᅪ'),
+ (0x3159, 'M', 'ᅫ'),
+ (0x315A, 'M', 'ᅬ'),
+ (0x315B, 'M', 'ᅭ'),
+ (0x315C, 'M', 'ᅮ'),
+ (0x315D, 'M', 'ᅯ'),
+ (0x315E, 'M', 'ᅰ'),
+ (0x315F, 'M', 'ᅱ'),
+ (0x3160, 'M', 'ᅲ'),
+ (0x3161, 'M', 'ᅳ'),
+ (0x3162, 'M', 'ᅴ'),
+ (0x3163, 'M', 'ᅵ'),
+ (0x3164, 'X'),
+ (0x3165, 'M', 'ᄔ'),
+ (0x3166, 'M', 'ᄕ'),
+ (0x3167, 'M', 'ᇇ'),
+ (0x3168, 'M', 'ᇈ'),
+ (0x3169, 'M', 'ᇌ'),
+ (0x316A, 'M', 'ᇎ'),
+ (0x316B, 'M', 'ᇓ'),
+ (0x316C, 'M', 'ᇗ'),
+ (0x316D, 'M', 'ᇙ'),
+ (0x316E, 'M', 'ᄜ'),
+ (0x316F, 'M', 'ᇝ'),
+ (0x3170, 'M', 'ᇟ'),
+ (0x3171, 'M', 'ᄝ'),
+ (0x3172, 'M', 'ᄞ'),
+ (0x3173, 'M', 'ᄠ'),
+ (0x3174, 'M', 'ᄢ'),
+ (0x3175, 'M', 'ᄣ'),
+ (0x3176, 'M', 'ᄧ'),
+ (0x3177, 'M', 'ᄩ'),
+ (0x3178, 'M', 'ᄫ'),
+ (0x3179, 'M', 'ᄬ'),
+ (0x317A, 'M', 'ᄭ'),
+ (0x317B, 'M', 'ᄮ'),
+ (0x317C, 'M', 'ᄯ'),
+ (0x317D, 'M', 'ᄲ'),
+ (0x317E, 'M', 'ᄶ'),
+ (0x317F, 'M', 'ᅀ'),
+ (0x3180, 'M', 'ᅇ'),
+ (0x3181, 'M', 'ᅌ'),
+ (0x3182, 'M', 'ᇱ'),
+ (0x3183, 'M', 'ᇲ'),
+ (0x3184, 'M', 'ᅗ'),
+ ]
+
+def _seg_30() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x3185, 'M', 'ᅘ'),
+ (0x3186, 'M', 'ᅙ'),
+ (0x3187, 'M', 'ᆄ'),
+ (0x3188, 'M', 'ᆅ'),
+ (0x3189, 'M', 'ᆈ'),
+ (0x318A, 'M', 'ᆑ'),
+ (0x318B, 'M', 'ᆒ'),
+ (0x318C, 'M', 'ᆔ'),
+ (0x318D, 'M', 'ᆞ'),
+ (0x318E, 'M', 'ᆡ'),
+ (0x318F, 'X'),
+ (0x3190, 'V'),
+ (0x3192, 'M', '一'),
+ (0x3193, 'M', '二'),
+ (0x3194, 'M', '三'),
+ (0x3195, 'M', '四'),
+ (0x3196, 'M', '上'),
+ (0x3197, 'M', '中'),
+ (0x3198, 'M', '下'),
+ (0x3199, 'M', '甲'),
+ (0x319A, 'M', '乙'),
+ (0x319B, 'M', '丙'),
+ (0x319C, 'M', '丁'),
+ (0x319D, 'M', '天'),
+ (0x319E, 'M', '地'),
+ (0x319F, 'M', '人'),
+ (0x31A0, 'V'),
+ (0x31E4, 'X'),
+ (0x31F0, 'V'),
+ (0x3200, '3', '(ᄀ)'),
+ (0x3201, '3', '(ᄂ)'),
+ (0x3202, '3', '(ᄃ)'),
+ (0x3203, '3', '(ᄅ)'),
+ (0x3204, '3', '(ᄆ)'),
+ (0x3205, '3', '(ᄇ)'),
+ (0x3206, '3', '(ᄉ)'),
+ (0x3207, '3', '(ᄋ)'),
+ (0x3208, '3', '(ᄌ)'),
+ (0x3209, '3', '(ᄎ)'),
+ (0x320A, '3', '(ᄏ)'),
+ (0x320B, '3', '(ᄐ)'),
+ (0x320C, '3', '(ᄑ)'),
+ (0x320D, '3', '(ᄒ)'),
+ (0x320E, '3', '(가)'),
+ (0x320F, '3', '(나)'),
+ (0x3210, '3', '(다)'),
+ (0x3211, '3', '(라)'),
+ (0x3212, '3', '(마)'),
+ (0x3213, '3', '(바)'),
+ (0x3214, '3', '(사)'),
+ (0x3215, '3', '(아)'),
+ (0x3216, '3', '(자)'),
+ (0x3217, '3', '(차)'),
+ (0x3218, '3', '(카)'),
+ (0x3219, '3', '(타)'),
+ (0x321A, '3', '(파)'),
+ (0x321B, '3', '(하)'),
+ (0x321C, '3', '(주)'),
+ (0x321D, '3', '(오전)'),
+ (0x321E, '3', '(오후)'),
+ (0x321F, 'X'),
+ (0x3220, '3', '(一)'),
+ (0x3221, '3', '(二)'),
+ (0x3222, '3', '(三)'),
+ (0x3223, '3', '(四)'),
+ (0x3224, '3', '(五)'),
+ (0x3225, '3', '(六)'),
+ (0x3226, '3', '(七)'),
+ (0x3227, '3', '(八)'),
+ (0x3228, '3', '(九)'),
+ (0x3229, '3', '(十)'),
+ (0x322A, '3', '(月)'),
+ (0x322B, '3', '(火)'),
+ (0x322C, '3', '(水)'),
+ (0x322D, '3', '(木)'),
+ (0x322E, '3', '(金)'),
+ (0x322F, '3', '(土)'),
+ (0x3230, '3', '(日)'),
+ (0x3231, '3', '(株)'),
+ (0x3232, '3', '(有)'),
+ (0x3233, '3', '(社)'),
+ (0x3234, '3', '(名)'),
+ (0x3235, '3', '(特)'),
+ (0x3236, '3', '(財)'),
+ (0x3237, '3', '(祝)'),
+ (0x3238, '3', '(労)'),
+ (0x3239, '3', '(代)'),
+ (0x323A, '3', '(呼)'),
+ (0x323B, '3', '(学)'),
+ (0x323C, '3', '(監)'),
+ (0x323D, '3', '(企)'),
+ (0x323E, '3', '(資)'),
+ (0x323F, '3', '(協)'),
+ (0x3240, '3', '(祭)'),
+ (0x3241, '3', '(休)'),
+ (0x3242, '3', '(自)'),
+ (0x3243, '3', '(至)'),
+ (0x3244, 'M', '問'),
+ (0x3245, 'M', '幼'),
+ (0x3246, 'M', '文'),
+ ]
+
+def _seg_31() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x3247, 'M', '箏'),
+ (0x3248, 'V'),
+ (0x3250, 'M', 'pte'),
+ (0x3251, 'M', '21'),
+ (0x3252, 'M', '22'),
+ (0x3253, 'M', '23'),
+ (0x3254, 'M', '24'),
+ (0x3255, 'M', '25'),
+ (0x3256, 'M', '26'),
+ (0x3257, 'M', '27'),
+ (0x3258, 'M', '28'),
+ (0x3259, 'M', '29'),
+ (0x325A, 'M', '30'),
+ (0x325B, 'M', '31'),
+ (0x325C, 'M', '32'),
+ (0x325D, 'M', '33'),
+ (0x325E, 'M', '34'),
+ (0x325F, 'M', '35'),
+ (0x3260, 'M', 'ᄀ'),
+ (0x3261, 'M', 'ᄂ'),
+ (0x3262, 'M', 'ᄃ'),
+ (0x3263, 'M', 'ᄅ'),
+ (0x3264, 'M', 'ᄆ'),
+ (0x3265, 'M', 'ᄇ'),
+ (0x3266, 'M', 'ᄉ'),
+ (0x3267, 'M', 'ᄋ'),
+ (0x3268, 'M', 'ᄌ'),
+ (0x3269, 'M', 'ᄎ'),
+ (0x326A, 'M', 'ᄏ'),
+ (0x326B, 'M', 'ᄐ'),
+ (0x326C, 'M', 'ᄑ'),
+ (0x326D, 'M', 'ᄒ'),
+ (0x326E, 'M', '가'),
+ (0x326F, 'M', '나'),
+ (0x3270, 'M', '다'),
+ (0x3271, 'M', '라'),
+ (0x3272, 'M', '마'),
+ (0x3273, 'M', '바'),
+ (0x3274, 'M', '사'),
+ (0x3275, 'M', '아'),
+ (0x3276, 'M', '자'),
+ (0x3277, 'M', '차'),
+ (0x3278, 'M', '카'),
+ (0x3279, 'M', '타'),
+ (0x327A, 'M', '파'),
+ (0x327B, 'M', '하'),
+ (0x327C, 'M', '참고'),
+ (0x327D, 'M', '주의'),
+ (0x327E, 'M', '우'),
+ (0x327F, 'V'),
+ (0x3280, 'M', '一'),
+ (0x3281, 'M', '二'),
+ (0x3282, 'M', '三'),
+ (0x3283, 'M', '四'),
+ (0x3284, 'M', '五'),
+ (0x3285, 'M', '六'),
+ (0x3286, 'M', '七'),
+ (0x3287, 'M', '八'),
+ (0x3288, 'M', '九'),
+ (0x3289, 'M', '十'),
+ (0x328A, 'M', '月'),
+ (0x328B, 'M', '火'),
+ (0x328C, 'M', '水'),
+ (0x328D, 'M', '木'),
+ (0x328E, 'M', '金'),
+ (0x328F, 'M', '土'),
+ (0x3290, 'M', '日'),
+ (0x3291, 'M', '株'),
+ (0x3292, 'M', '有'),
+ (0x3293, 'M', '社'),
+ (0x3294, 'M', '名'),
+ (0x3295, 'M', '特'),
+ (0x3296, 'M', '財'),
+ (0x3297, 'M', '祝'),
+ (0x3298, 'M', '労'),
+ (0x3299, 'M', '秘'),
+ (0x329A, 'M', '男'),
+ (0x329B, 'M', '女'),
+ (0x329C, 'M', '適'),
+ (0x329D, 'M', '優'),
+ (0x329E, 'M', '印'),
+ (0x329F, 'M', '注'),
+ (0x32A0, 'M', '項'),
+ (0x32A1, 'M', '休'),
+ (0x32A2, 'M', '写'),
+ (0x32A3, 'M', '正'),
+ (0x32A4, 'M', '上'),
+ (0x32A5, 'M', '中'),
+ (0x32A6, 'M', '下'),
+ (0x32A7, 'M', '左'),
+ (0x32A8, 'M', '右'),
+ (0x32A9, 'M', '医'),
+ (0x32AA, 'M', '宗'),
+ (0x32AB, 'M', '学'),
+ (0x32AC, 'M', '監'),
+ (0x32AD, 'M', '企'),
+ (0x32AE, 'M', '資'),
+ (0x32AF, 'M', '協'),
+ (0x32B0, 'M', '夜'),
+ (0x32B1, 'M', '36'),
+ ]
+
+def _seg_32() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x32B2, 'M', '37'),
+ (0x32B3, 'M', '38'),
+ (0x32B4, 'M', '39'),
+ (0x32B5, 'M', '40'),
+ (0x32B6, 'M', '41'),
+ (0x32B7, 'M', '42'),
+ (0x32B8, 'M', '43'),
+ (0x32B9, 'M', '44'),
+ (0x32BA, 'M', '45'),
+ (0x32BB, 'M', '46'),
+ (0x32BC, 'M', '47'),
+ (0x32BD, 'M', '48'),
+ (0x32BE, 'M', '49'),
+ (0x32BF, 'M', '50'),
+ (0x32C0, 'M', '1月'),
+ (0x32C1, 'M', '2月'),
+ (0x32C2, 'M', '3月'),
+ (0x32C3, 'M', '4月'),
+ (0x32C4, 'M', '5月'),
+ (0x32C5, 'M', '6月'),
+ (0x32C6, 'M', '7月'),
+ (0x32C7, 'M', '8月'),
+ (0x32C8, 'M', '9月'),
+ (0x32C9, 'M', '10月'),
+ (0x32CA, 'M', '11月'),
+ (0x32CB, 'M', '12月'),
+ (0x32CC, 'M', 'hg'),
+ (0x32CD, 'M', 'erg'),
+ (0x32CE, 'M', 'ev'),
+ (0x32CF, 'M', 'ltd'),
+ (0x32D0, 'M', 'ア'),
+ (0x32D1, 'M', 'イ'),
+ (0x32D2, 'M', 'ウ'),
+ (0x32D3, 'M', 'エ'),
+ (0x32D4, 'M', 'オ'),
+ (0x32D5, 'M', 'カ'),
+ (0x32D6, 'M', 'キ'),
+ (0x32D7, 'M', 'ク'),
+ (0x32D8, 'M', 'ケ'),
+ (0x32D9, 'M', 'コ'),
+ (0x32DA, 'M', 'サ'),
+ (0x32DB, 'M', 'シ'),
+ (0x32DC, 'M', 'ス'),
+ (0x32DD, 'M', 'セ'),
+ (0x32DE, 'M', 'ソ'),
+ (0x32DF, 'M', 'タ'),
+ (0x32E0, 'M', 'チ'),
+ (0x32E1, 'M', 'ツ'),
+ (0x32E2, 'M', 'テ'),
+ (0x32E3, 'M', 'ト'),
+ (0x32E4, 'M', 'ナ'),
+ (0x32E5, 'M', 'ニ'),
+ (0x32E6, 'M', 'ヌ'),
+ (0x32E7, 'M', 'ネ'),
+ (0x32E8, 'M', 'ノ'),
+ (0x32E9, 'M', 'ハ'),
+ (0x32EA, 'M', 'ヒ'),
+ (0x32EB, 'M', 'フ'),
+ (0x32EC, 'M', 'ヘ'),
+ (0x32ED, 'M', 'ホ'),
+ (0x32EE, 'M', 'マ'),
+ (0x32EF, 'M', 'ミ'),
+ (0x32F0, 'M', 'ム'),
+ (0x32F1, 'M', 'メ'),
+ (0x32F2, 'M', 'モ'),
+ (0x32F3, 'M', 'ヤ'),
+ (0x32F4, 'M', 'ユ'),
+ (0x32F5, 'M', 'ヨ'),
+ (0x32F6, 'M', 'ラ'),
+ (0x32F7, 'M', 'リ'),
+ (0x32F8, 'M', 'ル'),
+ (0x32F9, 'M', 'レ'),
+ (0x32FA, 'M', 'ロ'),
+ (0x32FB, 'M', 'ワ'),
+ (0x32FC, 'M', 'ヰ'),
+ (0x32FD, 'M', 'ヱ'),
+ (0x32FE, 'M', 'ヲ'),
+ (0x32FF, 'M', '令和'),
+ (0x3300, 'M', 'アパート'),
+ (0x3301, 'M', 'アルファ'),
+ (0x3302, 'M', 'アンペア'),
+ (0x3303, 'M', 'アール'),
+ (0x3304, 'M', 'イニング'),
+ (0x3305, 'M', 'インチ'),
+ (0x3306, 'M', 'ウォン'),
+ (0x3307, 'M', 'エスクード'),
+ (0x3308, 'M', 'エーカー'),
+ (0x3309, 'M', 'オンス'),
+ (0x330A, 'M', 'オーム'),
+ (0x330B, 'M', 'カイリ'),
+ (0x330C, 'M', 'カラット'),
+ (0x330D, 'M', 'カロリー'),
+ (0x330E, 'M', 'ガロン'),
+ (0x330F, 'M', 'ガンマ'),
+ (0x3310, 'M', 'ギガ'),
+ (0x3311, 'M', 'ギニー'),
+ (0x3312, 'M', 'キュリー'),
+ (0x3313, 'M', 'ギルダー'),
+ (0x3314, 'M', 'キロ'),
+ (0x3315, 'M', 'キログラム'),
+ ]
+
+def _seg_33() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x3316, 'M', 'キロメートル'),
+ (0x3317, 'M', 'キロワット'),
+ (0x3318, 'M', 'グラム'),
+ (0x3319, 'M', 'グラムトン'),
+ (0x331A, 'M', 'クルゼイロ'),
+ (0x331B, 'M', 'クローネ'),
+ (0x331C, 'M', 'ケース'),
+ (0x331D, 'M', 'コルナ'),
+ (0x331E, 'M', 'コーポ'),
+ (0x331F, 'M', 'サイクル'),
+ (0x3320, 'M', 'サンチーム'),
+ (0x3321, 'M', 'シリング'),
+ (0x3322, 'M', 'センチ'),
+ (0x3323, 'M', 'セント'),
+ (0x3324, 'M', 'ダース'),
+ (0x3325, 'M', 'デシ'),
+ (0x3326, 'M', 'ドル'),
+ (0x3327, 'M', 'トン'),
+ (0x3328, 'M', 'ナノ'),
+ (0x3329, 'M', 'ノット'),
+ (0x332A, 'M', 'ハイツ'),
+ (0x332B, 'M', 'パーセント'),
+ (0x332C, 'M', 'パーツ'),
+ (0x332D, 'M', 'バーレル'),
+ (0x332E, 'M', 'ピアストル'),
+ (0x332F, 'M', 'ピクル'),
+ (0x3330, 'M', 'ピコ'),
+ (0x3331, 'M', 'ビル'),
+ (0x3332, 'M', 'ファラッド'),
+ (0x3333, 'M', 'フィート'),
+ (0x3334, 'M', 'ブッシェル'),
+ (0x3335, 'M', 'フラン'),
+ (0x3336, 'M', 'ヘクタール'),
+ (0x3337, 'M', 'ペソ'),
+ (0x3338, 'M', 'ペニヒ'),
+ (0x3339, 'M', 'ヘルツ'),
+ (0x333A, 'M', 'ペンス'),
+ (0x333B, 'M', 'ページ'),
+ (0x333C, 'M', 'ベータ'),
+ (0x333D, 'M', 'ポイント'),
+ (0x333E, 'M', 'ボルト'),
+ (0x333F, 'M', 'ホン'),
+ (0x3340, 'M', 'ポンド'),
+ (0x3341, 'M', 'ホール'),
+ (0x3342, 'M', 'ホーン'),
+ (0x3343, 'M', 'マイクロ'),
+ (0x3344, 'M', 'マイル'),
+ (0x3345, 'M', 'マッハ'),
+ (0x3346, 'M', 'マルク'),
+ (0x3347, 'M', 'マンション'),
+ (0x3348, 'M', 'ミクロン'),
+ (0x3349, 'M', 'ミリ'),
+ (0x334A, 'M', 'ミリバール'),
+ (0x334B, 'M', 'メガ'),
+ (0x334C, 'M', 'メガトン'),
+ (0x334D, 'M', 'メートル'),
+ (0x334E, 'M', 'ヤード'),
+ (0x334F, 'M', 'ヤール'),
+ (0x3350, 'M', 'ユアン'),
+ (0x3351, 'M', 'リットル'),
+ (0x3352, 'M', 'リラ'),
+ (0x3353, 'M', 'ルピー'),
+ (0x3354, 'M', 'ルーブル'),
+ (0x3355, 'M', 'レム'),
+ (0x3356, 'M', 'レントゲン'),
+ (0x3357, 'M', 'ワット'),
+ (0x3358, 'M', '0点'),
+ (0x3359, 'M', '1点'),
+ (0x335A, 'M', '2点'),
+ (0x335B, 'M', '3点'),
+ (0x335C, 'M', '4点'),
+ (0x335D, 'M', '5点'),
+ (0x335E, 'M', '6点'),
+ (0x335F, 'M', '7点'),
+ (0x3360, 'M', '8点'),
+ (0x3361, 'M', '9点'),
+ (0x3362, 'M', '10点'),
+ (0x3363, 'M', '11点'),
+ (0x3364, 'M', '12点'),
+ (0x3365, 'M', '13点'),
+ (0x3366, 'M', '14点'),
+ (0x3367, 'M', '15点'),
+ (0x3368, 'M', '16点'),
+ (0x3369, 'M', '17点'),
+ (0x336A, 'M', '18点'),
+ (0x336B, 'M', '19点'),
+ (0x336C, 'M', '20点'),
+ (0x336D, 'M', '21点'),
+ (0x336E, 'M', '22点'),
+ (0x336F, 'M', '23点'),
+ (0x3370, 'M', '24点'),
+ (0x3371, 'M', 'hpa'),
+ (0x3372, 'M', 'da'),
+ (0x3373, 'M', 'au'),
+ (0x3374, 'M', 'bar'),
+ (0x3375, 'M', 'ov'),
+ (0x3376, 'M', 'pc'),
+ (0x3377, 'M', 'dm'),
+ (0x3378, 'M', 'dm2'),
+ (0x3379, 'M', 'dm3'),
+ ]
+
+def _seg_34() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x337A, 'M', 'iu'),
+ (0x337B, 'M', '平成'),
+ (0x337C, 'M', '昭和'),
+ (0x337D, 'M', '大正'),
+ (0x337E, 'M', '明治'),
+ (0x337F, 'M', '株式会社'),
+ (0x3380, 'M', 'pa'),
+ (0x3381, 'M', 'na'),
+ (0x3382, 'M', 'μa'),
+ (0x3383, 'M', 'ma'),
+ (0x3384, 'M', 'ka'),
+ (0x3385, 'M', 'kb'),
+ (0x3386, 'M', 'mb'),
+ (0x3387, 'M', 'gb'),
+ (0x3388, 'M', 'cal'),
+ (0x3389, 'M', 'kcal'),
+ (0x338A, 'M', 'pf'),
+ (0x338B, 'M', 'nf'),
+ (0x338C, 'M', 'μf'),
+ (0x338D, 'M', 'μg'),
+ (0x338E, 'M', 'mg'),
+ (0x338F, 'M', 'kg'),
+ (0x3390, 'M', 'hz'),
+ (0x3391, 'M', 'khz'),
+ (0x3392, 'M', 'mhz'),
+ (0x3393, 'M', 'ghz'),
+ (0x3394, 'M', 'thz'),
+ (0x3395, 'M', 'μl'),
+ (0x3396, 'M', 'ml'),
+ (0x3397, 'M', 'dl'),
+ (0x3398, 'M', 'kl'),
+ (0x3399, 'M', 'fm'),
+ (0x339A, 'M', 'nm'),
+ (0x339B, 'M', 'μm'),
+ (0x339C, 'M', 'mm'),
+ (0x339D, 'M', 'cm'),
+ (0x339E, 'M', 'km'),
+ (0x339F, 'M', 'mm2'),
+ (0x33A0, 'M', 'cm2'),
+ (0x33A1, 'M', 'm2'),
+ (0x33A2, 'M', 'km2'),
+ (0x33A3, 'M', 'mm3'),
+ (0x33A4, 'M', 'cm3'),
+ (0x33A5, 'M', 'm3'),
+ (0x33A6, 'M', 'km3'),
+ (0x33A7, 'M', 'm∕s'),
+ (0x33A8, 'M', 'm∕s2'),
+ (0x33A9, 'M', 'pa'),
+ (0x33AA, 'M', 'kpa'),
+ (0x33AB, 'M', 'mpa'),
+ (0x33AC, 'M', 'gpa'),
+ (0x33AD, 'M', 'rad'),
+ (0x33AE, 'M', 'rad∕s'),
+ (0x33AF, 'M', 'rad∕s2'),
+ (0x33B0, 'M', 'ps'),
+ (0x33B1, 'M', 'ns'),
+ (0x33B2, 'M', 'μs'),
+ (0x33B3, 'M', 'ms'),
+ (0x33B4, 'M', 'pv'),
+ (0x33B5, 'M', 'nv'),
+ (0x33B6, 'M', 'μv'),
+ (0x33B7, 'M', 'mv'),
+ (0x33B8, 'M', 'kv'),
+ (0x33B9, 'M', 'mv'),
+ (0x33BA, 'M', 'pw'),
+ (0x33BB, 'M', 'nw'),
+ (0x33BC, 'M', 'μw'),
+ (0x33BD, 'M', 'mw'),
+ (0x33BE, 'M', 'kw'),
+ (0x33BF, 'M', 'mw'),
+ (0x33C0, 'M', 'kω'),
+ (0x33C1, 'M', 'mω'),
+ (0x33C2, 'X'),
+ (0x33C3, 'M', 'bq'),
+ (0x33C4, 'M', 'cc'),
+ (0x33C5, 'M', 'cd'),
+ (0x33C6, 'M', 'c∕kg'),
+ (0x33C7, 'X'),
+ (0x33C8, 'M', 'db'),
+ (0x33C9, 'M', 'gy'),
+ (0x33CA, 'M', 'ha'),
+ (0x33CB, 'M', 'hp'),
+ (0x33CC, 'M', 'in'),
+ (0x33CD, 'M', 'kk'),
+ (0x33CE, 'M', 'km'),
+ (0x33CF, 'M', 'kt'),
+ (0x33D0, 'M', 'lm'),
+ (0x33D1, 'M', 'ln'),
+ (0x33D2, 'M', 'log'),
+ (0x33D3, 'M', 'lx'),
+ (0x33D4, 'M', 'mb'),
+ (0x33D5, 'M', 'mil'),
+ (0x33D6, 'M', 'mol'),
+ (0x33D7, 'M', 'ph'),
+ (0x33D8, 'X'),
+ (0x33D9, 'M', 'ppm'),
+ (0x33DA, 'M', 'pr'),
+ (0x33DB, 'M', 'sr'),
+ (0x33DC, 'M', 'sv'),
+ (0x33DD, 'M', 'wb'),
+ ]
+
+def _seg_35() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x33DE, 'M', 'v∕m'),
+ (0x33DF, 'M', 'a∕m'),
+ (0x33E0, 'M', '1日'),
+ (0x33E1, 'M', '2日'),
+ (0x33E2, 'M', '3日'),
+ (0x33E3, 'M', '4日'),
+ (0x33E4, 'M', '5日'),
+ (0x33E5, 'M', '6日'),
+ (0x33E6, 'M', '7日'),
+ (0x33E7, 'M', '8日'),
+ (0x33E8, 'M', '9日'),
+ (0x33E9, 'M', '10日'),
+ (0x33EA, 'M', '11日'),
+ (0x33EB, 'M', '12日'),
+ (0x33EC, 'M', '13日'),
+ (0x33ED, 'M', '14日'),
+ (0x33EE, 'M', '15日'),
+ (0x33EF, 'M', '16日'),
+ (0x33F0, 'M', '17日'),
+ (0x33F1, 'M', '18日'),
+ (0x33F2, 'M', '19日'),
+ (0x33F3, 'M', '20日'),
+ (0x33F4, 'M', '21日'),
+ (0x33F5, 'M', '22日'),
+ (0x33F6, 'M', '23日'),
+ (0x33F7, 'M', '24日'),
+ (0x33F8, 'M', '25日'),
+ (0x33F9, 'M', '26日'),
+ (0x33FA, 'M', '27日'),
+ (0x33FB, 'M', '28日'),
+ (0x33FC, 'M', '29日'),
+ (0x33FD, 'M', '30日'),
+ (0x33FE, 'M', '31日'),
+ (0x33FF, 'M', 'gal'),
+ (0x3400, 'V'),
+ (0xA48D, 'X'),
+ (0xA490, 'V'),
+ (0xA4C7, 'X'),
+ (0xA4D0, 'V'),
+ (0xA62C, 'X'),
+ (0xA640, 'M', 'ꙁ'),
+ (0xA641, 'V'),
+ (0xA642, 'M', 'ꙃ'),
+ (0xA643, 'V'),
+ (0xA644, 'M', 'ꙅ'),
+ (0xA645, 'V'),
+ (0xA646, 'M', 'ꙇ'),
+ (0xA647, 'V'),
+ (0xA648, 'M', 'ꙉ'),
+ (0xA649, 'V'),
+ (0xA64A, 'M', 'ꙋ'),
+ (0xA64B, 'V'),
+ (0xA64C, 'M', 'ꙍ'),
+ (0xA64D, 'V'),
+ (0xA64E, 'M', 'ꙏ'),
+ (0xA64F, 'V'),
+ (0xA650, 'M', 'ꙑ'),
+ (0xA651, 'V'),
+ (0xA652, 'M', 'ꙓ'),
+ (0xA653, 'V'),
+ (0xA654, 'M', 'ꙕ'),
+ (0xA655, 'V'),
+ (0xA656, 'M', 'ꙗ'),
+ (0xA657, 'V'),
+ (0xA658, 'M', 'ꙙ'),
+ (0xA659, 'V'),
+ (0xA65A, 'M', 'ꙛ'),
+ (0xA65B, 'V'),
+ (0xA65C, 'M', 'ꙝ'),
+ (0xA65D, 'V'),
+ (0xA65E, 'M', 'ꙟ'),
+ (0xA65F, 'V'),
+ (0xA660, 'M', 'ꙡ'),
+ (0xA661, 'V'),
+ (0xA662, 'M', 'ꙣ'),
+ (0xA663, 'V'),
+ (0xA664, 'M', 'ꙥ'),
+ (0xA665, 'V'),
+ (0xA666, 'M', 'ꙧ'),
+ (0xA667, 'V'),
+ (0xA668, 'M', 'ꙩ'),
+ (0xA669, 'V'),
+ (0xA66A, 'M', 'ꙫ'),
+ (0xA66B, 'V'),
+ (0xA66C, 'M', 'ꙭ'),
+ (0xA66D, 'V'),
+ (0xA680, 'M', 'ꚁ'),
+ (0xA681, 'V'),
+ (0xA682, 'M', 'ꚃ'),
+ (0xA683, 'V'),
+ (0xA684, 'M', 'ꚅ'),
+ (0xA685, 'V'),
+ (0xA686, 'M', 'ꚇ'),
+ (0xA687, 'V'),
+ (0xA688, 'M', 'ꚉ'),
+ (0xA689, 'V'),
+ (0xA68A, 'M', 'ꚋ'),
+ (0xA68B, 'V'),
+ (0xA68C, 'M', 'ꚍ'),
+ (0xA68D, 'V'),
+ ]
+
+def _seg_36() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xA68E, 'M', 'ꚏ'),
+ (0xA68F, 'V'),
+ (0xA690, 'M', 'ꚑ'),
+ (0xA691, 'V'),
+ (0xA692, 'M', 'ꚓ'),
+ (0xA693, 'V'),
+ (0xA694, 'M', 'ꚕ'),
+ (0xA695, 'V'),
+ (0xA696, 'M', 'ꚗ'),
+ (0xA697, 'V'),
+ (0xA698, 'M', 'ꚙ'),
+ (0xA699, 'V'),
+ (0xA69A, 'M', 'ꚛ'),
+ (0xA69B, 'V'),
+ (0xA69C, 'M', 'ъ'),
+ (0xA69D, 'M', 'ь'),
+ (0xA69E, 'V'),
+ (0xA6F8, 'X'),
+ (0xA700, 'V'),
+ (0xA722, 'M', 'ꜣ'),
+ (0xA723, 'V'),
+ (0xA724, 'M', 'ꜥ'),
+ (0xA725, 'V'),
+ (0xA726, 'M', 'ꜧ'),
+ (0xA727, 'V'),
+ (0xA728, 'M', 'ꜩ'),
+ (0xA729, 'V'),
+ (0xA72A, 'M', 'ꜫ'),
+ (0xA72B, 'V'),
+ (0xA72C, 'M', 'ꜭ'),
+ (0xA72D, 'V'),
+ (0xA72E, 'M', 'ꜯ'),
+ (0xA72F, 'V'),
+ (0xA732, 'M', 'ꜳ'),
+ (0xA733, 'V'),
+ (0xA734, 'M', 'ꜵ'),
+ (0xA735, 'V'),
+ (0xA736, 'M', 'ꜷ'),
+ (0xA737, 'V'),
+ (0xA738, 'M', 'ꜹ'),
+ (0xA739, 'V'),
+ (0xA73A, 'M', 'ꜻ'),
+ (0xA73B, 'V'),
+ (0xA73C, 'M', 'ꜽ'),
+ (0xA73D, 'V'),
+ (0xA73E, 'M', 'ꜿ'),
+ (0xA73F, 'V'),
+ (0xA740, 'M', 'ꝁ'),
+ (0xA741, 'V'),
+ (0xA742, 'M', 'ꝃ'),
+ (0xA743, 'V'),
+ (0xA744, 'M', 'ꝅ'),
+ (0xA745, 'V'),
+ (0xA746, 'M', 'ꝇ'),
+ (0xA747, 'V'),
+ (0xA748, 'M', 'ꝉ'),
+ (0xA749, 'V'),
+ (0xA74A, 'M', 'ꝋ'),
+ (0xA74B, 'V'),
+ (0xA74C, 'M', 'ꝍ'),
+ (0xA74D, 'V'),
+ (0xA74E, 'M', 'ꝏ'),
+ (0xA74F, 'V'),
+ (0xA750, 'M', 'ꝑ'),
+ (0xA751, 'V'),
+ (0xA752, 'M', 'ꝓ'),
+ (0xA753, 'V'),
+ (0xA754, 'M', 'ꝕ'),
+ (0xA755, 'V'),
+ (0xA756, 'M', 'ꝗ'),
+ (0xA757, 'V'),
+ (0xA758, 'M', 'ꝙ'),
+ (0xA759, 'V'),
+ (0xA75A, 'M', 'ꝛ'),
+ (0xA75B, 'V'),
+ (0xA75C, 'M', 'ꝝ'),
+ (0xA75D, 'V'),
+ (0xA75E, 'M', 'ꝟ'),
+ (0xA75F, 'V'),
+ (0xA760, 'M', 'ꝡ'),
+ (0xA761, 'V'),
+ (0xA762, 'M', 'ꝣ'),
+ (0xA763, 'V'),
+ (0xA764, 'M', 'ꝥ'),
+ (0xA765, 'V'),
+ (0xA766, 'M', 'ꝧ'),
+ (0xA767, 'V'),
+ (0xA768, 'M', 'ꝩ'),
+ (0xA769, 'V'),
+ (0xA76A, 'M', 'ꝫ'),
+ (0xA76B, 'V'),
+ (0xA76C, 'M', 'ꝭ'),
+ (0xA76D, 'V'),
+ (0xA76E, 'M', 'ꝯ'),
+ (0xA76F, 'V'),
+ (0xA770, 'M', 'ꝯ'),
+ (0xA771, 'V'),
+ (0xA779, 'M', 'ꝺ'),
+ (0xA77A, 'V'),
+ (0xA77B, 'M', 'ꝼ'),
+ ]
+
+def _seg_37() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xA77C, 'V'),
+ (0xA77D, 'M', 'ᵹ'),
+ (0xA77E, 'M', 'ꝿ'),
+ (0xA77F, 'V'),
+ (0xA780, 'M', 'ꞁ'),
+ (0xA781, 'V'),
+ (0xA782, 'M', 'ꞃ'),
+ (0xA783, 'V'),
+ (0xA784, 'M', 'ꞅ'),
+ (0xA785, 'V'),
+ (0xA786, 'M', 'ꞇ'),
+ (0xA787, 'V'),
+ (0xA78B, 'M', 'ꞌ'),
+ (0xA78C, 'V'),
+ (0xA78D, 'M', 'ɥ'),
+ (0xA78E, 'V'),
+ (0xA790, 'M', 'ꞑ'),
+ (0xA791, 'V'),
+ (0xA792, 'M', 'ꞓ'),
+ (0xA793, 'V'),
+ (0xA796, 'M', 'ꞗ'),
+ (0xA797, 'V'),
+ (0xA798, 'M', 'ꞙ'),
+ (0xA799, 'V'),
+ (0xA79A, 'M', 'ꞛ'),
+ (0xA79B, 'V'),
+ (0xA79C, 'M', 'ꞝ'),
+ (0xA79D, 'V'),
+ (0xA79E, 'M', 'ꞟ'),
+ (0xA79F, 'V'),
+ (0xA7A0, 'M', 'ꞡ'),
+ (0xA7A1, 'V'),
+ (0xA7A2, 'M', 'ꞣ'),
+ (0xA7A3, 'V'),
+ (0xA7A4, 'M', 'ꞥ'),
+ (0xA7A5, 'V'),
+ (0xA7A6, 'M', 'ꞧ'),
+ (0xA7A7, 'V'),
+ (0xA7A8, 'M', 'ꞩ'),
+ (0xA7A9, 'V'),
+ (0xA7AA, 'M', 'ɦ'),
+ (0xA7AB, 'M', 'ɜ'),
+ (0xA7AC, 'M', 'ɡ'),
+ (0xA7AD, 'M', 'ɬ'),
+ (0xA7AE, 'M', 'ɪ'),
+ (0xA7AF, 'V'),
+ (0xA7B0, 'M', 'ʞ'),
+ (0xA7B1, 'M', 'ʇ'),
+ (0xA7B2, 'M', 'ʝ'),
+ (0xA7B3, 'M', 'ꭓ'),
+ (0xA7B4, 'M', 'ꞵ'),
+ (0xA7B5, 'V'),
+ (0xA7B6, 'M', 'ꞷ'),
+ (0xA7B7, 'V'),
+ (0xA7B8, 'M', 'ꞹ'),
+ (0xA7B9, 'V'),
+ (0xA7BA, 'M', 'ꞻ'),
+ (0xA7BB, 'V'),
+ (0xA7BC, 'M', 'ꞽ'),
+ (0xA7BD, 'V'),
+ (0xA7BE, 'M', 'ꞿ'),
+ (0xA7BF, 'V'),
+ (0xA7C0, 'M', 'ꟁ'),
+ (0xA7C1, 'V'),
+ (0xA7C2, 'M', 'ꟃ'),
+ (0xA7C3, 'V'),
+ (0xA7C4, 'M', 'ꞔ'),
+ (0xA7C5, 'M', 'ʂ'),
+ (0xA7C6, 'M', 'ᶎ'),
+ (0xA7C7, 'M', 'ꟈ'),
+ (0xA7C8, 'V'),
+ (0xA7C9, 'M', 'ꟊ'),
+ (0xA7CA, 'V'),
+ (0xA7CB, 'X'),
+ (0xA7D0, 'M', 'ꟑ'),
+ (0xA7D1, 'V'),
+ (0xA7D2, 'X'),
+ (0xA7D3, 'V'),
+ (0xA7D4, 'X'),
+ (0xA7D5, 'V'),
+ (0xA7D6, 'M', 'ꟗ'),
+ (0xA7D7, 'V'),
+ (0xA7D8, 'M', 'ꟙ'),
+ (0xA7D9, 'V'),
+ (0xA7DA, 'X'),
+ (0xA7F2, 'M', 'c'),
+ (0xA7F3, 'M', 'f'),
+ (0xA7F4, 'M', 'q'),
+ (0xA7F5, 'M', 'ꟶ'),
+ (0xA7F6, 'V'),
+ (0xA7F8, 'M', 'ħ'),
+ (0xA7F9, 'M', 'œ'),
+ (0xA7FA, 'V'),
+ (0xA82D, 'X'),
+ (0xA830, 'V'),
+ (0xA83A, 'X'),
+ (0xA840, 'V'),
+ (0xA878, 'X'),
+ (0xA880, 'V'),
+ (0xA8C6, 'X'),
+ ]
+
+def _seg_38() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xA8CE, 'V'),
+ (0xA8DA, 'X'),
+ (0xA8E0, 'V'),
+ (0xA954, 'X'),
+ (0xA95F, 'V'),
+ (0xA97D, 'X'),
+ (0xA980, 'V'),
+ (0xA9CE, 'X'),
+ (0xA9CF, 'V'),
+ (0xA9DA, 'X'),
+ (0xA9DE, 'V'),
+ (0xA9FF, 'X'),
+ (0xAA00, 'V'),
+ (0xAA37, 'X'),
+ (0xAA40, 'V'),
+ (0xAA4E, 'X'),
+ (0xAA50, 'V'),
+ (0xAA5A, 'X'),
+ (0xAA5C, 'V'),
+ (0xAAC3, 'X'),
+ (0xAADB, 'V'),
+ (0xAAF7, 'X'),
+ (0xAB01, 'V'),
+ (0xAB07, 'X'),
+ (0xAB09, 'V'),
+ (0xAB0F, 'X'),
+ (0xAB11, 'V'),
+ (0xAB17, 'X'),
+ (0xAB20, 'V'),
+ (0xAB27, 'X'),
+ (0xAB28, 'V'),
+ (0xAB2F, 'X'),
+ (0xAB30, 'V'),
+ (0xAB5C, 'M', 'ꜧ'),
+ (0xAB5D, 'M', 'ꬷ'),
+ (0xAB5E, 'M', 'ɫ'),
+ (0xAB5F, 'M', 'ꭒ'),
+ (0xAB60, 'V'),
+ (0xAB69, 'M', 'ʍ'),
+ (0xAB6A, 'V'),
+ (0xAB6C, 'X'),
+ (0xAB70, 'M', 'Ꭰ'),
+ (0xAB71, 'M', 'Ꭱ'),
+ (0xAB72, 'M', 'Ꭲ'),
+ (0xAB73, 'M', 'Ꭳ'),
+ (0xAB74, 'M', 'Ꭴ'),
+ (0xAB75, 'M', 'Ꭵ'),
+ (0xAB76, 'M', 'Ꭶ'),
+ (0xAB77, 'M', 'Ꭷ'),
+ (0xAB78, 'M', 'Ꭸ'),
+ (0xAB79, 'M', 'Ꭹ'),
+ (0xAB7A, 'M', 'Ꭺ'),
+ (0xAB7B, 'M', 'Ꭻ'),
+ (0xAB7C, 'M', 'Ꭼ'),
+ (0xAB7D, 'M', 'Ꭽ'),
+ (0xAB7E, 'M', 'Ꭾ'),
+ (0xAB7F, 'M', 'Ꭿ'),
+ (0xAB80, 'M', 'Ꮀ'),
+ (0xAB81, 'M', 'Ꮁ'),
+ (0xAB82, 'M', 'Ꮂ'),
+ (0xAB83, 'M', 'Ꮃ'),
+ (0xAB84, 'M', 'Ꮄ'),
+ (0xAB85, 'M', 'Ꮅ'),
+ (0xAB86, 'M', 'Ꮆ'),
+ (0xAB87, 'M', 'Ꮇ'),
+ (0xAB88, 'M', 'Ꮈ'),
+ (0xAB89, 'M', 'Ꮉ'),
+ (0xAB8A, 'M', 'Ꮊ'),
+ (0xAB8B, 'M', 'Ꮋ'),
+ (0xAB8C, 'M', 'Ꮌ'),
+ (0xAB8D, 'M', 'Ꮍ'),
+ (0xAB8E, 'M', 'Ꮎ'),
+ (0xAB8F, 'M', 'Ꮏ'),
+ (0xAB90, 'M', 'Ꮐ'),
+ (0xAB91, 'M', 'Ꮑ'),
+ (0xAB92, 'M', 'Ꮒ'),
+ (0xAB93, 'M', 'Ꮓ'),
+ (0xAB94, 'M', 'Ꮔ'),
+ (0xAB95, 'M', 'Ꮕ'),
+ (0xAB96, 'M', 'Ꮖ'),
+ (0xAB97, 'M', 'Ꮗ'),
+ (0xAB98, 'M', 'Ꮘ'),
+ (0xAB99, 'M', 'Ꮙ'),
+ (0xAB9A, 'M', 'Ꮚ'),
+ (0xAB9B, 'M', 'Ꮛ'),
+ (0xAB9C, 'M', 'Ꮜ'),
+ (0xAB9D, 'M', 'Ꮝ'),
+ (0xAB9E, 'M', 'Ꮞ'),
+ (0xAB9F, 'M', 'Ꮟ'),
+ (0xABA0, 'M', 'Ꮠ'),
+ (0xABA1, 'M', 'Ꮡ'),
+ (0xABA2, 'M', 'Ꮢ'),
+ (0xABA3, 'M', 'Ꮣ'),
+ (0xABA4, 'M', 'Ꮤ'),
+ (0xABA5, 'M', 'Ꮥ'),
+ (0xABA6, 'M', 'Ꮦ'),
+ (0xABA7, 'M', 'Ꮧ'),
+ (0xABA8, 'M', 'Ꮨ'),
+ (0xABA9, 'M', 'Ꮩ'),
+ (0xABAA, 'M', 'Ꮪ'),
+ ]
+
+def _seg_39() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xABAB, 'M', 'Ꮫ'),
+ (0xABAC, 'M', 'Ꮬ'),
+ (0xABAD, 'M', 'Ꮭ'),
+ (0xABAE, 'M', 'Ꮮ'),
+ (0xABAF, 'M', 'Ꮯ'),
+ (0xABB0, 'M', 'Ꮰ'),
+ (0xABB1, 'M', 'Ꮱ'),
+ (0xABB2, 'M', 'Ꮲ'),
+ (0xABB3, 'M', 'Ꮳ'),
+ (0xABB4, 'M', 'Ꮴ'),
+ (0xABB5, 'M', 'Ꮵ'),
+ (0xABB6, 'M', 'Ꮶ'),
+ (0xABB7, 'M', 'Ꮷ'),
+ (0xABB8, 'M', 'Ꮸ'),
+ (0xABB9, 'M', 'Ꮹ'),
+ (0xABBA, 'M', 'Ꮺ'),
+ (0xABBB, 'M', 'Ꮻ'),
+ (0xABBC, 'M', 'Ꮼ'),
+ (0xABBD, 'M', 'Ꮽ'),
+ (0xABBE, 'M', 'Ꮾ'),
+ (0xABBF, 'M', 'Ꮿ'),
+ (0xABC0, 'V'),
+ (0xABEE, 'X'),
+ (0xABF0, 'V'),
+ (0xABFA, 'X'),
+ (0xAC00, 'V'),
+ (0xD7A4, 'X'),
+ (0xD7B0, 'V'),
+ (0xD7C7, 'X'),
+ (0xD7CB, 'V'),
+ (0xD7FC, 'X'),
+ (0xF900, 'M', '豈'),
+ (0xF901, 'M', '更'),
+ (0xF902, 'M', '車'),
+ (0xF903, 'M', '賈'),
+ (0xF904, 'M', '滑'),
+ (0xF905, 'M', '串'),
+ (0xF906, 'M', '句'),
+ (0xF907, 'M', '龜'),
+ (0xF909, 'M', '契'),
+ (0xF90A, 'M', '金'),
+ (0xF90B, 'M', '喇'),
+ (0xF90C, 'M', '奈'),
+ (0xF90D, 'M', '懶'),
+ (0xF90E, 'M', '癩'),
+ (0xF90F, 'M', '羅'),
+ (0xF910, 'M', '蘿'),
+ (0xF911, 'M', '螺'),
+ (0xF912, 'M', '裸'),
+ (0xF913, 'M', '邏'),
+ (0xF914, 'M', '樂'),
+ (0xF915, 'M', '洛'),
+ (0xF916, 'M', '烙'),
+ (0xF917, 'M', '珞'),
+ (0xF918, 'M', '落'),
+ (0xF919, 'M', '酪'),
+ (0xF91A, 'M', '駱'),
+ (0xF91B, 'M', '亂'),
+ (0xF91C, 'M', '卵'),
+ (0xF91D, 'M', '欄'),
+ (0xF91E, 'M', '爛'),
+ (0xF91F, 'M', '蘭'),
+ (0xF920, 'M', '鸞'),
+ (0xF921, 'M', '嵐'),
+ (0xF922, 'M', '濫'),
+ (0xF923, 'M', '藍'),
+ (0xF924, 'M', '襤'),
+ (0xF925, 'M', '拉'),
+ (0xF926, 'M', '臘'),
+ (0xF927, 'M', '蠟'),
+ (0xF928, 'M', '廊'),
+ (0xF929, 'M', '朗'),
+ (0xF92A, 'M', '浪'),
+ (0xF92B, 'M', '狼'),
+ (0xF92C, 'M', '郎'),
+ (0xF92D, 'M', '來'),
+ (0xF92E, 'M', '冷'),
+ (0xF92F, 'M', '勞'),
+ (0xF930, 'M', '擄'),
+ (0xF931, 'M', '櫓'),
+ (0xF932, 'M', '爐'),
+ (0xF933, 'M', '盧'),
+ (0xF934, 'M', '老'),
+ (0xF935, 'M', '蘆'),
+ (0xF936, 'M', '虜'),
+ (0xF937, 'M', '路'),
+ (0xF938, 'M', '露'),
+ (0xF939, 'M', '魯'),
+ (0xF93A, 'M', '鷺'),
+ (0xF93B, 'M', '碌'),
+ (0xF93C, 'M', '祿'),
+ (0xF93D, 'M', '綠'),
+ (0xF93E, 'M', '菉'),
+ (0xF93F, 'M', '錄'),
+ (0xF940, 'M', '鹿'),
+ (0xF941, 'M', '論'),
+ (0xF942, 'M', '壟'),
+ (0xF943, 'M', '弄'),
+ (0xF944, 'M', '籠'),
+ (0xF945, 'M', '聾'),
+ ]
+
+def _seg_40() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xF946, 'M', '牢'),
+ (0xF947, 'M', '磊'),
+ (0xF948, 'M', '賂'),
+ (0xF949, 'M', '雷'),
+ (0xF94A, 'M', '壘'),
+ (0xF94B, 'M', '屢'),
+ (0xF94C, 'M', '樓'),
+ (0xF94D, 'M', '淚'),
+ (0xF94E, 'M', '漏'),
+ (0xF94F, 'M', '累'),
+ (0xF950, 'M', '縷'),
+ (0xF951, 'M', '陋'),
+ (0xF952, 'M', '勒'),
+ (0xF953, 'M', '肋'),
+ (0xF954, 'M', '凜'),
+ (0xF955, 'M', '凌'),
+ (0xF956, 'M', '稜'),
+ (0xF957, 'M', '綾'),
+ (0xF958, 'M', '菱'),
+ (0xF959, 'M', '陵'),
+ (0xF95A, 'M', '讀'),
+ (0xF95B, 'M', '拏'),
+ (0xF95C, 'M', '樂'),
+ (0xF95D, 'M', '諾'),
+ (0xF95E, 'M', '丹'),
+ (0xF95F, 'M', '寧'),
+ (0xF960, 'M', '怒'),
+ (0xF961, 'M', '率'),
+ (0xF962, 'M', '異'),
+ (0xF963, 'M', '北'),
+ (0xF964, 'M', '磻'),
+ (0xF965, 'M', '便'),
+ (0xF966, 'M', '復'),
+ (0xF967, 'M', '不'),
+ (0xF968, 'M', '泌'),
+ (0xF969, 'M', '數'),
+ (0xF96A, 'M', '索'),
+ (0xF96B, 'M', '參'),
+ (0xF96C, 'M', '塞'),
+ (0xF96D, 'M', '省'),
+ (0xF96E, 'M', '葉'),
+ (0xF96F, 'M', '說'),
+ (0xF970, 'M', '殺'),
+ (0xF971, 'M', '辰'),
+ (0xF972, 'M', '沈'),
+ (0xF973, 'M', '拾'),
+ (0xF974, 'M', '若'),
+ (0xF975, 'M', '掠'),
+ (0xF976, 'M', '略'),
+ (0xF977, 'M', '亮'),
+ (0xF978, 'M', '兩'),
+ (0xF979, 'M', '凉'),
+ (0xF97A, 'M', '梁'),
+ (0xF97B, 'M', '糧'),
+ (0xF97C, 'M', '良'),
+ (0xF97D, 'M', '諒'),
+ (0xF97E, 'M', '量'),
+ (0xF97F, 'M', '勵'),
+ (0xF980, 'M', '呂'),
+ (0xF981, 'M', '女'),
+ (0xF982, 'M', '廬'),
+ (0xF983, 'M', '旅'),
+ (0xF984, 'M', '濾'),
+ (0xF985, 'M', '礪'),
+ (0xF986, 'M', '閭'),
+ (0xF987, 'M', '驪'),
+ (0xF988, 'M', '麗'),
+ (0xF989, 'M', '黎'),
+ (0xF98A, 'M', '力'),
+ (0xF98B, 'M', '曆'),
+ (0xF98C, 'M', '歷'),
+ (0xF98D, 'M', '轢'),
+ (0xF98E, 'M', '年'),
+ (0xF98F, 'M', '憐'),
+ (0xF990, 'M', '戀'),
+ (0xF991, 'M', '撚'),
+ (0xF992, 'M', '漣'),
+ (0xF993, 'M', '煉'),
+ (0xF994, 'M', '璉'),
+ (0xF995, 'M', '秊'),
+ (0xF996, 'M', '練'),
+ (0xF997, 'M', '聯'),
+ (0xF998, 'M', '輦'),
+ (0xF999, 'M', '蓮'),
+ (0xF99A, 'M', '連'),
+ (0xF99B, 'M', '鍊'),
+ (0xF99C, 'M', '列'),
+ (0xF99D, 'M', '劣'),
+ (0xF99E, 'M', '咽'),
+ (0xF99F, 'M', '烈'),
+ (0xF9A0, 'M', '裂'),
+ (0xF9A1, 'M', '說'),
+ (0xF9A2, 'M', '廉'),
+ (0xF9A3, 'M', '念'),
+ (0xF9A4, 'M', '捻'),
+ (0xF9A5, 'M', '殮'),
+ (0xF9A6, 'M', '簾'),
+ (0xF9A7, 'M', '獵'),
+ (0xF9A8, 'M', '令'),
+ (0xF9A9, 'M', '囹'),
+ ]
+
+def _seg_41() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xF9AA, 'M', '寧'),
+ (0xF9AB, 'M', '嶺'),
+ (0xF9AC, 'M', '怜'),
+ (0xF9AD, 'M', '玲'),
+ (0xF9AE, 'M', '瑩'),
+ (0xF9AF, 'M', '羚'),
+ (0xF9B0, 'M', '聆'),
+ (0xF9B1, 'M', '鈴'),
+ (0xF9B2, 'M', '零'),
+ (0xF9B3, 'M', '靈'),
+ (0xF9B4, 'M', '領'),
+ (0xF9B5, 'M', '例'),
+ (0xF9B6, 'M', '禮'),
+ (0xF9B7, 'M', '醴'),
+ (0xF9B8, 'M', '隸'),
+ (0xF9B9, 'M', '惡'),
+ (0xF9BA, 'M', '了'),
+ (0xF9BB, 'M', '僚'),
+ (0xF9BC, 'M', '寮'),
+ (0xF9BD, 'M', '尿'),
+ (0xF9BE, 'M', '料'),
+ (0xF9BF, 'M', '樂'),
+ (0xF9C0, 'M', '燎'),
+ (0xF9C1, 'M', '療'),
+ (0xF9C2, 'M', '蓼'),
+ (0xF9C3, 'M', '遼'),
+ (0xF9C4, 'M', '龍'),
+ (0xF9C5, 'M', '暈'),
+ (0xF9C6, 'M', '阮'),
+ (0xF9C7, 'M', '劉'),
+ (0xF9C8, 'M', '杻'),
+ (0xF9C9, 'M', '柳'),
+ (0xF9CA, 'M', '流'),
+ (0xF9CB, 'M', '溜'),
+ (0xF9CC, 'M', '琉'),
+ (0xF9CD, 'M', '留'),
+ (0xF9CE, 'M', '硫'),
+ (0xF9CF, 'M', '紐'),
+ (0xF9D0, 'M', '類'),
+ (0xF9D1, 'M', '六'),
+ (0xF9D2, 'M', '戮'),
+ (0xF9D3, 'M', '陸'),
+ (0xF9D4, 'M', '倫'),
+ (0xF9D5, 'M', '崙'),
+ (0xF9D6, 'M', '淪'),
+ (0xF9D7, 'M', '輪'),
+ (0xF9D8, 'M', '律'),
+ (0xF9D9, 'M', '慄'),
+ (0xF9DA, 'M', '栗'),
+ (0xF9DB, 'M', '率'),
+ (0xF9DC, 'M', '隆'),
+ (0xF9DD, 'M', '利'),
+ (0xF9DE, 'M', '吏'),
+ (0xF9DF, 'M', '履'),
+ (0xF9E0, 'M', '易'),
+ (0xF9E1, 'M', '李'),
+ (0xF9E2, 'M', '梨'),
+ (0xF9E3, 'M', '泥'),
+ (0xF9E4, 'M', '理'),
+ (0xF9E5, 'M', '痢'),
+ (0xF9E6, 'M', '罹'),
+ (0xF9E7, 'M', '裏'),
+ (0xF9E8, 'M', '裡'),
+ (0xF9E9, 'M', '里'),
+ (0xF9EA, 'M', '離'),
+ (0xF9EB, 'M', '匿'),
+ (0xF9EC, 'M', '溺'),
+ (0xF9ED, 'M', '吝'),
+ (0xF9EE, 'M', '燐'),
+ (0xF9EF, 'M', '璘'),
+ (0xF9F0, 'M', '藺'),
+ (0xF9F1, 'M', '隣'),
+ (0xF9F2, 'M', '鱗'),
+ (0xF9F3, 'M', '麟'),
+ (0xF9F4, 'M', '林'),
+ (0xF9F5, 'M', '淋'),
+ (0xF9F6, 'M', '臨'),
+ (0xF9F7, 'M', '立'),
+ (0xF9F8, 'M', '笠'),
+ (0xF9F9, 'M', '粒'),
+ (0xF9FA, 'M', '狀'),
+ (0xF9FB, 'M', '炙'),
+ (0xF9FC, 'M', '識'),
+ (0xF9FD, 'M', '什'),
+ (0xF9FE, 'M', '茶'),
+ (0xF9FF, 'M', '刺'),
+ (0xFA00, 'M', '切'),
+ (0xFA01, 'M', '度'),
+ (0xFA02, 'M', '拓'),
+ (0xFA03, 'M', '糖'),
+ (0xFA04, 'M', '宅'),
+ (0xFA05, 'M', '洞'),
+ (0xFA06, 'M', '暴'),
+ (0xFA07, 'M', '輻'),
+ (0xFA08, 'M', '行'),
+ (0xFA09, 'M', '降'),
+ (0xFA0A, 'M', '見'),
+ (0xFA0B, 'M', '廓'),
+ (0xFA0C, 'M', '兀'),
+ (0xFA0D, 'M', '嗀'),
+ ]
+
+def _seg_42() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xFA0E, 'V'),
+ (0xFA10, 'M', '塚'),
+ (0xFA11, 'V'),
+ (0xFA12, 'M', '晴'),
+ (0xFA13, 'V'),
+ (0xFA15, 'M', '凞'),
+ (0xFA16, 'M', '猪'),
+ (0xFA17, 'M', '益'),
+ (0xFA18, 'M', '礼'),
+ (0xFA19, 'M', '神'),
+ (0xFA1A, 'M', '祥'),
+ (0xFA1B, 'M', '福'),
+ (0xFA1C, 'M', '靖'),
+ (0xFA1D, 'M', '精'),
+ (0xFA1E, 'M', '羽'),
+ (0xFA1F, 'V'),
+ (0xFA20, 'M', '蘒'),
+ (0xFA21, 'V'),
+ (0xFA22, 'M', '諸'),
+ (0xFA23, 'V'),
+ (0xFA25, 'M', '逸'),
+ (0xFA26, 'M', '都'),
+ (0xFA27, 'V'),
+ (0xFA2A, 'M', '飯'),
+ (0xFA2B, 'M', '飼'),
+ (0xFA2C, 'M', '館'),
+ (0xFA2D, 'M', '鶴'),
+ (0xFA2E, 'M', '郞'),
+ (0xFA2F, 'M', '隷'),
+ (0xFA30, 'M', '侮'),
+ (0xFA31, 'M', '僧'),
+ (0xFA32, 'M', '免'),
+ (0xFA33, 'M', '勉'),
+ (0xFA34, 'M', '勤'),
+ (0xFA35, 'M', '卑'),
+ (0xFA36, 'M', '喝'),
+ (0xFA37, 'M', '嘆'),
+ (0xFA38, 'M', '器'),
+ (0xFA39, 'M', '塀'),
+ (0xFA3A, 'M', '墨'),
+ (0xFA3B, 'M', '層'),
+ (0xFA3C, 'M', '屮'),
+ (0xFA3D, 'M', '悔'),
+ (0xFA3E, 'M', '慨'),
+ (0xFA3F, 'M', '憎'),
+ (0xFA40, 'M', '懲'),
+ (0xFA41, 'M', '敏'),
+ (0xFA42, 'M', '既'),
+ (0xFA43, 'M', '暑'),
+ (0xFA44, 'M', '梅'),
+ (0xFA45, 'M', '海'),
+ (0xFA46, 'M', '渚'),
+ (0xFA47, 'M', '漢'),
+ (0xFA48, 'M', '煮'),
+ (0xFA49, 'M', '爫'),
+ (0xFA4A, 'M', '琢'),
+ (0xFA4B, 'M', '碑'),
+ (0xFA4C, 'M', '社'),
+ (0xFA4D, 'M', '祉'),
+ (0xFA4E, 'M', '祈'),
+ (0xFA4F, 'M', '祐'),
+ (0xFA50, 'M', '祖'),
+ (0xFA51, 'M', '祝'),
+ (0xFA52, 'M', '禍'),
+ (0xFA53, 'M', '禎'),
+ (0xFA54, 'M', '穀'),
+ (0xFA55, 'M', '突'),
+ (0xFA56, 'M', '節'),
+ (0xFA57, 'M', '練'),
+ (0xFA58, 'M', '縉'),
+ (0xFA59, 'M', '繁'),
+ (0xFA5A, 'M', '署'),
+ (0xFA5B, 'M', '者'),
+ (0xFA5C, 'M', '臭'),
+ (0xFA5D, 'M', '艹'),
+ (0xFA5F, 'M', '著'),
+ (0xFA60, 'M', '褐'),
+ (0xFA61, 'M', '視'),
+ (0xFA62, 'M', '謁'),
+ (0xFA63, 'M', '謹'),
+ (0xFA64, 'M', '賓'),
+ (0xFA65, 'M', '贈'),
+ (0xFA66, 'M', '辶'),
+ (0xFA67, 'M', '逸'),
+ (0xFA68, 'M', '難'),
+ (0xFA69, 'M', '響'),
+ (0xFA6A, 'M', '頻'),
+ (0xFA6B, 'M', '恵'),
+ (0xFA6C, 'M', '𤋮'),
+ (0xFA6D, 'M', '舘'),
+ (0xFA6E, 'X'),
+ (0xFA70, 'M', '並'),
+ (0xFA71, 'M', '况'),
+ (0xFA72, 'M', '全'),
+ (0xFA73, 'M', '侀'),
+ (0xFA74, 'M', '充'),
+ (0xFA75, 'M', '冀'),
+ (0xFA76, 'M', '勇'),
+ (0xFA77, 'M', '勺'),
+ (0xFA78, 'M', '喝'),
+ ]
+
+def _seg_43() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xFA79, 'M', '啕'),
+ (0xFA7A, 'M', '喙'),
+ (0xFA7B, 'M', '嗢'),
+ (0xFA7C, 'M', '塚'),
+ (0xFA7D, 'M', '墳'),
+ (0xFA7E, 'M', '奄'),
+ (0xFA7F, 'M', '奔'),
+ (0xFA80, 'M', '婢'),
+ (0xFA81, 'M', '嬨'),
+ (0xFA82, 'M', '廒'),
+ (0xFA83, 'M', '廙'),
+ (0xFA84, 'M', '彩'),
+ (0xFA85, 'M', '徭'),
+ (0xFA86, 'M', '惘'),
+ (0xFA87, 'M', '慎'),
+ (0xFA88, 'M', '愈'),
+ (0xFA89, 'M', '憎'),
+ (0xFA8A, 'M', '慠'),
+ (0xFA8B, 'M', '懲'),
+ (0xFA8C, 'M', '戴'),
+ (0xFA8D, 'M', '揄'),
+ (0xFA8E, 'M', '搜'),
+ (0xFA8F, 'M', '摒'),
+ (0xFA90, 'M', '敖'),
+ (0xFA91, 'M', '晴'),
+ (0xFA92, 'M', '朗'),
+ (0xFA93, 'M', '望'),
+ (0xFA94, 'M', '杖'),
+ (0xFA95, 'M', '歹'),
+ (0xFA96, 'M', '殺'),
+ (0xFA97, 'M', '流'),
+ (0xFA98, 'M', '滛'),
+ (0xFA99, 'M', '滋'),
+ (0xFA9A, 'M', '漢'),
+ (0xFA9B, 'M', '瀞'),
+ (0xFA9C, 'M', '煮'),
+ (0xFA9D, 'M', '瞧'),
+ (0xFA9E, 'M', '爵'),
+ (0xFA9F, 'M', '犯'),
+ (0xFAA0, 'M', '猪'),
+ (0xFAA1, 'M', '瑱'),
+ (0xFAA2, 'M', '甆'),
+ (0xFAA3, 'M', '画'),
+ (0xFAA4, 'M', '瘝'),
+ (0xFAA5, 'M', '瘟'),
+ (0xFAA6, 'M', '益'),
+ (0xFAA7, 'M', '盛'),
+ (0xFAA8, 'M', '直'),
+ (0xFAA9, 'M', '睊'),
+ (0xFAAA, 'M', '着'),
+ (0xFAAB, 'M', '磌'),
+ (0xFAAC, 'M', '窱'),
+ (0xFAAD, 'M', '節'),
+ (0xFAAE, 'M', '类'),
+ (0xFAAF, 'M', '絛'),
+ (0xFAB0, 'M', '練'),
+ (0xFAB1, 'M', '缾'),
+ (0xFAB2, 'M', '者'),
+ (0xFAB3, 'M', '荒'),
+ (0xFAB4, 'M', '華'),
+ (0xFAB5, 'M', '蝹'),
+ (0xFAB6, 'M', '襁'),
+ (0xFAB7, 'M', '覆'),
+ (0xFAB8, 'M', '視'),
+ (0xFAB9, 'M', '調'),
+ (0xFABA, 'M', '諸'),
+ (0xFABB, 'M', '請'),
+ (0xFABC, 'M', '謁'),
+ (0xFABD, 'M', '諾'),
+ (0xFABE, 'M', '諭'),
+ (0xFABF, 'M', '謹'),
+ (0xFAC0, 'M', '變'),
+ (0xFAC1, 'M', '贈'),
+ (0xFAC2, 'M', '輸'),
+ (0xFAC3, 'M', '遲'),
+ (0xFAC4, 'M', '醙'),
+ (0xFAC5, 'M', '鉶'),
+ (0xFAC6, 'M', '陼'),
+ (0xFAC7, 'M', '難'),
+ (0xFAC8, 'M', '靖'),
+ (0xFAC9, 'M', '韛'),
+ (0xFACA, 'M', '響'),
+ (0xFACB, 'M', '頋'),
+ (0xFACC, 'M', '頻'),
+ (0xFACD, 'M', '鬒'),
+ (0xFACE, 'M', '龜'),
+ (0xFACF, 'M', '𢡊'),
+ (0xFAD0, 'M', '𢡄'),
+ (0xFAD1, 'M', '𣏕'),
+ (0xFAD2, 'M', '㮝'),
+ (0xFAD3, 'M', '䀘'),
+ (0xFAD4, 'M', '䀹'),
+ (0xFAD5, 'M', '𥉉'),
+ (0xFAD6, 'M', '𥳐'),
+ (0xFAD7, 'M', '𧻓'),
+ (0xFAD8, 'M', '齃'),
+ (0xFAD9, 'M', '龎'),
+ (0xFADA, 'X'),
+ (0xFB00, 'M', 'ff'),
+ (0xFB01, 'M', 'fi'),
+ ]
+
+def _seg_44() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xFB02, 'M', 'fl'),
+ (0xFB03, 'M', 'ffi'),
+ (0xFB04, 'M', 'ffl'),
+ (0xFB05, 'M', 'st'),
+ (0xFB07, 'X'),
+ (0xFB13, 'M', 'մն'),
+ (0xFB14, 'M', 'մե'),
+ (0xFB15, 'M', 'մի'),
+ (0xFB16, 'M', 'վն'),
+ (0xFB17, 'M', 'մխ'),
+ (0xFB18, 'X'),
+ (0xFB1D, 'M', 'יִ'),
+ (0xFB1E, 'V'),
+ (0xFB1F, 'M', 'ײַ'),
+ (0xFB20, 'M', 'ע'),
+ (0xFB21, 'M', 'א'),
+ (0xFB22, 'M', 'ד'),
+ (0xFB23, 'M', 'ה'),
+ (0xFB24, 'M', 'כ'),
+ (0xFB25, 'M', 'ל'),
+ (0xFB26, 'M', 'ם'),
+ (0xFB27, 'M', 'ר'),
+ (0xFB28, 'M', 'ת'),
+ (0xFB29, '3', '+'),
+ (0xFB2A, 'M', 'שׁ'),
+ (0xFB2B, 'M', 'שׂ'),
+ (0xFB2C, 'M', 'שּׁ'),
+ (0xFB2D, 'M', 'שּׂ'),
+ (0xFB2E, 'M', 'אַ'),
+ (0xFB2F, 'M', 'אָ'),
+ (0xFB30, 'M', 'אּ'),
+ (0xFB31, 'M', 'בּ'),
+ (0xFB32, 'M', 'גּ'),
+ (0xFB33, 'M', 'דּ'),
+ (0xFB34, 'M', 'הּ'),
+ (0xFB35, 'M', 'וּ'),
+ (0xFB36, 'M', 'זּ'),
+ (0xFB37, 'X'),
+ (0xFB38, 'M', 'טּ'),
+ (0xFB39, 'M', 'יּ'),
+ (0xFB3A, 'M', 'ךּ'),
+ (0xFB3B, 'M', 'כּ'),
+ (0xFB3C, 'M', 'לּ'),
+ (0xFB3D, 'X'),
+ (0xFB3E, 'M', 'מּ'),
+ (0xFB3F, 'X'),
+ (0xFB40, 'M', 'נּ'),
+ (0xFB41, 'M', 'סּ'),
+ (0xFB42, 'X'),
+ (0xFB43, 'M', 'ףּ'),
+ (0xFB44, 'M', 'פּ'),
+ (0xFB45, 'X'),
+ (0xFB46, 'M', 'צּ'),
+ (0xFB47, 'M', 'קּ'),
+ (0xFB48, 'M', 'רּ'),
+ (0xFB49, 'M', 'שּ'),
+ (0xFB4A, 'M', 'תּ'),
+ (0xFB4B, 'M', 'וֹ'),
+ (0xFB4C, 'M', 'בֿ'),
+ (0xFB4D, 'M', 'כֿ'),
+ (0xFB4E, 'M', 'פֿ'),
+ (0xFB4F, 'M', 'אל'),
+ (0xFB50, 'M', 'ٱ'),
+ (0xFB52, 'M', 'ٻ'),
+ (0xFB56, 'M', 'پ'),
+ (0xFB5A, 'M', 'ڀ'),
+ (0xFB5E, 'M', 'ٺ'),
+ (0xFB62, 'M', 'ٿ'),
+ (0xFB66, 'M', 'ٹ'),
+ (0xFB6A, 'M', 'ڤ'),
+ (0xFB6E, 'M', 'ڦ'),
+ (0xFB72, 'M', 'ڄ'),
+ (0xFB76, 'M', 'ڃ'),
+ (0xFB7A, 'M', 'چ'),
+ (0xFB7E, 'M', 'ڇ'),
+ (0xFB82, 'M', 'ڍ'),
+ (0xFB84, 'M', 'ڌ'),
+ (0xFB86, 'M', 'ڎ'),
+ (0xFB88, 'M', 'ڈ'),
+ (0xFB8A, 'M', 'ژ'),
+ (0xFB8C, 'M', 'ڑ'),
+ (0xFB8E, 'M', 'ک'),
+ (0xFB92, 'M', 'گ'),
+ (0xFB96, 'M', 'ڳ'),
+ (0xFB9A, 'M', 'ڱ'),
+ (0xFB9E, 'M', 'ں'),
+ (0xFBA0, 'M', 'ڻ'),
+ (0xFBA4, 'M', 'ۀ'),
+ (0xFBA6, 'M', 'ہ'),
+ (0xFBAA, 'M', 'ھ'),
+ (0xFBAE, 'M', 'ے'),
+ (0xFBB0, 'M', 'ۓ'),
+ (0xFBB2, 'V'),
+ (0xFBC3, 'X'),
+ (0xFBD3, 'M', 'ڭ'),
+ (0xFBD7, 'M', 'ۇ'),
+ (0xFBD9, 'M', 'ۆ'),
+ (0xFBDB, 'M', 'ۈ'),
+ (0xFBDD, 'M', 'ۇٴ'),
+ (0xFBDE, 'M', 'ۋ'),
+ ]
+
+def _seg_45() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xFBE0, 'M', 'ۅ'),
+ (0xFBE2, 'M', 'ۉ'),
+ (0xFBE4, 'M', 'ې'),
+ (0xFBE8, 'M', 'ى'),
+ (0xFBEA, 'M', 'ئا'),
+ (0xFBEC, 'M', 'ئە'),
+ (0xFBEE, 'M', 'ئو'),
+ (0xFBF0, 'M', 'ئۇ'),
+ (0xFBF2, 'M', 'ئۆ'),
+ (0xFBF4, 'M', 'ئۈ'),
+ (0xFBF6, 'M', 'ئې'),
+ (0xFBF9, 'M', 'ئى'),
+ (0xFBFC, 'M', 'ی'),
+ (0xFC00, 'M', 'ئج'),
+ (0xFC01, 'M', 'ئح'),
+ (0xFC02, 'M', 'ئم'),
+ (0xFC03, 'M', 'ئى'),
+ (0xFC04, 'M', 'ئي'),
+ (0xFC05, 'M', 'بج'),
+ (0xFC06, 'M', 'بح'),
+ (0xFC07, 'M', 'بخ'),
+ (0xFC08, 'M', 'بم'),
+ (0xFC09, 'M', 'بى'),
+ (0xFC0A, 'M', 'بي'),
+ (0xFC0B, 'M', 'تج'),
+ (0xFC0C, 'M', 'تح'),
+ (0xFC0D, 'M', 'تخ'),
+ (0xFC0E, 'M', 'تم'),
+ (0xFC0F, 'M', 'تى'),
+ (0xFC10, 'M', 'تي'),
+ (0xFC11, 'M', 'ثج'),
+ (0xFC12, 'M', 'ثم'),
+ (0xFC13, 'M', 'ثى'),
+ (0xFC14, 'M', 'ثي'),
+ (0xFC15, 'M', 'جح'),
+ (0xFC16, 'M', 'جم'),
+ (0xFC17, 'M', 'حج'),
+ (0xFC18, 'M', 'حم'),
+ (0xFC19, 'M', 'خج'),
+ (0xFC1A, 'M', 'خح'),
+ (0xFC1B, 'M', 'خم'),
+ (0xFC1C, 'M', 'سج'),
+ (0xFC1D, 'M', 'سح'),
+ (0xFC1E, 'M', 'سخ'),
+ (0xFC1F, 'M', 'سم'),
+ (0xFC20, 'M', 'صح'),
+ (0xFC21, 'M', 'صم'),
+ (0xFC22, 'M', 'ضج'),
+ (0xFC23, 'M', 'ضح'),
+ (0xFC24, 'M', 'ضخ'),
+ (0xFC25, 'M', 'ضم'),
+ (0xFC26, 'M', 'طح'),
+ (0xFC27, 'M', 'طم'),
+ (0xFC28, 'M', 'ظم'),
+ (0xFC29, 'M', 'عج'),
+ (0xFC2A, 'M', 'عم'),
+ (0xFC2B, 'M', 'غج'),
+ (0xFC2C, 'M', 'غم'),
+ (0xFC2D, 'M', 'فج'),
+ (0xFC2E, 'M', 'فح'),
+ (0xFC2F, 'M', 'فخ'),
+ (0xFC30, 'M', 'فم'),
+ (0xFC31, 'M', 'فى'),
+ (0xFC32, 'M', 'في'),
+ (0xFC33, 'M', 'قح'),
+ (0xFC34, 'M', 'قم'),
+ (0xFC35, 'M', 'قى'),
+ (0xFC36, 'M', 'قي'),
+ (0xFC37, 'M', 'كا'),
+ (0xFC38, 'M', 'كج'),
+ (0xFC39, 'M', 'كح'),
+ (0xFC3A, 'M', 'كخ'),
+ (0xFC3B, 'M', 'كل'),
+ (0xFC3C, 'M', 'كم'),
+ (0xFC3D, 'M', 'كى'),
+ (0xFC3E, 'M', 'كي'),
+ (0xFC3F, 'M', 'لج'),
+ (0xFC40, 'M', 'لح'),
+ (0xFC41, 'M', 'لخ'),
+ (0xFC42, 'M', 'لم'),
+ (0xFC43, 'M', 'لى'),
+ (0xFC44, 'M', 'لي'),
+ (0xFC45, 'M', 'مج'),
+ (0xFC46, 'M', 'مح'),
+ (0xFC47, 'M', 'مخ'),
+ (0xFC48, 'M', 'مم'),
+ (0xFC49, 'M', 'مى'),
+ (0xFC4A, 'M', 'مي'),
+ (0xFC4B, 'M', 'نج'),
+ (0xFC4C, 'M', 'نح'),
+ (0xFC4D, 'M', 'نخ'),
+ (0xFC4E, 'M', 'نم'),
+ (0xFC4F, 'M', 'نى'),
+ (0xFC50, 'M', 'ني'),
+ (0xFC51, 'M', 'هج'),
+ (0xFC52, 'M', 'هم'),
+ (0xFC53, 'M', 'هى'),
+ (0xFC54, 'M', 'هي'),
+ (0xFC55, 'M', 'يج'),
+ (0xFC56, 'M', 'يح'),
+ ]
+
+def _seg_46() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xFC57, 'M', 'يخ'),
+ (0xFC58, 'M', 'يم'),
+ (0xFC59, 'M', 'يى'),
+ (0xFC5A, 'M', 'يي'),
+ (0xFC5B, 'M', 'ذٰ'),
+ (0xFC5C, 'M', 'رٰ'),
+ (0xFC5D, 'M', 'ىٰ'),
+ (0xFC5E, '3', ' ٌّ'),
+ (0xFC5F, '3', ' ٍّ'),
+ (0xFC60, '3', ' َّ'),
+ (0xFC61, '3', ' ُّ'),
+ (0xFC62, '3', ' ِّ'),
+ (0xFC63, '3', ' ّٰ'),
+ (0xFC64, 'M', 'ئر'),
+ (0xFC65, 'M', 'ئز'),
+ (0xFC66, 'M', 'ئم'),
+ (0xFC67, 'M', 'ئن'),
+ (0xFC68, 'M', 'ئى'),
+ (0xFC69, 'M', 'ئي'),
+ (0xFC6A, 'M', 'بر'),
+ (0xFC6B, 'M', 'بز'),
+ (0xFC6C, 'M', 'بم'),
+ (0xFC6D, 'M', 'بن'),
+ (0xFC6E, 'M', 'بى'),
+ (0xFC6F, 'M', 'بي'),
+ (0xFC70, 'M', 'تر'),
+ (0xFC71, 'M', 'تز'),
+ (0xFC72, 'M', 'تم'),
+ (0xFC73, 'M', 'تن'),
+ (0xFC74, 'M', 'تى'),
+ (0xFC75, 'M', 'تي'),
+ (0xFC76, 'M', 'ثر'),
+ (0xFC77, 'M', 'ثز'),
+ (0xFC78, 'M', 'ثم'),
+ (0xFC79, 'M', 'ثن'),
+ (0xFC7A, 'M', 'ثى'),
+ (0xFC7B, 'M', 'ثي'),
+ (0xFC7C, 'M', 'فى'),
+ (0xFC7D, 'M', 'في'),
+ (0xFC7E, 'M', 'قى'),
+ (0xFC7F, 'M', 'قي'),
+ (0xFC80, 'M', 'كا'),
+ (0xFC81, 'M', 'كل'),
+ (0xFC82, 'M', 'كم'),
+ (0xFC83, 'M', 'كى'),
+ (0xFC84, 'M', 'كي'),
+ (0xFC85, 'M', 'لم'),
+ (0xFC86, 'M', 'لى'),
+ (0xFC87, 'M', 'لي'),
+ (0xFC88, 'M', 'ما'),
+ (0xFC89, 'M', 'مم'),
+ (0xFC8A, 'M', 'نر'),
+ (0xFC8B, 'M', 'نز'),
+ (0xFC8C, 'M', 'نم'),
+ (0xFC8D, 'M', 'نن'),
+ (0xFC8E, 'M', 'نى'),
+ (0xFC8F, 'M', 'ني'),
+ (0xFC90, 'M', 'ىٰ'),
+ (0xFC91, 'M', 'ير'),
+ (0xFC92, 'M', 'يز'),
+ (0xFC93, 'M', 'يم'),
+ (0xFC94, 'M', 'ين'),
+ (0xFC95, 'M', 'يى'),
+ (0xFC96, 'M', 'يي'),
+ (0xFC97, 'M', 'ئج'),
+ (0xFC98, 'M', 'ئح'),
+ (0xFC99, 'M', 'ئخ'),
+ (0xFC9A, 'M', 'ئم'),
+ (0xFC9B, 'M', 'ئه'),
+ (0xFC9C, 'M', 'بج'),
+ (0xFC9D, 'M', 'بح'),
+ (0xFC9E, 'M', 'بخ'),
+ (0xFC9F, 'M', 'بم'),
+ (0xFCA0, 'M', 'به'),
+ (0xFCA1, 'M', 'تج'),
+ (0xFCA2, 'M', 'تح'),
+ (0xFCA3, 'M', 'تخ'),
+ (0xFCA4, 'M', 'تم'),
+ (0xFCA5, 'M', 'ته'),
+ (0xFCA6, 'M', 'ثم'),
+ (0xFCA7, 'M', 'جح'),
+ (0xFCA8, 'M', 'جم'),
+ (0xFCA9, 'M', 'حج'),
+ (0xFCAA, 'M', 'حم'),
+ (0xFCAB, 'M', 'خج'),
+ (0xFCAC, 'M', 'خم'),
+ (0xFCAD, 'M', 'سج'),
+ (0xFCAE, 'M', 'سح'),
+ (0xFCAF, 'M', 'سخ'),
+ (0xFCB0, 'M', 'سم'),
+ (0xFCB1, 'M', 'صح'),
+ (0xFCB2, 'M', 'صخ'),
+ (0xFCB3, 'M', 'صم'),
+ (0xFCB4, 'M', 'ضج'),
+ (0xFCB5, 'M', 'ضح'),
+ (0xFCB6, 'M', 'ضخ'),
+ (0xFCB7, 'M', 'ضم'),
+ (0xFCB8, 'M', 'طح'),
+ (0xFCB9, 'M', 'ظم'),
+ (0xFCBA, 'M', 'عج'),
+ ]
+
+def _seg_47() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xFCBB, 'M', 'عم'),
+ (0xFCBC, 'M', 'غج'),
+ (0xFCBD, 'M', 'غم'),
+ (0xFCBE, 'M', 'فج'),
+ (0xFCBF, 'M', 'فح'),
+ (0xFCC0, 'M', 'فخ'),
+ (0xFCC1, 'M', 'فم'),
+ (0xFCC2, 'M', 'قح'),
+ (0xFCC3, 'M', 'قم'),
+ (0xFCC4, 'M', 'كج'),
+ (0xFCC5, 'M', 'كح'),
+ (0xFCC6, 'M', 'كخ'),
+ (0xFCC7, 'M', 'كل'),
+ (0xFCC8, 'M', 'كم'),
+ (0xFCC9, 'M', 'لج'),
+ (0xFCCA, 'M', 'لح'),
+ (0xFCCB, 'M', 'لخ'),
+ (0xFCCC, 'M', 'لم'),
+ (0xFCCD, 'M', 'له'),
+ (0xFCCE, 'M', 'مج'),
+ (0xFCCF, 'M', 'مح'),
+ (0xFCD0, 'M', 'مخ'),
+ (0xFCD1, 'M', 'مم'),
+ (0xFCD2, 'M', 'نج'),
+ (0xFCD3, 'M', 'نح'),
+ (0xFCD4, 'M', 'نخ'),
+ (0xFCD5, 'M', 'نم'),
+ (0xFCD6, 'M', 'نه'),
+ (0xFCD7, 'M', 'هج'),
+ (0xFCD8, 'M', 'هم'),
+ (0xFCD9, 'M', 'هٰ'),
+ (0xFCDA, 'M', 'يج'),
+ (0xFCDB, 'M', 'يح'),
+ (0xFCDC, 'M', 'يخ'),
+ (0xFCDD, 'M', 'يم'),
+ (0xFCDE, 'M', 'يه'),
+ (0xFCDF, 'M', 'ئم'),
+ (0xFCE0, 'M', 'ئه'),
+ (0xFCE1, 'M', 'بم'),
+ (0xFCE2, 'M', 'به'),
+ (0xFCE3, 'M', 'تم'),
+ (0xFCE4, 'M', 'ته'),
+ (0xFCE5, 'M', 'ثم'),
+ (0xFCE6, 'M', 'ثه'),
+ (0xFCE7, 'M', 'سم'),
+ (0xFCE8, 'M', 'سه'),
+ (0xFCE9, 'M', 'شم'),
+ (0xFCEA, 'M', 'شه'),
+ (0xFCEB, 'M', 'كل'),
+ (0xFCEC, 'M', 'كم'),
+ (0xFCED, 'M', 'لم'),
+ (0xFCEE, 'M', 'نم'),
+ (0xFCEF, 'M', 'نه'),
+ (0xFCF0, 'M', 'يم'),
+ (0xFCF1, 'M', 'يه'),
+ (0xFCF2, 'M', 'ـَّ'),
+ (0xFCF3, 'M', 'ـُّ'),
+ (0xFCF4, 'M', 'ـِّ'),
+ (0xFCF5, 'M', 'طى'),
+ (0xFCF6, 'M', 'طي'),
+ (0xFCF7, 'M', 'عى'),
+ (0xFCF8, 'M', 'عي'),
+ (0xFCF9, 'M', 'غى'),
+ (0xFCFA, 'M', 'غي'),
+ (0xFCFB, 'M', 'سى'),
+ (0xFCFC, 'M', 'سي'),
+ (0xFCFD, 'M', 'شى'),
+ (0xFCFE, 'M', 'شي'),
+ (0xFCFF, 'M', 'حى'),
+ (0xFD00, 'M', 'حي'),
+ (0xFD01, 'M', 'جى'),
+ (0xFD02, 'M', 'جي'),
+ (0xFD03, 'M', 'خى'),
+ (0xFD04, 'M', 'خي'),
+ (0xFD05, 'M', 'صى'),
+ (0xFD06, 'M', 'صي'),
+ (0xFD07, 'M', 'ضى'),
+ (0xFD08, 'M', 'ضي'),
+ (0xFD09, 'M', 'شج'),
+ (0xFD0A, 'M', 'شح'),
+ (0xFD0B, 'M', 'شخ'),
+ (0xFD0C, 'M', 'شم'),
+ (0xFD0D, 'M', 'شر'),
+ (0xFD0E, 'M', 'سر'),
+ (0xFD0F, 'M', 'صر'),
+ (0xFD10, 'M', 'ضر'),
+ (0xFD11, 'M', 'طى'),
+ (0xFD12, 'M', 'طي'),
+ (0xFD13, 'M', 'عى'),
+ (0xFD14, 'M', 'عي'),
+ (0xFD15, 'M', 'غى'),
+ (0xFD16, 'M', 'غي'),
+ (0xFD17, 'M', 'سى'),
+ (0xFD18, 'M', 'سي'),
+ (0xFD19, 'M', 'شى'),
+ (0xFD1A, 'M', 'شي'),
+ (0xFD1B, 'M', 'حى'),
+ (0xFD1C, 'M', 'حي'),
+ (0xFD1D, 'M', 'جى'),
+ (0xFD1E, 'M', 'جي'),
+ ]
+
+def _seg_48() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xFD1F, 'M', 'خى'),
+ (0xFD20, 'M', 'خي'),
+ (0xFD21, 'M', 'صى'),
+ (0xFD22, 'M', 'صي'),
+ (0xFD23, 'M', 'ضى'),
+ (0xFD24, 'M', 'ضي'),
+ (0xFD25, 'M', 'شج'),
+ (0xFD26, 'M', 'شح'),
+ (0xFD27, 'M', 'شخ'),
+ (0xFD28, 'M', 'شم'),
+ (0xFD29, 'M', 'شر'),
+ (0xFD2A, 'M', 'سر'),
+ (0xFD2B, 'M', 'صر'),
+ (0xFD2C, 'M', 'ضر'),
+ (0xFD2D, 'M', 'شج'),
+ (0xFD2E, 'M', 'شح'),
+ (0xFD2F, 'M', 'شخ'),
+ (0xFD30, 'M', 'شم'),
+ (0xFD31, 'M', 'سه'),
+ (0xFD32, 'M', 'شه'),
+ (0xFD33, 'M', 'طم'),
+ (0xFD34, 'M', 'سج'),
+ (0xFD35, 'M', 'سح'),
+ (0xFD36, 'M', 'سخ'),
+ (0xFD37, 'M', 'شج'),
+ (0xFD38, 'M', 'شح'),
+ (0xFD39, 'M', 'شخ'),
+ (0xFD3A, 'M', 'طم'),
+ (0xFD3B, 'M', 'ظم'),
+ (0xFD3C, 'M', 'اً'),
+ (0xFD3E, 'V'),
+ (0xFD50, 'M', 'تجم'),
+ (0xFD51, 'M', 'تحج'),
+ (0xFD53, 'M', 'تحم'),
+ (0xFD54, 'M', 'تخم'),
+ (0xFD55, 'M', 'تمج'),
+ (0xFD56, 'M', 'تمح'),
+ (0xFD57, 'M', 'تمخ'),
+ (0xFD58, 'M', 'جمح'),
+ (0xFD5A, 'M', 'حمي'),
+ (0xFD5B, 'M', 'حمى'),
+ (0xFD5C, 'M', 'سحج'),
+ (0xFD5D, 'M', 'سجح'),
+ (0xFD5E, 'M', 'سجى'),
+ (0xFD5F, 'M', 'سمح'),
+ (0xFD61, 'M', 'سمج'),
+ (0xFD62, 'M', 'سمم'),
+ (0xFD64, 'M', 'صحح'),
+ (0xFD66, 'M', 'صمم'),
+ (0xFD67, 'M', 'شحم'),
+ (0xFD69, 'M', 'شجي'),
+ (0xFD6A, 'M', 'شمخ'),
+ (0xFD6C, 'M', 'شمم'),
+ (0xFD6E, 'M', 'ضحى'),
+ (0xFD6F, 'M', 'ضخم'),
+ (0xFD71, 'M', 'طمح'),
+ (0xFD73, 'M', 'طمم'),
+ (0xFD74, 'M', 'طمي'),
+ (0xFD75, 'M', 'عجم'),
+ (0xFD76, 'M', 'عمم'),
+ (0xFD78, 'M', 'عمى'),
+ (0xFD79, 'M', 'غمم'),
+ (0xFD7A, 'M', 'غمي'),
+ (0xFD7B, 'M', 'غمى'),
+ (0xFD7C, 'M', 'فخم'),
+ (0xFD7E, 'M', 'قمح'),
+ (0xFD7F, 'M', 'قمم'),
+ (0xFD80, 'M', 'لحم'),
+ (0xFD81, 'M', 'لحي'),
+ (0xFD82, 'M', 'لحى'),
+ (0xFD83, 'M', 'لجج'),
+ (0xFD85, 'M', 'لخم'),
+ (0xFD87, 'M', 'لمح'),
+ (0xFD89, 'M', 'محج'),
+ (0xFD8A, 'M', 'محم'),
+ (0xFD8B, 'M', 'محي'),
+ (0xFD8C, 'M', 'مجح'),
+ (0xFD8D, 'M', 'مجم'),
+ (0xFD8E, 'M', 'مخج'),
+ (0xFD8F, 'M', 'مخم'),
+ (0xFD90, 'X'),
+ (0xFD92, 'M', 'مجخ'),
+ (0xFD93, 'M', 'همج'),
+ (0xFD94, 'M', 'همم'),
+ (0xFD95, 'M', 'نحم'),
+ (0xFD96, 'M', 'نحى'),
+ (0xFD97, 'M', 'نجم'),
+ (0xFD99, 'M', 'نجى'),
+ (0xFD9A, 'M', 'نمي'),
+ (0xFD9B, 'M', 'نمى'),
+ (0xFD9C, 'M', 'يمم'),
+ (0xFD9E, 'M', 'بخي'),
+ (0xFD9F, 'M', 'تجي'),
+ (0xFDA0, 'M', 'تجى'),
+ (0xFDA1, 'M', 'تخي'),
+ (0xFDA2, 'M', 'تخى'),
+ (0xFDA3, 'M', 'تمي'),
+ (0xFDA4, 'M', 'تمى'),
+ (0xFDA5, 'M', 'جمي'),
+ (0xFDA6, 'M', 'جحى'),
+ ]
+
+def _seg_49() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xFDA7, 'M', 'جمى'),
+ (0xFDA8, 'M', 'سخى'),
+ (0xFDA9, 'M', 'صحي'),
+ (0xFDAA, 'M', 'شحي'),
+ (0xFDAB, 'M', 'ضحي'),
+ (0xFDAC, 'M', 'لجي'),
+ (0xFDAD, 'M', 'لمي'),
+ (0xFDAE, 'M', 'يحي'),
+ (0xFDAF, 'M', 'يجي'),
+ (0xFDB0, 'M', 'يمي'),
+ (0xFDB1, 'M', 'ممي'),
+ (0xFDB2, 'M', 'قمي'),
+ (0xFDB3, 'M', 'نحي'),
+ (0xFDB4, 'M', 'قمح'),
+ (0xFDB5, 'M', 'لحم'),
+ (0xFDB6, 'M', 'عمي'),
+ (0xFDB7, 'M', 'كمي'),
+ (0xFDB8, 'M', 'نجح'),
+ (0xFDB9, 'M', 'مخي'),
+ (0xFDBA, 'M', 'لجم'),
+ (0xFDBB, 'M', 'كمم'),
+ (0xFDBC, 'M', 'لجم'),
+ (0xFDBD, 'M', 'نجح'),
+ (0xFDBE, 'M', 'جحي'),
+ (0xFDBF, 'M', 'حجي'),
+ (0xFDC0, 'M', 'مجي'),
+ (0xFDC1, 'M', 'فمي'),
+ (0xFDC2, 'M', 'بحي'),
+ (0xFDC3, 'M', 'كمم'),
+ (0xFDC4, 'M', 'عجم'),
+ (0xFDC5, 'M', 'صمم'),
+ (0xFDC6, 'M', 'سخي'),
+ (0xFDC7, 'M', 'نجي'),
+ (0xFDC8, 'X'),
+ (0xFDCF, 'V'),
+ (0xFDD0, 'X'),
+ (0xFDF0, 'M', 'صلے'),
+ (0xFDF1, 'M', 'قلے'),
+ (0xFDF2, 'M', 'الله'),
+ (0xFDF3, 'M', 'اكبر'),
+ (0xFDF4, 'M', 'محمد'),
+ (0xFDF5, 'M', 'صلعم'),
+ (0xFDF6, 'M', 'رسول'),
+ (0xFDF7, 'M', 'عليه'),
+ (0xFDF8, 'M', 'وسلم'),
+ (0xFDF9, 'M', 'صلى'),
+ (0xFDFA, '3', 'صلى الله عليه وسلم'),
+ (0xFDFB, '3', 'جل جلاله'),
+ (0xFDFC, 'M', 'ریال'),
+ (0xFDFD, 'V'),
+ (0xFE00, 'I'),
+ (0xFE10, '3', ','),
+ (0xFE11, 'M', '、'),
+ (0xFE12, 'X'),
+ (0xFE13, '3', ':'),
+ (0xFE14, '3', ';'),
+ (0xFE15, '3', '!'),
+ (0xFE16, '3', '?'),
+ (0xFE17, 'M', '〖'),
+ (0xFE18, 'M', '〗'),
+ (0xFE19, 'X'),
+ (0xFE20, 'V'),
+ (0xFE30, 'X'),
+ (0xFE31, 'M', '—'),
+ (0xFE32, 'M', '–'),
+ (0xFE33, '3', '_'),
+ (0xFE35, '3', '('),
+ (0xFE36, '3', ')'),
+ (0xFE37, '3', '{'),
+ (0xFE38, '3', '}'),
+ (0xFE39, 'M', '〔'),
+ (0xFE3A, 'M', '〕'),
+ (0xFE3B, 'M', '【'),
+ (0xFE3C, 'M', '】'),
+ (0xFE3D, 'M', '《'),
+ (0xFE3E, 'M', '》'),
+ (0xFE3F, 'M', '〈'),
+ (0xFE40, 'M', '〉'),
+ (0xFE41, 'M', '「'),
+ (0xFE42, 'M', '」'),
+ (0xFE43, 'M', '『'),
+ (0xFE44, 'M', '』'),
+ (0xFE45, 'V'),
+ (0xFE47, '3', '['),
+ (0xFE48, '3', ']'),
+ (0xFE49, '3', ' ̅'),
+ (0xFE4D, '3', '_'),
+ (0xFE50, '3', ','),
+ (0xFE51, 'M', '、'),
+ (0xFE52, 'X'),
+ (0xFE54, '3', ';'),
+ (0xFE55, '3', ':'),
+ (0xFE56, '3', '?'),
+ (0xFE57, '3', '!'),
+ (0xFE58, 'M', '—'),
+ (0xFE59, '3', '('),
+ (0xFE5A, '3', ')'),
+ (0xFE5B, '3', '{'),
+ (0xFE5C, '3', '}'),
+ (0xFE5D, 'M', '〔'),
+ ]
+
+def _seg_50() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xFE5E, 'M', '〕'),
+ (0xFE5F, '3', '#'),
+ (0xFE60, '3', '&'),
+ (0xFE61, '3', '*'),
+ (0xFE62, '3', '+'),
+ (0xFE63, 'M', '-'),
+ (0xFE64, '3', '<'),
+ (0xFE65, '3', '>'),
+ (0xFE66, '3', '='),
+ (0xFE67, 'X'),
+ (0xFE68, '3', '\\'),
+ (0xFE69, '3', '$'),
+ (0xFE6A, '3', '%'),
+ (0xFE6B, '3', '@'),
+ (0xFE6C, 'X'),
+ (0xFE70, '3', ' ً'),
+ (0xFE71, 'M', 'ـً'),
+ (0xFE72, '3', ' ٌ'),
+ (0xFE73, 'V'),
+ (0xFE74, '3', ' ٍ'),
+ (0xFE75, 'X'),
+ (0xFE76, '3', ' َ'),
+ (0xFE77, 'M', 'ـَ'),
+ (0xFE78, '3', ' ُ'),
+ (0xFE79, 'M', 'ـُ'),
+ (0xFE7A, '3', ' ِ'),
+ (0xFE7B, 'M', 'ـِ'),
+ (0xFE7C, '3', ' ّ'),
+ (0xFE7D, 'M', 'ـّ'),
+ (0xFE7E, '3', ' ْ'),
+ (0xFE7F, 'M', 'ـْ'),
+ (0xFE80, 'M', 'ء'),
+ (0xFE81, 'M', 'آ'),
+ (0xFE83, 'M', 'أ'),
+ (0xFE85, 'M', 'ؤ'),
+ (0xFE87, 'M', 'إ'),
+ (0xFE89, 'M', 'ئ'),
+ (0xFE8D, 'M', 'ا'),
+ (0xFE8F, 'M', 'ب'),
+ (0xFE93, 'M', 'ة'),
+ (0xFE95, 'M', 'ت'),
+ (0xFE99, 'M', 'ث'),
+ (0xFE9D, 'M', 'ج'),
+ (0xFEA1, 'M', 'ح'),
+ (0xFEA5, 'M', 'خ'),
+ (0xFEA9, 'M', 'د'),
+ (0xFEAB, 'M', 'ذ'),
+ (0xFEAD, 'M', 'ر'),
+ (0xFEAF, 'M', 'ز'),
+ (0xFEB1, 'M', 'س'),
+ (0xFEB5, 'M', 'ش'),
+ (0xFEB9, 'M', 'ص'),
+ (0xFEBD, 'M', 'ض'),
+ (0xFEC1, 'M', 'ط'),
+ (0xFEC5, 'M', 'ظ'),
+ (0xFEC9, 'M', 'ع'),
+ (0xFECD, 'M', 'غ'),
+ (0xFED1, 'M', 'ف'),
+ (0xFED5, 'M', 'ق'),
+ (0xFED9, 'M', 'ك'),
+ (0xFEDD, 'M', 'ل'),
+ (0xFEE1, 'M', 'م'),
+ (0xFEE5, 'M', 'ن'),
+ (0xFEE9, 'M', 'ه'),
+ (0xFEED, 'M', 'و'),
+ (0xFEEF, 'M', 'ى'),
+ (0xFEF1, 'M', 'ي'),
+ (0xFEF5, 'M', 'لآ'),
+ (0xFEF7, 'M', 'لأ'),
+ (0xFEF9, 'M', 'لإ'),
+ (0xFEFB, 'M', 'لا'),
+ (0xFEFD, 'X'),
+ (0xFEFF, 'I'),
+ (0xFF00, 'X'),
+ (0xFF01, '3', '!'),
+ (0xFF02, '3', '"'),
+ (0xFF03, '3', '#'),
+ (0xFF04, '3', '$'),
+ (0xFF05, '3', '%'),
+ (0xFF06, '3', '&'),
+ (0xFF07, '3', '\''),
+ (0xFF08, '3', '('),
+ (0xFF09, '3', ')'),
+ (0xFF0A, '3', '*'),
+ (0xFF0B, '3', '+'),
+ (0xFF0C, '3', ','),
+ (0xFF0D, 'M', '-'),
+ (0xFF0E, 'M', '.'),
+ (0xFF0F, '3', '/'),
+ (0xFF10, 'M', '0'),
+ (0xFF11, 'M', '1'),
+ (0xFF12, 'M', '2'),
+ (0xFF13, 'M', '3'),
+ (0xFF14, 'M', '4'),
+ (0xFF15, 'M', '5'),
+ (0xFF16, 'M', '6'),
+ (0xFF17, 'M', '7'),
+ (0xFF18, 'M', '8'),
+ (0xFF19, 'M', '9'),
+ (0xFF1A, '3', ':'),
+ ]
+
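+# Reading the segment below as an example: (0xFF21, 'M', 'a') maps fullwidth
+# 'A' (U+FF21) to plain 'a', and covers only that single code point because
+# the next row starts at 0xFF22. Width folding and case folding therefore
+# fall out of one table lookup.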
+def _seg_51() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xFF1B, '3', ';'),
+ (0xFF1C, '3', '<'),
+ (0xFF1D, '3', '='),
+ (0xFF1E, '3', '>'),
+ (0xFF1F, '3', '?'),
+ (0xFF20, '3', '@'),
+ (0xFF21, 'M', 'a'),
+ (0xFF22, 'M', 'b'),
+ (0xFF23, 'M', 'c'),
+ (0xFF24, 'M', 'd'),
+ (0xFF25, 'M', 'e'),
+ (0xFF26, 'M', 'f'),
+ (0xFF27, 'M', 'g'),
+ (0xFF28, 'M', 'h'),
+ (0xFF29, 'M', 'i'),
+ (0xFF2A, 'M', 'j'),
+ (0xFF2B, 'M', 'k'),
+ (0xFF2C, 'M', 'l'),
+ (0xFF2D, 'M', 'm'),
+ (0xFF2E, 'M', 'n'),
+ (0xFF2F, 'M', 'o'),
+ (0xFF30, 'M', 'p'),
+ (0xFF31, 'M', 'q'),
+ (0xFF32, 'M', 'r'),
+ (0xFF33, 'M', 's'),
+ (0xFF34, 'M', 't'),
+ (0xFF35, 'M', 'u'),
+ (0xFF36, 'M', 'v'),
+ (0xFF37, 'M', 'w'),
+ (0xFF38, 'M', 'x'),
+ (0xFF39, 'M', 'y'),
+ (0xFF3A, 'M', 'z'),
+ (0xFF3B, '3', '['),
+ (0xFF3C, '3', '\\'),
+ (0xFF3D, '3', ']'),
+ (0xFF3E, '3', '^'),
+ (0xFF3F, '3', '_'),
+ (0xFF40, '3', '`'),
+ (0xFF41, 'M', 'a'),
+ (0xFF42, 'M', 'b'),
+ (0xFF43, 'M', 'c'),
+ (0xFF44, 'M', 'd'),
+ (0xFF45, 'M', 'e'),
+ (0xFF46, 'M', 'f'),
+ (0xFF47, 'M', 'g'),
+ (0xFF48, 'M', 'h'),
+ (0xFF49, 'M', 'i'),
+ (0xFF4A, 'M', 'j'),
+ (0xFF4B, 'M', 'k'),
+ (0xFF4C, 'M', 'l'),
+ (0xFF4D, 'M', 'm'),
+ (0xFF4E, 'M', 'n'),
+ (0xFF4F, 'M', 'o'),
+ (0xFF50, 'M', 'p'),
+ (0xFF51, 'M', 'q'),
+ (0xFF52, 'M', 'r'),
+ (0xFF53, 'M', 's'),
+ (0xFF54, 'M', 't'),
+ (0xFF55, 'M', 'u'),
+ (0xFF56, 'M', 'v'),
+ (0xFF57, 'M', 'w'),
+ (0xFF58, 'M', 'x'),
+ (0xFF59, 'M', 'y'),
+ (0xFF5A, 'M', 'z'),
+ (0xFF5B, '3', '{'),
+ (0xFF5C, '3', '|'),
+ (0xFF5D, '3', '}'),
+ (0xFF5E, '3', '~'),
+ (0xFF5F, 'M', '⦅'),
+ (0xFF60, 'M', '⦆'),
+ (0xFF61, 'M', '.'),
+ (0xFF62, 'M', '「'),
+ (0xFF63, 'M', '」'),
+ (0xFF64, 'M', '、'),
+ (0xFF65, 'M', '・'),
+ (0xFF66, 'M', 'ヲ'),
+ (0xFF67, 'M', 'ァ'),
+ (0xFF68, 'M', 'ィ'),
+ (0xFF69, 'M', 'ゥ'),
+ (0xFF6A, 'M', 'ェ'),
+ (0xFF6B, 'M', 'ォ'),
+ (0xFF6C, 'M', 'ャ'),
+ (0xFF6D, 'M', 'ュ'),
+ (0xFF6E, 'M', 'ョ'),
+ (0xFF6F, 'M', 'ッ'),
+ (0xFF70, 'M', 'ー'),
+ (0xFF71, 'M', 'ア'),
+ (0xFF72, 'M', 'イ'),
+ (0xFF73, 'M', 'ウ'),
+ (0xFF74, 'M', 'エ'),
+ (0xFF75, 'M', 'オ'),
+ (0xFF76, 'M', 'カ'),
+ (0xFF77, 'M', 'キ'),
+ (0xFF78, 'M', 'ク'),
+ (0xFF79, 'M', 'ケ'),
+ (0xFF7A, 'M', 'コ'),
+ (0xFF7B, 'M', 'サ'),
+ (0xFF7C, 'M', 'シ'),
+ (0xFF7D, 'M', 'ス'),
+ (0xFF7E, 'M', 'セ'),
+ ]
+
+def _seg_52() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xFF7F, 'M', 'ソ'),
+ (0xFF80, 'M', 'タ'),
+ (0xFF81, 'M', 'チ'),
+ (0xFF82, 'M', 'ツ'),
+ (0xFF83, 'M', 'テ'),
+ (0xFF84, 'M', 'ト'),
+ (0xFF85, 'M', 'ナ'),
+ (0xFF86, 'M', 'ニ'),
+ (0xFF87, 'M', 'ヌ'),
+ (0xFF88, 'M', 'ネ'),
+ (0xFF89, 'M', 'ノ'),
+ (0xFF8A, 'M', 'ハ'),
+ (0xFF8B, 'M', 'ヒ'),
+ (0xFF8C, 'M', 'フ'),
+ (0xFF8D, 'M', 'ヘ'),
+ (0xFF8E, 'M', 'ホ'),
+ (0xFF8F, 'M', 'マ'),
+ (0xFF90, 'M', 'ミ'),
+ (0xFF91, 'M', 'ム'),
+ (0xFF92, 'M', 'メ'),
+ (0xFF93, 'M', 'モ'),
+ (0xFF94, 'M', 'ヤ'),
+ (0xFF95, 'M', 'ユ'),
+ (0xFF96, 'M', 'ヨ'),
+ (0xFF97, 'M', 'ラ'),
+ (0xFF98, 'M', 'リ'),
+ (0xFF99, 'M', 'ル'),
+ (0xFF9A, 'M', 'レ'),
+ (0xFF9B, 'M', 'ロ'),
+ (0xFF9C, 'M', 'ワ'),
+ (0xFF9D, 'M', 'ン'),
+ (0xFF9E, 'M', '゙'),
+ (0xFF9F, 'M', '゚'),
+ (0xFFA0, 'X'),
+ (0xFFA1, 'M', 'ᄀ'),
+ (0xFFA2, 'M', 'ᄁ'),
+ (0xFFA3, 'M', 'ᆪ'),
+ (0xFFA4, 'M', 'ᄂ'),
+ (0xFFA5, 'M', 'ᆬ'),
+ (0xFFA6, 'M', 'ᆭ'),
+ (0xFFA7, 'M', 'ᄃ'),
+ (0xFFA8, 'M', 'ᄄ'),
+ (0xFFA9, 'M', 'ᄅ'),
+ (0xFFAA, 'M', 'ᆰ'),
+ (0xFFAB, 'M', 'ᆱ'),
+ (0xFFAC, 'M', 'ᆲ'),
+ (0xFFAD, 'M', 'ᆳ'),
+ (0xFFAE, 'M', 'ᆴ'),
+ (0xFFAF, 'M', 'ᆵ'),
+ (0xFFB0, 'M', 'ᄚ'),
+ (0xFFB1, 'M', 'ᄆ'),
+ (0xFFB2, 'M', 'ᄇ'),
+ (0xFFB3, 'M', 'ᄈ'),
+ (0xFFB4, 'M', 'ᄡ'),
+ (0xFFB5, 'M', 'ᄉ'),
+ (0xFFB6, 'M', 'ᄊ'),
+ (0xFFB7, 'M', 'ᄋ'),
+ (0xFFB8, 'M', 'ᄌ'),
+ (0xFFB9, 'M', 'ᄍ'),
+ (0xFFBA, 'M', 'ᄎ'),
+ (0xFFBB, 'M', 'ᄏ'),
+ (0xFFBC, 'M', 'ᄐ'),
+ (0xFFBD, 'M', 'ᄑ'),
+ (0xFFBE, 'M', 'ᄒ'),
+ (0xFFBF, 'X'),
+ (0xFFC2, 'M', 'ᅡ'),
+ (0xFFC3, 'M', 'ᅢ'),
+ (0xFFC4, 'M', 'ᅣ'),
+ (0xFFC5, 'M', 'ᅤ'),
+ (0xFFC6, 'M', 'ᅥ'),
+ (0xFFC7, 'M', 'ᅦ'),
+ (0xFFC8, 'X'),
+ (0xFFCA, 'M', 'ᅧ'),
+ (0xFFCB, 'M', 'ᅨ'),
+ (0xFFCC, 'M', 'ᅩ'),
+ (0xFFCD, 'M', 'ᅪ'),
+ (0xFFCE, 'M', 'ᅫ'),
+ (0xFFCF, 'M', 'ᅬ'),
+ (0xFFD0, 'X'),
+ (0xFFD2, 'M', 'ᅭ'),
+ (0xFFD3, 'M', 'ᅮ'),
+ (0xFFD4, 'M', 'ᅯ'),
+ (0xFFD5, 'M', 'ᅰ'),
+ (0xFFD6, 'M', 'ᅱ'),
+ (0xFFD7, 'M', 'ᅲ'),
+ (0xFFD8, 'X'),
+ (0xFFDA, 'M', 'ᅳ'),
+ (0xFFDB, 'M', 'ᅴ'),
+ (0xFFDC, 'M', 'ᅵ'),
+ (0xFFDD, 'X'),
+ (0xFFE0, 'M', '¢'),
+ (0xFFE1, 'M', '£'),
+ (0xFFE2, 'M', '¬'),
+ (0xFFE3, '3', ' ̄'),
+ (0xFFE4, 'M', '¦'),
+ (0xFFE5, 'M', '¥'),
+ (0xFFE6, 'M', '₩'),
+ (0xFFE7, 'X'),
+ (0xFFE8, 'M', '│'),
+ (0xFFE9, 'M', '←'),
+ ]
+
+def _seg_53() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xFFEA, 'M', '↑'),
+ (0xFFEB, 'M', '→'),
+ (0xFFEC, 'M', '↓'),
+ (0xFFED, 'M', '■'),
+ (0xFFEE, 'M', '○'),
+ (0xFFEF, 'X'),
+ (0x10000, 'V'),
+ (0x1000C, 'X'),
+ (0x1000D, 'V'),
+ (0x10027, 'X'),
+ (0x10028, 'V'),
+ (0x1003B, 'X'),
+ (0x1003C, 'V'),
+ (0x1003E, 'X'),
+ (0x1003F, 'V'),
+ (0x1004E, 'X'),
+ (0x10050, 'V'),
+ (0x1005E, 'X'),
+ (0x10080, 'V'),
+ (0x100FB, 'X'),
+ (0x10100, 'V'),
+ (0x10103, 'X'),
+ (0x10107, 'V'),
+ (0x10134, 'X'),
+ (0x10137, 'V'),
+ (0x1018F, 'X'),
+ (0x10190, 'V'),
+ (0x1019D, 'X'),
+ (0x101A0, 'V'),
+ (0x101A1, 'X'),
+ (0x101D0, 'V'),
+ (0x101FE, 'X'),
+ (0x10280, 'V'),
+ (0x1029D, 'X'),
+ (0x102A0, 'V'),
+ (0x102D1, 'X'),
+ (0x102E0, 'V'),
+ (0x102FC, 'X'),
+ (0x10300, 'V'),
+ (0x10324, 'X'),
+ (0x1032D, 'V'),
+ (0x1034B, 'X'),
+ (0x10350, 'V'),
+ (0x1037B, 'X'),
+ (0x10380, 'V'),
+ (0x1039E, 'X'),
+ (0x1039F, 'V'),
+ (0x103C4, 'X'),
+ (0x103C8, 'V'),
+ (0x103D6, 'X'),
+ (0x10400, 'M', '𐐨'),
+ (0x10401, 'M', '𐐩'),
+ (0x10402, 'M', '𐐪'),
+ (0x10403, 'M', '𐐫'),
+ (0x10404, 'M', '𐐬'),
+ (0x10405, 'M', '𐐭'),
+ (0x10406, 'M', '𐐮'),
+ (0x10407, 'M', '𐐯'),
+ (0x10408, 'M', '𐐰'),
+ (0x10409, 'M', '𐐱'),
+ (0x1040A, 'M', '𐐲'),
+ (0x1040B, 'M', '𐐳'),
+ (0x1040C, 'M', '𐐴'),
+ (0x1040D, 'M', '𐐵'),
+ (0x1040E, 'M', '𐐶'),
+ (0x1040F, 'M', '𐐷'),
+ (0x10410, 'M', '𐐸'),
+ (0x10411, 'M', '𐐹'),
+ (0x10412, 'M', '𐐺'),
+ (0x10413, 'M', '𐐻'),
+ (0x10414, 'M', '𐐼'),
+ (0x10415, 'M', '𐐽'),
+ (0x10416, 'M', '𐐾'),
+ (0x10417, 'M', '𐐿'),
+ (0x10418, 'M', '𐑀'),
+ (0x10419, 'M', '𐑁'),
+ (0x1041A, 'M', '𐑂'),
+ (0x1041B, 'M', '𐑃'),
+ (0x1041C, 'M', '𐑄'),
+ (0x1041D, 'M', '𐑅'),
+ (0x1041E, 'M', '𐑆'),
+ (0x1041F, 'M', '𐑇'),
+ (0x10420, 'M', '𐑈'),
+ (0x10421, 'M', '𐑉'),
+ (0x10422, 'M', '𐑊'),
+ (0x10423, 'M', '𐑋'),
+ (0x10424, 'M', '𐑌'),
+ (0x10425, 'M', '𐑍'),
+ (0x10426, 'M', '𐑎'),
+ (0x10427, 'M', '𐑏'),
+ (0x10428, 'V'),
+ (0x1049E, 'X'),
+ (0x104A0, 'V'),
+ (0x104AA, 'X'),
+ (0x104B0, 'M', '𐓘'),
+ (0x104B1, 'M', '𐓙'),
+ (0x104B2, 'M', '𐓚'),
+ (0x104B3, 'M', '𐓛'),
+ (0x104B4, 'M', '𐓜'),
+ (0x104B5, 'M', '𐓝'),
+ ]
+
+def _seg_54() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x104B6, 'M', '𐓞'),
+ (0x104B7, 'M', '𐓟'),
+ (0x104B8, 'M', '𐓠'),
+ (0x104B9, 'M', '𐓡'),
+ (0x104BA, 'M', '𐓢'),
+ (0x104BB, 'M', '𐓣'),
+ (0x104BC, 'M', '𐓤'),
+ (0x104BD, 'M', '𐓥'),
+ (0x104BE, 'M', '𐓦'),
+ (0x104BF, 'M', '𐓧'),
+ (0x104C0, 'M', '𐓨'),
+ (0x104C1, 'M', '𐓩'),
+ (0x104C2, 'M', '𐓪'),
+ (0x104C3, 'M', '𐓫'),
+ (0x104C4, 'M', '𐓬'),
+ (0x104C5, 'M', '𐓭'),
+ (0x104C6, 'M', '𐓮'),
+ (0x104C7, 'M', '𐓯'),
+ (0x104C8, 'M', '𐓰'),
+ (0x104C9, 'M', '𐓱'),
+ (0x104CA, 'M', '𐓲'),
+ (0x104CB, 'M', '𐓳'),
+ (0x104CC, 'M', '𐓴'),
+ (0x104CD, 'M', '𐓵'),
+ (0x104CE, 'M', '𐓶'),
+ (0x104CF, 'M', '𐓷'),
+ (0x104D0, 'M', '𐓸'),
+ (0x104D1, 'M', '𐓹'),
+ (0x104D2, 'M', '𐓺'),
+ (0x104D3, 'M', '𐓻'),
+ (0x104D4, 'X'),
+ (0x104D8, 'V'),
+ (0x104FC, 'X'),
+ (0x10500, 'V'),
+ (0x10528, 'X'),
+ (0x10530, 'V'),
+ (0x10564, 'X'),
+ (0x1056F, 'V'),
+ (0x10570, 'M', '𐖗'),
+ (0x10571, 'M', '𐖘'),
+ (0x10572, 'M', '𐖙'),
+ (0x10573, 'M', '𐖚'),
+ (0x10574, 'M', '𐖛'),
+ (0x10575, 'M', '𐖜'),
+ (0x10576, 'M', '𐖝'),
+ (0x10577, 'M', '𐖞'),
+ (0x10578, 'M', '𐖟'),
+ (0x10579, 'M', '𐖠'),
+ (0x1057A, 'M', '𐖡'),
+ (0x1057B, 'X'),
+ (0x1057C, 'M', '𐖣'),
+ (0x1057D, 'M', '𐖤'),
+ (0x1057E, 'M', '𐖥'),
+ (0x1057F, 'M', '𐖦'),
+ (0x10580, 'M', '𐖧'),
+ (0x10581, 'M', '𐖨'),
+ (0x10582, 'M', '𐖩'),
+ (0x10583, 'M', '𐖪'),
+ (0x10584, 'M', '𐖫'),
+ (0x10585, 'M', '𐖬'),
+ (0x10586, 'M', '𐖭'),
+ (0x10587, 'M', '𐖮'),
+ (0x10588, 'M', '𐖯'),
+ (0x10589, 'M', '𐖰'),
+ (0x1058A, 'M', '𐖱'),
+ (0x1058B, 'X'),
+ (0x1058C, 'M', '𐖳'),
+ (0x1058D, 'M', '𐖴'),
+ (0x1058E, 'M', '𐖵'),
+ (0x1058F, 'M', '𐖶'),
+ (0x10590, 'M', '𐖷'),
+ (0x10591, 'M', '𐖸'),
+ (0x10592, 'M', '𐖹'),
+ (0x10593, 'X'),
+ (0x10594, 'M', '𐖻'),
+ (0x10595, 'M', '𐖼'),
+ (0x10596, 'X'),
+ (0x10597, 'V'),
+ (0x105A2, 'X'),
+ (0x105A3, 'V'),
+ (0x105B2, 'X'),
+ (0x105B3, 'V'),
+ (0x105BA, 'X'),
+ (0x105BB, 'V'),
+ (0x105BD, 'X'),
+ (0x10600, 'V'),
+ (0x10737, 'X'),
+ (0x10740, 'V'),
+ (0x10756, 'X'),
+ (0x10760, 'V'),
+ (0x10768, 'X'),
+ (0x10780, 'V'),
+ (0x10781, 'M', 'ː'),
+ (0x10782, 'M', 'ˑ'),
+ (0x10783, 'M', 'æ'),
+ (0x10784, 'M', 'ʙ'),
+ (0x10785, 'M', 'ɓ'),
+ (0x10786, 'X'),
+ (0x10787, 'M', 'ʣ'),
+ (0x10788, 'M', 'ꭦ'),
+ ]
+
+def _seg_55() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x10789, 'M', 'ʥ'),
+ (0x1078A, 'M', 'ʤ'),
+ (0x1078B, 'M', 'ɖ'),
+ (0x1078C, 'M', 'ɗ'),
+ (0x1078D, 'M', 'ᶑ'),
+ (0x1078E, 'M', 'ɘ'),
+ (0x1078F, 'M', 'ɞ'),
+ (0x10790, 'M', 'ʩ'),
+ (0x10791, 'M', 'ɤ'),
+ (0x10792, 'M', 'ɢ'),
+ (0x10793, 'M', 'ɠ'),
+ (0x10794, 'M', 'ʛ'),
+ (0x10795, 'M', 'ħ'),
+ (0x10796, 'M', 'ʜ'),
+ (0x10797, 'M', 'ɧ'),
+ (0x10798, 'M', 'ʄ'),
+ (0x10799, 'M', 'ʪ'),
+ (0x1079A, 'M', 'ʫ'),
+ (0x1079B, 'M', 'ɬ'),
+ (0x1079C, 'M', '𝼄'),
+ (0x1079D, 'M', 'ꞎ'),
+ (0x1079E, 'M', 'ɮ'),
+ (0x1079F, 'M', '𝼅'),
+ (0x107A0, 'M', 'ʎ'),
+ (0x107A1, 'M', '𝼆'),
+ (0x107A2, 'M', 'ø'),
+ (0x107A3, 'M', 'ɶ'),
+ (0x107A4, 'M', 'ɷ'),
+ (0x107A5, 'M', 'q'),
+ (0x107A6, 'M', 'ɺ'),
+ (0x107A7, 'M', '𝼈'),
+ (0x107A8, 'M', 'ɽ'),
+ (0x107A9, 'M', 'ɾ'),
+ (0x107AA, 'M', 'ʀ'),
+ (0x107AB, 'M', 'ʨ'),
+ (0x107AC, 'M', 'ʦ'),
+ (0x107AD, 'M', 'ꭧ'),
+ (0x107AE, 'M', 'ʧ'),
+ (0x107AF, 'M', 'ʈ'),
+ (0x107B0, 'M', 'ⱱ'),
+ (0x107B1, 'X'),
+ (0x107B2, 'M', 'ʏ'),
+ (0x107B3, 'M', 'ʡ'),
+ (0x107B4, 'M', 'ʢ'),
+ (0x107B5, 'M', 'ʘ'),
+ (0x107B6, 'M', 'ǀ'),
+ (0x107B7, 'M', 'ǁ'),
+ (0x107B8, 'M', 'ǂ'),
+ (0x107B9, 'M', '𝼊'),
+ (0x107BA, 'M', '𝼞'),
+ (0x107BB, 'X'),
+ (0x10800, 'V'),
+ (0x10806, 'X'),
+ (0x10808, 'V'),
+ (0x10809, 'X'),
+ (0x1080A, 'V'),
+ (0x10836, 'X'),
+ (0x10837, 'V'),
+ (0x10839, 'X'),
+ (0x1083C, 'V'),
+ (0x1083D, 'X'),
+ (0x1083F, 'V'),
+ (0x10856, 'X'),
+ (0x10857, 'V'),
+ (0x1089F, 'X'),
+ (0x108A7, 'V'),
+ (0x108B0, 'X'),
+ (0x108E0, 'V'),
+ (0x108F3, 'X'),
+ (0x108F4, 'V'),
+ (0x108F6, 'X'),
+ (0x108FB, 'V'),
+ (0x1091C, 'X'),
+ (0x1091F, 'V'),
+ (0x1093A, 'X'),
+ (0x1093F, 'V'),
+ (0x10940, 'X'),
+ (0x10980, 'V'),
+ (0x109B8, 'X'),
+ (0x109BC, 'V'),
+ (0x109D0, 'X'),
+ (0x109D2, 'V'),
+ (0x10A04, 'X'),
+ (0x10A05, 'V'),
+ (0x10A07, 'X'),
+ (0x10A0C, 'V'),
+ (0x10A14, 'X'),
+ (0x10A15, 'V'),
+ (0x10A18, 'X'),
+ (0x10A19, 'V'),
+ (0x10A36, 'X'),
+ (0x10A38, 'V'),
+ (0x10A3B, 'X'),
+ (0x10A3F, 'V'),
+ (0x10A49, 'X'),
+ (0x10A50, 'V'),
+ (0x10A59, 'X'),
+ (0x10A60, 'V'),
+ (0x10AA0, 'X'),
+ (0x10AC0, 'V'),
+ ]
+
+def _seg_56() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x10AE7, 'X'),
+ (0x10AEB, 'V'),
+ (0x10AF7, 'X'),
+ (0x10B00, 'V'),
+ (0x10B36, 'X'),
+ (0x10B39, 'V'),
+ (0x10B56, 'X'),
+ (0x10B58, 'V'),
+ (0x10B73, 'X'),
+ (0x10B78, 'V'),
+ (0x10B92, 'X'),
+ (0x10B99, 'V'),
+ (0x10B9D, 'X'),
+ (0x10BA9, 'V'),
+ (0x10BB0, 'X'),
+ (0x10C00, 'V'),
+ (0x10C49, 'X'),
+ (0x10C80, 'M', '𐳀'),
+ (0x10C81, 'M', '𐳁'),
+ (0x10C82, 'M', '𐳂'),
+ (0x10C83, 'M', '𐳃'),
+ (0x10C84, 'M', '𐳄'),
+ (0x10C85, 'M', '𐳅'),
+ (0x10C86, 'M', '𐳆'),
+ (0x10C87, 'M', '𐳇'),
+ (0x10C88, 'M', '𐳈'),
+ (0x10C89, 'M', '𐳉'),
+ (0x10C8A, 'M', '𐳊'),
+ (0x10C8B, 'M', '𐳋'),
+ (0x10C8C, 'M', '𐳌'),
+ (0x10C8D, 'M', '𐳍'),
+ (0x10C8E, 'M', '𐳎'),
+ (0x10C8F, 'M', '𐳏'),
+ (0x10C90, 'M', '𐳐'),
+ (0x10C91, 'M', '𐳑'),
+ (0x10C92, 'M', '𐳒'),
+ (0x10C93, 'M', '𐳓'),
+ (0x10C94, 'M', '𐳔'),
+ (0x10C95, 'M', '𐳕'),
+ (0x10C96, 'M', '𐳖'),
+ (0x10C97, 'M', '𐳗'),
+ (0x10C98, 'M', '𐳘'),
+ (0x10C99, 'M', '𐳙'),
+ (0x10C9A, 'M', '𐳚'),
+ (0x10C9B, 'M', '𐳛'),
+ (0x10C9C, 'M', '𐳜'),
+ (0x10C9D, 'M', '𐳝'),
+ (0x10C9E, 'M', '𐳞'),
+ (0x10C9F, 'M', '𐳟'),
+ (0x10CA0, 'M', '𐳠'),
+ (0x10CA1, 'M', '𐳡'),
+ (0x10CA2, 'M', '𐳢'),
+ (0x10CA3, 'M', '𐳣'),
+ (0x10CA4, 'M', '𐳤'),
+ (0x10CA5, 'M', '𐳥'),
+ (0x10CA6, 'M', '𐳦'),
+ (0x10CA7, 'M', '𐳧'),
+ (0x10CA8, 'M', '𐳨'),
+ (0x10CA9, 'M', '𐳩'),
+ (0x10CAA, 'M', '𐳪'),
+ (0x10CAB, 'M', '𐳫'),
+ (0x10CAC, 'M', '𐳬'),
+ (0x10CAD, 'M', '𐳭'),
+ (0x10CAE, 'M', '𐳮'),
+ (0x10CAF, 'M', '𐳯'),
+ (0x10CB0, 'M', '𐳰'),
+ (0x10CB1, 'M', '𐳱'),
+ (0x10CB2, 'M', '𐳲'),
+ (0x10CB3, 'X'),
+ (0x10CC0, 'V'),
+ (0x10CF3, 'X'),
+ (0x10CFA, 'V'),
+ (0x10D28, 'X'),
+ (0x10D30, 'V'),
+ (0x10D3A, 'X'),
+ (0x10E60, 'V'),
+ (0x10E7F, 'X'),
+ (0x10E80, 'V'),
+ (0x10EAA, 'X'),
+ (0x10EAB, 'V'),
+ (0x10EAE, 'X'),
+ (0x10EB0, 'V'),
+ (0x10EB2, 'X'),
+ (0x10EFD, 'V'),
+ (0x10F28, 'X'),
+ (0x10F30, 'V'),
+ (0x10F5A, 'X'),
+ (0x10F70, 'V'),
+ (0x10F8A, 'X'),
+ (0x10FB0, 'V'),
+ (0x10FCC, 'X'),
+ (0x10FE0, 'V'),
+ (0x10FF7, 'X'),
+ (0x11000, 'V'),
+ (0x1104E, 'X'),
+ (0x11052, 'V'),
+ (0x11076, 'X'),
+ (0x1107F, 'V'),
+ (0x110BD, 'X'),
+ (0x110BE, 'V'),
+ ]
+
+def _seg_57() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x110C3, 'X'),
+ (0x110D0, 'V'),
+ (0x110E9, 'X'),
+ (0x110F0, 'V'),
+ (0x110FA, 'X'),
+ (0x11100, 'V'),
+ (0x11135, 'X'),
+ (0x11136, 'V'),
+ (0x11148, 'X'),
+ (0x11150, 'V'),
+ (0x11177, 'X'),
+ (0x11180, 'V'),
+ (0x111E0, 'X'),
+ (0x111E1, 'V'),
+ (0x111F5, 'X'),
+ (0x11200, 'V'),
+ (0x11212, 'X'),
+ (0x11213, 'V'),
+ (0x11242, 'X'),
+ (0x11280, 'V'),
+ (0x11287, 'X'),
+ (0x11288, 'V'),
+ (0x11289, 'X'),
+ (0x1128A, 'V'),
+ (0x1128E, 'X'),
+ (0x1128F, 'V'),
+ (0x1129E, 'X'),
+ (0x1129F, 'V'),
+ (0x112AA, 'X'),
+ (0x112B0, 'V'),
+ (0x112EB, 'X'),
+ (0x112F0, 'V'),
+ (0x112FA, 'X'),
+ (0x11300, 'V'),
+ (0x11304, 'X'),
+ (0x11305, 'V'),
+ (0x1130D, 'X'),
+ (0x1130F, 'V'),
+ (0x11311, 'X'),
+ (0x11313, 'V'),
+ (0x11329, 'X'),
+ (0x1132A, 'V'),
+ (0x11331, 'X'),
+ (0x11332, 'V'),
+ (0x11334, 'X'),
+ (0x11335, 'V'),
+ (0x1133A, 'X'),
+ (0x1133B, 'V'),
+ (0x11345, 'X'),
+ (0x11347, 'V'),
+ (0x11349, 'X'),
+ (0x1134B, 'V'),
+ (0x1134E, 'X'),
+ (0x11350, 'V'),
+ (0x11351, 'X'),
+ (0x11357, 'V'),
+ (0x11358, 'X'),
+ (0x1135D, 'V'),
+ (0x11364, 'X'),
+ (0x11366, 'V'),
+ (0x1136D, 'X'),
+ (0x11370, 'V'),
+ (0x11375, 'X'),
+ (0x11400, 'V'),
+ (0x1145C, 'X'),
+ (0x1145D, 'V'),
+ (0x11462, 'X'),
+ (0x11480, 'V'),
+ (0x114C8, 'X'),
+ (0x114D0, 'V'),
+ (0x114DA, 'X'),
+ (0x11580, 'V'),
+ (0x115B6, 'X'),
+ (0x115B8, 'V'),
+ (0x115DE, 'X'),
+ (0x11600, 'V'),
+ (0x11645, 'X'),
+ (0x11650, 'V'),
+ (0x1165A, 'X'),
+ (0x11660, 'V'),
+ (0x1166D, 'X'),
+ (0x11680, 'V'),
+ (0x116BA, 'X'),
+ (0x116C0, 'V'),
+ (0x116CA, 'X'),
+ (0x11700, 'V'),
+ (0x1171B, 'X'),
+ (0x1171D, 'V'),
+ (0x1172C, 'X'),
+ (0x11730, 'V'),
+ (0x11747, 'X'),
+ (0x11800, 'V'),
+ (0x1183C, 'X'),
+ (0x118A0, 'M', '𑣀'),
+ (0x118A1, 'M', '𑣁'),
+ (0x118A2, 'M', '𑣂'),
+ (0x118A3, 'M', '𑣃'),
+ (0x118A4, 'M', '𑣄'),
+ (0x118A5, 'M', '𑣅'),
+ (0x118A6, 'M', '𑣆'),
+ ]
+
+def _seg_58() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x118A7, 'M', '𑣇'),
+ (0x118A8, 'M', '𑣈'),
+ (0x118A9, 'M', '𑣉'),
+ (0x118AA, 'M', '𑣊'),
+ (0x118AB, 'M', '𑣋'),
+ (0x118AC, 'M', '𑣌'),
+ (0x118AD, 'M', '𑣍'),
+ (0x118AE, 'M', '𑣎'),
+ (0x118AF, 'M', '𑣏'),
+ (0x118B0, 'M', '𑣐'),
+ (0x118B1, 'M', '𑣑'),
+ (0x118B2, 'M', '𑣒'),
+ (0x118B3, 'M', '𑣓'),
+ (0x118B4, 'M', '𑣔'),
+ (0x118B5, 'M', '𑣕'),
+ (0x118B6, 'M', '𑣖'),
+ (0x118B7, 'M', '𑣗'),
+ (0x118B8, 'M', '𑣘'),
+ (0x118B9, 'M', '𑣙'),
+ (0x118BA, 'M', '𑣚'),
+ (0x118BB, 'M', '𑣛'),
+ (0x118BC, 'M', '𑣜'),
+ (0x118BD, 'M', '𑣝'),
+ (0x118BE, 'M', '𑣞'),
+ (0x118BF, 'M', '𑣟'),
+ (0x118C0, 'V'),
+ (0x118F3, 'X'),
+ (0x118FF, 'V'),
+ (0x11907, 'X'),
+ (0x11909, 'V'),
+ (0x1190A, 'X'),
+ (0x1190C, 'V'),
+ (0x11914, 'X'),
+ (0x11915, 'V'),
+ (0x11917, 'X'),
+ (0x11918, 'V'),
+ (0x11936, 'X'),
+ (0x11937, 'V'),
+ (0x11939, 'X'),
+ (0x1193B, 'V'),
+ (0x11947, 'X'),
+ (0x11950, 'V'),
+ (0x1195A, 'X'),
+ (0x119A0, 'V'),
+ (0x119A8, 'X'),
+ (0x119AA, 'V'),
+ (0x119D8, 'X'),
+ (0x119DA, 'V'),
+ (0x119E5, 'X'),
+ (0x11A00, 'V'),
+ (0x11A48, 'X'),
+ (0x11A50, 'V'),
+ (0x11AA3, 'X'),
+ (0x11AB0, 'V'),
+ (0x11AF9, 'X'),
+ (0x11B00, 'V'),
+ (0x11B0A, 'X'),
+ (0x11C00, 'V'),
+ (0x11C09, 'X'),
+ (0x11C0A, 'V'),
+ (0x11C37, 'X'),
+ (0x11C38, 'V'),
+ (0x11C46, 'X'),
+ (0x11C50, 'V'),
+ (0x11C6D, 'X'),
+ (0x11C70, 'V'),
+ (0x11C90, 'X'),
+ (0x11C92, 'V'),
+ (0x11CA8, 'X'),
+ (0x11CA9, 'V'),
+ (0x11CB7, 'X'),
+ (0x11D00, 'V'),
+ (0x11D07, 'X'),
+ (0x11D08, 'V'),
+ (0x11D0A, 'X'),
+ (0x11D0B, 'V'),
+ (0x11D37, 'X'),
+ (0x11D3A, 'V'),
+ (0x11D3B, 'X'),
+ (0x11D3C, 'V'),
+ (0x11D3E, 'X'),
+ (0x11D3F, 'V'),
+ (0x11D48, 'X'),
+ (0x11D50, 'V'),
+ (0x11D5A, 'X'),
+ (0x11D60, 'V'),
+ (0x11D66, 'X'),
+ (0x11D67, 'V'),
+ (0x11D69, 'X'),
+ (0x11D6A, 'V'),
+ (0x11D8F, 'X'),
+ (0x11D90, 'V'),
+ (0x11D92, 'X'),
+ (0x11D93, 'V'),
+ (0x11D99, 'X'),
+ (0x11DA0, 'V'),
+ (0x11DAA, 'X'),
+ (0x11EE0, 'V'),
+ (0x11EF9, 'X'),
+ (0x11F00, 'V'),
+ ]
+
+def _seg_59() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x11F11, 'X'),
+ (0x11F12, 'V'),
+ (0x11F3B, 'X'),
+ (0x11F3E, 'V'),
+ (0x11F5A, 'X'),
+ (0x11FB0, 'V'),
+ (0x11FB1, 'X'),
+ (0x11FC0, 'V'),
+ (0x11FF2, 'X'),
+ (0x11FFF, 'V'),
+ (0x1239A, 'X'),
+ (0x12400, 'V'),
+ (0x1246F, 'X'),
+ (0x12470, 'V'),
+ (0x12475, 'X'),
+ (0x12480, 'V'),
+ (0x12544, 'X'),
+ (0x12F90, 'V'),
+ (0x12FF3, 'X'),
+ (0x13000, 'V'),
+ (0x13430, 'X'),
+ (0x13440, 'V'),
+ (0x13456, 'X'),
+ (0x14400, 'V'),
+ (0x14647, 'X'),
+ (0x16800, 'V'),
+ (0x16A39, 'X'),
+ (0x16A40, 'V'),
+ (0x16A5F, 'X'),
+ (0x16A60, 'V'),
+ (0x16A6A, 'X'),
+ (0x16A6E, 'V'),
+ (0x16ABF, 'X'),
+ (0x16AC0, 'V'),
+ (0x16ACA, 'X'),
+ (0x16AD0, 'V'),
+ (0x16AEE, 'X'),
+ (0x16AF0, 'V'),
+ (0x16AF6, 'X'),
+ (0x16B00, 'V'),
+ (0x16B46, 'X'),
+ (0x16B50, 'V'),
+ (0x16B5A, 'X'),
+ (0x16B5B, 'V'),
+ (0x16B62, 'X'),
+ (0x16B63, 'V'),
+ (0x16B78, 'X'),
+ (0x16B7D, 'V'),
+ (0x16B90, 'X'),
+ (0x16E40, 'M', '𖹠'),
+ (0x16E41, 'M', '𖹡'),
+ (0x16E42, 'M', '𖹢'),
+ (0x16E43, 'M', '𖹣'),
+ (0x16E44, 'M', '𖹤'),
+ (0x16E45, 'M', '𖹥'),
+ (0x16E46, 'M', '𖹦'),
+ (0x16E47, 'M', '𖹧'),
+ (0x16E48, 'M', '𖹨'),
+ (0x16E49, 'M', '𖹩'),
+ (0x16E4A, 'M', '𖹪'),
+ (0x16E4B, 'M', '𖹫'),
+ (0x16E4C, 'M', '𖹬'),
+ (0x16E4D, 'M', '𖹭'),
+ (0x16E4E, 'M', '𖹮'),
+ (0x16E4F, 'M', '𖹯'),
+ (0x16E50, 'M', '𖹰'),
+ (0x16E51, 'M', '𖹱'),
+ (0x16E52, 'M', '𖹲'),
+ (0x16E53, 'M', '𖹳'),
+ (0x16E54, 'M', '𖹴'),
+ (0x16E55, 'M', '𖹵'),
+ (0x16E56, 'M', '𖹶'),
+ (0x16E57, 'M', '𖹷'),
+ (0x16E58, 'M', '𖹸'),
+ (0x16E59, 'M', '𖹹'),
+ (0x16E5A, 'M', '𖹺'),
+ (0x16E5B, 'M', '𖹻'),
+ (0x16E5C, 'M', '𖹼'),
+ (0x16E5D, 'M', '𖹽'),
+ (0x16E5E, 'M', '𖹾'),
+ (0x16E5F, 'M', '𖹿'),
+ (0x16E60, 'V'),
+ (0x16E9B, 'X'),
+ (0x16F00, 'V'),
+ (0x16F4B, 'X'),
+ (0x16F4F, 'V'),
+ (0x16F88, 'X'),
+ (0x16F8F, 'V'),
+ (0x16FA0, 'X'),
+ (0x16FE0, 'V'),
+ (0x16FE5, 'X'),
+ (0x16FF0, 'V'),
+ (0x16FF2, 'X'),
+ (0x17000, 'V'),
+ (0x187F8, 'X'),
+ (0x18800, 'V'),
+ (0x18CD6, 'X'),
+ (0x18D00, 'V'),
+ (0x18D09, 'X'),
+ (0x1AFF0, 'V'),
+ ]
+
+def _seg_60() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1AFF4, 'X'),
+ (0x1AFF5, 'V'),
+ (0x1AFFC, 'X'),
+ (0x1AFFD, 'V'),
+ (0x1AFFF, 'X'),
+ (0x1B000, 'V'),
+ (0x1B123, 'X'),
+ (0x1B132, 'V'),
+ (0x1B133, 'X'),
+ (0x1B150, 'V'),
+ (0x1B153, 'X'),
+ (0x1B155, 'V'),
+ (0x1B156, 'X'),
+ (0x1B164, 'V'),
+ (0x1B168, 'X'),
+ (0x1B170, 'V'),
+ (0x1B2FC, 'X'),
+ (0x1BC00, 'V'),
+ (0x1BC6B, 'X'),
+ (0x1BC70, 'V'),
+ (0x1BC7D, 'X'),
+ (0x1BC80, 'V'),
+ (0x1BC89, 'X'),
+ (0x1BC90, 'V'),
+ (0x1BC9A, 'X'),
+ (0x1BC9C, 'V'),
+ (0x1BCA0, 'I'),
+ (0x1BCA4, 'X'),
+ (0x1CF00, 'V'),
+ (0x1CF2E, 'X'),
+ (0x1CF30, 'V'),
+ (0x1CF47, 'X'),
+ (0x1CF50, 'V'),
+ (0x1CFC4, 'X'),
+ (0x1D000, 'V'),
+ (0x1D0F6, 'X'),
+ (0x1D100, 'V'),
+ (0x1D127, 'X'),
+ (0x1D129, 'V'),
+ (0x1D15E, 'M', '𝅗𝅥'),
+ (0x1D15F, 'M', '𝅘𝅥'),
+ (0x1D160, 'M', '𝅘𝅥𝅮'),
+ (0x1D161, 'M', '𝅘𝅥𝅯'),
+ (0x1D162, 'M', '𝅘𝅥𝅰'),
+ (0x1D163, 'M', '𝅘𝅥𝅱'),
+ (0x1D164, 'M', '𝅘𝅥𝅲'),
+ (0x1D165, 'V'),
+ (0x1D173, 'X'),
+ (0x1D17B, 'V'),
+ (0x1D1BB, 'M', '𝆹𝅥'),
+ (0x1D1BC, 'M', '𝆺𝅥'),
+ (0x1D1BD, 'M', '𝆹𝅥𝅮'),
+ (0x1D1BE, 'M', '𝆺𝅥𝅮'),
+ (0x1D1BF, 'M', '𝆹𝅥𝅯'),
+ (0x1D1C0, 'M', '𝆺𝅥𝅯'),
+ (0x1D1C1, 'V'),
+ (0x1D1EB, 'X'),
+ (0x1D200, 'V'),
+ (0x1D246, 'X'),
+ (0x1D2C0, 'V'),
+ (0x1D2D4, 'X'),
+ (0x1D2E0, 'V'),
+ (0x1D2F4, 'X'),
+ (0x1D300, 'V'),
+ (0x1D357, 'X'),
+ (0x1D360, 'V'),
+ (0x1D379, 'X'),
+ (0x1D400, 'M', 'a'),
+ (0x1D401, 'M', 'b'),
+ (0x1D402, 'M', 'c'),
+ (0x1D403, 'M', 'd'),
+ (0x1D404, 'M', 'e'),
+ (0x1D405, 'M', 'f'),
+ (0x1D406, 'M', 'g'),
+ (0x1D407, 'M', 'h'),
+ (0x1D408, 'M', 'i'),
+ (0x1D409, 'M', 'j'),
+ (0x1D40A, 'M', 'k'),
+ (0x1D40B, 'M', 'l'),
+ (0x1D40C, 'M', 'm'),
+ (0x1D40D, 'M', 'n'),
+ (0x1D40E, 'M', 'o'),
+ (0x1D40F, 'M', 'p'),
+ (0x1D410, 'M', 'q'),
+ (0x1D411, 'M', 'r'),
+ (0x1D412, 'M', 's'),
+ (0x1D413, 'M', 't'),
+ (0x1D414, 'M', 'u'),
+ (0x1D415, 'M', 'v'),
+ (0x1D416, 'M', 'w'),
+ (0x1D417, 'M', 'x'),
+ (0x1D418, 'M', 'y'),
+ (0x1D419, 'M', 'z'),
+ (0x1D41A, 'M', 'a'),
+ (0x1D41B, 'M', 'b'),
+ (0x1D41C, 'M', 'c'),
+ (0x1D41D, 'M', 'd'),
+ (0x1D41E, 'M', 'e'),
+ (0x1D41F, 'M', 'f'),
+ (0x1D420, 'M', 'g'),
+ ]
+
+def _seg_61() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1D421, 'M', 'h'),
+ (0x1D422, 'M', 'i'),
+ (0x1D423, 'M', 'j'),
+ (0x1D424, 'M', 'k'),
+ (0x1D425, 'M', 'l'),
+ (0x1D426, 'M', 'm'),
+ (0x1D427, 'M', 'n'),
+ (0x1D428, 'M', 'o'),
+ (0x1D429, 'M', 'p'),
+ (0x1D42A, 'M', 'q'),
+ (0x1D42B, 'M', 'r'),
+ (0x1D42C, 'M', 's'),
+ (0x1D42D, 'M', 't'),
+ (0x1D42E, 'M', 'u'),
+ (0x1D42F, 'M', 'v'),
+ (0x1D430, 'M', 'w'),
+ (0x1D431, 'M', 'x'),
+ (0x1D432, 'M', 'y'),
+ (0x1D433, 'M', 'z'),
+ (0x1D434, 'M', 'a'),
+ (0x1D435, 'M', 'b'),
+ (0x1D436, 'M', 'c'),
+ (0x1D437, 'M', 'd'),
+ (0x1D438, 'M', 'e'),
+ (0x1D439, 'M', 'f'),
+ (0x1D43A, 'M', 'g'),
+ (0x1D43B, 'M', 'h'),
+ (0x1D43C, 'M', 'i'),
+ (0x1D43D, 'M', 'j'),
+ (0x1D43E, 'M', 'k'),
+ (0x1D43F, 'M', 'l'),
+ (0x1D440, 'M', 'm'),
+ (0x1D441, 'M', 'n'),
+ (0x1D442, 'M', 'o'),
+ (0x1D443, 'M', 'p'),
+ (0x1D444, 'M', 'q'),
+ (0x1D445, 'M', 'r'),
+ (0x1D446, 'M', 's'),
+ (0x1D447, 'M', 't'),
+ (0x1D448, 'M', 'u'),
+ (0x1D449, 'M', 'v'),
+ (0x1D44A, 'M', 'w'),
+ (0x1D44B, 'M', 'x'),
+ (0x1D44C, 'M', 'y'),
+ (0x1D44D, 'M', 'z'),
+ (0x1D44E, 'M', 'a'),
+ (0x1D44F, 'M', 'b'),
+ (0x1D450, 'M', 'c'),
+ (0x1D451, 'M', 'd'),
+ (0x1D452, 'M', 'e'),
+ (0x1D453, 'M', 'f'),
+ (0x1D454, 'M', 'g'),
+ (0x1D455, 'X'),
+ (0x1D456, 'M', 'i'),
+ (0x1D457, 'M', 'j'),
+ (0x1D458, 'M', 'k'),
+ (0x1D459, 'M', 'l'),
+ (0x1D45A, 'M', 'm'),
+ (0x1D45B, 'M', 'n'),
+ (0x1D45C, 'M', 'o'),
+ (0x1D45D, 'M', 'p'),
+ (0x1D45E, 'M', 'q'),
+ (0x1D45F, 'M', 'r'),
+ (0x1D460, 'M', 's'),
+ (0x1D461, 'M', 't'),
+ (0x1D462, 'M', 'u'),
+ (0x1D463, 'M', 'v'),
+ (0x1D464, 'M', 'w'),
+ (0x1D465, 'M', 'x'),
+ (0x1D466, 'M', 'y'),
+ (0x1D467, 'M', 'z'),
+ (0x1D468, 'M', 'a'),
+ (0x1D469, 'M', 'b'),
+ (0x1D46A, 'M', 'c'),
+ (0x1D46B, 'M', 'd'),
+ (0x1D46C, 'M', 'e'),
+ (0x1D46D, 'M', 'f'),
+ (0x1D46E, 'M', 'g'),
+ (0x1D46F, 'M', 'h'),
+ (0x1D470, 'M', 'i'),
+ (0x1D471, 'M', 'j'),
+ (0x1D472, 'M', 'k'),
+ (0x1D473, 'M', 'l'),
+ (0x1D474, 'M', 'm'),
+ (0x1D475, 'M', 'n'),
+ (0x1D476, 'M', 'o'),
+ (0x1D477, 'M', 'p'),
+ (0x1D478, 'M', 'q'),
+ (0x1D479, 'M', 'r'),
+ (0x1D47A, 'M', 's'),
+ (0x1D47B, 'M', 't'),
+ (0x1D47C, 'M', 'u'),
+ (0x1D47D, 'M', 'v'),
+ (0x1D47E, 'M', 'w'),
+ (0x1D47F, 'M', 'x'),
+ (0x1D480, 'M', 'y'),
+ (0x1D481, 'M', 'z'),
+ (0x1D482, 'M', 'a'),
+ (0x1D483, 'M', 'b'),
+ (0x1D484, 'M', 'c'),
+ ]
+
+def _seg_62() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1D485, 'M', 'd'),
+ (0x1D486, 'M', 'e'),
+ (0x1D487, 'M', 'f'),
+ (0x1D488, 'M', 'g'),
+ (0x1D489, 'M', 'h'),
+ (0x1D48A, 'M', 'i'),
+ (0x1D48B, 'M', 'j'),
+ (0x1D48C, 'M', 'k'),
+ (0x1D48D, 'M', 'l'),
+ (0x1D48E, 'M', 'm'),
+ (0x1D48F, 'M', 'n'),
+ (0x1D490, 'M', 'o'),
+ (0x1D491, 'M', 'p'),
+ (0x1D492, 'M', 'q'),
+ (0x1D493, 'M', 'r'),
+ (0x1D494, 'M', 's'),
+ (0x1D495, 'M', 't'),
+ (0x1D496, 'M', 'u'),
+ (0x1D497, 'M', 'v'),
+ (0x1D498, 'M', 'w'),
+ (0x1D499, 'M', 'x'),
+ (0x1D49A, 'M', 'y'),
+ (0x1D49B, 'M', 'z'),
+ (0x1D49C, 'M', 'a'),
+ (0x1D49D, 'X'),
+ (0x1D49E, 'M', 'c'),
+ (0x1D49F, 'M', 'd'),
+ (0x1D4A0, 'X'),
+ (0x1D4A2, 'M', 'g'),
+ (0x1D4A3, 'X'),
+ (0x1D4A5, 'M', 'j'),
+ (0x1D4A6, 'M', 'k'),
+ (0x1D4A7, 'X'),
+ (0x1D4A9, 'M', 'n'),
+ (0x1D4AA, 'M', 'o'),
+ (0x1D4AB, 'M', 'p'),
+ (0x1D4AC, 'M', 'q'),
+ (0x1D4AD, 'X'),
+ (0x1D4AE, 'M', 's'),
+ (0x1D4AF, 'M', 't'),
+ (0x1D4B0, 'M', 'u'),
+ (0x1D4B1, 'M', 'v'),
+ (0x1D4B2, 'M', 'w'),
+ (0x1D4B3, 'M', 'x'),
+ (0x1D4B4, 'M', 'y'),
+ (0x1D4B5, 'M', 'z'),
+ (0x1D4B6, 'M', 'a'),
+ (0x1D4B7, 'M', 'b'),
+ (0x1D4B8, 'M', 'c'),
+ (0x1D4B9, 'M', 'd'),
+ (0x1D4BA, 'X'),
+ (0x1D4BB, 'M', 'f'),
+ (0x1D4BC, 'X'),
+ (0x1D4BD, 'M', 'h'),
+ (0x1D4BE, 'M', 'i'),
+ (0x1D4BF, 'M', 'j'),
+ (0x1D4C0, 'M', 'k'),
+ (0x1D4C1, 'M', 'l'),
+ (0x1D4C2, 'M', 'm'),
+ (0x1D4C3, 'M', 'n'),
+ (0x1D4C4, 'X'),
+ (0x1D4C5, 'M', 'p'),
+ (0x1D4C6, 'M', 'q'),
+ (0x1D4C7, 'M', 'r'),
+ (0x1D4C8, 'M', 's'),
+ (0x1D4C9, 'M', 't'),
+ (0x1D4CA, 'M', 'u'),
+ (0x1D4CB, 'M', 'v'),
+ (0x1D4CC, 'M', 'w'),
+ (0x1D4CD, 'M', 'x'),
+ (0x1D4CE, 'M', 'y'),
+ (0x1D4CF, 'M', 'z'),
+ (0x1D4D0, 'M', 'a'),
+ (0x1D4D1, 'M', 'b'),
+ (0x1D4D2, 'M', 'c'),
+ (0x1D4D3, 'M', 'd'),
+ (0x1D4D4, 'M', 'e'),
+ (0x1D4D5, 'M', 'f'),
+ (0x1D4D6, 'M', 'g'),
+ (0x1D4D7, 'M', 'h'),
+ (0x1D4D8, 'M', 'i'),
+ (0x1D4D9, 'M', 'j'),
+ (0x1D4DA, 'M', 'k'),
+ (0x1D4DB, 'M', 'l'),
+ (0x1D4DC, 'M', 'm'),
+ (0x1D4DD, 'M', 'n'),
+ (0x1D4DE, 'M', 'o'),
+ (0x1D4DF, 'M', 'p'),
+ (0x1D4E0, 'M', 'q'),
+ (0x1D4E1, 'M', 'r'),
+ (0x1D4E2, 'M', 's'),
+ (0x1D4E3, 'M', 't'),
+ (0x1D4E4, 'M', 'u'),
+ (0x1D4E5, 'M', 'v'),
+ (0x1D4E6, 'M', 'w'),
+ (0x1D4E7, 'M', 'x'),
+ (0x1D4E8, 'M', 'y'),
+ (0x1D4E9, 'M', 'z'),
+ (0x1D4EA, 'M', 'a'),
+ (0x1D4EB, 'M', 'b'),
+ ]
+
+def _seg_63() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1D4EC, 'M', 'c'),
+ (0x1D4ED, 'M', 'd'),
+ (0x1D4EE, 'M', 'e'),
+ (0x1D4EF, 'M', 'f'),
+ (0x1D4F0, 'M', 'g'),
+ (0x1D4F1, 'M', 'h'),
+ (0x1D4F2, 'M', 'i'),
+ (0x1D4F3, 'M', 'j'),
+ (0x1D4F4, 'M', 'k'),
+ (0x1D4F5, 'M', 'l'),
+ (0x1D4F6, 'M', 'm'),
+ (0x1D4F7, 'M', 'n'),
+ (0x1D4F8, 'M', 'o'),
+ (0x1D4F9, 'M', 'p'),
+ (0x1D4FA, 'M', 'q'),
+ (0x1D4FB, 'M', 'r'),
+ (0x1D4FC, 'M', 's'),
+ (0x1D4FD, 'M', 't'),
+ (0x1D4FE, 'M', 'u'),
+ (0x1D4FF, 'M', 'v'),
+ (0x1D500, 'M', 'w'),
+ (0x1D501, 'M', 'x'),
+ (0x1D502, 'M', 'y'),
+ (0x1D503, 'M', 'z'),
+ (0x1D504, 'M', 'a'),
+ (0x1D505, 'M', 'b'),
+ (0x1D506, 'X'),
+ (0x1D507, 'M', 'd'),
+ (0x1D508, 'M', 'e'),
+ (0x1D509, 'M', 'f'),
+ (0x1D50A, 'M', 'g'),
+ (0x1D50B, 'X'),
+ (0x1D50D, 'M', 'j'),
+ (0x1D50E, 'M', 'k'),
+ (0x1D50F, 'M', 'l'),
+ (0x1D510, 'M', 'm'),
+ (0x1D511, 'M', 'n'),
+ (0x1D512, 'M', 'o'),
+ (0x1D513, 'M', 'p'),
+ (0x1D514, 'M', 'q'),
+ (0x1D515, 'X'),
+ (0x1D516, 'M', 's'),
+ (0x1D517, 'M', 't'),
+ (0x1D518, 'M', 'u'),
+ (0x1D519, 'M', 'v'),
+ (0x1D51A, 'M', 'w'),
+ (0x1D51B, 'M', 'x'),
+ (0x1D51C, 'M', 'y'),
+ (0x1D51D, 'X'),
+ (0x1D51E, 'M', 'a'),
+ (0x1D51F, 'M', 'b'),
+ (0x1D520, 'M', 'c'),
+ (0x1D521, 'M', 'd'),
+ (0x1D522, 'M', 'e'),
+ (0x1D523, 'M', 'f'),
+ (0x1D524, 'M', 'g'),
+ (0x1D525, 'M', 'h'),
+ (0x1D526, 'M', 'i'),
+ (0x1D527, 'M', 'j'),
+ (0x1D528, 'M', 'k'),
+ (0x1D529, 'M', 'l'),
+ (0x1D52A, 'M', 'm'),
+ (0x1D52B, 'M', 'n'),
+ (0x1D52C, 'M', 'o'),
+ (0x1D52D, 'M', 'p'),
+ (0x1D52E, 'M', 'q'),
+ (0x1D52F, 'M', 'r'),
+ (0x1D530, 'M', 's'),
+ (0x1D531, 'M', 't'),
+ (0x1D532, 'M', 'u'),
+ (0x1D533, 'M', 'v'),
+ (0x1D534, 'M', 'w'),
+ (0x1D535, 'M', 'x'),
+ (0x1D536, 'M', 'y'),
+ (0x1D537, 'M', 'z'),
+ (0x1D538, 'M', 'a'),
+ (0x1D539, 'M', 'b'),
+ (0x1D53A, 'X'),
+ (0x1D53B, 'M', 'd'),
+ (0x1D53C, 'M', 'e'),
+ (0x1D53D, 'M', 'f'),
+ (0x1D53E, 'M', 'g'),
+ (0x1D53F, 'X'),
+ (0x1D540, 'M', 'i'),
+ (0x1D541, 'M', 'j'),
+ (0x1D542, 'M', 'k'),
+ (0x1D543, 'M', 'l'),
+ (0x1D544, 'M', 'm'),
+ (0x1D545, 'X'),
+ (0x1D546, 'M', 'o'),
+ (0x1D547, 'X'),
+ (0x1D54A, 'M', 's'),
+ (0x1D54B, 'M', 't'),
+ (0x1D54C, 'M', 'u'),
+ (0x1D54D, 'M', 'v'),
+ (0x1D54E, 'M', 'w'),
+ (0x1D54F, 'M', 'x'),
+ (0x1D550, 'M', 'y'),
+ (0x1D551, 'X'),
+ (0x1D552, 'M', 'a'),
+ ]
+
+def _seg_64() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1D553, 'M', 'b'),
+ (0x1D554, 'M', 'c'),
+ (0x1D555, 'M', 'd'),
+ (0x1D556, 'M', 'e'),
+ (0x1D557, 'M', 'f'),
+ (0x1D558, 'M', 'g'),
+ (0x1D559, 'M', 'h'),
+ (0x1D55A, 'M', 'i'),
+ (0x1D55B, 'M', 'j'),
+ (0x1D55C, 'M', 'k'),
+ (0x1D55D, 'M', 'l'),
+ (0x1D55E, 'M', 'm'),
+ (0x1D55F, 'M', 'n'),
+ (0x1D560, 'M', 'o'),
+ (0x1D561, 'M', 'p'),
+ (0x1D562, 'M', 'q'),
+ (0x1D563, 'M', 'r'),
+ (0x1D564, 'M', 's'),
+ (0x1D565, 'M', 't'),
+ (0x1D566, 'M', 'u'),
+ (0x1D567, 'M', 'v'),
+ (0x1D568, 'M', 'w'),
+ (0x1D569, 'M', 'x'),
+ (0x1D56A, 'M', 'y'),
+ (0x1D56B, 'M', 'z'),
+ (0x1D56C, 'M', 'a'),
+ (0x1D56D, 'M', 'b'),
+ (0x1D56E, 'M', 'c'),
+ (0x1D56F, 'M', 'd'),
+ (0x1D570, 'M', 'e'),
+ (0x1D571, 'M', 'f'),
+ (0x1D572, 'M', 'g'),
+ (0x1D573, 'M', 'h'),
+ (0x1D574, 'M', 'i'),
+ (0x1D575, 'M', 'j'),
+ (0x1D576, 'M', 'k'),
+ (0x1D577, 'M', 'l'),
+ (0x1D578, 'M', 'm'),
+ (0x1D579, 'M', 'n'),
+ (0x1D57A, 'M', 'o'),
+ (0x1D57B, 'M', 'p'),
+ (0x1D57C, 'M', 'q'),
+ (0x1D57D, 'M', 'r'),
+ (0x1D57E, 'M', 's'),
+ (0x1D57F, 'M', 't'),
+ (0x1D580, 'M', 'u'),
+ (0x1D581, 'M', 'v'),
+ (0x1D582, 'M', 'w'),
+ (0x1D583, 'M', 'x'),
+ (0x1D584, 'M', 'y'),
+ (0x1D585, 'M', 'z'),
+ (0x1D586, 'M', 'a'),
+ (0x1D587, 'M', 'b'),
+ (0x1D588, 'M', 'c'),
+ (0x1D589, 'M', 'd'),
+ (0x1D58A, 'M', 'e'),
+ (0x1D58B, 'M', 'f'),
+ (0x1D58C, 'M', 'g'),
+ (0x1D58D, 'M', 'h'),
+ (0x1D58E, 'M', 'i'),
+ (0x1D58F, 'M', 'j'),
+ (0x1D590, 'M', 'k'),
+ (0x1D591, 'M', 'l'),
+ (0x1D592, 'M', 'm'),
+ (0x1D593, 'M', 'n'),
+ (0x1D594, 'M', 'o'),
+ (0x1D595, 'M', 'p'),
+ (0x1D596, 'M', 'q'),
+ (0x1D597, 'M', 'r'),
+ (0x1D598, 'M', 's'),
+ (0x1D599, 'M', 't'),
+ (0x1D59A, 'M', 'u'),
+ (0x1D59B, 'M', 'v'),
+ (0x1D59C, 'M', 'w'),
+ (0x1D59D, 'M', 'x'),
+ (0x1D59E, 'M', 'y'),
+ (0x1D59F, 'M', 'z'),
+ (0x1D5A0, 'M', 'a'),
+ (0x1D5A1, 'M', 'b'),
+ (0x1D5A2, 'M', 'c'),
+ (0x1D5A3, 'M', 'd'),
+ (0x1D5A4, 'M', 'e'),
+ (0x1D5A5, 'M', 'f'),
+ (0x1D5A6, 'M', 'g'),
+ (0x1D5A7, 'M', 'h'),
+ (0x1D5A8, 'M', 'i'),
+ (0x1D5A9, 'M', 'j'),
+ (0x1D5AA, 'M', 'k'),
+ (0x1D5AB, 'M', 'l'),
+ (0x1D5AC, 'M', 'm'),
+ (0x1D5AD, 'M', 'n'),
+ (0x1D5AE, 'M', 'o'),
+ (0x1D5AF, 'M', 'p'),
+ (0x1D5B0, 'M', 'q'),
+ (0x1D5B1, 'M', 'r'),
+ (0x1D5B2, 'M', 's'),
+ (0x1D5B3, 'M', 't'),
+ (0x1D5B4, 'M', 'u'),
+ (0x1D5B5, 'M', 'v'),
+ (0x1D5B6, 'M', 'w'),
+ ]
+
+def _seg_65() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1D5B7, 'M', 'x'),
+ (0x1D5B8, 'M', 'y'),
+ (0x1D5B9, 'M', 'z'),
+ (0x1D5BA, 'M', 'a'),
+ (0x1D5BB, 'M', 'b'),
+ (0x1D5BC, 'M', 'c'),
+ (0x1D5BD, 'M', 'd'),
+ (0x1D5BE, 'M', 'e'),
+ (0x1D5BF, 'M', 'f'),
+ (0x1D5C0, 'M', 'g'),
+ (0x1D5C1, 'M', 'h'),
+ (0x1D5C2, 'M', 'i'),
+ (0x1D5C3, 'M', 'j'),
+ (0x1D5C4, 'M', 'k'),
+ (0x1D5C5, 'M', 'l'),
+ (0x1D5C6, 'M', 'm'),
+ (0x1D5C7, 'M', 'n'),
+ (0x1D5C8, 'M', 'o'),
+ (0x1D5C9, 'M', 'p'),
+ (0x1D5CA, 'M', 'q'),
+ (0x1D5CB, 'M', 'r'),
+ (0x1D5CC, 'M', 's'),
+ (0x1D5CD, 'M', 't'),
+ (0x1D5CE, 'M', 'u'),
+ (0x1D5CF, 'M', 'v'),
+ (0x1D5D0, 'M', 'w'),
+ (0x1D5D1, 'M', 'x'),
+ (0x1D5D2, 'M', 'y'),
+ (0x1D5D3, 'M', 'z'),
+ (0x1D5D4, 'M', 'a'),
+ (0x1D5D5, 'M', 'b'),
+ (0x1D5D6, 'M', 'c'),
+ (0x1D5D7, 'M', 'd'),
+ (0x1D5D8, 'M', 'e'),
+ (0x1D5D9, 'M', 'f'),
+ (0x1D5DA, 'M', 'g'),
+ (0x1D5DB, 'M', 'h'),
+ (0x1D5DC, 'M', 'i'),
+ (0x1D5DD, 'M', 'j'),
+ (0x1D5DE, 'M', 'k'),
+ (0x1D5DF, 'M', 'l'),
+ (0x1D5E0, 'M', 'm'),
+ (0x1D5E1, 'M', 'n'),
+ (0x1D5E2, 'M', 'o'),
+ (0x1D5E3, 'M', 'p'),
+ (0x1D5E4, 'M', 'q'),
+ (0x1D5E5, 'M', 'r'),
+ (0x1D5E6, 'M', 's'),
+ (0x1D5E7, 'M', 't'),
+ (0x1D5E8, 'M', 'u'),
+ (0x1D5E9, 'M', 'v'),
+ (0x1D5EA, 'M', 'w'),
+ (0x1D5EB, 'M', 'x'),
+ (0x1D5EC, 'M', 'y'),
+ (0x1D5ED, 'M', 'z'),
+ (0x1D5EE, 'M', 'a'),
+ (0x1D5EF, 'M', 'b'),
+ (0x1D5F0, 'M', 'c'),
+ (0x1D5F1, 'M', 'd'),
+ (0x1D5F2, 'M', 'e'),
+ (0x1D5F3, 'M', 'f'),
+ (0x1D5F4, 'M', 'g'),
+ (0x1D5F5, 'M', 'h'),
+ (0x1D5F6, 'M', 'i'),
+ (0x1D5F7, 'M', 'j'),
+ (0x1D5F8, 'M', 'k'),
+ (0x1D5F9, 'M', 'l'),
+ (0x1D5FA, 'M', 'm'),
+ (0x1D5FB, 'M', 'n'),
+ (0x1D5FC, 'M', 'o'),
+ (0x1D5FD, 'M', 'p'),
+ (0x1D5FE, 'M', 'q'),
+ (0x1D5FF, 'M', 'r'),
+ (0x1D600, 'M', 's'),
+ (0x1D601, 'M', 't'),
+ (0x1D602, 'M', 'u'),
+ (0x1D603, 'M', 'v'),
+ (0x1D604, 'M', 'w'),
+ (0x1D605, 'M', 'x'),
+ (0x1D606, 'M', 'y'),
+ (0x1D607, 'M', 'z'),
+ (0x1D608, 'M', 'a'),
+ (0x1D609, 'M', 'b'),
+ (0x1D60A, 'M', 'c'),
+ (0x1D60B, 'M', 'd'),
+ (0x1D60C, 'M', 'e'),
+ (0x1D60D, 'M', 'f'),
+ (0x1D60E, 'M', 'g'),
+ (0x1D60F, 'M', 'h'),
+ (0x1D610, 'M', 'i'),
+ (0x1D611, 'M', 'j'),
+ (0x1D612, 'M', 'k'),
+ (0x1D613, 'M', 'l'),
+ (0x1D614, 'M', 'm'),
+ (0x1D615, 'M', 'n'),
+ (0x1D616, 'M', 'o'),
+ (0x1D617, 'M', 'p'),
+ (0x1D618, 'M', 'q'),
+ (0x1D619, 'M', 'r'),
+ (0x1D61A, 'M', 's'),
+ ]
+
+def _seg_66() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1D61B, 'M', 't'),
+ (0x1D61C, 'M', 'u'),
+ (0x1D61D, 'M', 'v'),
+ (0x1D61E, 'M', 'w'),
+ (0x1D61F, 'M', 'x'),
+ (0x1D620, 'M', 'y'),
+ (0x1D621, 'M', 'z'),
+ (0x1D622, 'M', 'a'),
+ (0x1D623, 'M', 'b'),
+ (0x1D624, 'M', 'c'),
+ (0x1D625, 'M', 'd'),
+ (0x1D626, 'M', 'e'),
+ (0x1D627, 'M', 'f'),
+ (0x1D628, 'M', 'g'),
+ (0x1D629, 'M', 'h'),
+ (0x1D62A, 'M', 'i'),
+ (0x1D62B, 'M', 'j'),
+ (0x1D62C, 'M', 'k'),
+ (0x1D62D, 'M', 'l'),
+ (0x1D62E, 'M', 'm'),
+ (0x1D62F, 'M', 'n'),
+ (0x1D630, 'M', 'o'),
+ (0x1D631, 'M', 'p'),
+ (0x1D632, 'M', 'q'),
+ (0x1D633, 'M', 'r'),
+ (0x1D634, 'M', 's'),
+ (0x1D635, 'M', 't'),
+ (0x1D636, 'M', 'u'),
+ (0x1D637, 'M', 'v'),
+ (0x1D638, 'M', 'w'),
+ (0x1D639, 'M', 'x'),
+ (0x1D63A, 'M', 'y'),
+ (0x1D63B, 'M', 'z'),
+ (0x1D63C, 'M', 'a'),
+ (0x1D63D, 'M', 'b'),
+ (0x1D63E, 'M', 'c'),
+ (0x1D63F, 'M', 'd'),
+ (0x1D640, 'M', 'e'),
+ (0x1D641, 'M', 'f'),
+ (0x1D642, 'M', 'g'),
+ (0x1D643, 'M', 'h'),
+ (0x1D644, 'M', 'i'),
+ (0x1D645, 'M', 'j'),
+ (0x1D646, 'M', 'k'),
+ (0x1D647, 'M', 'l'),
+ (0x1D648, 'M', 'm'),
+ (0x1D649, 'M', 'n'),
+ (0x1D64A, 'M', 'o'),
+ (0x1D64B, 'M', 'p'),
+ (0x1D64C, 'M', 'q'),
+ (0x1D64D, 'M', 'r'),
+ (0x1D64E, 'M', 's'),
+ (0x1D64F, 'M', 't'),
+ (0x1D650, 'M', 'u'),
+ (0x1D651, 'M', 'v'),
+ (0x1D652, 'M', 'w'),
+ (0x1D653, 'M', 'x'),
+ (0x1D654, 'M', 'y'),
+ (0x1D655, 'M', 'z'),
+ (0x1D656, 'M', 'a'),
+ (0x1D657, 'M', 'b'),
+ (0x1D658, 'M', 'c'),
+ (0x1D659, 'M', 'd'),
+ (0x1D65A, 'M', 'e'),
+ (0x1D65B, 'M', 'f'),
+ (0x1D65C, 'M', 'g'),
+ (0x1D65D, 'M', 'h'),
+ (0x1D65E, 'M', 'i'),
+ (0x1D65F, 'M', 'j'),
+ (0x1D660, 'M', 'k'),
+ (0x1D661, 'M', 'l'),
+ (0x1D662, 'M', 'm'),
+ (0x1D663, 'M', 'n'),
+ (0x1D664, 'M', 'o'),
+ (0x1D665, 'M', 'p'),
+ (0x1D666, 'M', 'q'),
+ (0x1D667, 'M', 'r'),
+ (0x1D668, 'M', 's'),
+ (0x1D669, 'M', 't'),
+ (0x1D66A, 'M', 'u'),
+ (0x1D66B, 'M', 'v'),
+ (0x1D66C, 'M', 'w'),
+ (0x1D66D, 'M', 'x'),
+ (0x1D66E, 'M', 'y'),
+ (0x1D66F, 'M', 'z'),
+ (0x1D670, 'M', 'a'),
+ (0x1D671, 'M', 'b'),
+ (0x1D672, 'M', 'c'),
+ (0x1D673, 'M', 'd'),
+ (0x1D674, 'M', 'e'),
+ (0x1D675, 'M', 'f'),
+ (0x1D676, 'M', 'g'),
+ (0x1D677, 'M', 'h'),
+ (0x1D678, 'M', 'i'),
+ (0x1D679, 'M', 'j'),
+ (0x1D67A, 'M', 'k'),
+ (0x1D67B, 'M', 'l'),
+ (0x1D67C, 'M', 'm'),
+ (0x1D67D, 'M', 'n'),
+ (0x1D67E, 'M', 'o'),
+ ]
+
+def _seg_67() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1D67F, 'M', 'p'),
+ (0x1D680, 'M', 'q'),
+ (0x1D681, 'M', 'r'),
+ (0x1D682, 'M', 's'),
+ (0x1D683, 'M', 't'),
+ (0x1D684, 'M', 'u'),
+ (0x1D685, 'M', 'v'),
+ (0x1D686, 'M', 'w'),
+ (0x1D687, 'M', 'x'),
+ (0x1D688, 'M', 'y'),
+ (0x1D689, 'M', 'z'),
+ (0x1D68A, 'M', 'a'),
+ (0x1D68B, 'M', 'b'),
+ (0x1D68C, 'M', 'c'),
+ (0x1D68D, 'M', 'd'),
+ (0x1D68E, 'M', 'e'),
+ (0x1D68F, 'M', 'f'),
+ (0x1D690, 'M', 'g'),
+ (0x1D691, 'M', 'h'),
+ (0x1D692, 'M', 'i'),
+ (0x1D693, 'M', 'j'),
+ (0x1D694, 'M', 'k'),
+ (0x1D695, 'M', 'l'),
+ (0x1D696, 'M', 'm'),
+ (0x1D697, 'M', 'n'),
+ (0x1D698, 'M', 'o'),
+ (0x1D699, 'M', 'p'),
+ (0x1D69A, 'M', 'q'),
+ (0x1D69B, 'M', 'r'),
+ (0x1D69C, 'M', 's'),
+ (0x1D69D, 'M', 't'),
+ (0x1D69E, 'M', 'u'),
+ (0x1D69F, 'M', 'v'),
+ (0x1D6A0, 'M', 'w'),
+ (0x1D6A1, 'M', 'x'),
+ (0x1D6A2, 'M', 'y'),
+ (0x1D6A3, 'M', 'z'),
+ (0x1D6A4, 'M', 'ı'),
+ (0x1D6A5, 'M', 'ȷ'),
+ (0x1D6A6, 'X'),
+ (0x1D6A8, 'M', 'α'),
+ (0x1D6A9, 'M', 'β'),
+ (0x1D6AA, 'M', 'γ'),
+ (0x1D6AB, 'M', 'δ'),
+ (0x1D6AC, 'M', 'ε'),
+ (0x1D6AD, 'M', 'ζ'),
+ (0x1D6AE, 'M', 'η'),
+ (0x1D6AF, 'M', 'θ'),
+ (0x1D6B0, 'M', 'ι'),
+ (0x1D6B1, 'M', 'κ'),
+ (0x1D6B2, 'M', 'λ'),
+ (0x1D6B3, 'M', 'μ'),
+ (0x1D6B4, 'M', 'ν'),
+ (0x1D6B5, 'M', 'ξ'),
+ (0x1D6B6, 'M', 'ο'),
+ (0x1D6B7, 'M', 'π'),
+ (0x1D6B8, 'M', 'ρ'),
+ (0x1D6B9, 'M', 'θ'),
+ (0x1D6BA, 'M', 'σ'),
+ (0x1D6BB, 'M', 'τ'),
+ (0x1D6BC, 'M', 'υ'),
+ (0x1D6BD, 'M', 'φ'),
+ (0x1D6BE, 'M', 'χ'),
+ (0x1D6BF, 'M', 'ψ'),
+ (0x1D6C0, 'M', 'ω'),
+ (0x1D6C1, 'M', '∇'),
+ (0x1D6C2, 'M', 'α'),
+ (0x1D6C3, 'M', 'β'),
+ (0x1D6C4, 'M', 'γ'),
+ (0x1D6C5, 'M', 'δ'),
+ (0x1D6C6, 'M', 'ε'),
+ (0x1D6C7, 'M', 'ζ'),
+ (0x1D6C8, 'M', 'η'),
+ (0x1D6C9, 'M', 'θ'),
+ (0x1D6CA, 'M', 'ι'),
+ (0x1D6CB, 'M', 'κ'),
+ (0x1D6CC, 'M', 'λ'),
+ (0x1D6CD, 'M', 'μ'),
+ (0x1D6CE, 'M', 'ν'),
+ (0x1D6CF, 'M', 'ξ'),
+ (0x1D6D0, 'M', 'ο'),
+ (0x1D6D1, 'M', 'π'),
+ (0x1D6D2, 'M', 'ρ'),
+ (0x1D6D3, 'M', 'σ'),
+ (0x1D6D5, 'M', 'τ'),
+ (0x1D6D6, 'M', 'υ'),
+ (0x1D6D7, 'M', 'φ'),
+ (0x1D6D8, 'M', 'χ'),
+ (0x1D6D9, 'M', 'ψ'),
+ (0x1D6DA, 'M', 'ω'),
+ (0x1D6DB, 'M', '∂'),
+ (0x1D6DC, 'M', 'ε'),
+ (0x1D6DD, 'M', 'θ'),
+ (0x1D6DE, 'M', 'κ'),
+ (0x1D6DF, 'M', 'φ'),
+ (0x1D6E0, 'M', 'ρ'),
+ (0x1D6E1, 'M', 'π'),
+ (0x1D6E2, 'M', 'α'),
+ (0x1D6E3, 'M', 'β'),
+ (0x1D6E4, 'M', 'γ'),
+ ]
+
+def _seg_68() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1D6E5, 'M', 'δ'),
+ (0x1D6E6, 'M', 'ε'),
+ (0x1D6E7, 'M', 'ζ'),
+ (0x1D6E8, 'M', 'η'),
+ (0x1D6E9, 'M', 'θ'),
+ (0x1D6EA, 'M', 'ι'),
+ (0x1D6EB, 'M', 'κ'),
+ (0x1D6EC, 'M', 'λ'),
+ (0x1D6ED, 'M', 'μ'),
+ (0x1D6EE, 'M', 'ν'),
+ (0x1D6EF, 'M', 'ξ'),
+ (0x1D6F0, 'M', 'ο'),
+ (0x1D6F1, 'M', 'π'),
+ (0x1D6F2, 'M', 'ρ'),
+ (0x1D6F3, 'M', 'θ'),
+ (0x1D6F4, 'M', 'σ'),
+ (0x1D6F5, 'M', 'τ'),
+ (0x1D6F6, 'M', 'υ'),
+ (0x1D6F7, 'M', 'φ'),
+ (0x1D6F8, 'M', 'χ'),
+ (0x1D6F9, 'M', 'ψ'),
+ (0x1D6FA, 'M', 'ω'),
+ (0x1D6FB, 'M', '∇'),
+ (0x1D6FC, 'M', 'α'),
+ (0x1D6FD, 'M', 'β'),
+ (0x1D6FE, 'M', 'γ'),
+ (0x1D6FF, 'M', 'δ'),
+ (0x1D700, 'M', 'ε'),
+ (0x1D701, 'M', 'ζ'),
+ (0x1D702, 'M', 'η'),
+ (0x1D703, 'M', 'θ'),
+ (0x1D704, 'M', 'ι'),
+ (0x1D705, 'M', 'κ'),
+ (0x1D706, 'M', 'λ'),
+ (0x1D707, 'M', 'μ'),
+ (0x1D708, 'M', 'ν'),
+ (0x1D709, 'M', 'ξ'),
+ (0x1D70A, 'M', 'ο'),
+ (0x1D70B, 'M', 'π'),
+ (0x1D70C, 'M', 'ρ'),
+ (0x1D70D, 'M', 'σ'),
+ (0x1D70F, 'M', 'τ'),
+ (0x1D710, 'M', 'υ'),
+ (0x1D711, 'M', 'φ'),
+ (0x1D712, 'M', 'χ'),
+ (0x1D713, 'M', 'ψ'),
+ (0x1D714, 'M', 'ω'),
+ (0x1D715, 'M', '∂'),
+ (0x1D716, 'M', 'ε'),
+ (0x1D717, 'M', 'θ'),
+ (0x1D718, 'M', 'κ'),
+ (0x1D719, 'M', 'φ'),
+ (0x1D71A, 'M', 'ρ'),
+ (0x1D71B, 'M', 'π'),
+ (0x1D71C, 'M', 'α'),
+ (0x1D71D, 'M', 'β'),
+ (0x1D71E, 'M', 'γ'),
+ (0x1D71F, 'M', 'δ'),
+ (0x1D720, 'M', 'ε'),
+ (0x1D721, 'M', 'ζ'),
+ (0x1D722, 'M', 'η'),
+ (0x1D723, 'M', 'θ'),
+ (0x1D724, 'M', 'ι'),
+ (0x1D725, 'M', 'κ'),
+ (0x1D726, 'M', 'λ'),
+ (0x1D727, 'M', 'μ'),
+ (0x1D728, 'M', 'ν'),
+ (0x1D729, 'M', 'ξ'),
+ (0x1D72A, 'M', 'ο'),
+ (0x1D72B, 'M', 'π'),
+ (0x1D72C, 'M', 'ρ'),
+ (0x1D72D, 'M', 'θ'),
+ (0x1D72E, 'M', 'σ'),
+ (0x1D72F, 'M', 'τ'),
+ (0x1D730, 'M', 'υ'),
+ (0x1D731, 'M', 'φ'),
+ (0x1D732, 'M', 'χ'),
+ (0x1D733, 'M', 'ψ'),
+ (0x1D734, 'M', 'ω'),
+ (0x1D735, 'M', '∇'),
+ (0x1D736, 'M', 'α'),
+ (0x1D737, 'M', 'β'),
+ (0x1D738, 'M', 'γ'),
+ (0x1D739, 'M', 'δ'),
+ (0x1D73A, 'M', 'ε'),
+ (0x1D73B, 'M', 'ζ'),
+ (0x1D73C, 'M', 'η'),
+ (0x1D73D, 'M', 'θ'),
+ (0x1D73E, 'M', 'ι'),
+ (0x1D73F, 'M', 'κ'),
+ (0x1D740, 'M', 'λ'),
+ (0x1D741, 'M', 'μ'),
+ (0x1D742, 'M', 'ν'),
+ (0x1D743, 'M', 'ξ'),
+ (0x1D744, 'M', 'ο'),
+ (0x1D745, 'M', 'π'),
+ (0x1D746, 'M', 'ρ'),
+ (0x1D747, 'M', 'σ'),
+ (0x1D749, 'M', 'τ'),
+ (0x1D74A, 'M', 'υ'),
+ ]
+
+def _seg_69() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1D74B, 'M', 'φ'),
+ (0x1D74C, 'M', 'χ'),
+ (0x1D74D, 'M', 'ψ'),
+ (0x1D74E, 'M', 'ω'),
+ (0x1D74F, 'M', '∂'),
+ (0x1D750, 'M', 'ε'),
+ (0x1D751, 'M', 'θ'),
+ (0x1D752, 'M', 'κ'),
+ (0x1D753, 'M', 'φ'),
+ (0x1D754, 'M', 'ρ'),
+ (0x1D755, 'M', 'π'),
+ (0x1D756, 'M', 'α'),
+ (0x1D757, 'M', 'β'),
+ (0x1D758, 'M', 'γ'),
+ (0x1D759, 'M', 'δ'),
+ (0x1D75A, 'M', 'ε'),
+ (0x1D75B, 'M', 'ζ'),
+ (0x1D75C, 'M', 'η'),
+ (0x1D75D, 'M', 'θ'),
+ (0x1D75E, 'M', 'ι'),
+ (0x1D75F, 'M', 'κ'),
+ (0x1D760, 'M', 'λ'),
+ (0x1D761, 'M', 'μ'),
+ (0x1D762, 'M', 'ν'),
+ (0x1D763, 'M', 'ξ'),
+ (0x1D764, 'M', 'ο'),
+ (0x1D765, 'M', 'π'),
+ (0x1D766, 'M', 'ρ'),
+ (0x1D767, 'M', 'θ'),
+ (0x1D768, 'M', 'σ'),
+ (0x1D769, 'M', 'τ'),
+ (0x1D76A, 'M', 'υ'),
+ (0x1D76B, 'M', 'φ'),
+ (0x1D76C, 'M', 'χ'),
+ (0x1D76D, 'M', 'ψ'),
+ (0x1D76E, 'M', 'ω'),
+ (0x1D76F, 'M', '∇'),
+ (0x1D770, 'M', 'α'),
+ (0x1D771, 'M', 'β'),
+ (0x1D772, 'M', 'γ'),
+ (0x1D773, 'M', 'δ'),
+ (0x1D774, 'M', 'ε'),
+ (0x1D775, 'M', 'ζ'),
+ (0x1D776, 'M', 'η'),
+ (0x1D777, 'M', 'θ'),
+ (0x1D778, 'M', 'ι'),
+ (0x1D779, 'M', 'κ'),
+ (0x1D77A, 'M', 'λ'),
+ (0x1D77B, 'M', 'μ'),
+ (0x1D77C, 'M', 'ν'),
+ (0x1D77D, 'M', 'ξ'),
+ (0x1D77E, 'M', 'ο'),
+ (0x1D77F, 'M', 'π'),
+ (0x1D780, 'M', 'ρ'),
+ (0x1D781, 'M', 'σ'),
+ (0x1D783, 'M', 'τ'),
+ (0x1D784, 'M', 'υ'),
+ (0x1D785, 'M', 'φ'),
+ (0x1D786, 'M', 'χ'),
+ (0x1D787, 'M', 'ψ'),
+ (0x1D788, 'M', 'ω'),
+ (0x1D789, 'M', '∂'),
+ (0x1D78A, 'M', 'ε'),
+ (0x1D78B, 'M', 'θ'),
+ (0x1D78C, 'M', 'κ'),
+ (0x1D78D, 'M', 'φ'),
+ (0x1D78E, 'M', 'ρ'),
+ (0x1D78F, 'M', 'π'),
+ (0x1D790, 'M', 'α'),
+ (0x1D791, 'M', 'β'),
+ (0x1D792, 'M', 'γ'),
+ (0x1D793, 'M', 'δ'),
+ (0x1D794, 'M', 'ε'),
+ (0x1D795, 'M', 'ζ'),
+ (0x1D796, 'M', 'η'),
+ (0x1D797, 'M', 'θ'),
+ (0x1D798, 'M', 'ι'),
+ (0x1D799, 'M', 'κ'),
+ (0x1D79A, 'M', 'λ'),
+ (0x1D79B, 'M', 'μ'),
+ (0x1D79C, 'M', 'ν'),
+ (0x1D79D, 'M', 'ξ'),
+ (0x1D79E, 'M', 'ο'),
+ (0x1D79F, 'M', 'π'),
+ (0x1D7A0, 'M', 'ρ'),
+ (0x1D7A1, 'M', 'θ'),
+ (0x1D7A2, 'M', 'σ'),
+ (0x1D7A3, 'M', 'τ'),
+ (0x1D7A4, 'M', 'υ'),
+ (0x1D7A5, 'M', 'φ'),
+ (0x1D7A6, 'M', 'χ'),
+ (0x1D7A7, 'M', 'ψ'),
+ (0x1D7A8, 'M', 'ω'),
+ (0x1D7A9, 'M', '∇'),
+ (0x1D7AA, 'M', 'α'),
+ (0x1D7AB, 'M', 'β'),
+ (0x1D7AC, 'M', 'γ'),
+ (0x1D7AD, 'M', 'δ'),
+ (0x1D7AE, 'M', 'ε'),
+ (0x1D7AF, 'M', 'ζ'),
+ ]
+
+def _seg_70() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1D7B0, 'M', 'η'),
+ (0x1D7B1, 'M', 'θ'),
+ (0x1D7B2, 'M', 'ι'),
+ (0x1D7B3, 'M', 'κ'),
+ (0x1D7B4, 'M', 'λ'),
+ (0x1D7B5, 'M', 'μ'),
+ (0x1D7B6, 'M', 'ν'),
+ (0x1D7B7, 'M', 'ξ'),
+ (0x1D7B8, 'M', 'ο'),
+ (0x1D7B9, 'M', 'π'),
+ (0x1D7BA, 'M', 'ρ'),
+ (0x1D7BB, 'M', 'σ'),
+ (0x1D7BD, 'M', 'τ'),
+ (0x1D7BE, 'M', 'υ'),
+ (0x1D7BF, 'M', 'φ'),
+ (0x1D7C0, 'M', 'χ'),
+ (0x1D7C1, 'M', 'ψ'),
+ (0x1D7C2, 'M', 'ω'),
+ (0x1D7C3, 'M', '∂'),
+ (0x1D7C4, 'M', 'ε'),
+ (0x1D7C5, 'M', 'θ'),
+ (0x1D7C6, 'M', 'κ'),
+ (0x1D7C7, 'M', 'φ'),
+ (0x1D7C8, 'M', 'ρ'),
+ (0x1D7C9, 'M', 'π'),
+ (0x1D7CA, 'M', 'ϝ'),
+ (0x1D7CC, 'X'),
+ (0x1D7CE, 'M', '0'),
+ (0x1D7CF, 'M', '1'),
+ (0x1D7D0, 'M', '2'),
+ (0x1D7D1, 'M', '3'),
+ (0x1D7D2, 'M', '4'),
+ (0x1D7D3, 'M', '5'),
+ (0x1D7D4, 'M', '6'),
+ (0x1D7D5, 'M', '7'),
+ (0x1D7D6, 'M', '8'),
+ (0x1D7D7, 'M', '9'),
+ (0x1D7D8, 'M', '0'),
+ (0x1D7D9, 'M', '1'),
+ (0x1D7DA, 'M', '2'),
+ (0x1D7DB, 'M', '3'),
+ (0x1D7DC, 'M', '4'),
+ (0x1D7DD, 'M', '5'),
+ (0x1D7DE, 'M', '6'),
+ (0x1D7DF, 'M', '7'),
+ (0x1D7E0, 'M', '8'),
+ (0x1D7E1, 'M', '9'),
+ (0x1D7E2, 'M', '0'),
+ (0x1D7E3, 'M', '1'),
+ (0x1D7E4, 'M', '2'),
+ (0x1D7E5, 'M', '3'),
+ (0x1D7E6, 'M', '4'),
+ (0x1D7E7, 'M', '5'),
+ (0x1D7E8, 'M', '6'),
+ (0x1D7E9, 'M', '7'),
+ (0x1D7EA, 'M', '8'),
+ (0x1D7EB, 'M', '9'),
+ (0x1D7EC, 'M', '0'),
+ (0x1D7ED, 'M', '1'),
+ (0x1D7EE, 'M', '2'),
+ (0x1D7EF, 'M', '3'),
+ (0x1D7F0, 'M', '4'),
+ (0x1D7F1, 'M', '5'),
+ (0x1D7F2, 'M', '6'),
+ (0x1D7F3, 'M', '7'),
+ (0x1D7F4, 'M', '8'),
+ (0x1D7F5, 'M', '9'),
+ (0x1D7F6, 'M', '0'),
+ (0x1D7F7, 'M', '1'),
+ (0x1D7F8, 'M', '2'),
+ (0x1D7F9, 'M', '3'),
+ (0x1D7FA, 'M', '4'),
+ (0x1D7FB, 'M', '5'),
+ (0x1D7FC, 'M', '6'),
+ (0x1D7FD, 'M', '7'),
+ (0x1D7FE, 'M', '8'),
+ (0x1D7FF, 'M', '9'),
+ (0x1D800, 'V'),
+ (0x1DA8C, 'X'),
+ (0x1DA9B, 'V'),
+ (0x1DAA0, 'X'),
+ (0x1DAA1, 'V'),
+ (0x1DAB0, 'X'),
+ (0x1DF00, 'V'),
+ (0x1DF1F, 'X'),
+ (0x1DF25, 'V'),
+ (0x1DF2B, 'X'),
+ (0x1E000, 'V'),
+ (0x1E007, 'X'),
+ (0x1E008, 'V'),
+ (0x1E019, 'X'),
+ (0x1E01B, 'V'),
+ (0x1E022, 'X'),
+ (0x1E023, 'V'),
+ (0x1E025, 'X'),
+ (0x1E026, 'V'),
+ (0x1E02B, 'X'),
+ (0x1E030, 'M', 'а'),
+ (0x1E031, 'M', 'б'),
+ (0x1E032, 'M', 'в'),
+ ]
+
+def _seg_71() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1E033, 'M', 'г'),
+ (0x1E034, 'M', 'д'),
+ (0x1E035, 'M', 'е'),
+ (0x1E036, 'M', 'ж'),
+ (0x1E037, 'M', 'з'),
+ (0x1E038, 'M', 'и'),
+ (0x1E039, 'M', 'к'),
+ (0x1E03A, 'M', 'л'),
+ (0x1E03B, 'M', 'м'),
+ (0x1E03C, 'M', 'о'),
+ (0x1E03D, 'M', 'п'),
+ (0x1E03E, 'M', 'р'),
+ (0x1E03F, 'M', 'с'),
+ (0x1E040, 'M', 'т'),
+ (0x1E041, 'M', 'у'),
+ (0x1E042, 'M', 'ф'),
+ (0x1E043, 'M', 'х'),
+ (0x1E044, 'M', 'ц'),
+ (0x1E045, 'M', 'ч'),
+ (0x1E046, 'M', 'ш'),
+ (0x1E047, 'M', 'ы'),
+ (0x1E048, 'M', 'э'),
+ (0x1E049, 'M', 'ю'),
+ (0x1E04A, 'M', 'ꚉ'),
+ (0x1E04B, 'M', 'ә'),
+ (0x1E04C, 'M', 'і'),
+ (0x1E04D, 'M', 'ј'),
+ (0x1E04E, 'M', 'ө'),
+ (0x1E04F, 'M', 'ү'),
+ (0x1E050, 'M', 'ӏ'),
+ (0x1E051, 'M', 'а'),
+ (0x1E052, 'M', 'б'),
+ (0x1E053, 'M', 'в'),
+ (0x1E054, 'M', 'г'),
+ (0x1E055, 'M', 'д'),
+ (0x1E056, 'M', 'е'),
+ (0x1E057, 'M', 'ж'),
+ (0x1E058, 'M', 'з'),
+ (0x1E059, 'M', 'и'),
+ (0x1E05A, 'M', 'к'),
+ (0x1E05B, 'M', 'л'),
+ (0x1E05C, 'M', 'о'),
+ (0x1E05D, 'M', 'п'),
+ (0x1E05E, 'M', 'с'),
+ (0x1E05F, 'M', 'у'),
+ (0x1E060, 'M', 'ф'),
+ (0x1E061, 'M', 'х'),
+ (0x1E062, 'M', 'ц'),
+ (0x1E063, 'M', 'ч'),
+ (0x1E064, 'M', 'ш'),
+ (0x1E065, 'M', 'ъ'),
+ (0x1E066, 'M', 'ы'),
+ (0x1E067, 'M', 'ґ'),
+ (0x1E068, 'M', 'і'),
+ (0x1E069, 'M', 'ѕ'),
+ (0x1E06A, 'M', 'џ'),
+ (0x1E06B, 'M', 'ҫ'),
+ (0x1E06C, 'M', 'ꙑ'),
+ (0x1E06D, 'M', 'ұ'),
+ (0x1E06E, 'X'),
+ (0x1E08F, 'V'),
+ (0x1E090, 'X'),
+ (0x1E100, 'V'),
+ (0x1E12D, 'X'),
+ (0x1E130, 'V'),
+ (0x1E13E, 'X'),
+ (0x1E140, 'V'),
+ (0x1E14A, 'X'),
+ (0x1E14E, 'V'),
+ (0x1E150, 'X'),
+ (0x1E290, 'V'),
+ (0x1E2AF, 'X'),
+ (0x1E2C0, 'V'),
+ (0x1E2FA, 'X'),
+ (0x1E2FF, 'V'),
+ (0x1E300, 'X'),
+ (0x1E4D0, 'V'),
+ (0x1E4FA, 'X'),
+ (0x1E7E0, 'V'),
+ (0x1E7E7, 'X'),
+ (0x1E7E8, 'V'),
+ (0x1E7EC, 'X'),
+ (0x1E7ED, 'V'),
+ (0x1E7EF, 'X'),
+ (0x1E7F0, 'V'),
+ (0x1E7FF, 'X'),
+ (0x1E800, 'V'),
+ (0x1E8C5, 'X'),
+ (0x1E8C7, 'V'),
+ (0x1E8D7, 'X'),
+ (0x1E900, 'M', '𞤢'),
+ (0x1E901, 'M', '𞤣'),
+ (0x1E902, 'M', '𞤤'),
+ (0x1E903, 'M', '𞤥'),
+ (0x1E904, 'M', '𞤦'),
+ (0x1E905, 'M', '𞤧'),
+ (0x1E906, 'M', '𞤨'),
+ (0x1E907, 'M', '𞤩'),
+ (0x1E908, 'M', '𞤪'),
+ (0x1E909, 'M', '𞤫'),
+ ]
+
+def _seg_72() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1E90A, 'M', '𞤬'),
+ (0x1E90B, 'M', '𞤭'),
+ (0x1E90C, 'M', '𞤮'),
+ (0x1E90D, 'M', '𞤯'),
+ (0x1E90E, 'M', '𞤰'),
+ (0x1E90F, 'M', '𞤱'),
+ (0x1E910, 'M', '𞤲'),
+ (0x1E911, 'M', '𞤳'),
+ (0x1E912, 'M', '𞤴'),
+ (0x1E913, 'M', '𞤵'),
+ (0x1E914, 'M', '𞤶'),
+ (0x1E915, 'M', '𞤷'),
+ (0x1E916, 'M', '𞤸'),
+ (0x1E917, 'M', '𞤹'),
+ (0x1E918, 'M', '𞤺'),
+ (0x1E919, 'M', '𞤻'),
+ (0x1E91A, 'M', '𞤼'),
+ (0x1E91B, 'M', '𞤽'),
+ (0x1E91C, 'M', '𞤾'),
+ (0x1E91D, 'M', '𞤿'),
+ (0x1E91E, 'M', '𞥀'),
+ (0x1E91F, 'M', '𞥁'),
+ (0x1E920, 'M', '𞥂'),
+ (0x1E921, 'M', '𞥃'),
+ (0x1E922, 'V'),
+ (0x1E94C, 'X'),
+ (0x1E950, 'V'),
+ (0x1E95A, 'X'),
+ (0x1E95E, 'V'),
+ (0x1E960, 'X'),
+ (0x1EC71, 'V'),
+ (0x1ECB5, 'X'),
+ (0x1ED01, 'V'),
+ (0x1ED3E, 'X'),
+ (0x1EE00, 'M', 'ا'),
+ (0x1EE01, 'M', 'ب'),
+ (0x1EE02, 'M', 'ج'),
+ (0x1EE03, 'M', 'د'),
+ (0x1EE04, 'X'),
+ (0x1EE05, 'M', 'و'),
+ (0x1EE06, 'M', 'ز'),
+ (0x1EE07, 'M', 'ح'),
+ (0x1EE08, 'M', 'ط'),
+ (0x1EE09, 'M', 'ي'),
+ (0x1EE0A, 'M', 'ك'),
+ (0x1EE0B, 'M', 'ل'),
+ (0x1EE0C, 'M', 'م'),
+ (0x1EE0D, 'M', 'ن'),
+ (0x1EE0E, 'M', 'س'),
+ (0x1EE0F, 'M', 'ع'),
+ (0x1EE10, 'M', 'ف'),
+ (0x1EE11, 'M', 'ص'),
+ (0x1EE12, 'M', 'ق'),
+ (0x1EE13, 'M', 'ر'),
+ (0x1EE14, 'M', 'ش'),
+ (0x1EE15, 'M', 'ت'),
+ (0x1EE16, 'M', 'ث'),
+ (0x1EE17, 'M', 'خ'),
+ (0x1EE18, 'M', 'ذ'),
+ (0x1EE19, 'M', 'ض'),
+ (0x1EE1A, 'M', 'ظ'),
+ (0x1EE1B, 'M', 'غ'),
+ (0x1EE1C, 'M', 'ٮ'),
+ (0x1EE1D, 'M', 'ں'),
+ (0x1EE1E, 'M', 'ڡ'),
+ (0x1EE1F, 'M', 'ٯ'),
+ (0x1EE20, 'X'),
+ (0x1EE21, 'M', 'ب'),
+ (0x1EE22, 'M', 'ج'),
+ (0x1EE23, 'X'),
+ (0x1EE24, 'M', 'ه'),
+ (0x1EE25, 'X'),
+ (0x1EE27, 'M', 'ح'),
+ (0x1EE28, 'X'),
+ (0x1EE29, 'M', 'ي'),
+ (0x1EE2A, 'M', 'ك'),
+ (0x1EE2B, 'M', 'ل'),
+ (0x1EE2C, 'M', 'م'),
+ (0x1EE2D, 'M', 'ن'),
+ (0x1EE2E, 'M', 'س'),
+ (0x1EE2F, 'M', 'ع'),
+ (0x1EE30, 'M', 'ف'),
+ (0x1EE31, 'M', 'ص'),
+ (0x1EE32, 'M', 'ق'),
+ (0x1EE33, 'X'),
+ (0x1EE34, 'M', 'ش'),
+ (0x1EE35, 'M', 'ت'),
+ (0x1EE36, 'M', 'ث'),
+ (0x1EE37, 'M', 'خ'),
+ (0x1EE38, 'X'),
+ (0x1EE39, 'M', 'ض'),
+ (0x1EE3A, 'X'),
+ (0x1EE3B, 'M', 'غ'),
+ (0x1EE3C, 'X'),
+ (0x1EE42, 'M', 'ج'),
+ (0x1EE43, 'X'),
+ (0x1EE47, 'M', 'ح'),
+ (0x1EE48, 'X'),
+ (0x1EE49, 'M', 'ي'),
+ (0x1EE4A, 'X'),
+ ]
+
+def _seg_73() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1EE4B, 'M', 'ل'),
+ (0x1EE4C, 'X'),
+ (0x1EE4D, 'M', 'ن'),
+ (0x1EE4E, 'M', 'س'),
+ (0x1EE4F, 'M', 'ع'),
+ (0x1EE50, 'X'),
+ (0x1EE51, 'M', 'ص'),
+ (0x1EE52, 'M', 'ق'),
+ (0x1EE53, 'X'),
+ (0x1EE54, 'M', 'ش'),
+ (0x1EE55, 'X'),
+ (0x1EE57, 'M', 'خ'),
+ (0x1EE58, 'X'),
+ (0x1EE59, 'M', 'ض'),
+ (0x1EE5A, 'X'),
+ (0x1EE5B, 'M', 'غ'),
+ (0x1EE5C, 'X'),
+ (0x1EE5D, 'M', 'ں'),
+ (0x1EE5E, 'X'),
+ (0x1EE5F, 'M', 'ٯ'),
+ (0x1EE60, 'X'),
+ (0x1EE61, 'M', 'ب'),
+ (0x1EE62, 'M', 'ج'),
+ (0x1EE63, 'X'),
+ (0x1EE64, 'M', 'ه'),
+ (0x1EE65, 'X'),
+ (0x1EE67, 'M', 'ح'),
+ (0x1EE68, 'M', 'ط'),
+ (0x1EE69, 'M', 'ي'),
+ (0x1EE6A, 'M', 'ك'),
+ (0x1EE6B, 'X'),
+ (0x1EE6C, 'M', 'م'),
+ (0x1EE6D, 'M', 'ن'),
+ (0x1EE6E, 'M', 'س'),
+ (0x1EE6F, 'M', 'ع'),
+ (0x1EE70, 'M', 'ف'),
+ (0x1EE71, 'M', 'ص'),
+ (0x1EE72, 'M', 'ق'),
+ (0x1EE73, 'X'),
+ (0x1EE74, 'M', 'ش'),
+ (0x1EE75, 'M', 'ت'),
+ (0x1EE76, 'M', 'ث'),
+ (0x1EE77, 'M', 'خ'),
+ (0x1EE78, 'X'),
+ (0x1EE79, 'M', 'ض'),
+ (0x1EE7A, 'M', 'ظ'),
+ (0x1EE7B, 'M', 'غ'),
+ (0x1EE7C, 'M', 'ٮ'),
+ (0x1EE7D, 'X'),
+ (0x1EE7E, 'M', 'ڡ'),
+ (0x1EE7F, 'X'),
+ (0x1EE80, 'M', 'ا'),
+ (0x1EE81, 'M', 'ب'),
+ (0x1EE82, 'M', 'ج'),
+ (0x1EE83, 'M', 'د'),
+ (0x1EE84, 'M', 'ه'),
+ (0x1EE85, 'M', 'و'),
+ (0x1EE86, 'M', 'ز'),
+ (0x1EE87, 'M', 'ح'),
+ (0x1EE88, 'M', 'ط'),
+ (0x1EE89, 'M', 'ي'),
+ (0x1EE8A, 'X'),
+ (0x1EE8B, 'M', 'ل'),
+ (0x1EE8C, 'M', 'م'),
+ (0x1EE8D, 'M', 'ن'),
+ (0x1EE8E, 'M', 'س'),
+ (0x1EE8F, 'M', 'ع'),
+ (0x1EE90, 'M', 'ف'),
+ (0x1EE91, 'M', 'ص'),
+ (0x1EE92, 'M', 'ق'),
+ (0x1EE93, 'M', 'ر'),
+ (0x1EE94, 'M', 'ش'),
+ (0x1EE95, 'M', 'ت'),
+ (0x1EE96, 'M', 'ث'),
+ (0x1EE97, 'M', 'خ'),
+ (0x1EE98, 'M', 'ذ'),
+ (0x1EE99, 'M', 'ض'),
+ (0x1EE9A, 'M', 'ظ'),
+ (0x1EE9B, 'M', 'غ'),
+ (0x1EE9C, 'X'),
+ (0x1EEA1, 'M', 'ب'),
+ (0x1EEA2, 'M', 'ج'),
+ (0x1EEA3, 'M', 'د'),
+ (0x1EEA4, 'X'),
+ (0x1EEA5, 'M', 'و'),
+ (0x1EEA6, 'M', 'ز'),
+ (0x1EEA7, 'M', 'ح'),
+ (0x1EEA8, 'M', 'ط'),
+ (0x1EEA9, 'M', 'ي'),
+ (0x1EEAA, 'X'),
+ (0x1EEAB, 'M', 'ل'),
+ (0x1EEAC, 'M', 'م'),
+ (0x1EEAD, 'M', 'ن'),
+ (0x1EEAE, 'M', 'س'),
+ (0x1EEAF, 'M', 'ع'),
+ (0x1EEB0, 'M', 'ف'),
+ (0x1EEB1, 'M', 'ص'),
+ (0x1EEB2, 'M', 'ق'),
+ (0x1EEB3, 'M', 'ر'),
+ (0x1EEB4, 'M', 'ش'),
+ ]
+
+def _seg_74() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1EEB5, 'M', 'ت'),
+ (0x1EEB6, 'M', 'ث'),
+ (0x1EEB7, 'M', 'خ'),
+ (0x1EEB8, 'M', 'ذ'),
+ (0x1EEB9, 'M', 'ض'),
+ (0x1EEBA, 'M', 'ظ'),
+ (0x1EEBB, 'M', 'غ'),
+ (0x1EEBC, 'X'),
+ (0x1EEF0, 'V'),
+ (0x1EEF2, 'X'),
+ (0x1F000, 'V'),
+ (0x1F02C, 'X'),
+ (0x1F030, 'V'),
+ (0x1F094, 'X'),
+ (0x1F0A0, 'V'),
+ (0x1F0AF, 'X'),
+ (0x1F0B1, 'V'),
+ (0x1F0C0, 'X'),
+ (0x1F0C1, 'V'),
+ (0x1F0D0, 'X'),
+ (0x1F0D1, 'V'),
+ (0x1F0F6, 'X'),
+ (0x1F101, '3', '0,'),
+ (0x1F102, '3', '1,'),
+ (0x1F103, '3', '2,'),
+ (0x1F104, '3', '3,'),
+ (0x1F105, '3', '4,'),
+ (0x1F106, '3', '5,'),
+ (0x1F107, '3', '6,'),
+ (0x1F108, '3', '7,'),
+ (0x1F109, '3', '8,'),
+ (0x1F10A, '3', '9,'),
+ (0x1F10B, 'V'),
+ (0x1F110, '3', '(a)'),
+ (0x1F111, '3', '(b)'),
+ (0x1F112, '3', '(c)'),
+ (0x1F113, '3', '(d)'),
+ (0x1F114, '3', '(e)'),
+ (0x1F115, '3', '(f)'),
+ (0x1F116, '3', '(g)'),
+ (0x1F117, '3', '(h)'),
+ (0x1F118, '3', '(i)'),
+ (0x1F119, '3', '(j)'),
+ (0x1F11A, '3', '(k)'),
+ (0x1F11B, '3', '(l)'),
+ (0x1F11C, '3', '(m)'),
+ (0x1F11D, '3', '(n)'),
+ (0x1F11E, '3', '(o)'),
+ (0x1F11F, '3', '(p)'),
+ (0x1F120, '3', '(q)'),
+ (0x1F121, '3', '(r)'),
+ (0x1F122, '3', '(s)'),
+ (0x1F123, '3', '(t)'),
+ (0x1F124, '3', '(u)'),
+ (0x1F125, '3', '(v)'),
+ (0x1F126, '3', '(w)'),
+ (0x1F127, '3', '(x)'),
+ (0x1F128, '3', '(y)'),
+ (0x1F129, '3', '(z)'),
+ (0x1F12A, 'M', '〔s〕'),
+ (0x1F12B, 'M', 'c'),
+ (0x1F12C, 'M', 'r'),
+ (0x1F12D, 'M', 'cd'),
+ (0x1F12E, 'M', 'wz'),
+ (0x1F12F, 'V'),
+ (0x1F130, 'M', 'a'),
+ (0x1F131, 'M', 'b'),
+ (0x1F132, 'M', 'c'),
+ (0x1F133, 'M', 'd'),
+ (0x1F134, 'M', 'e'),
+ (0x1F135, 'M', 'f'),
+ (0x1F136, 'M', 'g'),
+ (0x1F137, 'M', 'h'),
+ (0x1F138, 'M', 'i'),
+ (0x1F139, 'M', 'j'),
+ (0x1F13A, 'M', 'k'),
+ (0x1F13B, 'M', 'l'),
+ (0x1F13C, 'M', 'm'),
+ (0x1F13D, 'M', 'n'),
+ (0x1F13E, 'M', 'o'),
+ (0x1F13F, 'M', 'p'),
+ (0x1F140, 'M', 'q'),
+ (0x1F141, 'M', 'r'),
+ (0x1F142, 'M', 's'),
+ (0x1F143, 'M', 't'),
+ (0x1F144, 'M', 'u'),
+ (0x1F145, 'M', 'v'),
+ (0x1F146, 'M', 'w'),
+ (0x1F147, 'M', 'x'),
+ (0x1F148, 'M', 'y'),
+ (0x1F149, 'M', 'z'),
+ (0x1F14A, 'M', 'hv'),
+ (0x1F14B, 'M', 'mv'),
+ (0x1F14C, 'M', 'sd'),
+ (0x1F14D, 'M', 'ss'),
+ (0x1F14E, 'M', 'ppv'),
+ (0x1F14F, 'M', 'wc'),
+ (0x1F150, 'V'),
+ (0x1F16A, 'M', 'mc'),
+ (0x1F16B, 'M', 'md'),
+ ]
+
+def _seg_75() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1F16C, 'M', 'mr'),
+ (0x1F16D, 'V'),
+ (0x1F190, 'M', 'dj'),
+ (0x1F191, 'V'),
+ (0x1F1AE, 'X'),
+ (0x1F1E6, 'V'),
+ (0x1F200, 'M', 'ほか'),
+ (0x1F201, 'M', 'ココ'),
+ (0x1F202, 'M', 'サ'),
+ (0x1F203, 'X'),
+ (0x1F210, 'M', '手'),
+ (0x1F211, 'M', '字'),
+ (0x1F212, 'M', '双'),
+ (0x1F213, 'M', 'デ'),
+ (0x1F214, 'M', '二'),
+ (0x1F215, 'M', '多'),
+ (0x1F216, 'M', '解'),
+ (0x1F217, 'M', '天'),
+ (0x1F218, 'M', '交'),
+ (0x1F219, 'M', '映'),
+ (0x1F21A, 'M', '無'),
+ (0x1F21B, 'M', '料'),
+ (0x1F21C, 'M', '前'),
+ (0x1F21D, 'M', '後'),
+ (0x1F21E, 'M', '再'),
+ (0x1F21F, 'M', '新'),
+ (0x1F220, 'M', '初'),
+ (0x1F221, 'M', '終'),
+ (0x1F222, 'M', '生'),
+ (0x1F223, 'M', '販'),
+ (0x1F224, 'M', '声'),
+ (0x1F225, 'M', '吹'),
+ (0x1F226, 'M', '演'),
+ (0x1F227, 'M', '投'),
+ (0x1F228, 'M', '捕'),
+ (0x1F229, 'M', '一'),
+ (0x1F22A, 'M', '三'),
+ (0x1F22B, 'M', '遊'),
+ (0x1F22C, 'M', '左'),
+ (0x1F22D, 'M', '中'),
+ (0x1F22E, 'M', '右'),
+ (0x1F22F, 'M', '指'),
+ (0x1F230, 'M', '走'),
+ (0x1F231, 'M', '打'),
+ (0x1F232, 'M', '禁'),
+ (0x1F233, 'M', '空'),
+ (0x1F234, 'M', '合'),
+ (0x1F235, 'M', '満'),
+ (0x1F236, 'M', '有'),
+ (0x1F237, 'M', '月'),
+ (0x1F238, 'M', '申'),
+ (0x1F239, 'M', '割'),
+ (0x1F23A, 'M', '営'),
+ (0x1F23B, 'M', '配'),
+ (0x1F23C, 'X'),
+ (0x1F240, 'M', '〔本〕'),
+ (0x1F241, 'M', '〔三〕'),
+ (0x1F242, 'M', '〔二〕'),
+ (0x1F243, 'M', '〔安〕'),
+ (0x1F244, 'M', '〔点〕'),
+ (0x1F245, 'M', '〔打〕'),
+ (0x1F246, 'M', '〔盗〕'),
+ (0x1F247, 'M', '〔勝〕'),
+ (0x1F248, 'M', '〔敗〕'),
+ (0x1F249, 'X'),
+ (0x1F250, 'M', '得'),
+ (0x1F251, 'M', '可'),
+ (0x1F252, 'X'),
+ (0x1F260, 'V'),
+ (0x1F266, 'X'),
+ (0x1F300, 'V'),
+ (0x1F6D8, 'X'),
+ (0x1F6DC, 'V'),
+ (0x1F6ED, 'X'),
+ (0x1F6F0, 'V'),
+ (0x1F6FD, 'X'),
+ (0x1F700, 'V'),
+ (0x1F777, 'X'),
+ (0x1F77B, 'V'),
+ (0x1F7DA, 'X'),
+ (0x1F7E0, 'V'),
+ (0x1F7EC, 'X'),
+ (0x1F7F0, 'V'),
+ (0x1F7F1, 'X'),
+ (0x1F800, 'V'),
+ (0x1F80C, 'X'),
+ (0x1F810, 'V'),
+ (0x1F848, 'X'),
+ (0x1F850, 'V'),
+ (0x1F85A, 'X'),
+ (0x1F860, 'V'),
+ (0x1F888, 'X'),
+ (0x1F890, 'V'),
+ (0x1F8AE, 'X'),
+ (0x1F8B0, 'V'),
+ (0x1F8B2, 'X'),
+ (0x1F900, 'V'),
+ (0x1FA54, 'X'),
+ (0x1FA60, 'V'),
+ (0x1FA6E, 'X'),
+ ]
+
+def _seg_76() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1FA70, 'V'),
+ (0x1FA7D, 'X'),
+ (0x1FA80, 'V'),
+ (0x1FA89, 'X'),
+ (0x1FA90, 'V'),
+ (0x1FABE, 'X'),
+ (0x1FABF, 'V'),
+ (0x1FAC6, 'X'),
+ (0x1FACE, 'V'),
+ (0x1FADC, 'X'),
+ (0x1FAE0, 'V'),
+ (0x1FAE9, 'X'),
+ (0x1FAF0, 'V'),
+ (0x1FAF9, 'X'),
+ (0x1FB00, 'V'),
+ (0x1FB93, 'X'),
+ (0x1FB94, 'V'),
+ (0x1FBCB, 'X'),
+ (0x1FBF0, 'M', '0'),
+ (0x1FBF1, 'M', '1'),
+ (0x1FBF2, 'M', '2'),
+ (0x1FBF3, 'M', '3'),
+ (0x1FBF4, 'M', '4'),
+ (0x1FBF5, 'M', '5'),
+ (0x1FBF6, 'M', '6'),
+ (0x1FBF7, 'M', '7'),
+ (0x1FBF8, 'M', '8'),
+ (0x1FBF9, 'M', '9'),
+ (0x1FBFA, 'X'),
+ (0x20000, 'V'),
+ (0x2A6E0, 'X'),
+ (0x2A700, 'V'),
+ (0x2B73A, 'X'),
+ (0x2B740, 'V'),
+ (0x2B81E, 'X'),
+ (0x2B820, 'V'),
+ (0x2CEA2, 'X'),
+ (0x2CEB0, 'V'),
+ (0x2EBE1, 'X'),
+ (0x2F800, 'M', '丽'),
+ (0x2F801, 'M', '丸'),
+ (0x2F802, 'M', '乁'),
+ (0x2F803, 'M', '𠄢'),
+ (0x2F804, 'M', '你'),
+ (0x2F805, 'M', '侮'),
+ (0x2F806, 'M', '侻'),
+ (0x2F807, 'M', '倂'),
+ (0x2F808, 'M', '偺'),
+ (0x2F809, 'M', '備'),
+ (0x2F80A, 'M', '僧'),
+ (0x2F80B, 'M', '像'),
+ (0x2F80C, 'M', '㒞'),
+ (0x2F80D, 'M', '𠘺'),
+ (0x2F80E, 'M', '免'),
+ (0x2F80F, 'M', '兔'),
+ (0x2F810, 'M', '兤'),
+ (0x2F811, 'M', '具'),
+ (0x2F812, 'M', '𠔜'),
+ (0x2F813, 'M', '㒹'),
+ (0x2F814, 'M', '內'),
+ (0x2F815, 'M', '再'),
+ (0x2F816, 'M', '𠕋'),
+ (0x2F817, 'M', '冗'),
+ (0x2F818, 'M', '冤'),
+ (0x2F819, 'M', '仌'),
+ (0x2F81A, 'M', '冬'),
+ (0x2F81B, 'M', '况'),
+ (0x2F81C, 'M', '𩇟'),
+ (0x2F81D, 'M', '凵'),
+ (0x2F81E, 'M', '刃'),
+ (0x2F81F, 'M', '㓟'),
+ (0x2F820, 'M', '刻'),
+ (0x2F821, 'M', '剆'),
+ (0x2F822, 'M', '割'),
+ (0x2F823, 'M', '剷'),
+ (0x2F824, 'M', '㔕'),
+ (0x2F825, 'M', '勇'),
+ (0x2F826, 'M', '勉'),
+ (0x2F827, 'M', '勤'),
+ (0x2F828, 'M', '勺'),
+ (0x2F829, 'M', '包'),
+ (0x2F82A, 'M', '匆'),
+ (0x2F82B, 'M', '北'),
+ (0x2F82C, 'M', '卉'),
+ (0x2F82D, 'M', '卑'),
+ (0x2F82E, 'M', '博'),
+ (0x2F82F, 'M', '即'),
+ (0x2F830, 'M', '卽'),
+ (0x2F831, 'M', '卿'),
+ (0x2F834, 'M', '𠨬'),
+ (0x2F835, 'M', '灰'),
+ (0x2F836, 'M', '及'),
+ (0x2F837, 'M', '叟'),
+ (0x2F838, 'M', '𠭣'),
+ (0x2F839, 'M', '叫'),
+ (0x2F83A, 'M', '叱'),
+ (0x2F83B, 'M', '吆'),
+ (0x2F83C, 'M', '咞'),
+ (0x2F83D, 'M', '吸'),
+ (0x2F83E, 'M', '呈'),
+ ]
+
+def _seg_77() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x2F83F, 'M', '周'),
+ (0x2F840, 'M', '咢'),
+ (0x2F841, 'M', '哶'),
+ (0x2F842, 'M', '唐'),
+ (0x2F843, 'M', '啓'),
+ (0x2F844, 'M', '啣'),
+ (0x2F845, 'M', '善'),
+ (0x2F847, 'M', '喙'),
+ (0x2F848, 'M', '喫'),
+ (0x2F849, 'M', '喳'),
+ (0x2F84A, 'M', '嗂'),
+ (0x2F84B, 'M', '圖'),
+ (0x2F84C, 'M', '嘆'),
+ (0x2F84D, 'M', '圗'),
+ (0x2F84E, 'M', '噑'),
+ (0x2F84F, 'M', '噴'),
+ (0x2F850, 'M', '切'),
+ (0x2F851, 'M', '壮'),
+ (0x2F852, 'M', '城'),
+ (0x2F853, 'M', '埴'),
+ (0x2F854, 'M', '堍'),
+ (0x2F855, 'M', '型'),
+ (0x2F856, 'M', '堲'),
+ (0x2F857, 'M', '報'),
+ (0x2F858, 'M', '墬'),
+ (0x2F859, 'M', '𡓤'),
+ (0x2F85A, 'M', '売'),
+ (0x2F85B, 'M', '壷'),
+ (0x2F85C, 'M', '夆'),
+ (0x2F85D, 'M', '多'),
+ (0x2F85E, 'M', '夢'),
+ (0x2F85F, 'M', '奢'),
+ (0x2F860, 'M', '𡚨'),
+ (0x2F861, 'M', '𡛪'),
+ (0x2F862, 'M', '姬'),
+ (0x2F863, 'M', '娛'),
+ (0x2F864, 'M', '娧'),
+ (0x2F865, 'M', '姘'),
+ (0x2F866, 'M', '婦'),
+ (0x2F867, 'M', '㛮'),
+ (0x2F868, 'X'),
+ (0x2F869, 'M', '嬈'),
+ (0x2F86A, 'M', '嬾'),
+ (0x2F86C, 'M', '𡧈'),
+ (0x2F86D, 'M', '寃'),
+ (0x2F86E, 'M', '寘'),
+ (0x2F86F, 'M', '寧'),
+ (0x2F870, 'M', '寳'),
+ (0x2F871, 'M', '𡬘'),
+ (0x2F872, 'M', '寿'),
+ (0x2F873, 'M', '将'),
+ (0x2F874, 'X'),
+ (0x2F875, 'M', '尢'),
+ (0x2F876, 'M', '㞁'),
+ (0x2F877, 'M', '屠'),
+ (0x2F878, 'M', '屮'),
+ (0x2F879, 'M', '峀'),
+ (0x2F87A, 'M', '岍'),
+ (0x2F87B, 'M', '𡷤'),
+ (0x2F87C, 'M', '嵃'),
+ (0x2F87D, 'M', '𡷦'),
+ (0x2F87E, 'M', '嵮'),
+ (0x2F87F, 'M', '嵫'),
+ (0x2F880, 'M', '嵼'),
+ (0x2F881, 'M', '巡'),
+ (0x2F882, 'M', '巢'),
+ (0x2F883, 'M', '㠯'),
+ (0x2F884, 'M', '巽'),
+ (0x2F885, 'M', '帨'),
+ (0x2F886, 'M', '帽'),
+ (0x2F887, 'M', '幩'),
+ (0x2F888, 'M', '㡢'),
+ (0x2F889, 'M', '𢆃'),
+ (0x2F88A, 'M', '㡼'),
+ (0x2F88B, 'M', '庰'),
+ (0x2F88C, 'M', '庳'),
+ (0x2F88D, 'M', '庶'),
+ (0x2F88E, 'M', '廊'),
+ (0x2F88F, 'M', '𪎒'),
+ (0x2F890, 'M', '廾'),
+ (0x2F891, 'M', '𢌱'),
+ (0x2F893, 'M', '舁'),
+ (0x2F894, 'M', '弢'),
+ (0x2F896, 'M', '㣇'),
+ (0x2F897, 'M', '𣊸'),
+ (0x2F898, 'M', '𦇚'),
+ (0x2F899, 'M', '形'),
+ (0x2F89A, 'M', '彫'),
+ (0x2F89B, 'M', '㣣'),
+ (0x2F89C, 'M', '徚'),
+ (0x2F89D, 'M', '忍'),
+ (0x2F89E, 'M', '志'),
+ (0x2F89F, 'M', '忹'),
+ (0x2F8A0, 'M', '悁'),
+ (0x2F8A1, 'M', '㤺'),
+ (0x2F8A2, 'M', '㤜'),
+ (0x2F8A3, 'M', '悔'),
+ (0x2F8A4, 'M', '𢛔'),
+ (0x2F8A5, 'M', '惇'),
+ (0x2F8A6, 'M', '慈'),
+ ]
+
+def _seg_78() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x2F8A7, 'M', '慌'),
+ (0x2F8A8, 'M', '慎'),
+ (0x2F8A9, 'M', '慌'),
+ (0x2F8AA, 'M', '慺'),
+ (0x2F8AB, 'M', '憎'),
+ (0x2F8AC, 'M', '憲'),
+ (0x2F8AD, 'M', '憤'),
+ (0x2F8AE, 'M', '憯'),
+ (0x2F8AF, 'M', '懞'),
+ (0x2F8B0, 'M', '懲'),
+ (0x2F8B1, 'M', '懶'),
+ (0x2F8B2, 'M', '成'),
+ (0x2F8B3, 'M', '戛'),
+ (0x2F8B4, 'M', '扝'),
+ (0x2F8B5, 'M', '抱'),
+ (0x2F8B6, 'M', '拔'),
+ (0x2F8B7, 'M', '捐'),
+ (0x2F8B8, 'M', '𢬌'),
+ (0x2F8B9, 'M', '挽'),
+ (0x2F8BA, 'M', '拼'),
+ (0x2F8BB, 'M', '捨'),
+ (0x2F8BC, 'M', '掃'),
+ (0x2F8BD, 'M', '揤'),
+ (0x2F8BE, 'M', '𢯱'),
+ (0x2F8BF, 'M', '搢'),
+ (0x2F8C0, 'M', '揅'),
+ (0x2F8C1, 'M', '掩'),
+ (0x2F8C2, 'M', '㨮'),
+ (0x2F8C3, 'M', '摩'),
+ (0x2F8C4, 'M', '摾'),
+ (0x2F8C5, 'M', '撝'),
+ (0x2F8C6, 'M', '摷'),
+ (0x2F8C7, 'M', '㩬'),
+ (0x2F8C8, 'M', '敏'),
+ (0x2F8C9, 'M', '敬'),
+ (0x2F8CA, 'M', '𣀊'),
+ (0x2F8CB, 'M', '旣'),
+ (0x2F8CC, 'M', '書'),
+ (0x2F8CD, 'M', '晉'),
+ (0x2F8CE, 'M', '㬙'),
+ (0x2F8CF, 'M', '暑'),
+ (0x2F8D0, 'M', '㬈'),
+ (0x2F8D1, 'M', '㫤'),
+ (0x2F8D2, 'M', '冒'),
+ (0x2F8D3, 'M', '冕'),
+ (0x2F8D4, 'M', '最'),
+ (0x2F8D5, 'M', '暜'),
+ (0x2F8D6, 'M', '肭'),
+ (0x2F8D7, 'M', '䏙'),
+ (0x2F8D8, 'M', '朗'),
+ (0x2F8D9, 'M', '望'),
+ (0x2F8DA, 'M', '朡'),
+ (0x2F8DB, 'M', '杞'),
+ (0x2F8DC, 'M', '杓'),
+ (0x2F8DD, 'M', '𣏃'),
+ (0x2F8DE, 'M', '㭉'),
+ (0x2F8DF, 'M', '柺'),
+ (0x2F8E0, 'M', '枅'),
+ (0x2F8E1, 'M', '桒'),
+ (0x2F8E2, 'M', '梅'),
+ (0x2F8E3, 'M', '𣑭'),
+ (0x2F8E4, 'M', '梎'),
+ (0x2F8E5, 'M', '栟'),
+ (0x2F8E6, 'M', '椔'),
+ (0x2F8E7, 'M', '㮝'),
+ (0x2F8E8, 'M', '楂'),
+ (0x2F8E9, 'M', '榣'),
+ (0x2F8EA, 'M', '槪'),
+ (0x2F8EB, 'M', '檨'),
+ (0x2F8EC, 'M', '𣚣'),
+ (0x2F8ED, 'M', '櫛'),
+ (0x2F8EE, 'M', '㰘'),
+ (0x2F8EF, 'M', '次'),
+ (0x2F8F0, 'M', '𣢧'),
+ (0x2F8F1, 'M', '歔'),
+ (0x2F8F2, 'M', '㱎'),
+ (0x2F8F3, 'M', '歲'),
+ (0x2F8F4, 'M', '殟'),
+ (0x2F8F5, 'M', '殺'),
+ (0x2F8F6, 'M', '殻'),
+ (0x2F8F7, 'M', '𣪍'),
+ (0x2F8F8, 'M', '𡴋'),
+ (0x2F8F9, 'M', '𣫺'),
+ (0x2F8FA, 'M', '汎'),
+ (0x2F8FB, 'M', '𣲼'),
+ (0x2F8FC, 'M', '沿'),
+ (0x2F8FD, 'M', '泍'),
+ (0x2F8FE, 'M', '汧'),
+ (0x2F8FF, 'M', '洖'),
+ (0x2F900, 'M', '派'),
+ (0x2F901, 'M', '海'),
+ (0x2F902, 'M', '流'),
+ (0x2F903, 'M', '浩'),
+ (0x2F904, 'M', '浸'),
+ (0x2F905, 'M', '涅'),
+ (0x2F906, 'M', '𣴞'),
+ (0x2F907, 'M', '洴'),
+ (0x2F908, 'M', '港'),
+ (0x2F909, 'M', '湮'),
+ (0x2F90A, 'M', '㴳'),
+ ]
+
+def _seg_79() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x2F90B, 'M', '滋'),
+ (0x2F90C, 'M', '滇'),
+ (0x2F90D, 'M', '𣻑'),
+ (0x2F90E, 'M', '淹'),
+ (0x2F90F, 'M', '潮'),
+ (0x2F910, 'M', '𣽞'),
+ (0x2F911, 'M', '𣾎'),
+ (0x2F912, 'M', '濆'),
+ (0x2F913, 'M', '瀹'),
+ (0x2F914, 'M', '瀞'),
+ (0x2F915, 'M', '瀛'),
+ (0x2F916, 'M', '㶖'),
+ (0x2F917, 'M', '灊'),
+ (0x2F918, 'M', '災'),
+ (0x2F919, 'M', '灷'),
+ (0x2F91A, 'M', '炭'),
+ (0x2F91B, 'M', '𠔥'),
+ (0x2F91C, 'M', '煅'),
+ (0x2F91D, 'M', '𤉣'),
+ (0x2F91E, 'M', '熜'),
+ (0x2F91F, 'X'),
+ (0x2F920, 'M', '爨'),
+ (0x2F921, 'M', '爵'),
+ (0x2F922, 'M', '牐'),
+ (0x2F923, 'M', '𤘈'),
+ (0x2F924, 'M', '犀'),
+ (0x2F925, 'M', '犕'),
+ (0x2F926, 'M', '𤜵'),
+ (0x2F927, 'M', '𤠔'),
+ (0x2F928, 'M', '獺'),
+ (0x2F929, 'M', '王'),
+ (0x2F92A, 'M', '㺬'),
+ (0x2F92B, 'M', '玥'),
+ (0x2F92C, 'M', '㺸'),
+ (0x2F92E, 'M', '瑇'),
+ (0x2F92F, 'M', '瑜'),
+ (0x2F930, 'M', '瑱'),
+ (0x2F931, 'M', '璅'),
+ (0x2F932, 'M', '瓊'),
+ (0x2F933, 'M', '㼛'),
+ (0x2F934, 'M', '甤'),
+ (0x2F935, 'M', '𤰶'),
+ (0x2F936, 'M', '甾'),
+ (0x2F937, 'M', '𤲒'),
+ (0x2F938, 'M', '異'),
+ (0x2F939, 'M', '𢆟'),
+ (0x2F93A, 'M', '瘐'),
+ (0x2F93B, 'M', '𤾡'),
+ (0x2F93C, 'M', '𤾸'),
+ (0x2F93D, 'M', '𥁄'),
+ (0x2F93E, 'M', '㿼'),
+ (0x2F93F, 'M', '䀈'),
+ (0x2F940, 'M', '直'),
+ (0x2F941, 'M', '𥃳'),
+ (0x2F942, 'M', '𥃲'),
+ (0x2F943, 'M', '𥄙'),
+ (0x2F944, 'M', '𥄳'),
+ (0x2F945, 'M', '眞'),
+ (0x2F946, 'M', '真'),
+ (0x2F948, 'M', '睊'),
+ (0x2F949, 'M', '䀹'),
+ (0x2F94A, 'M', '瞋'),
+ (0x2F94B, 'M', '䁆'),
+ (0x2F94C, 'M', '䂖'),
+ (0x2F94D, 'M', '𥐝'),
+ (0x2F94E, 'M', '硎'),
+ (0x2F94F, 'M', '碌'),
+ (0x2F950, 'M', '磌'),
+ (0x2F951, 'M', '䃣'),
+ (0x2F952, 'M', '𥘦'),
+ (0x2F953, 'M', '祖'),
+ (0x2F954, 'M', '𥚚'),
+ (0x2F955, 'M', '𥛅'),
+ (0x2F956, 'M', '福'),
+ (0x2F957, 'M', '秫'),
+ (0x2F958, 'M', '䄯'),
+ (0x2F959, 'M', '穀'),
+ (0x2F95A, 'M', '穊'),
+ (0x2F95B, 'M', '穏'),
+ (0x2F95C, 'M', '𥥼'),
+ (0x2F95D, 'M', '𥪧'),
+ (0x2F95F, 'X'),
+ (0x2F960, 'M', '䈂'),
+ (0x2F961, 'M', '𥮫'),
+ (0x2F962, 'M', '篆'),
+ (0x2F963, 'M', '築'),
+ (0x2F964, 'M', '䈧'),
+ (0x2F965, 'M', '𥲀'),
+ (0x2F966, 'M', '糒'),
+ (0x2F967, 'M', '䊠'),
+ (0x2F968, 'M', '糨'),
+ (0x2F969, 'M', '糣'),
+ (0x2F96A, 'M', '紀'),
+ (0x2F96B, 'M', '𥾆'),
+ (0x2F96C, 'M', '絣'),
+ (0x2F96D, 'M', '䌁'),
+ (0x2F96E, 'M', '緇'),
+ (0x2F96F, 'M', '縂'),
+ (0x2F970, 'M', '繅'),
+ (0x2F971, 'M', '䌴'),
+ ]
+
+def _seg_80() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x2F972, 'M', '𦈨'),
+ (0x2F973, 'M', '𦉇'),
+ (0x2F974, 'M', '䍙'),
+ (0x2F975, 'M', '𦋙'),
+ (0x2F976, 'M', '罺'),
+ (0x2F977, 'M', '𦌾'),
+ (0x2F978, 'M', '羕'),
+ (0x2F979, 'M', '翺'),
+ (0x2F97A, 'M', '者'),
+ (0x2F97B, 'M', '𦓚'),
+ (0x2F97C, 'M', '𦔣'),
+ (0x2F97D, 'M', '聠'),
+ (0x2F97E, 'M', '𦖨'),
+ (0x2F97F, 'M', '聰'),
+ (0x2F980, 'M', '𣍟'),
+ (0x2F981, 'M', '䏕'),
+ (0x2F982, 'M', '育'),
+ (0x2F983, 'M', '脃'),
+ (0x2F984, 'M', '䐋'),
+ (0x2F985, 'M', '脾'),
+ (0x2F986, 'M', '媵'),
+ (0x2F987, 'M', '𦞧'),
+ (0x2F988, 'M', '𦞵'),
+ (0x2F989, 'M', '𣎓'),
+ (0x2F98A, 'M', '𣎜'),
+ (0x2F98B, 'M', '舁'),
+ (0x2F98C, 'M', '舄'),
+ (0x2F98D, 'M', '辞'),
+ (0x2F98E, 'M', '䑫'),
+ (0x2F98F, 'M', '芑'),
+ (0x2F990, 'M', '芋'),
+ (0x2F991, 'M', '芝'),
+ (0x2F992, 'M', '劳'),
+ (0x2F993, 'M', '花'),
+ (0x2F994, 'M', '芳'),
+ (0x2F995, 'M', '芽'),
+ (0x2F996, 'M', '苦'),
+ (0x2F997, 'M', '𦬼'),
+ (0x2F998, 'M', '若'),
+ (0x2F999, 'M', '茝'),
+ (0x2F99A, 'M', '荣'),
+ (0x2F99B, 'M', '莭'),
+ (0x2F99C, 'M', '茣'),
+ (0x2F99D, 'M', '莽'),
+ (0x2F99E, 'M', '菧'),
+ (0x2F99F, 'M', '著'),
+ (0x2F9A0, 'M', '荓'),
+ (0x2F9A1, 'M', '菊'),
+ (0x2F9A2, 'M', '菌'),
+ (0x2F9A3, 'M', '菜'),
+ (0x2F9A4, 'M', '𦰶'),
+ (0x2F9A5, 'M', '𦵫'),
+ (0x2F9A6, 'M', '𦳕'),
+ (0x2F9A7, 'M', '䔫'),
+ (0x2F9A8, 'M', '蓱'),
+ (0x2F9A9, 'M', '蓳'),
+ (0x2F9AA, 'M', '蔖'),
+ (0x2F9AB, 'M', '𧏊'),
+ (0x2F9AC, 'M', '蕤'),
+ (0x2F9AD, 'M', '𦼬'),
+ (0x2F9AE, 'M', '䕝'),
+ (0x2F9AF, 'M', '䕡'),
+ (0x2F9B0, 'M', '𦾱'),
+ (0x2F9B1, 'M', '𧃒'),
+ (0x2F9B2, 'M', '䕫'),
+ (0x2F9B3, 'M', '虐'),
+ (0x2F9B4, 'M', '虜'),
+ (0x2F9B5, 'M', '虧'),
+ (0x2F9B6, 'M', '虩'),
+ (0x2F9B7, 'M', '蚩'),
+ (0x2F9B8, 'M', '蚈'),
+ (0x2F9B9, 'M', '蜎'),
+ (0x2F9BA, 'M', '蛢'),
+ (0x2F9BB, 'M', '蝹'),
+ (0x2F9BC, 'M', '蜨'),
+ (0x2F9BD, 'M', '蝫'),
+ (0x2F9BE, 'M', '螆'),
+ (0x2F9BF, 'X'),
+ (0x2F9C0, 'M', '蟡'),
+ (0x2F9C1, 'M', '蠁'),
+ (0x2F9C2, 'M', '䗹'),
+ (0x2F9C3, 'M', '衠'),
+ (0x2F9C4, 'M', '衣'),
+ (0x2F9C5, 'M', '𧙧'),
+ (0x2F9C6, 'M', '裗'),
+ (0x2F9C7, 'M', '裞'),
+ (0x2F9C8, 'M', '䘵'),
+ (0x2F9C9, 'M', '裺'),
+ (0x2F9CA, 'M', '㒻'),
+ (0x2F9CB, 'M', '𧢮'),
+ (0x2F9CC, 'M', '𧥦'),
+ (0x2F9CD, 'M', '䚾'),
+ (0x2F9CE, 'M', '䛇'),
+ (0x2F9CF, 'M', '誠'),
+ (0x2F9D0, 'M', '諭'),
+ (0x2F9D1, 'M', '變'),
+ (0x2F9D2, 'M', '豕'),
+ (0x2F9D3, 'M', '𧲨'),
+ (0x2F9D4, 'M', '貫'),
+ (0x2F9D5, 'M', '賁'),
+ ]
+
+def _seg_81() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x2F9D6, 'M', '贛'),
+ (0x2F9D7, 'M', '起'),
+ (0x2F9D8, 'M', '𧼯'),
+ (0x2F9D9, 'M', '𠠄'),
+ (0x2F9DA, 'M', '跋'),
+ (0x2F9DB, 'M', '趼'),
+ (0x2F9DC, 'M', '跰'),
+ (0x2F9DD, 'M', '𠣞'),
+ (0x2F9DE, 'M', '軔'),
+ (0x2F9DF, 'M', '輸'),
+ (0x2F9E0, 'M', '𨗒'),
+ (0x2F9E1, 'M', '𨗭'),
+ (0x2F9E2, 'M', '邔'),
+ (0x2F9E3, 'M', '郱'),
+ (0x2F9E4, 'M', '鄑'),
+ (0x2F9E5, 'M', '𨜮'),
+ (0x2F9E6, 'M', '鄛'),
+ (0x2F9E7, 'M', '鈸'),
+ (0x2F9E8, 'M', '鋗'),
+ (0x2F9E9, 'M', '鋘'),
+ (0x2F9EA, 'M', '鉼'),
+ (0x2F9EB, 'M', '鏹'),
+ (0x2F9EC, 'M', '鐕'),
+ (0x2F9ED, 'M', '𨯺'),
+ (0x2F9EE, 'M', '開'),
+ (0x2F9EF, 'M', '䦕'),
+ (0x2F9F0, 'M', '閷'),
+ (0x2F9F1, 'M', '𨵷'),
+ (0x2F9F2, 'M', '䧦'),
+ (0x2F9F3, 'M', '雃'),
+ (0x2F9F4, 'M', '嶲'),
+ (0x2F9F5, 'M', '霣'),
+ (0x2F9F6, 'M', '𩅅'),
+ (0x2F9F7, 'M', '𩈚'),
+ (0x2F9F8, 'M', '䩮'),
+ (0x2F9F9, 'M', '䩶'),
+ (0x2F9FA, 'M', '韠'),
+ (0x2F9FB, 'M', '𩐊'),
+ (0x2F9FC, 'M', '䪲'),
+ (0x2F9FD, 'M', '𩒖'),
+ (0x2F9FE, 'M', '頋'),
+ (0x2FA00, 'M', '頩'),
+ (0x2FA01, 'M', '𩖶'),
+ (0x2FA02, 'M', '飢'),
+ (0x2FA03, 'M', '䬳'),
+ (0x2FA04, 'M', '餩'),
+ (0x2FA05, 'M', '馧'),
+ (0x2FA06, 'M', '駂'),
+ (0x2FA07, 'M', '駾'),
+ (0x2FA08, 'M', '䯎'),
+ (0x2FA09, 'M', '𩬰'),
+ (0x2FA0A, 'M', '鬒'),
+ (0x2FA0B, 'M', '鱀'),
+ (0x2FA0C, 'M', '鳽'),
+ (0x2FA0D, 'M', '䳎'),
+ (0x2FA0E, 'M', '䳭'),
+ (0x2FA0F, 'M', '鵧'),
+ (0x2FA10, 'M', '𪃎'),
+ (0x2FA11, 'M', '䳸'),
+ (0x2FA12, 'M', '𪄅'),
+ (0x2FA13, 'M', '𪈎'),
+ (0x2FA14, 'M', '𪊑'),
+ (0x2FA15, 'M', '麻'),
+ (0x2FA16, 'M', '䵖'),
+ (0x2FA17, 'M', '黹'),
+ (0x2FA18, 'M', '黾'),
+ (0x2FA19, 'M', '鼅'),
+ (0x2FA1A, 'M', '鼏'),
+ (0x2FA1B, 'M', '鼖'),
+ (0x2FA1C, 'M', '鼻'),
+ (0x2FA1D, 'M', '𪘀'),
+ (0x2FA1E, 'X'),
+ (0x30000, 'V'),
+ (0x3134B, 'X'),
+ (0x31350, 'V'),
+ (0x323B0, 'X'),
+ (0xE0100, 'I'),
+ (0xE01F0, 'X'),
+ ]
+
+uts46data = tuple(
+ _seg_0()
+ + _seg_1()
+ + _seg_2()
+ + _seg_3()
+ + _seg_4()
+ + _seg_5()
+ + _seg_6()
+ + _seg_7()
+ + _seg_8()
+ + _seg_9()
+ + _seg_10()
+ + _seg_11()
+ + _seg_12()
+ + _seg_13()
+ + _seg_14()
+ + _seg_15()
+ + _seg_16()
+ + _seg_17()
+ + _seg_18()
+ + _seg_19()
+ + _seg_20()
+ + _seg_21()
+ + _seg_22()
+ + _seg_23()
+ + _seg_24()
+ + _seg_25()
+ + _seg_26()
+ + _seg_27()
+ + _seg_28()
+ + _seg_29()
+ + _seg_30()
+ + _seg_31()
+ + _seg_32()
+ + _seg_33()
+ + _seg_34()
+ + _seg_35()
+ + _seg_36()
+ + _seg_37()
+ + _seg_38()
+ + _seg_39()
+ + _seg_40()
+ + _seg_41()
+ + _seg_42()
+ + _seg_43()
+ + _seg_44()
+ + _seg_45()
+ + _seg_46()
+ + _seg_47()
+ + _seg_48()
+ + _seg_49()
+ + _seg_50()
+ + _seg_51()
+ + _seg_52()
+ + _seg_53()
+ + _seg_54()
+ + _seg_55()
+ + _seg_56()
+ + _seg_57()
+ + _seg_58()
+ + _seg_59()
+ + _seg_60()
+ + _seg_61()
+ + _seg_62()
+ + _seg_63()
+ + _seg_64()
+ + _seg_65()
+ + _seg_66()
+ + _seg_67()
+ + _seg_68()
+ + _seg_69()
+ + _seg_70()
+ + _seg_71()
+ + _seg_72()
+ + _seg_73()
+ + _seg_74()
+ + _seg_75()
+ + _seg_76()
+ + _seg_77()
+ + _seg_78()
+ + _seg_79()
+ + _seg_80()
+ + _seg_81()
+) # type: Tuple[Union[Tuple[int, str], Tuple[int, str, str]], ...]
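
Editor's note: the generated table above encodes UTS #46 mapping data as sorted ranges. Each tuple gives the first code point of a range, a status character, and, for mapped rows, the replacement string; a range extends until the next row's starting code point. Statuses appearing in these segments include 'V' (valid), 'M' (mapped), 'X' (disallowed), 'I' (ignored), and '3' (mapped only under relaxed STD3 rules). Below is a minimal sketch of how such a table is typically consumed; the `_lookup` and `remap_char` helpers and the simplified status handling are illustrative, not part of the vendored module.

```python
import bisect
from typing import Optional, Tuple, Union

Row = Union[Tuple[int, str], Tuple[int, str, str]]

def _lookup(table: Tuple[Row, ...], code_point: int) -> Row:
    # Rows are sorted by starting code point. 'Z' sorts after every
    # status letter, so bisect_left lands just past the row that starts
    # at code_point, and subtracting one selects the covering range.
    return table[bisect.bisect_left(table, (code_point, 'Z')) - 1]

def remap_char(table: Tuple[Row, ...], char: str) -> Optional[str]:
    row = _lookup(table, ord(char))
    status = row[1]
    if status == 'V':    # valid: kept as-is
        return char
    if status == 'M':    # mapped: replaced by the third field
        return row[2]
    if status == 'I':    # ignored: dropped from the output
        return ''
    return None          # 'X' disallowed; '3' only valid under STD3 relaxation

# Illustrative usage against this module's table:
#   from uts46data import uts46data
#   remap_char(uts46data, '\U0001D552')  # double-struck small a -> 'a'
```
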
diff --git a/billinglayer/python/jwt-1.3.1.dist-info/INSTALLER b/billinglayer/python/jwt-1.3.1.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/billinglayer/python/jwt-1.3.1.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/billinglayer/python/jwt-1.3.1.dist-info/LICENSE b/billinglayer/python/jwt-1.3.1.dist-info/LICENSE
new file mode 100644
index 0000000..9cf69c1
--- /dev/null
+++ b/billinglayer/python/jwt-1.3.1.dist-info/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2017 Gehirn Inc.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/billinglayer/python/jwt-1.3.1.dist-info/METADATA b/billinglayer/python/jwt-1.3.1.dist-info/METADATA
new file mode 100644
index 0000000..1d222a8
--- /dev/null
+++ b/billinglayer/python/jwt-1.3.1.dist-info/METADATA
@@ -0,0 +1,146 @@
+Metadata-Version: 2.1
+Name: jwt
+Version: 1.3.1
+Summary: JSON Web Token library for Python 3.
+Home-page: https://github.com/GehirnInc/python-jwt
+Author: Kohei YOSHIDA
+Author-email: kohei.yoshida@gehirn.co.jp
+License: UNKNOWN
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Topic :: Internet :: WWW/HTTP
+Classifier: Topic :: Security
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Requires-Python: >= 3.6
+Requires-Dist: cryptography (!=3.4.0,>=3.1)
+
+.. image:: https://travis-ci.org/GehirnInc/python-jwt.svg?branch=master
+ :target: https://travis-ci.org/GehirnInc/python-jwt
+.. image:: https://coveralls.io/repos/GehirnInc/python-jwt/badge.png?branch=master
+ :target: https://coveralls.io/r/GehirnInc/python-jwt?branch=master
+.. image:: https://badge.fury.io/py/jwt.svg?dummy
+ :target: http://badge.fury.io/py/jwt
+
+python-jwt
+==========
+
+*python-jwt* is a JSON Web Token (JWT) implementation in Python developed by `Gehirn Inc`_.
+
+
+Examples
+--------
+
+.. code-block:: python
+
+ import json
+ from datetime import datetime, timedelta, timezone
+
+ from jwt import (
+ JWT,
+ jwk_from_dict,
+ jwk_from_pem,
+ )
+ from jwt.utils import get_int_from_datetime
+
+
+ instance = JWT()
+
+ message = {
+ 'iss': 'https://example.com/',
+ 'sub': 'yosida95',
+ 'iat': get_int_from_datetime(datetime.now(timezone.utc)),
+ 'exp': get_int_from_datetime(
+ datetime.now(timezone.utc) + timedelta(hours=1)),
+ }
+
+ """
+ Encode the message into a JWT (JWS).
+ """
+
+ # Load an RSA key from a JWK dict.
+ signing_key = jwk_from_dict({
+ 'kty': 'RSA',
+ 'e': 'AQAB',
+ 'n': '...',
+ 'd': '...'})
+ # Or load an RSA key from a PEM file.
+ with open('rsa_private_key.pem', 'rb') as fh:
+ signing_key = jwk_from_pem(fh.read())
+ # You can also load an octet key in the same manner as an RSA key.
+ # signing_key = jwk_from_dict({'kty': 'oct', 'k': '...'})
+
+ compact_jws = instance.encode(message, signing_key, alg='RS256')
+
+ """
+ Decode the JWT, verifying its signature.
+ """
+
+ # Load the public key (a JWK dict stored as JSON) corresponding to the signing private key.
+ with open('rsa_public_key.json', 'r') as fh:
+ verifying_key = jwk_from_dict(json.load(fh))
+
+ message_received = instance.decode(
+ compact_jws, verifying_key, do_time_check=True)
+
+ """
+ Successfully retrieved the `message` from the `compact_jws`.
+ """
+ assert message == message_received
+
+
+Installation
+------------
+
+You can install python-jwt with pip.
+
+.. code-block:: shell
+
+ $ pip install jwt
+
+
+Implementation Details
+----------------------
+
+Supported Algorithms
+~~~~~~~~~~~~~~~~~~~~
+
+- Unsecured
+
+ - none (disabled by default for security)
+
+- Symmetric
+
+ - HS256
+ - HS384
+ - HS512
+
+- Asymmetric
+
+ - PS256
+ - PS384
+ - PS512
+ - RS256
+ - RS384
+ - RS512
+
+Supported Python Versions
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Python 3.6+
+
+
+License
+-------
+python-jwt is licensed under the Apache License version 2. See ./LICENSE.rst.
+
+
+.. _Gehirn Inc: http://www.gehirn.co.jp/
+
+
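
Editor's note: the Supported Algorithms list in the metadata above also covers the symmetric HS* family, which the README example does not exercise. A hedged sketch of that path follows; the secret value and the `oct_jwk_from_secret` helper are illustrative, and per the JWK spec the 'k' member of an 'oct' key is the shared secret base64url-encoded with trailing '=' padding stripped.

```python
import base64

from jwt import JWT, jwk_from_dict

def oct_jwk_from_secret(secret: bytes) -> dict:
    # JWK 'oct' keys carry the raw secret base64url-encoded, unpadded.
    k = base64.urlsafe_b64encode(secret).rstrip(b'=').decode('ascii')
    return {'kty': 'oct', 'k': k}

instance = JWT()
signing_key = jwk_from_dict(oct_jwk_from_secret(b'an illustrative shared secret'))

token = instance.encode({'sub': 'yosida95'}, signing_key, alg='HS256')
claims = instance.decode(token, signing_key)  # the same key verifies an HMAC
assert claims['sub'] == 'yosida95'
```
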
diff --git a/billinglayer/python/jwt-1.3.1.dist-info/RECORD b/billinglayer/python/jwt-1.3.1.dist-info/RECORD
new file mode 100644
index 0000000..cece7fd
--- /dev/null
+++ b/billinglayer/python/jwt-1.3.1.dist-info/RECORD
@@ -0,0 +1,24 @@
+jwt-1.3.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+jwt-1.3.1.dist-info/LICENSE,sha256=k8azE68RcLJ3OnQA3SRI1ebXxCDW2EAO_HhROs6xuzE,11341
+jwt-1.3.1.dist-info/METADATA,sha256=Symw7Jt6qIHlxr29twpnSLA-lh9nZHgdcwIxEanHe-M,3439
+jwt-1.3.1.dist-info/RECORD,,
+jwt-1.3.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+jwt-1.3.1.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92
+jwt-1.3.1.dist-info/top_level.txt,sha256=RP5DHNyJbMq2ka0FmfTgoSaQzh7e3r5XuCWCO8a00k8,4
+jwt/__init__.py,sha256=_fcS7PLAyhWBShVlSGKYcmUNBTcRFDEymTG6ROXahac,1233
+jwt/__pycache__/__init__.cpython-311.pyc,,
+jwt/__pycache__/exceptions.cpython-311.pyc,,
+jwt/__pycache__/jwa.cpython-311.pyc,,
+jwt/__pycache__/jwk.cpython-311.pyc,,
+jwt/__pycache__/jwkset.cpython-311.pyc,,
+jwt/__pycache__/jws.cpython-311.pyc,,
+jwt/__pycache__/jwt.cpython-311.pyc,,
+jwt/__pycache__/utils.cpython-311.pyc,,
+jwt/exceptions.py,sha256=IbWr8886syf-DdCHKmS6leAN1zHh6Jx9iBhFE4G47VU,1056
+jwt/jwa.py,sha256=0oN1ymjHOqNl6gMpBlPd07-Am2ZCejSf3Btgn4CINdA,6003
+jwt/jwk.py,sha256=P-HBDICpGDyROp8jZGIr5jzFxBkB9NnLaWg1s1SxH30,13549
+jwt/jwkset.py,sha256=fKnzEkf0j6D0qt0z5qTbfl7JV0a9tHqBWc6LmO7CL38,1895
+jwt/jws.py,sha256=_ZcFxpXXPKWtkTI2YTMPoWo8IfKO_WpFmxOYObxyeY8,3445
+jwt/jwt.py,sha256=MuIyOXudmNruv-lXVfYKUOVD2yKVSI3Q9VJ4HxApT6M,4693
+jwt/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+jwt/utils.py,sha256=4bwUBehyYzHJwTokeTXmSs_GRh59zmZf1-cPS9sbHgI,2050
diff --git a/billinglayer/python/jwt-1.3.1.dist-info/REQUESTED b/billinglayer/python/jwt-1.3.1.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
diff --git a/billinglayer/python/jwt-1.3.1.dist-info/WHEEL b/billinglayer/python/jwt-1.3.1.dist-info/WHEEL
new file mode 100644
index 0000000..5bad85f
--- /dev/null
+++ b/billinglayer/python/jwt-1.3.1.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.37.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/billinglayer/python/jwt-1.3.1.dist-info/top_level.txt b/billinglayer/python/jwt-1.3.1.dist-info/top_level.txt
new file mode 100644
index 0000000..27ccc9b
--- /dev/null
+++ b/billinglayer/python/jwt-1.3.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+jwt
diff --git a/billinglayer/python/jwt/__init__.py b/billinglayer/python/jwt/__init__.py
new file mode 100644
index 0000000..3ce590c
--- /dev/null
+++ b/billinglayer/python/jwt/__init__.py
@@ -0,0 +1,51 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2017 Gehirn Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .jwa import std_hash_by_alg
+from .jwk import (
+ AbstractJWKBase,
+ jwk_from_dict,
+ jwk_from_bytes,
+ jwk_from_pem,
+ jwk_from_der,
+ supported_key_types,
+)
+from .jwkset import JWKSet
+from .jwa import (
+ AbstractSigningAlgorithm,
+ supported_signing_algorithms,
+)
+from .jwt import JWT
+
+
+__all__ = [
+ # .jwa
+ 'std_hash_by_alg',
+ # .jwk
+ 'AbstractJWKBase',
+ 'jwk_from_bytes',
+ 'jwk_from_dict',
+ 'jwk_from_pem',
+ 'jwk_from_der',
+ 'supported_key_types',
+ # .jwkset
+ 'JWKSet',
+ # .jws
+ 'AbstractSigningAlgorithm',
+ 'supported_signing_algorithms',
+ # .jwt
+ 'JWT',
+]
diff --git a/billinglayer/python/jwt/__pycache__/__init__.cpython-311.pyc b/billinglayer/python/jwt/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..3745ab4
Binary files /dev/null and b/billinglayer/python/jwt/__pycache__/__init__.cpython-311.pyc differ
diff --git a/billinglayer/python/jwt/__pycache__/exceptions.cpython-311.pyc b/billinglayer/python/jwt/__pycache__/exceptions.cpython-311.pyc
new file mode 100644
index 0000000..f10fc6d
Binary files /dev/null and b/billinglayer/python/jwt/__pycache__/exceptions.cpython-311.pyc differ
diff --git a/billinglayer/python/jwt/__pycache__/jwa.cpython-311.pyc b/billinglayer/python/jwt/__pycache__/jwa.cpython-311.pyc
new file mode 100644
index 0000000..647f42f
Binary files /dev/null and b/billinglayer/python/jwt/__pycache__/jwa.cpython-311.pyc differ
diff --git a/billinglayer/python/jwt/__pycache__/jwk.cpython-311.pyc b/billinglayer/python/jwt/__pycache__/jwk.cpython-311.pyc
new file mode 100644
index 0000000..4e4449c
Binary files /dev/null and b/billinglayer/python/jwt/__pycache__/jwk.cpython-311.pyc differ
diff --git a/billinglayer/python/jwt/__pycache__/jwkset.cpython-311.pyc b/billinglayer/python/jwt/__pycache__/jwkset.cpython-311.pyc
new file mode 100644
index 0000000..29b0057
Binary files /dev/null and b/billinglayer/python/jwt/__pycache__/jwkset.cpython-311.pyc differ
diff --git a/billinglayer/python/jwt/__pycache__/jws.cpython-311.pyc b/billinglayer/python/jwt/__pycache__/jws.cpython-311.pyc
new file mode 100644
index 0000000..b0dca9a
Binary files /dev/null and b/billinglayer/python/jwt/__pycache__/jws.cpython-311.pyc differ
diff --git a/billinglayer/python/jwt/__pycache__/jwt.cpython-311.pyc b/billinglayer/python/jwt/__pycache__/jwt.cpython-311.pyc
new file mode 100644
index 0000000..8ac8ed9
Binary files /dev/null and b/billinglayer/python/jwt/__pycache__/jwt.cpython-311.pyc differ
diff --git a/billinglayer/python/jwt/__pycache__/utils.cpython-311.pyc b/billinglayer/python/jwt/__pycache__/utils.cpython-311.pyc
new file mode 100644
index 0000000..9b9105d
Binary files /dev/null and b/billinglayer/python/jwt/__pycache__/utils.cpython-311.pyc differ
diff --git a/billinglayer/python/jwt/exceptions.py b/billinglayer/python/jwt/exceptions.py
new file mode 100644
index 0000000..ec4b9e1
--- /dev/null
+++ b/billinglayer/python/jwt/exceptions.py
@@ -0,0 +1,49 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2017 Gehirn Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+class JWTException(Exception):
+ """
+ common base class for all exceptions used in python-jwt
+ """
+
+
+class MalformedJWKError(JWTException):
+ pass
+
+
+class UnsupportedKeyTypeError(JWTException):
+ pass
+
+
+class InvalidKeyTypeError(JWTException):
+ pass
+
+
+class JWSEncodeError(JWTException):
+ pass
+
+
+class JWSDecodeError(JWTException):
+ pass
+
+
+class JWTEncodeError(JWTException):
+ pass
+
+
+class JWTDecodeError(JWTException):
+ pass
diff --git a/billinglayer/python/jwt/jwa.py b/billinglayer/python/jwt/jwa.py
new file mode 100644
index 0000000..f5986cd
--- /dev/null
+++ b/billinglayer/python/jwt/jwa.py
@@ -0,0 +1,198 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2017 Gehirn Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import hashlib
+import hmac
+from typing import (
+ Any,
+ Dict,
+ Callable,
+ Optional,
+)
+
+from cryptography.hazmat.primitives.asymmetric import padding
+from cryptography.hazmat.primitives.hashes import (
+ SHA256,
+ SHA384,
+ SHA512,
+)
+
+from .exceptions import InvalidKeyTypeError
+from .jwk import AbstractJWKBase
+
+
+def std_hash_by_alg(alg: str) -> Callable[[bytes], object]:
+ if alg.endswith('S256'):
+ return hashlib.sha256
+ if alg.endswith('S384'):
+ return hashlib.sha384
+ if alg.endswith('S512'):
+ return hashlib.sha512
+ raise ValueError('{} is not supported'.format(alg))
+
+
+class AbstractSigningAlgorithm:
+
+ def sign(self, message: bytes, key: Optional[AbstractJWKBase]) -> bytes:
+ raise NotImplementedError() # pragma: no cover
+
+ def verify(self, message: bytes, key: Optional[AbstractJWKBase],
+ signature: bytes) -> bool:
+ raise NotImplementedError() # pragma: no cover
+
+
+class NoneAlgorithm(AbstractSigningAlgorithm):
+
+ def sign(self, message: bytes, key: Optional[AbstractJWKBase]) -> bytes:
+ return b''
+
+ def verify(self, message: bytes, key: Optional[AbstractJWKBase],
+ signature: bytes) -> bool:
+ return hmac.compare_digest(signature, b'')
+
+
+none = NoneAlgorithm()
+
+
+class HMACAlgorithm(AbstractSigningAlgorithm):
+
+ def __init__(self, hash_fun: Callable[[], object]) -> None:
+ self.hash_fun = hash_fun
+
+ def _check_key(self, key: Optional[AbstractJWKBase]) -> AbstractJWKBase:
+ if not key or key.get_kty() != 'oct':
+ raise InvalidKeyTypeError('Octet key is required')
+ return key
+
+ def _sign(self, message: bytes, key: bytes) -> bytes:
+ return hmac.new(key, message, self.hash_fun).digest()
+
+ def sign(self, message: bytes, key: Optional[AbstractJWKBase]) -> bytes:
+ key = self._check_key(key)
+ return key.sign(message, signer=self._sign)
+
+ def verify(self, message: bytes, key: Optional[AbstractJWKBase],
+ signature: bytes) -> bool:
+ key = self._check_key(key)
+ return key.verify(message, signature, signer=self._sign)
+
+
+HS256 = HMACAlgorithm(hashlib.sha256)
+HS384 = HMACAlgorithm(hashlib.sha384)
+HS512 = HMACAlgorithm(hashlib.sha512)
+
+
+class RSAAlgorithm(AbstractSigningAlgorithm):
+
+ def __init__(self, hash_fun: object) -> None:
+ self.hash_fun = hash_fun
+
+ def _check_key(
+ self,
+ key: Optional[AbstractJWKBase],
+ must_sign_key: bool = False,
+ ) -> AbstractJWKBase:
+ if not key or key.get_kty() != 'RSA':
+ raise InvalidKeyTypeError('RSA key is required')
+ if must_sign_key and not key.is_sign_key():
+            raise InvalidKeyTypeError(
+                'an RSA private key is required, '
+                'but an RSA public key was passed')
+ return key
+
+ def sign(self, message: bytes, key: Optional[AbstractJWKBase]) -> bytes:
+ key = self._check_key(key, must_sign_key=True)
+ return key.sign(message, hash_fun=self.hash_fun,
+ padding=padding.PKCS1v15())
+
+ def verify(
+ self,
+ message: bytes,
+ key: Optional[AbstractJWKBase],
+ signature: bytes,
+ ) -> bool:
+ key = self._check_key(key)
+ return key.verify(message, signature, hash_fun=self.hash_fun,
+ padding=padding.PKCS1v15())
+
+
+RS256 = RSAAlgorithm(SHA256)
+RS384 = RSAAlgorithm(SHA384)
+RS512 = RSAAlgorithm(SHA512)
+
+
+class PSSRSAAlgorithm(AbstractSigningAlgorithm):
+ def __init__(self, hash_fun: Callable[[], Any]) -> None:
+ self.hash_fun = hash_fun
+
+ def _check_key(
+ self,
+ key: Optional[AbstractJWKBase],
+ must_sign_key: bool = False,
+ ) -> AbstractJWKBase:
+ if not key or key.get_kty() != 'RSA':
+ raise InvalidKeyTypeError('RSA key is required')
+ if must_sign_key and not key.is_sign_key():
+            raise InvalidKeyTypeError(
+                'an RSA private key is required, '
+                'but an RSA public key was passed')
+ return key
+
+ def sign(self, message: bytes, key: Optional[AbstractJWKBase]) -> bytes:
+ key = self._check_key(key, must_sign_key=True)
+ return key.sign(
+ message,
+ hash_fun=self.hash_fun,
+ padding=padding.PSS( # type: ignore[no-untyped-call]
+ mgf=padding.MGF1(self.hash_fun()),
+ salt_length=self.hash_fun().digest_size,
+ ),
+ )
+
+ def verify(
+ self,
+ message: bytes,
+ key: Optional[AbstractJWKBase],
+ signature: bytes
+ ) -> bool:
+ key = self._check_key(key)
+ return key.verify(
+ message,
+ signature,
+ hash_fun=self.hash_fun,
+ padding=padding.PSS( # type: ignore[no-untyped-call]
+ mgf=padding.MGF1(self.hash_fun()),
+ salt_length=self.hash_fun().digest_size,
+ ),
+ )
+
+
+PS256 = PSSRSAAlgorithm(SHA256)
+PS384 = PSSRSAAlgorithm(SHA384)
+PS512 = PSSRSAAlgorithm(SHA512)
+
+
+def supported_signing_algorithms() -> Dict[str, AbstractSigningAlgorithm]:
+ # NOTE(yosida95): exclude vulnerable 'none' algorithm by default.
+ return {
+ 'HS256': HS256,
+ 'HS384': HS384,
+ 'HS512': HS512,
+ 'RS256': RS256,
+ 'RS384': RS384,
+ 'RS512': RS512,
+ 'PS256': PS256,
+ 'PS384': PS384,
+ 'PS512': PS512,
+ }
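+
+
+# For reference, std_hash_by_alg maps any '*S256'/'*S384'/'*S512' name to
+# the matching hashlib constructor:
+#   std_hash_by_alg('RS256') is hashlib.sha256  # True
+#   std_hash_by_alg('PS512') is hashlib.sha512  # True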
diff --git a/billinglayer/python/jwt/jwk.py b/billinglayer/python/jwt/jwk.py
new file mode 100644
index 0000000..e1a4936
--- /dev/null
+++ b/billinglayer/python/jwt/jwk.py
@@ -0,0 +1,427 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2017 Gehirn Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import hmac
+from warnings import warn
+from abc import (
+ ABC,
+ abstractmethod,
+)
+from typing import (
+ Any,
+ Callable,
+ Dict,
+ Mapping,
+ Type,
+ TypeVar,
+ Union,
+ Optional
+)
+from functools import wraps
+
+import cryptography.hazmat.primitives.serialization as serialization_module
+from cryptography.exceptions import InvalidSignature
+from cryptography.hazmat.backends import default_backend
+from cryptography.hazmat.primitives.asymmetric import padding
+from cryptography.hazmat.primitives.asymmetric.rsa import (
+ rsa_crt_dmp1,
+ rsa_crt_dmq1,
+ rsa_crt_iqmp,
+ rsa_recover_prime_factors,
+ RSAPrivateKey,
+ RSAPrivateNumbers,
+ RSAPublicKey,
+ RSAPublicNumbers,
+)
+from cryptography.hazmat.primitives.hashes import HashAlgorithm
+
+from .exceptions import (
+ MalformedJWKError,
+ UnsupportedKeyTypeError,
+)
+from .utils import (
+ b64encode,
+ b64decode,
+ uint_b64encode,
+ uint_b64decode,
+)
+
+_AJWK = TypeVar("_AJWK", bound="AbstractJWKBase")
+_T = TypeVar("_T")
+
+
+class AbstractJWKBase(ABC):
+
+ @abstractmethod
+ def get_kty(self) -> str:
+ pass # pragma: no cover
+
+ @abstractmethod
+ def get_kid(self) -> str:
+ pass # pragma: no cover
+
+ @abstractmethod
+ def is_sign_key(self) -> bool:
+ pass # pragma: no cover
+
+ @abstractmethod
+ def sign(self, message: bytes, **options) -> bytes:
+ pass # pragma: no cover
+
+ @abstractmethod
+ def verify(self, message: bytes, signature: bytes, **options) -> bool:
+ pass # pragma: no cover
+
+ @abstractmethod
+ def to_dict(self, public_only: bool = True) -> Dict[str, str]:
+ pass # pragma: no cover
+
+ @classmethod
+ @abstractmethod
+ def from_dict(cls: Type[_AJWK], dct: Dict[str, object]) -> _AJWK:
+ pass # pragma: no cover
+
+
+class OctetJWK(AbstractJWKBase):
+
+ def __init__(self, key: bytes, kid=None, **options) -> None:
+ super(AbstractJWKBase, self).__init__()
+ self.key = key
+ self.kid = kid
+
+ optnames = {'use', 'key_ops', 'alg', 'x5u', 'x5c', 'x5t', 'x5t#s256'}
+ self.options = {k: v for k, v in options.items() if k in optnames}
+
+ def get_kty(self):
+ return 'oct'
+
+ def get_kid(self):
+ return self.kid
+
+ def is_sign_key(self) -> bool:
+ return True
+
+ def _get_signer(self, options) -> Callable[[bytes, bytes], bytes]:
+ return options['signer']
+
+ def sign(self, message: bytes, **options) -> bytes:
+ signer = self._get_signer(options)
+ return signer(message, self.key)
+
+ def verify(self, message: bytes, signature: bytes, **options) -> bool:
+ signer = self._get_signer(options)
+ return hmac.compare_digest(signature, signer(message, self.key))
+
+ def to_dict(self, public_only=True):
+ dct = {
+ 'kty': 'oct',
+ 'k': b64encode(self.key),
+ }
+ dct.update(self.options)
+ if self.kid:
+ dct['kid'] = self.kid
+ return dct
+
+ @classmethod
+ def from_dict(cls, dct):
+ try:
+ return cls(b64decode(dct['k']), **dct)
+ except KeyError as why:
+ raise MalformedJWKError('k is required') from why
+
+
+class RSAJWK(AbstractJWKBase):
+ """
+ https://tools.ietf.org/html/rfc7518.html#section-6.3.1
+ """
+
+ def __init__(self, keyobj: Union[RSAPrivateKey, RSAPublicKey],
+ **options) -> None:
+ super(AbstractJWKBase, self).__init__()
+ self.keyobj = keyobj
+
+ optnames = {'use', 'key_ops', 'alg', 'kid',
+ 'x5u', 'x5c', 'x5t', 'x5t#s256', }
+ self.options = {k: v for k, v in options.items() if k in optnames}
+
+ def is_sign_key(self) -> bool:
+ return isinstance(self.keyobj, RSAPrivateKey)
+
+ def _get_hash_fun(self, options) -> Callable[[], HashAlgorithm]:
+ return options['hash_fun']
+
+ def _get_padding(self, options) -> padding.AsymmetricPadding:
+ try:
+ return options['padding']
+ except KeyError:
+            warn('you should not use RSAJWK.verify/sign without the jwa '
+                 'intermediary; falling back to legacy PKCS1v15 padding')
+ return padding.PKCS1v15()
+
+ def sign(self, message: bytes, **options) -> bytes:
+ if isinstance(self.keyobj, RSAPublicKey):
+ raise ValueError("Requires a private key.")
+ hash_fun = self._get_hash_fun(options)
+ _padding = self._get_padding(options)
+ return self.keyobj.sign(message, _padding, hash_fun())
+
+ def verify(self, message: bytes, signature: bytes, **options) -> bool:
+ hash_fun = self._get_hash_fun(options)
+ _padding = self._get_padding(options)
+ if isinstance(self.keyobj, RSAPrivateKey):
+ pubkey = self.keyobj.public_key()
+ else:
+ pubkey = self.keyobj
+ try:
+ pubkey.verify(signature, message, _padding, hash_fun())
+ return True
+ except InvalidSignature:
+ return False
+
+ def get_kty(self):
+ return 'RSA'
+
+ def get_kid(self):
+ return self.options.get('kid')
+
+ def to_dict(self, public_only=True):
+ dct = {
+ 'kty': 'RSA',
+ }
+ dct.update(self.options)
+
+ if isinstance(self.keyobj, RSAPrivateKey):
+ priv_numbers = self.keyobj.private_numbers()
+ pub_numbers = priv_numbers.public_numbers
+ dct.update({
+ 'e': uint_b64encode(pub_numbers.e),
+ 'n': uint_b64encode(pub_numbers.n),
+ })
+ if not public_only:
+ dct.update({
+ 'e': uint_b64encode(pub_numbers.e),
+ 'n': uint_b64encode(pub_numbers.n),
+ 'd': uint_b64encode(priv_numbers.d),
+ 'p': uint_b64encode(priv_numbers.p),
+ 'q': uint_b64encode(priv_numbers.q),
+ 'dp': uint_b64encode(priv_numbers.dmp1),
+ 'dq': uint_b64encode(priv_numbers.dmq1),
+ 'qi': uint_b64encode(priv_numbers.iqmp),
+ })
+ return dct
+ pub_numbers = self.keyobj.public_numbers()
+ dct.update({
+ 'e': uint_b64encode(pub_numbers.e),
+ 'n': uint_b64encode(pub_numbers.n),
+ })
+ return dct
+
+ @classmethod
+ def from_dict(cls, dct):
+ if 'oth' in dct:
+ raise UnsupportedKeyTypeError(
+                'RSA keys with multiple primes are not supported')
+
+ try:
+ e = uint_b64decode(dct['e'])
+ n = uint_b64decode(dct['n'])
+ except KeyError as why:
+ raise MalformedJWKError('e and n are required') from why
+ pub_numbers = RSAPublicNumbers(e, n)
+ if 'd' not in dct:
+ return cls(
+ pub_numbers.public_key(backend=default_backend()), **dct)
+ d = uint_b64decode(dct['d'])
+
+ privparams = {'p', 'q', 'dp', 'dq', 'qi'}
+ product = set(dct.keys()) & privparams
+ if len(product) == 0:
+ p, q = rsa_recover_prime_factors(n, e, d)
+ priv_numbers = RSAPrivateNumbers(
+ d=d,
+ p=p,
+ q=q,
+ dmp1=rsa_crt_dmp1(d, p),
+ dmq1=rsa_crt_dmq1(d, q),
+ iqmp=rsa_crt_iqmp(p, q),
+ public_numbers=pub_numbers)
+ elif product == privparams:
+ priv_numbers = RSAPrivateNumbers(
+ d=d,
+ p=uint_b64decode(dct['p']),
+ q=uint_b64decode(dct['q']),
+ dmp1=uint_b64decode(dct['dp']),
+ dmq1=uint_b64decode(dct['dq']),
+ iqmp=uint_b64decode(dct['qi']),
+ public_numbers=pub_numbers)
+ else:
+ # If the producer includes any of the other private key parameters,
+ # then all of the others MUST be present, with the exception of
+ # "oth", which MUST only be present when more than two prime
+ # factors were used.
+ raise MalformedJWKError(
+                'p, q, dp, dq, qi MUST all be present '
+                'or all of them MUST be absent')
+ return cls(priv_numbers.private_key(backend=default_backend()), **dct)
+
+
+def supported_key_types() -> Dict[str, Type[AbstractJWKBase]]:
+ return {
+ 'oct': OctetJWK,
+ 'RSA': RSAJWK,
+ }
+
+
+def jwk_from_dict(dct: Mapping[str, Any]) -> AbstractJWKBase:
+ if not isinstance(dct, dict): # pragma: no cover
+ raise TypeError('dct must be a dict')
+ if 'kty' not in dct:
+ raise MalformedJWKError('kty MUST be present')
+
+ supported = supported_key_types()
+ kty = dct['kty']
+ if kty not in supported:
+ raise UnsupportedKeyTypeError('unsupported key type: {}'.format(kty))
+ return supported[kty].from_dict(dct)
+
+
+PublicKeyLoaderT = Union[str, Callable[[bytes, object], object]]
+PrivateKeyLoaderT = Union[
+ str,
+ Callable[[bytes, Optional[str], object], object]]
+_Loader = TypeVar("_Loader", PublicKeyLoaderT, PrivateKeyLoaderT)
+_C = TypeVar("_C", bound=Callable[..., Any])
+
+
+# The above LoaderTs should actually not be Union, and this function should be
+# typed something like this. But, this will lose any kwargs from the typing
+# information. Probably needs: https://github.com/python/mypy/issues/3157
+# (func: Callable[[bytes, _Loader], _T])
+# -> Callable[[bytes, Union[str, _Loader]], _T]
+def jwk_from_bytes_argument_conversion(func: _C) -> _C:
+ if not ('private' in func.__name__ or 'public' in func.__name__):
+        raise Exception("the wrapped function must have either 'public'"
+                        " or 'private' in its name")
+
+ @wraps(func)
+ def wrapper(content, loader, **kwargs):
+ # now convert it to a Callable if it's a string
+ if isinstance(loader, str):
+ loader = getattr(serialization_module, loader)
+
+ if kwargs.get('options') is None:
+ kwargs['options'] = {}
+
+ return func(content, loader, **kwargs)
+ return wrapper # type: ignore[return-value]
+
+
+@jwk_from_bytes_argument_conversion
+def jwk_from_private_bytes(
+ content: bytes,
+ private_loader: PrivateKeyLoaderT,
+ *,
+ password: Optional[str] = None,
+ backend: Optional[object] = None,
+ options: Optional[Mapping[str, object]] = None,
+) -> AbstractJWKBase:
+ """This function is meant to be called from jwk_from_bytes"""
+ if options is None:
+ options = {}
+ try:
+ privkey = private_loader(content, password, backend) # type: ignore[operator] # noqa: E501
+ if isinstance(privkey, RSAPrivateKey):
+ return RSAJWK(privkey, **options)
+ raise UnsupportedKeyTypeError('unsupported key type')
+ except ValueError as ex:
+ raise UnsupportedKeyTypeError('this is probably a public key') from ex
+
+
+@jwk_from_bytes_argument_conversion
+def jwk_from_public_bytes(
+ content: bytes,
+ public_loader: PublicKeyLoaderT,
+ *,
+ backend: Optional[object] = None,
+ options: Optional[Mapping[str, object]] = None
+) -> AbstractJWKBase:
+ """This function is meant to be called from jwk_from_bytes"""
+ if options is None:
+ options = {}
+ try:
+ pubkey = public_loader(content, backend) # type: ignore[operator]
+ if isinstance(pubkey, RSAPublicKey):
+ return RSAJWK(pubkey, **options)
+ raise UnsupportedKeyTypeError(
+ 'unsupported key type') # pragma: no cover
+ except ValueError as why:
+ raise UnsupportedKeyTypeError('could not deserialize') from why
+
+
+def jwk_from_bytes(
+ content: bytes,
+ private_loader: PrivateKeyLoaderT,
+ public_loader: PublicKeyLoaderT,
+ *,
+ private_password: Optional[str] = None,
+ backend: Optional[object] = None,
+ options: Optional[Mapping[str, object]] = None,
+) -> AbstractJWKBase:
+ try:
+ return jwk_from_private_bytes(
+ content,
+ private_loader,
+ password=private_password,
+ backend=backend,
+ options=options,
+ )
+ except UnsupportedKeyTypeError:
+ return jwk_from_public_bytes(
+ content,
+ public_loader,
+ backend=backend,
+ options=options,
+ )
+
+
+def jwk_from_pem(
+ pem_content: bytes,
+ private_password: Optional[str] = None,
+ options: Optional[Mapping[str, object]] = None,
+) -> AbstractJWKBase:
+ return jwk_from_bytes(
+ pem_content,
+ private_loader='load_pem_private_key',
+ public_loader='load_pem_public_key',
+ private_password=private_password,
+ backend=None,
+ options=options,
+ )
+
+
+def jwk_from_der(
+ der_content: bytes,
+ private_password: Optional[str] = None,
+ options: Optional[Mapping[str, object]] = None,
+) -> AbstractJWKBase:
+ return jwk_from_bytes(
+ der_content,
+ private_loader='load_der_private_key',
+ public_loader='load_der_public_key',
+ private_password=private_password,
+ backend=None,
+ options=options,
+ )
diff --git a/billinglayer/python/jwt/jwkset.py b/billinglayer/python/jwt/jwkset.py
new file mode 100644
index 0000000..4c7056f
--- /dev/null
+++ b/billinglayer/python/jwt/jwkset.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2017 Gehirn Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from collections import UserList
+from typing import TYPE_CHECKING
+
+from .jwk import AbstractJWKBase, jwk_from_dict
+
+if TYPE_CHECKING:
+ UserListBase = UserList[AbstractJWKBase]
+else:
+ UserListBase = UserList
+
+
+class JWKSet(UserListBase):
+
+ def filter_keys(self, kid=None, kty=None):
+ # When "kid" values are used within a JWK Set, different
+ # keys within the JWK Set SHOULD use distinct "kid" values. (One
+ # example in which different keys might use the same "kid" value is if
+ # they have different "kty" (key type) values but are considered to be
+ # equivalent alternatives by the application using them.)
+
+ if kid and kty:
+ return [key for key in self.data
+ if key.get_kty() == kty and key.get_kid() == kid]
+ if kid:
+ return [key for key in self.data if key.get_kid() == kid]
+ if kty:
+ return [key for key in self.data if key.get_kty() == kty]
+
+ return self.data.copy()
+
+ def to_dict(self, public_only=True):
+ keys = [key.to_dict(public_only=public_only) for key in self.data]
+ return {'keys': keys}
+
+ @classmethod
+ def from_dict(cls, dct):
+ keys = [jwk_from_dict(key_dct) for key_dct in dct.get('keys', [])]
+ return cls(keys)
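+
+
+# Illustrative usage (the JWKS document and kid are placeholders):
+#   keyset = JWKSet.from_dict(jwks_document)
+#   candidates = keyset.filter_keys(kid='my-key-id')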
diff --git a/billinglayer/python/jwt/jws.py b/billinglayer/python/jwt/jws.py
new file mode 100644
index 0000000..18367b6
--- /dev/null
+++ b/billinglayer/python/jwt/jws.py
@@ -0,0 +1,104 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2017 Gehirn Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+from typing import (
+ AbstractSet,
+ Dict,
+ Optional,
+ Tuple,
+)
+
+from .exceptions import (
+ JWSEncodeError,
+ JWSDecodeError,
+)
+from .jwa import (
+ supported_signing_algorithms,
+ AbstractSigningAlgorithm,
+)
+from .jwk import AbstractJWKBase
+from .utils import (
+ b64encode,
+ b64decode,
+)
+
+__all__ = ['JWS']
+
+
+class JWS:
+
+ def __init__(self) -> None:
+ self._supported_algs = supported_signing_algorithms()
+
+ def _retrieve_alg(self, alg: str) -> AbstractSigningAlgorithm:
+ try:
+ return self._supported_algs[alg]
+ except KeyError:
+ raise JWSDecodeError('Unsupported signing algorithm.')
+
+ def encode(self, message: bytes, key: Optional[AbstractJWKBase] = None,
+ alg='HS256',
+ optional_headers: Optional[Dict[str, str]] = None) -> str:
+ if alg not in self._supported_algs: # pragma: no cover
+ raise JWSEncodeError('unsupported algorithm: {}'.format(alg))
+ alg_impl = self._retrieve_alg(alg)
+
+ header = optional_headers.copy() if optional_headers else {}
+ header['alg'] = alg
+
+ header_b64 = b64encode(
+ json.dumps(header, separators=(',', ':')).encode('ascii'))
+ message_b64 = b64encode(message)
+ signing_message = header_b64 + '.' + message_b64
+
+ signature = alg_impl.sign(signing_message.encode('ascii'), key)
+ signature_b64 = b64encode(signature)
+
+ return signing_message + '.' + signature_b64
+
+ def _decode_segments(
+ self, message: str) -> Tuple[Dict[str, str], bytes, bytes, str]:
+ try:
+ signing_message, signature_b64 = message.rsplit('.', 1)
+ header_b64, message_b64 = signing_message.split('.')
+ except ValueError:
+ raise JWSDecodeError('malformed JWS payload')
+
+ header = json.loads(b64decode(header_b64).decode('ascii'))
+ message_bin = b64decode(message_b64)
+ signature = b64decode(signature_b64)
+ return header, message_bin, signature, signing_message
+
+ def decode(self, message: str, key: Optional[AbstractJWKBase] = None,
+ do_verify=True,
+ algorithms: Optional[AbstractSet[str]] = None) -> bytes:
+ if algorithms is None:
+ algorithms = set(supported_signing_algorithms().keys())
+
+ header, message_bin, signature, signing_message = \
+ self._decode_segments(message)
+
+ alg_value = header['alg']
+ if alg_value not in algorithms:
+ raise JWSDecodeError('Unsupported signing algorithm.')
+
+ alg_impl = self._retrieve_alg(alg_value)
+ if do_verify and not alg_impl.verify(
+ signing_message.encode('ascii'), key, signature):
+ raise JWSDecodeError('JWS passed could not be validated')
+
+ return message_bin
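+
+
+# Note: the compact serialization handled above is three base64url
+# segments joined by dots -- b64(header).b64(payload).b64(signature) --
+# which is why _decode_segments splits the string on '.'.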
diff --git a/billinglayer/python/jwt/jwt.py b/billinglayer/python/jwt/jwt.py
new file mode 100644
index 0000000..1c15b1a
--- /dev/null
+++ b/billinglayer/python/jwt/jwt.py
@@ -0,0 +1,119 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2017 Gehirn Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+from datetime import datetime, timezone
+from typing import AbstractSet, Any, Dict, Optional
+
+from jwt.utils import (
+ get_time_from_int,
+)
+from .exceptions import (
+ JWSEncodeError,
+ JWSDecodeError,
+ JWTEncodeError,
+ JWTDecodeError,
+)
+from .jwk import AbstractJWKBase
+from .jws import JWS
+
+
+class JWT:
+
+ def __init__(self):
+ self._jws = JWS()
+
+ def encode(self, payload: Dict[str, Any],
+ key: Optional[AbstractJWKBase] = None, alg='HS256',
+ optional_headers: Optional[Dict[str, str]] = None) -> str:
+ if not isinstance(self, JWT): # pragma: no cover
+ # https://github.com/GehirnInc/python-jwt/issues/15
+ raise RuntimeError(
+ 'encode must be called on a jwt.JWT() instance. '
+ 'Do jwt.JWT().encode(...)')
+ if not isinstance(payload, dict): # pragma: no cover
+ raise TypeError('payload must be a dict')
+ if not (key is None
+ or isinstance(key, AbstractJWKBase)): # pragma: no cover
+ raise TypeError(
+ 'key must be an instance of a class implements '
+ 'jwt.AbstractJWKBase')
+ if not (optional_headers is None
+ or isinstance(optional_headers, dict)): # pragma: no cover
+ raise TypeError('optional_headers must be a dict')
+
+ try:
+ message = json.dumps(payload).encode('utf-8')
+ except ValueError as why:
+ raise JWTEncodeError(
+ 'payload must be able to be encoded to JSON') from why
+
+        optional_headers = optional_headers.copy() if optional_headers else {}
+ optional_headers['typ'] = 'JWT'
+ try:
+ return self._jws.encode(message, key, alg, optional_headers)
+ except JWSEncodeError as why:
+ raise JWTEncodeError('failed to encode to JWT') from why
+
+ def decode(self, message: str, key: Optional[AbstractJWKBase] = None,
+ do_verify=True, algorithms: Optional[AbstractSet[str]] = None,
+ do_time_check: bool = True) -> Dict[str, Any]:
+ if not isinstance(self, JWT): # pragma: no cover
+ # https://github.com/GehirnInc/python-jwt/issues/15
+ raise RuntimeError(
+ 'decode must be called on a jwt.JWT() instance. '
+ 'Do jwt.JWT().decode(...)')
+ if not isinstance(message, str): # pragma: no cover
+ raise TypeError('message must be a str')
+ if not (key is None
+ or isinstance(key, AbstractJWKBase)): # pragma: no cover
+ raise TypeError(
+ 'key must be an instance of a class implements '
+ 'jwt.AbstractJWKBase')
+
+ # utc now with timezone
+ now = datetime.now(timezone.utc)
+ try:
+ message_bin = self._jws.decode(message, key, do_verify, algorithms)
+ except JWSDecodeError as why:
+ raise JWTDecodeError('failed to decode JWT') from why
+ try:
+ payload = json.loads(message_bin.decode('utf-8'))
+ except ValueError as why:
+ raise JWTDecodeError(
+ 'a payload of the JWT is not valid JSON') from why
+
+ # The "exp" (expiration time) claim identifies the expiration time on
+ # or after which the JWT MUST NOT be accepted for processing.
+ if 'exp' in payload and do_time_check:
+ try:
+ exp = get_time_from_int(payload['exp'])
+ except TypeError:
+ raise JWTDecodeError("Invalid Expired value")
+ if now >= exp:
+ raise JWTDecodeError("JWT Expired")
+
+ # The "nbf" (not before) claim identifies the time before which the JWT
+ # MUST NOT be accepted for processing.
+ if 'nbf' in payload and do_time_check:
+ try:
+ nbf = get_time_from_int(payload['nbf'])
+ except TypeError:
+ raise JWTDecodeError('Invalid "Not valid yet" value')
+ if now < nbf:
+ raise JWTDecodeError("JWT Not valid yet")
+
+ return payload
diff --git a/billinglayer/python/jwt/py.typed b/billinglayer/python/jwt/py.typed
new file mode 100644
index 0000000..e69de29
diff --git a/billinglayer/python/jwt/utils.py b/billinglayer/python/jwt/utils.py
new file mode 100644
index 0000000..48f139f
--- /dev/null
+++ b/billinglayer/python/jwt/utils.py
@@ -0,0 +1,75 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2017 Gehirn Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from base64 import (
+ urlsafe_b64encode,
+ urlsafe_b64decode,
+)
+from datetime import datetime, timezone
+
+
+def b64encode(s: bytes) -> str:
+ s_bin = urlsafe_b64encode(s)
+ s_bin = s_bin.replace(b'=', b'')
+ return s_bin.decode('ascii')
+
+
+def b64decode(s: str) -> bytes:
+ s_bin = s.encode('ascii')
+ s_bin += b'=' * (4 - len(s_bin) % 4)
+ return urlsafe_b64decode(s_bin)
+
+
+def uint_b64encode(value: int) -> str:
+ length = 1
+ rem = value >> 8
+ while rem:
+ length += 1
+ rem >>= 8
+
+ uint_bin = value.to_bytes(length, 'big', signed=False)
+ return b64encode(uint_bin)
+
+
+def uint_b64decode(uint_b64: str) -> int:
+ uint_bin = b64decode(uint_b64)
+
+ value = 0
+ for b in uint_bin:
+ value <<= 8
+ value += int(b)
+ return value
+
+
+def get_time_from_int(value: int) -> datetime:
+ """
+ :param value: seconds since the Epoch
+ :return: datetime
+ """
+ if not isinstance(value, int): # pragma: no cover
+ raise TypeError('an int is required')
+ return datetime.fromtimestamp(value, timezone.utc)
+
+
+def get_int_from_datetime(value: datetime) -> int:
+ """
+    :param value: datetime with or without a timezone; note that a naive
+        datetime is interpreted as local time by datetime.timestamp()
+ :return: Seconds since the Epoch
+ """
+ if not isinstance(value, datetime): # pragma: no cover
+ raise TypeError('a datetime is required')
+ return int(value.timestamp())
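+
+
+# For example, the standard RSA public exponent 65537 (0x010001) encodes
+# to the familiar JWK value 'AQAB':
+#   uint_b64encode(65537) == 'AQAB'
+#   uint_b64decode('AQAB') == 65537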
diff --git a/billinglayer/python/lattice.py b/billinglayer/python/lattice.py
new file mode 100644
index 0000000..bc901bd
--- /dev/null
+++ b/billinglayer/python/lattice.py
@@ -0,0 +1,419 @@
+"""Query Meshify for data."""
+import json
+import csv
+from os import getenv
+import getpass
+import pickle
+from pathlib import Path
+import requests
+import click
+
+
+# Configuration comes from the environment; check_setup() prompts for
+# anything that is missing. Never hard-code credentials here.
+MESHIFY_BASE_URL = getenv("MESHIFY_BASE_URL")
+MESHIFY_USERNAME = getenv("MESHIFY_USERNAME")
+MESHIFY_PASSWORD = getenv("MESHIFY_PASSWORD")
+MESHIFY_AUTH = None
+
+
+class NameNotFound(Exception):
+ """Thrown when a name is not found in a list of stuff."""
+
+ def __init__(self, message, name, list_of_stuff, *args):
+ """Initialize the NameNotFound Exception."""
+ self.message = message
+ self.name = name
+ self.list_of_stuff = list_of_stuff
+ super(NameNotFound, self).__init__(message, name, list_of_stuff, *args)
+
+
+def dict_filter(it, *keys):
+ """Filter dictionary results."""
+ for d in it:
+ yield dict((k, d[k]) for k in keys)
+
+
+def check_setup():
+ """Check the global parameters."""
+ global MESHIFY_USERNAME, MESHIFY_PASSWORD, MESHIFY_AUTH, MESHIFY_BASE_URL
+ if not MESHIFY_USERNAME or not MESHIFY_PASSWORD:
+ print("Simplify the usage by setting the meshify username and password as environment variables MESHIFY_USERNAME and MESHIFY_PASSWORD")
+ MESHIFY_USERNAME = input("Meshify Username: ")
+ MESHIFY_PASSWORD = getpass.getpass("Meshify Password: ")
+
+ MESHIFY_AUTH = requests.auth.HTTPBasicAuth(MESHIFY_USERNAME, MESHIFY_PASSWORD)
+
+ if not MESHIFY_BASE_URL:
+ print("Simplify the usage by setting the environment variable MESHIFY_BASE_URL")
+ MESHIFY_BASE_URL = input("Meshify Base URL: ")
+
+
+def find_by_name(name, list_of_stuff):
+ """Find an object in a list of stuff by its name parameter."""
+ for x in list_of_stuff:
+ if x['name'] == name:
+ return x
+ raise NameNotFound("Name not found!", name, list_of_stuff)
+
+
+def GET(endpoint):
+ """Make a query to the meshify API."""
+ check_setup()
+ if endpoint[0] == "/":
+ endpoint = endpoint[1:]
+ q_url = MESHIFY_BASE_URL + endpoint
+ q_req = requests.get(q_url, auth=MESHIFY_AUTH)
+ return json.loads(q_req.text) if q_req.status_code == 200 else []
+
+
+def post_meshify_api(endpoint, data):
+ """Post data to the meshify API."""
+ check_setup()
+ q_url = MESHIFY_BASE_URL + endpoint
+ q_req = requests.post(q_url, data=json.dumps(data), auth=MESHIFY_AUTH)
+ if q_req.status_code != 200:
+ print(q_req.status_code)
+ return json.loads(q_req.text) if q_req.status_code == 200 else []
+
+
+def getNodeTypes():
+    """Get all node types from the Meshify API."""
+    return GET("nodetypes")
+
+
+def getNodes():
+    """Get all nodes from the Meshify API."""
+    return GET("nodes")
+
+
+def getFolders():
+    """Get all folders from the Meshify API."""
+    return GET("folders")
+
+
+def getChannelValues(nodeId):
+    """Get the current channel values for a node."""
+    return GET("data/current?nodeId={}".format(nodeId))
+
+
+def getUsers():
+    """Get all users from the Meshify API."""
+    return GET("users")
+
+
+def decode_channel_parameters(channel):
+ """Decode a channel object's parameters into human-readable format."""
+ channel_types = {
+ 1: 'device',
+ 5: 'static',
+ 6: 'user input',
+ 7: 'system'
+ }
+
+ io_options = {
+ 0: 'readonly',
+ 1: 'readwrite'
+ }
+
+ datatype_options = {
+ 1: "float",
+ 2: 'string',
+ 3: 'integer',
+ 4: 'boolean',
+ 5: 'datetime',
+ 6: 'timespan',
+ 7: 'file',
+ 8: 'latlng'
+ }
+
+ channel['channelType'] = channel_types[channel['channelType']]
+ channel['io'] = io_options[channel['io']]
+ channel['dataType'] = datatype_options[channel['dataType']]
+ return channel
+
+
+def encode_channel_parameters(channel):
+ """Encode a channel object from human-readable format."""
+ channel_types = {
+ 'device': 1,
+ 'static': 5,
+ 'user input': 6,
+ 'system': 7
+ }
+
+ io_options = {
+ 'readonly': False,
+ 'readwrite': True
+ }
+
+ datatype_options = {
+ "float": 1,
+ 'string': 2,
+ 'integer': 3,
+ 'boolean': 4,
+ 'datetime': 5,
+ 'timespan': 6,
+ 'file': 7,
+ 'latlng': 8
+ }
+ try:
+ channel['deviceTypeId'] = int(channel['deviceTypeId'])
+ channel['fromMe'] = channel['fromMe'].lower() == 'true'
+ channel['channelType'] = channel_types[channel['channelType'].lower()]
+ channel['io'] = io_options[channel['io'].lower()]
+ channel['dataType'] = datatype_options[channel['dataType'].lower()]
+ # channel['id'] = 1
+ return channel
+ except KeyError as e:
+ click.echo("Unable to convert channel {} due to bad key: {}".format(channel['name'], e))
+
+
+def make_modbusmap_channel(i, chan, device_type_name):
+ """Make a channel object for a row in the CSV."""
+ json_obj = {
+ "ah": "",
+ "bytary": None,
+ "al": "",
+ "vn": chan['subTitle'], # Name
+ "ct": "number", # ChangeType
+ "le": "16", # Length(16 or 32)
+ "grp": str(chan['guaranteedReportPeriod']), # GuaranteedReportPeriod
+ "la": None,
+ "chn": chan['name'], # ChannelName
+ "un": "1", # DeviceNumber
+ "dn": device_type_name, # deviceName
+ "vm": None,
+ "lrt": "0",
+ "da": "300", # DeviceAddress
+ "a": chan['helpExplanation'], # TagName
+ "c": str(chan['change']), # Change
+ "misc_u": str(chan['units']), # Units
+ "f": "1", # FunctionCode
+ "mrt": str(chan['minReportTime']), # MinimumReportTime
+ "m": "none", # multiplier
+ "m1ch": "2-{}".format(i),
+ "mv": "0", # MultiplierValue
+ "s": "On",
+ "r": "{}-{}".format(chan['min'], chan['max']), # range
+ "t": "int" # type
+ }
+ return json_obj
+
+
+def combine_modbusmap_and_channel(channel_obj, modbus_map):
+ """Add the parameters from the modbus map to the channel object."""
+ channel_part = modbus_map["1"]["addresses"]["300"]
+ for c in channel_part:
+ if channel_part[c]["chn"] == channel_obj['name']:
+ channel_obj['units'] = channel_part[c]["misc_u"]
+ try:
+ min_max_range = channel_part[c]["r"].split("-")
+ channel_obj['min'] = int(min_max_range[0])
+ channel_obj['max'] = int(min_max_range[1])
+ except Exception:
+ channel_obj['min'] = None
+ channel_obj['max'] = None
+
+ channel_obj['change'] = float(channel_part[c]["c"])
+ channel_obj['guaranteedReportPeriod'] = int(channel_part[c]["grp"])
+ channel_obj['minReportTime'] = int(channel_part[c]["mrt"])
+ return channel_obj
+ return False
+
+
+@click.group()
+def cli():
+ """Command Line Interface."""
+ pass
+
+
+@click.command()
+@click.argument("device_type_name")
+@click.option("-o", '--output-file', default=None, help="Where to put the CSV of channels.")
+@click.option("-m", '--modbusmap-file', default="modbusMap.p", help="The location of the modbusMap.p file")
+def get_channel_csv(device_type_name, output_file, modbusmap_file):
+ """Query the meshify API and create a CSV of the current channels."""
+ channel_fieldnames = [
+ 'id',
+ 'name',
+ 'deviceTypeId',
+ 'fromMe',
+ 'io',
+ 'subTitle',
+ 'helpExplanation',
+ 'channelType',
+ 'dataType',
+ 'defaultValue',
+ 'regex',
+ 'regexErrMsg',
+ 'units',
+ 'min',
+ 'max',
+ 'change',
+ 'guaranteedReportPeriod',
+ 'minReportTime'
+ ]
+ devicetypes = GET('devicetypes')
+ this_devicetype = find_by_name(device_type_name, devicetypes)
+ channels = GET('devicetypes/{}/channels'.format(this_devicetype['id']))
+ modbus_map = None
+
+ if Path(modbusmap_file).exists():
+ with open(modbusmap_file, 'rb') as open_mbs_file:
+ modbus_map = pickle.load(open_mbs_file)
+
+ if not output_file:
+ output_file = 'channels_{}.csv'.format(device_type_name)
+
+ with open(output_file, 'w') as csvfile:
+ writer = csv.DictWriter(csvfile, fieldnames=channel_fieldnames)
+
+ writer.writeheader()
+ for ch in channels:
+ if not modbus_map:
+ ch['units'] = None
+ ch['min'] = None
+ ch['max'] = None
+ ch['change'] = None
+ ch['guaranteedReportPeriod'] = None
+ ch['minReportTime'] = None
+ else:
+ combined = combine_modbusmap_and_channel(ch, modbus_map)
+ if combined:
+ ch = combined
+ writer.writerow(decode_channel_parameters(ch))
+
+ click.echo("Wrote channels to {}".format(output_file))
+
+
+@click.command()
+@click.argument("device_type_name")
+@click.argument("csv_file")
+def post_channel_csv(device_type_name, csv_file):
+ """Post values from a CSV to Meshify Channel API."""
+ devicetypes = GET('devicetypes')
+ this_devicetype = find_by_name(device_type_name, devicetypes)
+
+ with open(csv_file, 'r') as inp_file:
+ reader = csv.DictReader(inp_file)
+ for row in dict_filter(reader, 'name',
+ 'deviceTypeId',
+ 'fromMe',
+ 'io',
+ 'subTitle',
+ 'helpExplanation',
+ 'channelType',
+ 'dataType',
+ 'defaultValue',
+ 'regex',
+ 'regexErrMsg'):
+ # print(row)
+ # print(encode_channel_parameters(row))
+ # click.echo(json.dumps(encode_channel_parameters(row), indent=4))
+ if post_meshify_api('devicetypes/{}/channels'.format(this_devicetype['id']), encode_channel_parameters(row)):
+ click.echo("Successfully added channel {}".format(row['name']))
+ else:
+ click.echo("Unable to add channel {}".format(row['name']))
+
+
+@click.command()
+def print_channel_options():
+ """Print channel options for use with the csv files."""
+ channel_types = ['device', 'static', 'user input', 'system']
+ io_options = ['readonly', 'readwrite']
+ datatype_options = [
+ "float",
+ 'string',
+ 'integer',
+ 'boolean',
+ 'datetime',
+ 'timespan',
+ 'file',
+ 'latlng'
+ ]
+
+ click.echo("\n\nchannelType options")
+ click.echo("===================")
+ for chan in channel_types:
+ click.echo(chan)
+
+ click.echo("\n\nio options")
+ click.echo("==========")
+ for i in io_options:
+ click.echo(i)
+
+ click.echo("\n\ndataType options")
+ click.echo("================")
+ for d in datatype_options:
+ click.echo(d)
+
+
+@click.command()
+@click.argument("device_type_name")
+@click.argument("csv_file")
+def create_modbusMap(device_type_name, csv_file):
+ """Create modbusMap.p from channel csv file."""
+ modbusMap = {
+ "1": {
+ "c": "ETHERNET/IP",
+ "b": "192.168.1.10",
+ "addresses": {
+ "300": {}
+ },
+ "f": "Off",
+ "p": "",
+ "s": "1"
+ },
+ "2": {
+ "c": "M1-485",
+ "b": "9600",
+ "addresses": {},
+ "f": "Off",
+ "p": "None",
+ "s": "1"
+ }
+ }
+ ind = 1
+ with open(csv_file, 'r') as inp_file:
+ reader = csv.DictReader(inp_file)
+ for row in reader:
+ modbusMap["1"]["addresses"]["300"]["2-{}".format(ind)] = make_modbusmap_channel(ind, row, device_type_name)
+ ind += 1
+ with open("modbusMap.p", 'wb') as mod_map_file:
+ pickle.dump(modbusMap, mod_map_file, protocol=0)
+
+ with open("modbusMap.json", 'w') as json_file:
+ json.dump(modbusMap, json_file, indent=4)
+
+
+@click.command()
+@click.option("-i", "--input-file", default="modbusMap.p", help="The modbus map pickle file to convert.")
+@click.option("-o", "--output", default="modbusMap.json", help="The modbus map json file output filename.")
+def pickle_to_json(input_file, output):
+ """Convert a pickle file to a json file."""
+ if not Path(input_file).exists():
+ click.echo("Pickle file {} does not exist".format(input_file))
+ return
+
+ with open(input_file, 'rb') as picklefile:
+ input_contents = pickle.load(picklefile)
+
+ with open(output, 'w') as outfile:
+ json.dump(input_contents, outfile, indent=4)
+ click.echo("Wrote from {} to {}.".format(input_file, output))
+
+@click.command()
+@click.option("-i", "--input-file", default="modbusMap.json", help="The modbus map json file to convert.")
+@click.option("-o", "--output", default="modbusMap.p", help="The modbus map pickle file output filename.")
+def json_to_pickle(input_file, output):
+    """Convert a JSON file to a pickle file."""
+ if not Path(input_file).exists():
+ click.echo("JSON file {} does not exist".format(input_file))
+ return
+
+ with open(input_file, 'rb') as json_file:
+ input_contents = json.load(json_file)
+
+ with open(output, 'wb') as outfile:
+ pickle.dump(input_contents, outfile, protocol=0)
+ click.echo("Wrote from {} to {}.".format(input_file, output))
+
+
+cli.add_command(get_channel_csv)
+cli.add_command(post_channel_csv)
+cli.add_command(print_channel_options)
+cli.add_command(create_modbusMap)
+cli.add_command(pickle_to_json)
+cli.add_command(json_to_pickle)
+
+if __name__ == '__main__':
+ cli()
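+
+# Example invocations (with click >= 7 the command names use dashes;
+# older click versions keep the underscored function names):
+#   python lattice.py get-channel-csv MyDeviceType -o channels.csv
+#   python lattice.py print-channel-options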
diff --git a/billinglayer/python/pycparser-2.21.dist-info/INSTALLER b/billinglayer/python/pycparser-2.21.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/billinglayer/python/pycparser-2.21.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/billinglayer/python/pycparser-2.21.dist-info/LICENSE b/billinglayer/python/pycparser-2.21.dist-info/LICENSE
new file mode 100644
index 0000000..ea215f2
--- /dev/null
+++ b/billinglayer/python/pycparser-2.21.dist-info/LICENSE
@@ -0,0 +1,27 @@
+pycparser -- A C parser in Python
+
+Copyright (c) 2008-2020, Eli Bendersky
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+* Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+* Neither the name of Eli Bendersky nor the names of its contributors may
+ be used to endorse or promote products derived from this software without
+ specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
+GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/billinglayer/python/pycparser-2.21.dist-info/METADATA b/billinglayer/python/pycparser-2.21.dist-info/METADATA
new file mode 100644
index 0000000..1d0fbd6
--- /dev/null
+++ b/billinglayer/python/pycparser-2.21.dist-info/METADATA
@@ -0,0 +1,31 @@
+Metadata-Version: 2.1
+Name: pycparser
+Version: 2.21
+Summary: C parser in Python
+Home-page: https://github.com/eliben/pycparser
+Author: Eli Bendersky
+Author-email: eliben@gmail.com
+Maintainer: Eli Bendersky
+License: BSD
+Platform: Cross Platform
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*
+
+
+pycparser is a complete parser of the C language, written in
+pure Python using the PLY parsing library.
+It parses C code into an AST and can serve as a front-end for
+C compilers or analysis tools.
+
+
diff --git a/billinglayer/python/pycparser-2.21.dist-info/RECORD b/billinglayer/python/pycparser-2.21.dist-info/RECORD
new file mode 100644
index 0000000..2049707
--- /dev/null
+++ b/billinglayer/python/pycparser-2.21.dist-info/RECORD
@@ -0,0 +1,41 @@
+pycparser-2.21.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+pycparser-2.21.dist-info/LICENSE,sha256=Pn3yW437ZYyakVAZMNTZQ7BQh6g0fH4rQyVhavU1BHs,1536
+pycparser-2.21.dist-info/METADATA,sha256=GvTEQA9yKj0nvP4mknfoGpMvjaJXCQjQANcQHrRrAxc,1108
+pycparser-2.21.dist-info/RECORD,,
+pycparser-2.21.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110
+pycparser-2.21.dist-info/top_level.txt,sha256=c-lPcS74L_8KoH7IE6PQF5ofyirRQNV4VhkbSFIPeWM,10
+pycparser/__init__.py,sha256=WUEp5D0fuHBH9Q8c1fYvR2eKWfj-CNghLf2MMlQLI1I,2815
+pycparser/__pycache__/__init__.cpython-311.pyc,,
+pycparser/__pycache__/_ast_gen.cpython-311.pyc,,
+pycparser/__pycache__/_build_tables.cpython-311.pyc,,
+pycparser/__pycache__/ast_transforms.cpython-311.pyc,,
+pycparser/__pycache__/c_ast.cpython-311.pyc,,
+pycparser/__pycache__/c_generator.cpython-311.pyc,,
+pycparser/__pycache__/c_lexer.cpython-311.pyc,,
+pycparser/__pycache__/c_parser.cpython-311.pyc,,
+pycparser/__pycache__/lextab.cpython-311.pyc,,
+pycparser/__pycache__/plyparser.cpython-311.pyc,,
+pycparser/__pycache__/yacctab.cpython-311.pyc,,
+pycparser/_ast_gen.py,sha256=0JRVnDW-Jw-3IjVlo8je9rbAcp6Ko7toHAnB5zi7h0Q,10555
+pycparser/_build_tables.py,sha256=oZCd3Plhq-vkV-QuEsaahcf-jUI6-HgKsrAL9gvFzuU,1039
+pycparser/_c_ast.cfg,sha256=ld5ezE9yzIJFIVAUfw7ezJSlMi4nXKNCzfmqjOyQTNo,4255
+pycparser/ast_transforms.py,sha256=GTMYlUgWmXd5wJVyovXY1qzzAqjxzCpVVg0664dKGBs,5691
+pycparser/c_ast.py,sha256=HWeOrfYdCY0u5XaYhE1i60uVyE3yMWdcxzECUX-DqJw,31445
+pycparser/c_generator.py,sha256=yi6Mcqxv88J5ue8k5-mVGxh3iJ37iD4QyF-sWcGjC-8,17772
+pycparser/c_lexer.py,sha256=xCpjIb6vOUebBJpdifidb08y7XgAsO3T1gNGXJT93-w,17167
+pycparser/c_parser.py,sha256=_8y3i52bL6SUK21KmEEl0qzHxe-0eZRzjZGkWg8gQ4A,73680
+pycparser/lextab.py,sha256=fIxBAHYRC418oKF52M7xb8_KMj3K-tHx0TzZiKwxjPM,8504
+pycparser/ply/__init__.py,sha256=q4s86QwRsYRa20L9ueSxfh-hPihpftBjDOvYa2_SS2Y,102
+pycparser/ply/__pycache__/__init__.cpython-311.pyc,,
+pycparser/ply/__pycache__/cpp.cpython-311.pyc,,
+pycparser/ply/__pycache__/ctokens.cpython-311.pyc,,
+pycparser/ply/__pycache__/lex.cpython-311.pyc,,
+pycparser/ply/__pycache__/yacc.cpython-311.pyc,,
+pycparser/ply/__pycache__/ygen.cpython-311.pyc,,
+pycparser/ply/cpp.py,sha256=UtC3ylTWp5_1MKA-PLCuwKQR8zSOnlGuGGIdzj8xS98,33282
+pycparser/ply/ctokens.py,sha256=MKksnN40TehPhgVfxCJhjj_BjL943apreABKYz-bl0Y,3177
+pycparser/ply/lex.py,sha256=7Qol57x702HZwjA3ZLp-84CUEWq1EehW-N67Wzghi-M,42918
+pycparser/ply/yacc.py,sha256=eatSDkRLgRr6X3-hoDk_SQQv065R0BdL2K7fQ54CgVM,137323
+pycparser/ply/ygen.py,sha256=2JYNeYtrPz1JzLSLO3d4GsS8zJU8jY_I_CR1VI9gWrA,2251
+pycparser/plyparser.py,sha256=8tLOoEytcapvWrr1JfCf7Dog-wulBtS1YrDs8S7JfMo,4875
+pycparser/yacctab.py,sha256=j_fVNIyDWDRVk7eWMqQtlBw2AwUSV5JTrtT58l7zis0,205652
diff --git a/billinglayer/python/pycparser-2.21.dist-info/WHEEL b/billinglayer/python/pycparser-2.21.dist-info/WHEEL
new file mode 100644
index 0000000..ef99c6c
--- /dev/null
+++ b/billinglayer/python/pycparser-2.21.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.34.2)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/billinglayer/python/pycparser-2.21.dist-info/top_level.txt b/billinglayer/python/pycparser-2.21.dist-info/top_level.txt
new file mode 100644
index 0000000..dc1c9e1
--- /dev/null
+++ b/billinglayer/python/pycparser-2.21.dist-info/top_level.txt
@@ -0,0 +1 @@
+pycparser
diff --git a/billinglayer/python/pycparser/__init__.py b/billinglayer/python/pycparser/__init__.py
new file mode 100644
index 0000000..d82eb2d
--- /dev/null
+++ b/billinglayer/python/pycparser/__init__.py
@@ -0,0 +1,90 @@
+#-----------------------------------------------------------------
+# pycparser: __init__.py
+#
+# This package file exports some convenience functions for
+# interacting with pycparser
+#
+# Eli Bendersky [https://eli.thegreenplace.net/]
+# License: BSD
+#-----------------------------------------------------------------
+__all__ = ['c_lexer', 'c_parser', 'c_ast']
+__version__ = '2.21'
+
+import io
+from subprocess import check_output
+from .c_parser import CParser
+
+
+def preprocess_file(filename, cpp_path='cpp', cpp_args=''):
+ """ Preprocess a file using cpp.
+
+ filename:
+ Name of the file you want to preprocess.
+
+ cpp_path:
+ cpp_args:
+ Refer to the documentation of parse_file for the meaning of these
+ arguments.
+
+ When successful, returns the preprocessed file's contents.
+ Errors from cpp will be printed out.
+ """
+ path_list = [cpp_path]
+ if isinstance(cpp_args, list):
+ path_list += cpp_args
+ elif cpp_args != '':
+ path_list += [cpp_args]
+ path_list += [filename]
+
+ try:
+ # Note the use of universal_newlines to treat all newlines
+ # as \n for Python's purpose
+ text = check_output(path_list, universal_newlines=True)
+ except OSError as e:
+ raise RuntimeError("Unable to invoke 'cpp'. " +
+ 'Make sure its path was passed correctly\n' +
+ ('Original error: %s' % e))
+
+ return text
+
+
+def parse_file(filename, use_cpp=False, cpp_path='cpp', cpp_args='',
+ parser=None):
+ """ Parse a C file using pycparser.
+
+ filename:
+ Name of the file you want to parse.
+
+ use_cpp:
+ Set to True if you want to execute the C pre-processor
+ on the file prior to parsing it.
+
+ cpp_path:
+ If use_cpp is True, this is the path to 'cpp' on your
+ system. If no path is provided, it attempts to just
+ execute 'cpp', so it must be in your PATH.
+
+ cpp_args:
+ If use_cpp is True, set this to the command-line argument string(s)
+ for cpp. Be careful with quotes - it's best to pass a raw string
+ (r'') here. For example:
+ r'-I../utils/fake_libc_include'
+ If several arguments are required, pass a list of strings.
+
+ parser:
+ Optional parser object to be used instead of the default CParser
+
+ When successful, an AST is returned. A ParseError is raised
+ if the file doesn't parse successfully.
+
+ Errors from cpp will be printed out.
+ """
+ if use_cpp:
+ text = preprocess_file(filename, cpp_path, cpp_args)
+ else:
+ with io.open(filename) as f:
+ text = f.read()
+
+ if parser is None:
+ parser = CParser()
+ return parser.parse(text, filename)
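
A minimal usage sketch for the two entry points above (the file name 'demo.c'
and the include path are hypothetical placeholders, not part of this diff):

    import pycparser

    # Parse a file that needs no preprocessing.
    ast = pycparser.parse_file('demo.c')

    # Or run cpp first; per the docstring, cpp_args is a raw string for a
    # single argument, or a list of strings for several.
    ast = pycparser.parse_file(
        'demo.c',
        use_cpp=True,
        cpp_path='cpp',
        cpp_args=r'-I../utils/fake_libc_include')

    ast.show()  # pretty-print the AST (see Node.show in c_ast.py below)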
diff --git a/billinglayer/python/pycparser/__pycache__/__init__.cpython-311.pyc b/billinglayer/python/pycparser/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..cf8e2c8
Binary files /dev/null and b/billinglayer/python/pycparser/__pycache__/__init__.cpython-311.pyc differ
diff --git a/billinglayer/python/pycparser/__pycache__/_ast_gen.cpython-311.pyc b/billinglayer/python/pycparser/__pycache__/_ast_gen.cpython-311.pyc
new file mode 100644
index 0000000..b8849f7
Binary files /dev/null and b/billinglayer/python/pycparser/__pycache__/_ast_gen.cpython-311.pyc differ
diff --git a/billinglayer/python/pycparser/__pycache__/_build_tables.cpython-311.pyc b/billinglayer/python/pycparser/__pycache__/_build_tables.cpython-311.pyc
new file mode 100644
index 0000000..cebb5e6
Binary files /dev/null and b/billinglayer/python/pycparser/__pycache__/_build_tables.cpython-311.pyc differ
diff --git a/billinglayer/python/pycparser/__pycache__/ast_transforms.cpython-311.pyc b/billinglayer/python/pycparser/__pycache__/ast_transforms.cpython-311.pyc
new file mode 100644
index 0000000..aed4851
Binary files /dev/null and b/billinglayer/python/pycparser/__pycache__/ast_transforms.cpython-311.pyc differ
diff --git a/billinglayer/python/pycparser/__pycache__/c_ast.cpython-311.pyc b/billinglayer/python/pycparser/__pycache__/c_ast.cpython-311.pyc
new file mode 100644
index 0000000..c8878db
Binary files /dev/null and b/billinglayer/python/pycparser/__pycache__/c_ast.cpython-311.pyc differ
diff --git a/billinglayer/python/pycparser/__pycache__/c_generator.cpython-311.pyc b/billinglayer/python/pycparser/__pycache__/c_generator.cpython-311.pyc
new file mode 100644
index 0000000..9df5995
Binary files /dev/null and b/billinglayer/python/pycparser/__pycache__/c_generator.cpython-311.pyc differ
diff --git a/billinglayer/python/pycparser/__pycache__/c_lexer.cpython-311.pyc b/billinglayer/python/pycparser/__pycache__/c_lexer.cpython-311.pyc
new file mode 100644
index 0000000..4b107a9
Binary files /dev/null and b/billinglayer/python/pycparser/__pycache__/c_lexer.cpython-311.pyc differ
diff --git a/billinglayer/python/pycparser/__pycache__/c_parser.cpython-311.pyc b/billinglayer/python/pycparser/__pycache__/c_parser.cpython-311.pyc
new file mode 100644
index 0000000..b3ddea3
Binary files /dev/null and b/billinglayer/python/pycparser/__pycache__/c_parser.cpython-311.pyc differ
diff --git a/billinglayer/python/pycparser/__pycache__/lextab.cpython-311.pyc b/billinglayer/python/pycparser/__pycache__/lextab.cpython-311.pyc
new file mode 100644
index 0000000..c67e313
Binary files /dev/null and b/billinglayer/python/pycparser/__pycache__/lextab.cpython-311.pyc differ
diff --git a/billinglayer/python/pycparser/__pycache__/plyparser.cpython-311.pyc b/billinglayer/python/pycparser/__pycache__/plyparser.cpython-311.pyc
new file mode 100644
index 0000000..d3bba7f
Binary files /dev/null and b/billinglayer/python/pycparser/__pycache__/plyparser.cpython-311.pyc differ
diff --git a/billinglayer/python/pycparser/__pycache__/yacctab.cpython-311.pyc b/billinglayer/python/pycparser/__pycache__/yacctab.cpython-311.pyc
new file mode 100644
index 0000000..42a0f32
Binary files /dev/null and b/billinglayer/python/pycparser/__pycache__/yacctab.cpython-311.pyc differ
diff --git a/billinglayer/python/pycparser/_ast_gen.py b/billinglayer/python/pycparser/_ast_gen.py
new file mode 100644
index 0000000..0f7d330
--- /dev/null
+++ b/billinglayer/python/pycparser/_ast_gen.py
@@ -0,0 +1,336 @@
+#-----------------------------------------------------------------
+# _ast_gen.py
+#
+# Generates the AST Node classes from a specification given in
+# a configuration file
+#
+# The design of this module was inspired by astgen.py from the
+# Python 2.5 code-base.
+#
+# Eli Bendersky [https://eli.thegreenplace.net/]
+# License: BSD
+#-----------------------------------------------------------------
+from string import Template
+
+
+class ASTCodeGenerator(object):
+ def __init__(self, cfg_filename='_c_ast.cfg'):
+ """ Initialize the code generator from a configuration
+ file.
+ """
+ self.cfg_filename = cfg_filename
+ self.node_cfg = [NodeCfg(name, contents)
+ for (name, contents) in self.parse_cfgfile(cfg_filename)]
+
+ def generate(self, file=None):
+ """ Generates the code into file, an open file buffer.
+ """
+ src = Template(_PROLOGUE_COMMENT).substitute(
+ cfg_filename=self.cfg_filename)
+
+ src += _PROLOGUE_CODE
+ for node_cfg in self.node_cfg:
+ src += node_cfg.generate_source() + '\n\n'
+
+ file.write(src)
+
+ def parse_cfgfile(self, filename):
+ """ Parse the configuration file and yield pairs of
+ (name, contents) for each node.
+ """
+ with open(filename, "r") as f:
+ for line in f:
+ line = line.strip()
+ if not line or line.startswith('#'):
+ continue
+ colon_i = line.find(':')
+ lbracket_i = line.find('[')
+ rbracket_i = line.find(']')
+ if colon_i < 1 or lbracket_i <= colon_i or rbracket_i <= lbracket_i:
+ raise RuntimeError("Invalid line in %s:\n%s\n" % (filename, line))
+
+ name = line[:colon_i]
+ val = line[lbracket_i + 1:rbracket_i]
+ vallist = [v.strip() for v in val.split(',')] if val else []
+ yield name, vallist
+
+
+class NodeCfg(object):
+ """ Node configuration.
+
+ name: node name
+ contents: a list of contents - attributes and child nodes
+ See comment at the top of the configuration file for details.
+ """
+
+ def __init__(self, name, contents):
+ self.name = name
+ self.all_entries = []
+ self.attr = []
+ self.child = []
+ self.seq_child = []
+
+ for entry in contents:
+ clean_entry = entry.rstrip('*')
+ self.all_entries.append(clean_entry)
+
+ if entry.endswith('**'):
+ self.seq_child.append(clean_entry)
+ elif entry.endswith('*'):
+ self.child.append(clean_entry)
+ else:
+ self.attr.append(entry)
+
+ def generate_source(self):
+ src = self._gen_init()
+ src += '\n' + self._gen_children()
+ src += '\n' + self._gen_iter()
+ src += '\n' + self._gen_attr_names()
+ return src
+
+ def _gen_init(self):
+ src = "class %s(Node):\n" % self.name
+
+ if self.all_entries:
+ args = ', '.join(self.all_entries)
+ slots = ', '.join("'{0}'".format(e) for e in self.all_entries)
+ slots += ", 'coord', '__weakref__'"
+ arglist = '(self, %s, coord=None)' % args
+ else:
+ slots = "'coord', '__weakref__'"
+ arglist = '(self, coord=None)'
+
+ src += " __slots__ = (%s)\n" % slots
+ src += " def __init__%s:\n" % arglist
+
+ for name in self.all_entries + ['coord']:
+ src += " self.%s = %s\n" % (name, name)
+
+ return src
+
+ def _gen_children(self):
+ src = ' def children(self):\n'
+
+ if self.all_entries:
+ src += ' nodelist = []\n'
+
+ for child in self.child:
+ src += (
+ ' if self.%(child)s is not None:' +
+ ' nodelist.append(("%(child)s", self.%(child)s))\n') % (
+ dict(child=child))
+
+ for seq_child in self.seq_child:
+ src += (
+ ' for i, child in enumerate(self.%(child)s or []):\n'
+ ' nodelist.append(("%(child)s[%%d]" %% i, child))\n') % (
+ dict(child=seq_child))
+
+ src += ' return tuple(nodelist)\n'
+ else:
+ src += ' return ()\n'
+
+ return src
+
+ def _gen_iter(self):
+ src = ' def __iter__(self):\n'
+
+ if self.all_entries:
+ for child in self.child:
+ src += (
+ ' if self.%(child)s is not None:\n' +
+ ' yield self.%(child)s\n') % (dict(child=child))
+
+ for seq_child in self.seq_child:
+ src += (
+ ' for child in (self.%(child)s or []):\n'
+ ' yield child\n') % (dict(child=seq_child))
+
+ if not (self.child or self.seq_child):
+ # Empty generator
+ src += (
+ ' return\n' +
+ ' yield\n')
+ else:
+ # Empty generator
+ src += (
+ ' return\n' +
+ ' yield\n')
+
+ return src
+
+ def _gen_attr_names(self):
+ src = " attr_names = (" + ''.join("%r, " % nm for nm in self.attr) + ')'
+ return src
+
+
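+# Illustration (not part of the generator): the entry "Case: [expr*, stmts**]"
+# from _c_ast.cfg becomes NodeCfg('Case', ['expr*', 'stmts**']); 'expr*' is a
+# single child node, 'stmts**' a sequence of child nodes, and a suffix-less
+# entry would be a plain attribute. generate_source() then emits a Case class
+# whose children() labels the sequence elements "stmts[0]", "stmts[1]", etc.
+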
+_PROLOGUE_COMMENT = \
+r'''#-----------------------------------------------------------------
+# ** ATTENTION **
+# This code was automatically generated from the file:
+# $cfg_filename
+#
+# Do not modify it directly. Modify the configuration file and
+# run the generator again.
+# ** ** *** ** **
+#
+# pycparser: c_ast.py
+#
+# AST Node classes.
+#
+# Eli Bendersky [https://eli.thegreenplace.net/]
+# License: BSD
+#-----------------------------------------------------------------
+
+'''
+
+_PROLOGUE_CODE = r'''
+import sys
+
+def _repr(obj):
+ """
+ Get the representation of an object, with dedicated pprint-like format for lists.
+ """
+ if isinstance(obj, list):
+ return '[' + (',\n '.join((_repr(e).replace('\n', '\n ') for e in obj))) + '\n]'
+ else:
+ return repr(obj)
+
+class Node(object):
+ __slots__ = ()
+ """ Abstract base class for AST nodes.
+ """
+ def __repr__(self):
+ """ Generates a python representation of the current node
+ """
+ result = self.__class__.__name__ + '('
+
+ indent = ''
+ separator = ''
+ for name in self.__slots__[:-2]:
+ result += separator
+ result += indent
+ result += name + '=' + (_repr(getattr(self, name)).replace('\n', '\n ' + (' ' * (len(name) + len(self.__class__.__name__)))))
+
+ separator = ','
+ indent = '\n ' + (' ' * len(self.__class__.__name__))
+
+ result += indent + ')'
+
+ return result
+
+ def children(self):
+ """ A sequence of all children that are Nodes
+ """
+ pass
+
+ def show(self, buf=sys.stdout, offset=0, attrnames=False, nodenames=False, showcoord=False, _my_node_name=None):
+ """ Pretty print the Node and all its attributes and
+ children (recursively) to a buffer.
+
+ buf:
+ Open IO buffer into which the Node is printed.
+
+ offset:
+ Initial offset (amount of leading spaces)
+
+ attrnames:
+ True if you want to see the attribute names in
+ name=value pairs. False to only see the values.
+
+ nodenames:
+ True if you want to see the actual node names
+ within their parents.
+
+ showcoord:
+ True if you want the coordinates of each Node to be
+ displayed.
+ """
+ lead = ' ' * offset
+ if nodenames and _my_node_name is not None:
+ buf.write(lead + self.__class__.__name__+ ' <' + _my_node_name + '>: ')
+ else:
+ buf.write(lead + self.__class__.__name__+ ': ')
+
+ if self.attr_names:
+ if attrnames:
+ nvlist = [(n, getattr(self,n)) for n in self.attr_names]
+ attrstr = ', '.join('%s=%s' % nv for nv in nvlist)
+ else:
+ vlist = [getattr(self, n) for n in self.attr_names]
+ attrstr = ', '.join('%s' % v for v in vlist)
+ buf.write(attrstr)
+
+ if showcoord:
+ buf.write(' (at %s)' % self.coord)
+ buf.write('\n')
+
+ for (child_name, child) in self.children():
+ child.show(
+ buf,
+ offset=offset + 2,
+ attrnames=attrnames,
+ nodenames=nodenames,
+ showcoord=showcoord,
+ _my_node_name=child_name)
+
+
+class NodeVisitor(object):
+ """ A base NodeVisitor class for visiting c_ast nodes.
+ Subclass it and define your own visit_XXX methods, where
+ XXX is the class name you want to visit with these
+ methods.
+
+ For example:
+
+ class ConstantVisitor(NodeVisitor):
+ def __init__(self):
+ self.values = []
+
+ def visit_Constant(self, node):
+ self.values.append(node.value)
+
+ Creates a list of values of all the constant nodes
+ encountered below the given node. To use it:
+
+ cv = ConstantVisitor()
+ cv.visit(node)
+
+ Notes:
+
+ * generic_visit() will be called for AST nodes for which
+ no visit_XXX method was defined.
+ * The children of nodes for which a visit_XXX was
+ defined will not be visited - if you need this, call
+ generic_visit() on the node.
+ You can use:
+ NodeVisitor.generic_visit(self, node)
+ * Modeled after Python's own AST visiting facilities
+ (the ast module of Python 3.0)
+ """
+
+ _method_cache = None
+
+ def visit(self, node):
+ """ Visit a node.
+ """
+
+ if self._method_cache is None:
+ self._method_cache = {}
+
+ visitor = self._method_cache.get(node.__class__.__name__, None)
+ if visitor is None:
+ method = 'visit_' + node.__class__.__name__
+ visitor = getattr(self, method, self.generic_visit)
+ self._method_cache[node.__class__.__name__] = visitor
+
+ return visitor(node)
+
+ def generic_visit(self, node):
+ """ Called if no explicit visitor function exists for a
+ node. Implements preorder visiting of the node.
+ """
+ for c in node:
+ self.visit(c)
+
+'''
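
How this generator is driven can be seen in _build_tables.py just below; a
standalone sketch (run from the pycparser source directory, where _c_ast.cfg
lives) would be:

    from _ast_gen import ASTCodeGenerator

    ast_gen = ASTCodeGenerator('_c_ast.cfg')
    with open('c_ast.py', 'w') as f:
        ast_gen.generate(f)  # prologue comment + code, then one class per entry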
diff --git a/billinglayer/python/pycparser/_build_tables.py b/billinglayer/python/pycparser/_build_tables.py
new file mode 100644
index 0000000..958381a
--- /dev/null
+++ b/billinglayer/python/pycparser/_build_tables.py
@@ -0,0 +1,37 @@
+#-----------------------------------------------------------------
+# pycparser: _build_tables.py
+#
+# A dummy module for generating the lexing/parsing tables and
+# compiling them into .pyc for faster execution in optimized mode.
+# Also generates AST code from the configuration file.
+# Should be called from the pycparser directory.
+#
+# Eli Bendersky [https://eli.thegreenplace.net/]
+# License: BSD
+#-----------------------------------------------------------------
+
+# Insert '.' and '..' as first entries to the search path for modules.
+# Restricted environments like embeddable python do not include the
+# current working directory on startup.
+import sys
+sys.path[0:0] = ['.', '..']
+
+# Generate c_ast.py
+from _ast_gen import ASTCodeGenerator
+ast_gen = ASTCodeGenerator('_c_ast.cfg')
+ast_gen.generate(open('c_ast.py', 'w'))
+
+from pycparser import c_parser
+
+# Generates the tables
+#
+c_parser.CParser(
+ lex_optimize=True,
+ yacc_debug=False,
+ yacc_optimize=True)
+
+# Load to compile into .pyc
+#
+import lextab
+import yacctab
+import c_ast
diff --git a/billinglayer/python/pycparser/_c_ast.cfg b/billinglayer/python/pycparser/_c_ast.cfg
new file mode 100644
index 0000000..0626533
--- /dev/null
+++ b/billinglayer/python/pycparser/_c_ast.cfg
@@ -0,0 +1,195 @@
+#-----------------------------------------------------------------
+# pycparser: _c_ast.cfg
+#
+# Defines the AST Node classes used in pycparser.
+#
+# Each entry is a Node sub-class name, listing the attributes
+# and child nodes of the class:
+# * - a child node
+# ** - a sequence of child nodes
+# - an attribute
+#
+# Eli Bendersky [https://eli.thegreenplace.net/]
+# License: BSD
+#-----------------------------------------------------------------
+
+# ArrayDecl is a nested declaration of an array with the given type.
+# dim: the dimension (for example, constant 42)
+# dim_quals: list of dimension qualifiers, to support C99's allowing 'const'
+# and 'static' within the array dimension in function declarations.
+ArrayDecl: [type*, dim*, dim_quals]
+
+ArrayRef: [name*, subscript*]
+
+# op: =, +=, /= etc.
+#
+Assignment: [op, lvalue*, rvalue*]
+
+Alignas: [alignment*]
+
+BinaryOp: [op, left*, right*]
+
+Break: []
+
+Case: [expr*, stmts**]
+
+Cast: [to_type*, expr*]
+
+# Compound statement in C99 is a list of block items (declarations or
+# statements).
+#
+Compound: [block_items**]
+
+# Compound literal (anonymous aggregate) for C99.
+# (type-name) {initializer_list}
+# type: the typename
+# init: InitList for the initializer list
+#
+CompoundLiteral: [type*, init*]
+
+# type: int, char, float, string, etc.
+#
+Constant: [type, value]
+
+Continue: []
+
+# name: the variable being declared
+# quals: list of qualifiers (const, volatile)
+# funcspec: list of function specifiers (e.g. inline in C99)
+# storage: list of storage specifiers (extern, register, etc.)
+# type: declaration type (probably nested with all the modifiers)
+# init: initialization value, or None
+# bitsize: bit field size, or None
+#
+Decl: [name, quals, align, storage, funcspec, type*, init*, bitsize*]
+
+DeclList: [decls**]
+
+Default: [stmts**]
+
+DoWhile: [cond*, stmt*]
+
+# Represents the ellipsis (...) parameter in a function
+# declaration
+#
+EllipsisParam: []
+
+# An empty statement (a semicolon ';' on its own)
+#
+EmptyStatement: []
+
+# Enumeration type specifier
+# name: an optional ID
+# values: an EnumeratorList
+#
+Enum: [name, values*]
+
+# A name/value pair for enumeration values
+#
+Enumerator: [name, value*]
+
+# A list of enumerators
+#
+EnumeratorList: [enumerators**]
+
+# A list of expressions separated by the comma operator.
+#
+ExprList: [exprs**]
+
+# This is the top of the AST, representing a single C file (a
+# translation unit in K&R jargon). It contains a list of
+# "external-declaration"s, which is either declarations (Decl),
+# Typedef or function definitions (FuncDef).
+#
+FileAST: [ext**]
+
+# for (init; cond; next) stmt
+#
+For: [init*, cond*, next*, stmt*]
+
+# name: Id
+# args: ExprList
+#
+FuncCall: [name*, args*]
+
+# type (args)
+#
+FuncDecl: [args*, type*]
+
+# Function definition: a declarator for the function name and
+# a body, which is a compound statement.
+# There's an optional list of parameter declarations for old
+# K&R-style definitions
+#
+FuncDef: [decl*, param_decls**, body*]
+
+Goto: [name]
+
+ID: [name]
+
+# Holder for types that are a simple identifier (e.g. the built-in
+# types void, char etc. and typedef-defined types)
+#
+IdentifierType: [names]
+
+If: [cond*, iftrue*, iffalse*]
+
+# An initialization list used for compound literals.
+#
+InitList: [exprs**]
+
+Label: [name, stmt*]
+
+# A named initializer for C99.
+# The name of a NamedInitializer is a sequence of Nodes, because
+# names can be hierarchical and contain constant expressions.
+#
+NamedInitializer: [name**, expr*]
+
+# a list of comma separated function parameter declarations
+#
+ParamList: [params**]
+
+PtrDecl: [quals, type*]
+
+Return: [expr*]
+
+StaticAssert: [cond*, message*]
+
+# name: struct tag name
+# decls: declaration of members
+#
+Struct: [name, decls**]
+
+# type: . or ->
+# name.field or name->field
+#
+StructRef: [name*, type, field*]
+
+Switch: [cond*, stmt*]
+
+# cond ? iftrue : iffalse
+#
+TernaryOp: [cond*, iftrue*, iffalse*]
+
+# A base type declaration
+#
+TypeDecl: [declname, quals, align, type*]
+
+# A typedef declaration.
+# Very similar to Decl, but without some attributes
+#
+Typedef: [name, quals, storage, type*]
+
+Typename: [name, quals, align, type*]
+
+UnaryOp: [op, expr*]
+
+# name: union tag name
+# decls: declaration of members
+#
+Union: [name, decls**]
+
+While: [cond*, stmt*]
+
+Pragma: [string]
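
Each entry above maps one-to-one onto a generated class in c_ast.py later in
this diff. For example, `Assignment: [op, lvalue*, rvalue*]` yields (as can be
checked against the generated file below):

    class Assignment(Node):
        __slots__ = ('op', 'lvalue', 'rvalue', 'coord', '__weakref__')
        # 'op' (no suffix) is a plain attribute reported via attr_names;
        # 'lvalue*' and 'rvalue*' are child nodes, visited by children()
        # and __iter__ when they are not None.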
diff --git a/billinglayer/python/pycparser/ast_transforms.py b/billinglayer/python/pycparser/ast_transforms.py
new file mode 100644
index 0000000..367dcf5
--- /dev/null
+++ b/billinglayer/python/pycparser/ast_transforms.py
@@ -0,0 +1,164 @@
+#------------------------------------------------------------------------------
+# pycparser: ast_transforms.py
+#
+# Some utilities used by the parser to create a friendlier AST.
+#
+# Eli Bendersky [https://eli.thegreenplace.net/]
+# License: BSD
+#------------------------------------------------------------------------------
+
+from . import c_ast
+
+
+def fix_switch_cases(switch_node):
+ """ The 'case' statements in a 'switch' come out of parsing with one
+ child node, so subsequent statements are just tucked to the parent
+ Compound. Additionally, consecutive (fall-through) case statements
+ come out messy. This is a peculiarity of the C grammar. The following:
+
+ switch (myvar) {
+     case 10:
+         k = 10;
+         p = k + 1;
+         return 10;
+     case 20:
+     case 30:
+         return 20;
+     default:
+         break;
+ }
+
+ Creates this tree (pseudo-dump):
+
+     Switch
+         ID: myvar
+         Compound:
+             Case 10:
+                 k = 10
+             p = k + 1
+             return 10
+             Case 20:
+                 Case 30:
+                     return 20
+             Default:
+                 break
+
+ The goal of this transform is to fix this mess, turning it into the
+ following:
+
+     Switch
+         ID: myvar
+         Compound:
+             Case 10:
+                 k = 10
+                 p = k + 1
+                 return 10
+             Case 20:
+             Case 30:
+                 return 20
+             Default:
+                 break
+
+ A fixed AST node is returned. The argument may be modified.
+ """
+ assert isinstance(switch_node, c_ast.Switch)
+ if not isinstance(switch_node.stmt, c_ast.Compound):
+ return switch_node
+
+ # The new Compound child for the Switch, which will collect children in the
+ # correct order
+ new_compound = c_ast.Compound([], switch_node.stmt.coord)
+
+ # The last Case/Default node
+ last_case = None
+
+ # Goes over the children of the Compound below the Switch, adding them
+ # either directly below new_compound or below the last Case as appropriate
+ # (for `switch(cond) {}`, block_items would have been None)
+ for child in (switch_node.stmt.block_items or []):
+ if isinstance(child, (c_ast.Case, c_ast.Default)):
+ # If it's a Case/Default:
+ # 1. Add it to the Compound and mark as "last case"
+ # 2. If its immediate child is also a Case or Default, promote it
+ # to a sibling.
+ new_compound.block_items.append(child)
+ _extract_nested_case(child, new_compound.block_items)
+ last_case = new_compound.block_items[-1]
+ else:
+ # Other statements are added as children to the last case, if it
+ # exists.
+ if last_case is None:
+ new_compound.block_items.append(child)
+ else:
+ last_case.stmts.append(child)
+
+ switch_node.stmt = new_compound
+ return switch_node
+
+
+def _extract_nested_case(case_node, stmts_list):
+ """ Recursively extract consecutive Case statements that are made nested
+ by the parser and add them to the stmts_list.
+ """
+ if isinstance(case_node.stmts[0], (c_ast.Case, c_ast.Default)):
+ stmts_list.append(case_node.stmts.pop())
+ _extract_nested_case(stmts_list[-1], stmts_list)
+
+
+def fix_atomic_specifiers(decl):
+ """ Atomic specifiers like _Atomic(type) are unusually structured,
+ conferring a qualifier upon the contained type.
+
+ This function fixes a decl with atomic specifiers to have a sane AST
+ structure, by removing spurious Typename->TypeDecl pairs and attaching
+ the _Atomic qualifier in the right place.
+ """
+ # There can be multiple levels of _Atomic in a decl; fix them until a
+ # fixed point is reached.
+ while True:
+ decl, found = _fix_atomic_specifiers_once(decl)
+ if not found:
+ break
+
+ # Make sure to add an _Atomic qual on the topmost decl if needed. Also
+ # restore the declname on the innermost TypeDecl (it gets placed in the
+ # wrong place during construction).
+ typ = decl
+ while not isinstance(typ, c_ast.TypeDecl):
+ try:
+ typ = typ.type
+ except AttributeError:
+ return decl
+ if '_Atomic' in typ.quals and '_Atomic' not in decl.quals:
+ decl.quals.append('_Atomic')
+ if typ.declname is None:
+ typ.declname = decl.name
+
+ return decl
+
+
+def _fix_atomic_specifiers_once(decl):
+ """ Performs one 'fix' round of atomic specifiers.
+ Returns (modified_decl, found) where found is True iff a fix was made.
+ """
+ parent = decl
+ grandparent = None
+ node = decl.type
+ while node is not None:
+ if isinstance(node, c_ast.Typename) and '_Atomic' in node.quals:
+ break
+ try:
+ grandparent = parent
+ parent = node
+ node = node.type
+ except AttributeError:
+ # If we've reached a node without a `type` field, it means we won't
+ # find what we're looking for at this point; give up the search
+ # and return the original decl unmodified.
+ return decl, False
+
+ assert isinstance(parent, c_ast.TypeDecl)
+ grandparent.type = node.type
+ if '_Atomic' not in node.type.quals:
+ node.type.quals.append('_Atomic')
+ return decl, True
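
A small sketch of fix_switch_cases in action, hand-building the "messy" shape
the parser produces (the variable names are illustrative only):

    from pycparser import c_ast
    from pycparser.ast_transforms import fix_switch_cases

    # switch (x) { case 1: a = 1; b = 2; }: the parser leaves only the first
    # statement under the Case; the second lands in the enclosing Compound.
    case = c_ast.Case(c_ast.Constant('int', '1'),
                      [c_ast.Assignment('=', c_ast.ID('a'),
                                        c_ast.Constant('int', '1'))])
    stray = c_ast.Assignment('=', c_ast.ID('b'), c_ast.Constant('int', '2'))
    switch = c_ast.Switch(c_ast.ID('x'), c_ast.Compound([case, stray]))

    fixed = fix_switch_cases(switch)
    assert fixed.stmt.block_items == [case]  # stray was moved under the Case
    assert case.stmts[-1] is stray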
diff --git a/billinglayer/python/pycparser/c_ast.py b/billinglayer/python/pycparser/c_ast.py
new file mode 100644
index 0000000..6575a2a
--- /dev/null
+++ b/billinglayer/python/pycparser/c_ast.py
@@ -0,0 +1,1125 @@
+#-----------------------------------------------------------------
+# ** ATTENTION **
+# This code was automatically generated from the file:
+# _c_ast.cfg
+#
+# Do not modify it directly. Modify the configuration file and
+# run the generator again.
+# ** ** *** ** **
+#
+# pycparser: c_ast.py
+#
+# AST Node classes.
+#
+# Eli Bendersky [https://eli.thegreenplace.net/]
+# License: BSD
+#-----------------------------------------------------------------
+
+
+import sys
+
+def _repr(obj):
+ """
+ Get the representation of an object, with dedicated pprint-like format for lists.
+ """
+ if isinstance(obj, list):
+ return '[' + (',\n '.join((_repr(e).replace('\n', '\n ') for e in obj))) + '\n]'
+ else:
+ return repr(obj)
+
+class Node(object):
+ __slots__ = ()
+ """ Abstract base class for AST nodes.
+ """
+ def __repr__(self):
+ """ Generates a python representation of the current node
+ """
+ result = self.__class__.__name__ + '('
+
+ indent = ''
+ separator = ''
+ for name in self.__slots__[:-2]:
+ result += separator
+ result += indent
+ result += name + '=' + (_repr(getattr(self, name)).replace('\n', '\n ' + (' ' * (len(name) + len(self.__class__.__name__)))))
+
+ separator = ','
+ indent = '\n ' + (' ' * len(self.__class__.__name__))
+
+ result += indent + ')'
+
+ return result
+
+ def children(self):
+ """ A sequence of all children that are Nodes
+ """
+ pass
+
+ def show(self, buf=sys.stdout, offset=0, attrnames=False, nodenames=False, showcoord=False, _my_node_name=None):
+ """ Pretty print the Node and all its attributes and
+ children (recursively) to a buffer.
+
+ buf:
+ Open IO buffer into which the Node is printed.
+
+ offset:
+ Initial offset (amount of leading spaces)
+
+ attrnames:
+ True if you want to see the attribute names in
+ name=value pairs. False to only see the values.
+
+ nodenames:
+ True if you want to see the actual node names
+ within their parents.
+
+ showcoord:
+ True if you want the coordinates of each Node to be
+ displayed.
+ """
+ lead = ' ' * offset
+ if nodenames and _my_node_name is not None:
+ buf.write(lead + self.__class__.__name__+ ' <' + _my_node_name + '>: ')
+ else:
+ buf.write(lead + self.__class__.__name__+ ': ')
+
+ if self.attr_names:
+ if attrnames:
+ nvlist = [(n, getattr(self,n)) for n in self.attr_names]
+ attrstr = ', '.join('%s=%s' % nv for nv in nvlist)
+ else:
+ vlist = [getattr(self, n) for n in self.attr_names]
+ attrstr = ', '.join('%s' % v for v in vlist)
+ buf.write(attrstr)
+
+ if showcoord:
+ buf.write(' (at %s)' % self.coord)
+ buf.write('\n')
+
+ for (child_name, child) in self.children():
+ child.show(
+ buf,
+ offset=offset + 2,
+ attrnames=attrnames,
+ nodenames=nodenames,
+ showcoord=showcoord,
+ _my_node_name=child_name)
+
+
+class NodeVisitor(object):
+ """ A base NodeVisitor class for visiting c_ast nodes.
+ Subclass it and define your own visit_XXX methods, where
+ XXX is the class name you want to visit with these
+ methods.
+
+ For example:
+
+ class ConstantVisitor(NodeVisitor):
+ def __init__(self):
+ self.values = []
+
+ def visit_Constant(self, node):
+ self.values.append(node.value)
+
+ Creates a list of values of all the constant nodes
+ encountered below the given node. To use it:
+
+ cv = ConstantVisitor()
+ cv.visit(node)
+
+ Notes:
+
+ * generic_visit() will be called for AST nodes for which
+ no visit_XXX method was defined.
+ * The children of nodes for which a visit_XXX was
+ defined will not be visited - if you need this, call
+ generic_visit() on the node.
+ You can use:
+ NodeVisitor.generic_visit(self, node)
+ * Modeled after Python's own AST visiting facilities
+ (the ast module of Python 3.0)
+ """
+
+ _method_cache = None
+
+ def visit(self, node):
+ """ Visit a node.
+ """
+
+ if self._method_cache is None:
+ self._method_cache = {}
+
+ visitor = self._method_cache.get(node.__class__.__name__, None)
+ if visitor is None:
+ method = 'visit_' + node.__class__.__name__
+ visitor = getattr(self, method, self.generic_visit)
+ self._method_cache[node.__class__.__name__] = visitor
+
+ return visitor(node)
+
+ def generic_visit(self, node):
+ """ Called if no explicit visitor function exists for a
+ node. Implements preorder visiting of the node.
+ """
+ for c in node:
+ self.visit(c)
+
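+# Illustration (not part of the module): the docstring's ConstantVisitor,
+# applied to the tree for `1 + 2`, collects both constant values:
+#
+#     tree = BinaryOp('+', Constant('int', '1'), Constant('int', '2'))
+#     cv = ConstantVisitor()
+#     cv.visit(tree)  # generic_visit recurses via BinaryOp.__iter__
+#     assert cv.values == ['1', '2']
+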
+class ArrayDecl(Node):
+ __slots__ = ('type', 'dim', 'dim_quals', 'coord', '__weakref__')
+ def __init__(self, type, dim, dim_quals, coord=None):
+ self.type = type
+ self.dim = dim
+ self.dim_quals = dim_quals
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.type is not None: nodelist.append(("type", self.type))
+ if self.dim is not None: nodelist.append(("dim", self.dim))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.type is not None:
+ yield self.type
+ if self.dim is not None:
+ yield self.dim
+
+ attr_names = ('dim_quals', )
+
+class ArrayRef(Node):
+ __slots__ = ('name', 'subscript', 'coord', '__weakref__')
+ def __init__(self, name, subscript, coord=None):
+ self.name = name
+ self.subscript = subscript
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.name is not None: nodelist.append(("name", self.name))
+ if self.subscript is not None: nodelist.append(("subscript", self.subscript))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.name is not None:
+ yield self.name
+ if self.subscript is not None:
+ yield self.subscript
+
+ attr_names = ()
+
+class Assignment(Node):
+ __slots__ = ('op', 'lvalue', 'rvalue', 'coord', '__weakref__')
+ def __init__(self, op, lvalue, rvalue, coord=None):
+ self.op = op
+ self.lvalue = lvalue
+ self.rvalue = rvalue
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.lvalue is not None: nodelist.append(("lvalue", self.lvalue))
+ if self.rvalue is not None: nodelist.append(("rvalue", self.rvalue))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.lvalue is not None:
+ yield self.lvalue
+ if self.rvalue is not None:
+ yield self.rvalue
+
+ attr_names = ('op', )
+
+class Alignas(Node):
+ __slots__ = ('alignment', 'coord', '__weakref__')
+ def __init__(self, alignment, coord=None):
+ self.alignment = alignment
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.alignment is not None: nodelist.append(("alignment", self.alignment))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.alignment is not None:
+ yield self.alignment
+
+ attr_names = ()
+
+class BinaryOp(Node):
+ __slots__ = ('op', 'left', 'right', 'coord', '__weakref__')
+ def __init__(self, op, left, right, coord=None):
+ self.op = op
+ self.left = left
+ self.right = right
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.left is not None: nodelist.append(("left", self.left))
+ if self.right is not None: nodelist.append(("right", self.right))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.left is not None:
+ yield self.left
+ if self.right is not None:
+ yield self.right
+
+ attr_names = ('op', )
+
+class Break(Node):
+ __slots__ = ('coord', '__weakref__')
+ def __init__(self, coord=None):
+ self.coord = coord
+
+ def children(self):
+ return ()
+
+ def __iter__(self):
+ return
+ yield
+
+ attr_names = ()
+
+class Case(Node):
+ __slots__ = ('expr', 'stmts', 'coord', '__weakref__')
+ def __init__(self, expr, stmts, coord=None):
+ self.expr = expr
+ self.stmts = stmts
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.expr is not None: nodelist.append(("expr", self.expr))
+ for i, child in enumerate(self.stmts or []):
+ nodelist.append(("stmts[%d]" % i, child))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.expr is not None:
+ yield self.expr
+ for child in (self.stmts or []):
+ yield child
+
+ attr_names = ()
+
+class Cast(Node):
+ __slots__ = ('to_type', 'expr', 'coord', '__weakref__')
+ def __init__(self, to_type, expr, coord=None):
+ self.to_type = to_type
+ self.expr = expr
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.to_type is not None: nodelist.append(("to_type", self.to_type))
+ if self.expr is not None: nodelist.append(("expr", self.expr))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.to_type is not None:
+ yield self.to_type
+ if self.expr is not None:
+ yield self.expr
+
+ attr_names = ()
+
+class Compound(Node):
+ __slots__ = ('block_items', 'coord', '__weakref__')
+ def __init__(self, block_items, coord=None):
+ self.block_items = block_items
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ for i, child in enumerate(self.block_items or []):
+ nodelist.append(("block_items[%d]" % i, child))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ for child in (self.block_items or []):
+ yield child
+
+ attr_names = ()
+
+class CompoundLiteral(Node):
+ __slots__ = ('type', 'init', 'coord', '__weakref__')
+ def __init__(self, type, init, coord=None):
+ self.type = type
+ self.init = init
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.type is not None: nodelist.append(("type", self.type))
+ if self.init is not None: nodelist.append(("init", self.init))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.type is not None:
+ yield self.type
+ if self.init is not None:
+ yield self.init
+
+ attr_names = ()
+
+class Constant(Node):
+ __slots__ = ('type', 'value', 'coord', '__weakref__')
+ def __init__(self, type, value, coord=None):
+ self.type = type
+ self.value = value
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ return tuple(nodelist)
+
+ def __iter__(self):
+ return
+ yield
+
+ attr_names = ('type', 'value', )
+
+class Continue(Node):
+ __slots__ = ('coord', '__weakref__')
+ def __init__(self, coord=None):
+ self.coord = coord
+
+ def children(self):
+ return ()
+
+ def __iter__(self):
+ return
+ yield
+
+ attr_names = ()
+
+class Decl(Node):
+ __slots__ = ('name', 'quals', 'align', 'storage', 'funcspec', 'type', 'init', 'bitsize', 'coord', '__weakref__')
+ def __init__(self, name, quals, align, storage, funcspec, type, init, bitsize, coord=None):
+ self.name = name
+ self.quals = quals
+ self.align = align
+ self.storage = storage
+ self.funcspec = funcspec
+ self.type = type
+ self.init = init
+ self.bitsize = bitsize
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.type is not None: nodelist.append(("type", self.type))
+ if self.init is not None: nodelist.append(("init", self.init))
+ if self.bitsize is not None: nodelist.append(("bitsize", self.bitsize))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.type is not None:
+ yield self.type
+ if self.init is not None:
+ yield self.init
+ if self.bitsize is not None:
+ yield self.bitsize
+
+ attr_names = ('name', 'quals', 'align', 'storage', 'funcspec', )
+
+class DeclList(Node):
+ __slots__ = ('decls', 'coord', '__weakref__')
+ def __init__(self, decls, coord=None):
+ self.decls = decls
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ for i, child in enumerate(self.decls or []):
+ nodelist.append(("decls[%d]" % i, child))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ for child in (self.decls or []):
+ yield child
+
+ attr_names = ()
+
+class Default(Node):
+ __slots__ = ('stmts', 'coord', '__weakref__')
+ def __init__(self, stmts, coord=None):
+ self.stmts = stmts
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ for i, child in enumerate(self.stmts or []):
+ nodelist.append(("stmts[%d]" % i, child))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ for child in (self.stmts or []):
+ yield child
+
+ attr_names = ()
+
+class DoWhile(Node):
+ __slots__ = ('cond', 'stmt', 'coord', '__weakref__')
+ def __init__(self, cond, stmt, coord=None):
+ self.cond = cond
+ self.stmt = stmt
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.cond is not None: nodelist.append(("cond", self.cond))
+ if self.stmt is not None: nodelist.append(("stmt", self.stmt))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.cond is not None:
+ yield self.cond
+ if self.stmt is not None:
+ yield self.stmt
+
+ attr_names = ()
+
+class EllipsisParam(Node):
+ __slots__ = ('coord', '__weakref__')
+ def __init__(self, coord=None):
+ self.coord = coord
+
+ def children(self):
+ return ()
+
+ def __iter__(self):
+ return
+ yield
+
+ attr_names = ()
+
+class EmptyStatement(Node):
+ __slots__ = ('coord', '__weakref__')
+ def __init__(self, coord=None):
+ self.coord = coord
+
+ def children(self):
+ return ()
+
+ def __iter__(self):
+ return
+ yield
+
+ attr_names = ()
+
+class Enum(Node):
+ __slots__ = ('name', 'values', 'coord', '__weakref__')
+ def __init__(self, name, values, coord=None):
+ self.name = name
+ self.values = values
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.values is not None: nodelist.append(("values", self.values))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.values is not None:
+ yield self.values
+
+ attr_names = ('name', )
+
+class Enumerator(Node):
+ __slots__ = ('name', 'value', 'coord', '__weakref__')
+ def __init__(self, name, value, coord=None):
+ self.name = name
+ self.value = value
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.value is not None: nodelist.append(("value", self.value))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.value is not None:
+ yield self.value
+
+ attr_names = ('name', )
+
+class EnumeratorList(Node):
+ __slots__ = ('enumerators', 'coord', '__weakref__')
+ def __init__(self, enumerators, coord=None):
+ self.enumerators = enumerators
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ for i, child in enumerate(self.enumerators or []):
+ nodelist.append(("enumerators[%d]" % i, child))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ for child in (self.enumerators or []):
+ yield child
+
+ attr_names = ()
+
+class ExprList(Node):
+ __slots__ = ('exprs', 'coord', '__weakref__')
+ def __init__(self, exprs, coord=None):
+ self.exprs = exprs
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ for i, child in enumerate(self.exprs or []):
+ nodelist.append(("exprs[%d]" % i, child))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ for child in (self.exprs or []):
+ yield child
+
+ attr_names = ()
+
+class FileAST(Node):
+ __slots__ = ('ext', 'coord', '__weakref__')
+ def __init__(self, ext, coord=None):
+ self.ext = ext
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ for i, child in enumerate(self.ext or []):
+ nodelist.append(("ext[%d]" % i, child))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ for child in (self.ext or []):
+ yield child
+
+ attr_names = ()
+
+class For(Node):
+ __slots__ = ('init', 'cond', 'next', 'stmt', 'coord', '__weakref__')
+ def __init__(self, init, cond, next, stmt, coord=None):
+ self.init = init
+ self.cond = cond
+ self.next = next
+ self.stmt = stmt
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.init is not None: nodelist.append(("init", self.init))
+ if self.cond is not None: nodelist.append(("cond", self.cond))
+ if self.next is not None: nodelist.append(("next", self.next))
+ if self.stmt is not None: nodelist.append(("stmt", self.stmt))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.init is not None:
+ yield self.init
+ if self.cond is not None:
+ yield self.cond
+ if self.next is not None:
+ yield self.next
+ if self.stmt is not None:
+ yield self.stmt
+
+ attr_names = ()
+
+class FuncCall(Node):
+ __slots__ = ('name', 'args', 'coord', '__weakref__')
+ def __init__(self, name, args, coord=None):
+ self.name = name
+ self.args = args
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.name is not None: nodelist.append(("name", self.name))
+ if self.args is not None: nodelist.append(("args", self.args))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.name is not None:
+ yield self.name
+ if self.args is not None:
+ yield self.args
+
+ attr_names = ()
+
+class FuncDecl(Node):
+ __slots__ = ('args', 'type', 'coord', '__weakref__')
+ def __init__(self, args, type, coord=None):
+ self.args = args
+ self.type = type
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.args is not None: nodelist.append(("args", self.args))
+ if self.type is not None: nodelist.append(("type", self.type))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.args is not None:
+ yield self.args
+ if self.type is not None:
+ yield self.type
+
+ attr_names = ()
+
+class FuncDef(Node):
+ __slots__ = ('decl', 'param_decls', 'body', 'coord', '__weakref__')
+ def __init__(self, decl, param_decls, body, coord=None):
+ self.decl = decl
+ self.param_decls = param_decls
+ self.body = body
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.decl is not None: nodelist.append(("decl", self.decl))
+ if self.body is not None: nodelist.append(("body", self.body))
+ for i, child in enumerate(self.param_decls or []):
+ nodelist.append(("param_decls[%d]" % i, child))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.decl is not None:
+ yield self.decl
+ if self.body is not None:
+ yield self.body
+ for child in (self.param_decls or []):
+ yield child
+
+ attr_names = ()
+
+class Goto(Node):
+ __slots__ = ('name', 'coord', '__weakref__')
+ def __init__(self, name, coord=None):
+ self.name = name
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ return tuple(nodelist)
+
+ def __iter__(self):
+ return
+ yield
+
+ attr_names = ('name', )
+
+class ID(Node):
+ __slots__ = ('name', 'coord', '__weakref__')
+ def __init__(self, name, coord=None):
+ self.name = name
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ return tuple(nodelist)
+
+ def __iter__(self):
+ return
+ yield
+
+ attr_names = ('name', )
+
+class IdentifierType(Node):
+ __slots__ = ('names', 'coord', '__weakref__')
+ def __init__(self, names, coord=None):
+ self.names = names
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ return tuple(nodelist)
+
+ def __iter__(self):
+ return
+ yield
+
+ attr_names = ('names', )
+
+class If(Node):
+ __slots__ = ('cond', 'iftrue', 'iffalse', 'coord', '__weakref__')
+ def __init__(self, cond, iftrue, iffalse, coord=None):
+ self.cond = cond
+ self.iftrue = iftrue
+ self.iffalse = iffalse
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.cond is not None: nodelist.append(("cond", self.cond))
+ if self.iftrue is not None: nodelist.append(("iftrue", self.iftrue))
+ if self.iffalse is not None: nodelist.append(("iffalse", self.iffalse))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.cond is not None:
+ yield self.cond
+ if self.iftrue is not None:
+ yield self.iftrue
+ if self.iffalse is not None:
+ yield self.iffalse
+
+ attr_names = ()
+
+class InitList(Node):
+ __slots__ = ('exprs', 'coord', '__weakref__')
+ def __init__(self, exprs, coord=None):
+ self.exprs = exprs
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ for i, child in enumerate(self.exprs or []):
+ nodelist.append(("exprs[%d]" % i, child))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ for child in (self.exprs or []):
+ yield child
+
+ attr_names = ()
+
+class Label(Node):
+ __slots__ = ('name', 'stmt', 'coord', '__weakref__')
+ def __init__(self, name, stmt, coord=None):
+ self.name = name
+ self.stmt = stmt
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.stmt is not None: nodelist.append(("stmt", self.stmt))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.stmt is not None:
+ yield self.stmt
+
+ attr_names = ('name', )
+
+class NamedInitializer(Node):
+ __slots__ = ('name', 'expr', 'coord', '__weakref__')
+ def __init__(self, name, expr, coord=None):
+ self.name = name
+ self.expr = expr
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.expr is not None: nodelist.append(("expr", self.expr))
+ for i, child in enumerate(self.name or []):
+ nodelist.append(("name[%d]" % i, child))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.expr is not None:
+ yield self.expr
+ for child in (self.name or []):
+ yield child
+
+ attr_names = ()
+
+class ParamList(Node):
+ __slots__ = ('params', 'coord', '__weakref__')
+ def __init__(self, params, coord=None):
+ self.params = params
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ for i, child in enumerate(self.params or []):
+ nodelist.append(("params[%d]" % i, child))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ for child in (self.params or []):
+ yield child
+
+ attr_names = ()
+
+class PtrDecl(Node):
+ __slots__ = ('quals', 'type', 'coord', '__weakref__')
+ def __init__(self, quals, type, coord=None):
+ self.quals = quals
+ self.type = type
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.type is not None: nodelist.append(("type", self.type))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.type is not None:
+ yield self.type
+
+ attr_names = ('quals', )
+
+class Return(Node):
+ __slots__ = ('expr', 'coord', '__weakref__')
+ def __init__(self, expr, coord=None):
+ self.expr = expr
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.expr is not None: nodelist.append(("expr", self.expr))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.expr is not None:
+ yield self.expr
+
+ attr_names = ()
+
+class StaticAssert(Node):
+ __slots__ = ('cond', 'message', 'coord', '__weakref__')
+ def __init__(self, cond, message, coord=None):
+ self.cond = cond
+ self.message = message
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.cond is not None: nodelist.append(("cond", self.cond))
+ if self.message is not None: nodelist.append(("message", self.message))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.cond is not None:
+ yield self.cond
+ if self.message is not None:
+ yield self.message
+
+ attr_names = ()
+
+class Struct(Node):
+ __slots__ = ('name', 'decls', 'coord', '__weakref__')
+ def __init__(self, name, decls, coord=None):
+ self.name = name
+ self.decls = decls
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ for i, child in enumerate(self.decls or []):
+ nodelist.append(("decls[%d]" % i, child))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ for child in (self.decls or []):
+ yield child
+
+ attr_names = ('name', )
+
+class StructRef(Node):
+ __slots__ = ('name', 'type', 'field', 'coord', '__weakref__')
+ def __init__(self, name, type, field, coord=None):
+ self.name = name
+ self.type = type
+ self.field = field
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.name is not None: nodelist.append(("name", self.name))
+ if self.field is not None: nodelist.append(("field", self.field))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.name is not None:
+ yield self.name
+ if self.field is not None:
+ yield self.field
+
+ attr_names = ('type', )
+
+class Switch(Node):
+ __slots__ = ('cond', 'stmt', 'coord', '__weakref__')
+ def __init__(self, cond, stmt, coord=None):
+ self.cond = cond
+ self.stmt = stmt
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.cond is not None: nodelist.append(("cond", self.cond))
+ if self.stmt is not None: nodelist.append(("stmt", self.stmt))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.cond is not None:
+ yield self.cond
+ if self.stmt is not None:
+ yield self.stmt
+
+ attr_names = ()
+
+class TernaryOp(Node):
+ __slots__ = ('cond', 'iftrue', 'iffalse', 'coord', '__weakref__')
+ def __init__(self, cond, iftrue, iffalse, coord=None):
+ self.cond = cond
+ self.iftrue = iftrue
+ self.iffalse = iffalse
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.cond is not None: nodelist.append(("cond", self.cond))
+ if self.iftrue is not None: nodelist.append(("iftrue", self.iftrue))
+ if self.iffalse is not None: nodelist.append(("iffalse", self.iffalse))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.cond is not None:
+ yield self.cond
+ if self.iftrue is not None:
+ yield self.iftrue
+ if self.iffalse is not None:
+ yield self.iffalse
+
+ attr_names = ()
+
+class TypeDecl(Node):
+ __slots__ = ('declname', 'quals', 'align', 'type', 'coord', '__weakref__')
+ def __init__(self, declname, quals, align, type, coord=None):
+ self.declname = declname
+ self.quals = quals
+ self.align = align
+ self.type = type
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.type is not None: nodelist.append(("type", self.type))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.type is not None:
+ yield self.type
+
+ attr_names = ('declname', 'quals', 'align', )
+
+class Typedef(Node):
+ __slots__ = ('name', 'quals', 'storage', 'type', 'coord', '__weakref__')
+ def __init__(self, name, quals, storage, type, coord=None):
+ self.name = name
+ self.quals = quals
+ self.storage = storage
+ self.type = type
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.type is not None: nodelist.append(("type", self.type))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.type is not None:
+ yield self.type
+
+ attr_names = ('name', 'quals', 'storage', )
+
+class Typename(Node):
+ __slots__ = ('name', 'quals', 'align', 'type', 'coord', '__weakref__')
+ def __init__(self, name, quals, align, type, coord=None):
+ self.name = name
+ self.quals = quals
+ self.align = align
+ self.type = type
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.type is not None: nodelist.append(("type", self.type))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.type is not None:
+ yield self.type
+
+ attr_names = ('name', 'quals', 'align', )
+
+class UnaryOp(Node):
+ __slots__ = ('op', 'expr', 'coord', '__weakref__')
+ def __init__(self, op, expr, coord=None):
+ self.op = op
+ self.expr = expr
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.expr is not None: nodelist.append(("expr", self.expr))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.expr is not None:
+ yield self.expr
+
+ attr_names = ('op', )
+
+class Union(Node):
+ __slots__ = ('name', 'decls', 'coord', '__weakref__')
+ def __init__(self, name, decls, coord=None):
+ self.name = name
+ self.decls = decls
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ for i, child in enumerate(self.decls or []):
+ nodelist.append(("decls[%d]" % i, child))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ for child in (self.decls or []):
+ yield child
+
+ attr_names = ('name', )
+
+class While(Node):
+ __slots__ = ('cond', 'stmt', 'coord', '__weakref__')
+ def __init__(self, cond, stmt, coord=None):
+ self.cond = cond
+ self.stmt = stmt
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.cond is not None: nodelist.append(("cond", self.cond))
+ if self.stmt is not None: nodelist.append(("stmt", self.stmt))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.cond is not None:
+ yield self.cond
+ if self.stmt is not None:
+ yield self.stmt
+
+ attr_names = ()
+
+class Pragma(Node):
+ __slots__ = ('string', 'coord', '__weakref__')
+ def __init__(self, string, coord=None):
+ self.string = string
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ return tuple(nodelist)
+
+ def __iter__(self):
+ return
+ yield
+
+ attr_names = ('string', )
+
diff --git a/billinglayer/python/pycparser/c_generator.py b/billinglayer/python/pycparser/c_generator.py
new file mode 100644
index 0000000..1057b2c
--- /dev/null
+++ b/billinglayer/python/pycparser/c_generator.py
@@ -0,0 +1,502 @@
+#------------------------------------------------------------------------------
+# pycparser: c_generator.py
+#
+# C code generator from pycparser AST nodes.
+#
+# Eli Bendersky [https://eli.thegreenplace.net/]
+# License: BSD
+#------------------------------------------------------------------------------
+from . import c_ast
+
+
+class CGenerator(object):
+ """ Uses the same visitor pattern as c_ast.NodeVisitor, but modified to
+ return a value from each visit method, using string accumulation in
+ generic_visit.
+ """
+ def __init__(self, reduce_parentheses=False):
+ """ Constructs C-code generator
+
+ reduce_parentheses:
+ if True, eliminates needless parentheses on binary operators
+ """
+ # Statements start with indentation of self.indent_level spaces, using
+ # the _make_indent method.
+ self.indent_level = 0
+ self.reduce_parentheses = reduce_parentheses
+
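+ # Typical use (sketch, not from this file): given an AST from
+ # pycparser's CParser, `CGenerator().visit(ast)` returns the
+ # generated C source as a single string.
+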
+ def _make_indent(self):
+ return ' ' * self.indent_level
+
+ def visit(self, node):
+ method = 'visit_' + node.__class__.__name__
+ return getattr(self, method, self.generic_visit)(node)
+
+ def generic_visit(self, node):
+ if node is None:
+ return ''
+ else:
+ return ''.join(self.visit(c) for c_name, c in node.children())
+
+ def visit_Constant(self, n):
+ return n.value
+
+ def visit_ID(self, n):
+ return n.name
+
+ def visit_Pragma(self, n):
+ ret = '#pragma'
+ if n.string:
+ ret += ' ' + n.string
+ return ret
+
+ def visit_ArrayRef(self, n):
+ arrref = self._parenthesize_unless_simple(n.name)
+ return arrref + '[' + self.visit(n.subscript) + ']'
+
+ def visit_StructRef(self, n):
+ sref = self._parenthesize_unless_simple(n.name)
+ return sref + n.type + self.visit(n.field)
+
+ def visit_FuncCall(self, n):
+ fref = self._parenthesize_unless_simple(n.name)
+ return fref + '(' + self.visit(n.args) + ')'
+
+ def visit_UnaryOp(self, n):
+ if n.op == 'sizeof':
+ # Always parenthesize the argument of sizeof since it can be
+ # a name.
+ return 'sizeof(%s)' % self.visit(n.expr)
+ else:
+ operand = self._parenthesize_unless_simple(n.expr)
+ if n.op == 'p++':
+ return '%s++' % operand
+ elif n.op == 'p--':
+ return '%s--' % operand
+ else:
+ return '%s%s' % (n.op, operand)
+
+ # Precedence map of binary operators:
+ precedence_map = {
+ # Should be in sync with c_parser.CParser.precedence
+ # Higher numbers are stronger binding
+ '||': 0, # weakest binding
+ '&&': 1,
+ '|': 2,
+ '^': 3,
+ '&': 4,
+ '==': 5, '!=': 5,
+ '>': 6, '>=': 6, '<': 6, '<=': 6,
+ '>>': 7, '<<': 7,
+ '+': 8, '-': 8,
+ '*': 9, '/': 9, '%': 9 # strongest binding
+ }
+
+ def visit_BinaryOp(self, n):
+ # Note: all binary operators are left-to-right associative
+ #
+ # If `n.left.op` has a stronger or equally binding precedence in
+ # comparison to `n.op`, no parentheses are needed for the left:
+ # e.g., `(a*b) + c` is equivalent to `a*b + c`, as well as
+ # `(a+b) - c` is equivalent to `a+b - c` (same precedence).
+ # If the left operator is weaker binding than the current, then
+ # parentheses are necessary:
+ # e.g., `(a+b) * c` is NOT equivalent to `a+b * c`.
+ lval_str = self._parenthesize_if(
+ n.left,
+ lambda d: not (self._is_simple_node(d) or
+ self.reduce_parentheses and isinstance(d, c_ast.BinaryOp) and
+ self.precedence_map[d.op] >= self.precedence_map[n.op]))
+ # If `n.right.op` has a stronger (but not equal) binding precedence,
+ # parentheses can be omitted on the right:
+ # e.g., `a + (b*c)` is equivalent to `a + b*c`.
+ # If the right operator is weaker or equally binding, then parentheses
+ # are necessary:
+ # e.g., `a * (b+c)` is NOT equivalent to `a * b+c` and
+ # `a - (b+c)` is NOT equivalent to `a - b+c` (same precedence).
+ rval_str = self._parenthesize_if(
+ n.right,
+ lambda d: not (self._is_simple_node(d) or
+ self.reduce_parentheses and isinstance(d, c_ast.BinaryOp) and
+ self.precedence_map[d.op] > self.precedence_map[n.op]))
+ return '%s %s %s' % (lval_str, n.op, rval_str)
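+ # Worked example (illustrative): for BinaryOp('*', BinaryOp('+', a, b), c)
+ # with reduce_parentheses=True, the left test keeps the parentheses because
+ # precedence_map['+'] (8) is weaker than precedence_map['*'] (9), giving
+ # "(a + b) * c"; for BinaryOp('+', BinaryOp('*', a, b), c) it drops them
+ # (9 >= 8), giving "a * b + c".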
+
+ def visit_Assignment(self, n):
+ rval_str = self._parenthesize_if(
+ n.rvalue,
+ lambda n: isinstance(n, c_ast.Assignment))
+ return '%s %s %s' % (self.visit(n.lvalue), n.op, rval_str)
+
+ def visit_IdentifierType(self, n):
+ return ' '.join(n.names)
+
+ def _visit_expr(self, n):
+ if isinstance(n, c_ast.InitList):
+ return '{' + self.visit(n) + '}'
+ elif isinstance(n, c_ast.ExprList):
+ return '(' + self.visit(n) + ')'
+ else:
+ return self.visit(n)
+
+ def visit_Decl(self, n, no_type=False):
+ # no_type is used when a Decl is part of a DeclList, where the type is
+ # written explicitly only for the first declaration in the list.
+ #
+ s = n.name if no_type else self._generate_decl(n)
+ if n.bitsize: s += ' : ' + self.visit(n.bitsize)
+ if n.init:
+ s += ' = ' + self._visit_expr(n.init)
+ return s
+
+ def visit_DeclList(self, n):
+ s = self.visit(n.decls[0])
+ if len(n.decls) > 1:
+ s += ', ' + ', '.join(self.visit_Decl(decl, no_type=True)
+ for decl in n.decls[1:])
+ return s
+
+ def visit_Typedef(self, n):
+ s = ''
+ if n.storage: s += ' '.join(n.storage) + ' '
+ s += self._generate_type(n.type)
+ return s
+
+ def visit_Cast(self, n):
+ s = '(' + self._generate_type(n.to_type, emit_declname=False) + ')'
+ return s + ' ' + self._parenthesize_unless_simple(n.expr)
+
+ def visit_ExprList(self, n):
+ visited_subexprs = []
+ for expr in n.exprs:
+ visited_subexprs.append(self._visit_expr(expr))
+ return ', '.join(visited_subexprs)
+
+ def visit_InitList(self, n):
+ visited_subexprs = []
+ for expr in n.exprs:
+ visited_subexprs.append(self._visit_expr(expr))
+ return ', '.join(visited_subexprs)
+
+ def visit_Enum(self, n):
+ return self._generate_struct_union_enum(n, name='enum')
+
+ def visit_Alignas(self, n):
+ return '_Alignas({})'.format(self.visit(n.alignment))
+
+ def visit_Enumerator(self, n):
+ if not n.value:
+ return '{indent}{name},\n'.format(
+ indent=self._make_indent(),
+ name=n.name,
+ )
+ else:
+ return '{indent}{name} = {value},\n'.format(
+ indent=self._make_indent(),
+ name=n.name,
+ value=self.visit(n.value),
+ )
+
+ def visit_FuncDef(self, n):
+ decl = self.visit(n.decl)
+ self.indent_level = 0
+ body = self.visit(n.body)
+ if n.param_decls:
+ knrdecls = ';\n'.join(self.visit(p) for p in n.param_decls)
+ return decl + '\n' + knrdecls + ';\n' + body + '\n'
+ else:
+ return decl + '\n' + body + '\n'
+
+ def visit_FileAST(self, n):
+ s = ''
+ for ext in n.ext:
+ if isinstance(ext, c_ast.FuncDef):
+ s += self.visit(ext)
+ elif isinstance(ext, c_ast.Pragma):
+ s += self.visit(ext) + '\n'
+ else:
+ s += self.visit(ext) + ';\n'
+ return s
+
+ def visit_Compound(self, n):
+ s = self._make_indent() + '{\n'
+ self.indent_level += 2
+ if n.block_items:
+ s += ''.join(self._generate_stmt(stmt) for stmt in n.block_items)
+ self.indent_level -= 2
+ s += self._make_indent() + '}\n'
+ return s
+
+ def visit_CompoundLiteral(self, n):
+ return '(' + self.visit(n.type) + '){' + self.visit(n.init) + '}'
+
+ def visit_EmptyStatement(self, n):
+ return ';'
+
+ def visit_ParamList(self, n):
+ return ', '.join(self.visit(param) for param in n.params)
+
+ def visit_Return(self, n):
+ s = 'return'
+ if n.expr: s += ' ' + self.visit(n.expr)
+ return s + ';'
+
+ def visit_Break(self, n):
+ return 'break;'
+
+ def visit_Continue(self, n):
+ return 'continue;'
+
+ def visit_TernaryOp(self, n):
+ s = '(' + self._visit_expr(n.cond) + ') ? '
+ s += '(' + self._visit_expr(n.iftrue) + ') : '
+ s += '(' + self._visit_expr(n.iffalse) + ')'
+ return s
+
+ def visit_If(self, n):
+ s = 'if ('
+ if n.cond: s += self.visit(n.cond)
+ s += ')\n'
+ s += self._generate_stmt(n.iftrue, add_indent=True)
+ if n.iffalse:
+ s += self._make_indent() + 'else\n'
+ s += self._generate_stmt(n.iffalse, add_indent=True)
+ return s
+
+ def visit_For(self, n):
+ s = 'for ('
+ if n.init: s += self.visit(n.init)
+ s += ';'
+ if n.cond: s += ' ' + self.visit(n.cond)
+ s += ';'
+ if n.next: s += ' ' + self.visit(n.next)
+ s += ')\n'
+ s += self._generate_stmt(n.stmt, add_indent=True)
+ return s
+
+ def visit_While(self, n):
+ s = 'while ('
+ if n.cond: s += self.visit(n.cond)
+ s += ')\n'
+ s += self._generate_stmt(n.stmt, add_indent=True)
+ return s
+
+ def visit_DoWhile(self, n):
+ s = 'do\n'
+ s += self._generate_stmt(n.stmt, add_indent=True)
+ s += self._make_indent() + 'while ('
+ if n.cond: s += self.visit(n.cond)
+ s += ');'
+ return s
+
+ def visit_StaticAssert(self, n):
+ s = '_Static_assert('
+ s += self.visit(n.cond)
+ if n.message:
+ s += ','
+ s += self.visit(n.message)
+ s += ')'
+ return s
+
+ def visit_Switch(self, n):
+ s = 'switch (' + self.visit(n.cond) + ')\n'
+ s += self._generate_stmt(n.stmt, add_indent=True)
+ return s
+
+ def visit_Case(self, n):
+ s = 'case ' + self.visit(n.expr) + ':\n'
+ for stmt in n.stmts:
+ s += self._generate_stmt(stmt, add_indent=True)
+ return s
+
+ def visit_Default(self, n):
+ s = 'default:\n'
+ for stmt in n.stmts:
+ s += self._generate_stmt(stmt, add_indent=True)
+ return s
+
+ def visit_Label(self, n):
+ return n.name + ':\n' + self._generate_stmt(n.stmt)
+
+ def visit_Goto(self, n):
+ return 'goto ' + n.name + ';'
+
+ def visit_EllipsisParam(self, n):
+ return '...'
+
+ def visit_Struct(self, n):
+ return self._generate_struct_union_enum(n, 'struct')
+
+ def visit_Typename(self, n):
+ return self._generate_type(n.type)
+
+ def visit_Union(self, n):
+ return self._generate_struct_union_enum(n, 'union')
+
+ def visit_NamedInitializer(self, n):
+ s = ''
+ for name in n.name:
+ if isinstance(name, c_ast.ID):
+ s += '.' + name.name
+ else:
+ s += '[' + self.visit(name) + ']'
+ s += ' = ' + self._visit_expr(n.expr)
+ return s
+
+ def visit_FuncDecl(self, n):
+ return self._generate_type(n)
+
+ def visit_ArrayDecl(self, n):
+ return self._generate_type(n, emit_declname=False)
+
+ def visit_TypeDecl(self, n):
+ return self._generate_type(n, emit_declname=False)
+
+ def visit_PtrDecl(self, n):
+ return self._generate_type(n, emit_declname=False)
+
+ def _generate_struct_union_enum(self, n, name):
+ """ Generates code for structs, unions, and enums. name should be
+ 'struct', 'union', or 'enum'.
+ """
+ if name in ('struct', 'union'):
+ members = n.decls
+ body_function = self._generate_struct_union_body
+ else:
+ assert name == 'enum'
+ members = None if n.values is None else n.values.enumerators
+ body_function = self._generate_enum_body
+ s = name + ' ' + (n.name or '')
+ if members is not None:
+ # None means no members
+ # Empty sequence means an empty list of members
+ s += '\n'
+ s += self._make_indent()
+ self.indent_level += 2
+ s += '{\n'
+ s += body_function(members)
+ self.indent_level -= 2
+ s += self._make_indent() + '}'
+ return s
+
+ def _generate_struct_union_body(self, members):
+ return ''.join(self._generate_stmt(decl) for decl in members)
+
+ def _generate_enum_body(self, members):
+ # Each enumerator renders as '<name>,\n'; `[:-2] + '\n'` drops the
+ # trailing ',\n' of the last one and re-adds the final newline.
+ return ''.join(self.visit(value) for value in members)[:-2] + '\n'
+
+ def _generate_stmt(self, n, add_indent=False):
+ """ Generation from a statement node. This method exists as a wrapper
+ for individual visit_* methods to handle different treatment of
+ some statements in this context.
+ """
+ typ = type(n)
+ if add_indent: self.indent_level += 2
+ indent = self._make_indent()
+ if add_indent: self.indent_level -= 2
+
+ if typ in (
+ c_ast.Decl, c_ast.Assignment, c_ast.Cast, c_ast.UnaryOp,
+ c_ast.BinaryOp, c_ast.TernaryOp, c_ast.FuncCall, c_ast.ArrayRef,
+ c_ast.StructRef, c_ast.Constant, c_ast.ID, c_ast.Typedef,
+ c_ast.ExprList):
+ # These can also appear in an expression context so no semicolon
+ # is added to them automatically
+ #
+ return indent + self.visit(n) + ';\n'
+ elif typ in (c_ast.Compound,):
+ # No extra indentation required before the opening brace of a
+ # compound - because it consists of multiple lines it has to
+ # compute its own indentation.
+ #
+ return self.visit(n)
+ elif typ in (c_ast.If,):
+ return indent + self.visit(n)
+ else:
+ return indent + self.visit(n) + '\n'
+
+ def _generate_decl(self, n):
+ """ Generation from a Decl node.
+ """
+ s = ''
+ if n.funcspec: s = ' '.join(n.funcspec) + ' '
+ if n.storage: s += ' '.join(n.storage) + ' '
+ if n.align: s += self.visit(n.align[0]) + ' '
+ s += self._generate_type(n.type)
+ return s
+
+ def _generate_type(self, n, modifiers=[], emit_declname=True):
+ """ Recursive generation from a type node. n is the type node.
+ modifiers collects the PtrDecl, ArrayDecl and FuncDecl modifiers
+ encountered on the way down to a TypeDecl, to allow proper
+ generation from it.
+ """
+ typ = type(n)
+ #~ print(n, modifiers)
+
+ if typ == c_ast.TypeDecl:
+ s = ''
+ if n.quals: s += ' '.join(n.quals) + ' '
+ s += self.visit(n.type)
+
+ nstr = n.declname if n.declname and emit_declname else ''
+ # Resolve modifiers.
+ # Wrap in parens to distinguish pointer to array and pointer to
+ # function syntax.
+ #
+ for i, modifier in enumerate(modifiers):
+ if isinstance(modifier, c_ast.ArrayDecl):
+ if (i != 0 and
+ isinstance(modifiers[i - 1], c_ast.PtrDecl)):
+ nstr = '(' + nstr + ')'
+ nstr += '['
+ if modifier.dim_quals:
+ nstr += ' '.join(modifier.dim_quals) + ' '
+ nstr += self.visit(modifier.dim) + ']'
+ elif isinstance(modifier, c_ast.FuncDecl):
+ if (i != 0 and
+ isinstance(modifiers[i - 1], c_ast.PtrDecl)):
+ nstr = '(' + nstr + ')'
+ nstr += '(' + self.visit(modifier.args) + ')'
+ elif isinstance(modifier, c_ast.PtrDecl):
+ if modifier.quals:
+ nstr = '* %s%s' % (' '.join(modifier.quals),
+ ' ' + nstr if nstr else '')
+ else:
+ nstr = '*' + nstr
+ if nstr: s += ' ' + nstr
+ return s
+ elif typ == c_ast.Decl:
+ return self._generate_decl(n.type)
+ elif typ == c_ast.Typename:
+ return self._generate_type(n.type, emit_declname=emit_declname)
+ elif typ == c_ast.IdentifierType:
+ return ' '.join(n.names) + ' '
+ elif typ in (c_ast.ArrayDecl, c_ast.PtrDecl, c_ast.FuncDecl):
+ return self._generate_type(n.type, modifiers + [n],
+ emit_declname=emit_declname)
+ else:
+ return self.visit(n)
+
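+ # Worked example (comment only): for "int (*p)[5]" the recursion reaches
+ # the TypeDecl with modifiers == [PtrDecl, ArrayDecl]. The loop first
+ # prefixes '*' (nstr == '*p'), then sees an ArrayDecl following a PtrDecl
+ # and wraps before appending the dimension, yielding '(*p)[5]' rather
+ # than '*p[5]' (which would declare an array of pointers instead).
+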
+ def _parenthesize_if(self, n, condition):
+ """ Visits 'n' and returns its string representation, parenthesized
+ if the condition function applied to the node returns True.
+ """
+ s = self._visit_expr(n)
+ if condition(n):
+ return '(' + s + ')'
+ else:
+ return s
+
+ def _parenthesize_unless_simple(self, n):
+ """ Common use case for _parenthesize_if
+ """
+ return self._parenthesize_if(n, lambda d: not self._is_simple_node(d))
+
+ def _is_simple_node(self, n):
+ """ Returns True for nodes that are "simple" - i.e. nodes that always
+ have higher precedence than operators.
+ """
+ return isinstance(n, (c_ast.Constant, c_ast.ID, c_ast.ArrayRef,
+ c_ast.StructRef, c_ast.FuncCall))
diff --git a/billinglayer/python/pycparser/c_lexer.py b/billinglayer/python/pycparser/c_lexer.py
new file mode 100644
index 0000000..d68d8eb
--- /dev/null
+++ b/billinglayer/python/pycparser/c_lexer.py
@@ -0,0 +1,554 @@
+#------------------------------------------------------------------------------
+# pycparser: c_lexer.py
+#
+# CLexer class: lexer for the C language
+#
+# Eli Bendersky [https://eli.thegreenplace.net/]
+# License: BSD
+#------------------------------------------------------------------------------
+import re
+
+from .ply import lex
+from .ply.lex import TOKEN
+
+
+class CLexer(object):
+ """ A lexer for the C language. After building it, set the
+ input text with input(), and call token() to get new
+ tokens.
+
+ The public attribute filename can be set to an initial
+ filename, but the lexer will update it upon #line
+ directives.
+ """
+ def __init__(self, error_func, on_lbrace_func, on_rbrace_func,
+ type_lookup_func):
+ """ Create a new Lexer.
+
+ error_func:
+ An error function. Will be called with an error
+ message, line and column as arguments, in case of
+ an error during lexing.
+
+ on_lbrace_func, on_rbrace_func:
+ Called when an LBRACE or RBRACE is encountered
+ (likely to push/pop type_lookup_func's scope)
+
+ type_lookup_func:
+ A type lookup function. Given a string, it must
+ return True IFF this string is a name of a type
+ that was defined with a typedef earlier.
+ """
+ self.error_func = error_func
+ self.on_lbrace_func = on_lbrace_func
+ self.on_rbrace_func = on_rbrace_func
+ self.type_lookup_func = type_lookup_func
+ self.filename = ''
+
+ # Keeps track of the last token returned from self.token()
+ self.last_token = None
+
+ # Allow either "# line" or "# <num>" to support GCC's
+ # cpp output
+ #
+ self.line_pattern = re.compile(r'([ \t]*line\W)|([ \t]*\d+)')
+ self.pragma_pattern = re.compile(r'[ \t]*pragma\W')
+
+ def build(self, **kwargs):
+ """ Builds the lexer from the specification. Must be
+ called after the lexer object is created.
+
+ This method exists separately, because the PLY
+ manual warns against calling lex.lex inside
+ __init__
+ """
+ self.lexer = lex.lex(object=self, **kwargs)
+
+ def reset_lineno(self):
+ """ Resets the internal line number counter of the lexer.
+ """
+ self.lexer.lineno = 1
+
+ def input(self, text):
+ self.lexer.input(text)
+
+ def token(self):
+ self.last_token = self.lexer.token()
+ return self.last_token
+
+ def find_tok_column(self, token):
+ """ Find the column of the token in its line.
+ """
+ last_cr = self.lexer.lexdata.rfind('\n', 0, token.lexpos)
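+ # Note: rfind returns -1 when the token is on the first line, so the
+ # subtraction below yields a 1-based column in every case.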
+ return token.lexpos - last_cr
+
+ ######################-- PRIVATE --######################
+
+ ##
+ ## Internal auxiliary methods
+ ##
+ def _error(self, msg, token):
+ location = self._make_tok_location(token)
+ self.error_func(msg, location[0], location[1])
+ self.lexer.skip(1)
+
+ def _make_tok_location(self, token):
+ return (token.lineno, self.find_tok_column(token))
+
+ ##
+ ## Reserved keywords
+ ##
+ keywords = (
+ 'AUTO', 'BREAK', 'CASE', 'CHAR', 'CONST',
+ 'CONTINUE', 'DEFAULT', 'DO', 'DOUBLE', 'ELSE', 'ENUM', 'EXTERN',
+ 'FLOAT', 'FOR', 'GOTO', 'IF', 'INLINE', 'INT', 'LONG',
+ 'REGISTER', 'OFFSETOF',
+ 'RESTRICT', 'RETURN', 'SHORT', 'SIGNED', 'SIZEOF', 'STATIC', 'STRUCT',
+ 'SWITCH', 'TYPEDEF', 'UNION', 'UNSIGNED', 'VOID',
+ 'VOLATILE', 'WHILE', '__INT128',
+ )
+
+ keywords_new = (
+ '_BOOL', '_COMPLEX',
+ '_NORETURN', '_THREAD_LOCAL', '_STATIC_ASSERT',
+ '_ATOMIC', '_ALIGNOF', '_ALIGNAS',
+ )
+
+ keyword_map = {}
+
+ for keyword in keywords:
+ keyword_map[keyword.lower()] = keyword
+
+ for keyword in keywords_new:
+ keyword_map[keyword[:2].upper() + keyword[2:].lower()] = keyword
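+ # The resulting entries look like {'auto': 'AUTO', '_Bool': '_BOOL',
+ # '_Static_assert': '_STATIC_ASSERT'}: C11 keywords keep their leading
+ # '_X' capitalization, hence the [:2].upper() + [2:].lower() split above.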
+
+ ##
+ ## All the tokens recognized by the lexer
+ ##
+ tokens = keywords + keywords_new + (
+ # Identifiers
+ 'ID',
+
+ # Type identifiers (identifiers previously defined as
+ # types with typedef)
+ 'TYPEID',
+
+ # constants
+ 'INT_CONST_DEC', 'INT_CONST_OCT', 'INT_CONST_HEX', 'INT_CONST_BIN', 'INT_CONST_CHAR',
+ 'FLOAT_CONST', 'HEX_FLOAT_CONST',
+ 'CHAR_CONST',
+ 'WCHAR_CONST',
+ 'U8CHAR_CONST',
+ 'U16CHAR_CONST',
+ 'U32CHAR_CONST',
+
+ # String literals
+ 'STRING_LITERAL',
+ 'WSTRING_LITERAL',
+ 'U8STRING_LITERAL',
+ 'U16STRING_LITERAL',
+ 'U32STRING_LITERAL',
+
+ # Operators
+ 'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'MOD',
+ 'OR', 'AND', 'NOT', 'XOR', 'LSHIFT', 'RSHIFT',
+ 'LOR', 'LAND', 'LNOT',
+ 'LT', 'LE', 'GT', 'GE', 'EQ', 'NE',
+
+ # Assignment
+ 'EQUALS', 'TIMESEQUAL', 'DIVEQUAL', 'MODEQUAL',
+ 'PLUSEQUAL', 'MINUSEQUAL',
+ 'LSHIFTEQUAL','RSHIFTEQUAL', 'ANDEQUAL', 'XOREQUAL',
+ 'OREQUAL',
+
+ # Increment/decrement
+ 'PLUSPLUS', 'MINUSMINUS',
+
+ # Structure dereference (->)
+ 'ARROW',
+
+ # Conditional operator (?)
+ 'CONDOP',
+
+ # Delimiters
+ 'LPAREN', 'RPAREN', # ( )
+ 'LBRACKET', 'RBRACKET', # [ ]
+ 'LBRACE', 'RBRACE', # { }
+ 'COMMA', 'PERIOD', # , .
+ 'SEMI', 'COLON', # ; :
+
+ # Ellipsis (...)
+ 'ELLIPSIS',
+
+ # pre-processor
+ 'PPHASH', # '#'
+ 'PPPRAGMA', # 'pragma'
+ 'PPPRAGMASTR',
+ )
+
+ ##
+ ## Regexes for use in tokens
+ ##
+
+ # valid C identifiers (K&R2: A.2.3), plus '$' (supported by some compilers)
+ identifier = r'[a-zA-Z_$][0-9a-zA-Z_$]*'
+
+ hex_prefix = '0[xX]'
+ hex_digits = '[0-9a-fA-F]+'
+ bin_prefix = '0[bB]'
+ bin_digits = '[01]+'
+
+ # integer constants (K&R2: A.2.5.1)
+ integer_suffix_opt = r'(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?'
+ decimal_constant = '(0'+integer_suffix_opt+')|([1-9][0-9]*'+integer_suffix_opt+')'
+ octal_constant = '0[0-7]*'+integer_suffix_opt
+ hex_constant = hex_prefix+hex_digits+integer_suffix_opt
+ bin_constant = bin_prefix+bin_digits+integer_suffix_opt
+
+ bad_octal_constant = '0[0-7]*[89]'
+
+ # character constants (K&R2: A.2.5.2)
+ # Note: a-zA-Z and '.-~^_!=&;,' are allowed as escape chars to support #line
+ # directives with Windows paths as filenames (..\..\dir\file)
+ # For the same reason, decimal_escape allows all digit sequences. We want to
+ # parse all correct code, even if it means to sometimes parse incorrect
+ # code.
+ #
+ # The original regexes were taken verbatim from the C syntax definition,
+ # and were later modified to avoid worst-case exponential running time.
+ #
+ # simple_escape = r"""([a-zA-Z._~!=&\^\-\\?'"])"""
+ # decimal_escape = r"""(\d+)"""
+ # hex_escape = r"""(x[0-9a-fA-F]+)"""
+ # bad_escape = r"""([\\][^a-zA-Z._~^!=&\^\-\\?'"x0-7])"""
+ #
+ # The following modifications were made to avoid the ambiguity that allowed backtracking:
+ # (https://github.com/eliben/pycparser/issues/61)
+ #
+ # - \x was removed from simple_escape (except when not followed by a hex digit), to avoid ambiguity with hex_escape.
+ # - hex_escape allows one or more hex characters, but requires that the next character (if any) is not hex
+ # - decimal_escape allows one or more decimal characters, but requires that the next character (if any) is not a decimal
+ # - bad_escape does not allow any decimals (8-9), to avoid conflicting with the permissive decimal_escape.
+ #
+ # Without this change, python's `re` module would recursively try parsing each ambiguous escape sequence in multiple ways.
+ # e.g. `\123` could be parsed as `\1`+`23`, `\12`+`3`, and `\123`.
+
+ simple_escape = r"""([a-wyzA-Z._~!=&\^\-\\?'"]|x(?![0-9a-fA-F]))"""
+ decimal_escape = r"""(\d+)(?!\d)"""
+ hex_escape = r"""(x[0-9a-fA-F]+)(?![0-9a-fA-F])"""
+ bad_escape = r"""([\\][^a-zA-Z._~^!=&\^\-\\?'"x0-9])"""
+
+ escape_sequence = r"""(\\("""+simple_escape+'|'+decimal_escape+'|'+hex_escape+'))'
+
+ # These lookahead-heavy regexes could be slow on strings; since every valid escape (including \x) allows
+ # 0 or more non-escaped characters after the first one, simple_escape+decimal_escape+hex_escape simplifies to
+
+ escape_sequence_start_in_string = r"""(\\[0-9a-zA-Z._~!=&\^\-\\?'"])"""
+
+ cconst_char = r"""([^'\\\n]|"""+escape_sequence+')'
+ char_const = "'"+cconst_char+"'"
+ wchar_const = 'L'+char_const
+ u8char_const = 'u8'+char_const
+ u16char_const = 'u'+char_const
+ u32char_const = 'U'+char_const
+ multicharacter_constant = "'"+cconst_char+"{2,4}'"
+ unmatched_quote = "('"+cconst_char+"*\\n)|('"+cconst_char+"*$)"
+ bad_char_const = r"""('"""+cconst_char+"""[^'\n]+')|('')|('"""+bad_escape+r"""[^'\n]*')"""
+
+ # string literals (K&R2: A.2.6)
+ string_char = r"""([^"\\\n]|"""+escape_sequence_start_in_string+')'
+ string_literal = '"'+string_char+'*"'
+ wstring_literal = 'L'+string_literal
+ u8string_literal = 'u8'+string_literal
+ u16string_literal = 'u'+string_literal
+ u32string_literal = 'U'+string_literal
+ bad_string_literal = '"'+string_char+'*'+bad_escape+string_char+'*"'
+
+ # floating constants (K&R2: A.2.5.3)
+ exponent_part = r"""([eE][-+]?[0-9]+)"""
+ fractional_constant = r"""([0-9]*\.[0-9]+)|([0-9]+\.)"""
+ floating_constant = '(((('+fractional_constant+')'+exponent_part+'?)|([0-9]+'+exponent_part+'))[FfLl]?)'
+ binary_exponent_part = r'''([pP][+-]?[0-9]+)'''
+ hex_fractional_constant = '((('+hex_digits+r""")?\."""+hex_digits+')|('+hex_digits+r"""\.))"""
+ hex_floating_constant = '('+hex_prefix+'('+hex_digits+'|'+hex_fractional_constant+')'+binary_exponent_part+'[FfLl]?)'
+
+ ##
+ ## Lexer states: used for preprocessor \n-terminated directives
+ ##
+ states = (
+ # ppline: preprocessor line directives
+ #
+ ('ppline', 'exclusive'),
+
+ # pppragma: pragma
+ #
+ ('pppragma', 'exclusive'),
+ )
+
+ def t_PPHASH(self, t):
+ r'[ \t]*\#'
+ if self.line_pattern.match(t.lexer.lexdata, pos=t.lexer.lexpos):
+ t.lexer.begin('ppline')
+ self.pp_line = self.pp_filename = None
+ elif self.pragma_pattern.match(t.lexer.lexdata, pos=t.lexer.lexpos):
+ t.lexer.begin('pppragma')
+ else:
+ t.type = 'PPHASH'
+ return t
+
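+ # Dispatch sketch (comment only): GCC cpp output such as
+ #   # 1 "example.c"
+ # matches line_pattern right after the hash, so the lexer enters the
+ # 'ppline' state and picks up the line number and filename; a directive
+ # like
+ #   #pragma once
+ # matches pragma_pattern and enters 'pppragma'; any other '#' is simply
+ # returned as a PPHASH token.
+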
+ ##
+ ## Rules for the ppline state
+ ##
+ @TOKEN(string_literal)
+ def t_ppline_FILENAME(self, t):
+ if self.pp_line is None:
+ self._error('filename before line number in #line', t)
+ else:
+ self.pp_filename = t.value.lstrip('"').rstrip('"')
+
+ @TOKEN(decimal_constant)
+ def t_ppline_LINE_NUMBER(self, t):
+ if self.pp_line is None:
+ self.pp_line = t.value
+ else:
+ # Ignore: GCC's cpp sometimes inserts a numeric flag
+ # after the file name
+ pass
+
+ def t_ppline_NEWLINE(self, t):
+ r'\n'
+ if self.pp_line is None:
+ self._error('line number missing in #line', t)
+ else:
+ self.lexer.lineno = int(self.pp_line)
+
+ if self.pp_filename is not None:
+ self.filename = self.pp_filename
+
+ t.lexer.begin('INITIAL')
+
+ def t_ppline_PPLINE(self, t):
+ r'line'
+ pass
+
+ t_ppline_ignore = ' \t'
+
+ def t_ppline_error(self, t):
+ self._error('invalid #line directive', t)
+
+ ##
+ ## Rules for the pppragma state
+ ##
+ def t_pppragma_NEWLINE(self, t):
+ r'\n'
+ t.lexer.lineno += 1
+ t.lexer.begin('INITIAL')
+
+ def t_pppragma_PPPRAGMA(self, t):
+ r'pragma'
+ return t
+
+ t_pppragma_ignore = ' \t'
+
+ def t_pppragma_STR(self, t):
+ '.+'
+ t.type = 'PPPRAGMASTR'
+ return t
+
+ def t_pppragma_error(self, t):
+ self._error('invalid #pragma directive', t)
+
+ ##
+ ## Rules for the normal state
+ ##
+ t_ignore = ' \t'
+
+ # Newlines
+ def t_NEWLINE(self, t):
+ r'\n+'
+ t.lexer.lineno += t.value.count("\n")
+
+ # Operators
+ t_PLUS = r'\+'
+ t_MINUS = r'-'
+ t_TIMES = r'\*'
+ t_DIVIDE = r'/'
+ t_MOD = r'%'
+ t_OR = r'\|'
+ t_AND = r'&'
+ t_NOT = r'~'
+ t_XOR = r'\^'
+ t_LSHIFT = r'<<'
+ t_RSHIFT = r'>>'
+ t_LOR = r'\|\|'
+ t_LAND = r'&&'
+ t_LNOT = r'!'
+ t_LT = r'<'
+ t_GT = r'>'
+ t_LE = r'<='
+ t_GE = r'>='
+ t_EQ = r'=='
+ t_NE = r'!='
+
+ # Assignment operators
+ t_EQUALS = r'='
+ t_TIMESEQUAL = r'\*='
+ t_DIVEQUAL = r'/='
+ t_MODEQUAL = r'%='
+ t_PLUSEQUAL = r'\+='
+ t_MINUSEQUAL = r'-='
+ t_LSHIFTEQUAL = r'<<='
+ t_RSHIFTEQUAL = r'>>='
+ t_ANDEQUAL = r'&='
+ t_OREQUAL = r'\|='
+ t_XOREQUAL = r'\^='
+
+ # Increment/decrement
+ t_PLUSPLUS = r'\+\+'
+ t_MINUSMINUS = r'--'
+
+ # ->
+ t_ARROW = r'->'
+
+ # ?
+ t_CONDOP = r'\?'
+
+ # Delimiters
+ t_LPAREN = r'\('
+ t_RPAREN = r'\)'
+ t_LBRACKET = r'\['
+ t_RBRACKET = r'\]'
+ t_COMMA = r','
+ t_PERIOD = r'\.'
+ t_SEMI = r';'
+ t_COLON = r':'
+ t_ELLIPSIS = r'\.\.\.'
+
+ # Scope delimiters
+ # To see why on_lbrace_func is needed, consider:
+ # typedef char TT;
+ # void foo(int TT) { TT = 10; }
+ # TT x = 5;
+ # Outside the function, TT is a typedef, but inside (starting and ending
+ # with the braces) it's a parameter. The trouble begins with yacc's
+ # lookahead token. If we open a new scope in brace_open, then TT has
+ # already been read and incorrectly interpreted as TYPEID. So, we need
+ # to open and close scopes from within the lexer.
+ # Similar for the TT immediately outside the end of the function.
+ #
+ @TOKEN(r'\{')
+ def t_LBRACE(self, t):
+ self.on_lbrace_func()
+ return t
+ @TOKEN(r'\}')
+ def t_RBRACE(self, t):
+ self.on_rbrace_func()
+ return t
+
+ t_STRING_LITERAL = string_literal
+
+ # The following floating and integer constants are defined as
+ # functions to impose a strict order (otherwise, decimal
+ # is placed before the others because its regex is longer,
+ # and this is bad)
+ #
+ @TOKEN(floating_constant)
+ def t_FLOAT_CONST(self, t):
+ return t
+
+ @TOKEN(hex_floating_constant)
+ def t_HEX_FLOAT_CONST(self, t):
+ return t
+
+ @TOKEN(hex_constant)
+ def t_INT_CONST_HEX(self, t):
+ return t
+
+ @TOKEN(bin_constant)
+ def t_INT_CONST_BIN(self, t):
+ return t
+
+ @TOKEN(bad_octal_constant)
+ def t_BAD_CONST_OCT(self, t):
+ msg = "Invalid octal constant"
+ self._error(msg, t)
+
+ @TOKEN(octal_constant)
+ def t_INT_CONST_OCT(self, t):
+ return t
+
+ @TOKEN(decimal_constant)
+ def t_INT_CONST_DEC(self, t):
+ return t
+
+ # Must come before bad_char_const, to prevent it from
+ # catching valid char constants as invalid
+ #
+ @TOKEN(multicharacter_constant)
+ def t_INT_CONST_CHAR(self, t):
+ return t
+
+ @TOKEN(char_const)
+ def t_CHAR_CONST(self, t):
+ return t
+
+ @TOKEN(wchar_const)
+ def t_WCHAR_CONST(self, t):
+ return t
+
+ @TOKEN(u8char_const)
+ def t_U8CHAR_CONST(self, t):
+ return t
+
+ @TOKEN(u16char_const)
+ def t_U16CHAR_CONST(self, t):
+ return t
+
+ @TOKEN(u32char_const)
+ def t_U32CHAR_CONST(self, t):
+ return t
+
+ @TOKEN(unmatched_quote)
+ def t_UNMATCHED_QUOTE(self, t):
+ msg = "Unmatched '"
+ self._error(msg, t)
+
+ @TOKEN(bad_char_const)
+ def t_BAD_CHAR_CONST(self, t):
+ msg = "Invalid char constant %s" % t.value
+ self._error(msg, t)
+
+ @TOKEN(wstring_literal)
+ def t_WSTRING_LITERAL(self, t):
+ return t
+
+ @TOKEN(u8string_literal)
+ def t_U8STRING_LITERAL(self, t):
+ return t
+
+ @TOKEN(u16string_literal)
+ def t_U16STRING_LITERAL(self, t):
+ return t
+
+ @TOKEN(u32string_literal)
+ def t_U32STRING_LITERAL(self, t):
+ return t
+
+ # unmatched string literals are caught by the preprocessor
+
+ @TOKEN(bad_string_literal)
+ def t_BAD_STRING_LITERAL(self, t):
+ msg = "String contains invalid escape code"
+ self._error(msg, t)
+
+ @TOKEN(identifier)
+ def t_ID(self, t):
+ t.type = self.keyword_map.get(t.value, "ID")
+ if t.type == 'ID' and self.type_lookup_func(t.value):
+ t.type = "TYPEID"
+ return t
+
+ def t_error(self, t):
+ msg = 'Illegal character %s' % repr(t.value[0])
+ self._error(msg, t)
diff --git a/billinglayer/python/pycparser/c_parser.py b/billinglayer/python/pycparser/c_parser.py
new file mode 100644
index 0000000..640a759
--- /dev/null
+++ b/billinglayer/python/pycparser/c_parser.py
@@ -0,0 +1,1936 @@
+#------------------------------------------------------------------------------
+# pycparser: c_parser.py
+#
+# CParser class: Parser and AST builder for the C language
+#
+# Eli Bendersky [https://eli.thegreenplace.net/]
+# License: BSD
+#------------------------------------------------------------------------------
+from .ply import yacc
+
+from . import c_ast
+from .c_lexer import CLexer
+from .plyparser import PLYParser, ParseError, parameterized, template
+from .ast_transforms import fix_switch_cases, fix_atomic_specifiers
+
+
+@template
+class CParser(PLYParser):
+ def __init__(
+ self,
+ lex_optimize=True,
+ lexer=CLexer,
+ lextab='pycparser.lextab',
+ yacc_optimize=True,
+ yacctab='pycparser.yacctab',
+ yacc_debug=False,
+ taboutputdir=''):
+ """ Create a new CParser.
+
+ Some arguments for controlling the debug/optimization
+ level of the parser are provided. The defaults are
+ tuned for release/performance mode.
+ The simple rules for using them are:
+ *) When tweaking CParser/CLexer, set these to False
+ *) When releasing a stable parser, set to True
+
+ lex_optimize:
+ Set to False when you're modifying the lexer.
+ Otherwise, changes in the lexer won't be used, if
+ some lextab.py file exists.
+ When releasing with a stable lexer, set to True
+ to save the re-generation of the lexer table on
+ each run.
+
+ lexer:
+ Set this parameter to define the lexer to use if
+ you're not using the default CLexer.
+
+ lextab:
+ Points to the lex table that's used for optimized
+ mode. Only if you're modifying the lexer and want
+ some tests to avoid re-generating the table, make
+ this point to a local lex table file (that's been
+ earlier generated with lex_optimize=True)
+
+ yacc_optimize:
+ Set to False when you're modifying the parser.
+ Otherwise, changes in the parser won't be used, if
+ some parsetab.py file exists.
+ When releasing with a stable parser, set to True
+ to save the re-generation of the parser table on
+ each run.
+
+ yacctab:
+ Points to the yacc table that's used for optimized
+ mode. Only if you're modifying the parser, make
+ this point to a local yacc table file
+
+ yacc_debug:
+ Generate a parser.out file that explains how yacc
+ built the parsing table from the grammar.
+
+ taboutputdir:
+ Set this parameter to control the location of generated
+ lextab and yacctab files.
+ """
+ self.clex = lexer(
+ error_func=self._lex_error_func,
+ on_lbrace_func=self._lex_on_lbrace_func,
+ on_rbrace_func=self._lex_on_rbrace_func,
+ type_lookup_func=self._lex_type_lookup_func)
+
+ self.clex.build(
+ optimize=lex_optimize,
+ lextab=lextab,
+ outputdir=taboutputdir)
+ self.tokens = self.clex.tokens
+
+ rules_with_opt = [
+ 'abstract_declarator',
+ 'assignment_expression',
+ 'declaration_list',
+ 'declaration_specifiers_no_type',
+ 'designation',
+ 'expression',
+ 'identifier_list',
+ 'init_declarator_list',
+ 'id_init_declarator_list',
+ 'initializer_list',
+ 'parameter_type_list',
+ 'block_item_list',
+ 'type_qualifier_list',
+ 'struct_declarator_list'
+ ]
+
+ for rule in rules_with_opt:
+ self._create_opt_rule(rule)
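+ # _create_opt_rule (inherited from PLYParser) is assumed to synthesize,
+ # for each rule name above, an optional variant of the form
+ #   xxx_opt : empty
+ #           | xxx
+ # which the productions below can reference (e.g. declaration_list_opt).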
+
+ self.cparser = yacc.yacc(
+ module=self,
+ start='translation_unit_or_empty',
+ debug=yacc_debug,
+ optimize=yacc_optimize,
+ tabmodule=yacctab,
+ outputdir=taboutputdir)
+
+ # Stack of scopes for keeping track of symbols. _scope_stack[-1] is
+ # the current (topmost) scope. Each scope is a dictionary that
+ # specifies whether a name is a type. If _scope_stack[n][name] is
+ # True, 'name' is currently a type in the scope. If it's False,
+ # 'name' is used in the scope but not as a type (for instance, if we
+ # saw: int name;).
+ # If 'name' is not a key in _scope_stack[n] then 'name' was not defined
+ # in this scope at all.
+ self._scope_stack = [dict()]
+
+ # Keeps track of the last token given to yacc (the lookahead token)
+ self._last_yielded_token = None
+
+ def parse(self, text, filename='', debug=False):
+ """ Parses C code and returns an AST.
+
+ text:
+ A string containing the C source code
+
+ filename:
+ Name of the file being parsed (for meaningful
+ error messages)
+
+ debug:
+ Debug flag to YACC
+ """
+ self.clex.filename = filename
+ self.clex.reset_lineno()
+ self._scope_stack = [dict()]
+ self._last_yielded_token = None
+ return self.cparser.parse(
+ input=text,
+ lexer=self.clex,
+ debug=debug)
+
+ ######################-- PRIVATE --######################
+
+ def _push_scope(self):
+ self._scope_stack.append(dict())
+
+ def _pop_scope(self):
+ assert len(self._scope_stack) > 1
+ self._scope_stack.pop()
+
+ def _add_typedef_name(self, name, coord):
+ """ Add a new typedef name (ie a TYPEID) to the current scope
+ """
+ if not self._scope_stack[-1].get(name, True):
+ self._parse_error(
+ "Typedef %r previously declared as non-typedef "
+ "in this scope" % name, coord)
+ self._scope_stack[-1][name] = True
+
+ def _add_identifier(self, name, coord):
+ """ Add a new object, function, or enum member name (ie an ID) to the
+ current scope
+ """
+ if self._scope_stack[-1].get(name, False):
+ self._parse_error(
+ "Non-typedef %r previously declared as typedef "
+ "in this scope" % name, coord)
+ self._scope_stack[-1][name] = False
+
+ def _is_type_in_scope(self, name):
+ """ Is *name* a typedef-name in the current scope?
+ """
+ for scope in reversed(self._scope_stack):
+ # If name is an identifier in this scope it shadows typedefs in
+ # higher scopes.
+ in_scope = scope.get(name)
+ if in_scope is not None: return in_scope
+ return False
+
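+ # Shadowing sketch (comment only, mirroring the t_LBRACE example in
+ # c_lexer.py): after
+ #   typedef char TT;
+ #   void foo(int TT) { ... }
+ # the stack inside foo is roughly [{'TT': True}, {'TT': False}], so
+ # _is_type_in_scope('TT') is False in the body and True again outside.
+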
+ def _lex_error_func(self, msg, line, column):
+ self._parse_error(msg, self._coord(line, column))
+
+ def _lex_on_lbrace_func(self):
+ self._push_scope()
+
+ def _lex_on_rbrace_func(self):
+ self._pop_scope()
+
+ def _lex_type_lookup_func(self, name):
+ """ Looks up types that were previously defined with
+ typedef.
+ Passed to the lexer for recognizing identifiers that
+ are types.
+ """
+ is_type = self._is_type_in_scope(name)
+ return is_type
+
+ def _get_yacc_lookahead_token(self):
+ """ We need access to yacc's lookahead token in certain cases.
+ This is the last token yacc requested from the lexer, so we
+ ask the lexer.
+ """
+ return self.clex.last_token
+
+ # To understand what's going on here, read sections A.8.5 and
+ # A.8.6 of K&R2 very carefully.
+ #
+ # A C type consists of a basic type declaration, with a list
+ # of modifiers. For example:
+ #
+ # int *c[5];
+ #
+ # The basic declaration here is 'int c', and the pointer and
+ # the array are the modifiers.
+ #
+ # Basic declarations are represented by TypeDecl (from module c_ast) and the
+ # modifiers are FuncDecl, PtrDecl and ArrayDecl.
+ #
+ # The standard states that whenever a new modifier is parsed, it should be
+ # added to the end of the list of modifiers. For example:
+ #
+ # K&R2 A.8.6.2: Array Declarators
+ #
+ # In a declaration T D where D has the form
+ # D1 [constant-expression-opt]
+ # and the type of the identifier in the declaration T D1 is
+ # "type-modifier T", the type of the
+ # identifier of D is "type-modifier array of T"
+ #
+ # This is what this method does. The declarator it receives
+ # can be a list of declarators ending with TypeDecl. It
+ # tacks the modifier to the end of this list, just before
+ # the TypeDecl.
+ #
+ # Additionally, the modifier may be a list itself. This is
+ # useful for pointers, which can come as a chain from the rule
+ # p_pointer. In this case, the whole modifier list is spliced
+ # into the new location.
+ def _type_modify_decl(self, decl, modifier):
+ """ Tacks a type modifier on a declarator, and returns
+ the modified declarator.
+
+ Note: the declarator and modifier may be modified
+ """
+ #~ print '****'
+ #~ decl.show(offset=3)
+ #~ modifier.show(offset=3)
+ #~ print '****'
+
+ modifier_head = modifier
+ modifier_tail = modifier
+
+ # The modifier may be a nested list. Reach its tail.
+ while modifier_tail.type:
+ modifier_tail = modifier_tail.type
+
+ # If the decl is a basic type, just tack the modifier onto it.
+ if isinstance(decl, c_ast.TypeDecl):
+ modifier_tail.type = decl
+ return modifier
+ else:
+ # Otherwise, the decl is a list of modifiers. Reach
+ # its tail and splice the modifier onto the tail,
+ # pointing to the underlying basic type.
+ decl_tail = decl
+
+ while not isinstance(decl_tail.type, c_ast.TypeDecl):
+ decl_tail = decl_tail.type
+
+ modifier_tail.type = decl_tail.type
+ decl_tail.type = modifier_head
+ return decl
+
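+ # Worked example (comment only): while parsing "int *c[5]" the
+ # ArrayDecl is built around TypeDecl('c') first, and the PtrDecl
+ # modifier arrives later from p_pointer; the splice above produces the
+ # chain ArrayDecl -> PtrDecl -> TypeDecl, i.e. "array of 5 pointers to
+ # int", matching C's inside-out declarator semantics.
+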
+ # Due to the order in which declarators are constructed,
+ # they have to be fixed in order to look like a normal AST.
+ #
+ # When a declaration arrives from syntax construction, it has
+ # these problems:
+ # * The innermost TypeDecl has no type (because the basic
+ # type is only known at the uppermost declaration level)
+ # * The declaration has no variable name, since that is saved
+ # in the innermost TypeDecl
+ # * The typename of the declaration is a list of type
+ # specifiers, and not a node. Here, basic identifier types
+ # should be separated from more complex types like enums
+ # and structs.
+ #
+ # This method fixes these problems.
+ def _fix_decl_name_type(self, decl, typename):
+ """ Fixes a declaration. Modifies decl.
+ """
+ # Reach the underlying basic type
+ #
+ type = decl
+ while not isinstance(type, c_ast.TypeDecl):
+ type = type.type
+
+ decl.name = type.declname
+ type.quals = decl.quals[:]
+
+ # The typename is a list of types. If any type in this
+ # list isn't an IdentifierType, it must be the only
+ # type in the list (it's illegal to declare "int enum ..")
+ # If all the types are basic, they're collected in the
+ # IdentifierType holder.
+ for tn in typename:
+ if not isinstance(tn, c_ast.IdentifierType):
+ if len(typename) > 1:
+ self._parse_error(
+ "Invalid multiple types specified", tn.coord)
+ else:
+ type.type = tn
+ return decl
+
+ if not typename:
+ # Functions default to returning int
+ #
+ if not isinstance(decl.type, c_ast.FuncDecl):
+ self._parse_error(
+ "Missing type in declaration", decl.coord)
+ type.type = c_ast.IdentifierType(
+ ['int'],
+ coord=decl.coord)
+ else:
+ # At this point, we know that typename is a list of IdentifierType
+ # nodes. Concatenate all the names into a single list.
+ #
+ type.type = c_ast.IdentifierType(
+ [name for id in typename for name in id.names],
+ coord=typename[0].coord)
+ return decl
+
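+ # For instance (comment only): "unsigned long x;" arrives here with
+ # typename == [IdentifierType(['unsigned']), IdentifierType(['long'])];
+ # the names are merged into a single IdentifierType(['unsigned', 'long'])
+ # attached to the innermost TypeDecl, and decl.name is set to 'x'.
+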
+ def _add_declaration_specifier(self, declspec, newspec, kind, append=False):
+ """ Declaration specifiers are represented by a dictionary
+ with the entries:
+ * qual: a list of type qualifiers
+ * storage: a list of storage type qualifiers
+ * type: a list of type specifiers
+ * function: a list of function specifiers
+ * alignment: a list of alignment specifiers
+
+ This method is given a declaration specifier, and a
+ new specifier of a given kind.
+ If `append` is True, the new specifier is added to the end of
+ the specifiers list, otherwise it's added at the beginning.
+ Returns the declaration specifier, with the new
+ specifier incorporated.
+ """
+ spec = declspec or dict(qual=[], storage=[], type=[], function=[], alignment=[])
+
+ if append:
+ spec[kind].append(newspec)
+ else:
+ spec[kind].insert(0, newspec)
+
+ return spec
+
+ def _build_declarations(self, spec, decls, typedef_namespace=False):
+ """ Builds a list of declarations all sharing the given specifiers.
+ If typedef_namespace is true, each declared name is added
+ to the "typedef namespace", which also includes objects,
+ functions, and enum constants.
+ """
+ is_typedef = 'typedef' in spec['storage']
+ declarations = []
+
+ # Bit-fields are allowed to be unnamed.
+ if decls[0].get('bitsize') is not None:
+ pass
+
+ # When redeclaring typedef names as identifiers in inner scopes, a
+ # problem can occur where the identifier gets grouped into
+ # spec['type'], leaving decl as None. This can only occur for the
+ # first declarator.
+ elif decls[0]['decl'] is None:
+ if len(spec['type']) < 2 or len(spec['type'][-1].names) != 1 or \
+ not self._is_type_in_scope(spec['type'][-1].names[0]):
+ coord = '?'
+ for t in spec['type']:
+ if hasattr(t, 'coord'):
+ coord = t.coord
+ break
+ self._parse_error('Invalid declaration', coord)
+
+ # Make this look as if it came from "direct_declarator:ID"
+ decls[0]['decl'] = c_ast.TypeDecl(
+ declname=spec['type'][-1].names[0],
+ type=None,
+ quals=None,
+ align=spec['alignment'],
+ coord=spec['type'][-1].coord)
+ # Remove the "new" type's name from the end of spec['type']
+ del spec['type'][-1]
+
+ # A similar problem can occur where the declaration ends up looking
+ # like an abstract declarator. Give it a name if this is the case.
+ elif not isinstance(decls[0]['decl'], (
+ c_ast.Enum, c_ast.Struct, c_ast.Union, c_ast.IdentifierType)):
+ decls_0_tail = decls[0]['decl']
+ while not isinstance(decls_0_tail, c_ast.TypeDecl):
+ decls_0_tail = decls_0_tail.type
+ if decls_0_tail.declname is None:
+ decls_0_tail.declname = spec['type'][-1].names[0]
+ del spec['type'][-1]
+
+ for decl in decls:
+ assert decl['decl'] is not None
+ if is_typedef:
+ declaration = c_ast.Typedef(
+ name=None,
+ quals=spec['qual'],
+ storage=spec['storage'],
+ type=decl['decl'],
+ coord=decl['decl'].coord)
+ else:
+ declaration = c_ast.Decl(
+ name=None,
+ quals=spec['qual'],
+ align=spec['alignment'],
+ storage=spec['storage'],
+ funcspec=spec['function'],
+ type=decl['decl'],
+ init=decl.get('init'),
+ bitsize=decl.get('bitsize'),
+ coord=decl['decl'].coord)
+
+ if isinstance(declaration.type, (
+ c_ast.Enum, c_ast.Struct, c_ast.Union,
+ c_ast.IdentifierType)):
+ fixed_decl = declaration
+ else:
+ fixed_decl = self._fix_decl_name_type(declaration, spec['type'])
+
+ # Add the type name defined by typedef to a
+ # symbol table (for usage in the lexer)
+ if typedef_namespace:
+ if is_typedef:
+ self._add_typedef_name(fixed_decl.name, fixed_decl.coord)
+ else:
+ self._add_identifier(fixed_decl.name, fixed_decl.coord)
+
+ fixed_decl = fix_atomic_specifiers(fixed_decl)
+ declarations.append(fixed_decl)
+
+ return declarations
+
+ def _build_function_definition(self, spec, decl, param_decls, body):
+ """ Builds a function definition.
+ """
+ if 'typedef' in spec['storage']:
+ self._parse_error("Invalid typedef", decl.coord)
+
+ declaration = self._build_declarations(
+ spec=spec,
+ decls=[dict(decl=decl, init=None)],
+ typedef_namespace=True)[0]
+
+ return c_ast.FuncDef(
+ decl=declaration,
+ param_decls=param_decls,
+ body=body,
+ coord=decl.coord)
+
+ def _select_struct_union_class(self, token):
+ """ Given a token (either STRUCT or UNION), selects the
+ appropriate AST class.
+ """
+ if token == 'struct':
+ return c_ast.Struct
+ else:
+ return c_ast.Union
+
+ ##
+ ## Precedence and associativity of operators
+ ##
+ # If this changes, c_generator.CGenerator.precedence_map needs to change as
+ # well
+ precedence = (
+ ('left', 'LOR'),
+ ('left', 'LAND'),
+ ('left', 'OR'),
+ ('left', 'XOR'),
+ ('left', 'AND'),
+ ('left', 'EQ', 'NE'),
+ ('left', 'GT', 'GE', 'LT', 'LE'),
+ ('left', 'RSHIFT', 'LSHIFT'),
+ ('left', 'PLUS', 'MINUS'),
+ ('left', 'TIMES', 'DIVIDE', 'MOD')
+ )
+
+ ##
+ ## Grammar productions
+ ## Implementation of the BNF defined in K&R2 A.13
+ ##
+
+ # Wrapper around a translation unit, to allow for empty input.
+ # Not strictly part of the C99 Grammar, but useful in practice.
+ def p_translation_unit_or_empty(self, p):
+ """ translation_unit_or_empty : translation_unit
+ | empty
+ """
+ if p[1] is None:
+ p[0] = c_ast.FileAST([])
+ else:
+ p[0] = c_ast.FileAST(p[1])
+
+ def p_translation_unit_1(self, p):
+ """ translation_unit : external_declaration
+ """
+ # Note: external_declaration is already a list
+ p[0] = p[1]
+
+ def p_translation_unit_2(self, p):
+ """ translation_unit : translation_unit external_declaration
+ """
+ p[1].extend(p[2])
+ p[0] = p[1]
+
+ # Declarations always come as lists (because they can be
+ # several in one line), so we wrap the function definition
+ # into a list as well, to make the return value of
+ # external_declaration homogeneous.
+ def p_external_declaration_1(self, p):
+ """ external_declaration : function_definition
+ """
+ p[0] = [p[1]]
+
+ def p_external_declaration_2(self, p):
+ """ external_declaration : declaration
+ """
+ p[0] = p[1]
+
+ def p_external_declaration_3(self, p):
+ """ external_declaration : pp_directive
+ | pppragma_directive
+ """
+ p[0] = [p[1]]
+
+ def p_external_declaration_4(self, p):
+ """ external_declaration : SEMI
+ """
+ p[0] = []
+
+ def p_external_declaration_5(self, p):
+ """ external_declaration : static_assert
+ """
+ p[0] = p[1]
+
+ def p_static_assert_declaration(self, p):
+ """ static_assert : _STATIC_ASSERT LPAREN constant_expression COMMA unified_string_literal RPAREN
+ | _STATIC_ASSERT LPAREN constant_expression RPAREN
+ """
+ if len(p) == 5:
+ p[0] = [c_ast.StaticAssert(p[3], None, self._token_coord(p, 1))]
+ else:
+ p[0] = [c_ast.StaticAssert(p[3], p[5], self._token_coord(p, 1))]
+
+ def p_pp_directive(self, p):
+ """ pp_directive : PPHASH
+ """
+ self._parse_error('Directives not supported yet',
+ self._token_coord(p, 1))
+
+ def p_pppragma_directive(self, p):
+ """ pppragma_directive : PPPRAGMA
+ | PPPRAGMA PPPRAGMASTR
+ """
+ if len(p) == 3:
+ p[0] = c_ast.Pragma(p[2], self._token_coord(p, 2))
+ else:
+ p[0] = c_ast.Pragma("", self._token_coord(p, 1))
+
+ # In function definitions, the declarator can be followed by
+ # a declaration list, for old "K&R style" function definitions.
+ def p_function_definition_1(self, p):
+ """ function_definition : id_declarator declaration_list_opt compound_statement
+ """
+ # no declaration specifiers - 'int' becomes the default type
+ spec = dict(
+ qual=[],
+ alignment=[],
+ storage=[],
+ type=[c_ast.IdentifierType(['int'],
+ coord=self._token_coord(p, 1))],
+ function=[])
+
+ p[0] = self._build_function_definition(
+ spec=spec,
+ decl=p[1],
+ param_decls=p[2],
+ body=p[3])
+
+ def p_function_definition_2(self, p):
+ """ function_definition : declaration_specifiers id_declarator declaration_list_opt compound_statement
+ """
+ spec = p[1]
+
+ p[0] = self._build_function_definition(
+ spec=spec,
+ decl=p[2],
+ param_decls=p[3],
+ body=p[4])
+
+ # Note: according to C18 A.2.2 (6.7.10 static_assert-declaration), _Static_assert
+ # is a declaration, not a statement. We additionally recognize it as a statement
+ # to fix parsing of _Static_assert inside functions.
+ #
+ def p_statement(self, p):
+ """ statement : labeled_statement
+ | expression_statement
+ | compound_statement
+ | selection_statement
+ | iteration_statement
+ | jump_statement
+ | pppragma_directive
+ | static_assert
+ """
+ p[0] = p[1]
+
+ # A pragma is generally considered a decorator rather than an actual
+ # statement. Still, for the purposes of analyzing an abstract syntax tree of
+ # C code, pragmas should not be ignored and were previously treated as a
+ # statement. This presents a problem for constructs that take a statement
+ # such as labeled_statements, selection_statements, and
+ # iteration_statements, causing a misleading structure in the AST. For
+ # example, consider the following C code.
+ #
+ # for (int i = 0; i < 3; i++)
+ # #pragma omp critical
+ # sum += 1;
+ #
+ # This code will compile and execute "sum += 1;" as the body of the for
+ # loop. Previous implementations of PyCParser would render the AST for this
+ # block of code as follows:
+ #
+ # For:
+ # DeclList:
+ # Decl: i, [], [], []
+ # TypeDecl: i, []
+ # IdentifierType: ['int']
+ # Constant: int, 0
+ # BinaryOp: <
+ # ID: i
+ # Constant: int, 3
+ # UnaryOp: p++
+ # ID: i
+ # Pragma: omp critical
+ # Assignment: +=
+ # ID: sum
+ # Constant: int, 1
+ #
+ # This AST misleadingly takes the Pragma as the body of the loop and the
+ # assignment then becomes a sibling of the loop.
+ #
+ # To solve edge cases like these, the pragmacomp_or_statement rule groups
+ # a pragma and its following statement (which would otherwise be orphaned)
+ # using a compound block, effectively turning the above code into:
+ #
+ # for (int i = 0; i < 3; i++) {
+ # #pragma omp critical
+ # sum += 1;
+ # }
+ def p_pragmacomp_or_statement(self, p):
+ """ pragmacomp_or_statement : pppragma_directive statement
+ | statement
+ """
+ if isinstance(p[1], c_ast.Pragma) and len(p) == 3:
+ p[0] = c_ast.Compound(
+ block_items=[p[1], p[2]],
+ coord=self._token_coord(p, 1))
+ else:
+ p[0] = p[1]
+
+ # In C, declarations can come several in a line:
+ # int x, *px, romulo = 5;
+ #
+ # However, for the AST, we will split them to separate Decl
+ # nodes.
+ #
+ # This rule splits its declarations and always returns a list
+ # of Decl nodes, even if it's one element long.
+ #
+ def p_decl_body(self, p):
+ """ decl_body : declaration_specifiers init_declarator_list_opt
+ | declaration_specifiers_no_type id_init_declarator_list_opt
+ """
+ spec = p[1]
+
+ # p[2] (init_declarator_list_opt) is either a list or None
+ #
+ if p[2] is None:
+ # By the standard, you must have at least one declarator unless
+ # declaring a structure tag, a union tag, or the members of an
+ # enumeration.
+ #
+ ty = spec['type']
+ s_u_or_e = (c_ast.Struct, c_ast.Union, c_ast.Enum)
+ if len(ty) == 1 and isinstance(ty[0], s_u_or_e):
+ decls = [c_ast.Decl(
+ name=None,
+ quals=spec['qual'],
+ align=spec['alignment'],
+ storage=spec['storage'],
+ funcspec=spec['function'],
+ type=ty[0],
+ init=None,
+ bitsize=None,
+ coord=ty[0].coord)]
+
+ # However, this case can also occur on redeclared identifiers in
+ # an inner scope. The trouble is that the redeclared type's name
+ # gets grouped into declaration_specifiers; _build_declarations
+ # compensates for this.
+ #
+ else:
+ decls = self._build_declarations(
+ spec=spec,
+ decls=[dict(decl=None, init=None)],
+ typedef_namespace=True)
+
+ else:
+ decls = self._build_declarations(
+ spec=spec,
+ decls=p[2],
+ typedef_namespace=True)
+
+ p[0] = decls
+
+ # The declaration has been split into a decl_body sub-rule and
+ # SEMI, because having them in a single rule created a problem
+ # for defining typedefs.
+ #
+ # If a typedef line was directly followed by a line using the
+ # type defined with the typedef, the type would not be
+ # recognized. This is because to reduce the declaration rule,
+ # the parser's lookahead asked for the token after SEMI, which
+ # was the type from the next line, and the lexer had no chance
+ # to see the updated type symbol table.
+ #
+ # Splitting solves this problem, because after seeing SEMI,
+ # the parser reduces decl_body, which actually adds the new
+ # type into the table to be seen by the lexer before the next
+ # line is reached.
+ def p_declaration(self, p):
+ """ declaration : decl_body SEMI
+ """
+ p[0] = p[1]
+
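+ # Concretely (comment only): in
+ #   typedef int T;
+ #   T x;
+ # decl_body is reduced as soon as SEMI becomes the lookahead, so 'T' is
+ # already registered as a typedef name before the lexer tokenizes the
+ # 'T' on the next line and can return TYPEID instead of ID.
+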
+ # Since each declaration is a list of declarations, this
+ # rule will combine all the declarations and return a single
+ # list
+ #
+ def p_declaration_list(self, p):
+ """ declaration_list : declaration
+ | declaration_list declaration
+ """
+ p[0] = p[1] if len(p) == 2 else p[1] + p[2]
+
+ # To know when declaration-specifiers end and declarators begin,
+ # we require declaration-specifiers to have at least one
+ # type-specifier, and disallow typedef-names after we've seen any
+ # type-specifier. These are both required by the spec.
+ #
+ def p_declaration_specifiers_no_type_1(self, p):
+ """ declaration_specifiers_no_type : type_qualifier declaration_specifiers_no_type_opt
+ """
+ p[0] = self._add_declaration_specifier(p[2], p[1], 'qual')
+
+ def p_declaration_specifiers_no_type_2(self, p):
+ """ declaration_specifiers_no_type : storage_class_specifier declaration_specifiers_no_type_opt
+ """
+ p[0] = self._add_declaration_specifier(p[2], p[1], 'storage')
+
+ def p_declaration_specifiers_no_type_3(self, p):
+ """ declaration_specifiers_no_type : function_specifier declaration_specifiers_no_type_opt
+ """
+ p[0] = self._add_declaration_specifier(p[2], p[1], 'function')
+
+ # Without this, `typedef _Atomic(T) U` will parse incorrectly because the
+ # _Atomic qualifier will match, instead of the specifier.
+ def p_declaration_specifiers_no_type_4(self, p):
+ """ declaration_specifiers_no_type : atomic_specifier declaration_specifiers_no_type_opt
+ """
+ p[0] = self._add_declaration_specifier(p[2], p[1], 'type')
+
+ def p_declaration_specifiers_no_type_5(self, p):
+ """ declaration_specifiers_no_type : alignment_specifier declaration_specifiers_no_type_opt
+ """
+ p[0] = self._add_declaration_specifier(p[2], p[1], 'alignment')
+
+ def p_declaration_specifiers_1(self, p):
+ """ declaration_specifiers : declaration_specifiers type_qualifier
+ """
+ p[0] = self._add_declaration_specifier(p[1], p[2], 'qual', append=True)
+
+ def p_declaration_specifiers_2(self, p):
+ """ declaration_specifiers : declaration_specifiers storage_class_specifier
+ """
+ p[0] = self._add_declaration_specifier(p[1], p[2], 'storage', append=True)
+
+ def p_declaration_specifiers_3(self, p):
+ """ declaration_specifiers : declaration_specifiers function_specifier
+ """
+ p[0] = self._add_declaration_specifier(p[1], p[2], 'function', append=True)
+
+ def p_declaration_specifiers_4(self, p):
+ """ declaration_specifiers : declaration_specifiers type_specifier_no_typeid
+ """
+ p[0] = self._add_declaration_specifier(p[1], p[2], 'type', append=True)
+
+ def p_declaration_specifiers_5(self, p):
+ """ declaration_specifiers : type_specifier
+ """
+ p[0] = self._add_declaration_specifier(None, p[1], 'type')
+
+ def p_declaration_specifiers_6(self, p):
+ """ declaration_specifiers : declaration_specifiers_no_type type_specifier
+ """
+ p[0] = self._add_declaration_specifier(p[1], p[2], 'type', append=True)
+
+ def p_declaration_specifiers_7(self, p):
+ """ declaration_specifiers : declaration_specifiers alignment_specifier
+ """
+ p[0] = self._add_declaration_specifier(p[1], p[2], 'alignment', append=True)
+
+ def p_storage_class_specifier(self, p):
+ """ storage_class_specifier : AUTO
+ | REGISTER
+ | STATIC
+ | EXTERN
+ | TYPEDEF
+ | _THREAD_LOCAL
+ """
+ p[0] = p[1]
+
+ def p_function_specifier(self, p):
+ """ function_specifier : INLINE
+ | _NORETURN
+ """
+ p[0] = p[1]
+
+ def p_type_specifier_no_typeid(self, p):
+ """ type_specifier_no_typeid : VOID
+ | _BOOL
+ | CHAR
+ | SHORT
+ | INT
+ | LONG
+ | FLOAT
+ | DOUBLE
+ | _COMPLEX
+ | SIGNED
+ | UNSIGNED
+ | __INT128
+ """
+ p[0] = c_ast.IdentifierType([p[1]], coord=self._token_coord(p, 1))
+
+ def p_type_specifier(self, p):
+ """ type_specifier : typedef_name
+ | enum_specifier
+ | struct_or_union_specifier
+ | type_specifier_no_typeid
+ | atomic_specifier
+ """
+ p[0] = p[1]
+
+ # See section 6.7.2.4 of the C11 standard.
+ def p_atomic_specifier(self, p):
+ """ atomic_specifier : _ATOMIC LPAREN type_name RPAREN
+ """
+ typ = p[3]
+ typ.quals.append('_Atomic')
+ p[0] = typ
+
+ def p_type_qualifier(self, p):
+ """ type_qualifier : CONST
+ | RESTRICT
+ | VOLATILE
+ | _ATOMIC
+ """
+ p[0] = p[1]
+
+ def p_init_declarator_list(self, p):
+ """ init_declarator_list : init_declarator
+ | init_declarator_list COMMA init_declarator
+ """
+ p[0] = p[1] + [p[3]] if len(p) == 4 else [p[1]]
+
+ # Returns a {decl=<declarator>, init=<initializer>} dictionary
+ # If there's no initializer, uses None
+ #
+ def p_init_declarator(self, p):
+ """ init_declarator : declarator
+ | declarator EQUALS initializer
+ """
+ p[0] = dict(decl=p[1], init=(p[3] if len(p) > 2 else None))
+
+ def p_id_init_declarator_list(self, p):
+ """ id_init_declarator_list : id_init_declarator
+ | id_init_declarator_list COMMA init_declarator
+ """
+ p[0] = p[1] + [p[3]] if len(p) == 4 else [p[1]]
+
+ def p_id_init_declarator(self, p):
+ """ id_init_declarator : id_declarator
+ | id_declarator EQUALS initializer
+ """
+ p[0] = dict(decl=p[1], init=(p[3] if len(p) > 2 else None))
+
+ # Require at least one type specifier in a specifier-qualifier-list
+ #
+ def p_specifier_qualifier_list_1(self, p):
+ """ specifier_qualifier_list : specifier_qualifier_list type_specifier_no_typeid
+ """
+ p[0] = self._add_declaration_specifier(p[1], p[2], 'type', append=True)
+
+ def p_specifier_qualifier_list_2(self, p):
+ """ specifier_qualifier_list : specifier_qualifier_list type_qualifier
+ """
+ p[0] = self._add_declaration_specifier(p[1], p[2], 'qual', append=True)
+
+ def p_specifier_qualifier_list_3(self, p):
+ """ specifier_qualifier_list : type_specifier
+ """
+ p[0] = self._add_declaration_specifier(None, p[1], 'type')
+
+ def p_specifier_qualifier_list_4(self, p):
+ """ specifier_qualifier_list : type_qualifier_list type_specifier
+ """
+ p[0] = dict(qual=p[1], alignment=[], storage=[], type=[p[2]], function=[])
+
+ def p_specifier_qualifier_list_5(self, p):
+ """ specifier_qualifier_list : alignment_specifier
+ """
+ p[0] = dict(qual=[], alignment=[p[1]], storage=[], type=[], function=[])
+
+ def p_specifier_qualifier_list_6(self, p):
+ """ specifier_qualifier_list : specifier_qualifier_list alignment_specifier
+ """
+ p[0] = self._add_declaration_specifier(p[1], p[2], 'alignment')
+
+ # TYPEID is allowed here (and in other struct/enum related tag names), because
+ # struct/enum tags reside in their own namespace and can be named the same as types
+ #
+ def p_struct_or_union_specifier_1(self, p):
+ """ struct_or_union_specifier : struct_or_union ID
+ | struct_or_union TYPEID
+ """
+ klass = self._select_struct_union_class(p[1])
+ # None means no list of members
+ p[0] = klass(
+ name=p[2],
+ decls=None,
+ coord=self._token_coord(p, 2))
+
+ def p_struct_or_union_specifier_2(self, p):
+ """ struct_or_union_specifier : struct_or_union brace_open struct_declaration_list brace_close
+ | struct_or_union brace_open brace_close
+ """
+ klass = self._select_struct_union_class(p[1])
+ if len(p) == 4:
+ # Empty sequence means an empty list of members
+ p[0] = klass(
+ name=None,
+ decls=[],
+ coord=self._token_coord(p, 2))
+ else:
+ p[0] = klass(
+ name=None,
+ decls=p[3],
+ coord=self._token_coord(p, 2))
+
+ def p_struct_or_union_specifier_3(self, p):
+ """ struct_or_union_specifier : struct_or_union ID brace_open struct_declaration_list brace_close
+ | struct_or_union ID brace_open brace_close
+ | struct_or_union TYPEID brace_open struct_declaration_list brace_close
+ | struct_or_union TYPEID brace_open brace_close
+ """
+ klass = self._select_struct_union_class(p[1])
+ if len(p) == 5:
+ # Empty sequence means an empty list of members
+ p[0] = klass(
+ name=p[2],
+ decls=[],
+ coord=self._token_coord(p, 2))
+ else:
+ p[0] = klass(
+ name=p[2],
+ decls=p[4],
+ coord=self._token_coord(p, 2))
+
+ def p_struct_or_union(self, p):
+ """ struct_or_union : STRUCT
+ | UNION
+ """
+ p[0] = p[1]
+
+ # Combine all declarations into a single list
+ #
+ def p_struct_declaration_list(self, p):
+ """ struct_declaration_list : struct_declaration
+ | struct_declaration_list struct_declaration
+ """
+ if len(p) == 2:
+ p[0] = p[1] or []
+ else:
+ p[0] = p[1] + (p[2] or [])
+
+ def p_struct_declaration_1(self, p):
+ """ struct_declaration : specifier_qualifier_list struct_declarator_list_opt SEMI
+ """
+ spec = p[1]
+ assert 'typedef' not in spec['storage']
+
+ if p[2] is not None:
+ decls = self._build_declarations(
+ spec=spec,
+ decls=p[2])
+
+ elif len(spec['type']) == 1:
+ # Anonymous struct/union, gcc extension, C1x feature.
+ # Although the standard only allows structs/unions here, I see no
+ # reason to disallow other types since some compilers have typedefs
+ # here, and pycparser isn't about rejecting all invalid code.
+ #
+ node = spec['type'][0]
+ if isinstance(node, c_ast.Node):
+ decl_type = node
+ else:
+ decl_type = c_ast.IdentifierType(node)
+
+ decls = self._build_declarations(
+ spec=spec,
+ decls=[dict(decl=decl_type)])
+
+ else:
+ # Structure/union members can have the same names as typedefs.
+ # The trouble is that the member's name gets grouped into
+ # specifier_qualifier_list; _build_declarations compensates.
+ #
+ decls = self._build_declarations(
+ spec=spec,
+ decls=[dict(decl=None, init=None)])
+
+ p[0] = decls
+
+ def p_struct_declaration_2(self, p):
+ """ struct_declaration : SEMI
+ """
+ p[0] = None
+
+ def p_struct_declaration_3(self, p):
+ """ struct_declaration : pppragma_directive
+ """
+ p[0] = [p[1]]
+
+ def p_struct_declarator_list(self, p):
+ """ struct_declarator_list : struct_declarator
+ | struct_declarator_list COMMA struct_declarator
+ """
+ p[0] = p[1] + [p[3]] if len(p) == 4 else [p[1]]
+
+ # struct_declarator passes up a dict with the keys: decl (for
+ # the underlying declarator) and bitsize (for the bitsize)
+ #
+ def p_struct_declarator_1(self, p):
+ """ struct_declarator : declarator
+ """
+ p[0] = {'decl': p[1], 'bitsize': None}
+
+ def p_struct_declarator_2(self, p):
+ """ struct_declarator : declarator COLON constant_expression
+ | COLON constant_expression
+ """
+ if len(p) > 3:
+ p[0] = {'decl': p[1], 'bitsize': p[3]}
+ else:
+ p[0] = {'decl': c_ast.TypeDecl(None, None, None, None), 'bitsize': p[2]}
+
+ def p_enum_specifier_1(self, p):
+ """ enum_specifier : ENUM ID
+ | ENUM TYPEID
+ """
+ p[0] = c_ast.Enum(p[2], None, self._token_coord(p, 1))
+
+ def p_enum_specifier_2(self, p):
+ """ enum_specifier : ENUM brace_open enumerator_list brace_close
+ """
+ p[0] = c_ast.Enum(None, p[3], self._token_coord(p, 1))
+
+ def p_enum_specifier_3(self, p):
+ """ enum_specifier : ENUM ID brace_open enumerator_list brace_close
+ | ENUM TYPEID brace_open enumerator_list brace_close
+ """
+ p[0] = c_ast.Enum(p[2], p[4], self._token_coord(p, 1))
+
+ def p_enumerator_list(self, p):
+ """ enumerator_list : enumerator
+ | enumerator_list COMMA
+ | enumerator_list COMMA enumerator
+ """
+ if len(p) == 2:
+ p[0] = c_ast.EnumeratorList([p[1]], p[1].coord)
+ elif len(p) == 3:
+ p[0] = p[1]
+ else:
+ p[1].enumerators.append(p[3])
+ p[0] = p[1]
+
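+ # Note that the "enumerator_list COMMA" alternative above accepts the
+ # C99 trailing comma, so "enum E { A, B, };" parses with the dangling
+ # comma simply folded into the already-built EnumeratorList.
+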
+ def p_alignment_specifier(self, p):
+ """ alignment_specifier : _ALIGNAS LPAREN type_name RPAREN
+ | _ALIGNAS LPAREN constant_expression RPAREN
+ """
+ p[0] = c_ast.Alignas(p[3], self._token_coord(p, 1))
+
+ def p_enumerator(self, p):
+ """ enumerator : ID
+ | ID EQUALS constant_expression
+ """
+ if len(p) == 2:
+ enumerator = c_ast.Enumerator(
+ p[1], None,
+ self._token_coord(p, 1))
+ else:
+ enumerator = c_ast.Enumerator(
+ p[1], p[3],
+ self._token_coord(p, 1))
+ self._add_identifier(enumerator.name, enumerator.coord)
+
+ p[0] = enumerator
+
+ def p_declarator(self, p):
+ """ declarator : id_declarator
+ | typeid_declarator
+ """
+ p[0] = p[1]
+
+ @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
+ def p_xxx_declarator_1(self, p):
+ """ xxx_declarator : direct_xxx_declarator
+ """
+ p[0] = p[1]
+
+ @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
+ def p_xxx_declarator_2(self, p):
+ """ xxx_declarator : pointer direct_xxx_declarator
+ """
+ p[0] = self._type_modify_decl(p[2], p[1])
+
+ @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
+ def p_direct_xxx_declarator_1(self, p):
+ """ direct_xxx_declarator : yyy
+ """
+ p[0] = c_ast.TypeDecl(
+ declname=p[1],
+ type=None,
+ quals=None,
+ align=None,
+ coord=self._token_coord(p, 1))
+
+ @parameterized(('id', 'ID'), ('typeid', 'TYPEID'))
+ def p_direct_xxx_declarator_2(self, p):
+ """ direct_xxx_declarator : LPAREN xxx_declarator RPAREN
+ """
+ p[0] = p[2]
+
+ @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
+ def p_direct_xxx_declarator_3(self, p):
+ """ direct_xxx_declarator : direct_xxx_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET
+ """
+ quals = (p[3] if len(p) > 5 else []) or []
+ # Accept dimension qualifiers
+ # Per C99 6.7.5.3 p7
+ arr = c_ast.ArrayDecl(
+ type=None,
+ dim=p[4] if len(p) > 5 else p[3],
+ dim_quals=quals,
+ coord=p[1].coord)
+
+ p[0] = self._type_modify_decl(decl=p[1], modifier=arr)
+
+ @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
+ def p_direct_xxx_declarator_4(self, p):
+ """ direct_xxx_declarator : direct_xxx_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET
+ | direct_xxx_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET
+ """
+ # Using slice notation for PLY objects doesn't work in Python 3 for the
+ # version of PLY embedded with pycparser; see PLY Google Code issue 30.
+ # Work around that here by listing the two elements separately.
+ listed_quals = [item if isinstance(item, list) else [item]
+ for item in [p[3],p[4]]]
+ dim_quals = [qual for sublist in listed_quals for qual in sublist
+ if qual is not None]
+ arr = c_ast.ArrayDecl(
+ type=None,
+ dim=p[5],
+ dim_quals=dim_quals,
+ coord=p[1].coord)
+
+ p[0] = self._type_modify_decl(decl=p[1], modifier=arr)
+
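+ # For instance, "void f(int a[static const 10]);" reaches this rule via
+ # its first alternative with p[3] == 'static' and p[4] == ['const'], so
+ # the resulting ArrayDecl carries dim_quals == ['static', 'const'].
+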
+ # Special for VLAs
+ #
+ @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
+ def p_direct_xxx_declarator_5(self, p):
+ """ direct_xxx_declarator : direct_xxx_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET
+ """
+ arr = c_ast.ArrayDecl(
+ type=None,
+ dim=c_ast.ID(p[4], self._token_coord(p, 4)),
+ dim_quals=p[3] if p[3] is not None else [],
+ coord=p[1].coord)
+
+ p[0] = self._type_modify_decl(decl=p[1], modifier=arr)
+
+ @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
+ def p_direct_xxx_declarator_6(self, p):
+ """ direct_xxx_declarator : direct_xxx_declarator LPAREN parameter_type_list RPAREN
+ | direct_xxx_declarator LPAREN identifier_list_opt RPAREN
+ """
+ func = c_ast.FuncDecl(
+ args=p[3],
+ type=None,
+ coord=p[1].coord)
+
+ # To see why _get_yacc_lookahead_token is needed, consider:
+ # typedef char TT;
+ # void foo(int TT) { TT = 10; }
+ # Outside the function, TT is a typedef, but inside (starting and
+ # ending with the braces) it's a parameter. The trouble begins with
+ # yacc's lookahead token. We don't know if we're declaring or
+ # defining a function until we see LBRACE, but if we wait for yacc to
+ # trigger a rule on that token, then TT will have already been read
+ # and incorrectly interpreted as TYPEID. We need to add the
+ # parameters to the scope the moment the lexer sees LBRACE.
+ #
+ if self._get_yacc_lookahead_token().type == "LBRACE":
+ if func.args is not None:
+ for param in func.args.params:
+ if isinstance(param, c_ast.EllipsisParam): break
+ self._add_identifier(param.name, param.coord)
+
+ p[0] = self._type_modify_decl(decl=p[1], modifier=func)
+
+ def p_pointer(self, p):
+ """ pointer : TIMES type_qualifier_list_opt
+ | TIMES type_qualifier_list_opt pointer
+ """
+ coord = self._token_coord(p, 1)
+ # Pointer decls nest from inside out. This is important when different
+ # levels have different qualifiers. For example:
+ #
+ # char * const * p;
+ #
+ # Means "pointer to const pointer to char"
+ #
+ # While:
+ #
+ # char ** const p;
+ #
+ # Means "const pointer to pointer to char"
+ #
+ # So when we construct PtrDecl nestings, the leftmost pointer goes in
+ # as the most nested type.
+ nested_type = c_ast.PtrDecl(quals=p[2] or [], type=None, coord=coord)
+ if len(p) > 3:
+ tail_type = p[3]
+ while tail_type.type is not None:
+ tail_type = tail_type.type
+ tail_type.type = nested_type
+ p[0] = p[3]
+ else:
+ p[0] = nested_type
+
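+ # Concretely, "char * const * p;" comes out as
+ # PtrDecl(quals=[], type=PtrDecl(quals=['const'], type=<TypeDecl of p>)):
+ # the leftmost '*' is the innermost node, matching the reading
+ # "p is a pointer to a const pointer to char".
+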
+ def p_type_qualifier_list(self, p):
+ """ type_qualifier_list : type_qualifier
+ | type_qualifier_list type_qualifier
+ """
+ p[0] = [p[1]] if len(p) == 2 else p[1] + [p[2]]
+
+ def p_parameter_type_list(self, p):
+ """ parameter_type_list : parameter_list
+ | parameter_list COMMA ELLIPSIS
+ """
+ if len(p) > 2:
+ p[1].params.append(c_ast.EllipsisParam(self._token_coord(p, 3)))
+
+ p[0] = p[1]
+
+ def p_parameter_list(self, p):
+ """ parameter_list : parameter_declaration
+ | parameter_list COMMA parameter_declaration
+ """
+ if len(p) == 2: # single parameter
+ p[0] = c_ast.ParamList([p[1]], p[1].coord)
+ else:
+ p[1].params.append(p[3])
+ p[0] = p[1]
+
+ # From ISO/IEC 9899:TC2, 6.7.5.3.11:
+ # "If, in a parameter declaration, an identifier can be treated either
+ # as a typedef name or as a parameter name, it shall be taken as a
+ # typedef name."
+ #
+ # Inside a parameter declaration, once we've reduced declaration specifiers,
+ # if we shift in an LPAREN and see a TYPEID, it could be either an abstract
+ # declarator or a declarator nested inside parens. This rule tells us to
+ # always treat it as an abstract declarator. Therefore, we only accept
+ # `id_declarator`s and `typeid_noparen_declarator`s.
+ def p_parameter_declaration_1(self, p):
+ """ parameter_declaration : declaration_specifiers id_declarator
+ | declaration_specifiers typeid_noparen_declarator
+ """
+ spec = p[1]
+ if not spec['type']:
+ spec['type'] = [c_ast.IdentifierType(['int'],
+ coord=self._token_coord(p, 1))]
+ p[0] = self._build_declarations(
+ spec=spec,
+ decls=[dict(decl=p[2])])[0]
+
+ def p_parameter_declaration_2(self, p):
+ """ parameter_declaration : declaration_specifiers abstract_declarator_opt
+ """
+ spec = p[1]
+ if not spec['type']:
+ spec['type'] = [c_ast.IdentifierType(['int'],
+ coord=self._token_coord(p, 1))]
+
+ # Parameters can have the same names as typedefs. The trouble is that
+ # the parameter's name gets grouped into declaration_specifiers, making
+ # it look like an old-style declaration; compensate.
+ #
+ if len(spec['type']) > 1 and len(spec['type'][-1].names) == 1 and \
+ self._is_type_in_scope(spec['type'][-1].names[0]):
+ decl = self._build_declarations(
+ spec=spec,
+ decls=[dict(decl=p[2], init=None)])[0]
+
+ # This truly is an old-style parameter declaration
+ #
+ else:
+ decl = c_ast.Typename(
+ name='',
+ quals=spec['qual'],
+ align=None,
+ type=p[2] or c_ast.TypeDecl(None, None, None, None),
+ coord=self._token_coord(p, 2))
+ typename = spec['type']
+ decl = self._fix_decl_name_type(decl, typename)
+
+ p[0] = decl
+
+ def p_identifier_list(self, p):
+ """ identifier_list : identifier
+ | identifier_list COMMA identifier
+ """
+ if len(p) == 2: # single parameter
+ p[0] = c_ast.ParamList([p[1]], p[1].coord)
+ else:
+ p[1].params.append(p[3])
+ p[0] = p[1]
+
+ def p_initializer_1(self, p):
+ """ initializer : assignment_expression
+ """
+ p[0] = p[1]
+
+ def p_initializer_2(self, p):
+ """ initializer : brace_open initializer_list_opt brace_close
+ | brace_open initializer_list COMMA brace_close
+ """
+ if p[2] is None:
+ p[0] = c_ast.InitList([], self._token_coord(p, 1))
+ else:
+ p[0] = p[2]
+
+ def p_initializer_list(self, p):
+ """ initializer_list : designation_opt initializer
+ | initializer_list COMMA designation_opt initializer
+ """
+ if len(p) == 3: # single initializer
+ init = p[2] if p[1] is None else c_ast.NamedInitializer(p[1], p[2])
+ p[0] = c_ast.InitList([init], p[2].coord)
+ else:
+ init = p[4] if p[3] is None else c_ast.NamedInitializer(p[3], p[4])
+ p[1].exprs.append(init)
+ p[0] = p[1]
+
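+ # A designated initializer such as "struct P q = { .x = 1, .y = 2 };"
+ # arrives here with the designator lists ([ID('x')], [ID('y')]) from
+ # designation_opt, so each initializer is wrapped in a NamedInitializer
+ # before being appended to the InitList.
+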
+ def p_designation(self, p):
+ """ designation : designator_list EQUALS
+ """
+ p[0] = p[1]
+
+ # Designators are represented as a list of nodes, in the order in which
+ # they're written in the code.
+ #
+ def p_designator_list(self, p):
+ """ designator_list : designator
+ | designator_list designator
+ """
+ p[0] = [p[1]] if len(p) == 2 else p[1] + [p[2]]
+
+ def p_designator(self, p):
+ """ designator : LBRACKET constant_expression RBRACKET
+ | PERIOD identifier
+ """
+ p[0] = p[2]
+
+ def p_type_name(self, p):
+ """ type_name : specifier_qualifier_list abstract_declarator_opt
+ """
+ typename = c_ast.Typename(
+ name='',
+ quals=p[1]['qual'][:],
+ align=None,
+ type=p[2] or c_ast.TypeDecl(None, None, None, None),
+ coord=self._token_coord(p, 2))
+
+ p[0] = self._fix_decl_name_type(typename, p[1]['type'])
+
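+ # This is what casts and sizeof consume: "(const int *)x" builds a
+ # Typename with quals=['const'] whose type chain is
+ # PtrDecl -> TypeDecl -> IdentifierType(['int']).
+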
+ def p_abstract_declarator_1(self, p):
+ """ abstract_declarator : pointer
+ """
+ dummytype = c_ast.TypeDecl(None, None, None, None)
+ p[0] = self._type_modify_decl(
+ decl=dummytype,
+ modifier=p[1])
+
+ def p_abstract_declarator_2(self, p):
+ """ abstract_declarator : pointer direct_abstract_declarator
+ """
+ p[0] = self._type_modify_decl(p[2], p[1])
+
+ def p_abstract_declarator_3(self, p):
+ """ abstract_declarator : direct_abstract_declarator
+ """
+ p[0] = p[1]
+
+ # Creating and using direct_abstract_declarator_opt here
+ # instead of listing both direct_abstract_declarator and the
+ # lack of it in the beginning of _1 and _2 caused two
+ # shift/reduce errors.
+ #
+ def p_direct_abstract_declarator_1(self, p):
+ """ direct_abstract_declarator : LPAREN abstract_declarator RPAREN """
+ p[0] = p[2]
+
+ def p_direct_abstract_declarator_2(self, p):
+ """ direct_abstract_declarator : direct_abstract_declarator LBRACKET assignment_expression_opt RBRACKET
+ """
+ arr = c_ast.ArrayDecl(
+ type=None,
+ dim=p[3],
+ dim_quals=[],
+ coord=p[1].coord)
+
+ p[0] = self._type_modify_decl(decl=p[1], modifier=arr)
+
+ def p_direct_abstract_declarator_3(self, p):
+ """ direct_abstract_declarator : LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET
+ """
+ quals = (p[2] if len(p) > 4 else []) or []
+ p[0] = c_ast.ArrayDecl(
+ type=c_ast.TypeDecl(None, None, None, None),
+ dim=p[3] if len(p) > 4 else p[2],
+ dim_quals=quals,
+ coord=self._token_coord(p, 1))
+
+ def p_direct_abstract_declarator_4(self, p):
+ """ direct_abstract_declarator : direct_abstract_declarator LBRACKET TIMES RBRACKET
+ """
+ arr = c_ast.ArrayDecl(
+ type=None,
+ dim=c_ast.ID(p[3], self._token_coord(p, 3)),
+ dim_quals=[],
+ coord=p[1].coord)
+
+ p[0] = self._type_modify_decl(decl=p[1], modifier=arr)
+
+ def p_direct_abstract_declarator_5(self, p):
+ """ direct_abstract_declarator : LBRACKET TIMES RBRACKET
+ """
+ p[0] = c_ast.ArrayDecl(
+ type=c_ast.TypeDecl(None, None, None, None),
+ dim=c_ast.ID(p[3], self._token_coord(p, 3)),
+ dim_quals=[],
+ coord=self._token_coord(p, 1))
+
+ def p_direct_abstract_declarator_6(self, p):
+ """ direct_abstract_declarator : direct_abstract_declarator LPAREN parameter_type_list_opt RPAREN
+ """
+ func = c_ast.FuncDecl(
+ args=p[3],
+ type=None,
+ coord=p[1].coord)
+
+ p[0] = self._type_modify_decl(decl=p[1], modifier=func)
+
+ def p_direct_abstract_declarator_7(self, p):
+ """ direct_abstract_declarator : LPAREN parameter_type_list_opt RPAREN
+ """
+ p[0] = c_ast.FuncDecl(
+ args=p[2],
+ type=c_ast.TypeDecl(None, None, None, None),
+ coord=self._token_coord(p, 1))
+
+ # declaration is a list, statement isn't. To make it consistent, block_item
+ # will always be a list
+ #
+ def p_block_item(self, p):
+ """ block_item : declaration
+ | statement
+ """
+ p[0] = p[1] if isinstance(p[1], list) else [p[1]]
+
+ # Since we made block_item a list, this just combines lists
+ #
+ def p_block_item_list(self, p):
+ """ block_item_list : block_item
+ | block_item_list block_item
+ """
+ # Empty block items (plain ';') produce [None], so ignore them
+ p[0] = p[1] if (len(p) == 2 or p[2] == [None]) else p[1] + p[2]
+
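+ # For example, "{ int x; x = 1; }" concatenates the one-element lists
+ # from the declaration and the statement into a single flat
+ # block_items list for the enclosing Compound.
+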
+ def p_compound_statement_1(self, p):
+ """ compound_statement : brace_open block_item_list_opt brace_close """
+ p[0] = c_ast.Compound(
+ block_items=p[2],
+ coord=self._token_coord(p, 1))
+
+ def p_labeled_statement_1(self, p):
+ """ labeled_statement : ID COLON pragmacomp_or_statement """
+ p[0] = c_ast.Label(p[1], p[3], self._token_coord(p, 1))
+
+ def p_labeled_statement_2(self, p):
+ """ labeled_statement : CASE constant_expression COLON pragmacomp_or_statement """
+ p[0] = c_ast.Case(p[2], [p[4]], self._token_coord(p, 1))
+
+ def p_labeled_statement_3(self, p):
+ """ labeled_statement : DEFAULT COLON pragmacomp_or_statement """
+ p[0] = c_ast.Default([p[3]], self._token_coord(p, 1))
+
+ def p_selection_statement_1(self, p):
+ """ selection_statement : IF LPAREN expression RPAREN pragmacomp_or_statement """
+ p[0] = c_ast.If(p[3], p[5], None, self._token_coord(p, 1))
+
+ def p_selection_statement_2(self, p):
+ """ selection_statement : IF LPAREN expression RPAREN statement ELSE pragmacomp_or_statement """
+ p[0] = c_ast.If(p[3], p[5], p[7], self._token_coord(p, 1))
+
+ def p_selection_statement_3(self, p):
+ """ selection_statement : SWITCH LPAREN expression RPAREN pragmacomp_or_statement """
+ p[0] = fix_switch_cases(
+ c_ast.Switch(p[3], p[5], self._token_coord(p, 1)))
+
+ def p_iteration_statement_1(self, p):
+ """ iteration_statement : WHILE LPAREN expression RPAREN pragmacomp_or_statement """
+ p[0] = c_ast.While(p[3], p[5], self._token_coord(p, 1))
+
+ def p_iteration_statement_2(self, p):
+ """ iteration_statement : DO pragmacomp_or_statement WHILE LPAREN expression RPAREN SEMI """
+ p[0] = c_ast.DoWhile(p[5], p[2], self._token_coord(p, 1))
+
+ def p_iteration_statement_3(self, p):
+ """ iteration_statement : FOR LPAREN expression_opt SEMI expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement """
+ p[0] = c_ast.For(p[3], p[5], p[7], p[9], self._token_coord(p, 1))
+
+ def p_iteration_statement_4(self, p):
+ """ iteration_statement : FOR LPAREN declaration expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement """
+ p[0] = c_ast.For(c_ast.DeclList(p[3], self._token_coord(p, 1)),
+ p[4], p[6], p[8], self._token_coord(p, 1))
+
+ def p_jump_statement_1(self, p):
+ """ jump_statement : GOTO ID SEMI """
+ p[0] = c_ast.Goto(p[2], self._token_coord(p, 1))
+
+ def p_jump_statement_2(self, p):
+ """ jump_statement : BREAK SEMI """
+ p[0] = c_ast.Break(self._token_coord(p, 1))
+
+ def p_jump_statement_3(self, p):
+ """ jump_statement : CONTINUE SEMI """
+ p[0] = c_ast.Continue(self._token_coord(p, 1))
+
+ def p_jump_statement_4(self, p):
+ """ jump_statement : RETURN expression SEMI
+ | RETURN SEMI
+ """
+ p[0] = c_ast.Return(p[2] if len(p) == 4 else None, self._token_coord(p, 1))
+
+ def p_expression_statement(self, p):
+ """ expression_statement : expression_opt SEMI """
+ if p[1] is None:
+ p[0] = c_ast.EmptyStatement(self._token_coord(p, 2))
+ else:
+ p[0] = p[1]
+
+ def p_expression(self, p):
+ """ expression : assignment_expression
+ | expression COMMA assignment_expression
+ """
+ if len(p) == 2:
+ p[0] = p[1]
+ else:
+ if not isinstance(p[1], c_ast.ExprList):
+ p[1] = c_ast.ExprList([p[1]], p[1].coord)
+
+ p[1].exprs.append(p[3])
+ p[0] = p[1]
+
+ def p_parenthesized_compound_expression(self, p):
+ """ assignment_expression : LPAREN compound_statement RPAREN """
+ p[0] = p[2]
+
+ def p_typedef_name(self, p):
+ """ typedef_name : TYPEID """
+ p[0] = c_ast.IdentifierType([p[1]], coord=self._token_coord(p, 1))
+
+ def p_assignment_expression(self, p):
+ """ assignment_expression : conditional_expression
+ | unary_expression assignment_operator assignment_expression
+ """
+ if len(p) == 2:
+ p[0] = p[1]
+ else:
+ p[0] = c_ast.Assignment(p[2], p[1], p[3], p[1].coord)
+
+ # K&R2 defines these as many separate rules, to encode
+ # precedence and associativity. Why work hard? I'll just use
+ # the built-in precedence/associativity specification feature
+ # of PLY (see the precedence declaration above).
+ #
+ def p_assignment_operator(self, p):
+ """ assignment_operator : EQUALS
+ | XOREQUAL
+ | TIMESEQUAL
+ | DIVEQUAL
+ | MODEQUAL
+ | PLUSEQUAL
+ | MINUSEQUAL
+ | LSHIFTEQUAL
+ | RSHIFTEQUAL
+ | ANDEQUAL
+ | OREQUAL
+ """
+ p[0] = p[1]
+
+ def p_constant_expression(self, p):
+ """ constant_expression : conditional_expression """
+ p[0] = p[1]
+
+ def p_conditional_expression(self, p):
+ """ conditional_expression : binary_expression
+ | binary_expression CONDOP expression COLON conditional_expression
+ """
+ if len(p) == 2:
+ p[0] = p[1]
+ else:
+ p[0] = c_ast.TernaryOp(p[1], p[3], p[5], p[1].coord)
+
+ def p_binary_expression(self, p):
+ """ binary_expression : cast_expression
+ | binary_expression TIMES binary_expression
+ | binary_expression DIVIDE binary_expression
+ | binary_expression MOD binary_expression
+ | binary_expression PLUS binary_expression
+ | binary_expression MINUS binary_expression
+ | binary_expression RSHIFT binary_expression
+ | binary_expression LSHIFT binary_expression
+ | binary_expression LT binary_expression
+ | binary_expression LE binary_expression
+ | binary_expression GE binary_expression
+ | binary_expression GT binary_expression
+ | binary_expression EQ binary_expression
+ | binary_expression NE binary_expression
+ | binary_expression AND binary_expression
+ | binary_expression OR binary_expression
+ | binary_expression XOR binary_expression
+ | binary_expression LAND binary_expression
+ | binary_expression LOR binary_expression
+ """
+ if len(p) == 2:
+ p[0] = p[1]
+ else:
+ p[0] = c_ast.BinaryOp(p[2], p[1], p[3], p[1].coord)
+
+ def p_cast_expression_1(self, p):
+ """ cast_expression : unary_expression """
+ p[0] = p[1]
+
+ def p_cast_expression_2(self, p):
+ """ cast_expression : LPAREN type_name RPAREN cast_expression """
+ p[0] = c_ast.Cast(p[2], p[4], self._token_coord(p, 1))
+
+ def p_unary_expression_1(self, p):
+ """ unary_expression : postfix_expression """
+ p[0] = p[1]
+
+ def p_unary_expression_2(self, p):
+ """ unary_expression : PLUSPLUS unary_expression
+ | MINUSMINUS unary_expression
+ | unary_operator cast_expression
+ """
+ p[0] = c_ast.UnaryOp(p[1], p[2], p[2].coord)
+
+ def p_unary_expression_3(self, p):
+ """ unary_expression : SIZEOF unary_expression
+ | SIZEOF LPAREN type_name RPAREN
+ | _ALIGNOF LPAREN type_name RPAREN
+ """
+ p[0] = c_ast.UnaryOp(
+ p[1],
+ p[2] if len(p) == 3 else p[3],
+ self._token_coord(p, 1))
+
+ def p_unary_operator(self, p):
+ """ unary_operator : AND
+ | TIMES
+ | PLUS
+ | MINUS
+ | NOT
+ | LNOT
+ """
+ p[0] = p[1]
+
+ def p_postfix_expression_1(self, p):
+ """ postfix_expression : primary_expression """
+ p[0] = p[1]
+
+ def p_postfix_expression_2(self, p):
+ """ postfix_expression : postfix_expression LBRACKET expression RBRACKET """
+ p[0] = c_ast.ArrayRef(p[1], p[3], p[1].coord)
+
+ def p_postfix_expression_3(self, p):
+ """ postfix_expression : postfix_expression LPAREN argument_expression_list RPAREN
+ | postfix_expression LPAREN RPAREN
+ """
+ p[0] = c_ast.FuncCall(p[1], p[3] if len(p) == 5 else None, p[1].coord)
+
+ def p_postfix_expression_4(self, p):
+ """ postfix_expression : postfix_expression PERIOD ID
+ | postfix_expression PERIOD TYPEID
+ | postfix_expression ARROW ID
+ | postfix_expression ARROW TYPEID
+ """
+ field = c_ast.ID(p[3], self._token_coord(p, 3))
+ p[0] = c_ast.StructRef(p[1], p[2], field, p[1].coord)
+
+ def p_postfix_expression_5(self, p):
+ """ postfix_expression : postfix_expression PLUSPLUS
+ | postfix_expression MINUSMINUS
+ """
+ p[0] = c_ast.UnaryOp('p' + p[2], p[1], p[1].coord)
+
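+ # The 'p' prefix marks the postfix flavor: "i++" becomes
+ # UnaryOp(op='p++', expr=ID('i')), distinguishing it from prefix "++i",
+ # which p_unary_expression_2 emits as UnaryOp(op='++', ...).
+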
+ def p_postfix_expression_6(self, p):
+ """ postfix_expression : LPAREN type_name RPAREN brace_open initializer_list brace_close
+ | LPAREN type_name RPAREN brace_open initializer_list COMMA brace_close
+ """
+ p[0] = c_ast.CompoundLiteral(p[2], p[5])
+
+ def p_primary_expression_1(self, p):
+ """ primary_expression : identifier """
+ p[0] = p[1]
+
+ def p_primary_expression_2(self, p):
+ """ primary_expression : constant """
+ p[0] = p[1]
+
+ def p_primary_expression_3(self, p):
+ """ primary_expression : unified_string_literal
+ | unified_wstring_literal
+ """
+ p[0] = p[1]
+
+ def p_primary_expression_4(self, p):
+ """ primary_expression : LPAREN expression RPAREN """
+ p[0] = p[2]
+
+ def p_primary_expression_5(self, p):
+ """ primary_expression : OFFSETOF LPAREN type_name COMMA offsetof_member_designator RPAREN
+ """
+ coord = self._token_coord(p, 1)
+ p[0] = c_ast.FuncCall(c_ast.ID(p[1], coord),
+ c_ast.ExprList([p[3], p[5]], coord),
+ coord)
+
+ def p_offsetof_member_designator(self, p):
+ """ offsetof_member_designator : identifier
+ | offsetof_member_designator PERIOD identifier
+ | offsetof_member_designator LBRACKET expression RBRACKET
+ """
+ if len(p) == 2:
+ p[0] = p[1]
+ elif len(p) == 4:
+ p[0] = c_ast.StructRef(p[1], p[2], p[3], p[1].coord)
+ elif len(p) == 5:
+ p[0] = c_ast.ArrayRef(p[1], p[3], p[1].coord)
+ else:
+ raise NotImplementedError("Unexpected parsing state. len(p): %u" % len(p))
+
+ def p_argument_expression_list(self, p):
+ """ argument_expression_list : assignment_expression
+ | argument_expression_list COMMA assignment_expression
+ """
+ if len(p) == 2: # single expr
+ p[0] = c_ast.ExprList([p[1]], p[1].coord)
+ else:
+ p[1].exprs.append(p[3])
+ p[0] = p[1]
+
+ def p_identifier(self, p):
+ """ identifier : ID """
+ p[0] = c_ast.ID(p[1], self._token_coord(p, 1))
+
+ def p_constant_1(self, p):
+ """ constant : INT_CONST_DEC
+ | INT_CONST_OCT
+ | INT_CONST_HEX
+ | INT_CONST_BIN
+ | INT_CONST_CHAR
+ """
+ uCount = 0
+ lCount = 0
+ for x in p[1][-3:]:
+ if x in ('l', 'L'):
+ lCount += 1
+ elif x in ('u', 'U'):
+ uCount += 1
+ if uCount > 1:
+ raise ValueError('Constant cannot have more than one u/U suffix.')
+ elif lCount > 2:
+ raise ValueError('Constant cannot have more than two l/L suffixes.')
+ prefix = 'unsigned ' * uCount + 'long ' * lCount
+ p[0] = c_ast.Constant(
+ prefix + 'int', p[1], self._token_coord(p, 1))
+
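+ # So "42ul" becomes Constant(type='unsigned long int', value='42ul') and
+ # "7LL" becomes Constant(type='long long int', value='7LL').
+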
+ def p_constant_2(self, p):
+ """ constant : FLOAT_CONST
+ | HEX_FLOAT_CONST
+ """
+ if 'x' in p[1].lower():
+ t = 'float'
+ else:
+ if p[1][-1] in ('f', 'F'):
+ t = 'float'
+ elif p[1][-1] in ('l', 'L'):
+ t = 'long double'
+ else:
+ t = 'double'
+
+ p[0] = c_ast.Constant(
+ t, p[1], self._token_coord(p, 1))
+
+ def p_constant_3(self, p):
+ """ constant : CHAR_CONST
+ | WCHAR_CONST
+ | U8CHAR_CONST
+ | U16CHAR_CONST
+ | U32CHAR_CONST
+ """
+ p[0] = c_ast.Constant(
+ 'char', p[1], self._token_coord(p, 1))
+
+ # The "unified" string and wstring literal rules are for supporting
+ # concatenation of adjacent string literals.
+ # I.e. "hello " "world" is seen by the C compiler as a single string literal
+ # with the value "hello world"
+ #
+ def p_unified_string_literal(self, p):
+ """ unified_string_literal : STRING_LITERAL
+ | unified_string_literal STRING_LITERAL
+ """
+ if len(p) == 2: # single literal
+ p[0] = c_ast.Constant(
+ 'string', p[1], self._token_coord(p, 1))
+ else:
+ p[1].value = p[1].value[:-1] + p[2][1:]
+ p[0] = p[1]
+
+ def p_unified_wstring_literal(self, p):
+ """ unified_wstring_literal : WSTRING_LITERAL
+ | U8STRING_LITERAL
+ | U16STRING_LITERAL
+ | U32STRING_LITERAL
+ | unified_wstring_literal WSTRING_LITERAL
+ | unified_wstring_literal U8STRING_LITERAL
+ | unified_wstring_literal U16STRING_LITERAL
+ | unified_wstring_literal U32STRING_LITERAL
+ """
+ if len(p) == 2: # single literal
+ p[0] = c_ast.Constant(
+ 'string', p[1], self._token_coord(p, 1))
+ else:
+ p[1].value = p[1].value.rstrip()[:-1] + p[2][2:]
+ p[0] = p[1]
+
+ def p_brace_open(self, p):
+ """ brace_open : LBRACE
+ """
+ p[0] = p[1]
+ p.set_lineno(0, p.lineno(1))
+
+ def p_brace_close(self, p):
+ """ brace_close : RBRACE
+ """
+ p[0] = p[1]
+ p.set_lineno(0, p.lineno(1))
+
+ def p_empty(self, p):
+ 'empty : '
+ p[0] = None
+
+ def p_error(self, p):
+ # If error recovery is added here in the future, make sure
+ # _get_yacc_lookahead_token still works!
+ #
+ if p:
+ self._parse_error(
+ 'before: %s' % p.value,
+ self._coord(lineno=p.lineno,
+ column=self.clex.find_tok_column(p)))
+ else:
+ self._parse_error('At end of input', self.clex.filename)
diff --git a/billinglayer/python/pycparser/lextab.py b/billinglayer/python/pycparser/lextab.py
new file mode 100644
index 0000000..444b465
--- /dev/null
+++ b/billinglayer/python/pycparser/lextab.py
@@ -0,0 +1,10 @@
+# lextab.py. This file automatically created by PLY (version 3.10). Don't edit!
+_tabversion = '3.10'
+_lextokens = set(('INT_CONST_CHAR', 'VOID', 'LBRACKET', 'WCHAR_CONST', 'FLOAT_CONST', 'MINUS', 'RPAREN', 'STRUCT', 'LONG', 'PLUS', 'ELLIPSIS', 'U32STRING_LITERAL', 'GT', 'GOTO', 'ENUM', 'PERIOD', 'GE', 'INT_CONST_DEC', 'ARROW', '_STATIC_ASSERT', '__INT128', 'HEX_FLOAT_CONST', 'DOUBLE', 'MINUSEQUAL', 'INT_CONST_OCT', 'TIMESEQUAL', 'OR', 'SHORT', 'RETURN', 'RSHIFTEQUAL', '_ALIGNAS', 'RESTRICT', 'STATIC', 'SIZEOF', 'UNSIGNED', 'PLUSPLUS', 'COLON', 'WSTRING_LITERAL', 'DIVIDE', 'FOR', 'UNION', 'EQUALS', 'ELSE', 'ANDEQUAL', 'EQ', 'AND', 'TYPEID', 'LBRACE', 'PPHASH', 'INT', 'SIGNED', 'CONTINUE', 'NOT', 'OREQUAL', 'MOD', 'RSHIFT', 'DEFAULT', '_NORETURN', 'CHAR', 'WHILE', 'DIVEQUAL', '_ALIGNOF', 'EXTERN', 'LNOT', 'CASE', 'LAND', 'REGISTER', 'MODEQUAL', 'NE', 'SWITCH', 'INT_CONST_HEX', '_COMPLEX', 'PPPRAGMASTR', 'PLUSEQUAL', 'U32CHAR_CONST', 'CONDOP', 'U8STRING_LITERAL', 'BREAK', 'VOLATILE', 'PPPRAGMA', 'INLINE', 'INT_CONST_BIN', 'DO', 'U8CHAR_CONST', 'CONST', 'U16STRING_LITERAL', 'LOR', 'CHAR_CONST', 'LSHIFT', 'RBRACE', '_BOOL', 'LE', 'SEMI', '_THREAD_LOCAL', 'LT', 'COMMA', 'U16CHAR_CONST', 'OFFSETOF', '_ATOMIC', 'TYPEDEF', 'XOR', 'AUTO', 'TIMES', 'LPAREN', 'MINUSMINUS', 'ID', 'IF', 'STRING_LITERAL', 'FLOAT', 'XOREQUAL', 'LSHIFTEQUAL', 'RBRACKET'))
+_lexreflags = 64
+_lexliterals = ''
+_lexstateinfo = {'ppline': 'exclusive', 'pppragma': 'exclusive', 'INITIAL': 'inclusive'}
+_lexstatere = {'ppline': [('(?P<t_ppline_FILENAME>"([^"\\\\\\n]|(\\\\[0-9a-zA-Z._~!=&\\^\\-\\\\?\'"]))*")|(?P<t_ppline_LINE_NUMBER>(0(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|([1-9][0-9]*(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?))|(?P<t_ppline_NEWLINE>\\n)|(?P<t_ppline_PPLINE>line)', [None, ('t_ppline_FILENAME', 'FILENAME'), None, None, ('t_ppline_LINE_NUMBER', 'LINE_NUMBER'), None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ('t_ppline_NEWLINE', 'NEWLINE'), ('t_ppline_PPLINE', 'PPLINE')])], 'pppragma': [('(?P<t_pppragma_NEWLINE>\\n)|(?P<t_pppragma_PPPRAGMA>pragma)|(?P<t_pppragma_STR>.+)', [None, ('t_pppragma_NEWLINE', 'NEWLINE'), ('t_pppragma_PPPRAGMA', 'PPPRAGMA'), ('t_pppragma_STR', 'STR')])], 'INITIAL': [('(?P<t_PPHASH>[ \\t]*\\#)|(?P<t_NEWLINE>\\n+)|(?P<t_LBRACE>\\{)|(?P<t_RBRACE>\\})|(?P<t_FLOAT_CONST>((((([0-9]*\\.[0-9]+)|([0-9]+\\.))([eE][-+]?[0-9]+)?)|([0-9]+([eE][-+]?[0-9]+)))[FfLl]?))|(?P<t_HEX_FLOAT_CONST>(0[xX]([0-9a-fA-F]+|((([0-9a-fA-F]+)?\\.[0-9a-fA-F]+)|([0-9a-fA-F]+\\.)))([pP][+-]?[0-9]+)[FfLl]?))|(?P<t_INT_CONST_HEX>0[xX][0-9a-fA-F]+(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|(?P<t_INT_CONST_BIN>0[bB][01]+(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)', [None, ('t_PPHASH', 'PPHASH'), ('t_NEWLINE', 'NEWLINE'), ('t_LBRACE', 'LBRACE'), ('t_RBRACE', 'RBRACE'), ('t_FLOAT_CONST', 'FLOAT_CONST'), None, None, None, None, None, None, None, None, None, ('t_HEX_FLOAT_CONST', 'HEX_FLOAT_CONST'), None, None, None, None, None, None, None, ('t_INT_CONST_HEX', 'INT_CONST_HEX'), None, None, None, None, None, None, None, ('t_INT_CONST_BIN', 'INT_CONST_BIN')]), ('(?P<t_BAD_CONST_OCT>0[0-7]*[89])|(?P<t_INT_CONST_OCT>0[0-7]*(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|(?P<t_INT_CONST_DEC>(0(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|([1-9][0-9]*(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?))|(?P<t_INT_CONST_CHAR>\'([^\'\\\\\\n]|(\\\\(([a-wyzA-Z._~!=&\\^\\-\\\\?\'"]|x(?![0-9a-fA-F]))|(\\d+)(?!\\d)|(x[0-9a-fA-F]+)(?![0-9a-fA-F])))){2,4}\')|(?P<t_CHAR_CONST>\'([^\'\\\\\\n]|(\\\\(([a-wyzA-Z._~!=&\\^\\-\\\\?\'"]|x(?![0-9a-fA-F]))|(\\d+)(?!\\d)|(x[0-9a-fA-F]+)(?![0-9a-fA-F]))))\')|(?P<t_WCHAR_CONST>L\'([^\'\\\\\\n]|(\\\\(([a-wyzA-Z._~!=&\\^\\-\\\\?\'"]|x(?![0-9a-fA-F]))|(\\d+)(?!\\d)|(x[0-9a-fA-F]+)(?![0-9a-fA-F]))))\')|(?P<t_U8CHAR_CONST>u8\'([^\'\\\\\\n]|(\\\\(([a-wyzA-Z._~!=&\\^\\-\\\\?\'"]|x(?![0-9a-fA-F]))|(\\d+)(?!\\d)|(x[0-9a-fA-F]+)(?![0-9a-fA-F]))))\')|(?P<t_U16CHAR_CONST>u\'([^\'\\\\\\n]|(\\\\(([a-wyzA-Z._~!=&\\^\\-\\\\?\'"]|x(?![0-9a-fA-F]))|(\\d+)(?!\\d)|(x[0-9a-fA-F]+)(?![0-9a-fA-F]))))\')|(?P<t_U32CHAR_CONST>U\'([^\'\\\\\\n]|(\\\\(([a-wyzA-Z._~!=&\\^\\-\\\\?\'"]|x(?![0-9a-fA-F]))|(\\d+)(?!\\d)|(x[0-9a-fA-F]+)(?![0-9a-fA-F]))))\')', [None, ('t_BAD_CONST_OCT', 'BAD_CONST_OCT'), ('t_INT_CONST_OCT', 'INT_CONST_OCT'), None, None, None, None, None, None, None, ('t_INT_CONST_DEC', 'INT_CONST_DEC'), None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ('t_INT_CONST_CHAR', 'INT_CONST_CHAR'), None, None, None, None, None, None, ('t_CHAR_CONST', 'CHAR_CONST'), None, None, None, None, None, None, ('t_WCHAR_CONST', 'WCHAR_CONST'), None, None, None, None, None, None, ('t_U8CHAR_CONST', 'U8CHAR_CONST'), None, None, None, None, None, None, ('t_U16CHAR_CONST', 'U16CHAR_CONST'), None, None, None, None, None, None, ('t_U32CHAR_CONST', 'U32CHAR_CONST')]), ('(?P<t_UNMATCHED_QUOTE>(\'([^\'\\\\\\n]|(\\\\(([a-wyzA-Z._~!=&\\^\\-\\\\?\'"]|x(?![0-9a-fA-F]))|(\\d+)(?!\\d)|(x[0-9a-fA-F]+)(?![0-9a-fA-F]))))*\\n)|(\'([^\'\\\\\\n]|(\\\\(([a-wyzA-Z._~!=&\\^\\-\\\\?\'"]|x(?![0-9a-fA-F]))|(\\d+)(?!\\d)|(x[0-9a-fA-F]+)(?![0-9a-fA-F]))))*$))|(?P<t_BAD_CHAR_CONST>(\'([^\'\\\\\\n]|(\\\\(([a-wyzA-Z._~!=&\\^\\-\\\\?\'"]|x(?![0-9a-fA-F]))|(\\d+)(?!\\d)|(x[0-9a-fA-F]+)(?![0-9a-fA-F]))))[^\'\\n]+\')|(\'\')|(\'([\\\\][^a-zA-Z._~^!=&\\^\\-\\\\?\'"x0-9])[^\'\\n]*\'))|(?P<t_WSTRING_LITERAL>L"([^"\\\\\\n]|(\\\\[0-9a-zA-Z._~!=&\\^\\-\\\\?\'"]))*")|(?P<t_U8STRING_LITERAL>u8"([^"\\\\\\n]|(\\\\[0-9a-zA-Z._~!=&\\^\\-\\\\?\'"]))*")|(?P<t_U16STRING_LITERAL>u"([^"\\\\\\n]|(\\\\[0-9a-zA-Z._~!=&\\^\\-\\\\?\'"]))*")|(?P<t_U32STRING_LITERAL>U"([^"\\\\\\n]|(\\\\[0-9a-zA-Z._~!=&\\^\\-\\\\?\'"]))*")|(?P<t_BAD_STRING_LITERAL>"([^"\\\\\\n]|(\\\\[0-9a-zA-Z._~!=&\\^\\-\\\\?\'"]))*([\\\\][^a-zA-Z._~^!=&\\^\\-\\\\?\'"x0-9])([^"\\\\\\n]|(\\\\[0-9a-zA-Z._~!=&\\^\\-\\\\?\'"]))*")|(?P<t_ID>[a-zA-Z_$][0-9a-zA-Z_$]*)|(?P<t_STRING_LITERAL>"([^"\\\\\\n]|(\\\\[0-9a-zA-Z._~!=&\\^\\-\\\\?\'"]))*")|(?P<t_ELLIPSIS>\\.\\.\\.)|(?P<t_PLUSPLUS>\\+\\+)|(?P<t_LOR>\\|\\|)|(?P<t_XOREQUAL>\\^=)|(?P<t_OREQUAL>\\|=)|(?P<t_LSHIFTEQUAL><<=)|(?P<t_RSHIFTEQUAL>>>=)|(?P<t_PLUSEQUAL>\\+=)|(?P<t_TIMESEQUAL>\\*=)', [None, ('t_UNMATCHED_QUOTE', 'UNMATCHED_QUOTE'), None, None, None, None, None, None, None, None, None, None, None, None, None, None, ('t_BAD_CHAR_CONST', 'BAD_CHAR_CONST'), None, None, None, None, None, None, None, None, None, None, ('t_WSTRING_LITERAL', 'WSTRING_LITERAL'), None, None, ('t_U8STRING_LITERAL', 'U8STRING_LITERAL'), None, None, ('t_U16STRING_LITERAL', 'U16STRING_LITERAL'), None, None, ('t_U32STRING_LITERAL', 'U32STRING_LITERAL'), None, None, ('t_BAD_STRING_LITERAL', 'BAD_STRING_LITERAL'), None, None, None, None, None, ('t_ID', 'ID'), (None, 'STRING_LITERAL'), None, None, (None, 'ELLIPSIS'), (None, 'PLUSPLUS'), (None, 'LOR'), (None, 'XOREQUAL'), (None, 'OREQUAL'), (None, 'LSHIFTEQUAL'), (None, 'RSHIFTEQUAL'), (None, 'PLUSEQUAL'), (None, 'TIMESEQUAL')]), ('(?P<t_PLUS>\\+)|(?P<t_MODEQUAL>%=)|(?P<t_DIVEQUAL>/=)|(?P<t_RBRACKET>\\])|(?P<t_CONDOP>\\?)|(?P<t_XOR>\\^)|(?P<t_LSHIFT><<)|(?P<t_LE><=)|(?P<t_LPAREN>\\()|(?P<t_ARROW>->)|(?P<t_EQ>==)|(?P<t_NE>!=)|(?P<t_MINUSMINUS>--)|(?P<t_OR>\\|)|(?P<t_TIMES>\\*)|(?P<t_LBRACKET>\\[)|(?P<t_GE>>=)|(?P<t_RPAREN>\\))|(?P<t_LAND>&&)|(?P<t_RSHIFT>>>)|(?P<t_MINUSEQUAL>-=)|(?P<t_PERIOD>\\.)|(?P<t_ANDEQUAL>&=)|(?P<t_EQUALS>=)|(?P<t_LT><)|(?P<t_COMMA>,)|(?P<t_DIVIDE>/)|(?P<t_AND>&)|(?P<t_MOD>%)|(?P<t_SEMI>;)|(?P<t_MINUS>-)|(?P<t_GT>>)|(?P<t_COLON>:)|(?P<t_NOT>~)|(?P<t_LNOT>!)', [None, (None, 'PLUS'), (None, 'MODEQUAL'), (None, 'DIVEQUAL'), (None, 'RBRACKET'), (None, 'CONDOP'), (None, 'XOR'), (None, 'LSHIFT'), (None, 'LE'), (None, 'LPAREN'), (None, 'ARROW'), (None, 'EQ'), (None, 'NE'), (None, 'MINUSMINUS'), (None, 'OR'), (None, 'TIMES'), (None, 'LBRACKET'), (None, 'GE'), (None, 'RPAREN'), (None, 'LAND'), (None, 'RSHIFT'), (None, 'MINUSEQUAL'), (None, 'PERIOD'), (None, 'ANDEQUAL'), (None, 'EQUALS'), (None, 'LT'), (None, 'COMMA'), (None, 'DIVIDE'), (None, 'AND'), (None, 'MOD'), (None, 'SEMI'), (None, 'MINUS'), (None, 'GT'), (None, 'COLON'), (None, 'NOT'), (None, 'LNOT')])]}
+_lexstateignore = {'ppline': ' \t', 'pppragma': ' \t', 'INITIAL': ' \t'}
+_lexstateerrorf = {'ppline': 't_ppline_error', 'pppragma': 't_pppragma_error', 'INITIAL': 't_error'}
+_lexstateeoff = {}
diff --git a/billinglayer/python/pycparser/ply/__init__.py b/billinglayer/python/pycparser/ply/__init__.py
new file mode 100644
index 0000000..6e53cdd
--- /dev/null
+++ b/billinglayer/python/pycparser/ply/__init__.py
@@ -0,0 +1,5 @@
+# PLY package
+# Author: David Beazley (dave@dabeaz.com)
+
+__version__ = '3.9'
+__all__ = ['lex','yacc']
diff --git a/billinglayer/python/pycparser/ply/__pycache__/__init__.cpython-311.pyc b/billinglayer/python/pycparser/ply/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..0ebdac1
Binary files /dev/null and b/billinglayer/python/pycparser/ply/__pycache__/__init__.cpython-311.pyc differ
diff --git a/billinglayer/python/pycparser/ply/__pycache__/cpp.cpython-311.pyc b/billinglayer/python/pycparser/ply/__pycache__/cpp.cpython-311.pyc
new file mode 100644
index 0000000..28f4f3e
Binary files /dev/null and b/billinglayer/python/pycparser/ply/__pycache__/cpp.cpython-311.pyc differ
diff --git a/billinglayer/python/pycparser/ply/__pycache__/ctokens.cpython-311.pyc b/billinglayer/python/pycparser/ply/__pycache__/ctokens.cpython-311.pyc
new file mode 100644
index 0000000..08f64a8
Binary files /dev/null and b/billinglayer/python/pycparser/ply/__pycache__/ctokens.cpython-311.pyc differ
diff --git a/billinglayer/python/pycparser/ply/__pycache__/lex.cpython-311.pyc b/billinglayer/python/pycparser/ply/__pycache__/lex.cpython-311.pyc
new file mode 100644
index 0000000..b4e8537
Binary files /dev/null and b/billinglayer/python/pycparser/ply/__pycache__/lex.cpython-311.pyc differ
diff --git a/billinglayer/python/pycparser/ply/__pycache__/yacc.cpython-311.pyc b/billinglayer/python/pycparser/ply/__pycache__/yacc.cpython-311.pyc
new file mode 100644
index 0000000..eadaca8
Binary files /dev/null and b/billinglayer/python/pycparser/ply/__pycache__/yacc.cpython-311.pyc differ
diff --git a/billinglayer/python/pycparser/ply/__pycache__/ygen.cpython-311.pyc b/billinglayer/python/pycparser/ply/__pycache__/ygen.cpython-311.pyc
new file mode 100644
index 0000000..4427edf
Binary files /dev/null and b/billinglayer/python/pycparser/ply/__pycache__/ygen.cpython-311.pyc differ
diff --git a/billinglayer/python/pycparser/ply/cpp.py b/billinglayer/python/pycparser/ply/cpp.py
new file mode 100644
index 0000000..86273ea
--- /dev/null
+++ b/billinglayer/python/pycparser/ply/cpp.py
@@ -0,0 +1,905 @@
+# -----------------------------------------------------------------------------
+# cpp.py
+#
+# Author: David Beazley (http://www.dabeaz.com)
+# Copyright (C) 2017
+# All rights reserved
+#
+# This module implements an ANSI-C style lexical preprocessor for PLY.
+# -----------------------------------------------------------------------------
+import sys
+
+# Some Python 3 compatibility shims
+if sys.version_info.major < 3:
+ STRING_TYPES = (str, unicode)
+else:
+ STRING_TYPES = str
+ xrange = range
+
+# -----------------------------------------------------------------------------
+# Default preprocessor lexer definitions. These tokens are enough to get
+# a basic preprocessor working. Other modules may import these if they want
+# -----------------------------------------------------------------------------
+
+tokens = (
+ 'CPP_ID','CPP_INTEGER', 'CPP_FLOAT', 'CPP_STRING', 'CPP_CHAR', 'CPP_WS', 'CPP_COMMENT1', 'CPP_COMMENT2', 'CPP_POUND','CPP_DPOUND'
+)
+
+literals = "+-*/%|&~^<>=!?()[]{}.,;:\\\'\""
+
+# Whitespace
+def t_CPP_WS(t):
+ r'\s+'
+ t.lexer.lineno += t.value.count("\n")
+ return t
+
+t_CPP_POUND = r'\#'
+t_CPP_DPOUND = r'\#\#'
+
+# Identifier
+t_CPP_ID = r'[A-Za-z_][\w_]*'
+
+# Integer literal
+def CPP_INTEGER(t):
+ r'(((((0x)|(0X))[0-9a-fA-F]+)|(\d+))([uU][lL]|[lL][uU]|[uU]|[lL])?)'
+ return t
+
+t_CPP_INTEGER = CPP_INTEGER
+
+# Floating literal
+t_CPP_FLOAT = r'((\d+)(\.\d+)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?'
+
+# String literal
+def t_CPP_STRING(t):
+ r'\"([^\\\n]|(\\(.|\n)))*?\"'
+ t.lexer.lineno += t.value.count("\n")
+ return t
+
+# Character constant 'c' or L'c'
+def t_CPP_CHAR(t):
+ r'(L)?\'([^\\\n]|(\\(.|\n)))*?\''
+ t.lexer.lineno += t.value.count("\n")
+ return t
+
+# Comment
+def t_CPP_COMMENT1(t):
+ r'(/\*(.|\n)*?\*/)'
+ ncr = t.value.count("\n")
+ t.lexer.lineno += ncr
+ # replace with one space or a number of '\n'
+ t.type = 'CPP_WS'; t.value = '\n' * ncr if ncr else ' '
+ return t
+
+# Line comment
+def t_CPP_COMMENT2(t):
+ r'(//.*?(\n|$))'
+ # replace with '\n'
+ t.type = 'CPP_WS'; t.value = '\n'
+ return t
+
+def t_error(t):
+ t.type = t.value[0]
+ t.value = t.value[0]
+ t.lexer.skip(1)
+ return t
+
+import re
+import copy
+import time
+import os.path
+
+# -----------------------------------------------------------------------------
+# trigraph()
+#
+# Given an input string, this function replaces all trigraph sequences.
+# The following mapping is used:
+#
+# ??= #
+# ??/ \
+# ??' ^
+# ??( [
+# ??) ]
+# ??! |
+# ??< {
+# ??> }
+# ??- ~
+# -----------------------------------------------------------------------------
+
+_trigraph_pat = re.compile(r'''\?\?[=/\'\(\)\!<>\-]''')
+_trigraph_rep = {
+ '=':'#',
+ '/':'\\',
+ "'":'^',
+ '(':'[',
+ ')':']',
+ '!':'|',
+ '<':'{',
+ '>':'}',
+ '-':'~'
+}
+
+def trigraph(input):
+ return _trigraph_pat.sub(lambda g: _trigraph_rep[g.group()[-1]],input)
+
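+# For example, trigraph("??=define ARR(x) x??(0??)") returns
+# "#define ARR(x) x[0]".
+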
+# ------------------------------------------------------------------
+# Macro object
+#
+# This object holds information about preprocessor macros
+#
+# .name - Macro name (string)
+# .value - Macro value (a list of tokens)
+# .arglist - List of argument names
+# .variadic - Boolean indicating whether or not variadic macro
+# .vararg - Name of the variadic parameter
+#
+# When a macro is created, the macro replacement token sequence is
+# pre-scanned and used to create patch lists that are later used
+# during macro expansion
+# ------------------------------------------------------------------
+
+class Macro(object):
+ def __init__(self,name,value,arglist=None,variadic=False):
+ self.name = name
+ self.value = value
+ self.arglist = arglist
+ self.variadic = variadic
+ if variadic:
+ self.vararg = arglist[-1]
+ self.source = None
+
+# ------------------------------------------------------------------
+# Preprocessor object
+#
+# Object representing a preprocessor. Contains macro definitions,
+# include directories, and other information
+# ------------------------------------------------------------------
+
+class Preprocessor(object):
+ def __init__(self,lexer=None):
+ if lexer is None:
+ # No lexer was supplied; fall back to the most recently built PLY
+ # lexer. Import the sibling module here, since this file does not
+ # import lex at module level.
+ from . import lex
+ lexer = lex.lexer
+ self.lexer = lexer
+ self.macros = { }
+ self.path = []
+ self.temp_path = []
+
+ # Probe the lexer for selected tokens
+ self.lexprobe()
+
+ tm = time.localtime()
+ self.define("__DATE__ \"%s\"" % time.strftime("%b %d %Y",tm))
+ self.define("__TIME__ \"%s\"" % time.strftime("%H:%M:%S",tm))
+ self.parser = None
+
+ # -----------------------------------------------------------------------------
+ # tokenize()
+ #
+ # Utility function. Given a string of text, tokenize into a list of tokens
+ # -----------------------------------------------------------------------------
+
+ def tokenize(self,text):
+ tokens = []
+ self.lexer.input(text)
+ while True:
+ tok = self.lexer.token()
+ if not tok: break
+ tokens.append(tok)
+ return tokens
+
+ # ---------------------------------------------------------------------
+ # error()
+ #
+ # Report a preprocessor error/warning of some kind
+ # ----------------------------------------------------------------------
+
+ def error(self,file,line,msg):
+ print("%s:%d %s" % (file,line,msg))
+
+ # ----------------------------------------------------------------------
+ # lexprobe()
+ #
+ # This method probes the preprocessor lexer object to discover
+ # the token types of symbols that are important to the preprocessor.
+ # If this works right, the preprocessor will simply "work"
+ # with any suitable lexer regardless of how tokens have been named.
+ # ----------------------------------------------------------------------
+
+ def lexprobe(self):
+
+ # Determine the token type for identifiers
+ self.lexer.input("identifier")
+ tok = self.lexer.token()
+ if not tok or tok.value != "identifier":
+ print("Couldn't determine identifier type")
+ else:
+ self.t_ID = tok.type
+
+ # Determine the token type for integers
+ self.lexer.input("12345")
+ tok = self.lexer.token()
+ if not tok or int(tok.value) != 12345:
+ print("Couldn't determine integer type")
+ else:
+ self.t_INTEGER = tok.type
+ self.t_INTEGER_TYPE = type(tok.value)
+
+ # Determine the token type for strings enclosed in double quotes
+ self.lexer.input("\"filename\"")
+ tok = self.lexer.token()
+ if not tok or tok.value != "\"filename\"":
+ print("Couldn't determine string type")
+ else:
+ self.t_STRING = tok.type
+
+ # Determine the token type for whitespace--if any
+ self.lexer.input(" ")
+ tok = self.lexer.token()
+ if not tok or tok.value != " ":
+ self.t_SPACE = None
+ else:
+ self.t_SPACE = tok.type
+
+ # Determine the token type for newlines
+ self.lexer.input("\n")
+ tok = self.lexer.token()
+ if not tok or tok.value != "\n":
+ self.t_NEWLINE = None
+ print("Couldn't determine token for newlines")
+ else:
+ self.t_NEWLINE = tok.type
+
+ self.t_WS = (self.t_SPACE, self.t_NEWLINE)
+
+ # Check for other characters used by the preprocessor
+ chars = [ '<','>','#','##','\\','(',')',',','.']
+ for c in chars:
+ self.lexer.input(c)
+ tok = self.lexer.token()
+ if not tok or tok.value != c:
+ print("Unable to lex '%s' required for preprocessor" % c)
+
+ # ----------------------------------------------------------------------
+ # add_path()
+ #
+ # Adds a search path to the preprocessor.
+ # ----------------------------------------------------------------------
+
+ def add_path(self,path):
+ self.path.append(path)
+
+ # ----------------------------------------------------------------------
+ # group_lines()
+ #
+ # Given an input string, this function splits it into lines. Trailing whitespace
+ # is removed. Any line ending with \ is grouped with the next line. This
+ # function forms the lowest level of the preprocessor---grouping the text into
+ # a line-by-line format.
+ # ----------------------------------------------------------------------
+
+ def group_lines(self,input):
+ lex = self.lexer.clone()
+ lines = [x.rstrip() for x in input.splitlines()]
+ for i in xrange(len(lines)):
+ j = i+1
+ while lines[i].endswith('\\') and (j < len(lines)):
+ lines[i] = lines[i][:-1]+lines[j]
+ lines[j] = ""
+ j += 1
+
+ input = "\n".join(lines)
+ lex.input(input)
+ lex.lineno = 1
+
+ current_line = []
+ while True:
+ tok = lex.token()
+ if not tok:
+ break
+ current_line.append(tok)
+ if tok.type in self.t_WS and '\n' in tok.value:
+ yield current_line
+ current_line = []
+
+ if current_line:
+ yield current_line
+
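+ # For example, the two physical lines "#define A \" and "1" are spliced
+ # into the single logical line "#define A 1" before the directive
+ # handling in parsegen() ever sees them.
+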
+ # ----------------------------------------------------------------------
+ # tokenstrip()
+ #
+ # Remove leading/trailing whitespace tokens from a token list
+ # ----------------------------------------------------------------------
+
+ def tokenstrip(self,tokens):
+ i = 0
+ while i < len(tokens) and tokens[i].type in self.t_WS:
+ i += 1
+ del tokens[:i]
+ i = len(tokens)-1
+ while i >= 0 and tokens[i].type in self.t_WS:
+ i -= 1
+ del tokens[i+1:]
+ return tokens
+
+
+ # ----------------------------------------------------------------------
+ # collect_args()
+ #
+ # Collects comma separated arguments from a list of tokens. The arguments
+ # must be enclosed in parenthesis. Returns a tuple (tokencount,args,positions)
+ # where tokencount is the number of tokens consumed, args is a list of arguments,
+ # and positions is a list of integers containing the starting index of each
+ # argument. Each argument is represented by a list of tokens.
+ #
+ # When collecting arguments, leading and trailing whitespace is removed
+ # from each argument.
+ #
+ # This function properly handles nested parenthesis and commas---these do not
+ # define new arguments.
+ # ----------------------------------------------------------------------
+
+ def collect_args(self,tokenlist):
+ args = []
+ positions = []
+ current_arg = []
+ nesting = 1
+ tokenlen = len(tokenlist)
+
+ # Search for the opening '('.
+ i = 0
+ while (i < tokenlen) and (tokenlist[i].type in self.t_WS):
+ i += 1
+
+ if (i < tokenlen) and (tokenlist[i].value == '('):
+ positions.append(i+1)
+ else:
+ self.error(self.source,tokenlist[0].lineno,"Missing '(' in macro arguments")
+ return 0, [], []
+
+ i += 1
+
+ while i < tokenlen:
+ t = tokenlist[i]
+ if t.value == '(':
+ current_arg.append(t)
+ nesting += 1
+ elif t.value == ')':
+ nesting -= 1
+ if nesting == 0:
+ if current_arg:
+ args.append(self.tokenstrip(current_arg))
+ positions.append(i)
+ return i+1,args,positions
+ current_arg.append(t)
+ elif t.value == ',' and nesting == 1:
+ args.append(self.tokenstrip(current_arg))
+ positions.append(i+1)
+ current_arg = []
+ else:
+ current_arg.append(t)
+ i += 1
+
+ # Missing end argument
+ self.error(self.source,tokenlist[-1].lineno,"Missing ')' in macro arguments")
+ return 0, [],[]
+
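+ # For the token stream of "(a, f(b, c), d)" this returns three
+ # arguments: [a], [f ( b , c )] and [d]; the comma inside the nested
+ # call does not split an argument because nesting > 1 there.
+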
+ # ----------------------------------------------------------------------
+ # macro_prescan()
+ #
+ # Examine the macro value (token sequence) and identify patch points
+ # This is used to speed up macro expansion later on---we'll know
+ # right away where to apply patches to the value to form the expansion
+ # ----------------------------------------------------------------------
+
+ def macro_prescan(self,macro):
+ macro.patch = [] # Standard macro arguments
+ macro.str_patch = [] # String conversion expansion
+ macro.var_comma_patch = [] # Variadic macro comma patch
+ i = 0
+ while i < len(macro.value):
+ if macro.value[i].type == self.t_ID and macro.value[i].value in macro.arglist:
+ argnum = macro.arglist.index(macro.value[i].value)
+ # Conversion of argument to a string
+ if i > 0 and macro.value[i-1].value == '#':
+ macro.value[i] = copy.copy(macro.value[i])
+ macro.value[i].type = self.t_STRING
+ del macro.value[i-1]
+ macro.str_patch.append((argnum,i-1))
+ continue
+ # Concatenation
+ elif (i > 0 and macro.value[i-1].value == '##'):
+ macro.patch.append(('c',argnum,i-1))
+ del macro.value[i-1]
+ continue
+ elif ((i+1) < len(macro.value) and macro.value[i+1].value == '##'):
+ macro.patch.append(('c',argnum,i))
+ i += 1
+ continue
+ # Standard expansion
+ else:
+ macro.patch.append(('e',argnum,i))
+ elif macro.value[i].value == '##':
+ if macro.variadic and (i > 0) and (macro.value[i-1].value == ',') and \
+ ((i+1) < len(macro.value)) and (macro.value[i+1].type == self.t_ID) and \
+ (macro.value[i+1].value == macro.vararg):
+ macro.var_comma_patch.append(i-1)
+ i += 1
+ macro.patch.sort(key=lambda x: x[2],reverse=True)
+
+ # ----------------------------------------------------------------------
+ # macro_expand_args()
+ #
+ # Given a Macro and list of arguments (each a token list), this method
+ # returns an expanded version of a macro. The return value is a token sequence
+ # representing the replacement macro tokens
+ # ----------------------------------------------------------------------
+
+ def macro_expand_args(self,macro,args):
+ # Make a copy of the macro token sequence
+ rep = [copy.copy(_x) for _x in macro.value]
+
+ # Make string expansion patches. These do not alter the length of the replacement sequence.
+
+ str_expansion = {}
+ for argnum, i in macro.str_patch:
+ if argnum not in str_expansion:
+ str_expansion[argnum] = ('"%s"' % "".join([x.value for x in args[argnum]])).replace("\\","\\\\")
+ rep[i] = copy.copy(rep[i])
+ rep[i].value = str_expansion[argnum]
+
+ # Make the variadic macro comma patch. If the variadic macro argument is
+ # empty, we get rid of the comma that precedes it in the replacement.
+ comma_patch = False
+ if macro.variadic and not args[-1]:
+ for i in macro.var_comma_patch:
+ rep[i] = None
+ comma_patch = True
+
+ # Make all other patches. The order of these matters. It is assumed that the patch list
+ # has been sorted in reverse order of patch location since replacements will cause the
+ # size of the replacement sequence to expand from the patch point.
+
+ expanded = { }
+ for ptype, argnum, i in macro.patch:
+ # Concatenation. Argument is left unexpanded
+ if ptype == 'c':
+ rep[i:i+1] = args[argnum]
+ # Normal expansion. Argument is macro expanded first
+ elif ptype == 'e':
+ if argnum not in expanded:
+ expanded[argnum] = self.expand_macros(args[argnum])
+ rep[i:i+1] = expanded[argnum]
+
+ # Get rid of removed comma if necessary
+ if comma_patch:
+ rep = [_i for _i in rep if _i]
+
+ return rep
+
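+ # As a sketch: for "#define STR(x) #x", macro_prescan records a
+ # str_patch, so STR(abc) patches the replacement to the single string
+ # token "abc" (backslashes escaped) instead of expanding the argument.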
+
+ # ----------------------------------------------------------------------
+ # expand_macros()
+ #
+ # Given a list of tokens, this function performs macro expansion.
+ # The expanded argument is a dictionary that contains macros already
+ # expanded. This is used to prevent infinite recursion.
+ # ----------------------------------------------------------------------
+
+ def expand_macros(self,tokens,expanded=None):
+ if expanded is None:
+ expanded = {}
+ i = 0
+ while i < len(tokens):
+ t = tokens[i]
+ if t.type == self.t_ID:
+ if t.value in self.macros and t.value not in expanded:
+ # Yes, we found a macro match
+ expanded[t.value] = True
+
+ m = self.macros[t.value]
+ if not m.arglist:
+ # A simple macro
+ ex = self.expand_macros([copy.copy(_x) for _x in m.value],expanded)
+ for e in ex:
+ e.lineno = t.lineno
+ tokens[i:i+1] = ex
+ i += len(ex)
+ else:
+ # A macro with arguments
+ j = i + 1
+ while j < len(tokens) and tokens[j].type in self.t_WS:
+ j += 1
+ if j < len(tokens) and tokens[j].value == '(':
+ tokcount,args,positions = self.collect_args(tokens[j:])
+ if not m.variadic and len(args) != len(m.arglist):
+ self.error(self.source,t.lineno,"Macro %s requires %d arguments" % (t.value,len(m.arglist)))
+ i = j + tokcount
+ elif m.variadic and len(args) < len(m.arglist)-1:
+ if len(m.arglist) > 2:
+ self.error(self.source,t.lineno,"Macro %s must have at least %d arguments" % (t.value, len(m.arglist)-1))
+ else:
+ self.error(self.source,t.lineno,"Macro %s must have at least %d argument" % (t.value, len(m.arglist)-1))
+ i = j + tokcount
+ else:
+ if m.variadic:
+ if len(args) == len(m.arglist)-1:
+ args.append([])
+ else:
+ args[len(m.arglist)-1] = tokens[j+positions[len(m.arglist)-1]:j+tokcount-1]
+ del args[len(m.arglist):]
+
+ # Get macro replacement text
+ rep = self.macro_expand_args(m,args)
+ rep = self.expand_macros(rep,expanded)
+ for r in rep:
+ r.lineno = t.lineno
+ tokens[i:j+tokcount] = rep
+ i += len(rep)
+ else:
+ # The name is not followed by '(' so this is not a macro
+ # invocation; leave the identifier and move past it.
+ i = j
+ del expanded[t.value]
+ continue
+ elif t.value == '__LINE__':
+ t.type = self.t_INTEGER
+ t.value = self.t_INTEGER_TYPE(t.lineno)
+
+ i += 1
+ return tokens
+
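+ # The 'expanded' guard is what keeps self-referential macros finite:
+ # with "#define FOO FOO + 1", expanding FOO rewrites it once to
+ # "FOO + 1" and leaves the inner FOO alone.
+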
+ # ----------------------------------------------------------------------
+ # evalexpr()
+ #
+ # Evaluate an expression token sequence for the purposes of evaluating
+ # integral expressions.
+ # ----------------------------------------------------------------------
+
+ def evalexpr(self,tokens):
+ # tokens = tokenize(line)
+ # Search for defined macros
+ i = 0
+ while i < len(tokens):
+ if tokens[i].type == self.t_ID and tokens[i].value == 'defined':
+ j = i + 1
+ needparen = False
+ result = "0L"
+ while j < len(tokens):
+ if tokens[j].type in self.t_WS:
+ j += 1
+ continue
+ elif tokens[j].type == self.t_ID:
+ if tokens[j].value in self.macros:
+ result = "1L"
+ else:
+ result = "0L"
+ if not needparen: break
+ elif tokens[j].value == '(':
+ needparen = True
+ elif tokens[j].value == ')':
+ break
+ else:
+ self.error(self.source,tokens[i].lineno,"Malformed defined()")
+ j += 1
+ tokens[i].type = self.t_INTEGER
+ tokens[i].value = self.t_INTEGER_TYPE(result)
+ del tokens[i+1:j+1]
+ i += 1
+ tokens = self.expand_macros(tokens)
+ for i,t in enumerate(tokens):
+ if t.type == self.t_ID:
+ tokens[i] = copy.copy(t)
+ tokens[i].type = self.t_INTEGER
+ tokens[i].value = self.t_INTEGER_TYPE("0L")
+ elif t.type == self.t_INTEGER:
+ tokens[i] = copy.copy(t)
+ # Strip off any trailing suffixes
+ tokens[i].value = str(tokens[i].value)
+ while tokens[i].value[-1] not in "0123456789abcdefABCDEF":
+ tokens[i].value = tokens[i].value[:-1]
+
+ expr = "".join([str(x.value) for x in tokens])
+ expr = expr.replace("&&"," and ")
+ expr = expr.replace("||"," or ")
+ expr = expr.replace("!"," not ")
+ try:
+ result = eval(expr)
+ except Exception:
+ self.error(self.source,tokens[0].lineno,"Couldn't evaluate expression")
+ result = 0
+ return result
+
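+ # For instance, "#if defined(FOO) && BAR" with neither name defined is
+ # rewritten by evalexpr() to something like "0 and 0L"; if eval() of
+ # the rewritten string fails, the error path treats the result as 0,
+ # so the branch is disabled either way.
+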
+ # ----------------------------------------------------------------------
+ # parsegen()
+ #
+ # Parse an input string.
+ # ----------------------------------------------------------------------
+ def parsegen(self,input,source=None):
+
+ # Replace trigraph sequences
+ t = trigraph(input)
+ lines = self.group_lines(t)
+
+ if not source:
+ source = ""
+
+ self.define("__FILE__ \"%s\"" % source)
+
+ self.source = source
+ chunk = []
+ enable = True
+ iftrigger = False
+ ifstack = []
+
+ for x in lines:
+ for i,tok in enumerate(x):
+ if tok.type not in self.t_WS: break
+ if tok.value == '#':
+ # Preprocessor directive
+
+ # insert necessary whitespace instead of eaten tokens
+ for tok in x:
+ if tok.type in self.t_WS and '\n' in tok.value:
+ chunk.append(tok)
+
+ dirtokens = self.tokenstrip(x[i+1:])
+ if dirtokens:
+ name = dirtokens[0].value
+ args = self.tokenstrip(dirtokens[1:])
+ else:
+ name = ""
+ args = []
+
+ if name == 'define':
+ if enable:
+ for tok in self.expand_macros(chunk):
+ yield tok
+ chunk = []
+ self.define(args)
+ elif name == 'include':
+ if enable:
+ for tok in self.expand_macros(chunk):
+ yield tok
+ chunk = []
+ oldfile = self.macros['__FILE__']
+ for tok in self.include(args):
+ yield tok
+ self.macros['__FILE__'] = oldfile
+ self.source = source
+ elif name == 'undef':
+ if enable:
+ for tok in self.expand_macros(chunk):
+ yield tok
+ chunk = []
+ self.undef(args)
+ elif name == 'ifdef':
+ ifstack.append((enable,iftrigger))
+ if enable:
+ if args[0].value not in self.macros:
+ enable = False
+ iftrigger = False
+ else:
+ iftrigger = True
+ elif name == 'ifndef':
+ ifstack.append((enable,iftrigger))
+ if enable:
+ if args[0].value in self.macros:
+ enable = False
+ iftrigger = False
+ else:
+ iftrigger = True
+ elif name == 'if':
+ ifstack.append((enable,iftrigger))
+ if enable:
+ result = self.evalexpr(args)
+ if not result:
+ enable = False
+ iftrigger = False
+ else:
+ iftrigger = True
+ elif name == 'elif':
+ if ifstack:
+ if ifstack[-1][0]: # We only pay attention if outer "if" allows this
+ if enable: # If already true, we flip enable False
+ enable = False
+ elif not iftrigger: # If False, but not triggered yet, we'll check expression
+ result = self.evalexpr(args)
+ if result:
+ enable = True
+ iftrigger = True
+ else:
+ self.error(self.source,dirtokens[0].lineno,"Misplaced #elif")
+
+ elif name == 'else':
+ if ifstack:
+ if ifstack[-1][0]:
+ if enable:
+ enable = False
+ elif not iftrigger:
+ enable = True
+ iftrigger = True
+ else:
+ self.error(self.source,dirtokens[0].lineno,"Misplaced #else")
+
+ elif name == 'endif':
+ if ifstack:
+ enable,iftrigger = ifstack.pop()
+ else:
+ self.error(self.source,dirtokens[0].lineno,"Misplaced #endif")
+ else:
+ # Unknown preprocessor directive
+ pass
+
+ else:
+ # Normal text
+ if enable:
+ chunk.extend(x)
+
+ for tok in self.expand_macros(chunk):
+ yield tok
+ chunk = []
+
+ # ----------------------------------------------------------------------
+ # include()
+ #
+ # Implementation of file-inclusion
+ # ----------------------------------------------------------------------
+
+ def include(self,tokens):
+ # Try to extract the filename and then process an include file
+        if not tokens:
+            return
+        if tokens[0].value != '<' and tokens[0].type != self.t_STRING:
+            tokens = self.expand_macros(tokens)
+
+ if tokens[0].value == '<':
+ # Include <...>
+ i = 1
+ while i < len(tokens):
+ if tokens[i].value == '>':
+ break
+ i += 1
+ else:
+ print("Malformed #include <...>")
+ return
+ filename = "".join([x.value for x in tokens[1:i]])
+ path = self.path + [""] + self.temp_path
+ elif tokens[0].type == self.t_STRING:
+ filename = tokens[0].value[1:-1]
+ path = self.temp_path + [""] + self.path
+ else:
+ print("Malformed #include statement")
+ return
+ for p in path:
+ iname = os.path.join(p,filename)
+ try:
+                with open(iname,"r") as f:
+                    data = f.read()
+ dname = os.path.dirname(iname)
+ if dname:
+ self.temp_path.insert(0,dname)
+ for tok in self.parsegen(data,filename):
+ yield tok
+ if dname:
+ del self.temp_path[0]
+ break
+ except IOError:
+ pass
+ else:
+ print("Couldn't find '%s'" % filename)
+
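+    # Search-order sketch for include() above (illustrative): an angle-bracket
+    # include <file.h> is resolved against self.path, then "", then
+    # self.temp_path; a quoted include "file.h" reverses that order so the
+    # including file's directory (pushed onto temp_path during recursion)
+    # is preferred, mirroring the usual C preprocessor convention.
+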
+ # ----------------------------------------------------------------------
+ # define()
+ #
+ # Define a new macro
+ # ----------------------------------------------------------------------
+
+ def define(self,tokens):
+ if isinstance(tokens,STRING_TYPES):
+ tokens = self.tokenize(tokens)
+
+ linetok = tokens
+ try:
+ name = linetok[0]
+ if len(linetok) > 1:
+ mtype = linetok[1]
+ else:
+ mtype = None
+ if not mtype:
+ m = Macro(name.value,[])
+ self.macros[name.value] = m
+ elif mtype.type in self.t_WS:
+ # A normal macro
+ m = Macro(name.value,self.tokenstrip(linetok[2:]))
+ self.macros[name.value] = m
+ elif mtype.value == '(':
+ # A macro with arguments
+ tokcount, args, positions = self.collect_args(linetok[1:])
+ variadic = False
+ for a in args:
+ if variadic:
+ print("No more arguments may follow a variadic argument")
+ break
+ astr = "".join([str(_i.value) for _i in a])
+ if astr == "...":
+ variadic = True
+ a[0].type = self.t_ID
+ a[0].value = '__VA_ARGS__'
+ del a[1:]
+ continue
+ elif astr[-3:] == "..." and a[0].type == self.t_ID:
+ variadic = True
+ del a[1:]
+ # If, for some reason, "." is part of the identifier, strip off the name for the purposes
+ # of macro expansion
+ if a[0].value[-3:] == '...':
+ a[0].value = a[0].value[:-3]
+ continue
+ if len(a) > 1 or a[0].type != self.t_ID:
+ print("Invalid macro argument")
+ break
+ else:
+ mvalue = self.tokenstrip(linetok[1+tokcount:])
+ i = 0
+ while i < len(mvalue):
+ if i+1 < len(mvalue):
+ if mvalue[i].type in self.t_WS and mvalue[i+1].value == '##':
+ del mvalue[i]
+ continue
+ elif mvalue[i].value == '##' and mvalue[i+1].type in self.t_WS:
+ del mvalue[i+1]
+ i += 1
+ m = Macro(name.value,mvalue,[x[0].value for x in args],variadic)
+ self.macro_prescan(m)
+ self.macros[name.value] = m
+ else:
+ print("Bad macro definition")
+ except LookupError:
+ print("Bad macro definition")
+
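+    # Forms accepted by define() above (a sketch; the argument strings are
+    # hypothetical): self.define('MAX 100') records a simple object-like
+    # macro; self.define('ADD(x,y) ((x)+(y))') records a functional macro
+    # with arglist ['x', 'y']; self.define('LOG(fmt,...) printf(fmt,__VA_ARGS__)')
+    # sets variadic=True and binds the trailing arguments to __VA_ARGS__.
+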
+ # ----------------------------------------------------------------------
+ # undef()
+ #
+ # Undefine a macro
+ # ----------------------------------------------------------------------
+
+ def undef(self,tokens):
+ id = tokens[0].value
+ try:
+ del self.macros[id]
+ except LookupError:
+ pass
+
+ # ----------------------------------------------------------------------
+ # parse()
+ #
+ # Parse input text.
+ # ----------------------------------------------------------------------
+ def parse(self,input,source=None,ignore={}):
+ self.ignore = ignore
+ self.parser = self.parsegen(input,source)
+
+ # ----------------------------------------------------------------------
+ # token()
+ #
+ # Method to return individual tokens
+ # ----------------------------------------------------------------------
+ def token(self):
+ try:
+ while True:
+ tok = next(self.parser)
+ if tok.type not in self.ignore: return tok
+ except StopIteration:
+ self.parser = None
+ return None
+
+if __name__ == '__main__':
+ import ply.lex as lex
+ lexer = lex.lex()
+
+ # Run a preprocessor
+ import sys
+ f = open(sys.argv[1])
+ input = f.read()
+
+ p = Preprocessor(lexer)
+ p.parse(input,sys.argv[1])
+ while True:
+ tok = p.token()
+ if not tok: break
+ print(p.source, tok)
diff --git a/billinglayer/python/pycparser/ply/ctokens.py b/billinglayer/python/pycparser/ply/ctokens.py
new file mode 100644
index 0000000..f6f6952
--- /dev/null
+++ b/billinglayer/python/pycparser/ply/ctokens.py
@@ -0,0 +1,133 @@
+# ----------------------------------------------------------------------
+# ctokens.py
+#
+# Token specifications for symbols in ANSI C and C++. This file is
+# meant to be used as a library in other tokenizers.
+# ----------------------------------------------------------------------
+
+# Reserved words
+
+tokens = [
+ # Literals (identifier, integer constant, float constant, string constant, char const)
+ 'ID', 'TYPEID', 'INTEGER', 'FLOAT', 'STRING', 'CHARACTER',
+
+ # Operators (+,-,*,/,%,|,&,~,^,<<,>>, ||, &&, !, <, <=, >, >=, ==, !=)
+ 'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'MODULO',
+ 'OR', 'AND', 'NOT', 'XOR', 'LSHIFT', 'RSHIFT',
+ 'LOR', 'LAND', 'LNOT',
+ 'LT', 'LE', 'GT', 'GE', 'EQ', 'NE',
+
+ # Assignment (=, *=, /=, %=, +=, -=, <<=, >>=, &=, ^=, |=)
+ 'EQUALS', 'TIMESEQUAL', 'DIVEQUAL', 'MODEQUAL', 'PLUSEQUAL', 'MINUSEQUAL',
+ 'LSHIFTEQUAL','RSHIFTEQUAL', 'ANDEQUAL', 'XOREQUAL', 'OREQUAL',
+
+ # Increment/decrement (++,--)
+ 'INCREMENT', 'DECREMENT',
+
+ # Structure dereference (->)
+ 'ARROW',
+
+ # Ternary operator (?)
+ 'TERNARY',
+
+    # Delimiters ( ) [ ] { } , . ; :
+ 'LPAREN', 'RPAREN',
+ 'LBRACKET', 'RBRACKET',
+ 'LBRACE', 'RBRACE',
+ 'COMMA', 'PERIOD', 'SEMI', 'COLON',
+
+ # Ellipsis (...)
+ 'ELLIPSIS',
+]
+
+# Operators
+t_PLUS = r'\+'
+t_MINUS = r'-'
+t_TIMES = r'\*'
+t_DIVIDE = r'/'
+t_MODULO = r'%'
+t_OR = r'\|'
+t_AND = r'&'
+t_NOT = r'~'
+t_XOR = r'\^'
+t_LSHIFT = r'<<'
+t_RSHIFT = r'>>'
+t_LOR = r'\|\|'
+t_LAND = r'&&'
+t_LNOT = r'!'
+t_LT = r'<'
+t_GT = r'>'
+t_LE = r'<='
+t_GE = r'>='
+t_EQ = r'=='
+t_NE = r'!='
+
+# Assignment operators
+
+t_EQUALS = r'='
+t_TIMESEQUAL = r'\*='
+t_DIVEQUAL = r'/='
+t_MODEQUAL = r'%='
+t_PLUSEQUAL = r'\+='
+t_MINUSEQUAL = r'-='
+t_LSHIFTEQUAL = r'<<='
+t_RSHIFTEQUAL = r'>>='
+t_ANDEQUAL = r'&='
+t_OREQUAL = r'\|='
+t_XOREQUAL = r'\^='
+
+# Increment/decrement
+t_INCREMENT = r'\+\+'
+t_DECREMENT = r'--'
+
+# ->
+t_ARROW = r'->'
+
+# ?
+t_TERNARY = r'\?'
+
+# Delimiters
+t_LPAREN = r'\('
+t_RPAREN = r'\)'
+t_LBRACKET = r'\['
+t_RBRACKET = r'\]'
+t_LBRACE = r'\{'
+t_RBRACE = r'\}'
+t_COMMA = r','
+t_PERIOD = r'\.'
+t_SEMI = r';'
+t_COLON = r':'
+t_ELLIPSIS = r'\.\.\.'
+
+# Identifiers
+t_ID = r'[A-Za-z_][A-Za-z0-9_]*'
+
+# Integer literal
+t_INTEGER = r'\d+([uU]|[lL]|[uU][lL]|[lL][uU])?'
+
+# Floating literal
+t_FLOAT = r'((\d+)(\.\d+)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?'
+
+# String literal
+t_STRING = r'\"([^\\\n]|(\\.))*?\"'
+
+# Character constant 'c' or L'c'
+t_CHARACTER = r'(L)?\'([^\\\n]|(\\.))*?\''
+
+# Comment (C-Style)
+def t_COMMENT(t):
+ r'/\*(.|\n)*?\*/'
+ t.lexer.lineno += t.value.count('\n')
+ return t
+
+# Comment (C++-Style)
+def t_CPPCOMMENT(t):
+ r'//.*\n'
+ t.lexer.lineno += 1
+ return t
+
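+# Usage sketch (illustrative; the import path and whitespace rule are
+# assumptions): a tokenizer can reuse these specifications by pulling the
+# names into its own module namespace before building the lexer:
+#
+#     import ply.lex as lex
+#     from ply.ctokens import *    # brings in 'tokens' and the t_ rules
+#     t_ignore = ' \t'             # assumed whitespace rule
+#     lexer = lex.lex()
+#     lexer.input('x += 1;')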
+
+
+
+
+
diff --git a/billinglayer/python/pycparser/ply/lex.py b/billinglayer/python/pycparser/ply/lex.py
new file mode 100644
index 0000000..4bdd76c
--- /dev/null
+++ b/billinglayer/python/pycparser/ply/lex.py
@@ -0,0 +1,1099 @@
+# -----------------------------------------------------------------------------
+# ply: lex.py
+#
+# Copyright (C) 2001-2017
+# David M. Beazley (Dabeaz LLC)
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright notice,
+# this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+# * Neither the name of the David Beazley or Dabeaz LLC may be used to
+# endorse or promote products derived from this software without
+# specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+# -----------------------------------------------------------------------------
+
+__version__ = '3.10'
+__tabversion__ = '3.10'
+
+import re
+import sys
+import types
+import copy
+import os
+import inspect
+
+# This tuple contains known string types
+try:
+ # Python 2.6
+ StringTypes = (types.StringType, types.UnicodeType)
+except AttributeError:
+ # Python 3.0
+ StringTypes = (str, bytes)
+
+# This regular expression is used to match valid token names
+_is_identifier = re.compile(r'^[a-zA-Z0-9_]+$')
+
+# Exception thrown when invalid token encountered and no default error
+# handler is defined.
+class LexError(Exception):
+ def __init__(self, message, s):
+ self.args = (message,)
+ self.text = s
+
+
+# Token class. This class is used to represent the tokens produced.
+class LexToken(object):
+ def __str__(self):
+ return 'LexToken(%s,%r,%d,%d)' % (self.type, self.value, self.lineno, self.lexpos)
+
+ def __repr__(self):
+ return str(self)
+
+
+# This object is a stand-in for a logging object created by the
+# logging module.
+
+class PlyLogger(object):
+ def __init__(self, f):
+ self.f = f
+
+ def critical(self, msg, *args, **kwargs):
+ self.f.write((msg % args) + '\n')
+
+ def warning(self, msg, *args, **kwargs):
+ self.f.write('WARNING: ' + (msg % args) + '\n')
+
+ def error(self, msg, *args, **kwargs):
+ self.f.write('ERROR: ' + (msg % args) + '\n')
+
+ info = critical
+ debug = critical
+
+
+# Null logger is used when no output is generated. Does nothing.
+class NullLogger(object):
+ def __getattribute__(self, name):
+ return self
+
+ def __call__(self, *args, **kwargs):
+ return self
+
+
+# -----------------------------------------------------------------------------
+# === Lexing Engine ===
+#
+# The following Lexer class implements the lexer runtime. There are only
+# a few public methods and attributes:
+#
+# input() - Store a new string in the lexer
+# token() - Get the next token
+# clone() - Clone the lexer
+#
+# lineno - Current line number
+# lexpos - Current position in the input string
+# -----------------------------------------------------------------------------
+
+class Lexer:
+ def __init__(self):
+ self.lexre = None # Master regular expression. This is a list of
+ # tuples (re, findex) where re is a compiled
+ # regular expression and findex is a list
+ # mapping regex group numbers to rules
+ self.lexretext = None # Current regular expression strings
+ self.lexstatere = {} # Dictionary mapping lexer states to master regexs
+ self.lexstateretext = {} # Dictionary mapping lexer states to regex strings
+ self.lexstaterenames = {} # Dictionary mapping lexer states to symbol names
+ self.lexstate = 'INITIAL' # Current lexer state
+ self.lexstatestack = [] # Stack of lexer states
+ self.lexstateinfo = None # State information
+ self.lexstateignore = {} # Dictionary of ignored characters for each state
+ self.lexstateerrorf = {} # Dictionary of error functions for each state
+ self.lexstateeoff = {} # Dictionary of eof functions for each state
+ self.lexreflags = 0 # Optional re compile flags
+ self.lexdata = None # Actual input data (as a string)
+ self.lexpos = 0 # Current position in input text
+ self.lexlen = 0 # Length of the input text
+ self.lexerrorf = None # Error rule (if any)
+ self.lexeoff = None # EOF rule (if any)
+ self.lextokens = None # List of valid tokens
+ self.lexignore = '' # Ignored characters
+ self.lexliterals = '' # Literal characters that can be passed through
+ self.lexmodule = None # Module
+ self.lineno = 1 # Current line number
+ self.lexoptimize = False # Optimized mode
+
+ def clone(self, object=None):
+ c = copy.copy(self)
+
+ # If the object parameter has been supplied, it means we are attaching the
+ # lexer to a new object. In this case, we have to rebind all methods in
+ # the lexstatere and lexstateerrorf tables.
+
+ if object:
+ newtab = {}
+ for key, ritem in self.lexstatere.items():
+ newre = []
+ for cre, findex in ritem:
+ newfindex = []
+ for f in findex:
+ if not f or not f[0]:
+ newfindex.append(f)
+ continue
+ newfindex.append((getattr(object, f[0].__name__), f[1]))
+ newre.append((cre, newfindex))
+ newtab[key] = newre
+ c.lexstatere = newtab
+ c.lexstateerrorf = {}
+ for key, ef in self.lexstateerrorf.items():
+ c.lexstateerrorf[key] = getattr(object, ef.__name__)
+ c.lexmodule = object
+ return c
+
+ # ------------------------------------------------------------
+ # writetab() - Write lexer information to a table file
+ # ------------------------------------------------------------
+ def writetab(self, lextab, outputdir=''):
+ if isinstance(lextab, types.ModuleType):
+ raise IOError("Won't overwrite existing lextab module")
+ basetabmodule = lextab.split('.')[-1]
+ filename = os.path.join(outputdir, basetabmodule) + '.py'
+ with open(filename, 'w') as tf:
+ tf.write('# %s.py. This file automatically created by PLY (version %s). Don\'t edit!\n' % (basetabmodule, __version__))
+ tf.write('_tabversion = %s\n' % repr(__tabversion__))
+ tf.write('_lextokens = set(%s)\n' % repr(tuple(self.lextokens)))
+ tf.write('_lexreflags = %s\n' % repr(self.lexreflags))
+ tf.write('_lexliterals = %s\n' % repr(self.lexliterals))
+ tf.write('_lexstateinfo = %s\n' % repr(self.lexstateinfo))
+
+ # Rewrite the lexstatere table, replacing function objects with function names
+ tabre = {}
+ for statename, lre in self.lexstatere.items():
+ titem = []
+ for (pat, func), retext, renames in zip(lre, self.lexstateretext[statename], self.lexstaterenames[statename]):
+ titem.append((retext, _funcs_to_names(func, renames)))
+ tabre[statename] = titem
+
+ tf.write('_lexstatere = %s\n' % repr(tabre))
+ tf.write('_lexstateignore = %s\n' % repr(self.lexstateignore))
+
+ taberr = {}
+ for statename, ef in self.lexstateerrorf.items():
+ taberr[statename] = ef.__name__ if ef else None
+ tf.write('_lexstateerrorf = %s\n' % repr(taberr))
+
+ tabeof = {}
+ for statename, ef in self.lexstateeoff.items():
+ tabeof[statename] = ef.__name__ if ef else None
+ tf.write('_lexstateeoff = %s\n' % repr(tabeof))
+
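+    # Shape of the module written by writetab() above (values illustrative):
+    #
+    #     _tabversion     = '3.10'
+    #     _lextokens      = set(('NUMBER', 'PLUS'))
+    #     _lexstatere     = {'INITIAL': [('(?P<t_NUMBER>\\d+)', ...)]}
+    #     _lexstateerrorf = {'INITIAL': 't_error'}
+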
+ # ------------------------------------------------------------
+ # readtab() - Read lexer information from a tab file
+ # ------------------------------------------------------------
+ def readtab(self, tabfile, fdict):
+ if isinstance(tabfile, types.ModuleType):
+ lextab = tabfile
+ else:
+ exec('import %s' % tabfile)
+ lextab = sys.modules[tabfile]
+
+ if getattr(lextab, '_tabversion', '0.0') != __tabversion__:
+ raise ImportError('Inconsistent PLY version')
+
+ self.lextokens = lextab._lextokens
+ self.lexreflags = lextab._lexreflags
+ self.lexliterals = lextab._lexliterals
+ self.lextokens_all = self.lextokens | set(self.lexliterals)
+ self.lexstateinfo = lextab._lexstateinfo
+ self.lexstateignore = lextab._lexstateignore
+ self.lexstatere = {}
+ self.lexstateretext = {}
+ for statename, lre in lextab._lexstatere.items():
+ titem = []
+ txtitem = []
+ for pat, func_name in lre:
+ titem.append((re.compile(pat, lextab._lexreflags), _names_to_funcs(func_name, fdict)))
+
+ self.lexstatere[statename] = titem
+ self.lexstateretext[statename] = txtitem
+
+ self.lexstateerrorf = {}
+ for statename, ef in lextab._lexstateerrorf.items():
+ self.lexstateerrorf[statename] = fdict[ef]
+
+ self.lexstateeoff = {}
+ for statename, ef in lextab._lexstateeoff.items():
+ self.lexstateeoff[statename] = fdict[ef]
+
+ self.begin('INITIAL')
+
+ # ------------------------------------------------------------
+ # input() - Push a new string into the lexer
+ # ------------------------------------------------------------
+ def input(self, s):
+ # Pull off the first character to see if s looks like a string
+ c = s[:1]
+ if not isinstance(c, StringTypes):
+ raise ValueError('Expected a string')
+ self.lexdata = s
+ self.lexpos = 0
+ self.lexlen = len(s)
+
+ # ------------------------------------------------------------
+ # begin() - Changes the lexing state
+ # ------------------------------------------------------------
+ def begin(self, state):
+ if state not in self.lexstatere:
+ raise ValueError('Undefined state')
+ self.lexre = self.lexstatere[state]
+ self.lexretext = self.lexstateretext[state]
+ self.lexignore = self.lexstateignore.get(state, '')
+ self.lexerrorf = self.lexstateerrorf.get(state, None)
+ self.lexeoff = self.lexstateeoff.get(state, None)
+ self.lexstate = state
+
+ # ------------------------------------------------------------
+ # push_state() - Changes the lexing state and saves old on stack
+ # ------------------------------------------------------------
+ def push_state(self, state):
+ self.lexstatestack.append(self.lexstate)
+ self.begin(state)
+
+ # ------------------------------------------------------------
+ # pop_state() - Restores the previous state
+ # ------------------------------------------------------------
+ def pop_state(self):
+ self.begin(self.lexstatestack.pop())
+
+ # ------------------------------------------------------------
+ # current_state() - Returns the current lexing state
+ # ------------------------------------------------------------
+ def current_state(self):
+ return self.lexstate
+
+ # ------------------------------------------------------------
+ # skip() - Skip ahead n characters
+ # ------------------------------------------------------------
+ def skip(self, n):
+ self.lexpos += n
+
+ # ------------------------------------------------------------
+ # opttoken() - Return the next token from the Lexer
+ #
+ # Note: This function has been carefully implemented to be as fast
+ # as possible. Don't make changes unless you really know what
+ # you are doing
+ # ------------------------------------------------------------
+ def token(self):
+ # Make local copies of frequently referenced attributes
+ lexpos = self.lexpos
+ lexlen = self.lexlen
+ lexignore = self.lexignore
+ lexdata = self.lexdata
+
+ while lexpos < lexlen:
+            # Short-circuit handling for whitespace, tabs, and other ignored characters
+ if lexdata[lexpos] in lexignore:
+ lexpos += 1
+ continue
+
+ # Look for a regular expression match
+ for lexre, lexindexfunc in self.lexre:
+ m = lexre.match(lexdata, lexpos)
+ if not m:
+ continue
+
+ # Create a token for return
+ tok = LexToken()
+ tok.value = m.group()
+ tok.lineno = self.lineno
+ tok.lexpos = lexpos
+
+ i = m.lastindex
+ func, tok.type = lexindexfunc[i]
+
+ if not func:
+ # If no token type was set, it's an ignored token
+ if tok.type:
+ self.lexpos = m.end()
+ return tok
+ else:
+ lexpos = m.end()
+ break
+
+ lexpos = m.end()
+
+ # If token is processed by a function, call it
+
+ tok.lexer = self # Set additional attributes useful in token rules
+ self.lexmatch = m
+ self.lexpos = lexpos
+
+ newtok = func(tok)
+
+                # Every function must return a token. If it returns nothing, we just move on to the next token
+ if not newtok:
+ lexpos = self.lexpos # This is here in case user has updated lexpos.
+ lexignore = self.lexignore # This is here in case there was a state change
+ break
+
+ # Verify type of the token. If not in the token map, raise an error
+ if not self.lexoptimize:
+ if newtok.type not in self.lextokens_all:
+ raise LexError("%s:%d: Rule '%s' returned an unknown token type '%s'" % (
+ func.__code__.co_filename, func.__code__.co_firstlineno,
+ func.__name__, newtok.type), lexdata[lexpos:])
+
+ return newtok
+ else:
+ # No match, see if in literals
+ if lexdata[lexpos] in self.lexliterals:
+ tok = LexToken()
+ tok.value = lexdata[lexpos]
+ tok.lineno = self.lineno
+ tok.type = tok.value
+ tok.lexpos = lexpos
+ self.lexpos = lexpos + 1
+ return tok
+
+ # No match. Call t_error() if defined.
+ if self.lexerrorf:
+ tok = LexToken()
+ tok.value = self.lexdata[lexpos:]
+ tok.lineno = self.lineno
+ tok.type = 'error'
+ tok.lexer = self
+ tok.lexpos = lexpos
+ self.lexpos = lexpos
+ newtok = self.lexerrorf(tok)
+ if lexpos == self.lexpos:
+ # Error method didn't change text position at all. This is an error.
+ raise LexError("Scanning error. Illegal character '%s'" % (lexdata[lexpos]), lexdata[lexpos:])
+ lexpos = self.lexpos
+ if not newtok:
+ continue
+ return newtok
+
+ self.lexpos = lexpos
+ raise LexError("Illegal character '%s' at index %d" % (lexdata[lexpos], lexpos), lexdata[lexpos:])
+
+ if self.lexeoff:
+ tok = LexToken()
+ tok.type = 'eof'
+ tok.value = ''
+ tok.lineno = self.lineno
+ tok.lexpos = lexpos
+ tok.lexer = self
+ self.lexpos = lexpos
+ newtok = self.lexeoff(tok)
+ return newtok
+
+ self.lexpos = lexpos + 1
+ if self.lexdata is None:
+ raise RuntimeError('No input string given with input()')
+ return None
+
+ # Iterator interface
+ def __iter__(self):
+ return self
+
+ def next(self):
+ t = self.token()
+ if t is None:
+ raise StopIteration
+ return t
+
+ __next__ = next
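+
+    # Driver sketch (not part of the class): once built, a lexer is fed a
+    # string and drained either explicitly via token() or through the
+    # iterator protocol defined above:
+    #
+    #     lexer.input('3 + 4')
+    #     for tok in lexer:            # calls token() until it returns None
+    #         print(tok.type, tok.value, tok.lineno, tok.lexpos)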
+
+# -----------------------------------------------------------------------------
+# ==== Lex Builder ===
+#
+# The functions and classes below are used to collect lexing information
+# and build a Lexer object from it.
+# -----------------------------------------------------------------------------
+
+# -----------------------------------------------------------------------------
+# _get_regex(func)
+#
+# Returns the regular expression assigned to a function either as a doc string
+# or as a .regex attribute attached by the @TOKEN decorator.
+# -----------------------------------------------------------------------------
+def _get_regex(func):
+ return getattr(func, 'regex', func.__doc__)
+
+# -----------------------------------------------------------------------------
+# get_caller_module_dict()
+#
+# This function returns a dictionary containing all of the symbols defined within
+# a caller further down the call stack. This is used to get the environment
+# associated with the lex() call if none was provided.
+# -----------------------------------------------------------------------------
+def get_caller_module_dict(levels):
+ f = sys._getframe(levels)
+ ldict = f.f_globals.copy()
+ if f.f_globals != f.f_locals:
+ ldict.update(f.f_locals)
+ return ldict
+
+# -----------------------------------------------------------------------------
+# _funcs_to_names()
+#
+# Given a list of regular expression functions, this converts it to a list
+# suitable for output to a table file
+# -----------------------------------------------------------------------------
+def _funcs_to_names(funclist, namelist):
+ result = []
+ for f, name in zip(funclist, namelist):
+ if f and f[0]:
+ result.append((name, f[1]))
+ else:
+ result.append(f)
+ return result
+
+# -----------------------------------------------------------------------------
+# _names_to_funcs()
+#
+# Given a list of regular expression function names, this converts it back to
+# functions.
+# -----------------------------------------------------------------------------
+def _names_to_funcs(namelist, fdict):
+ result = []
+ for n in namelist:
+ if n and n[0]:
+ result.append((fdict[n[0]], n[1]))
+ else:
+ result.append(n)
+ return result
+
+# -----------------------------------------------------------------------------
+# _form_master_re()
+#
+# This function takes a list of all of the regex components and attempts to
+# form the master regular expression. Given limitations in the Python re
+# module, it may be necessary to break the master regex into separate expressions.
+# -----------------------------------------------------------------------------
+def _form_master_re(relist, reflags, ldict, toknames):
+ if not relist:
+ return []
+ regex = '|'.join(relist)
+ try:
+ lexre = re.compile(regex, reflags)
+
+ # Build the index to function map for the matching engine
+ lexindexfunc = [None] * (max(lexre.groupindex.values()) + 1)
+ lexindexnames = lexindexfunc[:]
+
+ for f, i in lexre.groupindex.items():
+ handle = ldict.get(f, None)
+ if type(handle) in (types.FunctionType, types.MethodType):
+ lexindexfunc[i] = (handle, toknames[f])
+ lexindexnames[i] = f
+ elif handle is not None:
+ lexindexnames[i] = f
+ if f.find('ignore_') > 0:
+ lexindexfunc[i] = (None, None)
+ else:
+ lexindexfunc[i] = (None, toknames[f])
+
+ return [(lexre, lexindexfunc)], [regex], [lexindexnames]
+ except Exception:
+ m = int(len(relist)/2)
+ if m == 0:
+ m = 1
+ llist, lre, lnames = _form_master_re(relist[:m], reflags, ldict, toknames)
+ rlist, rre, rnames = _form_master_re(relist[m:], reflags, ldict, toknames)
+ return (llist+rlist), (lre+rre), (lnames+rnames)
+
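+# Note on the fallback above (illustrative): older versions of Python's re
+# module capped a pattern at 100 named groups, so when compiling the joined
+# master regex fails, _form_master_re() recursively splits the rule list,
+# e.g. ['(?P<t_NUMBER>\\d+)', '(?P<t_ID>\\w+)', ...] may end up as several
+# smaller alternations that token() tries in order.
+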
+# -----------------------------------------------------------------------------
+# def _statetoken(s,names)
+#
+# Given a declaration name s of the form "t_" and a dictionary whose keys are
+# state names, this function returns a tuple (states,tokenname) where states
+# is a tuple of state names and tokenname is the name of the token. For example,
+# calling this with s = "t_foo_bar_SPAM" might return (('foo','bar'),'SPAM')
+# -----------------------------------------------------------------------------
+def _statetoken(s, names):
+ parts = s.split('_')
+ for i, part in enumerate(parts[1:], 1):
+ if part not in names and part != 'ANY':
+ break
+
+ if i > 1:
+ states = tuple(parts[1:i])
+ else:
+ states = ('INITIAL',)
+
+ if 'ANY' in states:
+ states = tuple(names)
+
+ tokenname = '_'.join(parts[i:])
+ return (states, tokenname)
+
+
+# -----------------------------------------------------------------------------
+# LexerReflect()
+#
+# This class represents information needed to build a lexer as extracted from a
+# user's input file.
+# -----------------------------------------------------------------------------
+class LexerReflect(object):
+ def __init__(self, ldict, log=None, reflags=0):
+ self.ldict = ldict
+ self.error_func = None
+ self.tokens = []
+ self.reflags = reflags
+ self.stateinfo = {'INITIAL': 'inclusive'}
+ self.modules = set()
+ self.error = False
+ self.log = PlyLogger(sys.stderr) if log is None else log
+
+ # Get all of the basic information
+ def get_all(self):
+ self.get_tokens()
+ self.get_literals()
+ self.get_states()
+ self.get_rules()
+
+ # Validate all of the information
+ def validate_all(self):
+ self.validate_tokens()
+ self.validate_literals()
+ self.validate_rules()
+ return self.error
+
+ # Get the tokens map
+ def get_tokens(self):
+ tokens = self.ldict.get('tokens', None)
+ if not tokens:
+ self.log.error('No token list is defined')
+ self.error = True
+ return
+
+ if not isinstance(tokens, (list, tuple)):
+ self.log.error('tokens must be a list or tuple')
+ self.error = True
+ return
+
+ if not tokens:
+ self.log.error('tokens is empty')
+ self.error = True
+ return
+
+ self.tokens = tokens
+
+ # Validate the tokens
+ def validate_tokens(self):
+ terminals = {}
+ for n in self.tokens:
+ if not _is_identifier.match(n):
+ self.log.error("Bad token name '%s'", n)
+ self.error = True
+ if n in terminals:
+ self.log.warning("Token '%s' multiply defined", n)
+ terminals[n] = 1
+
+ # Get the literals specifier
+ def get_literals(self):
+ self.literals = self.ldict.get('literals', '')
+ if not self.literals:
+ self.literals = ''
+
+ # Validate literals
+ def validate_literals(self):
+ try:
+ for c in self.literals:
+ if not isinstance(c, StringTypes) or len(c) > 1:
+ self.log.error('Invalid literal %s. Must be a single character', repr(c))
+ self.error = True
+
+ except TypeError:
+ self.log.error('Invalid literals specification. literals must be a sequence of characters')
+ self.error = True
+
+ def get_states(self):
+ self.states = self.ldict.get('states', None)
+ # Build statemap
+ if self.states:
+ if not isinstance(self.states, (tuple, list)):
+ self.log.error('states must be defined as a tuple or list')
+ self.error = True
+ else:
+ for s in self.states:
+ if not isinstance(s, tuple) or len(s) != 2:
+ self.log.error("Invalid state specifier %s. Must be a tuple (statename,'exclusive|inclusive')", repr(s))
+ self.error = True
+ continue
+ name, statetype = s
+ if not isinstance(name, StringTypes):
+ self.log.error('State name %s must be a string', repr(name))
+ self.error = True
+ continue
+ if not (statetype == 'inclusive' or statetype == 'exclusive'):
+ self.log.error("State type for state %s must be 'inclusive' or 'exclusive'", name)
+ self.error = True
+ continue
+ if name in self.stateinfo:
+ self.log.error("State '%s' already defined", name)
+ self.error = True
+ continue
+ self.stateinfo[name] = statetype
+
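+    # Shape expected by get_states() above (a sketch; the state names are
+    # hypothetical):
+    #
+    #     states = (
+    #         ('comment', 'exclusive'),
+    #         ('preproc', 'inclusive'),
+    #     )
+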
+ # Get all of the symbols with a t_ prefix and sort them into various
+ # categories (functions, strings, error functions, and ignore characters)
+
+ def get_rules(self):
+ tsymbols = [f for f in self.ldict if f[:2] == 't_']
+
+ # Now build up a list of functions and a list of strings
+ self.toknames = {} # Mapping of symbols to token names
+ self.funcsym = {} # Symbols defined as functions
+ self.strsym = {} # Symbols defined as strings
+ self.ignore = {} # Ignore strings by state
+ self.errorf = {} # Error functions by state
+ self.eoff = {} # EOF functions by state
+
+ for s in self.stateinfo:
+ self.funcsym[s] = []
+ self.strsym[s] = []
+
+ if len(tsymbols) == 0:
+ self.log.error('No rules of the form t_rulename are defined')
+ self.error = True
+ return
+
+ for f in tsymbols:
+ t = self.ldict[f]
+ states, tokname = _statetoken(f, self.stateinfo)
+ self.toknames[f] = tokname
+
+ if hasattr(t, '__call__'):
+ if tokname == 'error':
+ for s in states:
+ self.errorf[s] = t
+ elif tokname == 'eof':
+ for s in states:
+ self.eoff[s] = t
+ elif tokname == 'ignore':
+ line = t.__code__.co_firstlineno
+ file = t.__code__.co_filename
+ self.log.error("%s:%d: Rule '%s' must be defined as a string", file, line, t.__name__)
+ self.error = True
+ else:
+ for s in states:
+ self.funcsym[s].append((f, t))
+ elif isinstance(t, StringTypes):
+ if tokname == 'ignore':
+ for s in states:
+ self.ignore[s] = t
+ if '\\' in t:
+ self.log.warning("%s contains a literal backslash '\\'", f)
+
+ elif tokname == 'error':
+ self.log.error("Rule '%s' must be defined as a function", f)
+ self.error = True
+ else:
+ for s in states:
+ self.strsym[s].append((f, t))
+ else:
+ self.log.error('%s not defined as a function or string', f)
+ self.error = True
+
+ # Sort the functions by line number
+ for f in self.funcsym.values():
+ f.sort(key=lambda x: x[1].__code__.co_firstlineno)
+
+ # Sort the strings by regular expression length
+ for s in self.strsym.values():
+ s.sort(key=lambda x: len(x[1]), reverse=True)
+
+ # Validate all of the t_rules collected
+ def validate_rules(self):
+ for state in self.stateinfo:
+ # Validate all rules defined by functions
+
+ for fname, f in self.funcsym[state]:
+ line = f.__code__.co_firstlineno
+ file = f.__code__.co_filename
+ module = inspect.getmodule(f)
+ self.modules.add(module)
+
+ tokname = self.toknames[fname]
+ if isinstance(f, types.MethodType):
+ reqargs = 2
+ else:
+ reqargs = 1
+ nargs = f.__code__.co_argcount
+ if nargs > reqargs:
+ self.log.error("%s:%d: Rule '%s' has too many arguments", file, line, f.__name__)
+ self.error = True
+ continue
+
+ if nargs < reqargs:
+ self.log.error("%s:%d: Rule '%s' requires an argument", file, line, f.__name__)
+ self.error = True
+ continue
+
+ if not _get_regex(f):
+ self.log.error("%s:%d: No regular expression defined for rule '%s'", file, line, f.__name__)
+ self.error = True
+ continue
+
+ try:
+ c = re.compile('(?P<%s>%s)' % (fname, _get_regex(f)), self.reflags)
+ if c.match(''):
+ self.log.error("%s:%d: Regular expression for rule '%s' matches empty string", file, line, f.__name__)
+ self.error = True
+ except re.error as e:
+ self.log.error("%s:%d: Invalid regular expression for rule '%s'. %s", file, line, f.__name__, e)
+ if '#' in _get_regex(f):
+ self.log.error("%s:%d. Make sure '#' in rule '%s' is escaped with '\\#'", file, line, f.__name__)
+ self.error = True
+
+ # Validate all rules defined by strings
+ for name, r in self.strsym[state]:
+ tokname = self.toknames[name]
+ if tokname == 'error':
+ self.log.error("Rule '%s' must be defined as a function", name)
+ self.error = True
+ continue
+
+ if tokname not in self.tokens and tokname.find('ignore_') < 0:
+ self.log.error("Rule '%s' defined for an unspecified token %s", name, tokname)
+ self.error = True
+ continue
+
+ try:
+ c = re.compile('(?P<%s>%s)' % (name, r), self.reflags)
+                if c.match(''):
+                    self.log.error("Regular expression for rule '%s' matches empty string", name)
+                    self.error = True
+ except re.error as e:
+ self.log.error("Invalid regular expression for rule '%s'. %s", name, e)
+ if '#' in r:
+ self.log.error("Make sure '#' in rule '%s' is escaped with '\\#'", name)
+ self.error = True
+
+ if not self.funcsym[state] and not self.strsym[state]:
+ self.log.error("No rules defined for state '%s'", state)
+ self.error = True
+
+ # Validate the error function
+ efunc = self.errorf.get(state, None)
+ if efunc:
+ f = efunc
+ line = f.__code__.co_firstlineno
+ file = f.__code__.co_filename
+ module = inspect.getmodule(f)
+ self.modules.add(module)
+
+ if isinstance(f, types.MethodType):
+ reqargs = 2
+ else:
+ reqargs = 1
+ nargs = f.__code__.co_argcount
+ if nargs > reqargs:
+ self.log.error("%s:%d: Rule '%s' has too many arguments", file, line, f.__name__)
+ self.error = True
+
+ if nargs < reqargs:
+ self.log.error("%s:%d: Rule '%s' requires an argument", file, line, f.__name__)
+ self.error = True
+
+ for module in self.modules:
+ self.validate_module(module)
+
+ # -----------------------------------------------------------------------------
+ # validate_module()
+ #
+ # This checks to see if there are duplicated t_rulename() functions or strings
+ # in the parser input file. This is done using a simple regular expression
+ # match on each line in the source code of the given module.
+ # -----------------------------------------------------------------------------
+
+ def validate_module(self, module):
+ try:
+ lines, linen = inspect.getsourcelines(module)
+ except IOError:
+ return
+
+ fre = re.compile(r'\s*def\s+(t_[a-zA-Z_0-9]*)\(')
+ sre = re.compile(r'\s*(t_[a-zA-Z_0-9]*)\s*=')
+
+ counthash = {}
+ linen += 1
+ for line in lines:
+ m = fre.match(line)
+ if not m:
+ m = sre.match(line)
+ if m:
+ name = m.group(1)
+ prev = counthash.get(name)
+ if not prev:
+ counthash[name] = linen
+ else:
+ filename = inspect.getsourcefile(module)
+ self.log.error('%s:%d: Rule %s redefined. Previously defined on line %d', filename, linen, name, prev)
+ self.error = True
+ linen += 1
+
+# -----------------------------------------------------------------------------
+# lex(module)
+#
+# Build all of the regular expression rules from definitions in the supplied module
+# -----------------------------------------------------------------------------
+def lex(module=None, object=None, debug=False, optimize=False, lextab='lextab',
+ reflags=int(re.VERBOSE), nowarn=False, outputdir=None, debuglog=None, errorlog=None):
+
+ if lextab is None:
+ lextab = 'lextab'
+
+ global lexer
+
+ ldict = None
+ stateinfo = {'INITIAL': 'inclusive'}
+ lexobj = Lexer()
+ lexobj.lexoptimize = optimize
+ global token, input
+
+ if errorlog is None:
+ errorlog = PlyLogger(sys.stderr)
+
+ if debug:
+ if debuglog is None:
+ debuglog = PlyLogger(sys.stderr)
+
+ # Get the module dictionary used for the lexer
+ if object:
+ module = object
+
+    # Get the module dictionary used for the lexer
+ if module:
+ _items = [(k, getattr(module, k)) for k in dir(module)]
+ ldict = dict(_items)
+ # If no __file__ attribute is available, try to obtain it from the __module__ instead
+ if '__file__' not in ldict:
+ ldict['__file__'] = sys.modules[ldict['__module__']].__file__
+ else:
+ ldict = get_caller_module_dict(2)
+
+    # Determine if the module is part of a package.
+    # If so, fix the lextab setting so that tables load correctly
+ pkg = ldict.get('__package__')
+ if pkg and isinstance(lextab, str):
+ if '.' not in lextab:
+ lextab = pkg + '.' + lextab
+
+    # Collect lexer information from the dictionary
+ linfo = LexerReflect(ldict, log=errorlog, reflags=reflags)
+ linfo.get_all()
+ if not optimize:
+ if linfo.validate_all():
+ raise SyntaxError("Can't build lexer")
+
+ if optimize and lextab:
+ try:
+ lexobj.readtab(lextab, ldict)
+ token = lexobj.token
+ input = lexobj.input
+ lexer = lexobj
+ return lexobj
+
+ except ImportError:
+ pass
+
+ # Dump some basic debugging information
+ if debug:
+ debuglog.info('lex: tokens = %r', linfo.tokens)
+ debuglog.info('lex: literals = %r', linfo.literals)
+ debuglog.info('lex: states = %r', linfo.stateinfo)
+
+ # Build a dictionary of valid token names
+ lexobj.lextokens = set()
+ for n in linfo.tokens:
+ lexobj.lextokens.add(n)
+
+ # Get literals specification
+ if isinstance(linfo.literals, (list, tuple)):
+ lexobj.lexliterals = type(linfo.literals[0])().join(linfo.literals)
+ else:
+ lexobj.lexliterals = linfo.literals
+
+ lexobj.lextokens_all = lexobj.lextokens | set(lexobj.lexliterals)
+
+ # Get the stateinfo dictionary
+ stateinfo = linfo.stateinfo
+
+ regexs = {}
+ # Build the master regular expressions
+ for state in stateinfo:
+ regex_list = []
+
+ # Add rules defined by functions first
+ for fname, f in linfo.funcsym[state]:
+ line = f.__code__.co_firstlineno
+ file = f.__code__.co_filename
+ regex_list.append('(?P<%s>%s)' % (fname, _get_regex(f)))
+ if debug:
+ debuglog.info("lex: Adding rule %s -> '%s' (state '%s')", fname, _get_regex(f), state)
+
+ # Now add all of the simple rules
+ for name, r in linfo.strsym[state]:
+ regex_list.append('(?P<%s>%s)' % (name, r))
+ if debug:
+ debuglog.info("lex: Adding rule %s -> '%s' (state '%s')", name, r, state)
+
+ regexs[state] = regex_list
+
+ # Build the master regular expressions
+
+ if debug:
+ debuglog.info('lex: ==== MASTER REGEXS FOLLOW ====')
+
+ for state in regexs:
+ lexre, re_text, re_names = _form_master_re(regexs[state], reflags, ldict, linfo.toknames)
+ lexobj.lexstatere[state] = lexre
+ lexobj.lexstateretext[state] = re_text
+ lexobj.lexstaterenames[state] = re_names
+ if debug:
+ for i, text in enumerate(re_text):
+ debuglog.info("lex: state '%s' : regex[%d] = '%s'", state, i, text)
+
+ # For inclusive states, we need to add the regular expressions from the INITIAL state
+ for state, stype in stateinfo.items():
+ if state != 'INITIAL' and stype == 'inclusive':
+ lexobj.lexstatere[state].extend(lexobj.lexstatere['INITIAL'])
+ lexobj.lexstateretext[state].extend(lexobj.lexstateretext['INITIAL'])
+ lexobj.lexstaterenames[state].extend(lexobj.lexstaterenames['INITIAL'])
+
+ lexobj.lexstateinfo = stateinfo
+ lexobj.lexre = lexobj.lexstatere['INITIAL']
+ lexobj.lexretext = lexobj.lexstateretext['INITIAL']
+ lexobj.lexreflags = reflags
+
+ # Set up ignore variables
+ lexobj.lexstateignore = linfo.ignore
+ lexobj.lexignore = lexobj.lexstateignore.get('INITIAL', '')
+
+ # Set up error functions
+ lexobj.lexstateerrorf = linfo.errorf
+ lexobj.lexerrorf = linfo.errorf.get('INITIAL', None)
+ if not lexobj.lexerrorf:
+ errorlog.warning('No t_error rule is defined')
+
+ # Set up eof functions
+ lexobj.lexstateeoff = linfo.eoff
+ lexobj.lexeoff = linfo.eoff.get('INITIAL', None)
+
+ # Check state information for ignore and error rules
+ for s, stype in stateinfo.items():
+ if stype == 'exclusive':
+ if s not in linfo.errorf:
+ errorlog.warning("No error rule is defined for exclusive state '%s'", s)
+ if s not in linfo.ignore and lexobj.lexignore:
+ errorlog.warning("No ignore rule is defined for exclusive state '%s'", s)
+ elif stype == 'inclusive':
+ if s not in linfo.errorf:
+ linfo.errorf[s] = linfo.errorf.get('INITIAL', None)
+ if s not in linfo.ignore:
+ linfo.ignore[s] = linfo.ignore.get('INITIAL', '')
+
+ # Create global versions of the token() and input() functions
+ token = lexobj.token
+ input = lexobj.input
+ lexer = lexobj
+
+ # If in optimize mode, we write the lextab
+ if lextab and optimize:
+ if outputdir is None:
+ # If no output directory is set, the location of the output files
+ # is determined according to the following rules:
+ # - If lextab specifies a package, files go into that package directory
+ # - Otherwise, files go in the same directory as the specifying module
+ if isinstance(lextab, types.ModuleType):
+ srcfile = lextab.__file__
+ else:
+ if '.' not in lextab:
+ srcfile = ldict['__file__']
+ else:
+ parts = lextab.split('.')
+ pkgname = '.'.join(parts[:-1])
+ exec('import %s' % pkgname)
+ srcfile = getattr(sys.modules[pkgname], '__file__', '')
+ outputdir = os.path.dirname(srcfile)
+ try:
+ lexobj.writetab(lextab, outputdir)
+ except IOError as e:
+ errorlog.warning("Couldn't write lextab module %r. %s" % (lextab, e))
+
+ return lexobj
+
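+# Minimal end-to-end sketch for lex() above (illustrative; assumes these
+# names live in the calling module, where reflection will find them):
+#
+#     tokens = ('NUMBER',)
+#     t_NUMBER = r'\d+'
+#     t_ignore = ' \t'
+#     def t_error(t):
+#         t.lexer.skip(1)
+#     lexer = lex.lex()
+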
+# -----------------------------------------------------------------------------
+# runmain()
+#
+# This runs the lexer as a main program
+# -----------------------------------------------------------------------------
+
+def runmain(lexer=None, data=None):
+ if not data:
+ try:
+ filename = sys.argv[1]
+ f = open(filename)
+ data = f.read()
+ f.close()
+ except IndexError:
+ sys.stdout.write('Reading from standard input (type EOF to end):\n')
+ data = sys.stdin.read()
+
+ if lexer:
+ _input = lexer.input
+ else:
+ _input = input
+ _input(data)
+ if lexer:
+ _token = lexer.token
+ else:
+ _token = token
+
+ while True:
+ tok = _token()
+ if not tok:
+ break
+ sys.stdout.write('(%s,%r,%d,%d)\n' % (tok.type, tok.value, tok.lineno, tok.lexpos))
+
+# -----------------------------------------------------------------------------
+# @TOKEN(regex)
+#
+# This decorator function can be used to set the regex expression on a function
+# when its docstring might need to be set in an alternative way
+# -----------------------------------------------------------------------------
+
+def TOKEN(r):
+ def set_regex(f):
+ if hasattr(r, '__call__'):
+ f.regex = _get_regex(r)
+ else:
+ f.regex = r
+ return f
+ return set_regex
+
+# Alternative spelling of the TOKEN decorator
+Token = TOKEN
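+
+# Usage sketch for @TOKEN (illustrative definitions): it attaches a regex
+# built at runtime instead of relying on the rule's docstring:
+#
+#     identifier = r'[a-zA-Z_][a-zA-Z0-9_]*'    # assumed pattern
+#
+#     @TOKEN(identifier)
+#     def t_ID(t):
+#         return t
+#
+# Passing another rule function instead of a string reuses that rule's regex.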
diff --git a/billinglayer/python/pycparser/ply/yacc.py b/billinglayer/python/pycparser/ply/yacc.py
new file mode 100644
index 0000000..20b4f28
--- /dev/null
+++ b/billinglayer/python/pycparser/ply/yacc.py
@@ -0,0 +1,3494 @@
+# -----------------------------------------------------------------------------
+# ply: yacc.py
+#
+# Copyright (C) 2001-2017
+# David M. Beazley (Dabeaz LLC)
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright notice,
+# this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+# * Neither the name of the David Beazley or Dabeaz LLC may be used to
+# endorse or promote products derived from this software without
+# specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+# -----------------------------------------------------------------------------
+#
+# This implements an LR parser that is constructed from grammar rules defined
+# as Python functions. The grammar is specified by supplying the BNF inside
+# Python documentation strings. The inspiration for this technique was borrowed
+# from John Aycock's Spark parsing system. PLY might be viewed as a cross between
+# Spark and the GNU bison utility.
+#
+# The current implementation is only somewhat object-oriented. The
+# LR parser itself is defined in terms of an object (which allows multiple
+# parsers to co-exist). However, most of the variables used during table
+# construction are defined in terms of global variables. Users shouldn't
+# notice unless they are trying to define multiple parsers at the same
+# time using threads (in which case they should have their head examined).
+#
+# This implementation supports both SLR and LALR(1) parsing. LALR(1)
+# support was originally implemented by Elias Ioup (ezioup@alumni.uchicago.edu),
+# using the algorithm found in Aho, Sethi, and Ullman "Compilers: Principles,
+# Techniques, and Tools" (The Dragon Book). LALR(1) has since been replaced
+# by the more efficient DeRemer and Pennello algorithm.
+#
+# :::::::: WARNING :::::::
+#
+# Construction of LR parsing tables is fairly complicated and expensive.
+# To make this module run fast, a *LOT* of work has been put into
+# optimization---often at the expense of readability and what might be
+# considered good Python "coding style." Modify the code at your
+# own risk!
+# ----------------------------------------------------------------------------
+
+import re
+import types
+import sys
+import os.path
+import inspect
+import base64
+import warnings
+
+__version__ = '3.10'
+__tabversion__ = '3.10'
+
+#-----------------------------------------------------------------------------
+# === User configurable parameters ===
+#
+# Change these to modify the default behavior of yacc (if you wish)
+#-----------------------------------------------------------------------------
+
+yaccdebug = True # Debugging mode. If set, yacc generates
+ # a 'parser.out' file in the current directory
+
+debug_file = 'parser.out' # Default name of the debugging file
+tab_module = 'parsetab' # Default name of the table module
+default_lr = 'LALR' # Default LR table generation method
+
+error_count = 3 # Number of symbols that must be shifted to leave recovery mode
+
+yaccdevel = False # Set to True if developing yacc. This turns off optimized
+ # implementations of certain functions.
+
+resultlimit = 40 # Size limit of results when running in debug mode.
+
+pickle_protocol = 0 # Protocol to use when writing pickle files
+
+# String type-checking compatibility
+if sys.version_info[0] < 3:
+ string_types = basestring
+else:
+ string_types = str
+
+MAXINT = sys.maxsize
+
+# This object is a stand-in for a logging object created by the
+# logging module. PLY will use this by default to create things
+# such as the parser.out file. If a user wants more detailed
+# information, they can create their own logging object and pass
+# it into PLY.
+
+class PlyLogger(object):
+ def __init__(self, f):
+ self.f = f
+
+ def debug(self, msg, *args, **kwargs):
+ self.f.write((msg % args) + '\n')
+
+ info = debug
+
+ def warning(self, msg, *args, **kwargs):
+ self.f.write('WARNING: ' + (msg % args) + '\n')
+
+ def error(self, msg, *args, **kwargs):
+ self.f.write('ERROR: ' + (msg % args) + '\n')
+
+ critical = debug
+
+# Null logger is used when no output is generated. Does nothing.
+class NullLogger(object):
+ def __getattribute__(self, name):
+ return self
+
+ def __call__(self, *args, **kwargs):
+ return self
+
+# Exception raised for yacc-related errors
+class YaccError(Exception):
+ pass
+
+# Format the result message that the parser produces when running in debug mode.
+def format_result(r):
+ repr_str = repr(r)
+ if '\n' in repr_str:
+ repr_str = repr(repr_str)
+ if len(repr_str) > resultlimit:
+ repr_str = repr_str[:resultlimit] + ' ...'
+ result = '<%s @ 0x%x> (%s)' % (type(r).__name__, id(r), repr_str)
+ return result
+
+# Format stack entries when the parser is running in debug mode
+def format_stack_entry(r):
+ repr_str = repr(r)
+ if '\n' in repr_str:
+ repr_str = repr(repr_str)
+ if len(repr_str) < 16:
+ return repr_str
+ else:
+ return '<%s @ 0x%x>' % (type(r).__name__, id(r))
+
+# Panic mode error recovery support. This feature is being reworked--much of the
+# code here is to offer a deprecation/backwards compatible transition
+
+_errok = None
+_token = None
+_restart = None
+_warnmsg = '''PLY: Don't use global functions errok(), token(), and restart() in p_error().
+Instead, invoke the methods on the associated parser instance:
+
+ def p_error(p):
+ ...
+ # Use parser.errok(), parser.token(), parser.restart()
+ ...
+
+ parser = yacc.yacc()
+'''
+
+def errok():
+ warnings.warn(_warnmsg)
+ return _errok()
+
+def restart():
+ warnings.warn(_warnmsg)
+ return _restart()
+
+def token():
+ warnings.warn(_warnmsg)
+ return _token()
+
+# Utility function to call the p_error() function with some deprecation hacks
+def call_errorfunc(errorfunc, token, parser):
+ global _errok, _token, _restart
+ _errok = parser.errok
+ _token = parser.token
+ _restart = parser.restart
+ r = errorfunc(token)
+ try:
+ del _errok, _token, _restart
+ except NameError:
+ pass
+ return r
+
+#-----------------------------------------------------------------------------
+# === LR Parsing Engine ===
+#
+# The following classes are used for the LR parser itself. These are not
+# used during table construction and are independent of the actual LR
+# table generation algorithm
+#-----------------------------------------------------------------------------
+
+# This class is used to hold non-terminal grammar symbols during parsing.
+# It normally has the following attributes set:
+# .type = Grammar symbol type
+# .value = Symbol value
+# .lineno = Starting line number
+# .endlineno = Ending line number (optional, set automatically)
+# .lexpos = Starting lex position
+# .endlexpos = Ending lex position (optional, set automatically)
+
+class YaccSymbol:
+ def __str__(self):
+ return self.type
+
+ def __repr__(self):
+ return str(self)
+
+# This class is a wrapper around the objects actually passed to each
+# grammar rule. Index lookup and assignment actually assign the
+# .value attribute of the underlying YaccSymbol object.
+# The lineno() method returns the line number of a given
+# item (or 0 if not defined). The linespan() method returns
+# a tuple of (startline,endline) representing the range of lines
+# for a symbol. The lexspan() method returns a tuple (lexpos,endlexpos)
+# representing the range of positional information for a symbol.
+
+class YaccProduction:
+ def __init__(self, s, stack=None):
+ self.slice = s
+ self.stack = stack
+ self.lexer = None
+ self.parser = None
+
+ def __getitem__(self, n):
+ if isinstance(n, slice):
+ return [s.value for s in self.slice[n]]
+ elif n >= 0:
+ return self.slice[n].value
+ else:
+ return self.stack[n].value
+
+ def __setitem__(self, n, v):
+ self.slice[n].value = v
+
+ def __getslice__(self, i, j):
+ return [s.value for s in self.slice[i:j]]
+
+ def __len__(self):
+ return len(self.slice)
+
+ def lineno(self, n):
+ return getattr(self.slice[n], 'lineno', 0)
+
+ def set_lineno(self, n, lineno):
+ self.slice[n].lineno = lineno
+
+ def linespan(self, n):
+ startline = getattr(self.slice[n], 'lineno', 0)
+ endline = getattr(self.slice[n], 'endlineno', startline)
+ return startline, endline
+
+ def lexpos(self, n):
+ return getattr(self.slice[n], 'lexpos', 0)
+
+ def lexspan(self, n):
+ startpos = getattr(self.slice[n], 'lexpos', 0)
+ endpos = getattr(self.slice[n], 'endlexpos', startpos)
+ return startpos, endpos
+
+ def error(self):
+ raise SyntaxError
+
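+# Grammar-action sketch (not part of this module): inside a p_ rule the
+# YaccProduction instance maps indexing onto the .value of the underlying
+# symbols:
+#
+#     def p_expr_plus(p):
+#         'expr : expr PLUS term'
+#         p[0] = p[1] + p[3]        # p.lineno(1) -> line number of 'expr'
+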
+# -----------------------------------------------------------------------------
+# == LRParser ==
+#
+# The LR Parsing engine.
+# -----------------------------------------------------------------------------
+
+class LRParser:
+ def __init__(self, lrtab, errorf):
+ self.productions = lrtab.lr_productions
+ self.action = lrtab.lr_action
+ self.goto = lrtab.lr_goto
+ self.errorfunc = errorf
+ self.set_defaulted_states()
+ self.errorok = True
+
+ def errok(self):
+ self.errorok = True
+
+ def restart(self):
+ del self.statestack[:]
+ del self.symstack[:]
+ sym = YaccSymbol()
+ sym.type = '$end'
+ self.symstack.append(sym)
+ self.statestack.append(0)
+
+ # Defaulted state support.
+ # This method identifies parser states where there is only one possible reduction action.
+    # For such states, the parser can choose to make a rule reduction without consuming
+ # the next look-ahead token. This delayed invocation of the tokenizer can be useful in
+ # certain kinds of advanced parsing situations where the lexer and parser interact with
+ # each other or change states (i.e., manipulation of scope, lexer states, etc.).
+ #
+ # See: https://www.gnu.org/software/bison/manual/html_node/Default-Reductions.html#Default-Reductions
+ def set_defaulted_states(self):
+ self.defaulted_states = {}
+ for state, actions in self.action.items():
+ rules = list(actions.values())
+ if len(rules) == 1 and rules[0] < 0:
+ self.defaulted_states[state] = rules[0]
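+        # Illustration (assumed values): a state whose action table is the
+        # single entry {'$end': -3} admits exactly one action, a reduction
+        # by rule 3, so it is recorded as defaulted_states[state] = -3 and
+        # the parser reduces there without fetching the lookahead first.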
+
+ def disable_defaulted_states(self):
+ self.defaulted_states = {}
+
+ def parse(self, input=None, lexer=None, debug=False, tracking=False, tokenfunc=None):
+ if debug or yaccdevel:
+ if isinstance(debug, int):
+ debug = PlyLogger(sys.stderr)
+ return self.parsedebug(input, lexer, debug, tracking, tokenfunc)
+ elif tracking:
+ return self.parseopt(input, lexer, debug, tracking, tokenfunc)
+ else:
+ return self.parseopt_notrack(input, lexer, debug, tracking, tokenfunc)
+
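+    # Usage sketch (illustrative): with a lexer already built,
+    #
+    #     result = parser.parse('3 + 4 * 5', lexer=lexer, debug=False)
+    #
+    # dispatches to one of the three engine variants selected above.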
+
+ # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+ # parsedebug().
+ #
+ # This is the debugging enabled version of parse(). All changes made to the
+ # parsing engine should be made here. Optimized versions of this function
+ # are automatically created by the ply/ygen.py script. This script cuts out
+ # sections enclosed in markers such as this:
+ #
+ # #--! DEBUG
+ # statements
+ # #--! DEBUG
+ #
+ # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+ def parsedebug(self, input=None, lexer=None, debug=False, tracking=False, tokenfunc=None):
+ #--! parsedebug-start
+ lookahead = None # Current lookahead symbol
+ lookaheadstack = [] # Stack of lookahead symbols
+ actions = self.action # Local reference to action table (to avoid lookup on self.)
+ goto = self.goto # Local reference to goto table (to avoid lookup on self.)
+ prod = self.productions # Local reference to production list (to avoid lookup on self.)
+ defaulted_states = self.defaulted_states # Local reference to defaulted states
+ pslice = YaccProduction(None) # Production object passed to grammar rules
+ errorcount = 0 # Used during error recovery
+
+ #--! DEBUG
+ debug.info('PLY: PARSE DEBUG START')
+ #--! DEBUG
+
+ # If no lexer was given, we will try to use the lex module
+ if not lexer:
+ from . import lex
+ lexer = lex.lexer
+
+ # Set up the lexer and parser objects on pslice
+ pslice.lexer = lexer
+ pslice.parser = self
+
+ # If input was supplied, pass to lexer
+ if input is not None:
+ lexer.input(input)
+
+ if tokenfunc is None:
+ # Tokenize function
+ get_token = lexer.token
+ else:
+ get_token = tokenfunc
+
+ # Set the parser() token method (sometimes used in error recovery)
+ self.token = get_token
+
+ # Set up the state and symbol stacks
+
+ statestack = [] # Stack of parsing states
+ self.statestack = statestack
+ symstack = [] # Stack of grammar symbols
+ self.symstack = symstack
+
+ pslice.stack = symstack # Put in the production
+ errtoken = None # Err token
+
+ # The start state is assumed to be (0,$end)
+
+ statestack.append(0)
+ sym = YaccSymbol()
+ sym.type = '$end'
+ symstack.append(sym)
+ state = 0
+ while True:
+ # Get the next symbol on the input. If a lookahead symbol
+ # is already set, we just use that. Otherwise, we'll pull
+ # the next token off of the lookaheadstack or from the lexer
+
+ #--! DEBUG
+ debug.debug('')
+ debug.debug('State : %s', state)
+ #--! DEBUG
+
+ if state not in defaulted_states:
+ if not lookahead:
+ if not lookaheadstack:
+ lookahead = get_token() # Get the next token
+ else:
+ lookahead = lookaheadstack.pop()
+ if not lookahead:
+ lookahead = YaccSymbol()
+ lookahead.type = '$end'
+
+ # Check the action table
+ ltype = lookahead.type
+ t = actions[state].get(ltype)
+ else:
+ t = defaulted_states[state]
+ #--! DEBUG
+ debug.debug('Defaulted state %s: Reduce using %d', state, -t)
+ #--! DEBUG
+
+ #--! DEBUG
+ debug.debug('Stack : %s',
+ ('%s . %s' % (' '.join([xx.type for xx in symstack][1:]), str(lookahead))).lstrip())
+ #--! DEBUG
+
+ if t is not None:
+ if t > 0:
+ # shift a symbol on the stack
+ statestack.append(t)
+ state = t
+
+ #--! DEBUG
+ debug.debug('Action : Shift and goto state %s', t)
+ #--! DEBUG
+
+ symstack.append(lookahead)
+ lookahead = None
+
+ # Decrease error count on successful shift
+ if errorcount:
+ errorcount -= 1
+ continue
+
+ if t < 0:
+ # reduce a symbol on the stack, emit a production
+ p = prod[-t]
+ pname = p.name
+ plen = p.len
+
+ # Get production function
+ sym = YaccSymbol()
+ sym.type = pname # Production name
+ sym.value = None
+
+ #--! DEBUG
+ if plen:
+ debug.info('Action : Reduce rule [%s] with %s and goto state %d', p.str,
+ '['+','.join([format_stack_entry(_v.value) for _v in symstack[-plen:]])+']',
+ goto[statestack[-1-plen]][pname])
+ else:
+ debug.info('Action : Reduce rule [%s] with %s and goto state %d', p.str, [],
+ goto[statestack[-1]][pname])
+
+ #--! DEBUG
+
+ if plen:
+ targ = symstack[-plen-1:]
+ targ[0] = sym
+
+ #--! TRACKING
+ if tracking:
+ t1 = targ[1]
+ sym.lineno = t1.lineno
+ sym.lexpos = t1.lexpos
+ t1 = targ[-1]
+ sym.endlineno = getattr(t1, 'endlineno', t1.lineno)
+ sym.endlexpos = getattr(t1, 'endlexpos', t1.lexpos)
+ #--! TRACKING
+
+ # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+ # The code enclosed in this section is duplicated
+ # below as a performance optimization. Make sure
+ # changes get made in both locations.
+
+ pslice.slice = targ
+
+ try:
+ # Call the grammar rule with our special slice object
+ del symstack[-plen:]
+ self.state = state
+ p.callable(pslice)
+ del statestack[-plen:]
+ #--! DEBUG
+ debug.info('Result : %s', format_result(pslice[0]))
+ #--! DEBUG
+ symstack.append(sym)
+ state = goto[statestack[-1]][pname]
+ statestack.append(state)
+ except SyntaxError:
+ # If an error was set. Enter error recovery state
+ lookaheadstack.append(lookahead) # Save the current lookahead token
+ symstack.extend(targ[1:-1]) # Put the production slice back on the stack
+ statestack.pop() # Pop back one state (before the reduce)
+ state = statestack[-1]
+ sym.type = 'error'
+ sym.value = 'error'
+ lookahead = sym
+ errorcount = error_count
+ self.errorok = False
+
+ continue
+ # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+ else:
+
+ #--! TRACKING
+ if tracking:
+ sym.lineno = lexer.lineno
+ sym.lexpos = lexer.lexpos
+ #--! TRACKING
+
+ targ = [sym]
+
+ # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+ # The code enclosed in this section is duplicated
+ # above as a performance optimization. Make sure
+ # changes get made in both locations.
+
+ pslice.slice = targ
+
+ try:
+ # Call the grammar rule with our special slice object
+ self.state = state
+ p.callable(pslice)
+ #--! DEBUG
+ debug.info('Result : %s', format_result(pslice[0]))
+ #--! DEBUG
+ symstack.append(sym)
+ state = goto[statestack[-1]][pname]
+ statestack.append(state)
+ except SyntaxError:
+ # If an error was set. Enter error recovery state
+ lookaheadstack.append(lookahead) # Save the current lookahead token
+ statestack.pop() # Pop back one state (before the reduce)
+ state = statestack[-1]
+ sym.type = 'error'
+ sym.value = 'error'
+ lookahead = sym
+ errorcount = error_count
+ self.errorok = False
+
+ continue
+ # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+ if t == 0:
+ n = symstack[-1]
+ result = getattr(n, 'value', None)
+ #--! DEBUG
+ debug.info('Done : Returning %s', format_result(result))
+ debug.info('PLY: PARSE DEBUG END')
+ #--! DEBUG
+ return result
+
+ if t is None:
+
+ #--! DEBUG
+ debug.error('Error : %s',
+ ('%s . %s' % (' '.join([xx.type for xx in symstack][1:]), str(lookahead))).lstrip())
+ #--! DEBUG
+
+ # We have some kind of parsing error here. To handle
+ # this, we are going to push the current token onto
+ # the tokenstack and replace it with an 'error' token.
+ # If there are any synchronization rules, they may
+ # catch it.
+ #
+ # In addition to pushing the error token, we call
+ # the user defined p_error() function if this is the
+ # first syntax error. This function is only called if
+ # errorcount == 0.
+ if errorcount == 0 or self.errorok:
+ errorcount = error_count
+ self.errorok = False
+ errtoken = lookahead
+ if errtoken.type == '$end':
+ errtoken = None # End of file!
+ if self.errorfunc:
+ if errtoken and not hasattr(errtoken, 'lexer'):
+ errtoken.lexer = lexer
+ self.state = state
+ tok = call_errorfunc(self.errorfunc, errtoken, self)
+ if self.errorok:
+ # User must have done some kind of panic
+ # mode recovery on their own. The
+ # returned token is the next lookahead
+ lookahead = tok
+ errtoken = None
+ continue
+ else:
+ if errtoken:
+ if hasattr(errtoken, 'lineno'):
+ lineno = lookahead.lineno
+ else:
+ lineno = 0
+ if lineno:
+ sys.stderr.write('yacc: Syntax error at line %d, token=%s\n' % (lineno, errtoken.type))
+ else:
+ sys.stderr.write('yacc: Syntax error, token=%s\n' % errtoken.type)
+ else:
+ sys.stderr.write('yacc: Parse error in input. EOF\n')
+ return
+
+ else:
+ errorcount = error_count
+
+ # case 1: the statestack only has 1 entry on it. If we're in this state, the
+ # entire parse has been rolled back and we're completely hosed. The token is
+ # discarded and we just keep going.
+
+ if len(statestack) <= 1 and lookahead.type != '$end':
+ lookahead = None
+ errtoken = None
+ state = 0
+ # Nuke the pushback stack
+ del lookaheadstack[:]
+ continue
+
+ # case 2: the statestack has a couple of entries on it, but we're
+ # at the end of the file. nuke the top entry and generate an error token
+
+ # Start nuking entries on the stack
+ if lookahead.type == '$end':
+ # Whoa. We're really hosed here. Bail out
+ return
+
+ if lookahead.type != 'error':
+ sym = symstack[-1]
+ if sym.type == 'error':
+ # Hmmm. Error is on top of stack, we'll just nuke input
+ # symbol and continue
+ #--! TRACKING
+ if tracking:
+ sym.endlineno = getattr(lookahead, 'lineno', sym.lineno)
+ sym.endlexpos = getattr(lookahead, 'lexpos', sym.lexpos)
+ #--! TRACKING
+ lookahead = None
+ continue
+
+ # Create the error symbol for the first time and make it the new lookahead symbol
+ t = YaccSymbol()
+ t.type = 'error'
+
+ if hasattr(lookahead, 'lineno'):
+ t.lineno = t.endlineno = lookahead.lineno
+ if hasattr(lookahead, 'lexpos'):
+ t.lexpos = t.endlexpos = lookahead.lexpos
+ t.value = lookahead
+ lookaheadstack.append(lookahead)
+ lookahead = t
+ else:
+ sym = symstack.pop()
+ #--! TRACKING
+ if tracking:
+ lookahead.lineno = sym.lineno
+ lookahead.lexpos = sym.lexpos
+ #--! TRACKING
+ statestack.pop()
+ state = statestack[-1]
+
+ continue
+
+ # Call an error function here
+ raise RuntimeError('yacc: internal parser error!!!\n')
+
+ #--! parsedebug-end
+
+ # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+ # parseopt().
+ #
+ # Optimized version of parse() method. DO NOT EDIT THIS CODE DIRECTLY!
+ # This code is automatically generated by the ply/ygen.py script. Make
+ # changes to the parsedebug() method instead.
+ # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+ def parseopt(self, input=None, lexer=None, debug=False, tracking=False, tokenfunc=None):
+ #--! parseopt-start
+ lookahead = None # Current lookahead symbol
+ lookaheadstack = [] # Stack of lookahead symbols
+ actions = self.action # Local reference to action table (to avoid lookup on self.)
+ goto = self.goto # Local reference to goto table (to avoid lookup on self.)
+ prod = self.productions # Local reference to production list (to avoid lookup on self.)
+ defaulted_states = self.defaulted_states # Local reference to defaulted states
+ pslice = YaccProduction(None) # Production object passed to grammar rules
+ errorcount = 0 # Used during error recovery
+
+
+ # If no lexer was given, we will try to use the lex module
+ if not lexer:
+ from . import lex
+ lexer = lex.lexer
+
+ # Set up the lexer and parser objects on pslice
+ pslice.lexer = lexer
+ pslice.parser = self
+
+ # If input was supplied, pass to lexer
+ if input is not None:
+ lexer.input(input)
+
+ if tokenfunc is None:
+ # Tokenize function
+ get_token = lexer.token
+ else:
+ get_token = tokenfunc
+
+ # Set the parser() token method (sometimes used in error recovery)
+ self.token = get_token
+
+ # Set up the state and symbol stacks
+
+ statestack = [] # Stack of parsing states
+ self.statestack = statestack
+ symstack = [] # Stack of grammar symbols
+ self.symstack = symstack
+
+ pslice.stack = symstack # Put in the production
+ errtoken = None # Err token
+
+ # The start state is assumed to be (0,$end)
+
+ statestack.append(0)
+ sym = YaccSymbol()
+ sym.type = '$end'
+ symstack.append(sym)
+ state = 0
+ while True:
+ # Get the next symbol on the input. If a lookahead symbol
+ # is already set, we just use that. Otherwise, we'll pull
+ # the next token off of the lookaheadstack or from the lexer
+
+
+ if state not in defaulted_states:
+ if not lookahead:
+ if not lookaheadstack:
+ lookahead = get_token() # Get the next token
+ else:
+ lookahead = lookaheadstack.pop()
+ if not lookahead:
+ lookahead = YaccSymbol()
+ lookahead.type = '$end'
+
+ # Check the action table
+ ltype = lookahead.type
+ t = actions[state].get(ltype)
+ else:
+ t = defaulted_states[state]
+
+
+ if t is not None:
+ if t > 0:
+ # shift a symbol on the stack
+ statestack.append(t)
+ state = t
+
+
+ symstack.append(lookahead)
+ lookahead = None
+
+ # Decrease error count on successful shift
+ if errorcount:
+ errorcount -= 1
+ continue
+
+ if t < 0:
+ # reduce a symbol on the stack, emit a production
+ p = prod[-t]
+ pname = p.name
+ plen = p.len
+
+ # Get production function
+ sym = YaccSymbol()
+ sym.type = pname # Production name
+ sym.value = None
+
+
+ if plen:
+ targ = symstack[-plen-1:]
+ targ[0] = sym
+
+ #--! TRACKING
+ if tracking:
+ t1 = targ[1]
+ sym.lineno = t1.lineno
+ sym.lexpos = t1.lexpos
+ t1 = targ[-1]
+ sym.endlineno = getattr(t1, 'endlineno', t1.lineno)
+ sym.endlexpos = getattr(t1, 'endlexpos', t1.lexpos)
+ #--! TRACKING
+
+ # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+ # The code enclosed in this section is duplicated
+ # below as a performance optimization. Make sure
+ # changes get made in both locations.
+
+ pslice.slice = targ
+
+ try:
+ # Call the grammar rule with our special slice object
+ del symstack[-plen:]
+ self.state = state
+ p.callable(pslice)
+ del statestack[-plen:]
+ symstack.append(sym)
+ state = goto[statestack[-1]][pname]
+ statestack.append(state)
+ except SyntaxError:
+ # If an error was set. Enter error recovery state
+ lookaheadstack.append(lookahead) # Save the current lookahead token
+ symstack.extend(targ[1:-1]) # Put the production slice back on the stack
+ statestack.pop() # Pop back one state (before the reduce)
+ state = statestack[-1]
+ sym.type = 'error'
+ sym.value = 'error'
+ lookahead = sym
+ errorcount = error_count
+ self.errorok = False
+
+ continue
+ # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+ else:
+
+ #--! TRACKING
+ if tracking:
+ sym.lineno = lexer.lineno
+ sym.lexpos = lexer.lexpos
+ #--! TRACKING
+
+ targ = [sym]
+
+ # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+ # The code enclosed in this section is duplicated
+ # above as a performance optimization. Make sure
+ # changes get made in both locations.
+
+ pslice.slice = targ
+
+ try:
+ # Call the grammar rule with our special slice object
+ self.state = state
+ p.callable(pslice)
+ symstack.append(sym)
+ state = goto[statestack[-1]][pname]
+ statestack.append(state)
+ except SyntaxError:
+ # If an error was set. Enter error recovery state
+ lookaheadstack.append(lookahead) # Save the current lookahead token
+ statestack.pop() # Pop back one state (before the reduce)
+ state = statestack[-1]
+ sym.type = 'error'
+ sym.value = 'error'
+ lookahead = sym
+ errorcount = error_count
+ self.errorok = False
+
+ continue
+ # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+ if t == 0:
+ n = symstack[-1]
+ result = getattr(n, 'value', None)
+ return result
+
+ if t is None:
+
+
+ # We have some kind of parsing error here. To handle
+ # this, we are going to push the current token onto
+ # the tokenstack and replace it with an 'error' token.
+ # If there are any synchronization rules, they may
+ # catch it.
+ #
+ # In addition to pushing the error token, we call
+ # the user defined p_error() function if this is the
+ # first syntax error. This function is only called if
+ # errorcount == 0.
+ if errorcount == 0 or self.errorok:
+ errorcount = error_count
+ self.errorok = False
+ errtoken = lookahead
+ if errtoken.type == '$end':
+ errtoken = None # End of file!
+ if self.errorfunc:
+ if errtoken and not hasattr(errtoken, 'lexer'):
+ errtoken.lexer = lexer
+ self.state = state
+ tok = call_errorfunc(self.errorfunc, errtoken, self)
+ if self.errorok:
+ # User must have done some kind of panic
+ # mode recovery on their own. The
+ # returned token is the next lookahead
+ lookahead = tok
+ errtoken = None
+ continue
+ else:
+ if errtoken:
+ if hasattr(errtoken, 'lineno'):
+ lineno = lookahead.lineno
+ else:
+ lineno = 0
+ if lineno:
+ sys.stderr.write('yacc: Syntax error at line %d, token=%s\n' % (lineno, errtoken.type))
+ else:
+ sys.stderr.write('yacc: Syntax error, token=%s\n' % errtoken.type)
+ else:
+ sys.stderr.write('yacc: Parse error in input. EOF\n')
+ return
+
+ else:
+ errorcount = error_count
+
+ # case 1: the statestack only has 1 entry on it. If we're in this state, the
+ # entire parse has been rolled back and we're completely hosed. The token is
+ # discarded and we just keep going.
+
+ if len(statestack) <= 1 and lookahead.type != '$end':
+ lookahead = None
+ errtoken = None
+ state = 0
+ # Nuke the pushback stack
+ del lookaheadstack[:]
+ continue
+
+ # case 2: the statestack has a couple of entries on it, but we're
+ # at the end of the file. nuke the top entry and generate an error token
+
+ # Start nuking entries on the stack
+ if lookahead.type == '$end':
+ # Whoa. We're really hosed here. Bail out
+ return
+
+ if lookahead.type != 'error':
+ sym = symstack[-1]
+ if sym.type == 'error':
+ # Hmmm. Error is on top of stack, we'll just nuke input
+ # symbol and continue
+ #--! TRACKING
+ if tracking:
+ sym.endlineno = getattr(lookahead, 'lineno', sym.lineno)
+ sym.endlexpos = getattr(lookahead, 'lexpos', sym.lexpos)
+ #--! TRACKING
+ lookahead = None
+ continue
+
+ # Create the error symbol for the first time and make it the new lookahead symbol
+ t = YaccSymbol()
+ t.type = 'error'
+
+ if hasattr(lookahead, 'lineno'):
+ t.lineno = t.endlineno = lookahead.lineno
+ if hasattr(lookahead, 'lexpos'):
+ t.lexpos = t.endlexpos = lookahead.lexpos
+ t.value = lookahead
+ lookaheadstack.append(lookahead)
+ lookahead = t
+ else:
+ sym = symstack.pop()
+ #--! TRACKING
+ if tracking:
+ lookahead.lineno = sym.lineno
+ lookahead.lexpos = sym.lexpos
+ #--! TRACKING
+ statestack.pop()
+ state = statestack[-1]
+
+ continue
+
+ # Call an error function here
+ raise RuntimeError('yacc: internal parser error!!!\n')
+
+ #--! parseopt-end
+
+ # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+ # parseopt_notrack().
+ #
+ # Optimized version of parseopt() with line number tracking removed.
+ # DO NOT EDIT THIS CODE DIRECTLY. This code is automatically generated
+ # by the ply/ygen.py script. Make changes to the parsedebug() method instead.
+ # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+ def parseopt_notrack(self, input=None, lexer=None, debug=False, tracking=False, tokenfunc=None):
+ #--! parseopt-notrack-start
+ lookahead = None # Current lookahead symbol
+ lookaheadstack = [] # Stack of lookahead symbols
+ actions = self.action # Local reference to action table (to avoid lookup on self.)
+ goto = self.goto # Local reference to goto table (to avoid lookup on self.)
+ prod = self.productions # Local reference to production list (to avoid lookup on self.)
+ defaulted_states = self.defaulted_states # Local reference to defaulted states
+ pslice = YaccProduction(None) # Production object passed to grammar rules
+ errorcount = 0 # Used during error recovery
+
+
+ # If no lexer was given, we will try to use the lex module
+ if not lexer:
+ from . import lex
+ lexer = lex.lexer
+
+ # Set up the lexer and parser objects on pslice
+ pslice.lexer = lexer
+ pslice.parser = self
+
+ # If input was supplied, pass to lexer
+ if input is not None:
+ lexer.input(input)
+
+ if tokenfunc is None:
+ # Tokenize function
+ get_token = lexer.token
+ else:
+ get_token = tokenfunc
+
+ # Set the parser() token method (sometimes used in error recovery)
+ self.token = get_token
+
+ # Set up the state and symbol stacks
+
+ statestack = [] # Stack of parsing states
+ self.statestack = statestack
+ symstack = [] # Stack of grammar symbols
+ self.symstack = symstack
+
+ pslice.stack = symstack # Put in the production
+ errtoken = None # Err token
+
+ # The start state is assumed to be (0,$end)
+
+ statestack.append(0)
+ sym = YaccSymbol()
+ sym.type = '$end'
+ symstack.append(sym)
+ state = 0
+ while True:
+ # Get the next symbol on the input. If a lookahead symbol
+ # is already set, we just use that. Otherwise, we'll pull
+ # the next token off of the lookaheadstack or from the lexer
+
+
+ if state not in defaulted_states:
+ if not lookahead:
+ if not lookaheadstack:
+ lookahead = get_token() # Get the next token
+ else:
+ lookahead = lookaheadstack.pop()
+ if not lookahead:
+ lookahead = YaccSymbol()
+ lookahead.type = '$end'
+
+ # Check the action table
+ ltype = lookahead.type
+ t = actions[state].get(ltype)
+ else:
+ t = defaulted_states[state]
+
+
+ if t is not None:
+ if t > 0:
+ # shift a symbol on the stack
+ statestack.append(t)
+ state = t
+
+
+ symstack.append(lookahead)
+ lookahead = None
+
+ # Decrease error count on successful shift
+ if errorcount:
+ errorcount -= 1
+ continue
+
+ if t < 0:
+ # reduce a symbol on the stack, emit a production
+ p = prod[-t]
+ pname = p.name
+ plen = p.len
+
+ # Get production function
+ sym = YaccSymbol()
+ sym.type = pname # Production name
+ sym.value = None
+
+
+ if plen:
+ targ = symstack[-plen-1:]
+ targ[0] = sym
+
+
+ # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+ # The code enclosed in this section is duplicated
+ # below as a performance optimization. Make sure
+ # changes get made in both locations.
+
+ pslice.slice = targ
+
+ try:
+ # Call the grammar rule with our special slice object
+ del symstack[-plen:]
+ self.state = state
+ p.callable(pslice)
+ del statestack[-plen:]
+ symstack.append(sym)
+ state = goto[statestack[-1]][pname]
+ statestack.append(state)
+ except SyntaxError:
+ # If an error was set. Enter error recovery state
+ lookaheadstack.append(lookahead) # Save the current lookahead token
+ symstack.extend(targ[1:-1]) # Put the production slice back on the stack
+ statestack.pop() # Pop back one state (before the reduce)
+ state = statestack[-1]
+ sym.type = 'error'
+ sym.value = 'error'
+ lookahead = sym
+ errorcount = error_count
+ self.errorok = False
+
+ continue
+ # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+ else:
+
+
+ targ = [sym]
+
+ # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+ # The code enclosed in this section is duplicated
+ # above as a performance optimization. Make sure
+ # changes get made in both locations.
+
+ pslice.slice = targ
+
+ try:
+ # Call the grammar rule with our special slice object
+ self.state = state
+ p.callable(pslice)
+ symstack.append(sym)
+ state = goto[statestack[-1]][pname]
+ statestack.append(state)
+ except SyntaxError:
+ # If an error was set. Enter error recovery state
+ lookaheadstack.append(lookahead) # Save the current lookahead token
+ statestack.pop() # Pop back one state (before the reduce)
+ state = statestack[-1]
+ sym.type = 'error'
+ sym.value = 'error'
+ lookahead = sym
+ errorcount = error_count
+ self.errorok = False
+
+ continue
+ # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+ if t == 0:
+ n = symstack[-1]
+ result = getattr(n, 'value', None)
+ return result
+
+ if t is None:
+
+
+ # We have some kind of parsing error here. To handle
+ # this, we are going to push the current token onto
+ # the tokenstack and replace it with an 'error' token.
+ # If there are any synchronization rules, they may
+ # catch it.
+ #
+ # In addition to pushing the error token, we call
+ # the user defined p_error() function if this is the
+ # first syntax error. This function is only called if
+ # errorcount == 0.
+ if errorcount == 0 or self.errorok:
+ errorcount = error_count
+ self.errorok = False
+ errtoken = lookahead
+ if errtoken.type == '$end':
+ errtoken = None # End of file!
+ if self.errorfunc:
+ if errtoken and not hasattr(errtoken, 'lexer'):
+ errtoken.lexer = lexer
+ self.state = state
+ tok = call_errorfunc(self.errorfunc, errtoken, self)
+ if self.errorok:
+ # User must have done some kind of panic
+ # mode recovery on their own. The
+ # returned token is the next lookahead
+ lookahead = tok
+ errtoken = None
+ continue
+ else:
+ if errtoken:
+ if hasattr(errtoken, 'lineno'):
+ lineno = lookahead.lineno
+ else:
+ lineno = 0
+ if lineno:
+ sys.stderr.write('yacc: Syntax error at line %d, token=%s\n' % (lineno, errtoken.type))
+ else:
+ sys.stderr.write('yacc: Syntax error, token=%s\n' % errtoken.type)
+ else:
+ sys.stderr.write('yacc: Parse error in input. EOF\n')
+ return
+
+ else:
+ errorcount = error_count
+
+ # case 1: the statestack only has 1 entry on it. If we're in this state, the
+ # entire parse has been rolled back and we're completely hosed. The token is
+ # discarded and we just keep going.
+
+ if len(statestack) <= 1 and lookahead.type != '$end':
+ lookahead = None
+ errtoken = None
+ state = 0
+ # Nuke the pushback stack
+ del lookaheadstack[:]
+ continue
+
+ # case 2: the statestack has a couple of entries on it, but we're
+ # at the end of the file. nuke the top entry and generate an error token
+
+ # Start nuking entries on the stack
+ if lookahead.type == '$end':
+ # Whoa. We're really hosed here. Bail out
+ return
+
+ if lookahead.type != 'error':
+ sym = symstack[-1]
+ if sym.type == 'error':
+ # Hmmm. Error is on top of stack, we'll just nuke input
+ # symbol and continue
+ lookahead = None
+ continue
+
+ # Create the error symbol for the first time and make it the new lookahead symbol
+ t = YaccSymbol()
+ t.type = 'error'
+
+ if hasattr(lookahead, 'lineno'):
+ t.lineno = t.endlineno = lookahead.lineno
+ if hasattr(lookahead, 'lexpos'):
+ t.lexpos = t.endlexpos = lookahead.lexpos
+ t.value = lookahead
+ lookaheadstack.append(lookahead)
+ lookahead = t
+ else:
+ sym = symstack.pop()
+ statestack.pop()
+ state = statestack[-1]
+
+ continue
+
+ # Call an error function here
+ raise RuntimeError('yacc: internal parser error!!!\n')
+
+ #--! parseopt-notrack-end
+
+# -----------------------------------------------------------------------------
+# === Grammar Representation ===
+#
+# The following functions, classes, and variables are used to represent and
+# manipulate the rules that make up a grammar.
+# -----------------------------------------------------------------------------
+
+# regex matching identifiers
+_is_identifier = re.compile(r'^[a-zA-Z0-9_-]+$')
+
+# -----------------------------------------------------------------------------
+# class Production:
+#
+# This class stores the raw information about a single production or grammar rule.
+# A grammar rule refers to a specification such as this:
+#
+# expr : expr PLUS term
+#
+# Here are the basic attributes defined on all productions
+#
+# name - Name of the production. For example 'expr'
+# prod - A list of symbols on the right side ['expr','PLUS','term']
+# prec - Production precedence level
+# number - Production number.
+# func - Function that executes on reduce
+# file - File where production function is defined
+# lineno - Line number where production function is defined
+#
+# The following attributes are also defined:
+#
+# len - Length of the production (number of symbols on right hand side)
+# usyms - Set of unique symbols found in the production
+# -----------------------------------------------------------------------------
+
+class Production(object):
+ reduced = 0
+ def __init__(self, number, name, prod, precedence=('right', 0), func=None, file='', line=0):
+ self.name = name
+ self.prod = tuple(prod)
+ self.number = number
+ self.func = func
+ self.callable = None
+ self.file = file
+ self.line = line
+ self.prec = precedence
+
+ # Internal settings used during table construction
+
+ self.len = len(self.prod) # Length of the production
+
+ # Create a list of unique production symbols used in the production
+ self.usyms = []
+ for s in self.prod:
+ if s not in self.usyms:
+ self.usyms.append(s)
+
+ # List of all LR items for the production
+ self.lr_items = []
+ self.lr_next = None
+
+ # Create a string representation
+ if self.prod:
+ self.str = '%s -> %s' % (self.name, ' '.join(self.prod))
+ else:
+ self.str = '%s -> ' % self.name
+
+ def __str__(self):
+ return self.str
+
+ def __repr__(self):
+ return 'Production(' + str(self) + ')'
+
+ def __len__(self):
+ return len(self.prod)
+
+ def __nonzero__(self):
+ return 1
+
+ def __getitem__(self, index):
+ return self.prod[index]
+
+ # Return the nth lr_item from the production (or None if at the end)
+ def lr_item(self, n):
+ if n > len(self.prod):
+ return None
+ p = LRItem(self, n)
+ # Precompute the list of productions immediately following.
+ try:
+ p.lr_after = Prodnames[p.prod[n+1]]
+ except (IndexError, KeyError):
+ p.lr_after = []
+ try:
+ p.lr_before = p.prod[n-1]
+ except IndexError:
+ p.lr_before = None
+ return p
+
+ # Bind the production function name to a callable
+ def bind(self, pdict):
+ if self.func:
+ self.callable = pdict[self.func]
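+
+# Example (hypothetical production number): for the rule 'expr : expr PLUS term',
+# p = Production(1, 'expr', ['expr', 'PLUS', 'term']) gives p.len == 3,
+# p.usyms == ['expr', 'PLUS', 'term'], and str(p) == 'expr -> expr PLUS term'.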
+
+# This class serves as a minimal stand-in for Production objects when
+# reading table data from files. It only contains information
+# actually used by the LR parsing engine, plus some additional
+# debugging information.
+class MiniProduction(object):
+ def __init__(self, str, name, len, func, file, line):
+ self.name = name
+ self.len = len
+ self.func = func
+ self.callable = None
+ self.file = file
+ self.line = line
+ self.str = str
+
+ def __str__(self):
+ return self.str
+
+ def __repr__(self):
+ return 'MiniProduction(%s)' % self.str
+
+ # Bind the production function name to a callable
+ def bind(self, pdict):
+ if self.func:
+ self.callable = pdict[self.func]
+
+
+# -----------------------------------------------------------------------------
+# class LRItem
+#
+# This class represents a specific stage of parsing a production rule. For
+# example:
+#
+# expr : expr . PLUS term
+#
+# In the above, the "." represents the current location of the parse. Here
+# are the basic attributes:
+#
+# name - Name of the production. For example 'expr'
+# prod - A list of symbols on the right side ['expr','.', 'PLUS','term']
+# number - Production number.
+#
+# lr_next - Next LR item. For example, if we are 'expr -> expr . PLUS term'
+# then lr_next refers to 'expr -> expr PLUS . term'
+# lr_index - LR item index (location of the ".") in the prod list.
+# lookaheads - LALR lookahead symbols for this item
+# len - Length of the production (number of symbols on right hand side)
+# lr_after - List of all productions that immediately follow
+# lr_before - Grammar symbol immediately before
+# -----------------------------------------------------------------------------
+
+class LRItem(object):
+ def __init__(self, p, n):
+ self.name = p.name
+ self.prod = list(p.prod)
+ self.number = p.number
+ self.lr_index = n
+ self.lookaheads = {}
+ self.prod.insert(n, '.')
+ self.prod = tuple(self.prod)
+ self.len = len(self.prod)
+ self.usyms = p.usyms
+
+ def __str__(self):
+ if self.prod:
+ s = '%s -> %s' % (self.name, ' '.join(self.prod))
+ else:
+ s = '%s -> ' % self.name
+ return s
+
+ def __repr__(self):
+ return 'LRItem(' + str(self) + ')'
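+
+# Example: for a Production p with p.prod == ('expr', 'PLUS', 'term'),
+# LRItem(p, 1) has prod == ('expr', '.', 'PLUS', 'term'), lr_index == 1, and
+# str() == 'expr -> expr . PLUS term'. Note that len counts the inserted dot.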
+
+# -----------------------------------------------------------------------------
+# rightmost_terminal()
+#
+# Return the rightmost terminal from a list of symbols. Used in add_production()
+# -----------------------------------------------------------------------------
+def rightmost_terminal(symbols, terminals):
+ i = len(symbols) - 1
+ while i >= 0:
+ if symbols[i] in terminals:
+ return symbols[i]
+ i -= 1
+ return None
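+
+# Example: with terminals {'PLUS', 'TIMES'},
+# rightmost_terminal(['expr', 'PLUS', 'term'], terminals) returns 'PLUS',
+# because 'term' is not a terminal; it returns None if no symbol matches.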
+
+# -----------------------------------------------------------------------------
+# === GRAMMAR CLASS ===
+#
+# The following class represents the contents of the specified grammar along
+# with various computed properties such as first sets, follow sets, LR items, etc.
+# This data is used for critical parts of the table generation process later.
+# -----------------------------------------------------------------------------
+
+class GrammarError(YaccError):
+ pass
+
+class Grammar(object):
+ def __init__(self, terminals):
+ self.Productions = [None] # A list of all of the productions. The first
+ # entry is always reserved for the purpose of
+ # building an augmented grammar
+
+ self.Prodnames = {} # A dictionary mapping the names of nonterminals to a list of all
+ # productions of that nonterminal.
+
+ self.Prodmap = {} # A dictionary that is only used to detect duplicate
+ # productions.
+
+ self.Terminals = {} # A dictionary mapping the names of terminal symbols to a
+ # list of the rules where they are used.
+
+ for term in terminals:
+ self.Terminals[term] = []
+
+ self.Terminals['error'] = []
+
+ self.Nonterminals = {} # A dictionary mapping names of nonterminals to a list
+ # of rule numbers where they are used.
+
+ self.First = {} # A dictionary of precomputed FIRST(x) symbols
+
+ self.Follow = {} # A dictionary of precomputed FOLLOW(x) symbols
+
+ self.Precedence = {} # Precedence rules for each terminal. Contains tuples of the
+ # form ('right',level) or ('nonassoc', level) or ('left',level)
+
+ self.UsedPrecedence = set() # Precedence rules that were actually used by the grammar.
+ # This is only used to provide error checking and to generate
+ # a warning about unused precedence rules.
+
+ self.Start = None # Starting symbol for the grammar
+
+
+ def __len__(self):
+ return len(self.Productions)
+
+ def __getitem__(self, index):
+ return self.Productions[index]
+
+ # -----------------------------------------------------------------------------
+ # set_precedence()
+ #
+ # Sets the precedence for a given terminal. assoc is the associativity such as
+ # 'left','right', or 'nonassoc'. level is a numeric level.
+ #
+ # -----------------------------------------------------------------------------
+
+ def set_precedence(self, term, assoc, level):
+ assert self.Productions == [None], 'Must call set_precedence() before add_production()'
+ if term in self.Precedence:
+ raise GrammarError('Precedence already specified for terminal %r' % term)
+ if assoc not in ['left', 'right', 'nonassoc']:
+ raise GrammarError("Associativity must be one of 'left','right', or 'nonassoc'")
+ self.Precedence[term] = (assoc, level)
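+
+ # Example: grammar.set_precedence('PLUS', 'left', 1) followed by
+ # grammar.set_precedence('TIMES', 'left', 2) gives TIMES the higher
+ # precedence level, mirroring the usual arithmetic conventions.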
+
+ # -----------------------------------------------------------------------------
+ # add_production()
+ #
+ # Given an action function, this function assembles a production rule and
+ # computes its precedence level.
+ #
+ # The production rule is supplied as a list of symbols. For example,
+ # a rule such as 'expr : expr PLUS term' has a production name of 'expr' and
+ # symbols ['expr','PLUS','term'].
+ #
+ # Precedence is determined by the precedence of the right-most terminal
+ # symbol in the rule, or by the precedence of a terminal specified via %prec.
+ #
+ # A variety of error checks are performed to make sure production symbols
+ # are valid and that %prec is used correctly.
+ # -----------------------------------------------------------------------------
+
+ def add_production(self, prodname, syms, func=None, file='', line=0):
+
+ if prodname in self.Terminals:
+ raise GrammarError('%s:%d: Illegal rule name %r. Already defined as a token' % (file, line, prodname))
+ if prodname == 'error':
+ raise GrammarError('%s:%d: Illegal rule name %r. error is a reserved word' % (file, line, prodname))
+ if not _is_identifier.match(prodname):
+ raise GrammarError('%s:%d: Illegal rule name %r' % (file, line, prodname))
+
+ # Look for literal tokens
+ for n, s in enumerate(syms):
+ if s[0] in "'\"":
+ try:
+ c = eval(s)
+ if (len(c) > 1):
+ raise GrammarError('%s:%d: Literal token %s in rule %r may only be a single character' %
+ (file, line, s, prodname))
+ if c not in self.Terminals:
+ self.Terminals[c] = []
+ syms[n] = c
+ continue
+ except SyntaxError:
+ pass
+ if not _is_identifier.match(s) and s != '%prec':
+ raise GrammarError('%s:%d: Illegal name %r in rule %r' % (file, line, s, prodname))
+
+ # Determine the precedence level
+ if '%prec' in syms:
+ if syms[-1] == '%prec':
+ raise GrammarError('%s:%d: Syntax error. Nothing follows %%prec' % (file, line))
+ if syms[-2] != '%prec':
+ raise GrammarError('%s:%d: Syntax error. %%prec can only appear at the end of a grammar rule' %
+ (file, line))
+ precname = syms[-1]
+ prodprec = self.Precedence.get(precname)
+ if not prodprec:
+ raise GrammarError('%s:%d: Nothing known about the precedence of %r' % (file, line, precname))
+ else:
+ self.UsedPrecedence.add(precname)
+ del syms[-2:] # Drop %prec from the rule
+ else:
+ # If no %prec, precedence is determined by the rightmost terminal symbol
+ precname = rightmost_terminal(syms, self.Terminals)
+ prodprec = self.Precedence.get(precname, ('right', 0))
+
+ # See if the rule is already in the rulemap
+ map = '%s -> %s' % (prodname, syms)
+ if map in self.Prodmap:
+ m = self.Prodmap[map]
+ raise GrammarError('%s:%d: Duplicate rule %s. ' % (file, line, m) +
+ 'Previous definition at %s:%d' % (m.file, m.line))
+
+ # From this point on, everything is valid. Create a new Production instance
+ pnumber = len(self.Productions)
+ if prodname not in self.Nonterminals:
+ self.Nonterminals[prodname] = []
+
+ # Add the production number to Terminals and Nonterminals
+ for t in syms:
+ if t in self.Terminals:
+ self.Terminals[t].append(pnumber)
+ else:
+ if t not in self.Nonterminals:
+ self.Nonterminals[t] = []
+ self.Nonterminals[t].append(pnumber)
+
+ # Create a production and add it to the list of productions
+ p = Production(pnumber, prodname, syms, prodprec, func, file, line)
+ self.Productions.append(p)
+ self.Prodmap[map] = p
+
+ # Add to the global productions list
+ try:
+ self.Prodnames[prodname].append(p)
+ except KeyError:
+ self.Prodnames[prodname] = [p]
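+
+ # Example (hypothetical function name): for the rule 'expr : expr PLUS term',
+ # grammar.add_production('expr', ['expr', 'PLUS', 'term'], func='p_expr_plus')
+ # registers 'expr -> expr PLUS term'; its precedence comes from PLUS, the
+ # rightmost terminal, unless the rule carries a %prec override.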
+
+ # -----------------------------------------------------------------------------
+ # set_start()
+ #
+ # Sets the starting symbol and creates the augmented grammar. Production
+ # rule 0 is S' -> start where start is the start symbol.
+ # -----------------------------------------------------------------------------
+
+ def set_start(self, start=None):
+ if not start:
+ start = self.Productions[1].name
+ if start not in self.Nonterminals:
+ raise GrammarError('start symbol %s undefined' % start)
+ self.Productions[0] = Production(0, "S'", [start])
+ self.Nonterminals[start].append(0)
+ self.Start = start
+
+ # -----------------------------------------------------------------------------
+ # find_unreachable()
+ #
+ # Find all of the nonterminal symbols that can't be reached from the starting
+ # symbol. Returns a list of nonterminals that can't be reached.
+ # -----------------------------------------------------------------------------
+
+ def find_unreachable(self):
+
+ # Mark all symbols that are reachable from a symbol s
+ def mark_reachable_from(s):
+ if s in reachable:
+ return
+ reachable.add(s)
+ for p in self.Prodnames.get(s, []):
+ for r in p.prod:
+ mark_reachable_from(r)
+
+ reachable = set()
+ mark_reachable_from(self.Productions[0].prod[0])
+ return [s for s in self.Nonterminals if s not in reachable]
+
+ # -----------------------------------------------------------------------------
+ # infinite_cycles()
+ #
+ # This function looks at the various parsing rules and tries to detect
+ # infinite recursion cycles (grammar rules where there is no possible way
+ # to derive a string of only terminals).
+ # -----------------------------------------------------------------------------
+
+ def infinite_cycles(self):
+ terminates = {}
+
+ # Terminals:
+ for t in self.Terminals:
+ terminates[t] = True
+
+ terminates['$end'] = True
+
+ # Nonterminals:
+
+ # Initialize to false:
+ for n in self.Nonterminals:
+ terminates[n] = False
+
+ # Then propagate termination until no change:
+ while True:
+ some_change = False
+ for (n, pl) in self.Prodnames.items():
+ # Nonterminal n terminates iff any of its productions terminates.
+ for p in pl:
+ # Production p terminates iff all of its rhs symbols terminate.
+ for s in p.prod:
+ if not terminates[s]:
+ # The symbol s does not terminate,
+ # so production p does not terminate.
+ p_terminates = False
+ break
+ else:
+ # didn't break from the loop,
+ # so every symbol s terminates
+ # so production p terminates.
+ p_terminates = True
+
+ if p_terminates:
+ # symbol n terminates!
+ if not terminates[n]:
+ terminates[n] = True
+ some_change = True
+ # Don't need to consider any more productions for this n.
+ break
+
+ if not some_change:
+ break
+
+ infinite = []
+ for (s, term) in terminates.items():
+ if not term:
+ if s not in self.Prodnames and s not in self.Terminals and s != 'error':
+ # s is used-but-not-defined, and we've already warned of that,
+ # so it would be overkill to say that it's also non-terminating.
+ pass
+ else:
+ infinite.append(s)
+
+ return infinite
+
+ # -----------------------------------------------------------------------------
+ # undefined_symbols()
+ #
+ # Find all symbols that were used in the grammar, but not defined as tokens or
+ # grammar rules. Returns a list of tuples (sym, prod) where sym is the symbol
+ # and prod is the production where the symbol was used.
+ # -----------------------------------------------------------------------------
+ def undefined_symbols(self):
+ result = []
+ for p in self.Productions:
+ if not p:
+ continue
+
+ for s in p.prod:
+ if s not in self.Prodnames and s not in self.Terminals and s != 'error':
+ result.append((s, p))
+ return result
+
+ # -----------------------------------------------------------------------------
+ # unused_terminals()
+ #
+ # Find all terminals that were defined, but not used by the grammar. Returns
+ # a list of the unused terminal symbols.
+ # -----------------------------------------------------------------------------
+ def unused_terminals(self):
+ unused_tok = []
+ for s, v in self.Terminals.items():
+ if s != 'error' and not v:
+ unused_tok.append(s)
+
+ return unused_tok
+
+ # ------------------------------------------------------------------------------
+ # unused_rules()
+ #
+ # Find all grammar rules that were defined, but not used (maybe not reachable).
+ # Returns a list of productions.
+ # ------------------------------------------------------------------------------
+
+ def unused_rules(self):
+ unused_prod = []
+ for s, v in self.Nonterminals.items():
+ if not v:
+ p = self.Prodnames[s][0]
+ unused_prod.append(p)
+ return unused_prod
+
+ # -----------------------------------------------------------------------------
+ # unused_precedence()
+ #
+ # Returns a list of tuples (term,precedence) corresponding to precedence
+ # rules that were never used by the grammar. term is the name of the terminal
+ # on which precedence was applied and precedence is a string such as 'left' or
+ # 'right' corresponding to the type of precedence.
+ # -----------------------------------------------------------------------------
+
+ def unused_precedence(self):
+ unused = []
+ for termname in self.Precedence:
+ if not (termname in self.Terminals or termname in self.UsedPrecedence):
+ unused.append((termname, self.Precedence[termname][0]))
+
+ return unused
+
+ # -------------------------------------------------------------------------
+ # _first()
+ #
+ # Compute the value of FIRST1(beta) where beta is a tuple of symbols.
+ #
+ # During execution of compute_first(), the result may be incomplete.
+ # Afterward (e.g., when called from compute_follow()), it will be complete.
+ # -------------------------------------------------------------------------
+ def _first(self, beta):
+
+ # We are computing First(x1,x2,x3,...,xn)
+ result = []
+ for x in beta:
+ x_produces_empty = False
+
+ # Add all symbols of First[x] other than the '' (empty) marker to the result.
+ for f in self.First[x]:
+ if f == '':
+ x_produces_empty = True
+ else:
+ if f not in result:
+ result.append(f)
+
+ if x_produces_empty:
+ # We have to consider the next x in beta,
+ # i.e. stay in the loop.
+ pass
+ else:
+ # We don't have to consider any further symbols in beta.
+ break
+ else:
+ # There was no 'break' from the loop,
+ # so x_produces_empty was true for all x in beta,
+ # so beta produces empty as well.
+ result.append('')
+
+ return result
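+
+ # Example: suppose First['A'] == ['a', ''] (A derives 'a' or empty) and
+ # First['b'] == ['b']. Then self._first(('A', 'b')) == ['a', 'b']: 'a' comes
+ # from A and, because A can produce empty, 'b' is consulted as well. The ''
+ # marker is appended only when every symbol in beta can produce empty.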
+
+ # -------------------------------------------------------------------------
+ # compute_first()
+ #
+ # Compute the value of FIRST1(X) for all symbols
+ # -------------------------------------------------------------------------
+ def compute_first(self):
+ if self.First:
+ return self.First
+
+ # Terminals:
+ for t in self.Terminals:
+ self.First[t] = [t]
+
+ self.First['$end'] = ['$end']
+
+ # Nonterminals:
+
+ # Initialize to the empty set:
+ for n in self.Nonterminals:
+ self.First[n] = []
+
+ # Then propagate symbols until no change:
+ while True:
+ some_change = False
+ for n in self.Nonterminals:
+ for p in self.Prodnames[n]:
+ for f in self._first(p.prod):
+ if f not in self.First[n]:
+ self.First[n].append(f)
+ some_change = True
+ if not some_change:
+ break
+
+ return self.First
+
+ # ---------------------------------------------------------------------
+ # compute_follow()
+ #
+ # Computes all of the follow sets for every non-terminal symbol. The
+ # follow set is the set of all symbols that might follow a given
+ # non-terminal. See the Dragon book, 2nd Ed. p. 189.
+ # ---------------------------------------------------------------------
+ def compute_follow(self, start=None):
+ # If already computed, return the result
+ if self.Follow:
+ return self.Follow
+
+ # If first sets not computed yet, do that first.
+ if not self.First:
+ self.compute_first()
+
+ # Add '$end' to the follow list of the start symbol
+ for k in self.Nonterminals:
+ self.Follow[k] = []
+
+ if not start:
+ start = self.Productions[1].name
+
+ self.Follow[start] = ['$end']
+
+ while True:
+ didadd = False
+ for p in self.Productions[1:]:
+ # Here is the production set
+ for i, B in enumerate(p.prod):
+ if B in self.Nonterminals:
+ # Okay. We got a non-terminal in a production
+ fst = self._first(p.prod[i+1:])
+ hasempty = False
+ for f in fst:
+ if f != '' and f not in self.Follow[B]:
+ self.Follow[B].append(f)
+ didadd = True
+ if f == '':
+ hasempty = True
+ if hasempty or i == (len(p.prod)-1):
+ # Add elements of follow(a) to follow(b)
+ for f in self.Follow[p.name]:
+ if f not in self.Follow[B]:
+ self.Follow[B].append(f)
+ didadd = True
+ if not didadd:
+ break
+ return self.Follow
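+
+ # Example (toy grammar): with productions S -> A 'b' and A -> 'a', the loop
+ # above adds 'b' to Follow['A'] (the terminal that can follow A inside S),
+ # while Follow['S'] stays ['$end'] as seeded for the start symbol.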
+
+
+ # -----------------------------------------------------------------------------
+ # build_lritems()
+ #
+ # This function walks the list of productions and builds a complete set of the
+ # LR items. The LR items are stored in two ways: First, they are uniquely
+ # numbered and placed in the list _lritems. Second, a linked list of LR items
+ # is built for each production. For example:
+ #
+ # E -> E PLUS E
+ #
+ # Creates the list
+ #
+ # [E -> . E PLUS E, E -> E . PLUS E, E -> E PLUS . E, E -> E PLUS E . ]
+ # -----------------------------------------------------------------------------
+
+ def build_lritems(self):
+ for p in self.Productions:
+ lastlri = p
+ i = 0
+ lr_items = []
+ while True:
+ if i > len(p):
+ lri = None
+ else:
+ lri = LRItem(p, i)
+ # Precompute the list of productions immediately following
+ try:
+ lri.lr_after = self.Prodnames[lri.prod[i+1]]
+ except (IndexError, KeyError):
+ lri.lr_after = []
+ try:
+ lri.lr_before = lri.prod[i-1]
+ except IndexError:
+ lri.lr_before = None
+
+ lastlri.lr_next = lri
+ if not lri:
+ break
+ lr_items.append(lri)
+ lastlri = lri
+ i += 1
+ p.lr_items = lr_items
+
+# -----------------------------------------------------------------------------
+# == Class LRTable ==
+#
+# This class represents a basic table of LR parsing information.
+# Methods for generating the tables are not defined here. They are defined
+# in the derived class LRGeneratedTable.
+# -----------------------------------------------------------------------------
+
+class VersionError(YaccError):
+ pass
+
+class LRTable(object):
+ def __init__(self):
+ self.lr_action = None
+ self.lr_goto = None
+ self.lr_productions = None
+ self.lr_method = None
+
+ def read_table(self, module):
+ if isinstance(module, types.ModuleType):
+ parsetab = module
+ else:
+ exec('import %s' % module)
+ parsetab = sys.modules[module]
+
+ if parsetab._tabversion != __tabversion__:
+ raise VersionError('yacc table file version is out of date')
+
+ self.lr_action = parsetab._lr_action
+ self.lr_goto = parsetab._lr_goto
+
+ self.lr_productions = []
+ for p in parsetab._lr_productions:
+ self.lr_productions.append(MiniProduction(*p))
+
+ self.lr_method = parsetab._lr_method
+ return parsetab._lr_signature
+
+ def read_pickle(self, filename):
+ try:
+ import cPickle as pickle
+ except ImportError:
+ import pickle
+
+ if not os.path.exists(filename):
+ raise ImportError
+
+ in_f = open(filename, 'rb')
+
+ tabversion = pickle.load(in_f)
+ if tabversion != __tabversion__:
+ raise VersionError('yacc table file version is out of date')
+ self.lr_method = pickle.load(in_f)
+ signature = pickle.load(in_f)
+ self.lr_action = pickle.load(in_f)
+ self.lr_goto = pickle.load(in_f)
+ productions = pickle.load(in_f)
+
+ self.lr_productions = []
+ for p in productions:
+ self.lr_productions.append(MiniProduction(*p))
+
+ in_f.close()
+ return signature
+
+ # Bind all production function names to callable objects in pdict
+ def bind_callables(self, pdict):
+ for p in self.lr_productions:
+ p.bind(pdict)
+
+
+# -----------------------------------------------------------------------------
+# === LR Generator ===
+#
+# The following classes and functions are used to generate LR parsing tables on
+# a grammar.
+# -----------------------------------------------------------------------------
+
+# -----------------------------------------------------------------------------
+# digraph()
+# traverse()
+#
+# The following two functions are used to compute set valued functions
+# of the form:
+#
+# F(x) = F'(x) U U{F(y) | x R y}
+#
+# This is used to compute the values of Read() sets as well as FOLLOW sets
+# in LALR(1) generation.
+#
+# Inputs: X - An input set
+# R - A relation
+# FP - Set-valued function
+# ------------------------------------------------------------------------------
+
+def digraph(X, R, FP):
+ N = {}
+ for x in X:
+ N[x] = 0
+ stack = []
+ F = {}
+ for x in X:
+ if N[x] == 0:
+ traverse(x, N, stack, F, X, R, FP)
+ return F
+
+def traverse(x, N, stack, F, X, R, FP):
+ stack.append(x)
+ d = len(stack)
+ N[x] = d
+ F[x] = FP(x) # F(X) <- F'(x)
+
+ rel = R(x) # Get y's related to x
+ for y in rel:
+ if N[y] == 0:
+ traverse(y, N, stack, F, X, R, FP)
+ N[x] = min(N[x], N[y])
+ for a in F.get(y, []):
+ if a not in F[x]:
+ F[x].append(a)
+ if N[x] == d:
+ N[stack[-1]] = MAXINT
+ F[stack[-1]] = F[x]
+ element = stack.pop()
+ while element != x:
+ N[stack[-1]] = MAXINT
+ F[stack[-1]] = F[x]
+ element = stack.pop()
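+
+# Example: with X = ['a', 'b'], R = lambda x: ['b'] if x == 'a' else [], and
+# FP = lambda x: [x], digraph(X, R, FP) returns {'a': ['a', 'b'], 'b': ['b']},
+# i.e. F(a) = F'(a) U F(b) per the formula above.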
+
+class LALRError(YaccError):
+ pass
+
+# -----------------------------------------------------------------------------
+# == LRGeneratedTable ==
+#
+# This class implements the LR table generation algorithm. There are no
+# public methods except for write()
+# -----------------------------------------------------------------------------
+
+class LRGeneratedTable(LRTable):
+ def __init__(self, grammar, method='LALR', log=None):
+ if method not in ['SLR', 'LALR']:
+ raise LALRError('Unsupported method %s' % method)
+
+ self.grammar = grammar
+ self.lr_method = method
+
+ # Set up the logger
+ if not log:
+ log = NullLogger()
+ self.log = log
+
+ # Internal attributes
+ self.lr_action = {} # Action table
+ self.lr_goto = {} # Goto table
+ self.lr_productions = grammar.Productions # Copy of grammar Production array
+ self.lr_goto_cache = {} # Cache of computed gotos
+ self.lr0_cidhash = {} # Cache of closures
+
+ self._add_count = 0 # Internal counter used to detect cycles
+
+ # Diagnostic information filled in by the table generator
+ self.sr_conflict = 0
+ self.rr_conflict = 0
+ self.conflicts = [] # List of conflicts
+
+ self.sr_conflicts = []
+ self.rr_conflicts = []
+
+ # Build the tables
+ self.grammar.build_lritems()
+ self.grammar.compute_first()
+ self.grammar.compute_follow()
+ self.lr_parse_table()
+
+ # Compute the LR(0) closure operation on I, where I is a set of LR(0) items.
+
+ def lr0_closure(self, I):
+ self._add_count += 1
+
+ # Add everything in I to J
+ J = I[:]
+ didadd = True
+ while didadd:
+ didadd = False
+ for j in J:
+ for x in j.lr_after:
+ if getattr(x, 'lr0_added', 0) == self._add_count:
+ continue
+ # Add B --> .G to J
+ J.append(x.lr_next)
+ x.lr0_added = self._add_count
+ didadd = True
+
+ return J
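+
+ # Example: starting from I = [S' -> . expr], if 'expr' has the productions
+ # 'expr -> expr PLUS term' and 'expr -> term', the closure adds the items
+ # 'expr -> . expr PLUS term' and 'expr -> . term' (and, transitively, the
+ # dotted items for the productions of 'term').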
+
+ # Compute the LR(0) goto function goto(I,X) where I is a set
+ # of LR(0) items and X is a grammar symbol. This function is written
+ # in a way that guarantees uniqueness of the generated goto sets
+ # (i.e. the same goto set will never be returned as two different Python
+ # objects). With uniqueness, we can later do fast set comparisons using
+ # id(obj) instead of element-wise comparison.
+
+ def lr0_goto(self, I, x):
+ # First we look for a previously cached entry
+ g = self.lr_goto_cache.get((id(I), x))
+ if g:
+ return g
+
+ # Now we generate the goto set in a way that guarantees uniqueness
+ # of the result
+
+ s = self.lr_goto_cache.get(x)
+ if not s:
+ s = {}
+ self.lr_goto_cache[x] = s
+
+ gs = []
+ for p in I:
+ n = p.lr_next
+ if n and n.lr_before == x:
+ s1 = s.get(id(n))
+ if not s1:
+ s1 = {}
+ s[id(n)] = s1
+ gs.append(n)
+ s = s1
+ g = s.get('$end')
+ if not g:
+ if gs:
+ g = self.lr0_closure(gs)
+ s['$end'] = g
+ else:
+ s['$end'] = gs
+ self.lr_goto_cache[(id(I), x)] = g
+ return g
+
+ # Compute the LR(0) sets of item function
+ def lr0_items(self):
+ C = [self.lr0_closure([self.grammar.Productions[0].lr_next])]
+ i = 0
+ for I in C:
+ self.lr0_cidhash[id(I)] = i
+ i += 1
+
+ # Loop over the items in C and each grammar symbols
+ i = 0
+ while i < len(C):
+ I = C[i]
+ i += 1
+
+ # Collect all of the symbols that could possibly be in the goto(I,X) sets
+ asyms = {}
+ for ii in I:
+ for s in ii.usyms:
+ asyms[s] = None
+
+ for x in asyms:
+ g = self.lr0_goto(I, x)
+ if not g or id(g) in self.lr0_cidhash:
+ continue
+ self.lr0_cidhash[id(g)] = len(C)
+ C.append(g)
+
+ return C
+
+ # -----------------------------------------------------------------------------
+ # ==== LALR(1) Parsing ====
+ #
+ # LALR(1) parsing is almost exactly the same as SLR except that instead of
+ # relying upon Follow() sets when performing reductions, a more selective
+ # lookahead set that incorporates the state of the LR(0) machine is utilized.
+ # Thus, we mainly just have to focus on calculating the lookahead sets.
+ #
+ # The method used here is due to DeRemer and Pennello (1982).
+ #
+ # DeRemer, F. L., and T. J. Pennello: "Efficient Computation of LALR(1)
+ # Lookahead Sets", ACM Transactions on Programming Languages and Systems,
+ # Vol. 4, No. 4, Oct. 1982, pp. 615-649
+ #
+ # Further details can also be found in:
+ #
+ # J. Tremblay and P. Sorenson, "The Theory and Practice of Compiler Writing",
+ # McGraw-Hill Book Company, (1985).
+ #
+ # -----------------------------------------------------------------------------
+
+ # -----------------------------------------------------------------------------
+ # compute_nullable_nonterminals()
+ #
+ # Creates a set containing all of the non-terminals that might produce
+ # an empty production.
+ # -----------------------------------------------------------------------------
+
+ def compute_nullable_nonterminals(self):
+ nullable = set()
+ num_nullable = 0
+ while True:
+ for p in self.grammar.Productions[1:]:
+ if p.len == 0:
+ nullable.add(p.name)
+ continue
+ for t in p.prod:
+ if t not in nullable:
+ break
+ else:
+ nullable.add(p.name)
+ if len(nullable) == num_nullable:
+ break
+ num_nullable = len(nullable)
+ return nullable
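+
+ # Example: a production 'empty :' (p.len == 0) makes 'empty' nullable, and a
+ # production 'opt_args : empty' then makes 'opt_args' nullable as well, since
+ # every symbol on its right-hand side is nullable.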
+
+ # -----------------------------------------------------------------------------
+ # find_nonterminal_trans(C)
+ #
+ # Given a set of LR(0) items, this function finds all of the non-terminal
+ # transitions. These are transitions in which a dot appears immediately before
+ # a non-terminal. Returns a list of tuples of the form (state,N) where state
+ # is the state number and N is the nonterminal symbol.
+ #
+ # The input C is the set of LR(0) items.
+ # -----------------------------------------------------------------------------
+
+ def find_nonterminal_transitions(self, C):
+ trans = []
+ for stateno, state in enumerate(C):
+ for p in state:
+ if p.lr_index < p.len - 1:
+ t = (stateno, p.prod[p.lr_index+1])
+ if t[1] in self.grammar.Nonterminals:
+ if t not in trans:
+ trans.append(t)
+ return trans
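+
+ # Example: a state containing the item 'S -> . expr' contributes
+ # (state, 'expr'), while 'expr -> expr . PLUS term' contributes nothing
+ # because PLUS is a terminal.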
+
+ # -----------------------------------------------------------------------------
+ # dr_relation()
+ #
+ # Computes the DR(p,A) relationships for non-terminal transitions. The input
+ # is a tuple (state,N) where state is a number and N is a nonterminal symbol.
+ #
+ # Returns a list of terminals.
+ # -----------------------------------------------------------------------------
+
+ def dr_relation(self, C, trans, nullable):
+ dr_set = {}
+ state, N = trans
+ terms = []
+
+ g = self.lr0_goto(C[state], N)
+ for p in g:
+ if p.lr_index < p.len - 1:
+ a = p.prod[p.lr_index+1]
+ if a in self.grammar.Terminals:
+ if a not in terms:
+ terms.append(a)
+
+ # This extra bit is to handle the start state
+ if state == 0 and N == self.grammar.Productions[0].prod[0]:
+ terms.append('$end')
+
+ return terms
+
+ # -----------------------------------------------------------------------------
+ # reads_relation()
+ #
+ # Computes the READS() relation (p,A) READS (t,C).
+ # -----------------------------------------------------------------------------
+
+ def reads_relation(self, C, trans, empty):
+ # Look for empty transitions
+ rel = []
+ state, N = trans
+
+ g = self.lr0_goto(C[state], N)
+ j = self.lr0_cidhash.get(id(g), -1)
+ for p in g:
+ if p.lr_index < p.len - 1:
+ a = p.prod[p.lr_index + 1]
+ if a in empty:
+ rel.append((j, a))
+
+ return rel
+
+ # -----------------------------------------------------------------------------
+ # compute_lookback_includes()
+ #
+ # Determines the lookback and includes relations
+ #
+ # LOOKBACK:
+ #
+ # This relation is determined by running the LR(0) state machine forward.
+ # For example, starting with a production "N : . A B C", we run it forward
+ # to obtain "N : A B C ." We then build a relationship between this final
+ # state and the starting state. These relationships are stored in a dictionary
+ # lookdict.
+ #
+ # INCLUDES:
+ #
+ # Computes the INCLUDE() relation (p,A) INCLUDES (p',B).
+ #
+ # This relation is used to determine non-terminal transitions that occur
+ # inside of other non-terminal transition states. (p,A) INCLUDES (p', B)
+ # if the following holds:
+ #
+ # B -> LAT, where T -> epsilon and p' -L-> p
+ #
+ # L is essentially a prefix (which may be empty), T is a suffix that must be
+ # able to derive an empty string. State p' must lead to state p with the string L.
+ #
+ # -----------------------------------------------------------------------------
+
+ def compute_lookback_includes(self, C, trans, nullable):
+ lookdict = {} # Dictionary of lookback relations
+ includedict = {} # Dictionary of include relations
+
+ # Make a dictionary of non-terminal transitions
+ dtrans = {}
+ for t in trans:
+ dtrans[t] = 1
+
+ # Loop over all transitions and compute lookbacks and includes
+ for state, N in trans:
+ lookb = []
+ includes = []
+ for p in C[state]:
+ if p.name != N:
+ continue
+
+ # Okay, we have a name match. We now follow the production all the way
+ # through the state machine until we get the . on the right hand side
+
+ lr_index = p.lr_index
+ j = state
+ while lr_index < p.len - 1:
+ lr_index = lr_index + 1
+ t = p.prod[lr_index]
+
+ # Check to see if this symbol and state are a non-terminal transition
+ if (j, t) in dtrans:
+ # Yes. Okay, there is some chance that this is an includes relation;
+ # the only way to know for certain is to check whether the rest of
+ # the production derives the empty string
+
+ li = lr_index + 1
+ while li < p.len:
+ if p.prod[li] in self.grammar.Terminals:
+ break # No, forget it
+ if p.prod[li] not in nullable:
+ break
+ li = li + 1
+ else:
+ # Appears to be a relation between (j,t) and (state,N)
+ includes.append((j, t))
+
+ g = self.lr0_goto(C[j], t) # Go to next set
+ j = self.lr0_cidhash.get(id(g), -1) # Go to next state
+
+ # When we get here, j is the final state; now we have to locate the production
+ for r in C[j]:
+ if r.name != p.name:
+ continue
+ if r.len != p.len:
+ continue
+ i = 0
+ # This loop is comparing a production ". A B C" with "A B C ."
+ while i < r.lr_index:
+ if r.prod[i] != p.prod[i+1]:
+ break
+ i = i + 1
+ else:
+ lookb.append((j, r))
+ for i in includes:
+ if i not in includedict:
+ includedict[i] = []
+ includedict[i].append((state, N))
+ lookdict[(state, N)] = lookb
+
+ return lookdict, includedict
+
+ # -----------------------------------------------------------------------------
+ # compute_read_sets()
+ #
+ # Given a set of LR(0) items, this function computes the read sets.
+ #
+ # Inputs: C = Set of LR(0) items
+ # ntrans = Set of nonterminal transitions
+ # nullable = Set of nullable nonterminals
+ #
+ # Returns a dictionary mapping each nonterminal transition to its read set
+ # -----------------------------------------------------------------------------
+
+ def compute_read_sets(self, C, ntrans, nullable):
+ FP = lambda x: self.dr_relation(C, x, nullable)
+ R = lambda x: self.reads_relation(C, x, nullable)
+ F = digraph(ntrans, R, FP)
+ return F
+
+ # -----------------------------------------------------------------------------
+ # compute_follow_sets()
+ #
+ # Given a set of LR(0) items, a set of non-terminal transitions, a readset,
+ # and an include set, this function computes the follow sets
+ #
+ # Follow(p,A) = Read(p,A) U U {Follow(p',B) | (p,A) INCLUDES (p',B)}
+ #
+ # Inputs:
+ # ntrans = Set of nonterminal transitions
+ # readsets = Readset (previously computed)
+ # inclsets = Include sets (previously computed)
+ #
+ # Returns a dictionary mapping each nonterminal transition to its follow set
+ # -----------------------------------------------------------------------------
+
+ def compute_follow_sets(self, ntrans, readsets, inclsets):
+ FP = lambda x: readsets[x]
+ R = lambda x: inclsets.get(x, [])
+ F = digraph(ntrans, R, FP)
+ return F
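+
+ # The digraph() solver propagates the base sets along the relation,
+ # computing the smallest solution of
+ #
+ # Follow(p,A) = Read(p,A) U { Follow(p',B) : (p,A) INCLUDES (p',B) }
+ #
+ # so if (p,A) INCLUDES (p',B), every terminal in Follow(p',B) also
+ # lands in Follow(p,A).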
+
+ # -----------------------------------------------------------------------------
+ # add_lookaheads()
+ #
+ # Attaches the lookahead symbols to grammar rules.
+ #
+ # Inputs: lookbacks - Set of lookback relations
+ # followset - Computed follow set
+ #
+ # This function directly attaches the lookaheads to productions contained
+ # in the lookbacks set
+ # -----------------------------------------------------------------------------
+
+ def add_lookaheads(self, lookbacks, followset):
+ for trans, lb in lookbacks.items():
+ # Loop over productions in lookback
+ for state, p in lb:
+ if state not in p.lookaheads:
+ p.lookaheads[state] = []
+ f = followset.get(trans, [])
+ for a in f:
+ if a not in p.lookaheads[state]:
+ p.lookaheads[state].append(a)
+
+ # -----------------------------------------------------------------------------
+ # add_lalr_lookaheads()
+ #
+ # This function does all of the work of adding lookahead information for use
+ # with LALR parsing
+ # -----------------------------------------------------------------------------
+
+ def add_lalr_lookaheads(self, C):
+ # Determine all of the nullable nonterminals
+ nullable = self.compute_nullable_nonterminals()
+
+ # Find all non-terminal transitions
+ trans = self.find_nonterminal_transitions(C)
+
+ # Compute read sets
+ readsets = self.compute_read_sets(C, trans, nullable)
+
+ # Compute lookback/includes relations
+ lookd, included = self.compute_lookback_includes(C, trans, nullable)
+
+ # Compute LALR FOLLOW sets
+ followsets = self.compute_follow_sets(trans, readsets, included)
+
+ # Add all of the lookaheads
+ self.add_lookaheads(lookd, followsets)
+
+ # -----------------------------------------------------------------------------
+ # lr_parse_table()
+ #
+ # This function constructs the parse tables for SLR or LALR
+ # -----------------------------------------------------------------------------
+ def lr_parse_table(self):
+ Productions = self.grammar.Productions
+ Precedence = self.grammar.Precedence
+ goto = self.lr_goto # Goto array
+ action = self.lr_action # Action array
+ log = self.log # Logger for output
+
+ actionp = {} # Action production array (temporary)
+
+ log.info('Parsing method: %s', self.lr_method)
+
+ # Step 1: Construct C = { I0, I1, ... IN}, collection of LR(0) items
+ # This determines the number of states
+
+ C = self.lr0_items()
+
+ if self.lr_method == 'LALR':
+ self.add_lalr_lookaheads(C)
+
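+ # Action table encoding used below: st_action[a] > 0 means shift and
+ # go to state st_action[a]; st_action[a] < 0 means reduce by rule
+ # number -st_action[a]; st_action[a] == 0 means accept; None marks a
+ # nonassoc entry that should be treated as an error.
+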
+ # Build the parser table, state by state
+ st = 0
+ for I in C:
+ # Loop over each production in I
+ actlist = [] # List of actions
+ st_action = {}
+ st_actionp = {}
+ st_goto = {}
+ log.info('')
+ log.info('state %d', st)
+ log.info('')
+ for p in I:
+ log.info(' (%d) %s', p.number, p)
+ log.info('')
+
+ for p in I:
+ if p.len == p.lr_index + 1:
+ if p.name == "S'":
+ # Start symbol. Accept!
+ st_action['$end'] = 0
+ st_actionp['$end'] = p
+ else:
+ # We are at the end of a production. Reduce!
+ if self.lr_method == 'LALR':
+ laheads = p.lookaheads[st]
+ else:
+ laheads = self.grammar.Follow[p.name]
+ for a in laheads:
+ actlist.append((a, p, 'reduce using rule %d (%s)' % (p.number, p)))
+ r = st_action.get(a)
+ if r is not None:
+ # Whoa. Have a shift/reduce or reduce/reduce conflict
+ if r > 0:
+ # Need to decide on shift or reduce here.
+ # By default we favor shifting, but the
+ # precedence rules below may force a reduce.
+
+ # Shift precedence comes from the token
+ sprec, slevel = Precedence.get(a, ('right', 0))
+
+ # Reduce precedence comes from rule being reduced (p)
+ rprec, rlevel = Productions[p.number].prec
+
+ if (slevel < rlevel) or ((slevel == rlevel) and (rprec == 'left')):
+ # We really need to reduce here.
+ st_action[a] = -p.number
+ st_actionp[a] = p
+ if not slevel and not rlevel:
+ log.info(' ! shift/reduce conflict for %s resolved as reduce', a)
+ self.sr_conflicts.append((st, a, 'reduce'))
+ Productions[p.number].reduced += 1
+ elif (slevel == rlevel) and (rprec == 'nonassoc'):
+ st_action[a] = None
+ else:
+ # Hmmm. Guess we'll keep the shift
+ if not rlevel:
+ log.info(' ! shift/reduce conflict for %s resolved as shift', a)
+ self.sr_conflicts.append((st, a, 'shift'))
+ elif r < 0:
+ # Reduce/reduce conflict. In this case, we favor the rule
+ # that was defined first in the grammar file
+ oldp = Productions[-r]
+ pp = Productions[p.number]
+ if oldp.line > pp.line:
+ st_action[a] = -p.number
+ st_actionp[a] = p
+ chosenp, rejectp = pp, oldp
+ Productions[p.number].reduced += 1
+ Productions[oldp.number].reduced -= 1
+ else:
+ chosenp, rejectp = oldp, pp
+ self.rr_conflicts.append((st, chosenp, rejectp))
+ log.info(' ! reduce/reduce conflict for %s resolved using rule %d (%s)',
+ a, st_actionp[a].number, st_actionp[a])
+ else:
+ raise LALRError('Unknown conflict in state %d' % st)
+ else:
+ st_action[a] = -p.number
+ st_actionp[a] = p
+ Productions[p.number].reduced += 1
+ else:
+ i = p.lr_index
+ a = p.prod[i+1] # Get symbol right after the "."
+ if a in self.grammar.Terminals:
+ g = self.lr0_goto(I, a)
+ j = self.lr0_cidhash.get(id(g), -1)
+ if j >= 0:
+ # We are in a shift state
+ actlist.append((a, p, 'shift and go to state %d' % j))
+ r = st_action.get(a)
+ if r is not None:
+ # Whoa. Have a shift/reduce or shift/shift conflict
+ if r > 0:
+ if r != j:
+ raise LALRError('Shift/shift conflict in state %d' % st)
+ elif r < 0:
+ # Do a precedence check.
+ # - if precedence of reduce rule is higher, we reduce.
+ # - if precedence of reduce is same and left assoc, we reduce.
+ # - otherwise we shift
+
+ # Shift precedence comes from the token
+ sprec, slevel = Precedence.get(a, ('right', 0))
+
+ # Reduce precedence comes from the rule that could have been reduced
+ rprec, rlevel = Productions[st_actionp[a].number].prec
+
+ if (slevel > rlevel) or ((slevel == rlevel) and (rprec == 'right')):
+ # We decide to shift here... highest precedence to shift
+ Productions[st_actionp[a].number].reduced -= 1
+ st_action[a] = j
+ st_actionp[a] = p
+ if not rlevel:
+ log.info(' ! shift/reduce conflict for %s resolved as shift', a)
+ self.sr_conflicts.append((st, a, 'shift'))
+ elif (slevel == rlevel) and (rprec == 'nonassoc'):
+ st_action[a] = None
+ else:
+ # Hmmm. Guess we'll keep the reduce
+ if not slevel and not rlevel:
+ log.info(' ! shift/reduce conflict for %s resolved as reduce', a)
+ self.sr_conflicts.append((st, a, 'reduce'))
+
+ else:
+ raise LALRError('Unknown conflict in state %d' % st)
+ else:
+ st_action[a] = j
+ st_actionp[a] = p
+
+ # Print the actions associated with each terminal
+ _actprint = {}
+ for a, p, m in actlist:
+ if a in st_action:
+ if p is st_actionp[a]:
+ log.info(' %-15s %s', a, m)
+ _actprint[(a, m)] = 1
+ log.info('')
+ # Print the actions that were not used. (debugging)
+ not_used = 0
+ for a, p, m in actlist:
+ if a in st_action:
+ if p is not st_actionp[a]:
+ if (a, m) not in _actprint:
+ log.debug(' ! %-15s [ %s ]', a, m)
+ not_used = 1
+ _actprint[(a, m)] = 1
+ if not_used:
+ log.debug('')
+
+ # Construct the goto table for this state
+
+ nkeys = {}
+ for ii in I:
+ for s in ii.usyms:
+ if s in self.grammar.Nonterminals:
+ nkeys[s] = None
+ for n in nkeys:
+ g = self.lr0_goto(I, n)
+ j = self.lr0_cidhash.get(id(g), -1)
+ if j >= 0:
+ st_goto[n] = j
+ log.info(' %-30s shift and go to state %d', n, j)
+
+ action[st] = st_action
+ actionp[st] = st_actionp
+ goto[st] = st_goto
+ st += 1
+
+ # -----------------------------------------------------------------------------
+ # write_table()
+ #
+ # This function writes the LR parsing tables to a Python module file
+ # -----------------------------------------------------------------------------
+
+ def write_table(self, tabmodule, outputdir='', signature=''):
+ if isinstance(tabmodule, types.ModuleType):
+ raise IOError("Won't overwrite existing tabmodule")
+
+ basemodulename = tabmodule.split('.')[-1]
+ filename = os.path.join(outputdir, basemodulename) + '.py'
+ try:
+ f = open(filename, 'w')
+
+ f.write('''
+# %s
+# This file is automatically generated. Do not edit.
+_tabversion = %r
+
+_lr_method = %r
+
+_lr_signature = %r
+ ''' % (os.path.basename(filename), __tabversion__, self.lr_method, signature))
+
+ # Change smaller to 0 to go back to original tables
+ smaller = 1
+
+ # Factor out names to try and make the tables smaller
+ if smaller:
+ items = {}
+
+ for s, nd in self.lr_action.items():
+ for name, v in nd.items():
+ i = items.get(name)
+ if not i:
+ i = ([], [])
+ items[name] = i
+ i[0].append(s)
+ i[1].append(v)
+
+ f.write('\n_lr_action_items = {')
+ for k, v in items.items():
+ f.write('%r:([' % k)
+ for i in v[0]:
+ f.write('%r,' % i)
+ f.write('],[')
+ for i in v[1]:
+ f.write('%r,' % i)
+
+ f.write(']),')
+ f.write('}\n')
+
+ f.write('''
+_lr_action = {}
+for _k, _v in _lr_action_items.items():
+ for _x,_y in zip(_v[0],_v[1]):
+ if not _x in _lr_action: _lr_action[_x] = {}
+ _lr_action[_x][_k] = _y
+del _lr_action_items
+''')
+
+ else:
+ f.write('\n_lr_action = { ')
+ for k, v in self.lr_action.items():
+ f.write('(%r,%r):%r,' % (k[0], k[1], v))
+ f.write('}\n')
+
+ if smaller:
+ # Factor out names to try and make the tables smaller
+ items = {}
+
+ for s, nd in self.lr_goto.items():
+ for name, v in nd.items():
+ i = items.get(name)
+ if not i:
+ i = ([], [])
+ items[name] = i
+ i[0].append(s)
+ i[1].append(v)
+
+ f.write('\n_lr_goto_items = {')
+ for k, v in items.items():
+ f.write('%r:([' % k)
+ for i in v[0]:
+ f.write('%r,' % i)
+ f.write('],[')
+ for i in v[1]:
+ f.write('%r,' % i)
+
+ f.write(']),')
+ f.write('}\n')
+
+ f.write('''
+_lr_goto = {}
+for _k, _v in _lr_goto_items.items():
+ for _x, _y in zip(_v[0], _v[1]):
+ if not _x in _lr_goto: _lr_goto[_x] = {}
+ _lr_goto[_x][_k] = _y
+del _lr_goto_items
+''')
+ else:
+ f.write('\n_lr_goto = { ')
+ for k, v in self.lr_goto.items():
+ f.write('(%r,%r):%r,' % (k[0], k[1], v))
+ f.write('}\n')
+
+ # Write production table
+ f.write('_lr_productions = [\n')
+ for p in self.lr_productions:
+ if p.func:
+ f.write(' (%r,%r,%d,%r,%r,%d),\n' % (p.str, p.name, p.len,
+ p.func, os.path.basename(p.file), p.line))
+ else:
+ f.write(' (%r,%r,%d,None,None,None),\n' % (str(p), p.name, p.len))
+ f.write(']\n')
+ f.close()
+
+ except IOError:
+ raise
+
+
+ # -----------------------------------------------------------------------------
+ # pickle_table()
+ #
+ # This function pickles the LR parsing tables to the file given by 'filename'
+ # -----------------------------------------------------------------------------
+
+ def pickle_table(self, filename, signature=''):
+ try:
+ import cPickle as pickle
+ except ImportError:
+ import pickle
+ with open(filename, 'wb') as outf:
+ pickle.dump(__tabversion__, outf, pickle_protocol)
+ pickle.dump(self.lr_method, outf, pickle_protocol)
+ pickle.dump(signature, outf, pickle_protocol)
+ pickle.dump(self.lr_action, outf, pickle_protocol)
+ pickle.dump(self.lr_goto, outf, pickle_protocol)
+
+ outp = []
+ for p in self.lr_productions:
+ if p.func:
+ outp.append((p.str, p.name, p.len, p.func, os.path.basename(p.file), p.line))
+ else:
+ outp.append((str(p), p.name, p.len, None, None, None))
+ pickle.dump(outp, outf, pickle_protocol)
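+
+ # The stream written above can be read back with six pickle.load()
+ # calls in the same order the objects were dumped; LRTable's
+ # read_pickle() method performs the real version of this. A minimal
+ # standalone sketch:
+ #
+ # with open(filename, 'rb') as inf:
+ # tabversion = pickle.load(inf)
+ # lr_method = pickle.load(inf)
+ # signature = pickle.load(inf)
+ # lr_action = pickle.load(inf)
+ # lr_goto = pickle.load(inf)
+ # productions = pickle.load(inf)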
+
+# -----------------------------------------------------------------------------
+# === INTROSPECTION ===
+#
+# The following functions and classes are used to implement the PLY
+# introspection features followed by the yacc() function itself.
+# -----------------------------------------------------------------------------
+
+# -----------------------------------------------------------------------------
+# get_caller_module_dict()
+#
+# This function returns a dictionary containing all of the symbols defined within
+# a caller further down the call stack. This is used to get the environment
+# associated with the yacc() call if none was provided.
+# -----------------------------------------------------------------------------
+
+def get_caller_module_dict(levels):
+ f = sys._getframe(levels)
+ ldict = f.f_globals.copy()
+ if f.f_globals != f.f_locals:
+ ldict.update(f.f_locals)
+ return ldict
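+
+# For example, yacc() below calls get_caller_module_dict(2) when no
+# module argument is given, so the p_* rule functions, 'tokens' and
+# 'precedence' are looked up in the globals (updated with any locals)
+# of whichever module invoked yacc().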
+
+# -----------------------------------------------------------------------------
+# parse_grammar()
+#
+# This takes a raw grammar rule string and parses it into production data
+# -----------------------------------------------------------------------------
+def parse_grammar(doc, file, line):
+ grammar = []
+ # Split the doc string into lines
+ pstrings = doc.splitlines()
+ lastp = None
+ dline = line
+ for ps in pstrings:
+ dline += 1
+ p = ps.split()
+ if not p:
+ continue
+ try:
+ if p[0] == '|':
+ # This is a continuation of a previous rule
+ if not lastp:
+ raise SyntaxError("%s:%d: Misplaced '|'" % (file, dline))
+ prodname = lastp
+ syms = p[1:]
+ else:
+ prodname = p[0]
+ lastp = prodname
+ syms = p[2:]
+ assign = p[1]
+ if assign != ':' and assign != '::=':
+ raise SyntaxError("%s:%d: Syntax error. Expected ':'" % (file, dline))
+
+ grammar.append((file, dline, prodname, syms))
+ except SyntaxError:
+ raise
+ except Exception:
+ raise SyntaxError('%s:%d: Syntax error in rule %r' % (file, dline, ps.strip()))
+
+ return grammar
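+
+# For example, a rule function whose docstring is
+#
+# expression : expression PLUS term
+# | term
+#
+# yields two entries: (file, line+1, 'expression',
+# ['expression', 'PLUS', 'term']) and (file, line+2, 'expression',
+# ['term']).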
+
+# -----------------------------------------------------------------------------
+# ParserReflect()
+#
+# This class represents information extracted for building a parser including
+# start symbol, error function, tokens, precedence list, action functions,
+# etc.
+# -----------------------------------------------------------------------------
+class ParserReflect(object):
+ def __init__(self, pdict, log=None):
+ self.pdict = pdict
+ self.start = None
+ self.error_func = None
+ self.tokens = None
+ self.modules = set()
+ self.grammar = []
+ self.error = False
+
+ if log is None:
+ self.log = PlyLogger(sys.stderr)
+ else:
+ self.log = log
+
+ # Get all of the basic information
+ def get_all(self):
+ self.get_start()
+ self.get_error_func()
+ self.get_tokens()
+ self.get_precedence()
+ self.get_pfunctions()
+
+ # Validate all of the information
+ def validate_all(self):
+ self.validate_start()
+ self.validate_error_func()
+ self.validate_tokens()
+ self.validate_precedence()
+ self.validate_pfunctions()
+ self.validate_modules()
+ return self.error
+
+ # Compute a signature over the grammar
+ def signature(self):
+ parts = []
+ try:
+ if self.start:
+ parts.append(self.start)
+ if self.prec:
+ parts.append(''.join([''.join(p) for p in self.prec]))
+ if self.tokens:
+ parts.append(' '.join(self.tokens))
+ for f in self.pfuncs:
+ if f[3]:
+ parts.append(f[3])
+ except (TypeError, ValueError):
+ pass
+ return ''.join(parts)
+
+ # -----------------------------------------------------------------------------
+ # validate_modules()
+ #
+ # This method checks to see if there are duplicated p_rulename() functions
+ # in the parser module file. Without this function, it is really easy for
+ # users to make mistakes by cutting and pasting code fragments (and it's a real
+ # bugger to try and figure out why the resulting parser doesn't work). Therefore,
+ # we just do a little regular expression pattern matching of def statements
+ # to try and detect duplicates.
+ # -----------------------------------------------------------------------------
+
+ def validate_modules(self):
+ # Match def p_funcname(
+ fre = re.compile(r'\s*def\s+(p_[a-zA-Z_0-9]*)\(')
+
+ for module in self.modules:
+ try:
+ lines, linen = inspect.getsourcelines(module)
+ except IOError:
+ continue
+
+ counthash = {}
+ for linen, line in enumerate(lines):
+ linen += 1
+ m = fre.match(line)
+ if m:
+ name = m.group(1)
+ prev = counthash.get(name)
+ if not prev:
+ counthash[name] = linen
+ else:
+ filename = inspect.getsourcefile(module)
+ self.log.warning('%s:%d: Function %s redefined. Previously defined on line %d',
+ filename, linen, name, prev)
+
+ # Get the start symbol
+ def get_start(self):
+ self.start = self.pdict.get('start')
+
+ # Validate the start symbol
+ def validate_start(self):
+ if self.start is not None:
+ if not isinstance(self.start, string_types):
+ self.log.error("'start' must be a string")
+
+ # Look for error handler
+ def get_error_func(self):
+ self.error_func = self.pdict.get('p_error')
+
+ # Validate the error function
+ def validate_error_func(self):
+ if self.error_func:
+ if isinstance(self.error_func, types.FunctionType):
+ ismethod = 0
+ elif isinstance(self.error_func, types.MethodType):
+ ismethod = 1
+ else:
+ self.log.error("'p_error' defined, but is not a function or method")
+ self.error = True
+ return
+
+ eline = self.error_func.__code__.co_firstlineno
+ efile = self.error_func.__code__.co_filename
+ module = inspect.getmodule(self.error_func)
+ self.modules.add(module)
+
+ argcount = self.error_func.__code__.co_argcount - ismethod
+ if argcount != 1:
+ self.log.error('%s:%d: p_error() requires 1 argument', efile, eline)
+ self.error = True
+
+ # Get the tokens map
+ def get_tokens(self):
+ tokens = self.pdict.get('tokens')
+ if not tokens:
+ self.log.error('No token list is defined')
+ self.error = True
+ return
+
+ if not isinstance(tokens, (list, tuple)):
+ self.log.error('tokens must be a list or tuple')
+ self.error = True
+ return
+
+ if not tokens:
+ self.log.error('tokens is empty')
+ self.error = True
+ return
+
+ self.tokens = tokens
+
+ # Validate the tokens
+ def validate_tokens(self):
+ # Validate the tokens.
+ if 'error' in self.tokens:
+ self.log.error("Illegal token name 'error'. Is a reserved word")
+ self.error = True
+ return
+
+ terminals = set()
+ for n in self.tokens:
+ if n in terminals:
+ self.log.warning('Token %r multiply defined', n)
+ terminals.add(n)
+
+ # Get the precedence map (if any)
+ def get_precedence(self):
+ self.prec = self.pdict.get('precedence')
+
+ # Validate and parse the precedence map
+ def validate_precedence(self):
+ preclist = []
+ if self.prec:
+ if not isinstance(self.prec, (list, tuple)):
+ self.log.error('precedence must be a list or tuple')
+ self.error = True
+ return
+ for level, p in enumerate(self.prec):
+ if not isinstance(p, (list, tuple)):
+ self.log.error('Bad precedence table')
+ self.error = True
+ return
+
+ if len(p) < 2:
+ self.log.error('Malformed precedence entry %s. Must be (assoc, term, ..., term)', p)
+ self.error = True
+ return
+ assoc = p[0]
+ if not isinstance(assoc, string_types):
+ self.log.error('precedence associativity must be a string')
+ self.error = True
+ return
+ for term in p[1:]:
+ if not isinstance(term, string_types):
+ self.log.error('precedence items must be strings')
+ self.error = True
+ return
+ preclist.append((term, assoc, level+1))
+ self.preclist = preclist
+
+ # Get all p_functions from the grammar
+ def get_pfunctions(self):
+ p_functions = []
+ for name, item in self.pdict.items():
+ if not name.startswith('p_') or name == 'p_error':
+ continue
+ if isinstance(item, (types.FunctionType, types.MethodType)):
+ line = getattr(item, 'co_firstlineno', item.__code__.co_firstlineno)
+ module = inspect.getmodule(item)
+ p_functions.append((line, module, name, item.__doc__))
+
+ # Sort all of the actions by line number; make sure to stringify
+ # modules to make them sortable, since `line` may not uniquely sort all
+ # p functions
+ p_functions.sort(key=lambda p_function: (
+ p_function[0],
+ str(p_function[1]),
+ p_function[2],
+ p_function[3]))
+ self.pfuncs = p_functions
+
+ # Validate all of the p_functions
+ def validate_pfunctions(self):
+ grammar = []
+ # Check for non-empty symbols
+ if len(self.pfuncs) == 0:
+ self.log.error('no rules of the form p_rulename are defined')
+ self.error = True
+ return
+
+ for line, module, name, doc in self.pfuncs:
+ file = inspect.getsourcefile(module)
+ func = self.pdict[name]
+ if isinstance(func, types.MethodType):
+ reqargs = 2
+ else:
+ reqargs = 1
+ if func.__code__.co_argcount > reqargs:
+ self.log.error('%s:%d: Rule %r has too many arguments', file, line, func.__name__)
+ self.error = True
+ elif func.__code__.co_argcount < reqargs:
+ self.log.error('%s:%d: Rule %r requires an argument', file, line, func.__name__)
+ self.error = True
+ elif not func.__doc__:
+ self.log.warning('%s:%d: No documentation string specified in function %r (ignored)',
+ file, line, func.__name__)
+ else:
+ try:
+ parsed_g = parse_grammar(doc, file, line)
+ for g in parsed_g:
+ grammar.append((name, g))
+ except SyntaxError as e:
+ self.log.error(str(e))
+ self.error = True
+
+ # Looks like a valid grammar rule
+ # Mark the file in which defined.
+ self.modules.add(module)
+
+ # Secondary validation step that looks for p_ definitions that are not functions
+ # or functions that look like they might be grammar rules.
+
+ for n, v in self.pdict.items():
+ if n.startswith('p_') and isinstance(v, (types.FunctionType, types.MethodType)):
+ continue
+ if n.startswith('t_'):
+ continue
+ if n.startswith('p_') and n != 'p_error':
+ self.log.warning('%r not defined as a function', n)
+ if ((isinstance(v, types.FunctionType) and v.__code__.co_argcount == 1) or
+ (isinstance(v, types.MethodType) and v.__func__.__code__.co_argcount == 2)):
+ if v.__doc__:
+ try:
+ doc = v.__doc__.split(' ')
+ if doc[1] == ':':
+ self.log.warning('%s:%d: Possible grammar rule %r defined without p_ prefix',
+ v.__code__.co_filename, v.__code__.co_firstlineno, n)
+ except IndexError:
+ pass
+
+ self.grammar = grammar
+
+# -----------------------------------------------------------------------------
+# yacc(module)
+#
+# Build a parser
+# -----------------------------------------------------------------------------
+
+def yacc(method='LALR', debug=yaccdebug, module=None, tabmodule=tab_module, start=None,
+ check_recursion=True, optimize=False, write_tables=True, debugfile=debug_file,
+ outputdir=None, debuglog=None, errorlog=None, picklefile=None):
+
+ if tabmodule is None:
+ tabmodule = tab_module
+
+ # Reference to the parsing method of the last built parser
+ global parse
+
+ # If pickling is enabled, table files are not created
+ if picklefile:
+ write_tables = 0
+
+ if errorlog is None:
+ errorlog = PlyLogger(sys.stderr)
+
+ # Get the module dictionary used for the parser
+ if module:
+ _items = [(k, getattr(module, k)) for k in dir(module)]
+ pdict = dict(_items)
+ # If no __file__ attribute is available, try to obtain it from the __module__ instead
+ if '__file__' not in pdict:
+ pdict['__file__'] = sys.modules[pdict['__module__']].__file__
+ else:
+ pdict = get_caller_module_dict(2)
+
+ if outputdir is None:
+ # If no output directory is set, the location of the output files
+ # is determined according to the following rules:
+ # - If tabmodule specifies a package, files go into that package directory
+ # - Otherwise, files go in the same directory as the specifying module
+ if isinstance(tabmodule, types.ModuleType):
+ srcfile = tabmodule.__file__
+ else:
+ if '.' not in tabmodule:
+ srcfile = pdict['__file__']
+ else:
+ parts = tabmodule.split('.')
+ pkgname = '.'.join(parts[:-1])
+ exec('import %s' % pkgname)
+ srcfile = getattr(sys.modules[pkgname], '__file__', '')
+ outputdir = os.path.dirname(srcfile)
+
+ # Determine if the module is part of a package or not.
+ # If so, fix the tabmodule setting so that tables load correctly
+ pkg = pdict.get('__package__')
+ if pkg and isinstance(tabmodule, str):
+ if '.' not in tabmodule:
+ tabmodule = pkg + '.' + tabmodule
+
+
+
+ # Set start symbol if it's specified directly using an argument
+ if start is not None:
+ pdict['start'] = start
+
+ # Collect parser information from the dictionary
+ pinfo = ParserReflect(pdict, log=errorlog)
+ pinfo.get_all()
+
+ if pinfo.error:
+ raise YaccError('Unable to build parser')
+
+ # Check signature against table files (if any)
+ signature = pinfo.signature()
+
+ # Read the tables
+ try:
+ lr = LRTable()
+ if picklefile:
+ read_signature = lr.read_pickle(picklefile)
+ else:
+ read_signature = lr.read_table(tabmodule)
+ if optimize or (read_signature == signature):
+ try:
+ lr.bind_callables(pinfo.pdict)
+ parser = LRParser(lr, pinfo.error_func)
+ parse = parser.parse
+ return parser
+ except Exception as e:
+ errorlog.warning('There was a problem loading the table file: %r', e)
+ except VersionError as e:
+ errorlog.warning(str(e))
+ except ImportError:
+ pass
+
+ if debuglog is None:
+ if debug:
+ try:
+ debuglog = PlyLogger(open(os.path.join(outputdir, debugfile), 'w'))
+ except IOError as e:
+ errorlog.warning("Couldn't open %r. %s" % (debugfile, e))
+ debuglog = NullLogger()
+ else:
+ debuglog = NullLogger()
+
+ debuglog.info('Created by PLY version %s (http://www.dabeaz.com/ply)', __version__)
+
+ errors = False
+
+ # Validate the parser information
+ if pinfo.validate_all():
+ raise YaccError('Unable to build parser')
+
+ if not pinfo.error_func:
+ errorlog.warning('no p_error() function is defined')
+
+ # Create a grammar object
+ grammar = Grammar(pinfo.tokens)
+
+ # Set precedence level for terminals
+ for term, assoc, level in pinfo.preclist:
+ try:
+ grammar.set_precedence(term, assoc, level)
+ except GrammarError as e:
+ errorlog.warning('%s', e)
+
+ # Add productions to the grammar
+ for funcname, gram in pinfo.grammar:
+ file, line, prodname, syms = gram
+ try:
+ grammar.add_production(prodname, syms, funcname, file, line)
+ except GrammarError as e:
+ errorlog.error('%s', e)
+ errors = True
+
+ # Set the grammar start symbols
+ try:
+ if start is None:
+ grammar.set_start(pinfo.start)
+ else:
+ grammar.set_start(start)
+ except GrammarError as e:
+ errorlog.error(str(e))
+ errors = True
+
+ if errors:
+ raise YaccError('Unable to build parser')
+
+ # Verify the grammar structure
+ undefined_symbols = grammar.undefined_symbols()
+ for sym, prod in undefined_symbols:
+ errorlog.error('%s:%d: Symbol %r used, but not defined as a token or a rule', prod.file, prod.line, sym)
+ errors = True
+
+ unused_terminals = grammar.unused_terminals()
+ if unused_terminals:
+ debuglog.info('')
+ debuglog.info('Unused terminals:')
+ debuglog.info('')
+ for term in unused_terminals:
+ errorlog.warning('Token %r defined, but not used', term)
+ debuglog.info(' %s', term)
+
+ # Print out all productions to the debug log
+ if debug:
+ debuglog.info('')
+ debuglog.info('Grammar')
+ debuglog.info('')
+ for n, p in enumerate(grammar.Productions):
+ debuglog.info('Rule %-5d %s', n, p)
+
+ # Find unused non-terminals
+ unused_rules = grammar.unused_rules()
+ for prod in unused_rules:
+ errorlog.warning('%s:%d: Rule %r defined, but not used', prod.file, prod.line, prod.name)
+
+ if len(unused_terminals) == 1:
+ errorlog.warning('There is 1 unused token')
+ if len(unused_terminals) > 1:
+ errorlog.warning('There are %d unused tokens', len(unused_terminals))
+
+ if len(unused_rules) == 1:
+ errorlog.warning('There is 1 unused rule')
+ if len(unused_rules) > 1:
+ errorlog.warning('There are %d unused rules', len(unused_rules))
+
+ if debug:
+ debuglog.info('')
+ debuglog.info('Terminals, with rules where they appear')
+ debuglog.info('')
+ terms = list(grammar.Terminals)
+ terms.sort()
+ for term in terms:
+ debuglog.info('%-20s : %s', term, ' '.join([str(s) for s in grammar.Terminals[term]]))
+
+ debuglog.info('')
+ debuglog.info('Nonterminals, with rules where they appear')
+ debuglog.info('')
+ nonterms = list(grammar.Nonterminals)
+ nonterms.sort()
+ for nonterm in nonterms:
+ debuglog.info('%-20s : %s', nonterm, ' '.join([str(s) for s in grammar.Nonterminals[nonterm]]))
+ debuglog.info('')
+
+ if check_recursion:
+ unreachable = grammar.find_unreachable()
+ for u in unreachable:
+ errorlog.warning('Symbol %r is unreachable', u)
+
+ infinite = grammar.infinite_cycles()
+ for inf in infinite:
+ errorlog.error('Infinite recursion detected for symbol %r', inf)
+ errors = True
+
+ unused_prec = grammar.unused_precedence()
+ for term, assoc in unused_prec:
+ errorlog.error('Precedence rule %r defined for unknown symbol %r', assoc, term)
+ errors = True
+
+ if errors:
+ raise YaccError('Unable to build parser')
+
+ # Run the LRGeneratedTable on the grammar
+ if debug:
+ errorlog.debug('Generating %s tables', method)
+
+ lr = LRGeneratedTable(grammar, method, debuglog)
+
+ if debug:
+ num_sr = len(lr.sr_conflicts)
+
+ # Report shift/reduce and reduce/reduce conflicts
+ if num_sr == 1:
+ errorlog.warning('1 shift/reduce conflict')
+ elif num_sr > 1:
+ errorlog.warning('%d shift/reduce conflicts', num_sr)
+
+ num_rr = len(lr.rr_conflicts)
+ if num_rr == 1:
+ errorlog.warning('1 reduce/reduce conflict')
+ elif num_rr > 1:
+ errorlog.warning('%d reduce/reduce conflicts', num_rr)
+
+ # Write out conflicts to the output file
+ if debug and (lr.sr_conflicts or lr.rr_conflicts):
+ debuglog.warning('')
+ debuglog.warning('Conflicts:')
+ debuglog.warning('')
+
+ for state, tok, resolution in lr.sr_conflicts:
+ debuglog.warning('shift/reduce conflict for %s in state %d resolved as %s', tok, state, resolution)
+
+ already_reported = set()
+ for state, rule, rejected in lr.rr_conflicts:
+ if (state, id(rule), id(rejected)) in already_reported:
+ continue
+ debuglog.warning('reduce/reduce conflict in state %d resolved using rule (%s)', state, rule)
+ debuglog.warning('rejected rule (%s) in state %d', rejected, state)
+ errorlog.warning('reduce/reduce conflict in state %d resolved using rule (%s)', state, rule)
+ errorlog.warning('rejected rule (%s) in state %d', rejected, state)
+ already_reported.add((state, id(rule), id(rejected)))
+
+ warned_never = []
+ for state, rule, rejected in lr.rr_conflicts:
+ if not rejected.reduced and (rejected not in warned_never):
+ debuglog.warning('Rule (%s) is never reduced', rejected)
+ errorlog.warning('Rule (%s) is never reduced', rejected)
+ warned_never.append(rejected)
+
+ # Write the table file if requested
+ if write_tables:
+ try:
+ lr.write_table(tabmodule, outputdir, signature)
+ except IOError as e:
+ errorlog.warning("Couldn't create %r. %s" % (tabmodule, e))
+
+ # Write a pickled version of the tables
+ if picklefile:
+ try:
+ lr.pickle_table(picklefile, signature)
+ except IOError as e:
+ errorlog.warning("Couldn't create %r. %s" % (picklefile, e))
+
+ # Build the parser
+ lr.bind_callables(pinfo.pdict)
+ parser = LRParser(lr, pinfo.error_func)
+
+ parse = parser.parse
+ return parser
diff --git a/billinglayer/python/pycparser/ply/ygen.py b/billinglayer/python/pycparser/ply/ygen.py
new file mode 100644
index 0000000..acf5ca1
--- /dev/null
+++ b/billinglayer/python/pycparser/ply/ygen.py
@@ -0,0 +1,74 @@
+# ply: ygen.py
+#
+# This is a support program that auto-generates different versions of the YACC parsing
+# function with different features removed for the purposes of performance.
+#
+# Users should edit the method LRParser.parsedebug() in yacc.py. The source code
+# for that method is then used to create the other methods. See the comments in
+# yacc.py for further details.
+
+import os.path
+import shutil
+
+def get_source_range(lines, tag):
+ srclines = enumerate(lines)
+ start_tag = '#--! %s-start' % tag
+ end_tag = '#--! %s-end' % tag
+
+ for start_index, line in srclines:
+ if line.strip().startswith(start_tag):
+ break
+
+ for end_index, line in srclines:
+ if line.strip().endswith(end_tag):
+ break
+
+ return (start_index + 1, end_index)
+
+def filter_section(lines, tag):
+ filtered_lines = []
+ include = True
+ tag_text = '#--! %s' % tag
+ for line in lines:
+ if line.strip().startswith(tag_text):
+ include = not include
+ elif include:
+ filtered_lines.append(line)
+ return filtered_lines
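+
+# For example, filtering with tag 'DEBUG' drops every line between a
+# line starting with '#--! DEBUG' and the next such line (both marker
+# lines included), since each '#--! DEBUG' prefix toggles the include
+# flag; all other lines pass through unchanged.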
+
+def main():
+ dirname = os.path.dirname(__file__)
+ shutil.copy2(os.path.join(dirname, 'yacc.py'), os.path.join(dirname, 'yacc.py.bak'))
+ with open(os.path.join(dirname, 'yacc.py'), 'r') as f:
+ lines = f.readlines()
+
+ parse_start, parse_end = get_source_range(lines, 'parsedebug')
+ parseopt_start, parseopt_end = get_source_range(lines, 'parseopt')
+ parseopt_notrack_start, parseopt_notrack_end = get_source_range(lines, 'parseopt-notrack')
+
+ # Get the original source
+ orig_lines = lines[parse_start:parse_end]
+
+ # Filter the DEBUG sections out
+ parseopt_lines = filter_section(orig_lines, 'DEBUG')
+
+ # Filter the TRACKING sections out
+ parseopt_notrack_lines = filter_section(parseopt_lines, 'TRACKING')
+
+ # Replace the parser source sections with updated versions
+ lines[parseopt_notrack_start:parseopt_notrack_end] = parseopt_notrack_lines
+ lines[parseopt_start:parseopt_end] = parseopt_lines
+
+ lines = [line.rstrip()+'\n' for line in lines]
+ with open(os.path.join(dirname, 'yacc.py'), 'w') as f:
+ f.writelines(lines)
+
+ print('Updated yacc.py')
+
+if __name__ == '__main__':
+ main()
+
+
+
+
+
diff --git a/billinglayer/python/pycparser/plyparser.py b/billinglayer/python/pycparser/plyparser.py
new file mode 100644
index 0000000..b8f4c43
--- /dev/null
+++ b/billinglayer/python/pycparser/plyparser.py
@@ -0,0 +1,133 @@
+#-----------------------------------------------------------------
+# plyparser.py
+#
+# PLYParser class and other utilities for simplifying programming
+# parsers with PLY
+#
+# Eli Bendersky [https://eli.thegreenplace.net/]
+# License: BSD
+#-----------------------------------------------------------------
+
+import warnings
+
+class Coord(object):
+ """ Coordinates of a syntactic element. Consists of:
+ - File name
+ - Line number
+ - (optional) column number, for the Lexer
+ """
+ __slots__ = ('file', 'line', 'column', '__weakref__')
+ def __init__(self, file, line, column=None):
+ self.file = file
+ self.line = line
+ self.column = column
+
+ def __str__(self):
+ str = "%s:%s" % (self.file, self.line)
+ if self.column: str += ":%s" % self.column
+ return str
+
+
+class ParseError(Exception): pass
+
+
+class PLYParser(object):
+ def _create_opt_rule(self, rulename):
+ """ Given a rule name, creates an optional ply.yacc rule
+ for it. The name of the optional rule is
+ <rulename>_opt
+ """
+ optname = rulename + '_opt'
+
+ def optrule(self, p):
+ p[0] = p[1]
+
+ optrule.__doc__ = '%s : empty\n| %s' % (optname, rulename)
+ optrule.__name__ = 'p_%s' % optname
+ setattr(self.__class__, optrule.__name__, optrule)
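+
+ # For example, self._create_opt_rule('expr') attaches a method named
+ # p_expr_opt whose docstring is "expr_opt : empty\n| expr", so
+ # ply.yacc picks it up like any hand-written optional rule.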
+
+ def _coord(self, lineno, column=None):
+ return Coord(
+ file=self.clex.filename,
+ line=lineno,
+ column=column)
+
+ def _token_coord(self, p, token_idx):
+ """ Returns the coordinates for the YaccProduction object 'p' indexed
+ with 'token_idx'. The coordinate includes the 'lineno' and
+ 'column'. Both follow the lex semantic, starting from 1.
+ """
+ last_cr = p.lexer.lexer.lexdata.rfind('\n', 0, p.lexpos(token_idx))
+ if last_cr < 0:
+ last_cr = -1
+ column = (p.lexpos(token_idx) - (last_cr))
+ return self._coord(p.lineno(token_idx), column)
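+
+ # For example, with lexdata "ab\ncd" and a token at lexpos 3 (the
+ # 'c'), last_cr is 2 (the newline), so column = 3 - 2 = 1, matching
+ # the 1-based lex convention mentioned above.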
+
+ def _parse_error(self, msg, coord):
+ raise ParseError("%s: %s" % (coord, msg))
+
+
+def parameterized(*params):
+ """ Decorator to create parameterized rules.
+
+ Parameterized rule methods must be named starting with 'p_' and contain
+ 'xxx', and their docstrings may contain 'xxx' and 'yyy'. These will be
+ replaced by the given parameter tuples. For example, ``p_xxx_rule()`` with
+ docstring 'xxx_rule : yyy' when decorated with
+ ``@parameterized(('id', 'ID'))`` produces ``p_id_rule()`` with the docstring
+ 'id_rule : ID'. Using multiple tuples produces multiple rules.
+ """
+ def decorate(rule_func):
+ rule_func._params = params
+ return rule_func
+ return decorate
+
+
+def template(cls):
+ """ Class decorator to generate rules from parameterized rule templates.
+
+ See `parameterized` for more information on parameterized rules.
+ """
+ issued_nodoc_warning = False
+ for attr_name in dir(cls):
+ if attr_name.startswith('p_'):
+ method = getattr(cls, attr_name)
+ if hasattr(method, '_params'):
+ # Remove the template method
+ delattr(cls, attr_name)
+ # Create parameterized rules from this method; only run this if
+ # the method has a docstring. This is to address an issue when
+ # pycparser is used in -OO mode, which strips
+ # docstrings away.
+ # See: https://github.com/eliben/pycparser/pull/198/ and
+ # https://github.com/eliben/pycparser/issues/197
+ # for discussion.
+ if method.__doc__ is not None:
+ _create_param_rules(cls, method)
+ elif not issued_nodoc_warning:
+ warnings.warn(
+ 'parsing methods must have __doc__ for pycparser to work properly',
+ RuntimeWarning,
+ stacklevel=2)
+ issued_nodoc_warning = True
+ return cls
+
+
+def _create_param_rules(cls, func):
+ """ Create ply.yacc rules based on a parameterized rule function
+
+ Generates new methods (one per each pair of parameters) based on the
+ template rule function `func`, and attaches them to `cls`. The rule
+ function's parameters must be accessible via its `_params` attribute.
+ """
+ for xxx, yyy in func._params:
+ # Use the template method's body for each new method
+ def param_rule(self, p):
+ func(self, p)
+
+ # Substitute in the params for the grammar rule and function name
+ param_rule.__doc__ = func.__doc__.replace('xxx', xxx).replace('yyy', yyy)
+ param_rule.__name__ = func.__name__.replace('xxx', xxx)
+
+ # Attach the new method to the class
+ setattr(cls, param_rule.__name__, param_rule)
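+
+
+# A hypothetical usage sketch (the class and rule below are invented
+# for illustration, not part of this module). A parser class combines
+# the two decorators like this:
+#
+# @template
+# class MyParser(PLYParser):
+#
+# @parameterized(('id', 'ID'), ('typeid', 'TYPEID'))
+# def p_xxx_rule(self, p):
+# ''' xxx_rule : yyy '''
+# p[0] = p[1]
+#
+# which removes p_xxx_rule and attaches p_id_rule (docstring
+# 'id_rule : ID') and p_typeid_rule (docstring 'typeid_rule : TYPEID')
+# to the class.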
diff --git a/billinglayer/python/pycparser/yacctab.py b/billinglayer/python/pycparser/yacctab.py
new file mode 100644
index 0000000..0622c36
--- /dev/null
+++ b/billinglayer/python/pycparser/yacctab.py
@@ -0,0 +1,366 @@
+
+# yacctab.py
+# This file is automatically generated. Do not edit.
+_tabversion = '3.10'
+
+_lr_method = 'LALR'
+
+_lr_signature = 'translation_unit_or_emptyleftLORleftLANDleftORleftXORleftANDleftEQNEleftGTGELTLEleftRSHIFTLSHIFTleftPLUSMINUSleftTIMESDIVIDEMODAUTO BREAK CASE CHAR CONST CONTINUE DEFAULT DO DOUBLE ELSE ENUM EXTERN FLOAT FOR GOTO IF INLINE INT LONG REGISTER OFFSETOF RESTRICT RETURN SHORT SIGNED SIZEOF STATIC STRUCT SWITCH TYPEDEF UNION UNSIGNED VOID VOLATILE WHILE __INT128 _BOOL _COMPLEX _NORETURN _THREAD_LOCAL _STATIC_ASSERT _ATOMIC _ALIGNOF _ALIGNAS ID TYPEID INT_CONST_DEC INT_CONST_OCT INT_CONST_HEX INT_CONST_BIN INT_CONST_CHAR FLOAT_CONST HEX_FLOAT_CONST CHAR_CONST WCHAR_CONST U8CHAR_CONST U16CHAR_CONST U32CHAR_CONST STRING_LITERAL WSTRING_LITERAL U8STRING_LITERAL U16STRING_LITERAL U32STRING_LITERAL PLUS MINUS TIMES DIVIDE MOD OR AND NOT XOR LSHIFT RSHIFT LOR LAND LNOT LT LE GT GE EQ NE EQUALS TIMESEQUAL DIVEQUAL MODEQUAL PLUSEQUAL MINUSEQUAL LSHIFTEQUAL RSHIFTEQUAL ANDEQUAL XOREQUAL OREQUAL PLUSPLUS MINUSMINUS ARROW CONDOP LPAREN RPAREN LBRACKET RBRACKET LBRACE RBRACE COMMA PERIOD SEMI COLON ELLIPSIS PPHASH PPPRAGMA PPPRAGMASTRabstract_declarator_opt : empty\n| abstract_declaratorassignment_expression_opt : empty\n| assignment_expressionblock_item_list_opt : empty\n| block_item_listdeclaration_list_opt : empty\n| declaration_listdeclaration_specifiers_no_type_opt : empty\n| declaration_specifiers_no_typedesignation_opt : empty\n| designationexpression_opt : empty\n| expressionid_init_declarator_list_opt : empty\n| id_init_declarator_listidentifier_list_opt : empty\n| identifier_listinit_declarator_list_opt : empty\n| init_declarator_listinitializer_list_opt : empty\n| initializer_listparameter_type_list_opt : empty\n| parameter_type_liststruct_declarator_list_opt : empty\n| struct_declarator_listtype_qualifier_list_opt : empty\n| type_qualifier_list direct_id_declarator : ID\n direct_id_declarator : LPAREN id_declarator RPAREN\n direct_id_declarator : direct_id_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET\n direct_id_declarator : direct_id_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET\n | direct_id_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET\n direct_id_declarator : direct_id_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET\n direct_id_declarator : direct_id_declarator LPAREN parameter_type_list RPAREN\n | direct_id_declarator LPAREN identifier_list_opt RPAREN\n direct_typeid_declarator : TYPEID\n direct_typeid_declarator : LPAREN typeid_declarator RPAREN\n direct_typeid_declarator : direct_typeid_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET\n direct_typeid_declarator : direct_typeid_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET\n | direct_typeid_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET\n direct_typeid_declarator : direct_typeid_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET\n direct_typeid_declarator : direct_typeid_declarator LPAREN parameter_type_list RPAREN\n | direct_typeid_declarator LPAREN identifier_list_opt RPAREN\n direct_typeid_noparen_declarator : TYPEID\n direct_typeid_noparen_declarator : direct_typeid_noparen_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET\n direct_typeid_noparen_declarator : direct_typeid_noparen_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET\n | direct_typeid_noparen_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET\n 
direct_typeid_noparen_declarator : direct_typeid_noparen_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET\n direct_typeid_noparen_declarator : direct_typeid_noparen_declarator LPAREN parameter_type_list RPAREN\n | direct_typeid_noparen_declarator LPAREN identifier_list_opt RPAREN\n id_declarator : direct_id_declarator\n id_declarator : pointer direct_id_declarator\n typeid_declarator : direct_typeid_declarator\n typeid_declarator : pointer direct_typeid_declarator\n typeid_noparen_declarator : direct_typeid_noparen_declarator\n typeid_noparen_declarator : pointer direct_typeid_noparen_declarator\n translation_unit_or_empty : translation_unit\n | empty\n translation_unit : external_declaration\n translation_unit : translation_unit external_declaration\n external_declaration : function_definition\n external_declaration : declaration\n external_declaration : pp_directive\n | pppragma_directive\n external_declaration : SEMI\n external_declaration : static_assert\n static_assert : _STATIC_ASSERT LPAREN constant_expression COMMA unified_string_literal RPAREN\n | _STATIC_ASSERT LPAREN constant_expression RPAREN\n pp_directive : PPHASH\n pppragma_directive : PPPRAGMA\n | PPPRAGMA PPPRAGMASTR\n function_definition : id_declarator declaration_list_opt compound_statement\n function_definition : declaration_specifiers id_declarator declaration_list_opt compound_statement\n statement : labeled_statement\n | expression_statement\n | compound_statement\n | selection_statement\n | iteration_statement\n | jump_statement\n | pppragma_directive\n | static_assert\n pragmacomp_or_statement : pppragma_directive statement\n | statement\n decl_body : declaration_specifiers init_declarator_list_opt\n | declaration_specifiers_no_type id_init_declarator_list_opt\n declaration : decl_body SEMI\n declaration_list : declaration\n | declaration_list declaration\n declaration_specifiers_no_type : type_qualifier declaration_specifiers_no_type_opt\n declaration_specifiers_no_type : storage_class_specifier declaration_specifiers_no_type_opt\n declaration_specifiers_no_type : function_specifier declaration_specifiers_no_type_opt\n declaration_specifiers_no_type : atomic_specifier declaration_specifiers_no_type_opt\n declaration_specifiers_no_type : alignment_specifier declaration_specifiers_no_type_opt\n declaration_specifiers : declaration_specifiers type_qualifier\n declaration_specifiers : declaration_specifiers storage_class_specifier\n declaration_specifiers : declaration_specifiers function_specifier\n declaration_specifiers : declaration_specifiers type_specifier_no_typeid\n declaration_specifiers : type_specifier\n declaration_specifiers : declaration_specifiers_no_type type_specifier\n declaration_specifiers : declaration_specifiers alignment_specifier\n storage_class_specifier : AUTO\n | REGISTER\n | STATIC\n | EXTERN\n | TYPEDEF\n | _THREAD_LOCAL\n function_specifier : INLINE\n | _NORETURN\n type_specifier_no_typeid : VOID\n | _BOOL\n | CHAR\n | SHORT\n | INT\n | LONG\n | FLOAT\n | DOUBLE\n | _COMPLEX\n | SIGNED\n | UNSIGNED\n | __INT128\n type_specifier : typedef_name\n | enum_specifier\n | struct_or_union_specifier\n | type_specifier_no_typeid\n | atomic_specifier\n atomic_specifier : _ATOMIC LPAREN type_name RPAREN\n type_qualifier : CONST\n | RESTRICT\n | VOLATILE\n | _ATOMIC\n init_declarator_list : init_declarator\n | init_declarator_list COMMA init_declarator\n init_declarator : declarator\n | declarator EQUALS initializer\n id_init_declarator_list : id_init_declarator\n | id_init_declarator_list 
COMMA init_declarator\n id_init_declarator : id_declarator\n | id_declarator EQUALS initializer\n specifier_qualifier_list : specifier_qualifier_list type_specifier_no_typeid\n specifier_qualifier_list : specifier_qualifier_list type_qualifier\n specifier_qualifier_list : type_specifier\n specifier_qualifier_list : type_qualifier_list type_specifier\n specifier_qualifier_list : alignment_specifier\n specifier_qualifier_list : specifier_qualifier_list alignment_specifier\n struct_or_union_specifier : struct_or_union ID\n | struct_or_union TYPEID\n struct_or_union_specifier : struct_or_union brace_open struct_declaration_list brace_close\n | struct_or_union brace_open brace_close\n struct_or_union_specifier : struct_or_union ID brace_open struct_declaration_list brace_close\n | struct_or_union ID brace_open brace_close\n | struct_or_union TYPEID brace_open struct_declaration_list brace_close\n | struct_or_union TYPEID brace_open brace_close\n struct_or_union : STRUCT\n | UNION\n struct_declaration_list : struct_declaration\n | struct_declaration_list struct_declaration\n struct_declaration : specifier_qualifier_list struct_declarator_list_opt SEMI\n struct_declaration : SEMI\n struct_declaration : pppragma_directive\n struct_declarator_list : struct_declarator\n | struct_declarator_list COMMA struct_declarator\n struct_declarator : declarator\n struct_declarator : declarator COLON constant_expression\n | COLON constant_expression\n enum_specifier : ENUM ID\n | ENUM TYPEID\n enum_specifier : ENUM brace_open enumerator_list brace_close\n enum_specifier : ENUM ID brace_open enumerator_list brace_close\n | ENUM TYPEID brace_open enumerator_list brace_close\n enumerator_list : enumerator\n | enumerator_list COMMA\n | enumerator_list COMMA enumerator\n alignment_specifier : _ALIGNAS LPAREN type_name RPAREN\n | _ALIGNAS LPAREN constant_expression RPAREN\n enumerator : ID\n | ID EQUALS constant_expression\n declarator : id_declarator\n | typeid_declarator\n pointer : TIMES type_qualifier_list_opt\n | TIMES type_qualifier_list_opt pointer\n type_qualifier_list : type_qualifier\n | type_qualifier_list type_qualifier\n parameter_type_list : parameter_list\n | parameter_list COMMA ELLIPSIS\n parameter_list : parameter_declaration\n | parameter_list COMMA parameter_declaration\n parameter_declaration : declaration_specifiers id_declarator\n | declaration_specifiers typeid_noparen_declarator\n parameter_declaration : declaration_specifiers abstract_declarator_opt\n identifier_list : identifier\n | identifier_list COMMA identifier\n initializer : assignment_expression\n initializer : brace_open initializer_list_opt brace_close\n | brace_open initializer_list COMMA brace_close\n initializer_list : designation_opt initializer\n | initializer_list COMMA designation_opt initializer\n designation : designator_list EQUALS\n designator_list : designator\n | designator_list designator\n designator : LBRACKET constant_expression RBRACKET\n | PERIOD identifier\n type_name : specifier_qualifier_list abstract_declarator_opt\n abstract_declarator : pointer\n abstract_declarator : pointer direct_abstract_declarator\n abstract_declarator : direct_abstract_declarator\n direct_abstract_declarator : LPAREN abstract_declarator RPAREN direct_abstract_declarator : direct_abstract_declarator LBRACKET assignment_expression_opt RBRACKET\n direct_abstract_declarator : LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET\n direct_abstract_declarator : direct_abstract_declarator LBRACKET TIMES RBRACKET\n 
direct_abstract_declarator : LBRACKET TIMES RBRACKET\n direct_abstract_declarator : direct_abstract_declarator LPAREN parameter_type_list_opt RPAREN\n direct_abstract_declarator : LPAREN parameter_type_list_opt RPAREN\n block_item : declaration\n | statement\n block_item_list : block_item\n | block_item_list block_item\n compound_statement : brace_open block_item_list_opt brace_close labeled_statement : ID COLON pragmacomp_or_statement labeled_statement : CASE constant_expression COLON pragmacomp_or_statement labeled_statement : DEFAULT COLON pragmacomp_or_statement selection_statement : IF LPAREN expression RPAREN pragmacomp_or_statement selection_statement : IF LPAREN expression RPAREN statement ELSE pragmacomp_or_statement selection_statement : SWITCH LPAREN expression RPAREN pragmacomp_or_statement iteration_statement : WHILE LPAREN expression RPAREN pragmacomp_or_statement iteration_statement : DO pragmacomp_or_statement WHILE LPAREN expression RPAREN SEMI iteration_statement : FOR LPAREN expression_opt SEMI expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement iteration_statement : FOR LPAREN declaration expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement jump_statement : GOTO ID SEMI jump_statement : BREAK SEMI jump_statement : CONTINUE SEMI jump_statement : RETURN expression SEMI\n | RETURN SEMI\n expression_statement : expression_opt SEMI expression : assignment_expression\n | expression COMMA assignment_expression\n assignment_expression : LPAREN compound_statement RPAREN typedef_name : TYPEID assignment_expression : conditional_expression\n | unary_expression assignment_operator assignment_expression\n assignment_operator : EQUALS\n | XOREQUAL\n | TIMESEQUAL\n | DIVEQUAL\n | MODEQUAL\n | PLUSEQUAL\n | MINUSEQUAL\n | LSHIFTEQUAL\n | RSHIFTEQUAL\n | ANDEQUAL\n | OREQUAL\n constant_expression : conditional_expression conditional_expression : binary_expression\n | binary_expression CONDOP expression COLON conditional_expression\n binary_expression : cast_expression\n | binary_expression TIMES binary_expression\n | binary_expression DIVIDE binary_expression\n | binary_expression MOD binary_expression\n | binary_expression PLUS binary_expression\n | binary_expression MINUS binary_expression\n | binary_expression RSHIFT binary_expression\n | binary_expression LSHIFT binary_expression\n | binary_expression LT binary_expression\n | binary_expression LE binary_expression\n | binary_expression GE binary_expression\n | binary_expression GT binary_expression\n | binary_expression EQ binary_expression\n | binary_expression NE binary_expression\n | binary_expression AND binary_expression\n | binary_expression OR binary_expression\n | binary_expression XOR binary_expression\n | binary_expression LAND binary_expression\n | binary_expression LOR binary_expression\n cast_expression : unary_expression cast_expression : LPAREN type_name RPAREN cast_expression unary_expression : postfix_expression unary_expression : PLUSPLUS unary_expression\n | MINUSMINUS unary_expression\n | unary_operator cast_expression\n unary_expression : SIZEOF unary_expression\n | SIZEOF LPAREN type_name RPAREN\n | _ALIGNOF LPAREN type_name RPAREN\n unary_operator : AND\n | TIMES\n | PLUS\n | MINUS\n | NOT\n | LNOT\n postfix_expression : primary_expression postfix_expression : postfix_expression LBRACKET expression RBRACKET postfix_expression : postfix_expression LPAREN argument_expression_list RPAREN\n | postfix_expression LPAREN RPAREN\n postfix_expression : postfix_expression PERIOD ID\n | 
postfix_expression PERIOD TYPEID\n | postfix_expression ARROW ID\n | postfix_expression ARROW TYPEID\n postfix_expression : postfix_expression PLUSPLUS\n | postfix_expression MINUSMINUS\n postfix_expression : LPAREN type_name RPAREN brace_open initializer_list brace_close\n | LPAREN type_name RPAREN brace_open initializer_list COMMA brace_close\n primary_expression : identifier primary_expression : constant primary_expression : unified_string_literal\n | unified_wstring_literal\n primary_expression : LPAREN expression RPAREN primary_expression : OFFSETOF LPAREN type_name COMMA offsetof_member_designator RPAREN\n offsetof_member_designator : identifier\n | offsetof_member_designator PERIOD identifier\n | offsetof_member_designator LBRACKET expression RBRACKET\n argument_expression_list : assignment_expression\n | argument_expression_list COMMA assignment_expression\n identifier : ID constant : INT_CONST_DEC\n | INT_CONST_OCT\n | INT_CONST_HEX\n | INT_CONST_BIN\n | INT_CONST_CHAR\n constant : FLOAT_CONST\n | HEX_FLOAT_CONST\n constant : CHAR_CONST\n | WCHAR_CONST\n | U8CHAR_CONST\n | U16CHAR_CONST\n | U32CHAR_CONST\n unified_string_literal : STRING_LITERAL\n | unified_string_literal STRING_LITERAL\n unified_wstring_literal : WSTRING_LITERAL\n | U8STRING_LITERAL\n | U16STRING_LITERAL\n | U32STRING_LITERAL\n | unified_wstring_literal WSTRING_LITERAL\n | unified_wstring_literal U8STRING_LITERAL\n | unified_wstring_literal U16STRING_LITERAL\n | unified_wstring_literal U32STRING_LITERAL\n brace_open : LBRACE\n brace_close : RBRACE\n empty : '
+
+_lr_action_items = {'INT_CONST_CHAR':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,146,149,150,151,165,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,132,-335,-28,-182,-27,132,-337,-87,-72,-337,132,-286,-285,132,132,-283,-287,-288,132,-284,132,132,132,-336,-183,132,132,-28,-337,132,-28,-337,-337,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,-337,-76,-79,-82,-75,132,-77,132,132,-81,-215,-214,-80,-216,132,-78,132,132,-69,-284,132,132,-284,132,132,-244,-247,-245,-241,-242,-246,-248,132,-250,-251,-243,-249,-12,132,132,-11,132,132,132,132,-234,-233,132,-231,132,132,-217,132,-230,132,-84,-218,132,132,132,-337,-337,-198,132,132,132,-337,-284,-229,-232,132,-221,132,-83,-219,-68,132,-28,-337,132,-11,132,132,-220,132,132,132,-284,132,132,132,-337,132,-225,-224,-222,-84,132,132,132,-226,-223,132,-228,-227,]),'VOID':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,40,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,64,65,66,67,68,71,75,76,80,81,82,85,86,87,89,90,91,93,94,95,96,97,98,99,100,101,105,109,111,118,119,120,121,122,123,124,129,142,147,172,174,177,180,181,182,184,185,186,187,188,189,190,191,192,198,200,211,214,223,229,231,233,239,240,241,267,269,275,278,279,280,284,285,286,289,291,298,300,301,302,303,305,308,312,313,314,315,316,317,318,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,446,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[6,-337,-113,-128,6,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,6,-120,-115,-65,-102,-126,-131,-108,-238,-111,-122,-63,-129,6,-29,-121,-116,-62,-112,-70,-52,-123,-117,-337,-337,-119,-337,-114,-130,6,-118,-71,-103,-337,-9,-131,-91,-10,-96,-98,6,-131,-95,-101,-97,6,-53,-126,6,-88,6,6,-93,6,-147,-335,-146,6,-167,-166,-182,-100,-126,6,-87,-90,-94,-92,-61,-72,6,-144,-142,6,6,6,-73,6,-89,6,6,6,-149,-159,-160,-156,-336,6,-183,-30,6,6,-74,6,6,6,6,-174,-175,6,-143,-140,6,-141,-145,-76,-79,-82,-75,-77,6,-81,-215,-214,-80,-216,-78,-127,6,-153,6,-151,-148,-157,-168,-69,-36,-35,6,6,6,-234,-233,6,-231,-217,-230,-81,-84,-218,-152,-150,-158,-170,-169,-31,-34,6,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'LBRACKET':([2,3,5,6,7,10,11,12,13,18,20,22,23,26,27,30,33,34,35,36,39,42,43,44,46,48,49,50,54,56,58,60,62,68,71,73,76,77,80,81,82,86,96,97,98,100,101,103,104,105,106,109,111,127,132,133,134,136,138,139,140,141,142,143,145,147,148,152,153,154,156,160,161,163,164,166,167,168,169,176,177,187,191,198,199,200,211,216,227,230,235,236,237,238,240,241,261,263,269,275,276,278,279,280,283,310,312,314,316,317,328,340,341,342,344,345,347,355,356,371,376,402,403,404,405,407,411,414,442,443,448,449,453,454,457,458,464,465,470,472,474,482,483,488,489,490,492,511,512,518,519,520,526,527,529,530,531,532,544,545,547,550,551,559,560,563,565,570,571,572,],[-113,-128,-124,-110,-106,-104,-107,-125,-105,-99,-109,-120,-11
5,-102,-126,-108,-238,-111,-337,-122,-129,-29,-121,-116,-112,117,-123,-117,-119,-114,-130,-118,-103,-96,-98,128,-131,-37,-95,-101,-97,117,-147,-335,-146,-167,-166,-28,-180,-182,-27,-100,-126,128,-317,-321,-318,-303,-324,-330,-313,-319,-144,-301,-314,-142,-327,-325,-304,-322,-302,-315,-289,-328,-316,-329,-320,265,-323,-312,282,-149,-336,-183,-181,-30,282,-38,373,-326,-334,-332,-331,-333,-174,-175,-298,-297,-143,-140,282,282,-141,-145,421,-312,-127,-153,-151,-148,-168,-36,-35,282,282,459,-45,-44,-43,-199,373,-296,-295,-294,-293,-292,-305,421,-152,-150,-170,-169,-31,-34,282,459,-39,-42,-202,373,-200,-290,-291,373,-213,-207,-211,-33,-32,-41,-40,-201,549,-307,-209,-208,-210,-212,-51,-50,-306,373,-299,-46,-49,-308,-300,-48,-47,-309,]),'WCHAR_CONST':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,146,149,150,151,165,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,133,-335,-28,-182,-27,133,-337,-87,-72,-337,133,-286,-285,133,133,-283,-287,-288,133,-284,133,133,133,-336,-183,133,133,-28,-337,133,-28,-337,-337,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,-337,-76,-79,-82,-75,133,-77,133,133,-81,-215,-214,-80,-216,133,-78,133,133,-69,-284,133,133,-284,133,133,-244,-247,-245,-241,-242,-246,-248,133,-250,-251,-243,-249,-12,133,133,-11,133,133,133,133,-234,-233,133,-231,133,133,-217,133,-230,133,-84,-218,133,133,133,-337,-337,-198,133,133,133,-337,-284,-229,-232,133,-221,133,-83,-219,-68,133,-28,-337,133,-11,133,133,-220,133,133,133,-284,133,133,133,-337,133,-225,-224,-222,-84,133,133,133,-226,-223,133,-228,-227,]),'FLOAT_CONST':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,146,149,150,151,165,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,134,-335,-28,-182,-27,134,-337,-87,-72,-337,134,-286,-285,134,134,-283,-287,-288,134,-284,134,134,134,-336,-183,134,134,-28,-337,134,-28,-337,-337,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,-337,-76,-79,-82,-75,134,-77,134,134,-81,-215,-214,-80,-216,134,-78,134,134,-69,-284,134,134,-284,134,134,-244,-247,-245,-241,-242,-246,-248,134,-250,-251,-243,-249,-12,134,134,-11,134,134,134,134,-234,-233,134,-231,134,134,-217,134,-230,134,-84,-218,134,134,134,-337,-337,-198,134,134,134,-337,-284,-229,-232,134,-221,134,-83,-219,-68,134,-28,-337,134,-11,134,134,-220,134,134,134,-284,134,134,134,-337,134,-225,-224,-222,-84,134,134,134,-226,-223,134,-228,
-227,]),'MINUS':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,132,133,134,135,136,137,138,139,140,141,143,144,145,146,148,149,150,151,152,153,154,156,158,160,161,162,163,164,165,166,167,168,169,171,173,174,175,176,181,191,198,201,204,205,206,218,219,220,224,227,229,230,231,232,233,234,235,236,237,238,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,263,265,266,268,273,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,310,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,478,480,481,482,483,484,487,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,547,549,550,551,553,554,555,557,558,565,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,135,-335,-28,-182,-27,135,-337,-87,-72,-337,135,-317,-321,-318,-286,-303,-285,-324,-330,-313,-319,-301,-274,-314,135,-327,135,-283,-287,-325,-304,-322,-302,-255,-315,-289,245,-328,-316,-288,-329,-320,-276,-323,135,-284,135,135,-312,135,-336,-183,135,135,-28,-337,135,-28,-337,-274,-337,135,-326,135,-280,135,-277,-334,-332,-331,-333,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,-298,-297,135,135,-279,-278,-337,-76,-79,-82,-75,135,-77,135,135,-81,-215,-214,-80,-216,135,-78,-312,135,135,-69,-284,135,135,-284,135,135,-244,-247,-245,-241,-242,-246,-248,135,-250,-251,-243,-249,-12,135,135,-11,245,245,245,-260,245,245,245,-259,245,245,-257,-256,245,245,245,245,245,-258,-296,-295,-294,-293,-292,-305,135,135,135,135,-234,-233,135,-231,135,135,-217,135,-230,135,-84,-218,135,135,135,-337,-337,-198,135,-281,-282,135,-290,-291,135,-275,-337,-284,-229,-232,135,-221,135,-83,-219,-68,135,-28,-337,135,-11,135,135,-220,135,135,135,-284,135,135,-306,135,-337,-299,135,-225,-224,-222,-84,-300,135,135,135,-226,-223,135,-228,-227,]),'RPAREN':([2,3,5,6,7,10,11,12,13,18,20,22,23,26,27,30,33,34,35,36,39,42,43,44,46,48,49,50,54,56,58,60,62,68,71,73,76,77,80,81,82,86,96,98,100,101,103,104,105,106,107,109,111,118,125,127,129,132,133,134,136,138,139,140,141,142,143,144,145,147,148,152,153,154,156,157,158,159,160,161,162,163,164,166,167,168,169,176,177,178,183,187,191,198,199,200,203,207,208,209,210,211,212,213,215,216,221,222,224,225,230,232,234,235,236,237,238,240,241,261,263,266,268,269,270,271,272,273,274,275,276,277,278,279,280,281,283,294,312,314,316,317,328,340,341,342,343,344,345,346,347,348,355,356,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,408,409,411,414,415,416,417,418,422,433,439,442,443,448,449,452,453,454,457,458,460,461,462,463,464,465,468,476,478,480,482,483,486,487,489,490,492,495,501,503,507,511,512,516,517,518,519,524,525,526,527,529,530,531,532,544,545,547,551,553,556,559,560,563,565,566,567,570,571,572,573,],[-113,-128,-124,-110,-106,-104,-107,-125,-105,-99,-109,-120,-115,-102,-126,-108,-238,-111,-337,-122,-129,-29,-121,-116,-112,-52,-123,-117,-119,-114,-130,-118,-103,-96,-98,-54,-131,-37,-95,-101,-97,-53,-147,-146,-167,-166,-28,-180,-182,-27,200,-100,-126,-337,216,-55,-337,-317,-321,-318,-303,-324,-330,-313,-319,-144,-301,-274,-314,-142,-327,-325,-304,-322,-302,240,-255,241,-315,-289,-253,-328,-316,-329,-320,-276,-323,-312,-337,-252,312,-149,-336,-183,-181,-30,332,340,-17,341,-186,-337,-18,-184,-191,-38,355,356,-274,-239,-326,-2
80,-277,-334,-332,-331,-333,-174,-175,-298,-297,407,-279,-143,411,413,-235,-278,-203,-140,-204,-1,-337,-141,-145,-2,-206,-14,-127,-153,-151,-148,-168,-36,-35,-337,-190,-204,-56,-188,-45,-189,-44,-43,476,477,478,479,480,-261,-273,-262,-260,-264,-268,-263,-259,-266,-271,-257,-256,-265,-272,-267,-269,-270,-258,-296,-295,-294,-293,-292,-310,483,-305,-205,-23,-24,489,490,-337,-13,-218,-152,-150,-170,-169,510,-31,-34,-204,-57,-337,-192,-185,-187,-39,-42,-240,-237,-281,-282,-290,-291,-236,-275,-213,-207,-211,532,535,537,539,-33,-32,544,545,-41,-40,-254,-311,547,-307,-209,-208,-210,-212,-51,-50,-306,-299,-337,568,-46,-49,-308,-300,-337,574,-48,-47,-309,577,]),'STRUCT':([0,1,3,7,10,11,13,14,16,17,19,20,21,25,26,27,29,30,38,39,40,42,45,47,48,52,53,55,58,59,61,62,63,64,65,66,67,75,85,86,87,90,91,93,94,95,97,99,105,118,119,120,121,122,123,124,129,172,174,180,181,182,184,185,186,188,189,190,191,198,200,214,223,229,231,233,239,240,241,267,278,284,285,286,289,291,298,300,301,302,303,305,308,312,313,315,318,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,446,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[24,-337,-128,-106,-104,-107,-105,-64,-60,-67,-66,-109,24,-65,-102,-337,-131,-108,-63,-129,24,-29,-62,-70,-52,-337,-337,-337,-130,24,-71,-103,-337,-9,-131,-91,-10,24,24,-53,-337,-88,24,24,-93,24,-335,24,-182,24,-87,-90,-94,-92,-61,-72,24,24,24,-73,24,-89,24,24,24,-159,-160,-156,-336,-183,-30,24,-74,24,24,24,24,-174,-175,24,24,-76,-79,-82,-75,-77,24,-81,-215,-214,-80,-216,-78,-127,24,24,-157,-69,-36,-35,24,24,24,-234,-233,24,-231,-217,-230,-81,-84,-218,-158,-31,-34,24,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'LONG':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,40,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,64,65,66,67,68,71,75,76,80,81,82,85,86,87,89,90,91,93,94,95,96,97,98,99,100,101,105,109,111,118,119,120,121,122,123,124,129,142,147,172,174,177,180,181,182,184,185,186,187,188,189,190,191,192,198,200,211,214,223,229,231,233,239,240,241,267,269,275,278,279,280,284,285,286,289,291,298,300,301,302,303,305,308,312,313,314,315,316,317,318,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,446,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[23,-337,-113,-128,23,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,23,-120,-115,-65,-102,-126,-131,-108,-238,-111,-122,-63,-129,23,-29,-121,-116,-62,-112,-70,-52,-123,-117,-337,-337,-119,-337,-114,-130,23,-118,-71,-103,-337,-9,-131,-91,-10,-96,-98,23,-131,-95,-101,-97,23,-53,-126,23,-88,23,23,-93,23,-147,-335,-146,23,-167,-166,-182,-100,-126,23,-87,-90,-94,-92,-61,-72,23,-144,-142,23,23,23,-73,23,-89,23,23,23,-149,-159,-160,-156,-336,23,-183,-30,23,23,-74,23,23,23,23,-174,-175,23,-143,-140,23,-141,-145,-76,-79,-82,-75,-77,23,-81,-215,-214,-80,-216,-78,-127,23,-153,23,-151,-148,-157,-168,-69,-36,-35,23,23,23,-234,-233,23,-231,-217,-230,-81,-84,-218,-152,-150,-158,-170,-169,-31,-34,23,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'PLUS':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,132,133,134,135,136,137,138,139,140,141,143,144,145,146,148,149,150,151,152,153,154,156,158,160,161,162,163,164,165,166,167,168,169,171,173,174,175,176,181,191,198,201,204,205,206,218,219,220,224,227,229,230,231,232,233,234,235,236,237,238,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,263,265,266,2
68,273,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,310,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,478,480,481,482,483,484,487,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,547,549,550,551,553,554,555,557,558,565,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,137,-335,-28,-182,-27,137,-337,-87,-72,-337,137,-317,-321,-318,-286,-303,-285,-324,-330,-313,-319,-301,-274,-314,137,-327,137,-283,-287,-325,-304,-322,-302,-255,-315,-289,249,-328,-316,-288,-329,-320,-276,-323,137,-284,137,137,-312,137,-336,-183,137,137,-28,-337,137,-28,-337,-274,-337,137,-326,137,-280,137,-277,-334,-332,-331,-333,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,-298,-297,137,137,-279,-278,-337,-76,-79,-82,-75,137,-77,137,137,-81,-215,-214,-80,-216,137,-78,-312,137,137,-69,-284,137,137,-284,137,137,-244,-247,-245,-241,-242,-246,-248,137,-250,-251,-243,-249,-12,137,137,-11,249,249,249,-260,249,249,249,-259,249,249,-257,-256,249,249,249,249,249,-258,-296,-295,-294,-293,-292,-305,137,137,137,137,-234,-233,137,-231,137,137,-217,137,-230,137,-84,-218,137,137,137,-337,-337,-198,137,-281,-282,137,-290,-291,137,-275,-337,-284,-229,-232,137,-221,137,-83,-219,-68,137,-28,-337,137,-11,137,137,-220,137,137,137,-284,137,137,-306,137,-337,-299,137,-225,-224,-222,-84,-300,137,137,137,-226,-223,137,-228,-227,]),'ELLIPSIS':([350,],[462,]),'U32STRING_LITERAL':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,139,146,148,149,150,151,153,163,165,166,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,235,236,237,238,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,139,-335,-28,-182,-27,139,-337,-87,-72,-337,139,-286,-285,-330,139,-327,139,-283,-287,235,-328,-288,-329,139,-284,139,139,139,-336,-183,139,139,-28,-337,139,-28,-337,-337,139,139,139,-334,-332,-331,-333,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,-337,-76,-79,-82,-75,139,-77,139,139,-81,-215,-214,-80,-216,139,-78,139,139,-69,-284,139,139,-284,139,139,-244,-247,-245,-241,-242,-246,-248,139,-250,-251,-243,-249,-12,139,139,-11,139,139,139,139,-234,-233,139,-231,139,139,-217,139,-230,139,-84,-218,139,139,139,-337,-337,-198,139,139,139,-337,-284,-229,-232,139,-221,139,-83,-219,-68,139,-28,-337,139,-11,139,139,-220,139,139,139,-284,139,139,139,-337,139,-225,-224,-222,-84,139,139,139,-226,-223,139,-228,-227,]),'GT':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,158,160,161,162,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-255,-3
15,-289,250,-328,-316,-329,-320,-276,-323,-312,-336,-274,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-261,250,-262,-260,-264,250,-263,-259,-266,250,-257,-256,-265,250,250,250,250,-258,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'GOTO':([61,97,119,124,181,191,284,285,286,289,291,298,300,301,302,303,305,307,308,332,424,425,428,429,432,435,437,438,439,440,496,497,500,502,505,506,510,535,536,537,539,554,555,557,558,569,574,575,576,577,578,579,],[-71,-335,-87,-72,287,-336,-76,-79,-82,-75,-77,287,-81,-215,-214,-80,-216,287,-78,-69,-234,-233,-231,287,-217,-230,287,-84,-218,287,-229,-232,-221,287,-83,-219,-68,287,-220,287,287,-225,-224,-222,-84,287,287,-226,-223,287,-228,-227,]),'ENUM':([0,1,3,7,10,11,13,14,16,17,19,20,21,25,26,27,29,30,38,39,40,42,45,47,48,52,53,55,58,59,61,62,63,64,65,66,67,75,85,86,87,90,91,93,94,95,97,99,105,118,119,120,121,122,123,124,129,172,174,180,181,182,184,185,186,188,189,190,191,198,200,214,223,229,231,233,239,240,241,267,278,284,285,286,289,291,298,300,301,302,303,305,308,312,313,315,318,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,446,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[32,-337,-128,-106,-104,-107,-105,-64,-60,-67,-66,-109,32,-65,-102,-337,-131,-108,-63,-129,32,-29,-62,-70,-52,-337,-337,-337,-130,32,-71,-103,-337,-9,-131,-91,-10,32,32,-53,-337,-88,32,32,-93,32,-335,32,-182,32,-87,-90,-94,-92,-61,-72,32,32,32,-73,32,-89,32,32,32,-159,-160,-156,-336,-183,-30,32,-74,32,32,32,32,-174,-175,32,32,-76,-79,-82,-75,-77,32,-81,-215,-214,-80,-216,-78,-127,32,32,-157,-69,-36,-35,32,32,32,-234,-233,32,-231,-217,-230,-81,-84,-218,-158,-31,-34,32,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'PERIOD':([97,132,133,134,136,138,139,140,141,143,145,148,152,153,154,156,160,161,163,164,166,167,168,169,176,191,227,230,235,236,237,238,261,263,310,371,376,402,403,404,405,407,411,470,472,474,482,483,488,520,526,527,547,550,551,563,565,572,],[-335,-317,-321,-318,-303,-324,-330,-313,-319,-301,-314,-327,-325,-304,-322,-302,-315,-289,-328,-316,-329,-320,264,-323,-312,-336,372,-326,-334,-332,-331,-333,-298,-297,-312,-199,372,-296,-295,-294,-293,-292,-305,-202,372,-200,-290,-291,372,-201,548,-307,-306,372,-299,-308,-300,-309,]),'GE':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,158,160,161,162,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-255,-315,-289,254,-328,-316,-329,-320,-276,-323,-312,-336,-274,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-261,254,-262,-260,-264,254,-263,-259,-266,254,-257,-256,-265,254,254,254,254,-258,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'INT_CONST_DEC':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,146,149,150,151,165,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,5
36,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,140,-335,-28,-182,-27,140,-337,-87,-72,-337,140,-286,-285,140,140,-283,-287,-288,140,-284,140,140,140,-336,-183,140,140,-28,-337,140,-28,-337,-337,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,-337,-76,-79,-82,-75,140,-77,140,140,-81,-215,-214,-80,-216,140,-78,140,140,-69,-284,140,140,-284,140,140,-244,-247,-245,-241,-242,-246,-248,140,-250,-251,-243,-249,-12,140,140,-11,140,140,140,140,-234,-233,140,-231,140,140,-217,140,-230,140,-84,-218,140,140,140,-337,-337,-198,140,140,140,-337,-284,-229,-232,140,-221,140,-83,-219,-68,140,-28,-337,140,-11,140,140,-220,140,140,140,-284,140,140,140,-337,140,-225,-224,-222,-84,140,140,140,-226,-223,140,-228,-227,]),'ARROW':([132,133,134,136,138,139,140,141,143,145,148,152,153,154,156,160,161,163,164,166,167,168,169,176,191,230,235,236,237,238,261,263,310,402,403,404,405,407,411,482,483,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-314,-327,-325,-304,-322,-302,-315,-289,-328,-316,-329,-320,262,-323,-312,-336,-326,-334,-332,-331,-333,-298,-297,-312,-296,-295,-294,-293,-292,-305,-290,-291,-306,-299,-300,]),'_STATIC_ASSERT':([0,14,16,17,19,25,38,45,47,59,61,97,119,123,124,180,181,191,223,284,285,286,289,291,298,300,301,302,303,305,307,308,332,424,425,428,429,432,435,437,438,439,440,496,497,500,502,505,506,510,535,536,537,539,554,555,557,558,569,574,575,576,577,578,579,],[41,-64,-60,-67,-66,-65,-63,-62,-70,41,-71,-335,-87,-61,-72,-73,41,-336,-74,-76,-79,-82,-75,-77,41,-81,-215,-214,-80,-216,41,-78,-69,-234,-233,-231,41,-217,-230,41,-84,-218,41,-229,-232,-221,41,-83,-219,-68,41,-220,41,41,-225,-224,-222,-84,41,41,-226,-223,41,-228,-227,]),'CHAR':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,40,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,64,65,66,67,68,71,75,76,80,81,82,85,86,87,89,90,91,93,94,95,96,97,98,99,100,101,105,109,111,118,119,120,121,122,123,124,129,142,147,172,174,177,180,181,182,184,185,186,187,188,189,190,191,192,198,200,211,214,223,229,231,233,239,240,241,267,269,275,278,279,280,284,285,286,289,291,298,300,301,302,303,305,308,312,313,314,315,316,317,318,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,446,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[46,-337,-113,-128,46,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,46,-120,-115,-65,-102,-126,-131,-108,-238,-111,-122,-63,-129,46,-29,-121,-116,-62,-112,-70,-52,-123,-117,-337,-337,-119,-337,-114,-130,46,-118,-71,-103,-337,-9,-131,-91,-10,-96,-98,46,-131,-95,-101,-97,46,-53,-126,46,-88,46,46,-93,46,-147,-335,-146,46,-167,-166,-182,-100,-126,46,-87,-90,-94,-92,-61,-72,46,-144,-142,46,46,46,-73,46,-89,46,46,46,-149,-159,-160,-156,-336,46,-183,-30,46,46,-74,46,46,46,46,-174,-175,46,-143,-140,46,-141,-145,-76,-79,-82,-75,-77,46,-81,-215,-214,-80,-216,-78,-127,46,-153,46,-151,-148,-157,-168,-69,-36,-35,46,46,46,-234,-233,46,-231,-217,-230,-81,-84,-218,-152,-150,-158,-170,-169,-31,-34,46,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'HEX_FLOAT_CONST':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,146,149,150,151,165,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,
332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,141,-335,-28,-182,-27,141,-337,-87,-72,-337,141,-286,-285,141,141,-283,-287,-288,141,-284,141,141,141,-336,-183,141,141,-28,-337,141,-28,-337,-337,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,-337,-76,-79,-82,-75,141,-77,141,141,-81,-215,-214,-80,-216,141,-78,141,141,-69,-284,141,141,-284,141,141,-244,-247,-245,-241,-242,-246,-248,141,-250,-251,-243,-249,-12,141,141,-11,141,141,141,141,-234,-233,141,-231,141,141,-217,141,-230,141,-84,-218,141,141,141,-337,-337,-198,141,141,141,-337,-284,-229,-232,141,-221,141,-83,-219,-68,141,-28,-337,141,-11,141,141,-220,141,141,141,-284,141,141,141,-337,141,-225,-224,-222,-84,141,141,141,-226,-223,141,-228,-227,]),'DOUBLE':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,40,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,64,65,66,67,68,71,75,76,80,81,82,85,86,87,89,90,91,93,94,95,96,97,98,99,100,101,105,109,111,118,119,120,121,122,123,124,129,142,147,172,174,177,180,181,182,184,185,186,187,188,189,190,191,192,198,200,211,214,223,229,231,233,239,240,241,267,269,275,278,279,280,284,285,286,289,291,298,300,301,302,303,305,308,312,313,314,315,316,317,318,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,446,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[50,-337,-113,-128,50,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,50,-120,-115,-65,-102,-126,-131,-108,-238,-111,-122,-63,-129,50,-29,-121,-116,-62,-112,-70,-52,-123,-117,-337,-337,-119,-337,-114,-130,50,-118,-71,-103,-337,-9,-131,-91,-10,-96,-98,50,-131,-95,-101,-97,50,-53,-126,50,-88,50,50,-93,50,-147,-335,-146,50,-167,-166,-182,-100,-126,50,-87,-90,-94,-92,-61,-72,50,-144,-142,50,50,50,-73,50,-89,50,50,50,-149,-159,-160,-156,-336,50,-183,-30,50,50,-74,50,50,50,50,-174,-175,50,-143,-140,50,-141,-145,-76,-79,-82,-75,-77,50,-81,-215,-214,-80,-216,-78,-127,50,-153,50,-151,-148,-157,-168,-69,-36,-35,50,50,50,-234,-233,50,-231,-217,-230,-81,-84,-218,-152,-150,-158,-170,-169,-31,-34,50,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'MINUSEQUAL':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,160,161,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-315,-289,-328,-316,-329,-320,-276,-323,-312,-336,358,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'INT_CONST_OCT':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,146,149,150,151,165,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,4
88,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,145,-335,-28,-182,-27,145,-337,-87,-72,-337,145,-286,-285,145,145,-283,-287,-288,145,-284,145,145,145,-336,-183,145,145,-28,-337,145,-28,-337,-337,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,-337,-76,-79,-82,-75,145,-77,145,145,-81,-215,-214,-80,-216,145,-78,145,145,-69,-284,145,145,-284,145,145,-244,-247,-245,-241,-242,-246,-248,145,-250,-251,-243,-249,-12,145,145,-11,145,145,145,145,-234,-233,145,-231,145,145,-217,145,-230,145,-84,-218,145,145,145,-337,-337,-198,145,145,145,-337,-284,-229,-232,145,-221,145,-83,-219,-68,145,-28,-337,145,-11,145,145,-220,145,145,145,-284,145,145,145,-337,145,-225,-224,-222,-84,145,145,145,-226,-223,145,-228,-227,]),'TIMESEQUAL':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,160,161,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-315,-289,-328,-316,-329,-320,-276,-323,-312,-336,367,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'OR':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,158,160,161,162,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-255,-315,-289,259,-328,-316,-329,-320,-276,-323,-312,-336,-274,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-261,259,-262,-260,-264,-268,-263,-259,-266,-271,-257,-256,-265,259,-267,-269,-270,-258,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'SHORT':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,40,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,64,65,66,67,68,71,75,76,80,81,82,85,86,87,89,90,91,93,94,95,96,97,98,99,100,101,105,109,111,118,119,120,121,122,123,124,129,142,147,172,174,177,180,181,182,184,185,186,187,188,189,190,191,192,198,200,211,214,223,229,231,233,239,240,241,267,269,275,278,279,280,284,285,286,289,291,298,300,301,302,303,305,308,312,313,314,315,316,317,318,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,446,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[2,-337,-113,-128,2,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,2,-120,-115,-65,-102,-126,-131,-108,-238,-111,-122,-63,-129,2,-29,-121,-116,-62,-112,-70,-52,-123,-117,-337,-337,-119,-337,-114,-130,2,-118,-71,-103,-337,-9,-131,-91,-10,-96,-98,2,-131,-95,-101,-97,2,-53,-126,2,-88,2,2,-93,2,-147,-335,-146,2,-167,-166,-182,-100,-126,2,-87,-90,-94,-92,-61,-72,2,-144,-142,2,2,2,-73,2,-89,2,2,2,-149,-159,-160,-156,-336,2,-183,-30,2,2,-74,2,2,2,2,-174,-175,2,-143,-140,2,-141,-145,-76,-79,-82,-75,-77,2,-81,-215,-214,-80,-216,-78,-127,2,-153,2,-151,-148,-157,-168,-69,-36,-35,2,2,2,-234,-233,2,-231,-217,-230,-81,-84,-218,-152,-150,-158,-170,-169,-31,-34,2,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'RETURN':([61,97,119,124,181,191,284,285,286,289,291,298,300,301,302,303,305,307,308,332
,424,425,428,429,432,435,437,438,439,440,496,497,500,502,505,506,510,535,536,537,539,554,555,557,558,569,574,575,576,577,578,579,],[-71,-335,-87,-72,290,-336,-76,-79,-82,-75,-77,290,-81,-215,-214,-80,-216,290,-78,-69,-234,-233,-231,290,-217,-230,290,-84,-218,290,-229,-232,-221,290,-83,-219,-68,290,-220,290,290,-225,-224,-222,-84,290,290,-226,-223,290,-228,-227,]),'RSHIFTEQUAL':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,160,161,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-315,-289,-328,-316,-329,-320,-276,-323,-312,-336,368,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'_ALIGNAS':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,65,68,71,75,76,80,81,82,85,86,87,89,90,93,95,96,97,98,99,100,101,109,111,118,119,123,124,129,142,147,174,177,180,181,182,184,185,186,187,188,189,190,191,192,200,211,223,229,231,233,239,240,241,267,269,275,278,279,280,284,285,286,289,291,298,300,301,302,303,305,308,312,313,314,315,316,317,318,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,446,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[8,8,-113,-128,8,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,8,-120,-115,-65,-102,8,-131,-108,-238,-111,-122,-63,-129,-29,-121,-116,-62,-112,-70,-52,-123,-117,8,8,-119,8,-114,-130,8,-118,-71,-103,8,-131,-96,-98,8,-131,-95,-101,-97,8,-53,8,8,-88,8,8,-147,-335,-146,8,-167,-166,-100,-126,8,-87,-61,-72,8,-144,-142,8,8,-73,8,-89,8,8,8,-149,-159,-160,-156,-336,8,-30,8,-74,8,8,8,8,-174,-175,8,-143,-140,8,-141,-145,-76,-79,-82,-75,-77,8,-81,-215,-214,-80,-216,-78,-127,8,-153,8,-151,-148,-157,-168,-69,-36,-35,8,8,8,-234,-233,8,-231,-217,-230,-81,-84,-218,-152,-150,-158,-170,-169,-31,-34,8,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'RESTRICT':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,35,36,38,39,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,65,68,71,75,76,80,81,82,85,86,87,89,90,93,95,96,97,98,99,100,101,103,105,109,111,117,118,119,123,124,128,129,142,147,172,174,177,180,181,182,184,185,186,187,188,189,190,191,192,198,200,205,206,211,219,220,223,229,231,233,239,240,241,267,269,275,278,279,280,282,284,285,286,289,291,298,300,301,302,303,305,308,312,313,314,315,316,317,318,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,446,448,449,453,454,459,460,496,497,500,505,506,510,511,512,514,515,536,554,555,557,558,575,576,578,579,],[39,39,-113,-128,39,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,39,-120,-115,-65,-102,39,-131,-108,-238,-111,39,-122,-63,-129,-29,-121,-116,-62,-112,-70,-52,-123,-117,39,39,-119,39,-114,-130,39,-118,-71,-103,39,-131,-96,-98,39,-131,-95,-101,-97,39,-53,39,39,-88,39,39,-147,-335,-146,39,-167,-166,39,-182,-100,-126,39,39,-87,-61,-72,39,39,-144,-142,39,39,39,-73,39,-89,39,39,39,-149,-159,-160,-156,-336,39,-183,-30,39,39,39,39,39,-74,39,39,39,39,-174,-175,39,-143,-140,39,-141,-145,39,-76,-79,-82,-75,-77,39,-81,-215,-214,-80,-216,-78,-127,39,-153,39,-151,-148,-157,-168,-69,-36,-35,39,39,39,-234,-233,39,-231,-217,-230,-81,-84,-218,-152,-150,-158,-170,-169,-31,-34,39,39,-229,-232,-221,-83,-219,-
68,-33,-32,39,39,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'STATIC':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,65,68,71,75,76,80,81,82,86,87,89,90,93,96,97,98,100,101,105,109,111,117,118,119,123,124,128,129,180,181,182,187,191,198,200,205,211,219,223,240,241,278,284,285,286,289,291,298,300,301,302,303,305,308,312,314,316,317,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,448,449,453,454,459,460,496,497,500,505,506,510,511,512,514,536,554,555,557,558,575,576,578,579,],[10,10,-113,-128,10,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,10,-120,-115,-65,-102,10,-131,-108,-238,-111,-122,-63,-129,-29,-121,-116,-62,-112,-70,-52,-123,-117,10,10,-119,10,-114,-130,10,-118,-71,-103,10,-131,-96,-98,10,-131,-95,-101,-97,-53,10,10,-88,10,-147,-335,-146,-167,-166,-182,-100,-126,206,10,-87,-61,-72,220,10,-73,10,-89,-149,-336,-183,-30,338,10,353,-74,-174,-175,10,-76,-79,-82,-75,-77,10,-81,-215,-214,-80,-216,-78,-127,-153,-151,-148,-168,-69,-36,-35,10,10,10,-234,-233,10,-231,-217,-230,-81,-84,-218,-152,-150,-170,-169,-31,-34,515,10,-229,-232,-221,-83,-219,-68,-33,-32,542,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'SIZEOF':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,146,149,150,151,165,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,146,-335,-28,-182,-27,146,-337,-87,-72,-337,146,-286,-285,146,146,-283,-287,-288,146,-284,146,146,146,-336,-183,146,146,-28,-337,146,-28,-337,-337,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,-337,-76,-79,-82,-75,146,-77,146,146,-81,-215,-214,-80,-216,146,-78,146,146,-69,-284,146,146,-284,146,146,-244,-247,-245,-241,-242,-246,-248,146,-250,-251,-243,-249,-12,146,146,-11,146,146,146,146,-234,-233,146,-231,146,146,-217,146,-230,146,-84,-218,146,146,146,-337,-337,-198,146,146,146,-337,-284,-229,-232,146,-221,146,-83,-219,-68,146,-28,-337,146,-11,146,146,-220,146,146,146,-284,146,146,146,-337,146,-225,-224,-222,-84,146,146,146,-226,-223,146,-228,-227,]),'UNSIGNED':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,40,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,64,65,66,67,68,71,75,76,80,81,82,85,86,87,89,90,91,93,94,95,96,97,98,99,100,101,105,109,111,118,119,120,121,122,123,124,129,142,147,172,174,177,180,181,182,184,185,186,187,188,189,190,191,192,198,200,211,214,223,229,231,233,239,240,241,267,269,275,278,279,280,284,285,286,289,291,298,300,301,302,303,305,308,312,313,314,315,316,317,318,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,446,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[22,-337,-113,-128,22,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,22,-120,-115,-65,-102,-126,-131,-108,-238,-111,-122,-63,-129,22,-29,-121,-116,-62,-112,-70,-52,-123,-117,-337,-337,-119,-337,-114,-130,22,-
118,-71,-103,-337,-9,-131,-91,-10,-96,-98,22,-131,-95,-101,-97,22,-53,-126,22,-88,22,22,-93,22,-147,-335,-146,22,-167,-166,-182,-100,-126,22,-87,-90,-94,-92,-61,-72,22,-144,-142,22,22,22,-73,22,-89,22,22,22,-149,-159,-160,-156,-336,22,-183,-30,22,22,-74,22,22,22,22,-174,-175,22,-143,-140,22,-141,-145,-76,-79,-82,-75,-77,22,-81,-215,-214,-80,-216,-78,-127,22,-153,22,-151,-148,-157,-168,-69,-36,-35,22,22,22,-234,-233,22,-231,-217,-230,-81,-84,-218,-152,-150,-158,-170,-169,-31,-34,22,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'UNION':([0,1,3,7,10,11,13,14,16,17,19,20,21,25,26,27,29,30,38,39,40,42,45,47,48,52,53,55,58,59,61,62,63,64,65,66,67,75,85,86,87,90,91,93,94,95,97,99,105,118,119,120,121,122,123,124,129,172,174,180,181,182,184,185,186,188,189,190,191,198,200,214,223,229,231,233,239,240,241,267,278,284,285,286,289,291,298,300,301,302,303,305,308,312,313,315,318,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,446,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[28,-337,-128,-106,-104,-107,-105,-64,-60,-67,-66,-109,28,-65,-102,-337,-131,-108,-63,-129,28,-29,-62,-70,-52,-337,-337,-337,-130,28,-71,-103,-337,-9,-131,-91,-10,28,28,-53,-337,-88,28,28,-93,28,-335,28,-182,28,-87,-90,-94,-92,-61,-72,28,28,28,-73,28,-89,28,28,28,-159,-160,-156,-336,-183,-30,28,-74,28,28,28,28,-174,-175,28,28,-76,-79,-82,-75,-77,28,-81,-215,-214,-80,-216,-78,-127,28,28,-157,-69,-36,-35,28,28,28,-234,-233,28,-231,-217,-230,-81,-84,-218,-158,-31,-34,28,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'COLON':([2,3,5,6,12,22,23,33,34,36,39,42,43,44,46,48,49,50,54,56,58,60,73,74,76,77,86,96,98,100,101,111,127,132,133,134,136,138,139,140,141,142,143,144,145,147,148,152,153,154,156,158,160,161,162,163,164,166,167,168,169,176,178,179,187,191,192,200,216,224,225,230,232,234,235,236,237,238,240,241,261,263,268,269,272,273,275,279,280,295,310,312,314,316,317,324,328,340,341,355,356,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,407,411,431,442,443,445,448,449,453,454,464,465,468,476,478,480,482,483,486,487,511,512,518,519,524,547,551,565,],[-113,-128,-124,-110,-125,-120,-115,-238,-111,-122,-129,-29,-121,-116,-112,-52,-123,-117,-119,-114,-130,-118,-54,-179,-131,-37,-53,-147,-146,-167,-166,-126,-55,-317,-321,-318,-303,-324,-330,-313,-319,-144,-301,-274,-314,-142,-327,-325,-304,-322,-302,-255,-315,-289,-253,-328,-316,-329,-320,-276,-323,-312,-252,-178,-149,-336,319,-30,-38,-274,-239,-326,-280,-277,-334,-332,-331,-333,-174,-175,-298,-297,-279,-143,-235,-278,-140,-141,-145,429,440,-127,-153,-151,-148,447,-168,-36,-35,-44,-43,-261,-273,-262,-260,-264,-268,-263,-259,-266,-271,-257,-256,-265,-272,-267,-269,481,-270,-258,-296,-295,-294,-293,-292,-305,502,-152,-150,319,-170,-169,-31,-34,-39,-42,-240,-237,-281,-282,-290,-291,-236,-275,-33,-32,-41,-40,-254,-306,-299,-300,]),'$end':([0,9,14,16,17,19,25,38,45,47,57,59,61,119,123,124,180,191,223,332,439,510,],[-337,0,-64,-60,-67,-66,-65,-63,-62,-70,-59,-58,-71,-87,-61,-72,-73,-336,-74,-69,-218,-68,]),'WSTRING_LITERAL':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,139,146,148,149,150,151,153,163,165,166,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,235,236,237,238,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,36
7,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,148,-335,-28,-182,-27,148,-337,-87,-72,-337,148,-286,-285,-330,148,-327,148,-283,-287,237,-328,-288,-329,148,-284,148,148,148,-336,-183,148,148,-28,-337,148,-28,-337,-337,148,148,148,-334,-332,-331,-333,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,-337,-76,-79,-82,-75,148,-77,148,148,-81,-215,-214,-80,-216,148,-78,148,148,-69,-284,148,148,-284,148,148,-244,-247,-245,-241,-242,-246,-248,148,-250,-251,-243,-249,-12,148,148,-11,148,148,148,148,-234,-233,148,-231,148,148,-217,148,-230,148,-84,-218,148,148,148,-337,-337,-198,148,148,148,-337,-284,-229,-232,148,-221,148,-83,-219,-68,148,-28,-337,148,-11,148,148,-220,148,148,148,-284,148,148,148,-337,148,-225,-224,-222,-84,148,148,148,-226,-223,148,-228,-227,]),'DIVIDE':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,158,160,161,162,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-255,-315,-289,252,-328,-316,-329,-320,-276,-323,-312,-336,-274,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,252,252,252,252,252,252,252,252,252,252,-257,-256,252,252,252,252,252,-258,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'FOR':([61,97,119,124,181,191,284,285,286,289,291,298,300,301,302,303,305,307,308,332,424,425,428,429,432,435,437,438,439,440,496,497,500,502,505,506,510,535,536,537,539,554,555,557,558,569,574,575,576,577,578,579,],[-71,-335,-87,-72,292,-336,-76,-79,-82,-75,-77,292,-81,-215,-214,-80,-216,292,-78,-69,-234,-233,-231,292,-217,-230,292,-84,-218,292,-229,-232,-221,292,-83,-219,-68,292,-220,292,292,-225,-224,-222,-84,292,292,-226,-223,292,-228,-227,]),'PLUSPLUS':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,132,133,134,135,136,137,138,139,140,141,143,145,146,148,149,150,151,152,153,154,156,160,161,163,164,165,166,167,168,169,171,173,174,175,176,181,191,198,201,204,205,206,218,219,220,227,229,230,231,233,235,236,237,238,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,263,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,310,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,402,403,404,405,407,411,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,482,483,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,547,549,550,551,553,554,555,557,558,565,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,149,-335,-28,-182,-27,149,-337,-87,-72,-337,149,-317,-321,-318,-286,-303,-285,-324,-330,-313,-319,-301,-314,149,-327,149,-283,-287,-325,-304,-322,-302,-315,-289,-328,-316,-288,-329,-320,263,-323,149,-284,149,149,-312,149,-336,-183,149,149,-28,-337,149,-28,-337,-337,149,-326,149,149,-334,-332,-331,-333,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,-298,-297,149,149,-337,-76,-79,-82,-75,149,-77,149,149,-81,-215,-214,-80,-216,149,-78,-312,149,149,-69,-284,149,149,-284,149,149,-244,-24
7,-245,-241,-242,-246,-248,149,-250,-251,-243,-249,-12,149,149,-11,-296,-295,-294,-293,-292,-305,149,149,149,149,-234,-233,149,-231,149,149,-217,149,-230,149,-84,-218,149,149,149,-337,-337,-198,149,149,-290,-291,149,-337,-284,-229,-232,149,-221,149,-83,-219,-68,149,-28,-337,149,-11,149,149,-220,149,149,149,-284,149,149,-306,149,-337,-299,149,-225,-224,-222,-84,-300,149,149,149,-226,-223,149,-228,-227,]),'EQUALS':([42,48,73,74,75,77,78,86,110,127,132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,160,161,163,164,166,167,168,169,176,179,191,197,200,216,224,230,232,234,235,236,237,238,261,263,268,273,310,340,341,355,356,371,376,402,403,404,405,407,411,453,454,464,465,470,474,478,480,482,483,487,511,512,518,519,520,547,551,565,],[-29,-52,-54,-179,-178,-37,131,-53,201,-55,-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-315,-289,-328,-316,-329,-320,-276,-323,-312,-178,-336,329,-30,-38,360,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-36,-35,-44,-43,-199,475,-296,-295,-294,-293,-292,-305,-31,-34,-39,-42,-202,-200,-281,-282,-290,-291,-275,-33,-32,-41,-40,-201,-306,-299,-300,]),'ELSE':([61,124,191,284,285,286,289,291,300,303,308,332,424,425,428,435,437,438,439,496,497,500,505,506,510,536,554,555,557,558,575,576,578,579,],[-71,-72,-336,-76,-79,-82,-75,-77,-81,-80,-78,-69,-234,-233,-231,-230,-81,-84,-218,-229,-232,-221,-83,-219,-68,-220,-225,-224,-222,569,-226,-223,-228,-227,]),'ANDEQUAL':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,160,161,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-315,-289,-328,-316,-329,-320,-276,-323,-312,-336,365,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'EQ':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,158,160,161,162,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-255,-315,-289,256,-328,-316,-329,-320,-276,-323,-312,-336,-274,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-261,256,-262,-260,-264,-268,-263,-259,-266,256,-257,-256,-265,256,-267,256,256,-258,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'AND':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,132,133,134,135,136,137,138,139,140,141,143,144,145,146,148,149,150,151,152,153,154,156,158,160,161,162,163,164,165,166,167,168,169,171,173,174,175,176,181,191,198,201,204,205,206,218,219,220,224,227,229,230,231,232,233,234,235,236,237,238,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,263,265,266,268,273,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,310,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,478,480,481,482,483,484,487,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,547,549,550,551,553,554,555,557,558,565,566,569,574,575,576,5
77,578,579,],[-128,-129,-130,-71,-131,150,-335,-28,-182,-27,150,-337,-87,-72,-337,150,-317,-321,-318,-286,-303,-285,-324,-330,-313,-319,-301,-274,-314,150,-327,150,-283,-287,-325,-304,-322,-302,-255,-315,-289,257,-328,-316,-288,-329,-320,-276,-323,150,-284,150,150,-312,150,-336,-183,150,150,-28,-337,150,-28,-337,-274,-337,150,-326,150,-280,150,-277,-334,-332,-331,-333,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,-298,-297,150,150,-279,-278,-337,-76,-79,-82,-75,150,-77,150,150,-81,-215,-214,-80,-216,150,-78,-312,150,150,-69,-284,150,150,-284,150,150,-244,-247,-245,-241,-242,-246,-248,150,-250,-251,-243,-249,-12,150,150,-11,-261,257,-262,-260,-264,-268,-263,-259,-266,257,-257,-256,-265,257,-267,-269,257,-258,-296,-295,-294,-293,-292,-305,150,150,150,150,-234,-233,150,-231,150,150,-217,150,-230,150,-84,-218,150,150,150,-337,-337,-198,150,-281,-282,150,-290,-291,150,-275,-337,-284,-229,-232,150,-221,150,-83,-219,-68,150,-28,-337,150,-11,150,150,-220,150,150,150,-284,150,150,-306,150,-337,-299,150,-225,-224,-222,-84,-300,150,150,150,-226,-223,150,-228,-227,]),'TYPEID':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,38,39,40,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,64,65,66,67,68,69,71,72,75,76,80,81,82,85,86,87,89,90,91,93,94,95,96,97,98,99,100,101,103,104,105,106,109,111,118,119,120,121,122,123,124,126,129,142,147,172,174,180,181,182,184,185,186,187,188,189,190,191,192,198,199,200,202,211,214,223,229,231,233,239,240,241,262,264,267,269,275,278,279,280,284,285,286,289,291,298,300,301,302,303,305,308,312,313,314,315,316,317,318,328,332,340,341,342,344,350,422,424,425,427,428,432,435,437,438,439,442,443,445,446,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[33,-337,-113,-128,77,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,33,-120,-115,-154,-65,-102,-126,-155,-131,-108,96,100,-238,-111,-337,-122,-63,-129,33,-29,-121,-116,-62,-112,-70,-52,-123,-117,-337,-337,-119,-337,-114,-130,33,-118,-71,-103,-337,-9,-131,-91,-10,-96,77,-98,77,33,-131,-95,-101,-97,33,-53,-126,77,-88,33,33,-93,33,-147,-335,-146,33,-167,-166,-28,-180,-182,-27,-100,-126,33,-87,-90,-94,-92,-61,-72,77,33,-144,-142,33,33,-73,33,-89,33,33,33,-149,-159,-160,-156,-336,77,-183,-181,-30,77,347,33,-74,33,33,33,33,-174,-175,402,404,33,-143,-140,33,-141,-145,-76,-79,-82,-75,-77,33,-81,-215,-214,-80,-216,-78,-127,33,-153,33,-151,-148,-157,-168,-69,-36,-35,33,347,33,33,-234,-233,33,-231,-217,-230,-81,-84,-218,-152,-150,77,-158,-170,-169,-31,-34,33,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'LBRACE':([21,24,28,31,32,42,48,61,75,86,88,90,92,93,96,97,98,100,101,119,124,130,131,181,182,191,200,201,227,229,284,285,286,289,291,298,300,301,302,303,305,307,308,332,340,341,369,375,377,413,424,425,428,429,432,435,437,438,439,440,453,454,472,475,477,478,479,488,496,497,500,502,505,506,510,511,512,521,522,535,536,537,539,550,554,555,557,558,569,574,575,576,577,578,579,],[-337,-154,-155,97,97,-29,-52,-71,-337,-53,-7,-88,97,-8,97,-335,97,97,97,-87,-72,97,97,97,-89,-336,-30,97,-337,97,-76,-79,-82,-75,-77,97,-81,-215,-214,-80,-216,97,-78,-69,-36,-35,-12,97,-11,97,-234,-233,-231,97,-217,-230,97,-84,-218,97,-31,-34,-337,-198,97,97,97,-337,-229,-232,-221,97,-83,-219,-68,-33,-32,97,-11,97,-220,97,97,-337,-225,-224,-222,-84,97,97,-226,-223,97,-228,-227,]),'PPHASH':([0,14,16,17,19,25,38,45,47,59,61,119,123,124,180,191,223,332,439,510,],[47,-64,-60,-67,-66,-65,-63,-62,-70,47,
-71,-87,-61,-72,-73,-336,-74,-69,-218,-68,]),'INT':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,40,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,64,65,66,67,68,71,75,76,80,81,82,85,86,87,89,90,91,93,94,95,96,97,98,99,100,101,105,109,111,118,119,120,121,122,123,124,129,142,147,172,174,177,180,181,182,184,185,186,187,188,189,190,191,192,198,200,211,214,223,229,231,233,239,240,241,267,269,275,278,279,280,284,285,286,289,291,298,300,301,302,303,305,308,312,313,314,315,316,317,318,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,446,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[56,-337,-113,-128,56,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,56,-120,-115,-65,-102,-126,-131,-108,-238,-111,-122,-63,-129,56,-29,-121,-116,-62,-112,-70,-52,-123,-117,-337,-337,-119,-337,-114,-130,56,-118,-71,-103,-337,-9,-131,-91,-10,-96,-98,56,-131,-95,-101,-97,56,-53,-126,56,-88,56,56,-93,56,-147,-335,-146,56,-167,-166,-182,-100,-126,56,-87,-90,-94,-92,-61,-72,56,-144,-142,56,56,56,-73,56,-89,56,56,56,-149,-159,-160,-156,-336,56,-183,-30,56,56,-74,56,56,56,56,-174,-175,56,-143,-140,56,-141,-145,-76,-79,-82,-75,-77,56,-81,-215,-214,-80,-216,-78,-127,56,-153,56,-151,-148,-157,-168,-69,-36,-35,56,56,56,-234,-233,56,-231,-217,-230,-81,-84,-218,-152,-150,-158,-170,-169,-31,-34,56,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'SIGNED':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,40,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,64,65,66,67,68,71,75,76,80,81,82,85,86,87,89,90,91,93,94,95,96,97,98,99,100,101,105,109,111,118,119,120,121,122,123,124,129,142,147,172,174,177,180,181,182,184,185,186,187,188,189,190,191,192,198,200,211,214,223,229,231,233,239,240,241,267,269,275,278,279,280,284,285,286,289,291,298,300,301,302,303,305,308,312,313,314,315,316,317,318,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,446,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[54,-337,-113,-128,54,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,54,-120,-115,-65,-102,-126,-131,-108,-238,-111,-122,-63,-129,54,-29,-121,-116,-62,-112,-70,-52,-123,-117,-337,-337,-119,-337,-114,-130,54,-118,-71,-103,-337,-9,-131,-91,-10,-96,-98,54,-131,-95,-101,-97,54,-53,-126,54,-88,54,54,-93,54,-147,-335,-146,54,-167,-166,-182,-100,-126,54,-87,-90,-94,-92,-61,-72,54,-144,-142,54,54,54,-73,54,-89,54,54,54,-149,-159,-160,-156,-336,54,-183,-30,54,54,-74,54,54,54,54,-174,-175,54,-143,-140,54,-141,-145,-76,-79,-82,-75,-77,54,-81,-215,-214,-80,-216,-78,-127,54,-153,54,-151,-148,-157,-168,-69,-36,-35,54,54,54,-234,-233,54,-231,-217,-230,-81,-84,-218,-152,-150,-158,-170,-169,-31,-34,54,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'CONTINUE':([61,97,119,124,181,191,284,285,286,289,291,298,300,301,302,303,305,307,308,332,424,425,428,429,432,435,437,438,439,440,496,497,500,502,505,506,510,535,536,537,539,554,555,557,558,569,574,575,576,577,578,579,],[-71,-335,-87,-72,293,-336,-76,-79,-82,-75,-77,293,-81,-215,-214,-80,-216,293,-78,-69,-234,-233,-231,293,-217,-230,293,-84,-218,293,-229,-232,-221,293,-83,-219,-68,293,-220,293,293,-225,-224,-222,-84,293,293,-226,-223,293,-228,-227,]),'NOT':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,146,149,150,151,165,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229
,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,151,-335,-28,-182,-27,151,-337,-87,-72,-337,151,-286,-285,151,151,-283,-287,-288,151,-284,151,151,151,-336,-183,151,151,-28,-337,151,-28,-337,-337,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,-337,-76,-79,-82,-75,151,-77,151,151,-81,-215,-214,-80,-216,151,-78,151,151,-69,-284,151,151,-284,151,151,-244,-247,-245,-241,-242,-246,-248,151,-250,-251,-243,-249,-12,151,151,-11,151,151,151,151,-234,-233,151,-231,151,151,-217,151,-230,151,-84,-218,151,151,151,-337,-337,-198,151,151,151,-337,-284,-229,-232,151,-221,151,-83,-219,-68,151,-28,-337,151,-11,151,151,-220,151,151,151,-284,151,151,151,-337,151,-225,-224,-222,-84,151,151,151,-226,-223,151,-228,-227,]),'OREQUAL':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,160,161,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-315,-289,-328,-316,-329,-320,-276,-323,-312,-336,366,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'MOD':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,158,160,161,162,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-255,-315,-289,260,-328,-316,-329,-320,-276,-323,-312,-336,-274,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,260,260,260,260,260,260,260,260,260,260,-257,-256,260,260,260,260,260,-258,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'RSHIFT':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,158,160,161,162,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-255,-315,-289,242,-328,-316,-329,-320,-276,-323,-312,-336,-274,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-261,242,-262,-260,242,242,242,-259,242,242,-257,-256,242,242,242,242,242,-258,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'DEFAULT':([61,97,119,124,181,191,284,285,286,289,291,298,300,301,302,303,305,307,308,332,424,425,428,429,432,435,437,438,439,440,496,497,500,502,505,506,510,535,536,537,539,554,555,557,558,569,574,575,576,577,578,579,],[-71,-335,-87,-72,295,-336,-76,-79,-82,-75,-77,295,-81,-215,-214,-80,-216,295,-78,-69,-234,-233,-231,295,-217,-230,295,-84,-218,295,-229,-232,-221,295,-83,-219,-68,295,-220,295,295,-225,-224,-222,-84,295,295,-226,-223,295,-228,-227,]),'_NORETURN':([0,1,2,3,4,5,6,7,10,11,12,1
3,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,65,68,71,75,76,80,81,82,86,87,89,90,93,96,97,98,100,101,109,111,118,119,123,124,129,180,181,182,187,191,200,211,223,240,241,278,284,285,286,289,291,298,300,301,302,303,305,308,312,314,316,317,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[20,20,-113,-128,20,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,20,-120,-115,-65,-102,20,-131,-108,-238,-111,-122,-63,-129,-29,-121,-116,-62,-112,-70,-52,-123,-117,20,20,-119,20,-114,-130,20,-118,-71,-103,20,-131,-96,-98,20,-131,-95,-101,-97,-53,20,20,-88,20,-147,-335,-146,-167,-166,-100,-126,20,-87,-61,-72,20,-73,20,-89,-149,-336,-30,20,-74,-174,-175,20,-76,-79,-82,-75,-77,20,-81,-215,-214,-80,-216,-78,-127,-153,-151,-148,-168,-69,-36,-35,20,20,20,-234,-233,20,-231,-217,-230,-81,-84,-218,-152,-150,-170,-169,-31,-34,20,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'__INT128':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,40,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,64,65,66,67,68,71,75,76,80,81,82,85,86,87,89,90,91,93,94,95,96,97,98,99,100,101,105,109,111,118,119,120,121,122,123,124,129,142,147,172,174,177,180,181,182,184,185,186,187,188,189,190,191,192,198,200,211,214,223,229,231,233,239,240,241,267,269,275,278,279,280,284,285,286,289,291,298,300,301,302,303,305,308,312,313,314,315,316,317,318,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,446,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[43,-337,-113,-128,43,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,43,-120,-115,-65,-102,-126,-131,-108,-238,-111,-122,-63,-129,43,-29,-121,-116,-62,-112,-70,-52,-123,-117,-337,-337,-119,-337,-114,-130,43,-118,-71,-103,-337,-9,-131,-91,-10,-96,-98,43,-131,-95,-101,-97,43,-53,-126,43,-88,43,43,-93,43,-147,-335,-146,43,-167,-166,-182,-100,-126,43,-87,-90,-94,-92,-61,-72,43,-144,-142,43,43,43,-73,43,-89,43,43,43,-149,-159,-160,-156,-336,43,-183,-30,43,43,-74,43,43,43,43,-174,-175,43,-143,-140,43,-141,-145,-76,-79,-82,-75,-77,43,-81,-215,-214,-80,-216,-78,-127,43,-153,43,-151,-148,-157,-168,-69,-36,-35,43,43,43,-234,-233,43,-231,-217,-230,-81,-84,-218,-152,-150,-158,-170,-169,-31,-34,43,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'WHILE':([61,97,119,124,181,191,284,285,286,289,291,298,300,301,302,303,305,307,308,332,424,425,428,429,432,435,436,437,438,439,440,496,497,500,502,505,506,510,535,536,537,539,554,555,557,558,569,574,575,576,577,578,579,],[-71,-335,-87,-72,296,-336,-76,-79,-82,-75,-77,296,-81,-215,-214,-80,-216,296,-78,-69,-234,-233,-231,296,-217,-230,504,296,-84,-218,296,-229,-232,-221,296,-83,-219,-68,296,-220,296,296,-225,-224,-222,-84,296,296,-226,-223,296,-228,-227,]),'U8CHAR_CONST':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,146,149,150,151,165,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,500
,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,154,-335,-28,-182,-27,154,-337,-87,-72,-337,154,-286,-285,154,154,-283,-287,-288,154,-284,154,154,154,-336,-183,154,154,-28,-337,154,-28,-337,-337,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,-337,-76,-79,-82,-75,154,-77,154,154,-81,-215,-214,-80,-216,154,-78,154,154,-69,-284,154,154,-284,154,154,-244,-247,-245,-241,-242,-246,-248,154,-250,-251,-243,-249,-12,154,154,-11,154,154,154,154,-234,-233,154,-231,154,154,-217,154,-230,154,-84,-218,154,154,154,-337,-337,-198,154,154,154,-337,-284,-229,-232,154,-221,154,-83,-219,-68,154,-28,-337,154,-11,154,154,-220,154,154,154,-284,154,154,154,-337,154,-225,-224,-222,-84,154,154,154,-226,-223,154,-228,-227,]),'_ALIGNOF':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,146,149,150,151,165,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,155,-335,-28,-182,-27,155,-337,-87,-72,-337,155,-286,-285,155,155,-283,-287,-288,155,-284,155,155,155,-336,-183,155,155,-28,-337,155,-28,-337,-337,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,-337,-76,-79,-82,-75,155,-77,155,155,-81,-215,-214,-80,-216,155,-78,155,155,-69,-284,155,155,-284,155,155,-244,-247,-245,-241,-242,-246,-248,155,-250,-251,-243,-249,-12,155,155,-11,155,155,155,155,-234,-233,155,-231,155,155,-217,155,-230,155,-84,-218,155,155,155,-337,-337,-198,155,155,155,-337,-284,-229,-232,155,-221,155,-83,-219,-68,155,-28,-337,155,-11,155,155,-220,155,155,155,-284,155,155,155,-337,155,-225,-224,-222,-84,155,155,155,-226,-223,155,-228,-227,]),'EXTERN':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,65,68,71,75,76,80,81,82,86,87,89,90,93,96,97,98,100,101,109,111,118,119,123,124,129,180,181,182,187,191,200,211,223,240,241,278,284,285,286,289,291,298,300,301,302,303,305,308,312,314,316,317,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[13,13,-113,-128,13,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,13,-120,-115,-65,-102,13,-131,-108,-238,-111,-122,-63,-129,-29,-121,-116,-62,-112,-70,-52,-123,-117,13,13,-119,13,-114,-130,13,-118,-71,-103,13,-131,-96,-98,13,-131,-95,-101,-97,-53,13,13,-88,13,-147,-335,-146,-167,-166,-100,-126,13,-87,-61,-72,13,-73,13,-89,-149,-336,-30,13,-74,-174,-175,13,-76,-79,-82,-75,-77,13,-81,-215,-214,-80,-216,-78,-127,-153,-151,-148,-168,-69,-36,-35,13,13,13,-234,-233,13,-231,-217,-230,-81,-84,-218,-152,-150,-170,-169,-31,-34,13,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'CASE':([61,97,119,124,181,191,284,285,286,289,291,298,300,301,302,303,305,307,308,332,424,425,428,429,432,435,437,438,439,440,496,497,500,502,
505,506,510,535,536,537,539,554,555,557,558,569,574,575,576,577,578,579,],[-71,-335,-87,-72,297,-336,-76,-79,-82,-75,-77,297,-81,-215,-214,-80,-216,297,-78,-69,-234,-233,-231,297,-217,-230,297,-84,-218,297,-229,-232,-221,297,-83,-219,-68,297,-220,297,297,-225,-224,-222,-84,297,297,-226,-223,297,-228,-227,]),'LAND':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,158,160,161,162,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-255,-315,-289,255,-328,-316,-329,-320,-276,-323,-312,-336,-274,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-261,255,-262,-260,-264,-268,-263,-259,-266,-271,-257,-256,-265,-272,-267,-269,-270,-258,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'REGISTER':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,65,68,71,75,76,80,81,82,86,87,89,90,93,96,97,98,100,101,109,111,118,119,123,124,129,180,181,182,187,191,200,211,223,240,241,278,284,285,286,289,291,298,300,301,302,303,305,308,312,314,316,317,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[62,62,-113,-128,62,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,62,-120,-115,-65,-102,62,-131,-108,-238,-111,-122,-63,-129,-29,-121,-116,-62,-112,-70,-52,-123,-117,62,62,-119,62,-114,-130,62,-118,-71,-103,62,-131,-96,-98,62,-131,-95,-101,-97,-53,62,62,-88,62,-147,-335,-146,-167,-166,-100,-126,62,-87,-61,-72,62,-73,62,-89,-149,-336,-30,62,-74,-174,-175,62,-76,-79,-82,-75,-77,62,-81,-215,-214,-80,-216,-78,-127,-153,-151,-148,-168,-69,-36,-35,62,62,62,-234,-233,62,-231,-217,-230,-81,-84,-218,-152,-150,-170,-169,-31,-34,62,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'MODEQUAL':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,160,161,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-315,-289,-328,-316,-329,-320,-276,-323,-312,-336,359,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'NE':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,158,160,161,162,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-255,-315,-289,247,-328,-316,-329,-320,-276,-323,-312,-336,-274,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-261,247,-262,-260,-264,-268,-263,-259,-266,247,-257,-256,-265,247,-267,247,247,-258,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'SWITCH':([61,97,119,124,181,191,284,285,286,289,291,298,300,301,302,303,305,307,308,332,424,425,428,429,432,435,437,438,439,440,496,497,500,502,505,506,510,535,536,537,539,554,555,557,558,569,574,575,576,577,578,579,],[-71,-335,-87,-72,299,-336,-76,-79,-82,-75,-77,299,-81,-215,-2
14,-80,-216,299,-78,-69,-234,-233,-231,299,-217,-230,299,-84,-218,299,-229,-232,-221,299,-83,-219,-68,299,-220,299,299,-225,-224,-222,-84,299,299,-226,-223,299,-228,-227,]),'INT_CONST_HEX':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,146,149,150,151,165,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,160,-335,-28,-182,-27,160,-337,-87,-72,-337,160,-286,-285,160,160,-283,-287,-288,160,-284,160,160,160,-336,-183,160,160,-28,-337,160,-28,-337,-337,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,-337,-76,-79,-82,-75,160,-77,160,160,-81,-215,-214,-80,-216,160,-78,160,160,-69,-284,160,160,-284,160,160,-244,-247,-245,-241,-242,-246,-248,160,-250,-251,-243,-249,-12,160,160,-11,160,160,160,160,-234,-233,160,-231,160,160,-217,160,-230,160,-84,-218,160,160,160,-337,-337,-198,160,160,160,-337,-284,-229,-232,160,-221,160,-83,-219,-68,160,-28,-337,160,-11,160,160,-220,160,160,160,-284,160,160,160,-337,160,-225,-224,-222,-84,160,160,160,-226,-223,160,-228,-227,]),'_COMPLEX':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,40,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,64,65,66,67,68,71,75,76,80,81,82,85,86,87,89,90,91,93,94,95,96,97,98,99,100,101,105,109,111,118,119,120,121,122,123,124,129,142,147,172,174,177,180,181,182,184,185,186,187,188,189,190,191,192,198,200,211,214,223,229,231,233,239,240,241,267,269,275,278,279,280,284,285,286,289,291,298,300,301,302,303,305,308,312,313,314,315,316,317,318,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,446,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[60,-337,-113,-128,60,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,60,-120,-115,-65,-102,-126,-131,-108,-238,-111,-122,-63,-129,60,-29,-121,-116,-62,-112,-70,-52,-123,-117,-337,-337,-119,-337,-114,-130,60,-118,-71,-103,-337,-9,-131,-91,-10,-96,-98,60,-131,-95,-101,-97,60,-53,-126,60,-88,60,60,-93,60,-147,-335,-146,60,-167,-166,-182,-100,-126,60,-87,-90,-94,-92,-61,-72,60,-144,-142,60,60,60,-73,60,-89,60,60,60,-149,-159,-160,-156,-336,60,-183,-30,60,60,-74,60,60,60,60,-174,-175,60,-143,-140,60,-141,-145,-76,-79,-82,-75,-77,60,-81,-215,-214,-80,-216,-78,-127,60,-153,60,-151,-148,-157,-168,-69,-36,-35,60,60,60,-234,-233,60,-231,-217,-230,-81,-84,-218,-152,-150,-158,-170,-169,-31,-34,60,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'PPPRAGMASTR':([61,],[124,]),'PLUSEQUAL':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,160,161,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-315,-289,-328,-316,-329,-320,-276,-323,-312,-336,362,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-29
9,-300,]),'U32CHAR_CONST':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,146,149,150,151,165,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,138,-335,-28,-182,-27,138,-337,-87,-72,-337,138,-286,-285,138,138,-283,-287,-288,138,-284,138,138,138,-336,-183,138,138,-28,-337,138,-28,-337,-337,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,-337,-76,-79,-82,-75,138,-77,138,138,-81,-215,-214,-80,-216,138,-78,138,138,-69,-284,138,138,-284,138,138,-244,-247,-245,-241,-242,-246,-248,138,-250,-251,-243,-249,-12,138,138,-11,138,138,138,138,-234,-233,138,-231,138,138,-217,138,-230,138,-84,-218,138,138,138,-337,-337,-198,138,138,138,-337,-284,-229,-232,138,-221,138,-83,-219,-68,138,-28,-337,138,-11,138,138,-220,138,138,138,-284,138,138,138,-337,138,-225,-224,-222,-84,138,138,138,-226,-223,138,-228,-227,]),'CONDOP':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,158,160,161,162,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-255,-315,-289,258,-328,-316,-329,-320,-276,-323,-312,-336,-274,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-261,-273,-262,-260,-264,-268,-263,-259,-266,-271,-257,-256,-265,-272,-267,-269,-270,-258,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'U8STRING_LITERAL':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,139,146,148,149,150,151,153,163,165,166,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,235,236,237,238,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,163,-335,-28,-182,-27,163,-337,-87,-72,-337,163,-286,-285,-330,163,-327,163,-283,-287,236,-328,-288,-329,163,-284,163,163,163,-336,-183,163,163,-28,-337,163,-28,-337,-337,163,163,163,-334,-332,-331,-333,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,-337,-76,-79,-82,-75,163,-77,163,163,-81,-215,-214,-80,-216,163,-78,163,163,-69,-284,163,163,-284,163,163,-244,-247,-245,-241,-242,-246,-248,163,-250,-251,-243,-249,-12,163,163,-11,163,163,163,163,-234,-233,163,-231,163,163,-217,163,-230,163,-84,-218,163,163,163,-337,-337,-198,163,163,163,-337,-284,-229,-232,163,-221,163,-83,-219,-68,163,-28,-337,163,-11,163,163,-220,163,163,163,-284,163,163,163,-337,163,-225,-224,-222,-84,
163,163,163,-226,-223,163,-228,-227,]),'BREAK':([61,97,119,124,181,191,284,285,286,289,291,298,300,301,302,303,305,307,308,332,424,425,428,429,432,435,437,438,439,440,496,497,500,502,505,506,510,535,536,537,539,554,555,557,558,569,574,575,576,577,578,579,],[-71,-335,-87,-72,304,-336,-76,-79,-82,-75,-77,304,-81,-215,-214,-80,-216,304,-78,-69,-234,-233,-231,304,-217,-230,304,-84,-218,304,-229,-232,-221,304,-83,-219,-68,304,-220,304,304,-225,-224,-222,-84,304,304,-226,-223,304,-228,-227,]),'VOLATILE':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,35,36,38,39,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,65,68,71,75,76,80,81,82,85,86,87,89,90,93,95,96,97,98,99,100,101,103,105,109,111,117,118,119,123,124,128,129,142,147,172,174,177,180,181,182,184,185,186,187,188,189,190,191,192,198,200,205,206,211,219,220,223,229,231,233,239,240,241,267,269,275,278,279,280,282,284,285,286,289,291,298,300,301,302,303,305,308,312,313,314,315,316,317,318,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,446,448,449,453,454,459,460,496,497,500,505,506,510,511,512,514,515,536,554,555,557,558,575,576,578,579,],[58,58,-113,-128,58,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,58,-120,-115,-65,-102,58,-131,-108,-238,-111,58,-122,-63,-129,-29,-121,-116,-62,-112,-70,-52,-123,-117,58,58,-119,58,-114,-130,58,-118,-71,-103,58,-131,-96,-98,58,-131,-95,-101,-97,58,-53,58,58,-88,58,58,-147,-335,-146,58,-167,-166,58,-182,-100,-126,58,58,-87,-61,-72,58,58,-144,-142,58,58,58,-73,58,-89,58,58,58,-149,-159,-160,-156,-336,58,-183,-30,58,58,58,58,58,-74,58,58,58,58,-174,-175,58,-143,-140,58,-141,-145,58,-76,-79,-82,-75,-77,58,-81,-215,-214,-80,-216,-78,-127,58,-153,58,-151,-148,-157,-168,-69,-36,-35,58,58,58,-234,-233,58,-231,-217,-230,-81,-84,-218,-152,-150,-158,-170,-169,-31,-34,58,58,-229,-232,-221,-83,-219,-68,-33,-32,58,58,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'PPPRAGMA':([0,14,16,17,19,25,38,45,47,59,61,97,99,119,123,124,180,181,184,185,186,188,189,190,191,223,284,285,286,289,291,298,300,301,302,303,305,307,308,313,315,318,332,424,425,428,429,432,435,437,438,439,440,446,496,497,500,502,505,506,510,535,536,537,539,554,555,557,558,569,574,575,576,577,578,579,],[61,-64,-60,-67,-66,-65,-63,-62,-70,61,-71,-335,61,-87,-61,-72,-73,61,61,61,61,-159,-160,-156,-336,-74,-76,-79,-82,-75,-77,61,-81,-215,-214,-80,-216,61,-78,61,61,-157,-69,-234,-233,-231,61,-217,-230,61,-84,-218,61,-158,-229,-232,-221,61,-83,-219,-68,61,-220,61,61,-225,-224,-222,-84,61,61,-226,-223,61,-228,-227,]),'INLINE':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,65,68,71,75,76,80,81,82,86,87,89,90,93,96,97,98,100,101,109,111,118,119,123,124,129,180,181,182,187,191,200,211,223,240,241,278,284,285,286,289,291,298,300,301,302,303,305,308,312,314,316,317,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[30,30,-113,-128,30,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,30,-120,-115,-65,-102,30,-131,-108,-238,-111,-122,-63,-129,-29,-121,-116,-62,-112,-70,-52,-123,-117,30,30,-119,30,-114,-130,30,-118,-71,-103,30,-131,-96,-98,30,-131,-95,-101,-97,-53,30,30,-88,30,-147,-335,-146,-167,-166,-100,-126,30,-87,-61,-72,30,-73,30,-89,-149,-336,-30,30,-74,-174,-175,30,-76,-79,-82,-75,-77,30,-81,-215,-214,-80,-216,-78,-127,-153,-151,-148,-168,-69,-36,-35,30,30,30,-234,-233,30
,-231,-217,-230,-81,-84,-218,-152,-150,-170,-169,-31,-34,30,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'INT_CONST_BIN':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,146,149,150,151,165,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,164,-335,-28,-182,-27,164,-337,-87,-72,-337,164,-286,-285,164,164,-283,-287,-288,164,-284,164,164,164,-336,-183,164,164,-28,-337,164,-28,-337,-337,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,-337,-76,-79,-82,-75,164,-77,164,164,-81,-215,-214,-80,-216,164,-78,164,164,-69,-284,164,164,-284,164,164,-244,-247,-245,-241,-242,-246,-248,164,-250,-251,-243,-249,-12,164,164,-11,164,164,164,164,-234,-233,164,-231,164,164,-217,164,-230,164,-84,-218,164,164,164,-337,-337,-198,164,164,164,-337,-284,-229,-232,164,-221,164,-83,-219,-68,164,-28,-337,164,-11,164,164,-220,164,164,164,-284,164,164,164,-337,164,-225,-224,-222,-84,164,164,164,-226,-223,164,-228,-227,]),'DO':([61,97,119,124,181,191,284,285,286,289,291,298,300,301,302,303,305,307,308,332,424,425,428,429,432,435,437,438,439,440,496,497,500,502,505,506,510,535,536,537,539,554,555,557,558,569,574,575,576,577,578,579,],[-71,-335,-87,-72,307,-336,-76,-79,-82,-75,-77,307,-81,-215,-214,-80,-216,307,-78,-69,-234,-233,-231,307,-217,-230,307,-84,-218,307,-229,-232,-221,307,-83,-219,-68,307,-220,307,307,-225,-224,-222,-84,307,307,-226,-223,307,-228,-227,]),'LNOT':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,146,149,150,151,165,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,165,-335,-28,-182,-27,165,-337,-87,-72,-337,165,-286,-285,165,165,-283,-287,-288,165,-284,165,165,165,-336,-183,165,165,-28,-337,165,-28,-337,-337,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,-337,-76,-79,-82,-75,165,-77,165,165,-81,-215,-214,-80,-216,165,-78,165,165,-69,-284,165,165,-284,165,165,-244,-247,-245,-241,-242,-246,-248,165,-250,-251,-243,-249,-12,165,165,-11,165,165,165,165,-234,-233,165,-231,165,165,-217,165,-230,165,-84,-218,165,165,165,-337,-337,-198,165,165,165,-337,-284,-229,-232,165,-221,165,-83,-219,-68,165,-28,-337,165,-11,165,165,-220,165,165,165,-284,165,165,165,-337,165,-225,-224,-222,-84,165,165,165,-226,-223,165,-228,-227,]),'CONST':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,35,36,38,39,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,
59,60,61,62,63,65,68,71,75,76,80,81,82,85,86,87,89,90,93,95,96,97,98,99,100,101,103,105,109,111,117,118,119,123,124,128,129,142,147,172,174,177,180,181,182,184,185,186,187,188,189,190,191,192,198,200,205,206,211,219,220,223,229,231,233,239,240,241,267,269,275,278,279,280,282,284,285,286,289,291,298,300,301,302,303,305,308,312,313,314,315,316,317,318,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,446,448,449,453,454,459,460,496,497,500,505,506,510,511,512,514,515,536,554,555,557,558,575,576,578,579,],[3,3,-113,-128,3,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,3,-120,-115,-65,-102,3,-131,-108,-238,-111,3,-122,-63,-129,-29,-121,-116,-62,-112,-70,-52,-123,-117,3,3,-119,3,-114,-130,3,-118,-71,-103,3,-131,-96,-98,3,-131,-95,-101,-97,3,-53,3,3,-88,3,3,-147,-335,-146,3,-167,-166,3,-182,-100,-126,3,3,-87,-61,-72,3,3,-144,-142,3,3,3,-73,3,-89,3,3,3,-149,-159,-160,-156,-336,3,-183,-30,3,3,3,3,3,-74,3,3,3,3,-174,-175,3,-143,-140,3,-141,-145,3,-76,-79,-82,-75,-77,3,-81,-215,-214,-80,-216,-78,-127,3,-153,3,-151,-148,-157,-168,-69,-36,-35,3,3,3,-234,-233,3,-231,-217,-230,-81,-84,-218,-152,-150,-158,-170,-169,-31,-34,3,3,-229,-232,-221,-83,-219,-68,-33,-32,3,3,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'LSHIFT':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,158,160,161,162,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-255,-315,-289,244,-328,-316,-329,-320,-276,-323,-312,-336,-274,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-261,244,-262,-260,244,244,244,-259,244,244,-257,-256,244,244,244,244,244,-258,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'LOR':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,158,160,161,162,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-255,-315,-289,243,-328,-316,-329,-320,-276,-323,-312,-336,-274,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-261,-273,-262,-260,-264,-268,-263,-259,-266,-271,-257,-256,-265,-272,-267,-269,-270,-258,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'CHAR_CONST':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,146,149,150,151,165,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,167,-335,-28,-182,-27,167,-337,-87,-72,-337,167,-286,-285,167,167,-283,-287,-288,167,-284,167,167,167,-336,-183,167,167,-28,-337,167,-28,-337,-337,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,-337,-76,-79,-82,-75,167,-
77,167,167,-81,-215,-214,-80,-216,167,-78,167,167,-69,-284,167,167,-284,167,167,-244,-247,-245,-241,-242,-246,-248,167,-250,-251,-243,-249,-12,167,167,-11,167,167,167,167,-234,-233,167,-231,167,167,-217,167,-230,167,-84,-218,167,167,167,-337,-337,-198,167,167,167,-337,-284,-229,-232,167,-221,167,-83,-219,-68,167,-28,-337,167,-11,167,167,-220,167,167,167,-284,167,167,167,-337,167,-225,-224,-222,-84,167,167,167,-226,-223,167,-228,-227,]),'U16STRING_LITERAL':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,139,146,148,149,150,151,153,163,165,166,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,235,236,237,238,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,166,-335,-28,-182,-27,166,-337,-87,-72,-337,166,-286,-285,-330,166,-327,166,-283,-287,238,-328,-288,-329,166,-284,166,166,166,-336,-183,166,166,-28,-337,166,-28,-337,-337,166,166,166,-334,-332,-331,-333,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,-337,-76,-79,-82,-75,166,-77,166,166,-81,-215,-214,-80,-216,166,-78,166,166,-69,-284,166,166,-284,166,166,-244,-247,-245,-241,-242,-246,-248,166,-250,-251,-243,-249,-12,166,166,-11,166,166,166,166,-234,-233,166,-231,166,166,-217,166,-230,166,-84,-218,166,166,166,-337,-337,-198,166,166,166,-337,-284,-229,-232,166,-221,166,-83,-219,-68,166,-28,-337,166,-11,166,166,-220,166,166,166,-284,166,166,166,-337,166,-225,-224,-222,-84,166,166,166,-226,-223,166,-228,-227,]),'RBRACE':([61,97,99,119,124,132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,158,160,161,162,163,164,166,167,168,169,176,178,181,184,185,186,188,189,190,191,195,196,197,224,225,227,228,230,232,234,235,236,237,238,261,263,268,273,284,285,286,289,291,298,300,301,302,303,305,306,308,309,313,315,318,325,326,327,332,370,374,377,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,424,425,428,432,435,437,438,439,446,450,451,468,469,472,473,476,478,480,482,483,487,496,497,500,505,506,510,523,524,528,536,546,547,550,551,554,555,557,558,565,575,576,578,579,],[-71,-335,191,-87,-72,-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-255,-315,-289,-253,-328,-316,-329,-320,-276,-323,-312,-252,-337,191,191,191,-159,-160,-156,-336,-171,191,-176,-274,-239,-337,-193,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-76,-79,-82,-75,-77,-6,-81,-215,-214,-80,-216,-5,-78,191,191,191,-157,191,191,-172,-69,191,-22,-21,-261,-273,-262,-260,-264,-268,-263,-259,-266,-271,-257,-256,-265,-272,-267,-269,-270,-258,-296,-295,-294,-293,-292,-305,-234,-233,-231,-217,-230,-81,-84,-218,-158,-173,-177,-240,-194,191,-196,-237,-281,-282,-290,-291,-275,-229,-232,-221,-83,-219,-68,-195,-254,191,-220,-197,-306,191,-299,-225,-224,-222,-84,-300,-226,-223,-228,-227,]),'_BOOL':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,40,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,64,65,66,67,68,71,75,76,80,81,82,85,86,87,89,90,91,93,94,95,96,97,98,99,100,101,105,109,111,118,119,120,121,122,123,124,129
,142,147,172,174,177,180,181,182,184,185,186,187,188,189,190,191,192,198,200,211,214,223,229,231,233,239,240,241,267,269,275,278,279,280,284,285,286,289,291,298,300,301,302,303,305,308,312,313,314,315,316,317,318,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,446,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[34,-337,-113,-128,34,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,34,-120,-115,-65,-102,-126,-131,-108,-238,-111,-122,-63,-129,34,-29,-121,-116,-62,-112,-70,-52,-123,-117,-337,-337,-119,-337,-114,-130,34,-118,-71,-103,-337,-9,-131,-91,-10,-96,-98,34,-131,-95,-101,-97,34,-53,-126,34,-88,34,34,-93,34,-147,-335,-146,34,-167,-166,-182,-100,-126,34,-87,-90,-94,-92,-61,-72,34,-144,-142,34,34,34,-73,34,-89,34,34,34,-149,-159,-160,-156,-336,34,-183,-30,34,34,-74,34,34,34,34,-174,-175,34,-143,-140,34,-141,-145,-76,-79,-82,-75,-77,34,-81,-215,-214,-80,-216,-78,-127,34,-153,34,-151,-148,-157,-168,-69,-36,-35,34,34,34,-234,-233,34,-231,-217,-230,-81,-84,-218,-152,-150,-158,-170,-169,-31,-34,34,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'LE':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,158,160,161,162,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-255,-315,-289,246,-328,-316,-329,-320,-276,-323,-312,-336,-274,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-261,246,-262,-260,-264,246,-263,-259,-266,246,-257,-256,-265,246,246,246,246,-258,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'SEMI':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,22,23,25,26,27,29,30,33,34,36,38,39,40,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,58,59,60,61,62,63,64,65,66,67,68,70,71,73,74,75,76,77,78,79,80,81,82,83,84,86,87,89,91,94,96,97,98,99,100,101,108,109,110,111,113,114,115,119,120,121,122,123,124,127,132,133,134,136,138,139,140,141,142,143,144,145,147,148,152,153,154,156,158,160,161,162,163,164,166,167,168,169,176,178,179,180,181,184,185,186,187,188,189,190,191,192,200,216,217,223,224,225,226,228,230,232,234,235,236,237,238,240,241,261,263,268,269,272,273,275,279,280,284,285,286,288,289,290,291,293,294,298,300,301,302,303,304,305,306,307,308,310,312,313,314,315,316,317,318,320,321,322,323,324,328,330,331,332,340,341,355,356,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,423,424,425,426,427,428,429,432,433,435,437,438,439,440,442,443,444,446,448,449,453,454,464,465,468,469,476,478,480,482,483,486,487,496,497,498,499,500,502,505,506,508,509,510,511,512,518,519,523,524,533,534,535,536,537,539,547,551,552,554,555,557,558,565,568,569,574,575,576,577,578,579,],[19,-337,-113,-128,-337,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,-120,-115,-65,-102,-126,-131,-108,-238,-111,-122,-63,-129,-337,-29,-121,-116,-62,-112,-70,-52,-123,-117,119,-337,-337,-119,-337,-114,-130,19,-118,-71,-103,-337,-9,-131,-91,-10,-96,-20,-98,-54,-179,-178,-131,-37,-134,-85,-95,-101,-97,-19,-132,-53,-126,-337,-337,-93,-147,-335,-146,188,-167,-166,-136,-100,-138,-126,-16,-86,-15,-87,-90,-94,-92,-61,-72,-55,-317,-321,-318,-303,-324,-330,-313,-319,-144,-301,-274,-314,-142,-327,-325,-304,-322,-302,-255,-315,-289,-253,-328,-316,-329,-320,-276,-323,-312,-252,-178,-73,-337
,188,188,188,-149,-159,-160,-156,-336,-337,-30,-38,-133,-74,-274,-239,-135,-193,-326,-280,-277,-334,-332,-331,-333,-174,-175,-298,-297,-279,-143,-235,-278,-140,-141,-145,-76,-79,-82,424,-75,425,-77,428,-14,-337,-81,-215,-214,-80,435,-216,-13,-337,-78,-312,-127,188,-153,188,-151,-148,-157,-26,-25,446,-161,-163,-168,-139,-137,-69,-36,-35,-44,-43,-261,-273,-262,-260,-264,-268,-263,-259,-266,-271,-257,-256,-265,-272,-267,-269,-270,-258,-296,-295,-294,-293,-292,-305,496,-234,-233,497,-337,-231,-337,-217,-13,-230,-81,-84,-218,-337,-152,-150,-165,-158,-170,-169,-31,-34,-39,-42,-240,-194,-237,-281,-282,-290,-291,-236,-275,-229,-232,533,-337,-221,-337,-83,-219,-162,-164,-68,-33,-32,-41,-40,-195,-254,-337,553,-337,-220,-337,-337,-306,-299,566,-225,-224,-222,-84,-300,575,-337,-337,-226,-223,-337,-228,-227,]),'_THREAD_LOCAL':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,65,68,71,75,76,80,81,82,86,87,89,90,93,96,97,98,100,101,109,111,118,119,123,124,129,180,181,182,187,191,200,211,223,240,241,278,284,285,286,289,291,298,300,301,302,303,305,308,312,314,316,317,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[11,11,-113,-128,11,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,11,-120,-115,-65,-102,11,-131,-108,-238,-111,-122,-63,-129,-29,-121,-116,-62,-112,-70,-52,-123,-117,11,11,-119,11,-114,-130,11,-118,-71,-103,11,-131,-96,-98,11,-131,-95,-101,-97,-53,11,11,-88,11,-147,-335,-146,-167,-166,-100,-126,11,-87,-61,-72,11,-73,11,-89,-149,-336,-30,11,-74,-174,-175,11,-76,-79,-82,-75,-77,11,-81,-215,-214,-80,-216,-78,-127,-153,-151,-148,-168,-69,-36,-35,11,11,11,-234,-233,11,-231,-217,-230,-81,-84,-218,-152,-150,-170,-169,-31,-34,11,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'LT':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,158,160,161,162,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-255,-315,-289,248,-328,-316,-329,-320,-276,-323,-312,-336,-274,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-261,248,-262,-260,-264,248,-263,-259,-266,248,-257,-256,-265,248,248,248,248,-258,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'COMMA':([2,3,5,6,7,10,11,12,13,18,20,22,23,26,27,30,33,34,35,36,39,42,43,44,46,48,49,50,54,56,58,60,62,68,70,71,73,74,75,76,77,78,80,81,82,84,86,96,98,100,101,103,104,105,106,108,109,110,111,113,127,132,133,134,136,138,139,140,141,142,143,144,145,147,148,152,153,154,156,158,160,161,162,163,164,166,167,168,169,176,177,178,179,187,191,195,196,197,198,199,200,203,210,211,212,213,215,216,217,224,225,226,228,230,232,234,235,236,237,238,240,241,261,263,268,269,270,272,273,274,275,276,277,279,280,281,283,294,310,312,314,316,317,320,323,324,325,326,327,328,330,331,340,341,343,344,345,346,347,348,355,356,374,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,414,426,442,443,444,448,449,450,451,453,454,458,461,463,464,465,468,469,473,476,478,480,482,483,486,487,489,490,492,501,503,507,508,509,511,512,518,519,523,524,525,528,529,530,531,532,544,545,546,547,551,556,559,560,564
,565,570,571,],[-113,-128,-124,-110,-106,-104,-107,-125,-105,-99,-109,-120,-115,-102,-126,-108,-238,-111,-337,-122,-129,-29,-121,-116,-112,-52,-123,-117,-119,-114,-130,-118,-103,-96,126,-98,-54,-179,-178,-131,-37,-134,-95,-101,-97,-132,-53,-147,-146,-167,-166,-28,-180,-182,-27,-136,-100,-138,-126,202,-55,-317,-321,-318,-303,-324,-330,-313,-319,-144,-301,-274,-314,-142,-327,-325,-304,-322,-302,-255,-315,-289,-253,-328,-316,-329,-320,-276,-323,-312,-337,-252,-178,-149,-336,-171,327,-176,-183,-181,-30,333,-186,-337,349,350,-191,-38,-133,-274,-239,-135,-193,-326,-280,-277,-334,-332,-331,-333,-174,-175,-298,-297,-279,-143,412,-235,-278,-203,-140,-204,-1,-141,-145,-2,-206,412,-312,-127,-153,-151,-148,445,-161,-163,327,327,-172,-168,-139,-137,-36,-35,-190,-204,-56,-188,-45,-189,-44,-43,472,-261,-273,-262,-260,-264,-268,-263,-259,-266,-271,-257,-256,-265,-272,-267,-269,412,-270,-258,-296,-295,-294,-293,412,-292,-310,484,485,-305,-205,412,-152,-150,-165,-170,-169,-173,-177,-31,-34,-57,-192,-187,-39,-42,-240,-194,-196,-237,-281,-282,-290,-291,-236,-275,-213,-207,-211,412,412,412,-162,-164,-33,-32,-41,-40,-195,-254,-311,550,-209,-208,-210,-212,-51,-50,-197,-306,-299,412,-46,-49,412,-300,-48,-47,]),'U16CHAR_CONST':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,146,149,150,151,165,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,169,-335,-28,-182,-27,169,-337,-87,-72,-337,169,-286,-285,169,169,-283,-287,-288,169,-284,169,169,169,-336,-183,169,169,-28,-337,169,-28,-337,-337,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,-337,-76,-79,-82,-75,169,-77,169,169,-81,-215,-214,-80,-216,169,-78,169,169,-69,-284,169,169,-284,169,169,-244,-247,-245,-241,-242,-246,-248,169,-250,-251,-243,-249,-12,169,169,-11,169,169,169,169,-234,-233,169,-231,169,169,-217,169,-230,169,-84,-218,169,169,169,-337,-337,-198,169,169,169,-337,-284,-229,-232,169,-221,169,-83,-219,-68,169,-28,-337,169,-11,169,169,-220,169,169,169,-284,169,169,169,-337,169,-225,-224,-222,-84,169,169,169,-226,-223,169,-228,-227,]),'OFFSETOF':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,146,149,150,151,165,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,170,-335,-28,-182,-27,170,-337,-87,-72,-337,170,-286,-285,170,170,-283,-287,-288,170,-284,170,170,170,-336,-183,170,170,-28,-337,170,-28,-337,-337,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,
170,170,-337,-76,-79,-82,-75,170,-77,170,170,-81,-215,-214,-80,-216,170,-78,170,170,-69,-284,170,170,-284,170,170,-244,-247,-245,-241,-242,-246,-248,170,-250,-251,-243,-249,-12,170,170,-11,170,170,170,170,-234,-233,170,-231,170,170,-217,170,-230,170,-84,-218,170,170,170,-337,-337,-198,170,170,170,-337,-284,-229,-232,170,-221,170,-83,-219,-68,170,-28,-337,170,-11,170,170,-220,170,170,170,-284,170,170,170,-337,170,-225,-224,-222,-84,170,170,170,-226,-223,170,-228,-227,]),'_ATOMIC':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,35,36,38,39,40,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,64,65,66,67,68,71,75,76,80,81,82,85,86,87,89,90,91,93,94,95,96,97,98,99,100,101,103,105,109,111,117,118,119,120,121,122,123,124,128,129,142,147,172,174,177,180,181,182,184,185,186,187,188,189,190,191,192,198,200,205,206,211,214,219,220,223,229,231,233,239,240,241,267,269,275,278,279,280,282,284,285,286,289,291,298,300,301,302,303,305,308,312,313,314,315,316,317,318,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,446,448,449,453,454,459,460,496,497,500,505,506,510,511,512,514,515,536,554,555,557,558,575,576,578,579,],[29,65,-113,-128,76,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,65,-120,-115,-65,-102,65,-131,-108,-238,-111,76,-122,-63,-129,112,-29,-121,-116,-62,-112,-70,-52,-123,-117,65,65,-119,65,-114,-130,29,-118,-71,-103,65,-9,-131,-91,-10,-96,-98,65,-131,-95,-101,-97,29,-53,65,76,-88,112,65,-93,29,-147,-335,-146,29,-167,-166,76,-182,-100,-126,76,29,-87,-90,-94,-92,-61,-72,76,29,-144,-142,65,29,76,-73,65,-89,29,29,29,-149,-159,-160,-156,-336,76,-183,-30,76,76,76,112,76,76,-74,29,29,29,29,-174,-175,29,-143,-140,29,-141,-145,76,-76,-79,-82,-75,-77,65,-81,-215,-214,-80,-216,-78,-127,29,-153,29,-151,-148,-157,-168,-69,-36,-35,29,29,29,-234,-233,65,-231,-217,-230,-81,-84,-218,-152,-150,-158,-170,-169,-31,-34,76,29,-229,-232,-221,-83,-219,-68,-33,-32,76,76,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'TYPEDEF':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,65,68,71,75,76,80,81,82,86,87,89,90,93,96,97,98,100,101,109,111,118,119,123,124,129,180,181,182,187,191,200,211,223,240,241,278,284,285,286,289,291,298,300,301,302,303,305,308,312,314,316,317,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[7,7,-113,-128,7,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,7,-120,-115,-65,-102,7,-131,-108,-238,-111,-122,-63,-129,-29,-121,-116,-62,-112,-70,-52,-123,-117,7,7,-119,7,-114,-130,7,-118,-71,-103,7,-131,-96,-98,7,-131,-95,-101,-97,-53,7,7,-88,7,-147,-335,-146,-167,-166,-100,-126,7,-87,-61,-72,7,-73,7,-89,-149,-336,-30,7,-74,-174,-175,7,-76,-79,-82,-75,-77,7,-81,-215,-214,-80,-216,-78,-127,-153,-151,-148,-168,-69,-36,-35,7,7,7,-234,-233,7,-231,-217,-230,-81,-84,-218,-152,-150,-170,-169,-31,-34,7,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'XOR':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,158,160,161,162,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-255,-315,-289,251,-328,-316,-329,-320,-276,-323,-312,-336,-274,
-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-261,251,-262,-260,-264,-268,-263,-259,-266,-271,-257,-256,-265,251,-267,-269,251,-258,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'AUTO':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,65,68,71,75,76,80,81,82,86,87,89,90,93,96,97,98,100,101,109,111,118,119,123,124,129,180,181,182,187,191,200,211,223,240,241,278,284,285,286,289,291,298,300,301,302,303,305,308,312,314,316,317,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[26,26,-113,-128,26,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,26,-120,-115,-65,-102,26,-131,-108,-238,-111,-122,-63,-129,-29,-121,-116,-62,-112,-70,-52,-123,-117,26,26,-119,26,-114,-130,26,-118,-71,-103,26,-131,-96,-98,26,-131,-95,-101,-97,-53,26,26,-88,26,-147,-335,-146,-167,-166,-100,-126,26,-87,-61,-72,26,-73,26,-89,-149,-336,-30,26,-74,-174,-175,26,-76,-79,-82,-75,-77,26,-81,-215,-214,-80,-216,-78,-127,-153,-151,-148,-168,-69,-36,-35,26,26,26,-234,-233,26,-231,-217,-230,-81,-84,-218,-152,-150,-170,-169,-31,-34,26,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'DIVEQUAL':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,160,161,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-315,-289,-328,-316,-329,-320,-276,-323,-312,-336,357,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'TIMES':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,22,23,25,26,27,29,30,33,34,35,36,37,38,39,40,43,44,45,46,47,49,50,52,53,54,55,56,58,59,60,61,62,63,64,65,66,67,68,69,71,76,80,81,82,85,87,89,91,94,96,97,98,100,101,103,104,105,106,109,111,116,117,119,120,121,122,123,124,126,128,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,156,158,160,161,162,163,164,165,166,167,168,169,171,173,174,175,176,177,180,181,187,191,192,198,201,202,204,205,206,211,218,219,220,223,224,227,229,230,231,232,233,234,235,236,237,238,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,263,265,266,268,269,273,275,278,279,280,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,310,312,314,316,317,319,328,329,332,336,338,339,342,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,442,443,445,447,448,449,459,472,475,477,478,480,481,482,483,484,487,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,547,549,550,551,553,554,555,557,558,565,566,569,574,575,576,577,578,579,],[35,-337,-113,-128,35,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,-120,-115,-65,-102,-126,-131,-108,-238,-111,-337,-122,35,-63,-129,35,-121,-116,-62,-112,-70,-123,-117,-337,-337,-119,-337,-114,-130,35,-118,-71,-103,-337,-9,-131,-91,-10,-96,35,-98,-131,-95,-101,-97,173,-126,35,35,-93,-147,-335,-146,-167,-166,-28,35,-182,-27,-100,-126,173,-337,-87,-90,-94,-92,-61,-72,35,-337,173,-317,-321,-318,-286,-3
03,-285,-324,-330,-313,-319,-144,-301,-274,-314,173,-142,-327,173,-283,-287,-325,-304,-322,-302,-255,-315,-289,253,-328,-316,-288,-329,-320,-276,-323,173,-284,173,173,-312,35,-73,173,-149,-336,35,-183,173,35,336,-28,-337,35,352,-28,-337,-74,-274,-337,173,-326,173,-280,173,-277,-334,-332,-331,-333,-174,-175,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,-298,-297,173,173,-279,-143,-278,-140,35,-141,-145,420,-76,-79,-82,-75,173,-77,173,173,-81,-215,-214,-80,-216,173,-78,-312,-127,-153,-151,-148,173,-168,173,-69,-284,173,173,35,-284,173,173,-244,-247,-245,-241,-242,-246,-248,173,-250,-251,-243,-249,-12,173,173,-11,253,253,253,253,253,253,253,253,253,253,-257,-256,253,253,253,253,253,-258,-296,-295,-294,-293,-292,-305,173,173,173,494,-234,-233,173,-231,173,173,-217,173,-230,173,-84,-218,173,173,-152,-150,35,173,-170,-169,-337,-337,-198,173,-281,-282,173,-290,-291,173,-275,-337,-284,-229,-232,173,-221,173,-83,-219,-68,541,-28,-337,173,-11,173,173,-220,173,173,173,-284,173,173,-306,173,-337,-299,173,-225,-224,-222,-84,-300,173,173,173,-226,-223,173,-228,-227,]),'LPAREN':([0,1,2,3,4,5,6,7,8,10,11,12,13,14,15,16,17,18,19,20,22,23,25,26,27,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,64,65,66,67,68,69,71,72,73,76,77,80,81,82,85,86,87,89,91,94,96,97,98,100,101,103,104,105,106,109,111,112,116,117,119,120,121,122,123,124,126,127,128,131,132,133,134,135,136,137,138,139,140,141,142,143,145,146,147,148,149,150,151,152,153,154,155,156,160,161,163,164,165,166,167,168,169,170,171,173,174,175,176,177,180,181,187,191,192,198,199,200,201,202,204,205,206,211,216,218,219,220,223,227,229,230,231,233,235,236,237,238,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,263,265,266,269,275,276,278,279,280,282,283,284,285,286,289,290,291,292,296,297,298,299,300,301,302,303,305,307,308,310,311,312,314,316,317,319,328,329,332,336,338,339,340,341,342,344,345,347,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,402,403,404,405,407,411,412,413,414,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,442,443,445,447,448,449,453,454,457,458,459,464,465,472,475,477,481,482,483,484,488,489,490,492,494,496,497,499,500,502,504,505,506,510,511,512,513,514,515,518,519,521,522,529,530,531,532,533,535,536,537,538,539,541,542,543,544,545,547,549,550,551,553,554,555,557,558,559,560,565,566,569,570,571,574,575,576,577,578,579,],[37,-337,-113,-128,69,-124,-110,-106,85,-104,-107,-125,-105,-64,37,-60,-67,-99,-66,-109,-120,-115,-65,-102,-126,95,-108,-238,-111,-337,-122,37,-63,-129,37,116,-29,-121,-116,-62,-112,-70,118,-123,-117,-337,-337,-119,-337,-114,-130,37,-118,-71,-103,-337,-9,95,-91,-10,-96,69,-98,69,129,-131,-37,-95,-101,-97,174,118,-126,69,37,-93,-147,-335,-146,-167,-166,-28,-180,-182,-27,-100,-126,95,174,-337,-87,-90,-94,-92,-61,-72,69,129,-337,229,-317,-321,-318,-286,-303,-285,-324,-330,-313,-319,-144,-301,-314,231,-142,-327,233,-283,-287,-325,-304,-322,239,-302,-315,-289,-328,-316,-288,-329,-320,266,-323,267,174,-284,229,233,-312,278,-73,229,-149,-336,69,-183,-181,-30,229,69,229,-28,-337,342,-38,229,-28,-337,-74,-337,229,-326,229,229,-334,-332,-331,-333,-174,-175,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,229,174,174,-298,-297,229,229,-143,-140,278,278,-141,-145,-337,422,-76,-79,-82,-75,229,-77,427,430,174,229,434,-81,-215,-214,-80,-216,229,-78,-312,441,-127,-153,-151,-148,174,-168,174,-69,-284,229,229,-36,-35,342,342,460,-45,-284,229,229,-44,-43,-244,-
247,-245,-241,-242,-246,-248,229,-250,-251,-243,-249,-12,174,229,-11,-296,-295,-294,-293,-292,-305,229,174,422,229,229,-234,-233,229,-231,229,229,-217,229,-230,229,-84,-218,229,229,-152,-150,69,174,-170,-169,-31,-34,342,460,-337,-39,-42,-337,-198,174,174,-290,-291,229,-337,-213,-207,-211,-284,-229,-232,229,-221,229,538,-83,-219,-68,-33,-32,229,-28,-337,-41,-40,229,-11,-209,-208,-210,-212,229,229,-220,229,229,229,-284,229,229,-51,-50,-306,229,-337,-299,229,-225,-224,-222,-84,-46,-49,-300,229,229,-48,-47,229,-226,-223,229,-228,-227,]),'MINUSMINUS':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,132,133,134,135,136,137,138,139,140,141,143,145,146,148,149,150,151,152,153,154,156,160,161,163,164,165,166,167,168,169,171,173,174,175,176,181,191,198,201,204,205,206,218,219,220,227,229,230,231,233,235,236,237,238,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,263,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,310,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,402,403,404,405,407,411,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,482,483,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,547,549,550,551,553,554,555,557,558,565,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,175,-335,-28,-182,-27,175,-337,-87,-72,-337,175,-317,-321,-318,-286,-303,-285,-324,-330,-313,-319,-301,-314,175,-327,175,-283,-287,-325,-304,-322,-302,-315,-289,-328,-316,-288,-329,-320,261,-323,175,-284,175,175,-312,175,-336,-183,175,175,-28,-337,175,-28,-337,-337,175,-326,175,175,-334,-332,-331,-333,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,-298,-297,175,175,-337,-76,-79,-82,-75,175,-77,175,175,-81,-215,-214,-80,-216,175,-78,-312,175,175,-69,-284,175,175,-284,175,175,-244,-247,-245,-241,-242,-246,-248,175,-250,-251,-243,-249,-12,175,175,-11,-296,-295,-294,-293,-292,-305,175,175,175,175,-234,-233,175,-231,175,175,-217,175,-230,175,-84,-218,175,175,175,-337,-337,-198,175,175,-290,-291,175,-337,-284,-229,-232,175,-221,175,-83,-219,-68,175,-28,-337,175,-11,175,175,-220,175,175,175,-284,175,175,-306,175,-337,-299,175,-225,-224,-222,-84,-300,175,175,175,-226,-223,175,-228,-227,]),'ID':([0,1,2,3,4,5,6,7,10,11,12,13,14,15,16,17,18,19,20,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,43,44,45,46,47,49,50,52,53,54,55,56,58,59,60,61,62,63,64,65,66,67,68,69,71,72,76,80,81,82,85,87,89,91,94,96,97,98,100,101,102,103,104,105,106,109,111,116,117,118,119,120,121,122,123,124,126,128,129,131,135,137,142,146,147,149,150,151,165,171,173,174,175,180,181,187,191,192,193,194,198,199,201,202,204,205,206,211,218,219,220,223,227,229,231,233,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,262,264,265,266,269,275,279,280,282,284,285,286,287,289,290,291,297,298,300,301,302,303,305,307,308,312,314,316,317,319,327,328,329,332,336,338,339,342,344,349,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,372,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,442,443,445,447,448,449,457,459,460,472,475,477,481,484,485,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,548,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[42,-337,-113,-128,42,-124,-110,-106,-104,-107,-125,-105,-64,42,-60,-67,-99,-66,-109,-120,-115,-154,-65,-102,-126,-155,-131,-108,98,101,-238,-
111,-337,-122,42,-63,-129,42,-121,-116,-62,-112,-70,-123,-117,-337,-337,-119,-337,-114,-130,42,-118,-71,-103,-337,-9,-131,-91,-10,-96,42,-98,42,-131,-95,-101,-97,176,-126,42,42,-93,-147,-335,-146,-167,-166,197,-28,-180,-182,-27,-100,-126,176,-337,176,-87,-90,-94,-92,-61,-72,42,-337,176,176,-286,-285,-144,176,-142,176,-283,-287,-288,176,-284,176,176,-73,310,-149,-336,42,197,197,-183,-181,176,42,176,-28,-337,42,176,-28,-337,-74,-337,176,176,176,-174,-175,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,403,405,176,176,-143,-140,-141,-145,-337,-76,-79,-82,423,-75,176,-77,176,310,-81,-215,-214,-80,-216,310,-78,-127,-153,-151,-148,176,197,-168,176,-69,-284,176,176,42,42,176,-284,176,176,-244,-247,-245,-241,-242,-246,-248,176,-250,-251,-243,-249,-12,176,176,176,-11,176,176,176,176,-234,-233,176,-231,310,176,-217,176,-230,310,-84,-218,310,176,-152,-150,42,176,-170,-169,42,-337,176,-337,-198,176,176,176,176,-337,-284,-229,-232,176,-221,310,-83,-219,-68,176,-28,-337,176,-11,176,310,-220,310,176,310,-284,176,176,176,176,-337,176,-225,-224,-222,-84,176,310,310,-226,-223,310,-228,-227,]),'IF':([61,97,119,124,181,191,284,285,286,289,291,298,300,301,302,303,305,307,308,332,424,425,428,429,432,435,437,438,439,440,496,497,500,502,505,506,510,535,536,537,539,554,555,557,558,569,574,575,576,577,578,579,],[-71,-335,-87,-72,311,-336,-76,-79,-82,-75,-77,311,-81,-215,-214,-80,-216,311,-78,-69,-234,-233,-231,311,-217,-230,311,-84,-218,311,-229,-232,-221,311,-83,-219,-68,311,-220,311,311,-225,-224,-222,-84,311,311,-226,-223,311,-228,-227,]),'STRING_LITERAL':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,136,137,146,149,150,151,152,165,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,230,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,333,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,452,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,152,-335,-28,-182,-27,152,-337,-87,-72,-337,152,-286,230,-285,152,152,-283,-287,-325,-288,152,-284,152,152,152,-336,-183,152,152,-28,-337,152,-28,-337,-337,152,-326,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,-337,-76,-79,-82,-75,152,-77,152,152,-81,-215,-214,-80,-216,152,-78,152,152,-69,152,-284,152,152,-284,152,152,-244,-247,-245,-241,-242,-246,-248,152,-250,-251,-243,-249,-12,152,152,-11,152,152,152,152,-234,-233,152,-231,152,152,-217,152,-230,152,-84,-218,152,152,152,230,-337,-337,-198,152,152,152,-337,-284,-229,-232,152,-221,152,-83,-219,-68,152,-28,-337,152,-11,152,152,-220,152,152,152,-284,152,152,152,-337,152,-225,-224,-222,-84,152,152,152,-226,-223,152,-228,-227,]),'FLOAT':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,40,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,64,65,66,67,68,71,75,76,80,81,82,85,86,87,89,90,91,93,94,95,96,97,98,99,100,101,105,109,111,118,119,120,121,122,123,124,129,142,147,172,174,177,180,181,182,184,185,186,187,188,189,190,191,192,198,200,211,214,223,229,231,233,239,240,241,267,269,275,278,279,280,284,285,286,289,291,298,300,301,302,303,305,308,312,313,314,315,316,317,318,328,332,340,341,342,350,422,424,42
5,427,428,432,435,437,438,439,442,443,446,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[44,-337,-113,-128,44,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,44,-120,-115,-65,-102,-126,-131,-108,-238,-111,-122,-63,-129,44,-29,-121,-116,-62,-112,-70,-52,-123,-117,-337,-337,-119,-337,-114,-130,44,-118,-71,-103,-337,-9,-131,-91,-10,-96,-98,44,-131,-95,-101,-97,44,-53,-126,44,-88,44,44,-93,44,-147,-335,-146,44,-167,-166,-182,-100,-126,44,-87,-90,-94,-92,-61,-72,44,-144,-142,44,44,44,-73,44,-89,44,44,44,-149,-159,-160,-156,-336,44,-183,-30,44,44,-74,44,44,44,44,-174,-175,44,-143,-140,44,-141,-145,-76,-79,-82,-75,-77,44,-81,-215,-214,-80,-216,-78,-127,44,-153,44,-151,-148,-157,-168,-69,-36,-35,44,44,44,-234,-233,44,-231,-217,-230,-81,-84,-218,-152,-150,-158,-170,-169,-31,-34,44,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'XOREQUAL':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,160,161,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-315,-289,-328,-316,-329,-320,-276,-323,-312,-336,361,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'LSHIFTEQUAL':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,160,161,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-315,-289,-328,-316,-329,-320,-276,-323,-312,-336,363,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'RBRACKET':([3,39,58,76,103,105,106,117,128,132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,158,160,161,162,163,164,166,167,168,169,176,178,191,198,204,205,218,219,224,225,230,232,234,235,236,237,238,261,263,268,272,273,282,334,335,336,337,351,352,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,406,407,411,419,420,421,455,456,459,466,467,468,471,476,478,480,482,483,486,487,491,493,494,513,514,524,540,541,547,551,561,562,564,565,],[-128,-129,-130,-131,-28,-182,-27,-337,-337,-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-255,-315,-289,-253,-328,-316,-329,-320,-276,-323,-312,-252,-336,-183,-337,-28,-337,-28,-274,-239,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-235,-278,-337,453,-4,454,-3,464,465,-261,-273,-262,-260,-264,-268,-263,-259,-266,-271,-257,-256,-265,-272,-267,-269,-270,-258,-296,-295,-294,-293,482,-292,-305,-337,492,-337,511,512,-337,518,519,-240,520,-237,-281,-282,-290,-291,-236,-275,529,530,531,-337,-28,-254,559,560,-306,-299,570,571,572,-300,]),}
+
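+# _lr_action_items is stored column-major: each token maps to a pair of
+# parallel lists (states, actions).  Invert it into the row-major lookup
+# consulted at parse time, _lr_action[state][token] -> action, where a
+# positive action shifts to that state, a negative one reduces by that
+# production, and 0 accepts.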
+_lr_action = {}
+for _k, _v in _lr_action_items.items():
+   for _x, _y in zip(_v[0], _v[1]):
+      if _x not in _lr_action:
+         _lr_action[_x] = {}
+      _lr_action[_x][_k] = _y
+del _lr_action_items
+
+_lr_goto_items = {'expression_statement':([181,298,307,429,437,440,502,535,537,539,569,574,577,],[284,284,284,284,284,284,284,284,284,284,284,284,284,]),'struct_or_union_specifier':([0,21,40,59,75,85,91,93,95,99,118,129,172,174,181,184,185,186,214,229,231,233,239,267,278,298,313,315,342,350,422,427,460,],[5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,]),'init_declarator_list':([4,89,],[70,70,]),'init_declarator_list_opt':([4,89,],[79,79,]),'iteration_statement':([181,298,307,429,437,440,502,535,537,539,569,574,577,],[285,285,285,285,285,285,285,285,285,285,285,285,285,]),'static_assert':([0,59,181,298,307,429,437,440,502,535,537,539,569,574,577,],[17,17,286,286,286,286,286,286,286,286,286,286,286,286,286,]),'unified_string_literal':([85,116,131,146,149,171,174,175,181,201,204,218,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,290,297,298,307,319,329,333,338,339,353,354,364,373,375,412,413,419,421,427,429,430,434,437,440,441,447,477,481,484,499,502,513,521,533,535,537,538,539,542,543,549,553,566,569,574,577,],[136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,452,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,]),'assignment_expression_opt':([204,218,419,421,513,],[334,351,491,493,540,]),'brace_open':([31,32,92,96,98,100,101,130,131,181,201,229,298,307,375,413,429,437,440,477,478,479,502,521,535,537,539,569,574,577,],[99,102,181,184,185,193,194,181,227,181,227,181,181,181,227,488,181,181,181,488,488,488,181,227,181,181,181,181,181,181,]),'enumerator':([102,193,194,327,],[195,195,195,450,]),'typeid_noparen_declarator':([211,],[348,]),'type_qualifier_list_opt':([35,117,128,206,220,282,459,515,],[104,204,218,339,354,419,513,543,]),'declaration_specifiers_no_type_opt':([1,27,52,53,55,63,87,],[66,94,120,121,122,94,94,]),'expression_opt':([181,298,307,427,429,437,440,499,502,533,535,537,539,553,566,569,574,577,],[288,288,288,498,288,288,288,534,288,552,288,288,288,567,573,288,288,288,]),'designation':([227,472,488,550,],[369,369,369,369,]),'parameter_list':([118,129,278,342,422,460,],[213,213,213,213,213,213,]),'alignment_specifier':([0,1,4,21,27,52,53,55,59,63,75,85,87,89,93,95,99,118,129,174,177,181,184,185,186,192,211,229,231,233,239,267,278,298,313,315,342,350,422,427,460,],[53,53,81,53,53,53,53,53,53,53,53,142,53,81,53,142,142,53,53,142,280,53,142,142,142,280,81,142,142,142,142,142,53,53,142,142,53,53,53,53,53,]),'labeled_statement':([181,298,307,429,437,440,502,535,537,539,569,574,577,],[289,289,289,289,289,289,289,289,289,289,289,289,289,]),'abstract_declarator':([177,211,278,342,],[281,281,418,418,]),'translation_unit':([0,],[59,]),'init_declarator':([4,89,126,202,],[84,84,217,331,]),'direct_abstract_declarator':([177,211,276,278,342,344,457,],[283,283,414,283,283,414,414,]),'designator_list':([227,472,488,550,],[376,376,376,376,]),'identifier':([85,116,118,129,131,146,149,171,174,175,181,201,204,218,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,290,297,298,307,319,329,338,339,349,353,354,364,372,373,375,412,413,419,421,427,429,430,434,437,440,441,447,460,477,481,484,485,499,502,513,521,533,535,537,538,539,542,543,548,549,553,566,569,574,577,],[143,143,215,215,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143
,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,461,143,143,143,470,143,143,143,143,143,143,143,143,143,143,143,143,143,143,215,143,143,143,527,143,143,143,143,143,143,143,143,143,143,143,563,143,143,143,143,143,143,]),'offsetof_member_designator':([485,],[526,]),'unary_expression':([85,116,131,146,149,171,174,175,181,201,204,218,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,290,297,298,307,319,329,338,339,353,354,364,373,375,412,413,419,421,427,429,430,434,437,440,441,447,477,481,484,499,502,513,521,533,535,537,538,539,542,543,549,553,566,569,574,577,],[144,144,224,232,234,144,224,273,224,224,224,224,224,224,224,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,224,144,144,224,224,224,144,224,224,144,144,224,224,224,224,224,144,224,224,144,224,224,224,224,224,224,224,224,224,144,144,144,224,224,224,224,224,224,224,224,224,224,224,224,224,224,224,224,224,224,]),'abstract_declarator_opt':([177,211,],[274,343,]),'initializer':([131,201,375,521,],[226,330,473,546,]),'direct_id_declarator':([0,4,15,37,40,59,69,72,89,91,126,192,202,211,342,344,445,457,],[48,48,86,48,48,48,48,86,48,48,48,48,48,48,48,86,48,86,]),'struct_declaration_list':([99,184,185,],[186,313,315,]),'pp_directive':([0,59,],[14,14,]),'declaration_list':([21,75,],[93,93,]),'id_init_declarator':([40,91,],[108,108,]),'type_specifier':([0,21,40,59,75,85,91,93,95,99,118,129,172,174,181,184,185,186,214,229,231,233,239,267,278,298,313,315,342,350,422,427,460,],[18,18,109,18,18,147,109,18,147,147,18,18,269,147,18,147,147,147,109,147,147,147,147,147,18,18,147,147,18,18,18,18,18,]),'compound_statement':([92,130,181,229,298,307,429,437,440,502,535,537,539,569,574,577,],[180,223,291,378,291,291,291,291,291,291,291,291,291,291,291,291,]),'pointer':([0,4,37,40,59,69,89,91,104,126,177,192,202,211,278,342,445,],[15,72,15,15,15,72,72,15,199,72,276,72,72,344,276,457,72,]),'typeid_declarator':([4,69,89,126,192,202,445,],[74,125,74,74,74,74,74,]),'id_init_declarator_list':([40,91,],[113,113,]),'declarator':([4,89,126,192,202,445,],[78,78,78,324,78,324,]),'argument_expression_list':([266,],[409,]),'struct_declarator_list_opt':([192,],[322,]),'block_item_list':([181,],[298,]),'parameter_type_list_opt':([278,342,422,],[417,417,495,]),'struct_declarator':([192,445,],[323,508,]),'type_qualifier':([0,1,4,21,27,35,52,53,55,59,63,75,85,87,89,93,95,99,103,117,118,128,129,172,174,177,181,184,185,186,192,205,206,211,219,220,229,231,233,239,267,278,282,298,313,315,342,350,422,427,459,460,514,515,],[52,52,80,52,52,105,52,52,52,52,52,52,105,52,80,52,105,105,198,105,52,105,52,198,105,279,52,105,105,105,279,198,105,80,198,105,105,105,105,105,105,52,105,52,105,105,52,52,52,52,105,52,198,105,]),'assignment_operator':([224,],[364,]),'expression':([174,181,229,231,233,258,265,290,298,307,427,429,430,434,437,440,441,499,502,533,535,537,538,539,549,553,566,569,574,577,],[270,294,270,270,270,399,406,426,294,294,294,294,501,503,294,294,507,294,294,294,294,294,556,294,564,294,294,294,294,294,]),'storage_class_specifier':([0,1,4,21,27,52,53,55,59,63,75,87,89,93,118,129,181,211,278,298,342,350,422,427,460,],[1,1,68,1,1,1,1,1,1,1,1,1,68,1,1,1,1,68,1,1,1,1,1,1,1,]),'unified_wstring_literal':([85,116,131,146,149,171,174,175,181,201,204,218,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,290,297,298,307,319,329,338,339,353,354,364,373,375,412,413,419,421,427,429,430,434,437,440,441,447,477,481,484,499,502,513,521,533,535,537,538,539,542,543,
549,553,566,569,574,577,],[153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,]),'translation_unit_or_empty':([0,],[9,]),'initializer_list_opt':([227,],[370,]),'brace_close':([99,184,185,186,196,309,313,315,325,326,370,472,528,550,],[187,314,316,317,328,439,442,443,448,449,469,523,551,565,]),'direct_typeid_declarator':([4,69,72,89,126,192,202,445,],[73,73,127,73,73,73,73,73,]),'external_declaration':([0,59,],[16,123,]),'pragmacomp_or_statement':([307,429,440,502,535,537,539,569,574,577,],[436,500,506,536,554,555,557,576,578,579,]),'type_name':([85,95,174,229,231,233,239,267,],[157,183,271,379,380,381,382,410,]),'typedef_name':([0,21,40,59,75,85,91,93,95,99,118,129,172,174,181,184,185,186,214,229,231,233,239,267,278,298,313,315,342,350,422,427,460,],[36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,]),'pppragma_directive':([0,59,99,181,184,185,186,298,307,313,315,429,437,440,502,535,537,539,569,574,577,],[25,25,189,300,189,189,189,300,437,189,189,437,300,437,437,437,437,437,437,437,437,]),'statement':([181,298,307,429,437,440,502,535,537,539,569,574,577,],[301,301,438,438,505,438,438,438,438,558,438,438,438,]),'cast_expression':([85,116,131,171,174,181,201,204,218,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,290,297,298,307,319,329,338,339,353,354,364,373,375,412,413,419,421,427,429,430,434,437,440,441,447,477,481,484,499,502,513,521,533,535,537,538,539,542,543,549,553,566,569,574,577,],[158,158,158,268,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,487,158,158,158,158,158,158,158,158,158,158,487,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,]),'atomic_specifier':([0,1,21,27,40,52,53,55,59,63,75,85,87,91,93,95,99,118,129,172,174,181,184,185,186,214,229,231,233,239,267,278,298,313,315,342,350,422,427,460,],[27,63,87,63,111,63,63,63,27,63,87,111,63,111,87,111,111,27,27,111,111,87,111,111,111,111,111,111,111,111,111,27,87,111,111,27,27,27,87,27,]),'struct_declarator_list':([192,],[320,]),'empty':([0,1,4,21,27,35,40,52,53,55,63,75,87,89,91,117,118,128,129,177,181,192,204,206,211,218,220,227,278,282,298,307,342,419,421,422,427,429,437,440,459,460,472,488,499,502,513,515,533,535,537,539,550,553,566,569,574,577,],[57,64,83,88,64,106,115,64,64,64,64,88,64,83,115,106,208,106,208,277,306,321,337,106,277,337,106,377,415,106,433,433,415,337,337,415,433,433,433,433,106,208,522,522,433,433,337,106,433,433,433,433,522,433,433,433,433,433,]),'parameter_declaration':([118,129,278,342,350,422,460,],[210,210,210,210,463,210,210,]),'primary_expression':([85,116,131,146,149,171,174,175,181,201,204,218,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,290,297,298,307,319,329,338,339,353,354,364,373,375,412,413,419,421,427,429,430,434,437,440,441,447,477,481,484,499,502,513,521,533,535,537,538,539,542,543,549,553,566,569,574,577,],[161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,16
1,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,]),'declaration':([0,21,59,75,93,181,298,427,],[38,90,38,90,182,302,302,499,]),'declaration_specifiers_no_type':([0,1,21,27,52,53,55,59,63,75,87,93,118,129,181,278,298,342,350,422,427,460,],[40,67,91,67,67,67,67,40,67,91,67,91,214,214,91,214,91,214,214,214,91,214,]),'jump_statement':([181,298,307,429,437,440,502,535,537,539,569,574,577,],[303,303,303,303,303,303,303,303,303,303,303,303,303,]),'enumerator_list':([102,193,194,],[196,325,326,]),'block_item':([181,298,],[305,432,]),'constant_expression':([85,116,297,319,329,373,447,],[159,203,431,444,451,471,509,]),'identifier_list_opt':([118,129,460,],[207,221,516,]),'constant':([85,116,131,146,149,171,174,175,181,201,204,218,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,290,297,298,307,319,329,338,339,353,354,364,373,375,412,413,419,421,427,429,430,434,437,440,441,447,477,481,484,499,502,513,521,533,535,537,538,539,542,543,549,553,566,569,574,577,],[156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,]),'type_specifier_no_typeid':([0,4,21,40,59,75,85,89,91,93,95,99,118,129,172,174,177,181,184,185,186,192,211,214,229,231,233,239,267,278,298,313,315,342,350,422,427,460,],[12,71,12,12,12,12,12,71,12,12,12,12,12,12,12,12,275,12,12,12,12,275,71,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,]),'struct_declaration':([99,184,185,186,313,315,],[190,190,190,318,318,318,]),'direct_typeid_noparen_declarator':([211,344,],[345,458,]),'id_declarator':([0,4,37,40,59,69,89,91,126,192,202,211,342,445,],[21,75,107,110,21,107,179,110,179,179,179,346,107,179,]),'selection_statement':([181,298,307,429,437,440,502,535,537,539,569,574,577,],[308,308,308,308,308,308,308,308,308,308,308,308,308,]),'postfix_expression':([85,116,131,146,149,171,174,175,181,201,204,218,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,290,297,298,307,319,329,338,339,353,354,364,373,375,412,413,419,421,427,429,430,434,437,440,441,447,477,481,484,499,502,513,521,533,535,537,538,539,542,543,549,553,566,569,574,577,],[168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,]),'initializer_list':([227,488,],[374,528,]),'unary_operator':([85,116,131,146,149,171,174,175,181,201,204,218,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,290,297,298,307,319,329,338,339,353,354,364,373,375,412,413,419,421,427,429,430,434,437,440,441,447,477,481,484,499,502,513,521,533,535,537,538,539,542,543,549,553,566,569,574,577,],[171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,]),'struct_or_union':([0,21,40,59,75,85,91,93,95,99,118,129,172,174,181,184,185,186,214,229,231,
233,239,267,278,298,313,315,342,350,422,427,460,],[31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,]),'block_item_list_opt':([181,],[309,]),'assignment_expression':([131,174,181,201,204,218,229,231,233,258,265,266,290,298,307,338,339,353,354,364,375,412,419,421,427,429,430,434,437,440,441,484,499,502,513,521,533,535,537,538,539,542,543,549,553,566,569,574,577,],[228,272,272,228,335,335,272,272,272,272,272,408,272,272,272,455,456,466,467,468,228,486,335,335,272,272,272,272,272,272,272,525,272,272,335,228,272,272,272,272,272,561,562,272,272,272,272,272,272,]),'designation_opt':([227,472,488,550,],[375,521,375,521,]),'parameter_type_list':([118,129,278,342,422,460,],[209,222,416,416,416,517,]),'type_qualifier_list':([35,85,95,99,117,128,174,184,185,186,206,220,229,231,233,239,267,282,313,315,459,515,],[103,172,172,172,205,219,172,172,172,172,103,103,172,172,172,172,172,103,172,172,514,103,]),'designator':([227,376,472,488,550,],[371,474,371,371,371,]),'id_init_declarator_list_opt':([40,91,],[114,114,]),'declaration_specifiers':([0,21,59,75,93,118,129,181,278,298,342,350,422,427,460,],[4,89,4,89,89,211,211,89,211,89,211,211,211,89,211,]),'identifier_list':([118,129,460,],[212,212,212,]),'declaration_list_opt':([21,75,],[92,130,]),'function_definition':([0,59,],[45,45,]),'binary_expression':([85,116,131,174,181,201,204,218,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,290,297,298,307,319,329,338,339,353,354,364,373,375,412,419,421,427,429,430,434,437,440,441,447,481,484,499,502,513,521,533,535,537,538,539,542,543,549,553,566,569,574,577,],[162,162,162,162,162,162,162,162,162,162,162,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,162,400,401,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,]),'enum_specifier':([0,21,40,59,75,85,91,93,95,99,118,129,172,174,181,184,185,186,214,229,231,233,239,267,278,298,313,315,342,350,422,427,460,],[49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,]),'decl_body':([0,21,59,75,93,181,298,427,],[51,51,51,51,51,51,51,51,]),'function_specifier':([0,1,4,21,27,52,53,55,59,63,75,87,89,93,118,129,181,211,278,298,342,350,422,427,460,],[55,55,82,55,55,55,55,55,55,55,55,55,82,55,55,55,55,82,55,55,55,55,55,55,55,]),'specifier_qualifier_list':([85,95,99,174,184,185,186,229,231,233,239,267,313,315,],[177,177,192,177,192,192,192,177,177,177,177,177,192,192,]),'conditional_expression':([85,116,131,174,181,201,204,218,229,231,233,258,265,266,290,297,298,307,319,329,338,339,353,354,364,373,375,412,419,421,427,429,430,434,437,440,441,447,481,484,499,502,513,521,533,535,537,538,539,542,543,549,553,566,569,574,577,],[178,178,225,225,225,225,225,225,225,225,225,225,225,225,225,178,225,225,178,178,225,225,225,225,225,178,225,225,225,225,225,225,225,225,225,225,225,178,524,225,225,225,225,225,225,225,225,225,225,225,225,225,225,225,225,225,225,]),}
+
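+# Same inversion for the goto table: _lr_goto[state][nonterminal] gives the
+# state to transition to after reducing to that nonterminal.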
+_lr_goto = {}
+for _k, _v in _lr_goto_items.items():
+   for _x, _y in zip(_v[0], _v[1]):
+      if _x not in _lr_goto:
+         _lr_goto[_x] = {}
+      _lr_goto[_x][_k] = _y
+del _lr_goto_items
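+# Each production is a tuple of (grammar rule as text, left-hand-side name,
+# right-hand-side length, handler function name, defining file, line number).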
+_lr_productions = [
+ ("S' -> translation_unit_or_empty","S'",1,None,None,None),
+ ('abstract_declarator_opt -> empty','abstract_declarator_opt',1,'p_abstract_declarator_opt','plyparser.py',43),
+ ('abstract_declarator_opt -> abstract_declarator','abstract_declarator_opt',1,'p_abstract_declarator_opt','plyparser.py',44),
+ ('assignment_expression_opt -> empty','assignment_expression_opt',1,'p_assignment_expression_opt','plyparser.py',43),
+ ('assignment_expression_opt -> assignment_expression','assignment_expression_opt',1,'p_assignment_expression_opt','plyparser.py',44),
+ ('block_item_list_opt -> empty','block_item_list_opt',1,'p_block_item_list_opt','plyparser.py',43),
+ ('block_item_list_opt -> block_item_list','block_item_list_opt',1,'p_block_item_list_opt','plyparser.py',44),
+ ('declaration_list_opt -> empty','declaration_list_opt',1,'p_declaration_list_opt','plyparser.py',43),
+ ('declaration_list_opt -> declaration_list','declaration_list_opt',1,'p_declaration_list_opt','plyparser.py',44),
+ ('declaration_specifiers_no_type_opt -> empty','declaration_specifiers_no_type_opt',1,'p_declaration_specifiers_no_type_opt','plyparser.py',43),
+ ('declaration_specifiers_no_type_opt -> declaration_specifiers_no_type','declaration_specifiers_no_type_opt',1,'p_declaration_specifiers_no_type_opt','plyparser.py',44),
+ ('designation_opt -> empty','designation_opt',1,'p_designation_opt','plyparser.py',43),
+ ('designation_opt -> designation','designation_opt',1,'p_designation_opt','plyparser.py',44),
+ ('expression_opt -> empty','expression_opt',1,'p_expression_opt','plyparser.py',43),
+ ('expression_opt -> expression','expression_opt',1,'p_expression_opt','plyparser.py',44),
+ ('id_init_declarator_list_opt -> empty','id_init_declarator_list_opt',1,'p_id_init_declarator_list_opt','plyparser.py',43),
+ ('id_init_declarator_list_opt -> id_init_declarator_list','id_init_declarator_list_opt',1,'p_id_init_declarator_list_opt','plyparser.py',44),
+ ('identifier_list_opt -> empty','identifier_list_opt',1,'p_identifier_list_opt','plyparser.py',43),
+ ('identifier_list_opt -> identifier_list','identifier_list_opt',1,'p_identifier_list_opt','plyparser.py',44),
+ ('init_declarator_list_opt -> empty','init_declarator_list_opt',1,'p_init_declarator_list_opt','plyparser.py',43),
+ ('init_declarator_list_opt -> init_declarator_list','init_declarator_list_opt',1,'p_init_declarator_list_opt','plyparser.py',44),
+ ('initializer_list_opt -> empty','initializer_list_opt',1,'p_initializer_list_opt','plyparser.py',43),
+ ('initializer_list_opt -> initializer_list','initializer_list_opt',1,'p_initializer_list_opt','plyparser.py',44),
+ ('parameter_type_list_opt -> empty','parameter_type_list_opt',1,'p_parameter_type_list_opt','plyparser.py',43),
+ ('parameter_type_list_opt -> parameter_type_list','parameter_type_list_opt',1,'p_parameter_type_list_opt','plyparser.py',44),
+ ('struct_declarator_list_opt -> empty','struct_declarator_list_opt',1,'p_struct_declarator_list_opt','plyparser.py',43),
+ ('struct_declarator_list_opt -> struct_declarator_list','struct_declarator_list_opt',1,'p_struct_declarator_list_opt','plyparser.py',44),
+ ('type_qualifier_list_opt -> empty','type_qualifier_list_opt',1,'p_type_qualifier_list_opt','plyparser.py',43),
+ ('type_qualifier_list_opt -> type_qualifier_list','type_qualifier_list_opt',1,'p_type_qualifier_list_opt','plyparser.py',44),
+ ('direct_id_declarator -> ID','direct_id_declarator',1,'p_direct_id_declarator_1','plyparser.py',126),
+ ('direct_id_declarator -> LPAREN id_declarator RPAREN','direct_id_declarator',3,'p_direct_id_declarator_2','plyparser.py',126),
+ ('direct_id_declarator -> direct_id_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET','direct_id_declarator',5,'p_direct_id_declarator_3','plyparser.py',126),
+ ('direct_id_declarator -> direct_id_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET','direct_id_declarator',6,'p_direct_id_declarator_4','plyparser.py',126),
+ ('direct_id_declarator -> direct_id_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET','direct_id_declarator',6,'p_direct_id_declarator_4','plyparser.py',127),
+ ('direct_id_declarator -> direct_id_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET','direct_id_declarator',5,'p_direct_id_declarator_5','plyparser.py',126),
+ ('direct_id_declarator -> direct_id_declarator LPAREN parameter_type_list RPAREN','direct_id_declarator',4,'p_direct_id_declarator_6','plyparser.py',126),
+ ('direct_id_declarator -> direct_id_declarator LPAREN identifier_list_opt RPAREN','direct_id_declarator',4,'p_direct_id_declarator_6','plyparser.py',127),
+ ('direct_typeid_declarator -> TYPEID','direct_typeid_declarator',1,'p_direct_typeid_declarator_1','plyparser.py',126),
+ ('direct_typeid_declarator -> LPAREN typeid_declarator RPAREN','direct_typeid_declarator',3,'p_direct_typeid_declarator_2','plyparser.py',126),
+ ('direct_typeid_declarator -> direct_typeid_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET','direct_typeid_declarator',5,'p_direct_typeid_declarator_3','plyparser.py',126),
+ ('direct_typeid_declarator -> direct_typeid_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET','direct_typeid_declarator',6,'p_direct_typeid_declarator_4','plyparser.py',126),
+ ('direct_typeid_declarator -> direct_typeid_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET','direct_typeid_declarator',6,'p_direct_typeid_declarator_4','plyparser.py',127),
+ ('direct_typeid_declarator -> direct_typeid_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET','direct_typeid_declarator',5,'p_direct_typeid_declarator_5','plyparser.py',126),
+ ('direct_typeid_declarator -> direct_typeid_declarator LPAREN parameter_type_list RPAREN','direct_typeid_declarator',4,'p_direct_typeid_declarator_6','plyparser.py',126),
+ ('direct_typeid_declarator -> direct_typeid_declarator LPAREN identifier_list_opt RPAREN','direct_typeid_declarator',4,'p_direct_typeid_declarator_6','plyparser.py',127),
+ ('direct_typeid_noparen_declarator -> TYPEID','direct_typeid_noparen_declarator',1,'p_direct_typeid_noparen_declarator_1','plyparser.py',126),
+ ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET','direct_typeid_noparen_declarator',5,'p_direct_typeid_noparen_declarator_3','plyparser.py',126),
+ ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET','direct_typeid_noparen_declarator',6,'p_direct_typeid_noparen_declarator_4','plyparser.py',126),
+ ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET','direct_typeid_noparen_declarator',6,'p_direct_typeid_noparen_declarator_4','plyparser.py',127),
+ ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET','direct_typeid_noparen_declarator',5,'p_direct_typeid_noparen_declarator_5','plyparser.py',126),
+ ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LPAREN parameter_type_list RPAREN','direct_typeid_noparen_declarator',4,'p_direct_typeid_noparen_declarator_6','plyparser.py',126),
+ ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LPAREN identifier_list_opt RPAREN','direct_typeid_noparen_declarator',4,'p_direct_typeid_noparen_declarator_6','plyparser.py',127),
+ ('id_declarator -> direct_id_declarator','id_declarator',1,'p_id_declarator_1','plyparser.py',126),
+ ('id_declarator -> pointer direct_id_declarator','id_declarator',2,'p_id_declarator_2','plyparser.py',126),
+ ('typeid_declarator -> direct_typeid_declarator','typeid_declarator',1,'p_typeid_declarator_1','plyparser.py',126),
+ ('typeid_declarator -> pointer direct_typeid_declarator','typeid_declarator',2,'p_typeid_declarator_2','plyparser.py',126),
+ ('typeid_noparen_declarator -> direct_typeid_noparen_declarator','typeid_noparen_declarator',1,'p_typeid_noparen_declarator_1','plyparser.py',126),
+ ('typeid_noparen_declarator -> pointer direct_typeid_noparen_declarator','typeid_noparen_declarator',2,'p_typeid_noparen_declarator_2','plyparser.py',126),
+ ('translation_unit_or_empty -> translation_unit','translation_unit_or_empty',1,'p_translation_unit_or_empty','c_parser.py',509),
+ ('translation_unit_or_empty -> empty','translation_unit_or_empty',1,'p_translation_unit_or_empty','c_parser.py',510),
+ ('translation_unit -> external_declaration','translation_unit',1,'p_translation_unit_1','c_parser.py',518),
+ ('translation_unit -> translation_unit external_declaration','translation_unit',2,'p_translation_unit_2','c_parser.py',524),
+ ('external_declaration -> function_definition','external_declaration',1,'p_external_declaration_1','c_parser.py',534),
+ ('external_declaration -> declaration','external_declaration',1,'p_external_declaration_2','c_parser.py',539),
+ ('external_declaration -> pp_directive','external_declaration',1,'p_external_declaration_3','c_parser.py',544),
+ ('external_declaration -> pppragma_directive','external_declaration',1,'p_external_declaration_3','c_parser.py',545),
+ ('external_declaration -> SEMI','external_declaration',1,'p_external_declaration_4','c_parser.py',550),
+ ('external_declaration -> static_assert','external_declaration',1,'p_external_declaration_5','c_parser.py',555),
+ ('static_assert -> _STATIC_ASSERT LPAREN constant_expression COMMA unified_string_literal RPAREN','static_assert',6,'p_static_assert_declaration','c_parser.py',560),
+ ('static_assert -> _STATIC_ASSERT LPAREN constant_expression RPAREN','static_assert',4,'p_static_assert_declaration','c_parser.py',561),
+ ('pp_directive -> PPHASH','pp_directive',1,'p_pp_directive','c_parser.py',569),
+ ('pppragma_directive -> PPPRAGMA','pppragma_directive',1,'p_pppragma_directive','c_parser.py',575),
+ ('pppragma_directive -> PPPRAGMA PPPRAGMASTR','pppragma_directive',2,'p_pppragma_directive','c_parser.py',576),
+ ('function_definition -> id_declarator declaration_list_opt compound_statement','function_definition',3,'p_function_definition_1','c_parser.py',586),
+ ('function_definition -> declaration_specifiers id_declarator declaration_list_opt compound_statement','function_definition',4,'p_function_definition_2','c_parser.py',604),
+ ('statement -> labeled_statement','statement',1,'p_statement','c_parser.py',619),
+ ('statement -> expression_statement','statement',1,'p_statement','c_parser.py',620),
+ ('statement -> compound_statement','statement',1,'p_statement','c_parser.py',621),
+ ('statement -> selection_statement','statement',1,'p_statement','c_parser.py',622),
+ ('statement -> iteration_statement','statement',1,'p_statement','c_parser.py',623),
+ ('statement -> jump_statement','statement',1,'p_statement','c_parser.py',624),
+ ('statement -> pppragma_directive','statement',1,'p_statement','c_parser.py',625),
+ ('statement -> static_assert','statement',1,'p_statement','c_parser.py',626),
+ ('pragmacomp_or_statement -> pppragma_directive statement','pragmacomp_or_statement',2,'p_pragmacomp_or_statement','c_parser.py',674),
+ ('pragmacomp_or_statement -> statement','pragmacomp_or_statement',1,'p_pragmacomp_or_statement','c_parser.py',675),
+ ('decl_body -> declaration_specifiers init_declarator_list_opt','decl_body',2,'p_decl_body','c_parser.py',694),
+ ('decl_body -> declaration_specifiers_no_type id_init_declarator_list_opt','decl_body',2,'p_decl_body','c_parser.py',695),
+ ('declaration -> decl_body SEMI','declaration',2,'p_declaration','c_parser.py',755),
+ ('declaration_list -> declaration','declaration_list',1,'p_declaration_list','c_parser.py',764),
+ ('declaration_list -> declaration_list declaration','declaration_list',2,'p_declaration_list','c_parser.py',765),
+ ('declaration_specifiers_no_type -> type_qualifier declaration_specifiers_no_type_opt','declaration_specifiers_no_type',2,'p_declaration_specifiers_no_type_1','c_parser.py',775),
+ ('declaration_specifiers_no_type -> storage_class_specifier declaration_specifiers_no_type_opt','declaration_specifiers_no_type',2,'p_declaration_specifiers_no_type_2','c_parser.py',780),
+ ('declaration_specifiers_no_type -> function_specifier declaration_specifiers_no_type_opt','declaration_specifiers_no_type',2,'p_declaration_specifiers_no_type_3','c_parser.py',785),
+ ('declaration_specifiers_no_type -> atomic_specifier declaration_specifiers_no_type_opt','declaration_specifiers_no_type',2,'p_declaration_specifiers_no_type_4','c_parser.py',792),
+ ('declaration_specifiers_no_type -> alignment_specifier declaration_specifiers_no_type_opt','declaration_specifiers_no_type',2,'p_declaration_specifiers_no_type_5','c_parser.py',797),
+ ('declaration_specifiers -> declaration_specifiers type_qualifier','declaration_specifiers',2,'p_declaration_specifiers_1','c_parser.py',802),
+ ('declaration_specifiers -> declaration_specifiers storage_class_specifier','declaration_specifiers',2,'p_declaration_specifiers_2','c_parser.py',807),
+ ('declaration_specifiers -> declaration_specifiers function_specifier','declaration_specifiers',2,'p_declaration_specifiers_3','c_parser.py',812),
+ ('declaration_specifiers -> declaration_specifiers type_specifier_no_typeid','declaration_specifiers',2,'p_declaration_specifiers_4','c_parser.py',817),
+ ('declaration_specifiers -> type_specifier','declaration_specifiers',1,'p_declaration_specifiers_5','c_parser.py',822),
+ ('declaration_specifiers -> declaration_specifiers_no_type type_specifier','declaration_specifiers',2,'p_declaration_specifiers_6','c_parser.py',827),
+ ('declaration_specifiers -> declaration_specifiers alignment_specifier','declaration_specifiers',2,'p_declaration_specifiers_7','c_parser.py',832),
+ ('storage_class_specifier -> AUTO','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',837),
+ ('storage_class_specifier -> REGISTER','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',838),
+ ('storage_class_specifier -> STATIC','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',839),
+ ('storage_class_specifier -> EXTERN','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',840),
+ ('storage_class_specifier -> TYPEDEF','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',841),
+ ('storage_class_specifier -> _THREAD_LOCAL','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',842),
+ ('function_specifier -> INLINE','function_specifier',1,'p_function_specifier','c_parser.py',847),
+ ('function_specifier -> _NORETURN','function_specifier',1,'p_function_specifier','c_parser.py',848),
+ ('type_specifier_no_typeid -> VOID','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',853),
+ ('type_specifier_no_typeid -> _BOOL','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',854),
+ ('type_specifier_no_typeid -> CHAR','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',855),
+ ('type_specifier_no_typeid -> SHORT','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',856),
+ ('type_specifier_no_typeid -> INT','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',857),
+ ('type_specifier_no_typeid -> LONG','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',858),
+ ('type_specifier_no_typeid -> FLOAT','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',859),
+ ('type_specifier_no_typeid -> DOUBLE','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',860),
+ ('type_specifier_no_typeid -> _COMPLEX','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',861),
+ ('type_specifier_no_typeid -> SIGNED','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',862),
+ ('type_specifier_no_typeid -> UNSIGNED','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',863),
+ ('type_specifier_no_typeid -> __INT128','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',864),
+ ('type_specifier -> typedef_name','type_specifier',1,'p_type_specifier','c_parser.py',869),
+ ('type_specifier -> enum_specifier','type_specifier',1,'p_type_specifier','c_parser.py',870),
+ ('type_specifier -> struct_or_union_specifier','type_specifier',1,'p_type_specifier','c_parser.py',871),
+ ('type_specifier -> type_specifier_no_typeid','type_specifier',1,'p_type_specifier','c_parser.py',872),
+ ('type_specifier -> atomic_specifier','type_specifier',1,'p_type_specifier','c_parser.py',873),
+ ('atomic_specifier -> _ATOMIC LPAREN type_name RPAREN','atomic_specifier',4,'p_atomic_specifier','c_parser.py',879),
+ ('type_qualifier -> CONST','type_qualifier',1,'p_type_qualifier','c_parser.py',886),
+ ('type_qualifier -> RESTRICT','type_qualifier',1,'p_type_qualifier','c_parser.py',887),
+ ('type_qualifier -> VOLATILE','type_qualifier',1,'p_type_qualifier','c_parser.py',888),
+ ('type_qualifier -> _ATOMIC','type_qualifier',1,'p_type_qualifier','c_parser.py',889),
+ ('init_declarator_list -> init_declarator','init_declarator_list',1,'p_init_declarator_list','c_parser.py',894),
+ ('init_declarator_list -> init_declarator_list COMMA init_declarator','init_declarator_list',3,'p_init_declarator_list','c_parser.py',895),
+ ('init_declarator -> declarator','init_declarator',1,'p_init_declarator','c_parser.py',903),
+ ('init_declarator -> declarator EQUALS initializer','init_declarator',3,'p_init_declarator','c_parser.py',904),
+ ('id_init_declarator_list -> id_init_declarator','id_init_declarator_list',1,'p_id_init_declarator_list','c_parser.py',909),
+ ('id_init_declarator_list -> id_init_declarator_list COMMA init_declarator','id_init_declarator_list',3,'p_id_init_declarator_list','c_parser.py',910),
+ ('id_init_declarator -> id_declarator','id_init_declarator',1,'p_id_init_declarator','c_parser.py',915),
+ ('id_init_declarator -> id_declarator EQUALS initializer','id_init_declarator',3,'p_id_init_declarator','c_parser.py',916),
+ ('specifier_qualifier_list -> specifier_qualifier_list type_specifier_no_typeid','specifier_qualifier_list',2,'p_specifier_qualifier_list_1','c_parser.py',923),
+ ('specifier_qualifier_list -> specifier_qualifier_list type_qualifier','specifier_qualifier_list',2,'p_specifier_qualifier_list_2','c_parser.py',928),
+ ('specifier_qualifier_list -> type_specifier','specifier_qualifier_list',1,'p_specifier_qualifier_list_3','c_parser.py',933),
+ ('specifier_qualifier_list -> type_qualifier_list type_specifier','specifier_qualifier_list',2,'p_specifier_qualifier_list_4','c_parser.py',938),
+ ('specifier_qualifier_list -> alignment_specifier','specifier_qualifier_list',1,'p_specifier_qualifier_list_5','c_parser.py',943),
+ ('specifier_qualifier_list -> specifier_qualifier_list alignment_specifier','specifier_qualifier_list',2,'p_specifier_qualifier_list_6','c_parser.py',948),
+ ('struct_or_union_specifier -> struct_or_union ID','struct_or_union_specifier',2,'p_struct_or_union_specifier_1','c_parser.py',956),
+ ('struct_or_union_specifier -> struct_or_union TYPEID','struct_or_union_specifier',2,'p_struct_or_union_specifier_1','c_parser.py',957),
+ ('struct_or_union_specifier -> struct_or_union brace_open struct_declaration_list brace_close','struct_or_union_specifier',4,'p_struct_or_union_specifier_2','c_parser.py',967),
+ ('struct_or_union_specifier -> struct_or_union brace_open brace_close','struct_or_union_specifier',3,'p_struct_or_union_specifier_2','c_parser.py',968),
+ ('struct_or_union_specifier -> struct_or_union ID brace_open struct_declaration_list brace_close','struct_or_union_specifier',5,'p_struct_or_union_specifier_3','c_parser.py',985),
+ ('struct_or_union_specifier -> struct_or_union ID brace_open brace_close','struct_or_union_specifier',4,'p_struct_or_union_specifier_3','c_parser.py',986),
+ ('struct_or_union_specifier -> struct_or_union TYPEID brace_open struct_declaration_list brace_close','struct_or_union_specifier',5,'p_struct_or_union_specifier_3','c_parser.py',987),
+ ('struct_or_union_specifier -> struct_or_union TYPEID brace_open brace_close','struct_or_union_specifier',4,'p_struct_or_union_specifier_3','c_parser.py',988),
+ ('struct_or_union -> STRUCT','struct_or_union',1,'p_struct_or_union','c_parser.py',1004),
+ ('struct_or_union -> UNION','struct_or_union',1,'p_struct_or_union','c_parser.py',1005),
+ ('struct_declaration_list -> struct_declaration','struct_declaration_list',1,'p_struct_declaration_list','c_parser.py',1012),
+ ('struct_declaration_list -> struct_declaration_list struct_declaration','struct_declaration_list',2,'p_struct_declaration_list','c_parser.py',1013),
+ ('struct_declaration -> specifier_qualifier_list struct_declarator_list_opt SEMI','struct_declaration',3,'p_struct_declaration_1','c_parser.py',1021),
+ ('struct_declaration -> SEMI','struct_declaration',1,'p_struct_declaration_2','c_parser.py',1059),
+ ('struct_declaration -> pppragma_directive','struct_declaration',1,'p_struct_declaration_3','c_parser.py',1064),
+ ('struct_declarator_list -> struct_declarator','struct_declarator_list',1,'p_struct_declarator_list','c_parser.py',1069),
+ ('struct_declarator_list -> struct_declarator_list COMMA struct_declarator','struct_declarator_list',3,'p_struct_declarator_list','c_parser.py',1070),
+ ('struct_declarator -> declarator','struct_declarator',1,'p_struct_declarator_1','c_parser.py',1078),
+ ('struct_declarator -> declarator COLON constant_expression','struct_declarator',3,'p_struct_declarator_2','c_parser.py',1083),
+ ('struct_declarator -> COLON constant_expression','struct_declarator',2,'p_struct_declarator_2','c_parser.py',1084),
+ ('enum_specifier -> ENUM ID','enum_specifier',2,'p_enum_specifier_1','c_parser.py',1092),
+ ('enum_specifier -> ENUM TYPEID','enum_specifier',2,'p_enum_specifier_1','c_parser.py',1093),
+ ('enum_specifier -> ENUM brace_open enumerator_list brace_close','enum_specifier',4,'p_enum_specifier_2','c_parser.py',1098),
+ ('enum_specifier -> ENUM ID brace_open enumerator_list brace_close','enum_specifier',5,'p_enum_specifier_3','c_parser.py',1103),
+ ('enum_specifier -> ENUM TYPEID brace_open enumerator_list brace_close','enum_specifier',5,'p_enum_specifier_3','c_parser.py',1104),
+ ('enumerator_list -> enumerator','enumerator_list',1,'p_enumerator_list','c_parser.py',1109),
+ ('enumerator_list -> enumerator_list COMMA','enumerator_list',2,'p_enumerator_list','c_parser.py',1110),
+ ('enumerator_list -> enumerator_list COMMA enumerator','enumerator_list',3,'p_enumerator_list','c_parser.py',1111),
+ ('alignment_specifier -> _ALIGNAS LPAREN type_name RPAREN','alignment_specifier',4,'p_alignment_specifier','c_parser.py',1122),
+ ('alignment_specifier -> _ALIGNAS LPAREN constant_expression RPAREN','alignment_specifier',4,'p_alignment_specifier','c_parser.py',1123),
+ ('enumerator -> ID','enumerator',1,'p_enumerator','c_parser.py',1128),
+ ('enumerator -> ID EQUALS constant_expression','enumerator',3,'p_enumerator','c_parser.py',1129),
+ ('declarator -> id_declarator','declarator',1,'p_declarator','c_parser.py',1144),
+ ('declarator -> typeid_declarator','declarator',1,'p_declarator','c_parser.py',1145),
+ ('pointer -> TIMES type_qualifier_list_opt','pointer',2,'p_pointer','c_parser.py',1257),
+ ('pointer -> TIMES type_qualifier_list_opt pointer','pointer',3,'p_pointer','c_parser.py',1258),
+ ('type_qualifier_list -> type_qualifier','type_qualifier_list',1,'p_type_qualifier_list','c_parser.py',1287),
+ ('type_qualifier_list -> type_qualifier_list type_qualifier','type_qualifier_list',2,'p_type_qualifier_list','c_parser.py',1288),
+ ('parameter_type_list -> parameter_list','parameter_type_list',1,'p_parameter_type_list','c_parser.py',1293),
+ ('parameter_type_list -> parameter_list COMMA ELLIPSIS','parameter_type_list',3,'p_parameter_type_list','c_parser.py',1294),
+ ('parameter_list -> parameter_declaration','parameter_list',1,'p_parameter_list','c_parser.py',1302),
+ ('parameter_list -> parameter_list COMMA parameter_declaration','parameter_list',3,'p_parameter_list','c_parser.py',1303),
+ ('parameter_declaration -> declaration_specifiers id_declarator','parameter_declaration',2,'p_parameter_declaration_1','c_parser.py',1322),
+ ('parameter_declaration -> declaration_specifiers typeid_noparen_declarator','parameter_declaration',2,'p_parameter_declaration_1','c_parser.py',1323),
+ ('parameter_declaration -> declaration_specifiers abstract_declarator_opt','parameter_declaration',2,'p_parameter_declaration_2','c_parser.py',1334),
+ ('identifier_list -> identifier','identifier_list',1,'p_identifier_list','c_parser.py',1366),
+ ('identifier_list -> identifier_list COMMA identifier','identifier_list',3,'p_identifier_list','c_parser.py',1367),
+ ('initializer -> assignment_expression','initializer',1,'p_initializer_1','c_parser.py',1376),
+ ('initializer -> brace_open initializer_list_opt brace_close','initializer',3,'p_initializer_2','c_parser.py',1381),
+ ('initializer -> brace_open initializer_list COMMA brace_close','initializer',4,'p_initializer_2','c_parser.py',1382),
+ ('initializer_list -> designation_opt initializer','initializer_list',2,'p_initializer_list','c_parser.py',1390),
+ ('initializer_list -> initializer_list COMMA designation_opt initializer','initializer_list',4,'p_initializer_list','c_parser.py',1391),
+ ('designation -> designator_list EQUALS','designation',2,'p_designation','c_parser.py',1402),
+ ('designator_list -> designator','designator_list',1,'p_designator_list','c_parser.py',1410),
+ ('designator_list -> designator_list designator','designator_list',2,'p_designator_list','c_parser.py',1411),
+ ('designator -> LBRACKET constant_expression RBRACKET','designator',3,'p_designator','c_parser.py',1416),
+ ('designator -> PERIOD identifier','designator',2,'p_designator','c_parser.py',1417),
+ ('type_name -> specifier_qualifier_list abstract_declarator_opt','type_name',2,'p_type_name','c_parser.py',1422),
+ ('abstract_declarator -> pointer','abstract_declarator',1,'p_abstract_declarator_1','c_parser.py',1434),
+ ('abstract_declarator -> pointer direct_abstract_declarator','abstract_declarator',2,'p_abstract_declarator_2','c_parser.py',1442),
+ ('abstract_declarator -> direct_abstract_declarator','abstract_declarator',1,'p_abstract_declarator_3','c_parser.py',1447),
+ ('direct_abstract_declarator -> LPAREN abstract_declarator RPAREN','direct_abstract_declarator',3,'p_direct_abstract_declarator_1','c_parser.py',1457),
+ ('direct_abstract_declarator -> direct_abstract_declarator LBRACKET assignment_expression_opt RBRACKET','direct_abstract_declarator',4,'p_direct_abstract_declarator_2','c_parser.py',1461),
+ ('direct_abstract_declarator -> LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET','direct_abstract_declarator',4,'p_direct_abstract_declarator_3','c_parser.py',1472),
+ ('direct_abstract_declarator -> direct_abstract_declarator LBRACKET TIMES RBRACKET','direct_abstract_declarator',4,'p_direct_abstract_declarator_4','c_parser.py',1482),
+ ('direct_abstract_declarator -> LBRACKET TIMES RBRACKET','direct_abstract_declarator',3,'p_direct_abstract_declarator_5','c_parser.py',1493),
+ ('direct_abstract_declarator -> direct_abstract_declarator LPAREN parameter_type_list_opt RPAREN','direct_abstract_declarator',4,'p_direct_abstract_declarator_6','c_parser.py',1502),
+ ('direct_abstract_declarator -> LPAREN parameter_type_list_opt RPAREN','direct_abstract_declarator',3,'p_direct_abstract_declarator_7','c_parser.py',1512),
+ ('block_item -> declaration','block_item',1,'p_block_item','c_parser.py',1523),
+ ('block_item -> statement','block_item',1,'p_block_item','c_parser.py',1524),
+ ('block_item_list -> block_item','block_item_list',1,'p_block_item_list','c_parser.py',1531),
+ ('block_item_list -> block_item_list block_item','block_item_list',2,'p_block_item_list','c_parser.py',1532),
+ ('compound_statement -> brace_open block_item_list_opt brace_close','compound_statement',3,'p_compound_statement_1','c_parser.py',1538),
+ ('labeled_statement -> ID COLON pragmacomp_or_statement','labeled_statement',3,'p_labeled_statement_1','c_parser.py',1544),
+ ('labeled_statement -> CASE constant_expression COLON pragmacomp_or_statement','labeled_statement',4,'p_labeled_statement_2','c_parser.py',1548),
+ ('labeled_statement -> DEFAULT COLON pragmacomp_or_statement','labeled_statement',3,'p_labeled_statement_3','c_parser.py',1552),
+ ('selection_statement -> IF LPAREN expression RPAREN pragmacomp_or_statement','selection_statement',5,'p_selection_statement_1','c_parser.py',1556),
+ ('selection_statement -> IF LPAREN expression RPAREN statement ELSE pragmacomp_or_statement','selection_statement',7,'p_selection_statement_2','c_parser.py',1560),
+ ('selection_statement -> SWITCH LPAREN expression RPAREN pragmacomp_or_statement','selection_statement',5,'p_selection_statement_3','c_parser.py',1564),
+ ('iteration_statement -> WHILE LPAREN expression RPAREN pragmacomp_or_statement','iteration_statement',5,'p_iteration_statement_1','c_parser.py',1569),
+ ('iteration_statement -> DO pragmacomp_or_statement WHILE LPAREN expression RPAREN SEMI','iteration_statement',7,'p_iteration_statement_2','c_parser.py',1573),
+ ('iteration_statement -> FOR LPAREN expression_opt SEMI expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement','iteration_statement',9,'p_iteration_statement_3','c_parser.py',1577),
+ ('iteration_statement -> FOR LPAREN declaration expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement','iteration_statement',8,'p_iteration_statement_4','c_parser.py',1581),
+ ('jump_statement -> GOTO ID SEMI','jump_statement',3,'p_jump_statement_1','c_parser.py',1586),
+ ('jump_statement -> BREAK SEMI','jump_statement',2,'p_jump_statement_2','c_parser.py',1590),
+ ('jump_statement -> CONTINUE SEMI','jump_statement',2,'p_jump_statement_3','c_parser.py',1594),
+ ('jump_statement -> RETURN expression SEMI','jump_statement',3,'p_jump_statement_4','c_parser.py',1598),
+ ('jump_statement -> RETURN SEMI','jump_statement',2,'p_jump_statement_4','c_parser.py',1599),
+ ('expression_statement -> expression_opt SEMI','expression_statement',2,'p_expression_statement','c_parser.py',1604),
+ ('expression -> assignment_expression','expression',1,'p_expression','c_parser.py',1611),
+ ('expression -> expression COMMA assignment_expression','expression',3,'p_expression','c_parser.py',1612),
+ ('assignment_expression -> LPAREN compound_statement RPAREN','assignment_expression',3,'p_parenthesized_compound_expression','c_parser.py',1624),
+ ('typedef_name -> TYPEID','typedef_name',1,'p_typedef_name','c_parser.py',1628),
+ ('assignment_expression -> conditional_expression','assignment_expression',1,'p_assignment_expression','c_parser.py',1632),
+ ('assignment_expression -> unary_expression assignment_operator assignment_expression','assignment_expression',3,'p_assignment_expression','c_parser.py',1633),
+ ('assignment_operator -> EQUALS','assignment_operator',1,'p_assignment_operator','c_parser.py',1646),
+ ('assignment_operator -> XOREQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1647),
+ ('assignment_operator -> TIMESEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1648),
+ ('assignment_operator -> DIVEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1649),
+ ('assignment_operator -> MODEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1650),
+ ('assignment_operator -> PLUSEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1651),
+ ('assignment_operator -> MINUSEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1652),
+ ('assignment_operator -> LSHIFTEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1653),
+ ('assignment_operator -> RSHIFTEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1654),
+ ('assignment_operator -> ANDEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1655),
+ ('assignment_operator -> OREQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1656),
+ ('constant_expression -> conditional_expression','constant_expression',1,'p_constant_expression','c_parser.py',1661),
+ ('conditional_expression -> binary_expression','conditional_expression',1,'p_conditional_expression','c_parser.py',1665),
+ ('conditional_expression -> binary_expression CONDOP expression COLON conditional_expression','conditional_expression',5,'p_conditional_expression','c_parser.py',1666),
+ ('binary_expression -> cast_expression','binary_expression',1,'p_binary_expression','c_parser.py',1674),
+ ('binary_expression -> binary_expression TIMES binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1675),
+ ('binary_expression -> binary_expression DIVIDE binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1676),
+ ('binary_expression -> binary_expression MOD binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1677),
+ ('binary_expression -> binary_expression PLUS binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1678),
+ ('binary_expression -> binary_expression MINUS binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1679),
+ ('binary_expression -> binary_expression RSHIFT binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1680),
+ ('binary_expression -> binary_expression LSHIFT binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1681),
+ ('binary_expression -> binary_expression LT binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1682),
+ ('binary_expression -> binary_expression LE binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1683),
+ ('binary_expression -> binary_expression GE binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1684),
+ ('binary_expression -> binary_expression GT binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1685),
+ ('binary_expression -> binary_expression EQ binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1686),
+ ('binary_expression -> binary_expression NE binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1687),
+ ('binary_expression -> binary_expression AND binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1688),
+ ('binary_expression -> binary_expression OR binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1689),
+ ('binary_expression -> binary_expression XOR binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1690),
+ ('binary_expression -> binary_expression LAND binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1691),
+ ('binary_expression -> binary_expression LOR binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1692),
+ ('cast_expression -> unary_expression','cast_expression',1,'p_cast_expression_1','c_parser.py',1700),
+ ('cast_expression -> LPAREN type_name RPAREN cast_expression','cast_expression',4,'p_cast_expression_2','c_parser.py',1704),
+ ('unary_expression -> postfix_expression','unary_expression',1,'p_unary_expression_1','c_parser.py',1708),
+ ('unary_expression -> PLUSPLUS unary_expression','unary_expression',2,'p_unary_expression_2','c_parser.py',1712),
+ ('unary_expression -> MINUSMINUS unary_expression','unary_expression',2,'p_unary_expression_2','c_parser.py',1713),
+ ('unary_expression -> unary_operator cast_expression','unary_expression',2,'p_unary_expression_2','c_parser.py',1714),
+ ('unary_expression -> SIZEOF unary_expression','unary_expression',2,'p_unary_expression_3','c_parser.py',1719),
+ ('unary_expression -> SIZEOF LPAREN type_name RPAREN','unary_expression',4,'p_unary_expression_3','c_parser.py',1720),
+ ('unary_expression -> _ALIGNOF LPAREN type_name RPAREN','unary_expression',4,'p_unary_expression_3','c_parser.py',1721),
+ ('unary_operator -> AND','unary_operator',1,'p_unary_operator','c_parser.py',1729),
+ ('unary_operator -> TIMES','unary_operator',1,'p_unary_operator','c_parser.py',1730),
+ ('unary_operator -> PLUS','unary_operator',1,'p_unary_operator','c_parser.py',1731),
+ ('unary_operator -> MINUS','unary_operator',1,'p_unary_operator','c_parser.py',1732),
+ ('unary_operator -> NOT','unary_operator',1,'p_unary_operator','c_parser.py',1733),
+ ('unary_operator -> LNOT','unary_operator',1,'p_unary_operator','c_parser.py',1734),
+ ('postfix_expression -> primary_expression','postfix_expression',1,'p_postfix_expression_1','c_parser.py',1739),
+ ('postfix_expression -> postfix_expression LBRACKET expression RBRACKET','postfix_expression',4,'p_postfix_expression_2','c_parser.py',1743),
+ ('postfix_expression -> postfix_expression LPAREN argument_expression_list RPAREN','postfix_expression',4,'p_postfix_expression_3','c_parser.py',1747),
+ ('postfix_expression -> postfix_expression LPAREN RPAREN','postfix_expression',3,'p_postfix_expression_3','c_parser.py',1748),
+ ('postfix_expression -> postfix_expression PERIOD ID','postfix_expression',3,'p_postfix_expression_4','c_parser.py',1753),
+ ('postfix_expression -> postfix_expression PERIOD TYPEID','postfix_expression',3,'p_postfix_expression_4','c_parser.py',1754),
+ ('postfix_expression -> postfix_expression ARROW ID','postfix_expression',3,'p_postfix_expression_4','c_parser.py',1755),
+ ('postfix_expression -> postfix_expression ARROW TYPEID','postfix_expression',3,'p_postfix_expression_4','c_parser.py',1756),
+ ('postfix_expression -> postfix_expression PLUSPLUS','postfix_expression',2,'p_postfix_expression_5','c_parser.py',1762),
+ ('postfix_expression -> postfix_expression MINUSMINUS','postfix_expression',2,'p_postfix_expression_5','c_parser.py',1763),
+ ('postfix_expression -> LPAREN type_name RPAREN brace_open initializer_list brace_close','postfix_expression',6,'p_postfix_expression_6','c_parser.py',1768),
+ ('postfix_expression -> LPAREN type_name RPAREN brace_open initializer_list COMMA brace_close','postfix_expression',7,'p_postfix_expression_6','c_parser.py',1769),
+ ('primary_expression -> identifier','primary_expression',1,'p_primary_expression_1','c_parser.py',1774),
+ ('primary_expression -> constant','primary_expression',1,'p_primary_expression_2','c_parser.py',1778),
+ ('primary_expression -> unified_string_literal','primary_expression',1,'p_primary_expression_3','c_parser.py',1782),
+ ('primary_expression -> unified_wstring_literal','primary_expression',1,'p_primary_expression_3','c_parser.py',1783),
+ ('primary_expression -> LPAREN expression RPAREN','primary_expression',3,'p_primary_expression_4','c_parser.py',1788),
+ ('primary_expression -> OFFSETOF LPAREN type_name COMMA offsetof_member_designator RPAREN','primary_expression',6,'p_primary_expression_5','c_parser.py',1792),
+ ('offsetof_member_designator -> identifier','offsetof_member_designator',1,'p_offsetof_member_designator','c_parser.py',1800),
+ ('offsetof_member_designator -> offsetof_member_designator PERIOD identifier','offsetof_member_designator',3,'p_offsetof_member_designator','c_parser.py',1801),
+ ('offsetof_member_designator -> offsetof_member_designator LBRACKET expression RBRACKET','offsetof_member_designator',4,'p_offsetof_member_designator','c_parser.py',1802),
+ ('argument_expression_list -> assignment_expression','argument_expression_list',1,'p_argument_expression_list','c_parser.py',1814),
+ ('argument_expression_list -> argument_expression_list COMMA assignment_expression','argument_expression_list',3,'p_argument_expression_list','c_parser.py',1815),
+ ('identifier -> ID','identifier',1,'p_identifier','c_parser.py',1824),
+ ('constant -> INT_CONST_DEC','constant',1,'p_constant_1','c_parser.py',1828),
+ ('constant -> INT_CONST_OCT','constant',1,'p_constant_1','c_parser.py',1829),
+ ('constant -> INT_CONST_HEX','constant',1,'p_constant_1','c_parser.py',1830),
+ ('constant -> INT_CONST_BIN','constant',1,'p_constant_1','c_parser.py',1831),
+ ('constant -> INT_CONST_CHAR','constant',1,'p_constant_1','c_parser.py',1832),
+ ('constant -> FLOAT_CONST','constant',1,'p_constant_2','c_parser.py',1851),
+ ('constant -> HEX_FLOAT_CONST','constant',1,'p_constant_2','c_parser.py',1852),
+ ('constant -> CHAR_CONST','constant',1,'p_constant_3','c_parser.py',1868),
+ ('constant -> WCHAR_CONST','constant',1,'p_constant_3','c_parser.py',1869),
+ ('constant -> U8CHAR_CONST','constant',1,'p_constant_3','c_parser.py',1870),
+ ('constant -> U16CHAR_CONST','constant',1,'p_constant_3','c_parser.py',1871),
+ ('constant -> U32CHAR_CONST','constant',1,'p_constant_3','c_parser.py',1872),
+ ('unified_string_literal -> STRING_LITERAL','unified_string_literal',1,'p_unified_string_literal','c_parser.py',1883),
+ ('unified_string_literal -> unified_string_literal STRING_LITERAL','unified_string_literal',2,'p_unified_string_literal','c_parser.py',1884),
+ ('unified_wstring_literal -> WSTRING_LITERAL','unified_wstring_literal',1,'p_unified_wstring_literal','c_parser.py',1894),
+ ('unified_wstring_literal -> U8STRING_LITERAL','unified_wstring_literal',1,'p_unified_wstring_literal','c_parser.py',1895),
+ ('unified_wstring_literal -> U16STRING_LITERAL','unified_wstring_literal',1,'p_unified_wstring_literal','c_parser.py',1896),
+ ('unified_wstring_literal -> U32STRING_LITERAL','unified_wstring_literal',1,'p_unified_wstring_literal','c_parser.py',1897),
+ ('unified_wstring_literal -> unified_wstring_literal WSTRING_LITERAL','unified_wstring_literal',2,'p_unified_wstring_literal','c_parser.py',1898),
+ ('unified_wstring_literal -> unified_wstring_literal U8STRING_LITERAL','unified_wstring_literal',2,'p_unified_wstring_literal','c_parser.py',1899),
+ ('unified_wstring_literal -> unified_wstring_literal U16STRING_LITERAL','unified_wstring_literal',2,'p_unified_wstring_literal','c_parser.py',1900),
+ ('unified_wstring_literal -> unified_wstring_literal U32STRING_LITERAL','unified_wstring_literal',2,'p_unified_wstring_literal','c_parser.py',1901),
+ ('brace_open -> LBRACE','brace_open',1,'p_brace_open','c_parser.py',1911),
+ ('brace_close -> RBRACE','brace_close',1,'p_brace_close','c_parser.py',1917),
+ ('empty -> ','empty',0,'p_empty','c_parser.py',1923),
+]
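
The production list above is machine-generated output: PLY's yacc records one tuple per grammar alternative, capturing the production text, its left-hand-side nonterminal, the number of right-hand-side symbols, and the `p_*` handler in `c_parser.py` (with file and line) that implements it. A minimal sketch of that mechanism, assuming PLY is installed (`pip install ply`); the toy grammar below is hypothetical and unrelated to pycparser's C grammar:

```python
import ply.lex as lex
import ply.yacc as yacc

tokens = ("NUMBER", "PLUS")

t_PLUS = r"\+"
t_ignore = " "

def t_NUMBER(t):
    r"\d+"
    t.value = int(t.value)
    return t

def t_error(t):
    t.lexer.skip(1)

def p_expr(p):
    """expr : expr PLUS NUMBER
            | NUMBER"""
    # PLY records one table entry per alternative above, pointing back here.
    p[0] = p[1] + p[3] if len(p) == 4 else p[1]

def p_error(p):
    raise SyntaxError(p)

lexer = lex.lex()
parser = yacc.yacc(write_tables=False)  # skip writing the cached table module
print(parser.parse("1 + 2 + 3"))  # -> 6
```

Running the sketch prints `6`; with `write_tables=False` PLY skips emitting the cached table module that production lists like the one above come from.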
diff --git a/billinglayer/python/requests-2.31.0.dist-info/INSTALLER b/billinglayer/python/requests-2.31.0.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/billinglayer/python/requests-2.31.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/billinglayer/python/requests-2.31.0.dist-info/LICENSE b/billinglayer/python/requests-2.31.0.dist-info/LICENSE
new file mode 100644
index 0000000..67db858
--- /dev/null
+++ b/billinglayer/python/requests-2.31.0.dist-info/LICENSE
@@ -0,0 +1,175 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
diff --git a/billinglayer/python/requests-2.31.0.dist-info/METADATA b/billinglayer/python/requests-2.31.0.dist-info/METADATA
new file mode 100644
index 0000000..05779fa
--- /dev/null
+++ b/billinglayer/python/requests-2.31.0.dist-info/METADATA
@@ -0,0 +1,122 @@
+Metadata-Version: 2.1
+Name: requests
+Version: 2.31.0
+Summary: Python HTTP for Humans.
+Home-page: https://requests.readthedocs.io
+Author: Kenneth Reitz
+Author-email: me@kennethreitz.org
+License: Apache 2.0
+Project-URL: Documentation, https://requests.readthedocs.io
+Project-URL: Source, https://github.com/psf/requests
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Natural Language :: English
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Internet :: WWW/HTTP
+Classifier: Topic :: Software Development :: Libraries
+Requires-Python: >=3.7
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Requires-Dist: charset-normalizer (<4,>=2)
+Requires-Dist: idna (<4,>=2.5)
+Requires-Dist: urllib3 (<3,>=1.21.1)
+Requires-Dist: certifi (>=2017.4.17)
+Provides-Extra: security
+Provides-Extra: socks
+Requires-Dist: PySocks (!=1.5.7,>=1.5.6) ; extra == 'socks'
+Provides-Extra: use_chardet_on_py3
+Requires-Dist: chardet (<6,>=3.0.2) ; extra == 'use_chardet_on_py3'
+
+# Requests
+
+**Requests** is a simple, yet elegant, HTTP library.
+
+```python
+>>> import requests
+>>> r = requests.get('https://httpbin.org/basic-auth/user/pass', auth=('user', 'pass'))
+>>> r.status_code
+200
+>>> r.headers['content-type']
+'application/json; charset=utf8'
+>>> r.encoding
+'utf-8'
+>>> r.text
+'{"authenticated": true, ...'
+>>> r.json()
+{'authenticated': True, ...}
+```
+
+Requests allows you to send HTTP/1.1 requests extremely easily. There’s no need to manually add query strings to your URLs, or to form-encode your `PUT` & `POST` data — but nowadays, just use the `json` method!
+
+Requests is one of the most downloaded Python packages today, pulling in around `30M downloads / week`— according to GitHub, Requests is currently [depended upon](https://github.com/psf/requests/network/dependents?package_id=UGFja2FnZS01NzA4OTExNg%3D%3D) by `1,000,000+` repositories. You may certainly put your trust in this code.
+
+[Downloads](https://pepy.tech/project/requests)
+[Supported Versions](https://pypi.org/project/requests)
+[Contributors](https://github.com/psf/requests/graphs/contributors)
+
+## Installing Requests and Supported Versions
+
+Requests is available on PyPI:
+
+```console
+$ python -m pip install requests
+```
+
+Requests officially supports Python 3.7+.
+
+## Supported Features & Best–Practices
+
+Requests is ready for the demands of building robust and reliable HTTP–speaking applications, for the needs of today.
+
+- Keep-Alive & Connection Pooling
+- International Domains and URLs
+- Sessions with Cookie Persistence
+- Browser-style TLS/SSL Verification
+- Basic & Digest Authentication
+- Familiar `dict`–like Cookies
+- Automatic Content Decompression and Decoding
+- Multi-part File Uploads
+- SOCKS Proxy Support
+- Connection Timeouts
+- Streaming Downloads
+- Automatic honoring of `.netrc`
+- Chunked HTTP Requests
+
+## API Reference and User Guide available on [Read the Docs](https://requests.readthedocs.io)
+
+[Read the Docs](https://requests.readthedocs.io)
+
+## Cloning the repository
+
+When cloning the Requests repository, you may need to add the `-c
+fetch.fsck.badTimezone=ignore` flag to avoid an error about a bad commit (see
+[this issue](https://github.com/psf/requests/issues/2690) for more background):
+
+```shell
+git clone -c fetch.fsck.badTimezone=ignore https://github.com/psf/requests.git
+```
+
+You can also apply this setting to your global Git config:
+
+```shell
+git config --global fetch.fsck.badTimezone ignore
+```
+
+---
+
+[Kenneth Reitz](https://kennethreitz.org) [Python Software Foundation](https://www.python.org/psf)
+
+
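
The README's point about not having to form-encode `POST` data deserves a concrete line or two. A short sketch (the httpbin.org URL is purely illustrative): passing `json=` makes Requests serialize the body and set the `Content-Type: application/json` header for you.

```python
import requests

payload = {"key1": "value1", "key2": "value2"}
# No manual form-encoding or header juggling: json= handles both.
r = requests.post("https://httpbin.org/post", json=payload, timeout=10)
r.raise_for_status()
print(r.json()["json"])  # httpbin echoes the parsed body back under "json"
```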
diff --git a/billinglayer/python/requests-2.31.0.dist-info/RECORD b/billinglayer/python/requests-2.31.0.dist-info/RECORD
new file mode 100644
index 0000000..abb6ac6
--- /dev/null
+++ b/billinglayer/python/requests-2.31.0.dist-info/RECORD
@@ -0,0 +1,43 @@
+requests-2.31.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+requests-2.31.0.dist-info/LICENSE,sha256=CeipvOyAZxBGUsFoaFqwkx54aPnIKEtm9a5u2uXxEws,10142
+requests-2.31.0.dist-info/METADATA,sha256=eCPokOnbb0FROLrfl0R5EpDvdufsb9CaN4noJH__54I,4634
+requests-2.31.0.dist-info/RECORD,,
+requests-2.31.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+requests-2.31.0.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92
+requests-2.31.0.dist-info/top_level.txt,sha256=fMSVmHfb5rbGOo6xv-O_tUX6j-WyixssE-SnwcDRxNQ,9
+requests/__init__.py,sha256=LvmKhjIz8mHaKXthC2Mv5ykZ1d92voyf3oJpd-VuAig,4963
+requests/__pycache__/__init__.cpython-311.pyc,,
+requests/__pycache__/__version__.cpython-311.pyc,,
+requests/__pycache__/_internal_utils.cpython-311.pyc,,
+requests/__pycache__/adapters.cpython-311.pyc,,
+requests/__pycache__/api.cpython-311.pyc,,
+requests/__pycache__/auth.cpython-311.pyc,,
+requests/__pycache__/certs.cpython-311.pyc,,
+requests/__pycache__/compat.cpython-311.pyc,,
+requests/__pycache__/cookies.cpython-311.pyc,,
+requests/__pycache__/exceptions.cpython-311.pyc,,
+requests/__pycache__/help.cpython-311.pyc,,
+requests/__pycache__/hooks.cpython-311.pyc,,
+requests/__pycache__/models.cpython-311.pyc,,
+requests/__pycache__/packages.cpython-311.pyc,,
+requests/__pycache__/sessions.cpython-311.pyc,,
+requests/__pycache__/status_codes.cpython-311.pyc,,
+requests/__pycache__/structures.cpython-311.pyc,,
+requests/__pycache__/utils.cpython-311.pyc,,
+requests/__version__.py,sha256=ssI3Ezt7PaxgkOW45GhtwPUclo_SO_ygtIm4A74IOfw,435
+requests/_internal_utils.py,sha256=nMQymr4hs32TqVo5AbCrmcJEhvPUh7xXlluyqwslLiQ,1495
+requests/adapters.py,sha256=v_FmjU5KZ76k-YttShZYB5RprIzhhL8Y3zgW9p4eBQ8,19553
+requests/api.py,sha256=q61xcXq4tmiImrvcSVLTbFyCiD2F-L_-hWKGbz4y8vg,6449
+requests/auth.py,sha256=h-HLlVx9j8rKV5hfSAycP2ApOSglTz77R0tz7qCbbEE,10187
+requests/certs.py,sha256=Z9Sb410Anv6jUFTyss0jFFhU6xst8ctELqfy8Ev23gw,429
+requests/compat.py,sha256=yxntVOSEHGMrn7FNr_32EEam1ZNAdPRdSE13_yaHzTk,1451
+requests/cookies.py,sha256=kD3kNEcCj-mxbtf5fJsSaT86eGoEYpD3X0CSgpzl7BM,18560
+requests/exceptions.py,sha256=DhveFBclVjTRxhRduVpO-GbMYMID2gmjdLfNEqNpI_U,3811
+requests/help.py,sha256=gPX5d_H7Xd88aDABejhqGgl9B1VFRTt5BmiYvL3PzIQ,3875
+requests/hooks.py,sha256=CiuysiHA39V5UfcCBXFIx83IrDpuwfN9RcTUgv28ftQ,733
+requests/models.py,sha256=-DlKi0or8gFAM6VzutobXvvBW_2wrJuOF5NfndTIddA,35223
+requests/packages.py,sha256=DXgv-FJIczZITmv0vEBAhWj4W-5CGCIN_ksvgR17Dvs,957
+requests/sessions.py,sha256=-LvTzrPtetSTrR3buxu4XhdgMrJFLB1q5D7P--L2Xhw,30373
+requests/status_codes.py,sha256=FvHmT5uH-_uimtRz5hH9VCbt7VV-Nei2J9upbej6j8g,4235
+requests/structures.py,sha256=-IbmhVz06S-5aPSZuUthZ6-6D9XOjRuTXHOabY041XM,2912
+requests/utils.py,sha256=6sx2X3cIVA8BgWOg8odxFy-_lbWDFETU8HI4fU4Rmqw,33448
diff --git a/billinglayer/python/requests-2.31.0.dist-info/REQUESTED b/billinglayer/python/requests-2.31.0.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
diff --git a/billinglayer/python/requests-2.31.0.dist-info/WHEEL b/billinglayer/python/requests-2.31.0.dist-info/WHEEL
new file mode 100644
index 0000000..1f37c02
--- /dev/null
+++ b/billinglayer/python/requests-2.31.0.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.40.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/billinglayer/python/requests-2.31.0.dist-info/top_level.txt b/billinglayer/python/requests-2.31.0.dist-info/top_level.txt
new file mode 100644
index 0000000..f229360
--- /dev/null
+++ b/billinglayer/python/requests-2.31.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+requests
diff --git a/billinglayer/python/requests/__init__.py b/billinglayer/python/requests/__init__.py
new file mode 100644
index 0000000..300a16c
--- /dev/null
+++ b/billinglayer/python/requests/__init__.py
@@ -0,0 +1,180 @@
+# __
+# /__) _ _ _ _ _/ _
+# / ( (- (/ (/ (- _) / _)
+# /
+
+"""
+Requests HTTP Library
+~~~~~~~~~~~~~~~~~~~~~
+
+Requests is an HTTP library, written in Python, for human beings.
+Basic GET usage:
+
+ >>> import requests
+ >>> r = requests.get('https://www.python.org')
+ >>> r.status_code
+ 200
+ >>> b'Python is a programming language' in r.content
+ True
+
+... or POST:
+
+ >>> payload = dict(key1='value1', key2='value2')
+ >>> r = requests.post('https://httpbin.org/post', data=payload)
+ >>> print(r.text)
+ {
+ ...
+ "form": {
+ "key1": "value1",
+ "key2": "value2"
+ },
+ ...
+ }
+
+The other HTTP methods are supported - see `requests.api`. Full documentation
+is at <https://requests.readthedocs.io>.
+
+:copyright: (c) 2017 by Kenneth Reitz.
+:license: Apache 2.0, see LICENSE for more details.
+"""
+
+import warnings
+
+import urllib3
+
+from .exceptions import RequestsDependencyWarning
+
+try:
+ from charset_normalizer import __version__ as charset_normalizer_version
+except ImportError:
+ charset_normalizer_version = None
+
+try:
+ from chardet import __version__ as chardet_version
+except ImportError:
+ chardet_version = None
+
+
+def check_compatibility(urllib3_version, chardet_version, charset_normalizer_version):
+ urllib3_version = urllib3_version.split(".")
+ assert urllib3_version != ["dev"] # Verify urllib3 isn't installed from git.
+
+ # Sometimes, urllib3 only reports its version as 16.1.
+ if len(urllib3_version) == 2:
+ urllib3_version.append("0")
+
+ # Check urllib3 for compatibility.
+ major, minor, patch = urllib3_version # noqa: F811
+ major, minor, patch = int(major), int(minor), int(patch)
+ # urllib3 >= 1.21.1
+ assert major >= 1
+ if major == 1:
+ assert minor >= 21
+
+ # Check charset_normalizer for compatibility.
+ if chardet_version:
+ major, minor, patch = chardet_version.split(".")[:3]
+ major, minor, patch = int(major), int(minor), int(patch)
+ # chardet_version >= 3.0.2, < 6.0.0
+ assert (3, 0, 2) <= (major, minor, patch) < (6, 0, 0)
+ elif charset_normalizer_version:
+ major, minor, patch = charset_normalizer_version.split(".")[:3]
+ major, minor, patch = int(major), int(minor), int(patch)
+ # charset_normalizer >= 2.0.0 < 4.0.0
+ assert (2, 0, 0) <= (major, minor, patch) < (4, 0, 0)
+ else:
+ raise Exception("You need either charset_normalizer or chardet installed")
+
+
+def _check_cryptography(cryptography_version):
+ # cryptography < 1.3.4
+ try:
+ cryptography_version = list(map(int, cryptography_version.split(".")))
+ except ValueError:
+ return
+
+ if cryptography_version < [1, 3, 4]:
+ warning = "Old version of cryptography ({}) may cause slowdown.".format(
+ cryptography_version
+ )
+ warnings.warn(warning, RequestsDependencyWarning)
+
+
+# Check imported dependencies for compatibility.
+try:
+ check_compatibility(
+ urllib3.__version__, chardet_version, charset_normalizer_version
+ )
+except (AssertionError, ValueError):
+ warnings.warn(
+ "urllib3 ({}) or chardet ({})/charset_normalizer ({}) doesn't match a supported "
+ "version!".format(
+ urllib3.__version__, chardet_version, charset_normalizer_version
+ ),
+ RequestsDependencyWarning,
+ )
+
+# Attempt to enable urllib3's fallback for SNI support
+# if the standard library doesn't support SNI or the
+# 'ssl' library isn't available.
+try:
+ try:
+ import ssl
+ except ImportError:
+ ssl = None
+
+ if not getattr(ssl, "HAS_SNI", False):
+ from urllib3.contrib import pyopenssl
+
+ pyopenssl.inject_into_urllib3()
+
+ # Check cryptography version
+ from cryptography import __version__ as cryptography_version
+
+ _check_cryptography(cryptography_version)
+except ImportError:
+ pass
+
+# urllib3's DependencyWarnings should be silenced.
+from urllib3.exceptions import DependencyWarning
+
+warnings.simplefilter("ignore", DependencyWarning)
+
+# Set default logging handler to avoid "No handler found" warnings.
+import logging
+from logging import NullHandler
+
+from . import packages, utils
+from .__version__ import (
+ __author__,
+ __author_email__,
+ __build__,
+ __cake__,
+ __copyright__,
+ __description__,
+ __license__,
+ __title__,
+ __url__,
+ __version__,
+)
+from .api import delete, get, head, options, patch, post, put, request
+from .exceptions import (
+ ConnectionError,
+ ConnectTimeout,
+ FileModeWarning,
+ HTTPError,
+ JSONDecodeError,
+ ReadTimeout,
+ RequestException,
+ Timeout,
+ TooManyRedirects,
+ URLRequired,
+)
+from .models import PreparedRequest, Request, Response
+from .sessions import Session, session
+from .status_codes import codes
+
+logging.getLogger(__name__).addHandler(NullHandler())
+
+# FileModeWarnings go off per the default.
+warnings.simplefilter("default", FileModeWarning, append=True)
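
The import-time dependency gate defined above can be exercised directly. Note that `check_compatibility` is an internal helper rather than public API, so the following is only an illustrative sketch with made-up version strings:

```python
import warnings

from requests import check_compatibility  # internal helper, shown for illustration
from requests.exceptions import RequestsDependencyWarning

# In range: urllib3 1.x (>= 1.21.1) with charset_normalizer 3.x passes silently.
check_compatibility("1.26.18", None, "3.3.2")

# Out of range: urllib3 < 1 trips an assertion, which requests itself
# converts into a RequestsDependencyWarning at import time.
try:
    check_compatibility("0.9.0", None, "3.3.2")
except AssertionError:
    warnings.warn("unsupported urllib3 version", RequestsDependencyWarning)
```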
diff --git a/billinglayer/python/requests/__pycache__/__init__.cpython-311.pyc b/billinglayer/python/requests/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..e541c98
Binary files /dev/null and b/billinglayer/python/requests/__pycache__/__init__.cpython-311.pyc differ
diff --git a/billinglayer/python/requests/__pycache__/__version__.cpython-311.pyc b/billinglayer/python/requests/__pycache__/__version__.cpython-311.pyc
new file mode 100644
index 0000000..e0e072c
Binary files /dev/null and b/billinglayer/python/requests/__pycache__/__version__.cpython-311.pyc differ
diff --git a/billinglayer/python/requests/__pycache__/_internal_utils.cpython-311.pyc b/billinglayer/python/requests/__pycache__/_internal_utils.cpython-311.pyc
new file mode 100644
index 0000000..8492ba0
Binary files /dev/null and b/billinglayer/python/requests/__pycache__/_internal_utils.cpython-311.pyc differ
diff --git a/billinglayer/python/requests/__pycache__/adapters.cpython-311.pyc b/billinglayer/python/requests/__pycache__/adapters.cpython-311.pyc
new file mode 100644
index 0000000..b9bf20b
Binary files /dev/null and b/billinglayer/python/requests/__pycache__/adapters.cpython-311.pyc differ
diff --git a/billinglayer/python/requests/__pycache__/api.cpython-311.pyc b/billinglayer/python/requests/__pycache__/api.cpython-311.pyc
new file mode 100644
index 0000000..fe789a7
Binary files /dev/null and b/billinglayer/python/requests/__pycache__/api.cpython-311.pyc differ
diff --git a/billinglayer/python/requests/__pycache__/auth.cpython-311.pyc b/billinglayer/python/requests/__pycache__/auth.cpython-311.pyc
new file mode 100644
index 0000000..4097e67
Binary files /dev/null and b/billinglayer/python/requests/__pycache__/auth.cpython-311.pyc differ
diff --git a/billinglayer/python/requests/__pycache__/certs.cpython-311.pyc b/billinglayer/python/requests/__pycache__/certs.cpython-311.pyc
new file mode 100644
index 0000000..13ffd8f
Binary files /dev/null and b/billinglayer/python/requests/__pycache__/certs.cpython-311.pyc differ
diff --git a/billinglayer/python/requests/__pycache__/compat.cpython-311.pyc b/billinglayer/python/requests/__pycache__/compat.cpython-311.pyc
new file mode 100644
index 0000000..fe1bbb2
Binary files /dev/null and b/billinglayer/python/requests/__pycache__/compat.cpython-311.pyc differ
diff --git a/billinglayer/python/requests/__pycache__/cookies.cpython-311.pyc b/billinglayer/python/requests/__pycache__/cookies.cpython-311.pyc
new file mode 100644
index 0000000..5d56ea1
Binary files /dev/null and b/billinglayer/python/requests/__pycache__/cookies.cpython-311.pyc differ
diff --git a/billinglayer/python/requests/__pycache__/exceptions.cpython-311.pyc b/billinglayer/python/requests/__pycache__/exceptions.cpython-311.pyc
new file mode 100644
index 0000000..fc56679
Binary files /dev/null and b/billinglayer/python/requests/__pycache__/exceptions.cpython-311.pyc differ
diff --git a/billinglayer/python/requests/__pycache__/help.cpython-311.pyc b/billinglayer/python/requests/__pycache__/help.cpython-311.pyc
new file mode 100644
index 0000000..f6e4071
Binary files /dev/null and b/billinglayer/python/requests/__pycache__/help.cpython-311.pyc differ
diff --git a/billinglayer/python/requests/__pycache__/hooks.cpython-311.pyc b/billinglayer/python/requests/__pycache__/hooks.cpython-311.pyc
new file mode 100644
index 0000000..8c8e15e
Binary files /dev/null and b/billinglayer/python/requests/__pycache__/hooks.cpython-311.pyc differ
diff --git a/billinglayer/python/requests/__pycache__/models.cpython-311.pyc b/billinglayer/python/requests/__pycache__/models.cpython-311.pyc
new file mode 100644
index 0000000..1bfe1da
Binary files /dev/null and b/billinglayer/python/requests/__pycache__/models.cpython-311.pyc differ
diff --git a/billinglayer/python/requests/__pycache__/packages.cpython-311.pyc b/billinglayer/python/requests/__pycache__/packages.cpython-311.pyc
new file mode 100644
index 0000000..c9d92d4
Binary files /dev/null and b/billinglayer/python/requests/__pycache__/packages.cpython-311.pyc differ
diff --git a/billinglayer/python/requests/__pycache__/sessions.cpython-311.pyc b/billinglayer/python/requests/__pycache__/sessions.cpython-311.pyc
new file mode 100644
index 0000000..81b900b
Binary files /dev/null and b/billinglayer/python/requests/__pycache__/sessions.cpython-311.pyc differ
diff --git a/billinglayer/python/requests/__pycache__/status_codes.cpython-311.pyc b/billinglayer/python/requests/__pycache__/status_codes.cpython-311.pyc
new file mode 100644
index 0000000..fd03f8a
Binary files /dev/null and b/billinglayer/python/requests/__pycache__/status_codes.cpython-311.pyc differ
diff --git a/billinglayer/python/requests/__pycache__/structures.cpython-311.pyc b/billinglayer/python/requests/__pycache__/structures.cpython-311.pyc
new file mode 100644
index 0000000..846abbe
Binary files /dev/null and b/billinglayer/python/requests/__pycache__/structures.cpython-311.pyc differ
diff --git a/billinglayer/python/requests/__pycache__/utils.cpython-311.pyc b/billinglayer/python/requests/__pycache__/utils.cpython-311.pyc
new file mode 100644
index 0000000..4178568
Binary files /dev/null and b/billinglayer/python/requests/__pycache__/utils.cpython-311.pyc differ
diff --git a/billinglayer/python/requests/__version__.py b/billinglayer/python/requests/__version__.py
new file mode 100644
index 0000000..5063c3f
--- /dev/null
+++ b/billinglayer/python/requests/__version__.py
@@ -0,0 +1,14 @@
+# .-. .-. .-. . . .-. .-. .-. .-.
+# |( |- |.| | | |- `-. | `-.
+# ' ' `-' `-`.`-' `-' `-' ' `-'
+
+__title__ = "requests"
+__description__ = "Python HTTP for Humans."
+__url__ = "https://requests.readthedocs.io"
+__version__ = "2.31.0"
+__build__ = 0x023100
+__author__ = "Kenneth Reitz"
+__author_email__ = "me@kennethreitz.org"
+__license__ = "Apache 2.0"
+__copyright__ = "Copyright Kenneth Reitz"
+__cake__ = "\u2728 \U0001f370 \u2728"
diff --git a/billinglayer/python/requests/_internal_utils.py b/billinglayer/python/requests/_internal_utils.py
new file mode 100644
index 0000000..f2cf635
--- /dev/null
+++ b/billinglayer/python/requests/_internal_utils.py
@@ -0,0 +1,50 @@
+"""
+requests._internal_utils
+~~~~~~~~~~~~~~
+
+Provides utility functions that are consumed internally by Requests
+which depend on extremely few external helpers (such as compat)
+"""
+import re
+
+from .compat import builtin_str
+
+_VALID_HEADER_NAME_RE_BYTE = re.compile(rb"^[^:\s][^:\r\n]*$")
+_VALID_HEADER_NAME_RE_STR = re.compile(r"^[^:\s][^:\r\n]*$")
+_VALID_HEADER_VALUE_RE_BYTE = re.compile(rb"^\S[^\r\n]*$|^$")
+_VALID_HEADER_VALUE_RE_STR = re.compile(r"^\S[^\r\n]*$|^$")
+
+_HEADER_VALIDATORS_STR = (_VALID_HEADER_NAME_RE_STR, _VALID_HEADER_VALUE_RE_STR)
+_HEADER_VALIDATORS_BYTE = (_VALID_HEADER_NAME_RE_BYTE, _VALID_HEADER_VALUE_RE_BYTE)
+HEADER_VALIDATORS = {
+ bytes: _HEADER_VALIDATORS_BYTE,
+ str: _HEADER_VALIDATORS_STR,
+}
+
+
+def to_native_string(string, encoding="ascii"):
+ """Given a string object, regardless of type, returns a representation of
+ that string in the native string type, encoding and decoding where
+ necessary. This assumes ASCII unless told otherwise.
+ """
+ if isinstance(string, builtin_str):
+ out = string
+ else:
+ out = string.decode(encoding)
+
+ return out
+
+
+def unicode_is_ascii(u_string):
+ """Determine if unicode string only contains ASCII characters.
+
+ :param str u_string: unicode string to check. Must be unicode
+ and not Python 2 `str`.
+ :rtype: bool
+ """
+ assert isinstance(u_string, str)
+ try:
+ u_string.encode("ascii")
+ return True
+ except UnicodeEncodeError:
+ return False
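
A small usage sketch of the two helpers defined above; both are internal to Requests and are imported here only for illustration:

```python
from requests._internal_utils import to_native_string, unicode_is_ascii

print(to_native_string(b"Content-Type"))  # bytes decoded (ASCII) -> 'Content-Type'
print(to_native_string("X-Token"))        # str passes through unchanged
print(unicode_is_ascii("hello"))          # True
print(unicode_is_ascii("h\u00e9llo"))     # False: 'é' is outside ASCII
```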
diff --git a/billinglayer/python/requests/adapters.py b/billinglayer/python/requests/adapters.py
new file mode 100644
index 0000000..78e3bb6
--- /dev/null
+++ b/billinglayer/python/requests/adapters.py
@@ -0,0 +1,538 @@
+"""
+requests.adapters
+~~~~~~~~~~~~~~~~~
+
+This module contains the transport adapters that Requests uses to define
+and maintain connections.
+"""
+
+import os.path
+import socket # noqa: F401
+
+from urllib3.exceptions import ClosedPoolError, ConnectTimeoutError
+from urllib3.exceptions import HTTPError as _HTTPError
+from urllib3.exceptions import InvalidHeader as _InvalidHeader
+from urllib3.exceptions import (
+ LocationValueError,
+ MaxRetryError,
+ NewConnectionError,
+ ProtocolError,
+)
+from urllib3.exceptions import ProxyError as _ProxyError
+from urllib3.exceptions import ReadTimeoutError, ResponseError
+from urllib3.exceptions import SSLError as _SSLError
+from urllib3.poolmanager import PoolManager, proxy_from_url
+from urllib3.util import Timeout as TimeoutSauce
+from urllib3.util import parse_url
+from urllib3.util.retry import Retry
+
+from .auth import _basic_auth_str
+from .compat import basestring, urlparse
+from .cookies import extract_cookies_to_jar
+from .exceptions import (
+ ConnectionError,
+ ConnectTimeout,
+ InvalidHeader,
+ InvalidProxyURL,
+ InvalidSchema,
+ InvalidURL,
+ ProxyError,
+ ReadTimeout,
+ RetryError,
+ SSLError,
+)
+from .models import Response
+from .structures import CaseInsensitiveDict
+from .utils import (
+ DEFAULT_CA_BUNDLE_PATH,
+ extract_zipped_paths,
+ get_auth_from_url,
+ get_encoding_from_headers,
+ prepend_scheme_if_needed,
+ select_proxy,
+ urldefragauth,
+)
+
+try:
+ from urllib3.contrib.socks import SOCKSProxyManager
+except ImportError:
+
+ def SOCKSProxyManager(*args, **kwargs):
+ raise InvalidSchema("Missing dependencies for SOCKS support.")
+
+
+DEFAULT_POOLBLOCK = False
+DEFAULT_POOLSIZE = 10
+DEFAULT_RETRIES = 0
+DEFAULT_POOL_TIMEOUT = None
+
+
+class BaseAdapter:
+ """The Base Transport Adapter"""
+
+ def __init__(self):
+ super().__init__()
+
+ def send(
+ self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
+ ):
+ """Sends PreparedRequest object. Returns Response object.
+
+ :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
+ :param stream: (optional) Whether to stream the request content.
+ :param timeout: (optional) How long to wait for the server to send
+ data before giving up, as a float, or a :ref:`(connect timeout,
+ read timeout) <timeouts>` tuple.
+ :type timeout: float or tuple
+ :param verify: (optional) Either a boolean, in which case it controls whether we verify
+ the server's TLS certificate, or a string, in which case it must be a path
+ to a CA bundle to use
+ :param cert: (optional) Any user-provided SSL certificate to be trusted.
+ :param proxies: (optional) The proxies dictionary to apply to the request.
+ """
+ raise NotImplementedError
+
+ def close(self):
+ """Cleans up adapter specific items."""
+ raise NotImplementedError
+
+
+class HTTPAdapter(BaseAdapter):
+ """The built-in HTTP Adapter for urllib3.
+
+ Provides a general-case interface for Requests sessions to contact HTTP and
+ HTTPS urls by implementing the Transport Adapter interface. This class will
+ usually be created by the :class:`Session <requests.Session>` class under the
+ covers.
+
+ :param pool_connections: The number of urllib3 connection pools to cache.
+ :param pool_maxsize: The maximum number of connections to save in the pool.
+ :param max_retries: The maximum number of retries each connection
+ should attempt. Note, this applies only to failed DNS lookups, socket
+ connections and connection timeouts, never to requests where data has
+ made it to the server. By default, Requests does not retry failed
+ connections. If you need granular control over the conditions under
+ which we retry a request, import urllib3's ``Retry`` class and pass
+ that instead.
+ :param pool_block: Whether the connection pool should block for connections.
+
+ Usage::
+
+ >>> import requests
+ >>> s = requests.Session()
+ >>> a = requests.adapters.HTTPAdapter(max_retries=3)
+ >>> s.mount('http://', a)
+ """
+
+ __attrs__ = [
+ "max_retries",
+ "config",
+ "_pool_connections",
+ "_pool_maxsize",
+ "_pool_block",
+ ]
+
+ def __init__(
+ self,
+ pool_connections=DEFAULT_POOLSIZE,
+ pool_maxsize=DEFAULT_POOLSIZE,
+ max_retries=DEFAULT_RETRIES,
+ pool_block=DEFAULT_POOLBLOCK,
+ ):
+ if max_retries == DEFAULT_RETRIES:
+ self.max_retries = Retry(0, read=False)
+ else:
+ self.max_retries = Retry.from_int(max_retries)
+ self.config = {}
+ self.proxy_manager = {}
+
+ super().__init__()
+
+ self._pool_connections = pool_connections
+ self._pool_maxsize = pool_maxsize
+ self._pool_block = pool_block
+
+ self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)
+
+ def __getstate__(self):
+ return {attr: getattr(self, attr, None) for attr in self.__attrs__}
+
+ def __setstate__(self, state):
+ # Can't handle by adding 'proxy_manager' to self.__attrs__ because
+ # self.poolmanager uses a lambda function, which isn't pickleable.
+ self.proxy_manager = {}
+ self.config = {}
+
+ for attr, value in state.items():
+ setattr(self, attr, value)
+
+ self.init_poolmanager(
+ self._pool_connections, self._pool_maxsize, block=self._pool_block
+ )
+
+ def init_poolmanager(
+ self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs
+ ):
+ """Initializes a urllib3 PoolManager.
+
+ This method should not be called from user code, and is only
+ exposed for use when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param connections: The number of urllib3 connection pools to cache.
+ :param maxsize: The maximum number of connections to save in the pool.
+ :param block: Block when no free connections are available.
+ :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
+ """
+ # save these values for pickling
+ self._pool_connections = connections
+ self._pool_maxsize = maxsize
+ self._pool_block = block
+
+ self.poolmanager = PoolManager(
+ num_pools=connections,
+ maxsize=maxsize,
+ block=block,
+ **pool_kwargs,
+ )
+
+ def proxy_manager_for(self, proxy, **proxy_kwargs):
+ """Return urllib3 ProxyManager for the given proxy.
+
+ This method should not be called from user code, and is only
+ exposed for use when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param proxy: The proxy to return a urllib3 ProxyManager for.
+ :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
+ :returns: ProxyManager
+ :rtype: urllib3.ProxyManager
+ """
+ if proxy in self.proxy_manager:
+ manager = self.proxy_manager[proxy]
+ elif proxy.lower().startswith("socks"):
+ username, password = get_auth_from_url(proxy)
+ manager = self.proxy_manager[proxy] = SOCKSProxyManager(
+ proxy,
+ username=username,
+ password=password,
+ num_pools=self._pool_connections,
+ maxsize=self._pool_maxsize,
+ block=self._pool_block,
+ **proxy_kwargs,
+ )
+ else:
+ proxy_headers = self.proxy_headers(proxy)
+ manager = self.proxy_manager[proxy] = proxy_from_url(
+ proxy,
+ proxy_headers=proxy_headers,
+ num_pools=self._pool_connections,
+ maxsize=self._pool_maxsize,
+ block=self._pool_block,
+ **proxy_kwargs,
+ )
+
+ return manager
+
+ def cert_verify(self, conn, url, verify, cert):
+ """Verify a SSL certificate. This method should not be called from user
+ code, and is only exposed for use when subclassing the
+ :class:`HTTPAdapter `.
+
+ :param conn: The urllib3 connection object associated with the cert.
+ :param url: The requested URL.
+ :param verify: Either a boolean, in which case it controls whether we verify
+ the server's TLS certificate, or a string, in which case it must be a path
+ to a CA bundle to use
+ :param cert: The SSL certificate to verify.
+ """
+ if url.lower().startswith("https") and verify:
+
+ cert_loc = None
+
+ # Allow self-specified cert location.
+ if verify is not True:
+ cert_loc = verify
+
+ if not cert_loc:
+ cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH)
+
+ if not cert_loc or not os.path.exists(cert_loc):
+ raise OSError(
+ f"Could not find a suitable TLS CA certificate bundle, "
+ f"invalid path: {cert_loc}"
+ )
+
+ conn.cert_reqs = "CERT_REQUIRED"
+
+ if not os.path.isdir(cert_loc):
+ conn.ca_certs = cert_loc
+ else:
+ conn.ca_cert_dir = cert_loc
+ else:
+ conn.cert_reqs = "CERT_NONE"
+ conn.ca_certs = None
+ conn.ca_cert_dir = None
+
+ if cert:
+ if not isinstance(cert, basestring):
+ conn.cert_file = cert[0]
+ conn.key_file = cert[1]
+ else:
+ conn.cert_file = cert
+ conn.key_file = None
+ if conn.cert_file and not os.path.exists(conn.cert_file):
+ raise OSError(
+ f"Could not find the TLS certificate file, "
+ f"invalid path: {conn.cert_file}"
+ )
+ if conn.key_file and not os.path.exists(conn.key_file):
+ raise OSError(
+ f"Could not find the TLS key file, invalid path: {conn.key_file}"
+ )
+
+ def build_response(self, req, resp):
+ """Builds a :class:`Response ` object from a urllib3
+ response. This should not be called from user code, and is only exposed
+ for use when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`
+
+ :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
+ :param resp: The urllib3 response object.
+ :rtype: requests.Response
+ """
+ response = Response()
+
+ # Fallback to None if there's no status_code, for whatever reason.
+ response.status_code = getattr(resp, "status", None)
+
+ # Make headers case-insensitive.
+ response.headers = CaseInsensitiveDict(getattr(resp, "headers", {}))
+
+ # Set encoding.
+ response.encoding = get_encoding_from_headers(response.headers)
+ response.raw = resp
+ response.reason = response.raw.reason
+
+ if isinstance(req.url, bytes):
+ response.url = req.url.decode("utf-8")
+ else:
+ response.url = req.url
+
+ # Add new cookies from the server.
+ extract_cookies_to_jar(response.cookies, req, resp)
+
+ # Give the Response some context.
+ response.request = req
+ response.connection = self
+
+ return response
+
+ def get_connection(self, url, proxies=None):
+ """Returns a urllib3 connection for the given URL. This should not be
+ called from user code, and is only exposed for use when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param url: The URL to connect to.
+ :param proxies: (optional) A Requests-style dictionary of proxies used on this request.
+ :rtype: urllib3.ConnectionPool
+ """
+ proxy = select_proxy(url, proxies)
+
+ if proxy:
+ proxy = prepend_scheme_if_needed(proxy, "http")
+ proxy_url = parse_url(proxy)
+ if not proxy_url.host:
+ raise InvalidProxyURL(
+ "Please check proxy URL. It is malformed "
+ "and could be missing the host."
+ )
+ proxy_manager = self.proxy_manager_for(proxy)
+ conn = proxy_manager.connection_from_url(url)
+ else:
+ # Only scheme should be lower case
+ parsed = urlparse(url)
+ url = parsed.geturl()
+ conn = self.poolmanager.connection_from_url(url)
+
+ return conn
+
+ def close(self):
+ """Disposes of any internal state.
+
+ Currently, this closes the PoolManager and any active ProxyManager,
+ which closes any pooled connections.
+ """
+ self.poolmanager.clear()
+ for proxy in self.proxy_manager.values():
+ proxy.clear()
+
+ def request_url(self, request, proxies):
+ """Obtain the url to use when making the final request.
+
+ If the message is being sent through an HTTP proxy, the full URL has to
+ be used. Otherwise, we should only use the path portion of the URL.
+
+ This should not be called from user code, and is only exposed for use
+ when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
+ :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs.
+ :rtype: str
+ """
+ proxy = select_proxy(request.url, proxies)
+ scheme = urlparse(request.url).scheme
+
+ is_proxied_http_request = proxy and scheme != "https"
+ using_socks_proxy = False
+ if proxy:
+ proxy_scheme = urlparse(proxy).scheme.lower()
+ using_socks_proxy = proxy_scheme.startswith("socks")
+
+ url = request.path_url
+ if is_proxied_http_request and not using_socks_proxy:
+ url = urldefragauth(request.url)
+
+ return url
+
+ def add_headers(self, request, **kwargs):
+ """Add any headers needed by the connection. As of v2.0 this does
+ nothing by default, but is left for overriding by users that subclass
+ the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ This should not be called from user code, and is only exposed for use
+ when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
+ :param kwargs: The keyword arguments from the call to send().
+ """
+ pass
+
+ def proxy_headers(self, proxy):
+ """Returns a dictionary of the headers to add to any request sent
+ through a proxy. This works with urllib3 magic to ensure that they are
+ correctly sent to the proxy, rather than in a tunnelled request if
+ CONNECT is being used.
+
+ This should not be called from user code, and is only exposed for use
+ when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param proxy: The url of the proxy being used for this request.
+ :rtype: dict
+ """
+ headers = {}
+ username, password = get_auth_from_url(proxy)
+
+ if username:
+ headers["Proxy-Authorization"] = _basic_auth_str(username, password)
+
+ return headers
+
+ def send(
+ self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
+ ):
+ """Sends PreparedRequest object. Returns Response object.
+
+ :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
+ :param stream: (optional) Whether to stream the request content.
+ :param timeout: (optional) How long to wait for the server to send
+ data before giving up, as a float, or a :ref:`(connect timeout,
+ read timeout) <timeouts>` tuple.
+ :type timeout: float or tuple or urllib3 Timeout object
+ :param verify: (optional) Either a boolean, in which case it controls whether
+ we verify the server's TLS certificate, or a string, in which case it
+ must be a path to a CA bundle to use
+ :param cert: (optional) Any user-provided SSL certificate to be trusted.
+ :param proxies: (optional) The proxies dictionary to apply to the request.
+ :rtype: requests.Response
+ """
+
+ try:
+ conn = self.get_connection(request.url, proxies)
+ except LocationValueError as e:
+ raise InvalidURL(e, request=request)
+
+ self.cert_verify(conn, request.url, verify, cert)
+ url = self.request_url(request, proxies)
+ self.add_headers(
+ request,
+ stream=stream,
+ timeout=timeout,
+ verify=verify,
+ cert=cert,
+ proxies=proxies,
+ )
+
+ chunked = not (request.body is None or "Content-Length" in request.headers)
+
+ if isinstance(timeout, tuple):
+ try:
+ connect, read = timeout
+ timeout = TimeoutSauce(connect=connect, read=read)
+ except ValueError:
+ raise ValueError(
+ f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
+ f"or a single float to set both timeouts to the same value."
+ )
+ elif isinstance(timeout, TimeoutSauce):
+ pass
+ else:
+ timeout = TimeoutSauce(connect=timeout, read=timeout)
+
+ try:
+ resp = conn.urlopen(
+ method=request.method,
+ url=url,
+ body=request.body,
+ headers=request.headers,
+ redirect=False,
+ assert_same_host=False,
+ preload_content=False,
+ decode_content=False,
+ retries=self.max_retries,
+ timeout=timeout,
+ chunked=chunked,
+ )
+
+ except (ProtocolError, OSError) as err:
+ raise ConnectionError(err, request=request)
+
+ except MaxRetryError as e:
+ if isinstance(e.reason, ConnectTimeoutError):
+ # TODO: Remove this in 3.0.0: see #2811
+ if not isinstance(e.reason, NewConnectionError):
+ raise ConnectTimeout(e, request=request)
+
+ if isinstance(e.reason, ResponseError):
+ raise RetryError(e, request=request)
+
+ if isinstance(e.reason, _ProxyError):
+ raise ProxyError(e, request=request)
+
+ if isinstance(e.reason, _SSLError):
+ # This branch is for urllib3 v1.22 and later.
+ raise SSLError(e, request=request)
+
+ raise ConnectionError(e, request=request)
+
+ except ClosedPoolError as e:
+ raise ConnectionError(e, request=request)
+
+ except _ProxyError as e:
+ raise ProxyError(e)
+
+ except (_SSLError, _HTTPError) as e:
+ if isinstance(e, _SSLError):
+ # This branch is for urllib3 versions earlier than v1.22
+ raise SSLError(e, request=request)
+ elif isinstance(e, ReadTimeoutError):
+ raise ReadTimeout(e, request=request)
+ elif isinstance(e, _InvalidHeader):
+ raise InvalidHeader(e, request=request)
+ else:
+ raise
+
+ return self.build_response(request, resp)
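
A usage sketch of the adapter above: the timeout normalization in send() is what lets
callers pass either a single float or a (connect, read) pair. The host and retry count
here are illustrative, not taken from this patch::

    >>> import requests
    >>> from requests.adapters import HTTPAdapter
    >>> s = requests.Session()
    >>> s.mount("https://", HTTPAdapter(max_retries=3))
    >>> r = s.get("https://httpbin.org/get", timeout=(3.05, 27))
    >>> r.status_code
    200
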
diff --git a/billinglayer/python/requests/api.py b/billinglayer/python/requests/api.py
new file mode 100644
index 0000000..cd0b3ee
--- /dev/null
+++ b/billinglayer/python/requests/api.py
@@ -0,0 +1,157 @@
+"""
+requests.api
+~~~~~~~~~~~~
+
+This module implements the Requests API.
+
+:copyright: (c) 2012 by Kenneth Reitz.
+:license: Apache2, see LICENSE for more details.
+"""
+
+from . import sessions
+
+
+def request(method, url, **kwargs):
+ """Constructs and sends a :class:`Request `.
+
+ :param method: method for the new :class:`Request` object: ``GET``, ``OPTIONS``, ``HEAD``, ``POST``, ``PUT``, ``PATCH``, or ``DELETE``.
+ :param url: URL for the new :class:`Request` object.
+ :param params: (optional) Dictionary, list of tuples or bytes to send
+ in the query string for the :class:`Request`.
+ :param data: (optional) Dictionary, list of tuples, bytes, or file-like
+ object to send in the body of the :class:`Request`.
+ :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`.
+ :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
+ :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
+ :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload.
+ ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')``
+ or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content_type'`` is a string
+ defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers
+ to add for the file.
+ :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
+ :param timeout: (optional) How many seconds to wait for the server to send data
+ before giving up, as a float, or a :ref:`(connect timeout, read
+ timeout) <timeouts>` tuple.
+ :type timeout: float or tuple
+ :param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``.
+ :type allow_redirects: bool
+ :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
+ :param verify: (optional) Either a boolean, in which case it controls whether we verify
+ the server's TLS certificate, or a string, in which case it must be a path
+ to a CA bundle to use. Defaults to ``True``.
+ :param stream: (optional) if ``False``, the response content will be immediately downloaded.
+ :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair.
+ :return: :class:`Response <Response>` object
+ :rtype: requests.Response
+
+ Usage::
+
+ >>> import requests
+ >>> req = requests.request('GET', 'https://httpbin.org/get')
+ >>> req
+ <Response [200]>
+ """
+
+ # By using the 'with' statement we are sure the session is closed, thus we
+ # avoid leaving sockets open which can trigger a ResourceWarning in some
+ # cases, and look like a memory leak in others.
+ with sessions.Session() as session:
+ return session.request(method=method, url=url, **kwargs)
+
+
+def get(url, params=None, **kwargs):
+ r"""Sends a GET request.
+
+ :param url: URL for the new :class:`Request` object.
+ :param params: (optional) Dictionary, list of tuples or bytes to send
+ in the query string for the :class:`Request`.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :return: :class:`Response <Response>` object
+ :rtype: requests.Response
+ """
+
+ return request("get", url, params=params, **kwargs)
+
+
+def options(url, **kwargs):
+ r"""Sends an OPTIONS request.
+
+ :param url: URL for the new :class:`Request` object.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :return: :class:`Response <Response>` object
+ :rtype: requests.Response
+ """
+
+ return request("options", url, **kwargs)
+
+
+def head(url, **kwargs):
+ r"""Sends a HEAD request.
+
+ :param url: URL for the new :class:`Request` object.
+ :param \*\*kwargs: Optional arguments that ``request`` takes. If
+ `allow_redirects` is not provided, it will be set to `False` (as
+ opposed to the default :meth:`request` behavior).
+ :return: :class:`Response <Response>` object
+ :rtype: requests.Response
+ """
+
+ kwargs.setdefault("allow_redirects", False)
+ return request("head", url, **kwargs)
+
+
+def post(url, data=None, json=None, **kwargs):
+ r"""Sends a POST request.
+
+ :param url: URL for the new :class:`Request` object.
+ :param data: (optional) Dictionary, list of tuples, bytes, or file-like
+ object to send in the body of the :class:`Request`.
+ :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :return: :class:`Response <Response>` object
+ :rtype: requests.Response
+ """
+
+ return request("post", url, data=data, json=json, **kwargs)
+
+
+def put(url, data=None, **kwargs):
+ r"""Sends a PUT request.
+
+ :param url: URL for the new :class:`Request` object.
+ :param data: (optional) Dictionary, list of tuples, bytes, or file-like
+ object to send in the body of the :class:`Request`.
+ :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :return: :class:`Response <Response>` object
+ :rtype: requests.Response
+ """
+
+ return request("put", url, data=data, **kwargs)
+
+
+def patch(url, data=None, **kwargs):
+ r"""Sends a PATCH request.
+
+ :param url: URL for the new :class:`Request` object.
+ :param data: (optional) Dictionary, list of tuples, bytes, or file-like
+ object to send in the body of the :class:`Request`.
+ :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :return: :class:`Response <Response>` object
+ :rtype: requests.Response
+ """
+
+ return request("patch", url, data=data, **kwargs)
+
+
+def delete(url, **kwargs):
+ r"""Sends a DELETE request.
+
+ :param url: URL for the new :class:`Request` object.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :return: :class:`Response <Response>` object
+ :rtype: requests.Response
+ """
+
+ return request("delete", url, **kwargs)
diff --git a/billinglayer/python/requests/auth.py b/billinglayer/python/requests/auth.py
new file mode 100644
index 0000000..9733686
--- /dev/null
+++ b/billinglayer/python/requests/auth.py
@@ -0,0 +1,315 @@
+"""
+requests.auth
+~~~~~~~~~~~~~
+
+This module contains the authentication handlers for Requests.
+"""
+
+import hashlib
+import os
+import re
+import threading
+import time
+import warnings
+from base64 import b64encode
+
+from ._internal_utils import to_native_string
+from .compat import basestring, str, urlparse
+from .cookies import extract_cookies_to_jar
+from .utils import parse_dict_header
+
+CONTENT_TYPE_FORM_URLENCODED = "application/x-www-form-urlencoded"
+CONTENT_TYPE_MULTI_PART = "multipart/form-data"
+
+
+def _basic_auth_str(username, password):
+ """Returns a Basic Auth string."""
+
+ # "I want us to put a big-ol' comment on top of it that
+ # says that this behaviour is dumb but we need to preserve
+ # it because people are relying on it."
+ # - Lukasa
+ #
+ # These are here solely to maintain backwards compatibility
+ # for things like ints. This will be removed in 3.0.0.
+ if not isinstance(username, basestring):
+ warnings.warn(
+ "Non-string usernames will no longer be supported in Requests "
+ "3.0.0. Please convert the object you've passed in ({!r}) to "
+ "a string or bytes object in the near future to avoid "
+ "problems.".format(username),
+ category=DeprecationWarning,
+ )
+ username = str(username)
+
+ if not isinstance(password, basestring):
+ warnings.warn(
+ "Non-string passwords will no longer be supported in Requests "
+ "3.0.0. Please convert the object you've passed in ({!r}) to "
+ "a string or bytes object in the near future to avoid "
+ "problems.".format(type(password)),
+ category=DeprecationWarning,
+ )
+ password = str(password)
+ # -- End Removal --
+
+ if isinstance(username, str):
+ username = username.encode("latin1")
+
+ if isinstance(password, str):
+ password = password.encode("latin1")
+
+ authstr = "Basic " + to_native_string(
+ b64encode(b":".join((username, password))).strip()
+ )
+
+ return authstr
+
+
+class AuthBase:
+ """Base class that all auth implementations derive from"""
+
+ def __call__(self, r):
+ raise NotImplementedError("Auth hooks must be callable.")
+
+
+class HTTPBasicAuth(AuthBase):
+ """Attaches HTTP Basic Authentication to the given Request object."""
+
+ def __init__(self, username, password):
+ self.username = username
+ self.password = password
+
+ def __eq__(self, other):
+ return all(
+ [
+ self.username == getattr(other, "username", None),
+ self.password == getattr(other, "password", None),
+ ]
+ )
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __call__(self, r):
+ r.headers["Authorization"] = _basic_auth_str(self.username, self.password)
+ return r
+
+
+class HTTPProxyAuth(HTTPBasicAuth):
+ """Attaches HTTP Proxy Authentication to a given Request object."""
+
+ def __call__(self, r):
+ r.headers["Proxy-Authorization"] = _basic_auth_str(self.username, self.password)
+ return r
+
+
+class HTTPDigestAuth(AuthBase):
+ """Attaches HTTP Digest Authentication to the given Request object."""
+
+ def __init__(self, username, password):
+ self.username = username
+ self.password = password
+ # Keep state in per-thread local storage
+ self._thread_local = threading.local()
+
+ def init_per_thread_state(self):
+ # Ensure state is initialized just once per-thread
+ if not hasattr(self._thread_local, "init"):
+ self._thread_local.init = True
+ self._thread_local.last_nonce = ""
+ self._thread_local.nonce_count = 0
+ self._thread_local.chal = {}
+ self._thread_local.pos = None
+ self._thread_local.num_401_calls = None
+
+ def build_digest_header(self, method, url):
+ """
+ :rtype: str
+ """
+
+ realm = self._thread_local.chal["realm"]
+ nonce = self._thread_local.chal["nonce"]
+ qop = self._thread_local.chal.get("qop")
+ algorithm = self._thread_local.chal.get("algorithm")
+ opaque = self._thread_local.chal.get("opaque")
+ hash_utf8 = None
+
+ if algorithm is None:
+ _algorithm = "MD5"
+ else:
+ _algorithm = algorithm.upper()
+ # lambdas assume digest modules are imported at the top level
+ if _algorithm == "MD5" or _algorithm == "MD5-SESS":
+
+ def md5_utf8(x):
+ if isinstance(x, str):
+ x = x.encode("utf-8")
+ return hashlib.md5(x).hexdigest()
+
+ hash_utf8 = md5_utf8
+ elif _algorithm == "SHA":
+
+ def sha_utf8(x):
+ if isinstance(x, str):
+ x = x.encode("utf-8")
+ return hashlib.sha1(x).hexdigest()
+
+ hash_utf8 = sha_utf8
+ elif _algorithm == "SHA-256":
+
+ def sha256_utf8(x):
+ if isinstance(x, str):
+ x = x.encode("utf-8")
+ return hashlib.sha256(x).hexdigest()
+
+ hash_utf8 = sha256_utf8
+ elif _algorithm == "SHA-512":
+
+ def sha512_utf8(x):
+ if isinstance(x, str):
+ x = x.encode("utf-8")
+ return hashlib.sha512(x).hexdigest()
+
+ hash_utf8 = sha512_utf8
+
+ KD = lambda s, d: hash_utf8(f"{s}:{d}") # noqa:E731
+
+ if hash_utf8 is None:
+ return None
+
+ # XXX not implemented yet
+ entdig = None
+ p_parsed = urlparse(url)
+ #: path is request-uri defined in RFC 2616 which should not be empty
+ path = p_parsed.path or "/"
+ if p_parsed.query:
+ path += f"?{p_parsed.query}"
+
+ A1 = f"{self.username}:{realm}:{self.password}"
+ A2 = f"{method}:{path}"
+
+ HA1 = hash_utf8(A1)
+ HA2 = hash_utf8(A2)
+
+ if nonce == self._thread_local.last_nonce:
+ self._thread_local.nonce_count += 1
+ else:
+ self._thread_local.nonce_count = 1
+ ncvalue = f"{self._thread_local.nonce_count:08x}"
+ s = str(self._thread_local.nonce_count).encode("utf-8")
+ s += nonce.encode("utf-8")
+ s += time.ctime().encode("utf-8")
+ s += os.urandom(8)
+
+ cnonce = hashlib.sha1(s).hexdigest()[:16]
+ if _algorithm == "MD5-SESS":
+ HA1 = hash_utf8(f"{HA1}:{nonce}:{cnonce}")
+
+ if not qop:
+ respdig = KD(HA1, f"{nonce}:{HA2}")
+ elif qop == "auth" or "auth" in qop.split(","):
+ noncebit = f"{nonce}:{ncvalue}:{cnonce}:auth:{HA2}"
+ respdig = KD(HA1, noncebit)
+ else:
+ # XXX handle auth-int.
+ return None
+
+ self._thread_local.last_nonce = nonce
+
+ # XXX should the partial digests be encoded too?
+ base = (
+ f'username="{self.username}", realm="{realm}", nonce="{nonce}", '
+ f'uri="{path}", response="{respdig}"'
+ )
+ if opaque:
+ base += f', opaque="{opaque}"'
+ if algorithm:
+ base += f', algorithm="{algorithm}"'
+ if entdig:
+ base += f', digest="{entdig}"'
+ if qop:
+ base += f', qop="auth", nc={ncvalue}, cnonce="{cnonce}"'
+
+ return f"Digest {base}"
+
+ def handle_redirect(self, r, **kwargs):
+ """Reset num_401_calls counter on redirects."""
+ if r.is_redirect:
+ self._thread_local.num_401_calls = 1
+
+ def handle_401(self, r, **kwargs):
+ """
+ Takes the given response and tries digest-auth, if needed.
+
+ :rtype: requests.Response
+ """
+
+ # If response is not 4xx, do not auth
+ # See https://github.com/psf/requests/issues/3772
+ if not 400 <= r.status_code < 500:
+ self._thread_local.num_401_calls = 1
+ return r
+
+ if self._thread_local.pos is not None:
+ # Rewind the file position indicator of the body to where
+ # it was to resend the request.
+ r.request.body.seek(self._thread_local.pos)
+ s_auth = r.headers.get("www-authenticate", "")
+
+ if "digest" in s_auth.lower() and self._thread_local.num_401_calls < 2:
+
+ self._thread_local.num_401_calls += 1
+ pat = re.compile(r"digest ", flags=re.IGNORECASE)
+ self._thread_local.chal = parse_dict_header(pat.sub("", s_auth, count=1))
+
+ # Consume content and release the original connection
+ # to allow our new request to reuse the same one.
+ r.content
+ r.close()
+ prep = r.request.copy()
+ extract_cookies_to_jar(prep._cookies, r.request, r.raw)
+ prep.prepare_cookies(prep._cookies)
+
+ prep.headers["Authorization"] = self.build_digest_header(
+ prep.method, prep.url
+ )
+ _r = r.connection.send(prep, **kwargs)
+ _r.history.append(r)
+ _r.request = prep
+
+ return _r
+
+ self._thread_local.num_401_calls = 1
+ return r
+
+ def __call__(self, r):
+ # Initialize per-thread state, if needed
+ self.init_per_thread_state()
+ # If we have a saved nonce, skip the 401
+ if self._thread_local.last_nonce:
+ r.headers["Authorization"] = self.build_digest_header(r.method, r.url)
+ try:
+ self._thread_local.pos = r.body.tell()
+ except AttributeError:
+ # In the case of HTTPDigestAuth being reused and the body of
+ # the previous request was a file-like object, pos has the
+ # file position of the previous body. Ensure it's set to
+ # None.
+ self._thread_local.pos = None
+ r.register_hook("response", self.handle_401)
+ r.register_hook("response", self.handle_redirect)
+ self._thread_local.num_401_calls = 1
+
+ return r
+
+ def __eq__(self, other):
+ return all(
+ [
+ self.username == getattr(other, "username", None),
+ self.password == getattr(other, "password", None),
+ ]
+ )
+
+ def __ne__(self, other):
+ return not self == other
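
Both handlers are plain callables applied to the outgoing request; HTTPDigestAuth also
registers response hooks so the request is replayed once after a 401 challenge. A sketch
against illustrative httpbin.org endpoints::

    >>> import requests
    >>> from requests.auth import HTTPBasicAuth, HTTPDigestAuth
    >>> requests.get("https://httpbin.org/basic-auth/u/p", auth=HTTPBasicAuth("u", "p")).status_code
    200
    >>> requests.get("https://httpbin.org/digest-auth/auth/u/p", auth=HTTPDigestAuth("u", "p")).status_code
    200
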
diff --git a/billinglayer/python/requests/certs.py b/billinglayer/python/requests/certs.py
new file mode 100644
index 0000000..be422c3
--- /dev/null
+++ b/billinglayer/python/requests/certs.py
@@ -0,0 +1,17 @@
+#!/usr/bin/env python
+
+"""
+requests.certs
+~~~~~~~~~~~~~~
+
+This module returns the preferred default CA certificate bundle. There is
+only one — the one from the certifi package.
+
+If you are packaging Requests, e.g., for a Linux distribution or a managed
+environment, you can change the definition of where() to return a separately
+packaged CA bundle.
+"""
+from certifi import where
+
+if __name__ == "__main__":
+ print(where())
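
Everything else in the package resolves the CA bundle through this one function, so
repackagers only need to swap where(). The path printed below is illustrative and
varies by environment::

    >>> from requests.certs import where
    >>> where()
    '/.../site-packages/certifi/cacert.pem'
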
diff --git a/billinglayer/python/requests/compat.py b/billinglayer/python/requests/compat.py
new file mode 100644
index 0000000..6776163
--- /dev/null
+++ b/billinglayer/python/requests/compat.py
@@ -0,0 +1,79 @@
+"""
+requests.compat
+~~~~~~~~~~~~~~~
+
+This module previously handled import compatibility issues
+between Python 2 and Python 3. It remains for backwards
+compatibility until the next major version.
+"""
+
+try:
+ import chardet
+except ImportError:
+ import charset_normalizer as chardet
+
+import sys
+
+# -------
+# Pythons
+# -------
+
+# Syntax sugar.
+_ver = sys.version_info
+
+#: Python 2.x?
+is_py2 = _ver[0] == 2
+
+#: Python 3.x?
+is_py3 = _ver[0] == 3
+
+# json/simplejson module import resolution
+has_simplejson = False
+try:
+ import simplejson as json
+
+ has_simplejson = True
+except ImportError:
+ import json
+
+if has_simplejson:
+ from simplejson import JSONDecodeError
+else:
+ from json import JSONDecodeError
+
+# Keep OrderedDict for backwards compatibility.
+from collections import OrderedDict
+from collections.abc import Callable, Mapping, MutableMapping
+from http import cookiejar as cookielib
+from http.cookies import Morsel
+from io import StringIO
+
+# --------------
+# Legacy Imports
+# --------------
+from urllib.parse import (
+ quote,
+ quote_plus,
+ unquote,
+ unquote_plus,
+ urldefrag,
+ urlencode,
+ urljoin,
+ urlparse,
+ urlsplit,
+ urlunparse,
+)
+from urllib.request import (
+ getproxies,
+ getproxies_environment,
+ parse_http_list,
+ proxy_bypass,
+ proxy_bypass_environment,
+)
+
+builtin_str = str
+str = str
+bytes = bytes
+basestring = (str, bytes)
+numeric_types = (int, float)
+integer_types = (int,)
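
The module now just re-exports standard-library names so that older
``from requests.compat import ...`` call sites keep working, for example::

    >>> from requests.compat import urlparse, urlencode
    >>> urlparse("https://example.com/a?b=1").path
    '/a'
    >>> urlencode({"b": 1})
    'b=1'
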
diff --git a/billinglayer/python/requests/cookies.py b/billinglayer/python/requests/cookies.py
new file mode 100644
index 0000000..bf54ab2
--- /dev/null
+++ b/billinglayer/python/requests/cookies.py
@@ -0,0 +1,561 @@
+"""
+requests.cookies
+~~~~~~~~~~~~~~~~
+
+Compatibility code to be able to use `cookielib.CookieJar` with requests.
+
+requests.utils imports from here, so be careful with imports.
+"""
+
+import calendar
+import copy
+import time
+
+from ._internal_utils import to_native_string
+from .compat import Morsel, MutableMapping, cookielib, urlparse, urlunparse
+
+try:
+ import threading
+except ImportError:
+ import dummy_threading as threading
+
+
+class MockRequest:
+ """Wraps a `requests.Request` to mimic a `urllib2.Request`.
+
+ The code in `cookielib.CookieJar` expects this interface in order to correctly
+ manage cookie policies, i.e., determine whether a cookie can be set, given the
+ domains of the request and the cookie.
+
+ The original request object is read-only. The client is responsible for collecting
+ the new headers via `get_new_headers()` and interpreting them appropriately. You
+ probably want `get_cookie_header`, defined below.
+ """
+
+ def __init__(self, request):
+ self._r = request
+ self._new_headers = {}
+ self.type = urlparse(self._r.url).scheme
+
+ def get_type(self):
+ return self.type
+
+ def get_host(self):
+ return urlparse(self._r.url).netloc
+
+ def get_origin_req_host(self):
+ return self.get_host()
+
+ def get_full_url(self):
+ # Only return the response's URL if the user hadn't set the Host
+ # header
+ if not self._r.headers.get("Host"):
+ return self._r.url
+ # If they did set it, retrieve it and reconstruct the expected domain
+ host = to_native_string(self._r.headers["Host"], encoding="utf-8")
+ parsed = urlparse(self._r.url)
+ # Reconstruct the URL as we expect it
+ return urlunparse(
+ [
+ parsed.scheme,
+ host,
+ parsed.path,
+ parsed.params,
+ parsed.query,
+ parsed.fragment,
+ ]
+ )
+
+ def is_unverifiable(self):
+ return True
+
+ def has_header(self, name):
+ return name in self._r.headers or name in self._new_headers
+
+ def get_header(self, name, default=None):
+ return self._r.headers.get(name, self._new_headers.get(name, default))
+
+ def add_header(self, key, val):
+ """cookielib has no legitimate use for this method; add it back if you find one."""
+ raise NotImplementedError(
+ "Cookie headers should be added with add_unredirected_header()"
+ )
+
+ def add_unredirected_header(self, name, value):
+ self._new_headers[name] = value
+
+ def get_new_headers(self):
+ return self._new_headers
+
+ @property
+ def unverifiable(self):
+ return self.is_unverifiable()
+
+ @property
+ def origin_req_host(self):
+ return self.get_origin_req_host()
+
+ @property
+ def host(self):
+ return self.get_host()
+
+
+class MockResponse:
+ """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`.
+
+ ...what? Basically, expose the parsed HTTP headers from the server response
+ the way `cookielib` expects to see them.
+ """
+
+ def __init__(self, headers):
+ """Make a MockResponse for `cookielib` to read.
+
+ :param headers: a httplib.HTTPMessage or analogous carrying the headers
+ """
+ self._headers = headers
+
+ def info(self):
+ return self._headers
+
+ def getheaders(self, name):
+ return self._headers.getheaders(name)
+
+
+def extract_cookies_to_jar(jar, request, response):
+ """Extract the cookies from the response into a CookieJar.
+
+ :param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar)
+ :param request: our own requests.Request object
+ :param response: urllib3.HTTPResponse object
+ """
+ if not (hasattr(response, "_original_response") and response._original_response):
+ return
+ # the _original_response field is the wrapped httplib.HTTPResponse object,
+ req = MockRequest(request)
+ # pull out the HTTPMessage with the headers and put it in the mock:
+ res = MockResponse(response._original_response.msg)
+ jar.extract_cookies(res, req)
+
+
+def get_cookie_header(jar, request):
+ """
+ Produce an appropriate Cookie header string to be sent with `request`, or None.
+
+ :rtype: str
+ """
+ r = MockRequest(request)
+ jar.add_cookie_header(r)
+ return r.get_new_headers().get("Cookie")
+
+
+def remove_cookie_by_name(cookiejar, name, domain=None, path=None):
+ """Unsets a cookie by name, by default over all domains and paths.
+
+ Wraps CookieJar.clear(); the operation is O(n).
+ """
+ clearables = []
+ for cookie in cookiejar:
+ if cookie.name != name:
+ continue
+ if domain is not None and domain != cookie.domain:
+ continue
+ if path is not None and path != cookie.path:
+ continue
+ clearables.append((cookie.domain, cookie.path, cookie.name))
+
+ for domain, path, name in clearables:
+ cookiejar.clear(domain, path, name)
+
+
+class CookieConflictError(RuntimeError):
+ """There are two cookies that meet the criteria specified in the cookie jar.
+ Use .get and .set and include domain and path args in order to be more specific.
+ """
+
+
+class RequestsCookieJar(cookielib.CookieJar, MutableMapping):
+ """Compatibility class; is a cookielib.CookieJar, but exposes a dict
+ interface.
+
+ This is the CookieJar we create by default for requests and sessions that
+ don't specify one, since some clients may expect response.cookies and
+ session.cookies to support dict operations.
+
+ Requests does not use the dict interface internally; it's just for
+ compatibility with external client code. All requests code should work
+ out of the box with externally provided instances of ``CookieJar``, e.g.
+ ``LWPCookieJar`` and ``FileCookieJar``.
+
+ Unlike a regular CookieJar, this class is pickleable.
+
+ .. warning:: dictionary operations that are normally O(1) may be O(n).
+ """
+
+ def get(self, name, default=None, domain=None, path=None):
+ """Dict-like get() that also supports optional domain and path args in
+ order to resolve naming collisions from using one cookie jar over
+ multiple domains.
+
+ .. warning:: operation is O(n), not O(1).
+ """
+ try:
+ return self._find_no_duplicates(name, domain, path)
+ except KeyError:
+ return default
+
+ def set(self, name, value, **kwargs):
+ """Dict-like set() that also supports optional domain and path args in
+ order to resolve naming collisions from using one cookie jar over
+ multiple domains.
+ """
+ # support client code that unsets cookies by assignment of a None value:
+ if value is None:
+ remove_cookie_by_name(
+ self, name, domain=kwargs.get("domain"), path=kwargs.get("path")
+ )
+ return
+
+ if isinstance(value, Morsel):
+ c = morsel_to_cookie(value)
+ else:
+ c = create_cookie(name, value, **kwargs)
+ self.set_cookie(c)
+ return c
+
+ def iterkeys(self):
+ """Dict-like iterkeys() that returns an iterator of names of cookies
+ from the jar.
+
+ .. seealso:: itervalues() and iteritems().
+ """
+ for cookie in iter(self):
+ yield cookie.name
+
+ def keys(self):
+ """Dict-like keys() that returns a list of names of cookies from the
+ jar.
+
+ .. seealso:: values() and items().
+ """
+ return list(self.iterkeys())
+
+ def itervalues(self):
+ """Dict-like itervalues() that returns an iterator of values of cookies
+ from the jar.
+
+ .. seealso:: iterkeys() and iteritems().
+ """
+ for cookie in iter(self):
+ yield cookie.value
+
+ def values(self):
+ """Dict-like values() that returns a list of values of cookies from the
+ jar.
+
+ .. seealso:: keys() and items().
+ """
+ return list(self.itervalues())
+
+ def iteritems(self):
+ """Dict-like iteritems() that returns an iterator of name-value tuples
+ from the jar.
+
+ .. seealso:: iterkeys() and itervalues().
+ """
+ for cookie in iter(self):
+ yield cookie.name, cookie.value
+
+ def items(self):
+ """Dict-like items() that returns a list of name-value tuples from the
+ jar. Allows client-code to call ``dict(RequestsCookieJar)`` and get a
+ vanilla python dict of key value pairs.
+
+ .. seealso:: keys() and values().
+ """
+ return list(self.iteritems())
+
+ def list_domains(self):
+ """Utility method to list all the domains in the jar."""
+ domains = []
+ for cookie in iter(self):
+ if cookie.domain not in domains:
+ domains.append(cookie.domain)
+ return domains
+
+ def list_paths(self):
+ """Utility method to list all the paths in the jar."""
+ paths = []
+ for cookie in iter(self):
+ if cookie.path not in paths:
+ paths.append(cookie.path)
+ return paths
+
+ def multiple_domains(self):
+ """Returns True if there are multiple domains in the jar.
+ Returns False otherwise.
+
+ :rtype: bool
+ """
+ domains = []
+ for cookie in iter(self):
+ if cookie.domain is not None and cookie.domain in domains:
+ return True
+ domains.append(cookie.domain)
+ return False # there is only one domain in jar
+
+ def get_dict(self, domain=None, path=None):
+ """Takes as an argument an optional domain and path and returns a plain
+ old Python dict of name-value pairs of cookies that meet the
+ requirements.
+
+ :rtype: dict
+ """
+ dictionary = {}
+ for cookie in iter(self):
+ if (domain is None or cookie.domain == domain) and (
+ path is None or cookie.path == path
+ ):
+ dictionary[cookie.name] = cookie.value
+ return dictionary
+
+ def __contains__(self, name):
+ try:
+ return super().__contains__(name)
+ except CookieConflictError:
+ return True
+
+ def __getitem__(self, name):
+ """Dict-like __getitem__() for compatibility with client code. Throws
+ exception if there are more than one cookie with name. In that case,
+ use the more explicit get() method instead.
+
+ .. warning:: operation is O(n), not O(1).
+ """
+ return self._find_no_duplicates(name)
+
+ def __setitem__(self, name, value):
+ """Dict-like __setitem__ for compatibility with client code. Throws
+ exception if there is already a cookie of that name in the jar. In that
+ case, use the more explicit set() method instead.
+ """
+ self.set(name, value)
+
+ def __delitem__(self, name):
+ """Deletes a cookie given a name. Wraps ``cookielib.CookieJar``'s
+ ``remove_cookie_by_name()``.
+ """
+ remove_cookie_by_name(self, name)
+
+ def set_cookie(self, cookie, *args, **kwargs):
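+ # Values that arrive wrapped in double quotes may carry escaped inner
+ # quotes; strip the backslash-quote sequences before storing the cookie.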
+ if (
+ hasattr(cookie.value, "startswith")
+ and cookie.value.startswith('"')
+ and cookie.value.endswith('"')
+ ):
+ cookie.value = cookie.value.replace('\\"', "")
+ return super().set_cookie(cookie, *args, **kwargs)
+
+ def update(self, other):
+ """Updates this jar with cookies from another CookieJar or dict-like"""
+ if isinstance(other, cookielib.CookieJar):
+ for cookie in other:
+ self.set_cookie(copy.copy(cookie))
+ else:
+ super().update(other)
+
+ def _find(self, name, domain=None, path=None):
+ """Requests uses this method internally to get cookie values.
+
+ If there are conflicting cookies, _find arbitrarily chooses one.
+ See _find_no_duplicates if you want an exception thrown if there are
+ conflicting cookies.
+
+ :param name: a string containing name of cookie
+ :param domain: (optional) string containing domain of cookie
+ :param path: (optional) string containing path of cookie
+ :return: cookie.value
+ """
+ for cookie in iter(self):
+ if cookie.name == name:
+ if domain is None or cookie.domain == domain:
+ if path is None or cookie.path == path:
+ return cookie.value
+
+ raise KeyError(f"name={name!r}, domain={domain!r}, path={path!r}")
+
+ def _find_no_duplicates(self, name, domain=None, path=None):
+ """Both ``__get_item__`` and ``get`` call this function: it's never
+ used elsewhere in Requests.
+
+ :param name: a string containing name of cookie
+ :param domain: (optional) string containing domain of cookie
+ :param path: (optional) string containing path of cookie
+ :raises KeyError: if cookie is not found
+ :raises CookieConflictError: if there are multiple cookies
+ that match name and optionally domain and path
+ :return: cookie.value
+ """
+ toReturn = None
+ for cookie in iter(self):
+ if cookie.name == name:
+ if domain is None or cookie.domain == domain:
+ if path is None or cookie.path == path:
+ if toReturn is not None:
+ # if there are multiple cookies that meet passed in criteria
+ raise CookieConflictError(
+ f"There are multiple cookies with name, {name!r}"
+ )
+ # we will eventually return this as long as no cookie conflict
+ toReturn = cookie.value
+
+ if toReturn:
+ return toReturn
+ raise KeyError(f"name={name!r}, domain={domain!r}, path={path!r}")
+
+ def __getstate__(self):
+ """Unlike a normal CookieJar, this class is pickleable."""
+ state = self.__dict__.copy()
+ # remove the unpickleable RLock object
+ state.pop("_cookies_lock")
+ return state
+
+ def __setstate__(self, state):
+ """Unlike a normal CookieJar, this class is pickleable."""
+ self.__dict__.update(state)
+ if "_cookies_lock" not in self.__dict__:
+ self._cookies_lock = threading.RLock()
+
+ def copy(self):
+ """Return a copy of this RequestsCookieJar."""
+ new_cj = RequestsCookieJar()
+ new_cj.set_policy(self.get_policy())
+ new_cj.update(self)
+ return new_cj
+
+ def get_policy(self):
+ """Return the CookiePolicy instance used."""
+ return self._policy
+
+
+def _copy_cookie_jar(jar):
+ if jar is None:
+ return None
+
+ if hasattr(jar, "copy"):
+ # We're dealing with an instance of RequestsCookieJar
+ return jar.copy()
+ # We're dealing with a generic CookieJar instance
+ new_jar = copy.copy(jar)
+ new_jar.clear()
+ for cookie in jar:
+ new_jar.set_cookie(copy.copy(cookie))
+ return new_jar
+
+
+def create_cookie(name, value, **kwargs):
+ """Make a cookie from underspecified parameters.
+
+ By default, the pair of `name` and `value` will be set for the domain ''
+ and sent on every request (this is sometimes called a "supercookie").
+ """
+ result = {
+ "version": 0,
+ "name": name,
+ "value": value,
+ "port": None,
+ "domain": "",
+ "path": "/",
+ "secure": False,
+ "expires": None,
+ "discard": True,
+ "comment": None,
+ "comment_url": None,
+ "rest": {"HttpOnly": None},
+ "rfc2109": False,
+ }
+
+ badargs = set(kwargs) - set(result)
+ if badargs:
+ raise TypeError(
+ f"create_cookie() got unexpected keyword arguments: {list(badargs)}"
+ )
+
+ result.update(kwargs)
+ result["port_specified"] = bool(result["port"])
+ result["domain_specified"] = bool(result["domain"])
+ result["domain_initial_dot"] = result["domain"].startswith(".")
+ result["path_specified"] = bool(result["path"])
+
+ return cookielib.Cookie(**result)
+
+
+def morsel_to_cookie(morsel):
+ """Convert a Morsel object into a Cookie containing the one k/v pair."""
+
+ expires = None
+ if morsel["max-age"]:
+ try:
+ expires = int(time.time() + int(morsel["max-age"]))
+ except ValueError:
+ raise TypeError(f"max-age: {morsel['max-age']} must be integer")
+ elif morsel["expires"]:
+ time_template = "%a, %d-%b-%Y %H:%M:%S GMT"
+ expires = calendar.timegm(time.strptime(morsel["expires"], time_template))
+ return create_cookie(
+ comment=morsel["comment"],
+ comment_url=bool(morsel["comment"]),
+ discard=False,
+ domain=morsel["domain"],
+ expires=expires,
+ name=morsel.key,
+ path=morsel["path"],
+ port=None,
+ rest={"HttpOnly": morsel["httponly"]},
+ rfc2109=False,
+ secure=bool(morsel["secure"]),
+ value=morsel.value,
+ version=morsel["version"] or 0,
+ )
+
+
+def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True):
+ """Returns a CookieJar from a key/value dictionary.
+
+ :param cookie_dict: Dict of key/values to insert into CookieJar.
+ :param cookiejar: (optional) A cookiejar to add the cookies to.
+ :param overwrite: (optional) If False, will not replace cookies
+ already in the jar with new ones.
+ :rtype: CookieJar
+ """
+ if cookiejar is None:
+ cookiejar = RequestsCookieJar()
+
+ if cookie_dict is not None:
+ names_from_jar = [cookie.name for cookie in cookiejar]
+ for name in cookie_dict:
+ if overwrite or (name not in names_from_jar):
+ cookiejar.set_cookie(create_cookie(name, cookie_dict[name]))
+
+ return cookiejar
+
+
+def merge_cookies(cookiejar, cookies):
+ """Add cookies to cookiejar and returns a merged CookieJar.
+
+ :param cookiejar: CookieJar object to add the cookies to.
+ :param cookies: Dictionary or CookieJar object to be added.
+ :rtype: CookieJar
+ """
+ if not isinstance(cookiejar, cookielib.CookieJar):
+ raise ValueError("You can only merge into CookieJar")
+
+ if isinstance(cookies, dict):
+ cookiejar = cookiejar_from_dict(cookies, cookiejar=cookiejar, overwrite=False)
+ elif isinstance(cookies, cookielib.CookieJar):
+ try:
+ cookiejar.update(cookies)
+ except AttributeError:
+ for cookie_in_jar in cookies:
+ cookiejar.set_cookie(cookie_in_jar)
+
+ return cookiejar
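
The jar behaves like a dict for the common case while still honoring domain and path
scoping through get()/set(). A short sketch; the names and domain are illustrative::

    >>> from requests.cookies import cookiejar_from_dict
    >>> jar = cookiejar_from_dict({"session": "abc123"})
    >>> jar["session"]
    'abc123'
    >>> c = jar.set("theme", "dark", domain="example.com", path="/app")
    >>> jar.get("theme", domain="example.com", path="/app")
    'dark'
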
diff --git a/billinglayer/python/requests/exceptions.py b/billinglayer/python/requests/exceptions.py
new file mode 100644
index 0000000..e1cedf8
--- /dev/null
+++ b/billinglayer/python/requests/exceptions.py
@@ -0,0 +1,141 @@
+"""
+requests.exceptions
+~~~~~~~~~~~~~~~~~~~
+
+This module contains the set of Requests' exceptions.
+"""
+from urllib3.exceptions import HTTPError as BaseHTTPError
+
+from .compat import JSONDecodeError as CompatJSONDecodeError
+
+
+class RequestException(IOError):
+ """There was an ambiguous exception that occurred while handling your
+ request.
+ """
+
+ def __init__(self, *args, **kwargs):
+ """Initialize RequestException with `request` and `response` objects."""
+ response = kwargs.pop("response", None)
+ self.response = response
+ self.request = kwargs.pop("request", None)
+ if response is not None and not self.request and hasattr(response, "request"):
+ self.request = self.response.request
+ super().__init__(*args, **kwargs)
+
+
+class InvalidJSONError(RequestException):
+ """A JSON error occurred."""
+
+
+class JSONDecodeError(InvalidJSONError, CompatJSONDecodeError):
+ """Couldn't decode the text into json"""
+
+ def __init__(self, *args, **kwargs):
+ """
+ Construct the JSONDecodeError instance first with all
+ args. Then use its args to construct the IOError so that
+ the json specific args aren't used as IOError specific args
+ and the error message from JSONDecodeError is preserved.
+ """
+ CompatJSONDecodeError.__init__(self, *args)
+ InvalidJSONError.__init__(self, *self.args, **kwargs)
+
+
+class HTTPError(RequestException):
+ """An HTTP error occurred."""
+
+
+class ConnectionError(RequestException):
+ """A Connection error occurred."""
+
+
+class ProxyError(ConnectionError):
+ """A proxy error occurred."""
+
+
+class SSLError(ConnectionError):
+ """An SSL error occurred."""
+
+
+class Timeout(RequestException):
+ """The request timed out.
+
+ Catching this error will catch both
+ :exc:`~requests.exceptions.ConnectTimeout` and
+ :exc:`~requests.exceptions.ReadTimeout` errors.
+ """
+
+
+class ConnectTimeout(ConnectionError, Timeout):
+ """The request timed out while trying to connect to the remote server.
+
+ Requests that produced this error are safe to retry.
+ """
+
+
+class ReadTimeout(Timeout):
+ """The server did not send any data in the allotted amount of time."""
+
+
+class URLRequired(RequestException):
+ """A valid URL is required to make a request."""
+
+
+class TooManyRedirects(RequestException):
+ """Too many redirects."""
+
+
+class MissingSchema(RequestException, ValueError):
+ """The URL scheme (e.g. http or https) is missing."""
+
+
+class InvalidSchema(RequestException, ValueError):
+ """The URL scheme provided is either invalid or unsupported."""
+
+
+class InvalidURL(RequestException, ValueError):
+ """The URL provided was somehow invalid."""
+
+
+class InvalidHeader(RequestException, ValueError):
+ """The header value provided was somehow invalid."""
+
+
+class InvalidProxyURL(InvalidURL):
+ """The proxy URL provided is invalid."""
+
+
+class ChunkedEncodingError(RequestException):
+ """The server declared chunked encoding but sent an invalid chunk."""
+
+
+class ContentDecodingError(RequestException, BaseHTTPError):
+ """Failed to decode response content."""
+
+
+class StreamConsumedError(RequestException, TypeError):
+ """The content for this response was already consumed."""
+
+
+class RetryError(RequestException):
+ """Custom retries logic failed"""
+
+
+class UnrewindableBodyError(RequestException):
+ """Requests encountered an error when trying to rewind a body."""
+
+
+# Warnings
+
+
+class RequestsWarning(Warning):
+ """Base warning for Requests."""
+
+
+class FileModeWarning(RequestsWarning, DeprecationWarning):
+ """A file was opened in text mode, but Requests determined its binary length."""
+
+
+class RequestsDependencyWarning(RequestsWarning):
+ """An imported dependency doesn't match the expected version range."""
diff --git a/billinglayer/python/requests/help.py b/billinglayer/python/requests/help.py
new file mode 100644
index 0000000..8fbcd65
--- /dev/null
+++ b/billinglayer/python/requests/help.py
@@ -0,0 +1,134 @@
+"""Module containing bug report helper(s)."""
+
+import json
+import platform
+import ssl
+import sys
+
+import idna
+import urllib3
+
+from . import __version__ as requests_version
+
+try:
+ import charset_normalizer
+except ImportError:
+ charset_normalizer = None
+
+try:
+ import chardet
+except ImportError:
+ chardet = None
+
+try:
+ from urllib3.contrib import pyopenssl
+except ImportError:
+ pyopenssl = None
+ OpenSSL = None
+ cryptography = None
+else:
+ import cryptography
+ import OpenSSL
+
+
+def _implementation():
+ """Return a dict with the Python implementation and version.
+
+ Provide both the name and the version of the Python implementation
+ currently running. For example, on CPython 3.10.3 it will return
+ {'name': 'CPython', 'version': '3.10.3'}.
+
+ This function works best on CPython and PyPy: in particular, it probably
+ doesn't work for Jython or IronPython. Future investigation should be done
+ to work out the correct shape of the code for those platforms.
+ """
+ implementation = platform.python_implementation()
+
+ if implementation == "CPython":
+ implementation_version = platform.python_version()
+ elif implementation == "PyPy":
+ implementation_version = "{}.{}.{}".format(
+ sys.pypy_version_info.major,
+ sys.pypy_version_info.minor,
+ sys.pypy_version_info.micro,
+ )
+ if sys.pypy_version_info.releaselevel != "final":
+ implementation_version = "".join(
+ [implementation_version, sys.pypy_version_info.releaselevel]
+ )
+ elif implementation == "Jython":
+ implementation_version = platform.python_version() # Complete Guess
+ elif implementation == "IronPython":
+ implementation_version = platform.python_version() # Complete Guess
+ else:
+ implementation_version = "Unknown"
+
+ return {"name": implementation, "version": implementation_version}
+
+
+def info():
+ """Generate information for a bug report."""
+ try:
+ platform_info = {
+ "system": platform.system(),
+ "release": platform.release(),
+ }
+ except OSError:
+ platform_info = {
+ "system": "Unknown",
+ "release": "Unknown",
+ }
+
+ implementation_info = _implementation()
+ urllib3_info = {"version": urllib3.__version__}
+ charset_normalizer_info = {"version": None}
+ chardet_info = {"version": None}
+ if charset_normalizer:
+ charset_normalizer_info = {"version": charset_normalizer.__version__}
+ if chardet:
+ chardet_info = {"version": chardet.__version__}
+
+ pyopenssl_info = {
+ "version": None,
+ "openssl_version": "",
+ }
+ if OpenSSL:
+ pyopenssl_info = {
+ "version": OpenSSL.__version__,
+ "openssl_version": f"{OpenSSL.SSL.OPENSSL_VERSION_NUMBER:x}",
+ }
+ cryptography_info = {
+ "version": getattr(cryptography, "__version__", ""),
+ }
+ idna_info = {
+ "version": getattr(idna, "__version__", ""),
+ }
+
+ system_ssl = ssl.OPENSSL_VERSION_NUMBER
+ system_ssl_info = {"version": f"{system_ssl:x}" if system_ssl is not None else ""}
+
+ return {
+ "platform": platform_info,
+ "implementation": implementation_info,
+ "system_ssl": system_ssl_info,
+ "using_pyopenssl": pyopenssl is not None,
+ "using_charset_normalizer": chardet is None,
+ "pyOpenSSL": pyopenssl_info,
+ "urllib3": urllib3_info,
+ "chardet": chardet_info,
+ "charset_normalizer": charset_normalizer_info,
+ "cryptography": cryptography_info,
+ "idna": idna_info,
+ "requests": {
+ "version": requests_version,
+ },
+ }
+
+
+def main():
+ """Pretty-print the bug information as JSON."""
+ print(json.dumps(info(), sort_keys=True, indent=2))
+
+
+if __name__ == "__main__":
+ main()
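
The same data is available programmatically; for instance, on a CPython interpreter::

    >>> from requests.help import info
    >>> info()["implementation"]["name"]
    'CPython'
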
diff --git a/billinglayer/python/requests/hooks.py b/billinglayer/python/requests/hooks.py
new file mode 100644
index 0000000..d181ba2
--- /dev/null
+++ b/billinglayer/python/requests/hooks.py
@@ -0,0 +1,33 @@
+"""
+requests.hooks
+~~~~~~~~~~~~~~
+
+This module provides the capabilities for the Requests hooks system.
+
+Available hooks:
+
+``response``:
+ The response generated from a Request.
+"""
+HOOKS = ["response"]
+
+
+def default_hooks():
+ return {event: [] for event in HOOKS}
+
+
+# TODO: response is the only one
+
+
+def dispatch_hook(key, hooks, hook_data, **kwargs):
+ """Dispatches a hook dictionary on a given piece of data."""
+ hooks = hooks or {}
+ hooks = hooks.get(key)
+ if hooks:
+ if hasattr(hooks, "__call__"):
+ hooks = [hooks]
+ for hook in hooks:
+ _hook_data = hook(hook_data, **kwargs)
+ if _hook_data is not None:
+ hook_data = _hook_data
+ return hook_data
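
dispatch_hook() accepts either a single callable or a list of them, and a hook replaces
the data only when it returns something other than None. Both behaviors in a sketch::

    >>> from requests.hooks import dispatch_hook
    >>> dispatch_hook("response", {"response": lambda r: r.upper()}, "ok")
    'OK'
    >>> dispatch_hook("response", {"response": [print]}, "ok")  # print returns None
    ok
    'ok'
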
diff --git a/billinglayer/python/requests/models.py b/billinglayer/python/requests/models.py
new file mode 100644
index 0000000..617a413
--- /dev/null
+++ b/billinglayer/python/requests/models.py
@@ -0,0 +1,1034 @@
+"""
+requests.models
+~~~~~~~~~~~~~~~
+
+This module contains the primary objects that power Requests.
+"""
+
+import datetime
+
+# Import encoding now, to avoid implicit import later.
+# Implicit import within threads may cause LookupError when standard library is in a ZIP,
+# such as in Embedded Python. See https://github.com/psf/requests/issues/3578.
+import encodings.idna # noqa: F401
+from io import UnsupportedOperation
+
+from urllib3.exceptions import (
+ DecodeError,
+ LocationParseError,
+ ProtocolError,
+ ReadTimeoutError,
+ SSLError,
+)
+from urllib3.fields import RequestField
+from urllib3.filepost import encode_multipart_formdata
+from urllib3.util import parse_url
+
+from ._internal_utils import to_native_string, unicode_is_ascii
+from .auth import HTTPBasicAuth
+from .compat import (
+ Callable,
+ JSONDecodeError,
+ Mapping,
+ basestring,
+ builtin_str,
+ chardet,
+ cookielib,
+)
+from .compat import json as complexjson
+from .compat import urlencode, urlsplit, urlunparse
+from .cookies import _copy_cookie_jar, cookiejar_from_dict, get_cookie_header
+from .exceptions import (
+ ChunkedEncodingError,
+ ConnectionError,
+ ContentDecodingError,
+ HTTPError,
+ InvalidJSONError,
+ InvalidURL,
+)
+from .exceptions import JSONDecodeError as RequestsJSONDecodeError
+from .exceptions import MissingSchema
+from .exceptions import SSLError as RequestsSSLError
+from .exceptions import StreamConsumedError
+from .hooks import default_hooks
+from .status_codes import codes
+from .structures import CaseInsensitiveDict
+from .utils import (
+ check_header_validity,
+ get_auth_from_url,
+ guess_filename,
+ guess_json_utf,
+ iter_slices,
+ parse_header_links,
+ requote_uri,
+ stream_decode_response_unicode,
+ super_len,
+ to_key_val_list,
+)
+
+#: The set of HTTP status codes that indicate an automatically
+#: processable redirect.
+REDIRECT_STATI = (
+ codes.moved, # 301
+ codes.found, # 302
+ codes.other, # 303
+ codes.temporary_redirect, # 307
+ codes.permanent_redirect, # 308
+)
+
+DEFAULT_REDIRECT_LIMIT = 30
+CONTENT_CHUNK_SIZE = 10 * 1024
+ITER_CHUNK_SIZE = 512
+
+
+class RequestEncodingMixin:
+ @property
+ def path_url(self):
+ """Build the path URL to use."""
+
+ url = []
+
+ p = urlsplit(self.url)
+
+ path = p.path
+ if not path:
+ path = "/"
+
+ url.append(path)
+
+ query = p.query
+ if query:
+ url.append("?")
+ url.append(query)
+
+ return "".join(url)
+
+ @staticmethod
+ def _encode_params(data):
+ """Encode parameters in a piece of data.
+
+ Will successfully encode parameters when passed as a dict or a list of
+ 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
+ if parameters are supplied as a dict.
+ """
+
+ if isinstance(data, (str, bytes)):
+ return data
+ elif hasattr(data, "read"):
+ return data
+ elif hasattr(data, "__iter__"):
+ result = []
+ for k, vs in to_key_val_list(data):
+ if isinstance(vs, basestring) or not hasattr(vs, "__iter__"):
+ vs = [vs]
+ for v in vs:
+ if v is not None:
+ result.append(
+ (
+ k.encode("utf-8") if isinstance(k, str) else k,
+ v.encode("utf-8") if isinstance(v, str) else v,
+ )
+ )
+ return urlencode(result, doseq=True)
+ else:
+ return data
+
+ @staticmethod
+ def _encode_files(files, data):
+ """Build the body for a multipart/form-data request.
+
+ Will successfully encode files when passed as a dict or a list of
+ tuples. Order is retained if data is a list of tuples but arbitrary
+ if parameters are supplied as a dict.
+ The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype)
+ or 4-tuples (filename, fileobj, contentype, custom_headers).
+ """
+ if not files:
+ raise ValueError("Files must be provided.")
+ elif isinstance(data, basestring):
+ raise ValueError("Data must not be a string.")
+
+ new_fields = []
+ fields = to_key_val_list(data or {})
+ files = to_key_val_list(files or {})
+
+ for field, val in fields:
+ if isinstance(val, basestring) or not hasattr(val, "__iter__"):
+ val = [val]
+ for v in val:
+ if v is not None:
+ # Don't call str() on bytestrings: in Py3 it all goes wrong.
+ if not isinstance(v, bytes):
+ v = str(v)
+
+ new_fields.append(
+ (
+ field.decode("utf-8")
+ if isinstance(field, bytes)
+ else field,
+ v.encode("utf-8") if isinstance(v, str) else v,
+ )
+ )
+
+ for (k, v) in files:
+ # support for explicit filename
+ ft = None
+ fh = None
+ if isinstance(v, (tuple, list)):
+ if len(v) == 2:
+ fn, fp = v
+ elif len(v) == 3:
+ fn, fp, ft = v
+ else:
+ fn, fp, ft, fh = v
+ else:
+ fn = guess_filename(v) or k
+ fp = v
+
+ if isinstance(fp, (str, bytes, bytearray)):
+ fdata = fp
+ elif hasattr(fp, "read"):
+ fdata = fp.read()
+ elif fp is None:
+ continue
+ else:
+ fdata = fp
+
+ rf = RequestField(name=k, data=fdata, filename=fn, headers=fh)
+ rf.make_multipart(content_type=ft)
+ new_fields.append(rf)
+
+ body, content_type = encode_multipart_formdata(new_fields)
+
+ return body, content_type
+
+
+class RequestHooksMixin:
+ def register_hook(self, event, hook):
+ """Properly register a hook."""
+
+ if event not in self.hooks:
+ raise ValueError(f'Unsupported event specified, with event name "{event}"')
+
+ if isinstance(hook, Callable):
+ self.hooks[event].append(hook)
+ elif hasattr(hook, "__iter__"):
+ self.hooks[event].extend(h for h in hook if isinstance(h, Callable))
+
+ def deregister_hook(self, event, hook):
+ """Deregister a previously registered hook.
+ Returns True if the hook existed, False if not.
+ """
+
+ try:
+ self.hooks[event].remove(hook)
+ return True
+ except ValueError:
+ return False
+
+
+class Request(RequestHooksMixin):
+ """A user-created :class:`Request ` object.
+
+ Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server.
+
+ :param method: HTTP method to use.
+ :param url: URL to send.
+ :param headers: dictionary of headers to send.
+ :param files: dictionary of {filename: fileobject} files to multipart upload.
+ :param data: the body to attach to the request. If a dictionary or
+ list of tuples ``[(key, value)]`` is provided, form-encoding will
+ take place.
+ :param json: json for the body to attach to the request (if files or data is not specified).
+ :param params: URL parameters to append to the URL. If a dictionary or
+ list of tuples ``[(key, value)]`` is provided, form-encoding will
+ take place.
+ :param auth: Auth handler or (user, pass) tuple.
+ :param cookies: dictionary or CookieJar of cookies to attach to this request.
+ :param hooks: dictionary of callback hooks, for internal usage.
+
+ Usage::
+
+ >>> import requests
+ >>> req = requests.Request('GET', 'https://httpbin.org/get')
+ >>> req.prepare()
+ <PreparedRequest [GET]>
+
+ """
+
+ def __init__(
+ self,
+ method=None,
+ url=None,
+ headers=None,
+ files=None,
+ data=None,
+ params=None,
+ auth=None,
+ cookies=None,
+ hooks=None,
+ json=None,
+ ):
+
+ # Default empty dicts for dict params.
+ data = [] if data is None else data
+ files = [] if files is None else files
+ headers = {} if headers is None else headers
+ params = {} if params is None else params
+ hooks = {} if hooks is None else hooks
+
+ self.hooks = default_hooks()
+ for (k, v) in list(hooks.items()):
+ self.register_hook(event=k, hook=v)
+
+ self.method = method
+ self.url = url
+ self.headers = headers
+ self.files = files
+ self.data = data
+ self.json = json
+ self.params = params
+ self.auth = auth
+ self.cookies = cookies
+
+ def __repr__(self):
+ return f""
+
+ def prepare(self):
+ """Constructs a :class:`PreparedRequest