WorldOfPets 2 years ago
commit
0f399d79e1
100 changed files with 7293 additions and 0 deletions
  1. 65 0
      README.md
  2. BIN
      __pycache__/BotClass.cpython-310.pyc
  3. BIN
      __pycache__/config.cpython-310.pyc
  4. BIN
      __pycache__/connect.cpython-310.pyc
  5. BIN
      __pycache__/function.cpython-310.pyc
  6. 87 0
      api.py
  7. 10 0
      config.py
  8. 29 0
      connect.py
  9. 46 0
      function.py
  10. 1 0
      rest_api_venv/Lib/site-packages/Flask-2.1.2.dist-info/INSTALLER
  11. 28 0
      rest_api_venv/Lib/site-packages/Flask-2.1.2.dist-info/LICENSE.rst
  12. 126 0
      rest_api_venv/Lib/site-packages/Flask-2.1.2.dist-info/METADATA
  13. 52 0
      rest_api_venv/Lib/site-packages/Flask-2.1.2.dist-info/RECORD
  14. 0 0
      rest_api_venv/Lib/site-packages/Flask-2.1.2.dist-info/REQUESTED
  15. 5 0
      rest_api_venv/Lib/site-packages/Flask-2.1.2.dist-info/WHEEL
  16. 2 0
      rest_api_venv/Lib/site-packages/Flask-2.1.2.dist-info/entry_points.txt
  17. 1 0
      rest_api_venv/Lib/site-packages/Flask-2.1.2.dist-info/top_level.txt
  18. 1 0
      rest_api_venv/Lib/site-packages/Jinja2-3.1.2.dist-info/INSTALLER
  19. 28 0
      rest_api_venv/Lib/site-packages/Jinja2-3.1.2.dist-info/LICENSE.rst
  20. 113 0
      rest_api_venv/Lib/site-packages/Jinja2-3.1.2.dist-info/METADATA
  21. 58 0
      rest_api_venv/Lib/site-packages/Jinja2-3.1.2.dist-info/RECORD
  22. 5 0
      rest_api_venv/Lib/site-packages/Jinja2-3.1.2.dist-info/WHEEL
  23. 2 0
      rest_api_venv/Lib/site-packages/Jinja2-3.1.2.dist-info/entry_points.txt
  24. 1 0
      rest_api_venv/Lib/site-packages/Jinja2-3.1.2.dist-info/top_level.txt
  25. 1 0
      rest_api_venv/Lib/site-packages/MarkupSafe-2.1.1.dist-info/INSTALLER
  26. 28 0
      rest_api_venv/Lib/site-packages/MarkupSafe-2.1.1.dist-info/LICENSE.rst
  27. 101 0
      rest_api_venv/Lib/site-packages/MarkupSafe-2.1.1.dist-info/METADATA
  28. 14 0
      rest_api_venv/Lib/site-packages/MarkupSafe-2.1.1.dist-info/RECORD
  29. 5 0
      rest_api_venv/Lib/site-packages/MarkupSafe-2.1.1.dist-info/WHEEL
  30. 1 0
      rest_api_venv/Lib/site-packages/MarkupSafe-2.1.1.dist-info/top_level.txt
  31. 1 0
      rest_api_venv/Lib/site-packages/PyNaCl-1.5.0.dist-info/INSTALLER
  32. 174 0
      rest_api_venv/Lib/site-packages/PyNaCl-1.5.0.dist-info/LICENSE
  33. 246 0
      rest_api_venv/Lib/site-packages/PyNaCl-1.5.0.dist-info/METADATA
  34. 68 0
      rest_api_venv/Lib/site-packages/PyNaCl-1.5.0.dist-info/RECORD
  35. 5 0
      rest_api_venv/Lib/site-packages/PyNaCl-1.5.0.dist-info/WHEEL
  36. 2 0
      rest_api_venv/Lib/site-packages/PyNaCl-1.5.0.dist-info/top_level.txt
  37. 1 0
      rest_api_venv/Lib/site-packages/Werkzeug-2.1.2.dist-info/INSTALLER
  38. 28 0
      rest_api_venv/Lib/site-packages/Werkzeug-2.1.2.dist-info/LICENSE.rst
  39. 128 0
      rest_api_venv/Lib/site-packages/Werkzeug-2.1.2.dist-info/METADATA
  40. 86 0
      rest_api_venv/Lib/site-packages/Werkzeug-2.1.2.dist-info/RECORD
  41. 5 0
      rest_api_venv/Lib/site-packages/Werkzeug-2.1.2.dist-info/WHEEL
  42. 1 0
      rest_api_venv/Lib/site-packages/Werkzeug-2.1.2.dist-info/top_level.txt
  43. BIN
      rest_api_venv/Lib/site-packages/__pycache__/_virtualenv.cpython-310.pyc
  44. BIN
      rest_api_venv/Lib/site-packages/__pycache__/gnupg.cpython-310.pyc
  45. BIN
      rest_api_venv/Lib/site-packages/__pycache__/six.cpython-310.pyc
  46. BIN
      rest_api_venv/Lib/site-packages/__pycache__/sshtunnel.cpython-310.pyc
  47. BIN
      rest_api_venv/Lib/site-packages/_cffi_backend.cp310-win_amd64.pyd
  48. 187 0
      rest_api_venv/Lib/site-packages/_distutils_hack/__init__.py
  49. BIN
      rest_api_venv/Lib/site-packages/_distutils_hack/__pycache__/__init__.cpython-310.pyc
  50. BIN
      rest_api_venv/Lib/site-packages/_distutils_hack/__pycache__/override.cpython-310.pyc
  51. 1 0
      rest_api_venv/Lib/site-packages/_distutils_hack/override.py
  52. 1 0
      rest_api_venv/Lib/site-packages/_virtualenv.pth
  53. 130 0
      rest_api_venv/Lib/site-packages/_virtualenv.py
  54. 1 0
      rest_api_venv/Lib/site-packages/bcrypt-3.2.2.dist-info/INSTALLER
  55. 201 0
      rest_api_venv/Lib/site-packages/bcrypt-3.2.2.dist-info/LICENSE
  56. 271 0
      rest_api_venv/Lib/site-packages/bcrypt-3.2.2.dist-info/METADATA
  57. 13 0
      rest_api_venv/Lib/site-packages/bcrypt-3.2.2.dist-info/RECORD
  58. 5 0
      rest_api_venv/Lib/site-packages/bcrypt-3.2.2.dist-info/WHEEL
  59. 2 0
      rest_api_venv/Lib/site-packages/bcrypt-3.2.2.dist-info/top_level.txt
  60. 41 0
      rest_api_venv/Lib/site-packages/bcrypt/__about__.py
  61. 171 0
      rest_api_venv/Lib/site-packages/bcrypt/__init__.py
  62. BIN
      rest_api_venv/Lib/site-packages/bcrypt/__pycache__/__about__.cpython-310.pyc
  63. BIN
      rest_api_venv/Lib/site-packages/bcrypt/__pycache__/__init__.cpython-310.pyc
  64. BIN
      rest_api_venv/Lib/site-packages/bcrypt/_bcrypt.pyd
  65. 4 0
      rest_api_venv/Lib/site-packages/bcrypt/_bcrypt.pyi
  66. 0 0
      rest_api_venv/Lib/site-packages/bcrypt/py.typed
  67. 1 0
      rest_api_venv/Lib/site-packages/cffi-1.15.0.dist-info/INSTALLER
  68. 26 0
      rest_api_venv/Lib/site-packages/cffi-1.15.0.dist-info/LICENSE
  69. 37 0
      rest_api_venv/Lib/site-packages/cffi-1.15.0.dist-info/METADATA
  70. 44 0
      rest_api_venv/Lib/site-packages/cffi-1.15.0.dist-info/RECORD
  71. 5 0
      rest_api_venv/Lib/site-packages/cffi-1.15.0.dist-info/WHEEL
  72. 3 0
      rest_api_venv/Lib/site-packages/cffi-1.15.0.dist-info/entry_points.txt
  73. 2 0
      rest_api_venv/Lib/site-packages/cffi-1.15.0.dist-info/top_level.txt
  74. 14 0
      rest_api_venv/Lib/site-packages/cffi/__init__.py
  75. BIN
      rest_api_venv/Lib/site-packages/cffi/__pycache__/__init__.cpython-310.pyc
  76. BIN
      rest_api_venv/Lib/site-packages/cffi/__pycache__/api.cpython-310.pyc
  77. BIN
      rest_api_venv/Lib/site-packages/cffi/__pycache__/backend_ctypes.cpython-310.pyc
  78. BIN
      rest_api_venv/Lib/site-packages/cffi/__pycache__/cffi_opcode.cpython-310.pyc
  79. BIN
      rest_api_venv/Lib/site-packages/cffi/__pycache__/commontypes.cpython-310.pyc
  80. BIN
      rest_api_venv/Lib/site-packages/cffi/__pycache__/cparser.cpython-310.pyc
  81. BIN
      rest_api_venv/Lib/site-packages/cffi/__pycache__/error.cpython-310.pyc
  82. BIN
      rest_api_venv/Lib/site-packages/cffi/__pycache__/ffiplatform.cpython-310.pyc
  83. BIN
      rest_api_venv/Lib/site-packages/cffi/__pycache__/lock.cpython-310.pyc
  84. BIN
      rest_api_venv/Lib/site-packages/cffi/__pycache__/model.cpython-310.pyc
  85. BIN
      rest_api_venv/Lib/site-packages/cffi/__pycache__/pkgconfig.cpython-310.pyc
  86. BIN
      rest_api_venv/Lib/site-packages/cffi/__pycache__/recompiler.cpython-310.pyc
  87. BIN
      rest_api_venv/Lib/site-packages/cffi/__pycache__/setuptools_ext.cpython-310.pyc
  88. BIN
      rest_api_venv/Lib/site-packages/cffi/__pycache__/vengine_cpy.cpython-310.pyc
  89. BIN
      rest_api_venv/Lib/site-packages/cffi/__pycache__/vengine_gen.cpython-310.pyc
  90. BIN
      rest_api_venv/Lib/site-packages/cffi/__pycache__/verifier.cpython-310.pyc
  91. 149 0
      rest_api_venv/Lib/site-packages/cffi/_cffi_errors.h
  92. 385 0
      rest_api_venv/Lib/site-packages/cffi/_cffi_include.h
  93. 527 0
      rest_api_venv/Lib/site-packages/cffi/_embedding.h
  94. 965 0
      rest_api_venv/Lib/site-packages/cffi/api.py
  95. 1121 0
      rest_api_venv/Lib/site-packages/cffi/backend_ctypes.py
  96. 187 0
      rest_api_venv/Lib/site-packages/cffi/cffi_opcode.py
  97. 80 0
      rest_api_venv/Lib/site-packages/cffi/commontypes.py
  98. 1006 0
      rest_api_venv/Lib/site-packages/cffi/cparser.py
  99. 31 0
      rest_api_venv/Lib/site-packages/cffi/error.py
  100. 127 0
      rest_api_venv/Lib/site-packages/cffi/ffiplatform.py

+ 65 - 0
README.md

@@ -0,0 +1,65 @@
+# REST API Python
+## Instructions
+1. Change config.py
+2. Open **cmd** and change to the venv scripts folder: ```cd C:/your_path/RestApiPython/rest_api_venv/Scripts```
+3. Enter the following commands: ```activate.bat```, ```cd ../../``` and ```python api.py```
+
+## Requests
+### GET ```http://127.0.0.2:8080/user/10``` - get one user by id
+```json
+{
+    "birthday": "Tue, 07 Jun 2022 00:00:00 GMT",
+    "id": 10,
+    "idrole": 2,
+    "insys": true,
+    "lastlogintime": "Tue, 07 Jun 2022 19:25:00 GMT",
+    "name": "Evgen4"
+}
+```
+### GET ```http://127.0.0.2:8080/users``` - get all users
+```json
+[
+    {
+        "birthday": "Sat, 04 Mar 2000 00:00:00 GMT",
+        "id": 4,
+        "insys": true,
+        "lastlogintime": "Mon, 16 May 2011 15:36:38 GMT",
+        "name": "Evgen Polivanov",
+        "role_name": "Admin"
+    },
+    {
+        "birthday": "Tue, 07 Jun 2022 00:00:00 GMT",
+        "id": 6,
+        "insys": true,
+        "lastlogintime": "Tue, 07 Jun 2022 16:58:29 GMT",
+        "name": "Evgen Polivanov",
+        "role_name": "User"
+    }
+]
+```
+### POST ```http://127.0.0.2:8080/adduser``` - add a user and return the id of the added user
+**BODY**
+```json
+{
+    "name" : "Evgen",
+    "reg_date" : "07-06-2022",
+    "log_time" : "07-06-2022 19:25",
+    "in_sys" : true,
+    "role_id" : 2
+}
+```
+### POST ```http://127.0.0.2:8080/updateuser``` - update a user's name by id
+**BODY**
+```json
+{
+    "id" : 10,
+    "name" : "Other name"
+}
+```
+### POST ```http://127.0.0.2:8080/deleteuser``` - delete a user by id
+**BODY**
+```json
+{
+    "id" : 10
+}
+```
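
For quick manual testing of the endpoints above, a small Python client can be used. This is only a sketch: it assumes the API is running on `127.0.0.2:8080` as described in the instructions and that the `requests` package is installed; it is not part of this commit.

```python
# Minimal client for the endpoints documented above.
# Assumptions: api.py is running on 127.0.0.2:8080 and `requests` is installed.
import requests

BASE = "http://127.0.0.2:8080"

# Add a user; the endpoint responds with the id of the inserted row.
new_id = requests.post(f"{BASE}/adduser", json={
    "name": "Evgen",
    "reg_date": "07-06-2022",
    "log_time": "07-06-2022 19:25",
    "in_sys": True,
    "role_id": 2,
}).json()
print("created user id:", new_id)

# Fetch that user and then the full list.
print(requests.get(f"{BASE}/user/{new_id}").json())
print(requests.get(f"{BASE}/users").json())

# Rename the user, then delete it.
requests.post(f"{BASE}/updateuser", json={"id": new_id, "name": "Other name"})
requests.post(f"{BASE}/deleteuser", json={"id": new_id})
```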

BIN
__pycache__/BotClass.cpython-310.pyc


BIN
__pycache__/config.cpython-310.pyc


BIN
__pycache__/connect.cpython-310.pyc


BIN
__pycache__/function.cpython-310.pyc


+ 87 - 0
api.py

@@ -0,0 +1,87 @@
+
+from flask import Flask, jsonify, request
+from xmpp import cli
+from function import query_select, query_insert, query_delete, query_update
+
+app = Flask(__name__)
+
+
+@app.route('/adduser', methods=['POST'])
+def get_add():
+    request_data = request.get_json()
+    result = query_insert('''
+    INSERT INTO rest_user (name, birthday, lastlogintime, insys, idrole) 
+    VALUES (%s, %s, %s, %s, %s)
+    RETURNING id;
+    ''', \
+        (request_data['name'], request_data['reg_date'],\
+             request_data['log_time'], request_data['in_sys'],\
+                  request_data['role_id']))
+
+    return jsonify(result)
+
+@app.route('/user/<int:id>', methods=['GET'])
+def get_user(id):
+    return jsonify(query_select('''
+    SELECT * FROM rest_user WHERE id = %s;
+    ''', (id,), True))
+
+@app.route('/users', methods=['GET'])
+def get_show_user():
+    result = query_select('''
+    SELECT rest_user.id, rest_user.name, rest_user.birthday, rest_user.insys, rest_role.name AS role_name, rest_user.lastlogintime 
+    FROM rest_user, rest_role
+    WHERE rest_user.idrole = rest_role.id
+    ORDER BY id ASC;
+    ''', (), False)
+    return jsonify(result)
+
+@app.route('/deleteuser', methods=['POST'])
+def delete_user():
+    request_data = request.get_json()
+    result = query_delete('''
+    DELETE FROM rest_user
+    WHERE id = %s;
+    ''', (request_data['id'],))
+    if result:
+        return "Deleted successfully!", 200
+    else:
+        return "Error", 400
+
+@app.route('/updateuser', methods=['POST'])
+def update_user():
+    request_data = request.get_json()
+    print(request_data['id'])
+    print(request_data['name'])
+    result = query_update('''
+    UPDATE rest_user
+    SET name = %s
+    WHERE id = %s;
+    ''', (request_data['name'], request_data['id']))
+    if result:
+        return "Updated successfully!", 200
+    else:
+        return "Error", 400
+
+
+@app.route('/testmessage', methods=['POST'])
+def send_mess():
+    try:
+        request_data = request.get_json()
+        cli.send_message(request_data['from'], request_data['password'], request_data['to'], request_data['message'])
+        return "Успешное изменение!", 200
+    except:
+        return "Ошибка", 500
+
+
+return_message = ""
+
+async def message_handler(conn, mess):
+    text = mess.getBody()
+    user = mess.getFrom()
+    return text
+
+if __name__ == '__main__':
+    #from waitress import serve
+    #serve(app, host='127.0.0.2', port='8080')
+    app.run(host='127.0.0.2', port=8080, debug=True, load_dotenv=True)
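
The commented-out lines at the bottom of api.py point at serving the app with Waitress instead of Flask's debug server. A possible standalone entry point, assuming `waitress` is installed (this script is hypothetical and not part of the commit):

```python
# Hypothetical production entry point (not part of this commit) that serves the
# Flask app with Waitress, mirroring the commented-out serve() call in api.py.
# Assumes `pip install waitress`.
from waitress import serve

from api import app  # api.py guards app.run() behind __main__, so importing is safe

if __name__ == "__main__":
    serve(app, host="127.0.0.2", port=8080)
```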

+ 10 - 0
config.py

@@ -0,0 +1,10 @@
+# Database connection
+host = "localhost"
+user = "postgres"
+password = "password"
+db_name = "postgres"
+port = 3306
+SSH_H = "46.138.247.90"
+# Bot connection
+jid = "test@msg.sharix-app.org"
+jidpassword = "test1234@"
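
config.py hard-codes connection settings and credentials. One hedged alternative, not part of this commit, is to read the same names from environment variables and fall back to the committed defaults (the environment variable names below are illustrative):

```python
# Hypothetical variant of config.py (not part of this commit): the same settings,
# read from environment variables with the committed values as fallbacks.
import os

# Database connection
host = os.getenv("DB_HOST", "localhost")
user = os.getenv("DB_USER", "postgres")
password = os.getenv("DB_PASSWORD", "password")
db_name = os.getenv("DB_NAME", "postgres")
port = int(os.getenv("DB_PORT", "3306"))
SSH_H = os.getenv("SSH_HOST", "46.138.247.90")

# Bot connection
jid = os.getenv("BOT_JID", "test@msg.sharix-app.org")
jidpassword = os.getenv("BOT_PASSWORD", "test1234@")
```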

+ 29 - 0
connect.py

@@ -0,0 +1,29 @@
+import psycopg2 as pg
+from sshtunnel import SSHTunnelForwarder
+from config import SSH_H, host, user, db_name
+
+def connect():
+    try:
+        print('Connecting to the PostgreSQL Database...')
+        # SSH connection to the server
+        ssh_tunnel = SSHTunnelForwarder(
+            (SSH_H, 334),  # server IP address and SSH port
+            ssh_username="evgeny_polivanov",  # SSH username
+            ssh_private_key='D:/sshc',  # path to the private SSH key file (not the .pub)
+            ssh_private_key_password='',  # key passphrase (none in this case)
+            remote_bind_address=(host, 5432)  # PostgreSQL host and port on the remote server
+        )
+        ssh_tunnel.start()
+        print("Tunnel started!")
+        # Connect to the database
+        conn = pg.connect(
+            host=host,
+            port=ssh_tunnel.local_bind_port,
+            user=user,
+            password= "",
+            database=db_name
+        )
+        print("Success ssh connect!")
+        return conn
+    except Exception as e:
+        print('Connection has failed:', e)
+        return None
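
connect() starts the SSH tunnel and returns a psycopg2 connection that the rest of the project shares. A brief usage sketch (illustrative only, assuming the tunnel and database are reachable):

```python
# Illustrative use of connect(); function.py keeps the returned connection open
# as a module-level object for the lifetime of the process.
from connect import connect

conn = connect()
if conn is not None:  # connect() returns None when the connection attempt fails
    with conn.cursor() as cur:
        cur.execute("SELECT version();")
        print(cur.fetchone()[0])
    conn.close()
```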

+ 46 - 0
function.py

@@ -0,0 +1,46 @@
+from connect import connect
+
+conn = connect()
+
+def query_select(query, args=(), one=False):
+    try:
+        with conn.cursor() as cursor:
+            cursor.execute(query, args)
+            r = [dict((cursor.description[i][0], value) \
+                    for i, value in enumerate(row)) for row in cursor.fetchall()]
+            return (r[0] if r else None) if one else r
+    except Exception:
+        conn.rollback()  # reset the aborted transaction so later queries still work
+        return "Error"
+
+def query_insert(query, args=()):
+    try:
+        with conn.cursor() as cursor:
+            cursor.execute(query, args)
+            new_id = cursor.fetchone()[0]  # value returned by RETURNING id
+            conn.commit()
+            return new_id
+    except Exception:
+        conn.rollback()
+        return "Error"
+
+def query_delete(query, args=()):
+    try:
+        with conn.cursor() as cursor:
+            cursor.execute(query, args)
+            if cursor.rowcount != 0:
+                conn.commit()
+                return True
+        return False
+    except Exception:
+        conn.rollback()
+        return False
+
+def query_update(query, args=()):
+    try:
+        with conn.cursor() as cursor:
+            cursor.execute(query, args)
+            print(cursor.rowcount)
+            if cursor.rowcount != 0:
+                conn.commit()
+                return True
+        return False
+    except Exception:
+        conn.rollback()
+        return False
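
A short sketch of how these helpers are intended to be called, with parameters passed separately so psycopg2 handles quoting; the table and columns mirror api.py, while the values are illustrative:

```python
# Hypothetical standalone use of the query helpers; values are illustrative.
from function import query_insert, query_select

# Parameterised INSERT that returns the new id (RETURNING id, as in api.py).
new_id = query_insert(
    "INSERT INTO rest_user (name, birthday, lastlogintime, insys, idrole) "
    "VALUES (%s, %s, %s, %s, %s) RETURNING id;",
    ("Evgen", "2022-06-07", "2022-06-07 19:25", True, 2),
)

# one=True returns a single row as a dict; one=False returns a list of dicts.
user = query_select("SELECT * FROM rest_user WHERE id = %s;", (new_id,), one=True)
print(user)
```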

+ 1 - 0
rest_api_venv/Lib/site-packages/Flask-2.1.2.dist-info/INSTALLER

@@ -0,0 +1 @@
+pip

+ 28 - 0
rest_api_venv/Lib/site-packages/Flask-2.1.2.dist-info/LICENSE.rst

@@ -0,0 +1,28 @@
+Copyright 2010 Pallets
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+1.  Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+
+2.  Redistributions in binary form must reproduce the above copyright
+    notice, this list of conditions and the following disclaimer in the
+    documentation and/or other materials provided with the distribution.
+
+3.  Neither the name of the copyright holder nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

+ 126 - 0
rest_api_venv/Lib/site-packages/Flask-2.1.2.dist-info/METADATA

@@ -0,0 +1,126 @@
+Metadata-Version: 2.1
+Name: Flask
+Version: 2.1.2
+Summary: A simple framework for building complex web applications.
+Home-page: https://palletsprojects.com/p/flask
+Author: Armin Ronacher
+Author-email: armin.ronacher@active-4.com
+Maintainer: Pallets
+Maintainer-email: contact@palletsprojects.com
+License: BSD-3-Clause
+Project-URL: Donate, https://palletsprojects.com/donate
+Project-URL: Documentation, https://flask.palletsprojects.com/
+Project-URL: Changes, https://flask.palletsprojects.com/changes/
+Project-URL: Source Code, https://github.com/pallets/flask/
+Project-URL: Issue Tracker, https://github.com/pallets/flask/issues/
+Project-URL: Twitter, https://twitter.com/PalletsTeam
+Project-URL: Chat, https://discord.gg/pallets
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Web Environment
+Classifier: Framework :: Flask
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
+Classifier: Topic :: Internet :: WWW/HTTP :: WSGI
+Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application
+Classifier: Topic :: Software Development :: Libraries :: Application Frameworks
+Requires-Python: >=3.7
+Description-Content-Type: text/x-rst
+License-File: LICENSE.rst
+Requires-Dist: Werkzeug (>=2.0)
+Requires-Dist: Jinja2 (>=3.0)
+Requires-Dist: itsdangerous (>=2.0)
+Requires-Dist: click (>=8.0)
+Requires-Dist: importlib-metadata (>=3.6.0) ; python_version < "3.10"
+Provides-Extra: async
+Requires-Dist: asgiref (>=3.2) ; extra == 'async'
+Provides-Extra: dotenv
+Requires-Dist: python-dotenv ; extra == 'dotenv'
+
+Flask
+=====
+
+Flask is a lightweight `WSGI`_ web application framework. It is designed
+to make getting started quick and easy, with the ability to scale up to
+complex applications. It began as a simple wrapper around `Werkzeug`_
+and `Jinja`_ and has become one of the most popular Python web
+application frameworks.
+
+Flask offers suggestions, but doesn't enforce any dependencies or
+project layout. It is up to the developer to choose the tools and
+libraries they want to use. There are many extensions provided by the
+community that make adding new functionality easy.
+
+.. _WSGI: https://wsgi.readthedocs.io/
+.. _Werkzeug: https://werkzeug.palletsprojects.com/
+.. _Jinja: https://jinja.palletsprojects.com/
+
+
+Installing
+----------
+
+Install and update using `pip`_:
+
+.. code-block:: text
+
+    $ pip install -U Flask
+
+.. _pip: https://pip.pypa.io/en/stable/getting-started/
+
+
+A Simple Example
+----------------
+
+.. code-block:: python
+
+    # save this as app.py
+    from flask import Flask
+
+    app = Flask(__name__)
+
+    @app.route("/")
+    def hello():
+        return "Hello, World!"
+
+.. code-block:: text
+
+    $ flask run
+      * Running on http://127.0.0.1:5000/ (Press CTRL+C to quit)
+
+
+Contributing
+------------
+
+For guidance on setting up a development environment and how to make a
+contribution to Flask, see the `contributing guidelines`_.
+
+.. _contributing guidelines: https://github.com/pallets/flask/blob/main/CONTRIBUTING.rst
+
+
+Donate
+------
+
+The Pallets organization develops and supports Flask and the libraries
+it uses. In order to grow the community of contributors and users, and
+allow the maintainers to devote more time to the projects, `please
+donate today`_.
+
+.. _please donate today: https://palletsprojects.com/donate
+
+
+Links
+-----
+
+-   Documentation: https://flask.palletsprojects.com/
+-   Changes: https://flask.palletsprojects.com/changes/
+-   PyPI Releases: https://pypi.org/project/Flask/
+-   Source Code: https://github.com/pallets/flask/
+-   Issue Tracker: https://github.com/pallets/flask/issues/
+-   Website: https://palletsprojects.com/p/flask/
+-   Twitter: https://twitter.com/PalletsTeam
+-   Chat: https://discord.gg/pallets
+
+

+ 52 - 0
rest_api_venv/Lib/site-packages/Flask-2.1.2.dist-info/RECORD

@@ -0,0 +1,52 @@
+../../Scripts/flask.exe,sha256=j_jQN_1O_x3GebqBKiYhTnYsiZyrod-AehqQwvGctmg,106367
+Flask-2.1.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+Flask-2.1.2.dist-info/LICENSE.rst,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475
+Flask-2.1.2.dist-info/METADATA,sha256=V3wRBN5pAKumPcu41b-ePUWUQkdyka_WVPXwMKGvLGQ,3907
+Flask-2.1.2.dist-info/RECORD,,
+Flask-2.1.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+Flask-2.1.2.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
+Flask-2.1.2.dist-info/entry_points.txt,sha256=s3MqQpduU25y4dq3ftBYD6bMVdVnbMpZP-sUNw0zw0k,41
+Flask-2.1.2.dist-info/top_level.txt,sha256=dvi65F6AeGWVU0TBpYiC04yM60-FX1gJFkK31IKQr5c,6
+flask/__init__.py,sha256=hutpl3wZCIHR-FckBdDZMe_pw89k7llUTj7c7t77cWc,2207
+flask/__main__.py,sha256=bYt9eEaoRQWdejEHFD8REx9jxVEdZptECFsV7F49Ink,30
+flask/__pycache__/__init__.cpython-310.pyc,,
+flask/__pycache__/__main__.cpython-310.pyc,,
+flask/__pycache__/app.cpython-310.pyc,,
+flask/__pycache__/blueprints.cpython-310.pyc,,
+flask/__pycache__/cli.cpython-310.pyc,,
+flask/__pycache__/config.cpython-310.pyc,,
+flask/__pycache__/ctx.cpython-310.pyc,,
+flask/__pycache__/debughelpers.cpython-310.pyc,,
+flask/__pycache__/globals.cpython-310.pyc,,
+flask/__pycache__/helpers.cpython-310.pyc,,
+flask/__pycache__/logging.cpython-310.pyc,,
+flask/__pycache__/scaffold.cpython-310.pyc,,
+flask/__pycache__/sessions.cpython-310.pyc,,
+flask/__pycache__/signals.cpython-310.pyc,,
+flask/__pycache__/templating.cpython-310.pyc,,
+flask/__pycache__/testing.cpython-310.pyc,,
+flask/__pycache__/typing.cpython-310.pyc,,
+flask/__pycache__/views.cpython-310.pyc,,
+flask/__pycache__/wrappers.cpython-310.pyc,,
+flask/app.py,sha256=b6_j0OtlrssZ05fMReHNwzSLN3M7mkI9LR8UWuhcm0g,82070
+flask/blueprints.py,sha256=W2C5eFciX2Zq8zJSKQHiQd488Ytcsrt6wcXSZ-rSU7c,23362
+flask/cli.py,sha256=eCZMiAFr6quTZo2pZ5RH2NBS1cQEZqLke9MCloOVIMA,31185
+flask/config.py,sha256=IWqHecH4poDxNEUg4U_ZA1CTlL5BKZDX3ofG4UGYyi0,12584
+flask/ctx.py,sha256=smANecq_Tr3FFi2UMDs3Nn-xYi9GhKrzZvFpkDqXtLM,18336
+flask/debughelpers.py,sha256=parkDxfxxGZZAOGSumyrGLCTKcws86lJ2X3RqikiwUE,6036
+flask/globals.py,sha256=cWd-R2hUH3VqPhnmQNww892tQS6Yjqg_wg8UvW1M7NM,1723
+flask/helpers.py,sha256=qkbHG_6-Ox_uDmmD8ZwA9k4nTI8Q8U5S_6QtXpzZCRg,29266
+flask/json/__init__.py,sha256=jZHbiHKOICmnMttYIhHYrCj2LeHjZY-J7KiYoorjc4I,10394
+flask/json/__pycache__/__init__.cpython-310.pyc,,
+flask/json/__pycache__/tag.cpython-310.pyc,,
+flask/json/tag.py,sha256=fys3HBLssWHuMAIJuTcf2K0bCtosePBKXIWASZEEjnU,8857
+flask/logging.py,sha256=1o_hirVGqdj7SBdETnhX7IAjklG89RXlrwz_2CjzQQE,2273
+flask/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+flask/scaffold.py,sha256=vFn4R_sDhaWwxH77UGmRUWgE2p9tzj9VqRHG5dvE7wM,32545
+flask/sessions.py,sha256=y0f1WBTQqjebkjtiT6d0G_ejIvllsjzch5sq_NUoXec,15834
+flask/signals.py,sha256=H7QwDciK-dtBxinjKpexpglP0E6k0MJILiFWTItfmqU,2136
+flask/templating.py,sha256=6rcyoV-Z57uBGMB6_xl4LhQJHbF456naf-GU8pjQSPM,5659
+flask/testing.py,sha256=mfyDupACHNQinATGAcrqqDst9Ik4CnNg3rP9gvpUjks,10385
+flask/typing.py,sha256=P1x3WCUYE7ddMNCUVqr04V5EjJy4M0Osz1MfVM0xgMQ,2116
+flask/views.py,sha256=nhq31TRB5Z-z2mjFGZACaaB2Et5XPCmWhWxJxOvLWww,5948
+flask/wrappers.py,sha256=Vgs2HlC8WNUOELQasV-Xad8DFTFwJe3eUltZG9z4Cu8,5675

+ 0 - 0
rest_api_venv/Lib/site-packages/Flask-2.1.2.dist-info/REQUESTED


+ 5 - 0
rest_api_venv/Lib/site-packages/Flask-2.1.2.dist-info/WHEEL

@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.37.1)
+Root-Is-Purelib: true
+Tag: py3-none-any
+

+ 2 - 0
rest_api_venv/Lib/site-packages/Flask-2.1.2.dist-info/entry_points.txt

@@ -0,0 +1,2 @@
+[console_scripts]
+flask = flask.cli:main

+ 1 - 0
rest_api_venv/Lib/site-packages/Flask-2.1.2.dist-info/top_level.txt

@@ -0,0 +1 @@
+flask

+ 1 - 0
rest_api_venv/Lib/site-packages/Jinja2-3.1.2.dist-info/INSTALLER

@@ -0,0 +1 @@
+pip

+ 28 - 0
rest_api_venv/Lib/site-packages/Jinja2-3.1.2.dist-info/LICENSE.rst

@@ -0,0 +1,28 @@
+Copyright 2007 Pallets
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+1.  Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+
+2.  Redistributions in binary form must reproduce the above copyright
+    notice, this list of conditions and the following disclaimer in the
+    documentation and/or other materials provided with the distribution.
+
+3.  Neither the name of the copyright holder nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

+ 113 - 0
rest_api_venv/Lib/site-packages/Jinja2-3.1.2.dist-info/METADATA

@@ -0,0 +1,113 @@
+Metadata-Version: 2.1
+Name: Jinja2
+Version: 3.1.2
+Summary: A very fast and expressive template engine.
+Home-page: https://palletsprojects.com/p/jinja/
+Author: Armin Ronacher
+Author-email: armin.ronacher@active-4.com
+Maintainer: Pallets
+Maintainer-email: contact@palletsprojects.com
+License: BSD-3-Clause
+Project-URL: Donate, https://palletsprojects.com/donate
+Project-URL: Documentation, https://jinja.palletsprojects.com/
+Project-URL: Changes, https://jinja.palletsprojects.com/changes/
+Project-URL: Source Code, https://github.com/pallets/jinja/
+Project-URL: Issue Tracker, https://github.com/pallets/jinja/issues/
+Project-URL: Twitter, https://twitter.com/PalletsTeam
+Project-URL: Chat, https://discord.gg/pallets
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
+Classifier: Topic :: Text Processing :: Markup :: HTML
+Requires-Python: >=3.7
+Description-Content-Type: text/x-rst
+License-File: LICENSE.rst
+Requires-Dist: MarkupSafe (>=2.0)
+Provides-Extra: i18n
+Requires-Dist: Babel (>=2.7) ; extra == 'i18n'
+
+Jinja
+=====
+
+Jinja is a fast, expressive, extensible templating engine. Special
+placeholders in the template allow writing code similar to Python
+syntax. Then the template is passed data to render the final document.
+
+It includes:
+
+-   Template inheritance and inclusion.
+-   Define and import macros within templates.
+-   HTML templates can use autoescaping to prevent XSS from untrusted
+    user input.
+-   A sandboxed environment can safely render untrusted templates.
+-   AsyncIO support for generating templates and calling async
+    functions.
+-   I18N support with Babel.
+-   Templates are compiled to optimized Python code just-in-time and
+    cached, or can be compiled ahead-of-time.
+-   Exceptions point to the correct line in templates to make debugging
+    easier.
+-   Extensible filters, tests, functions, and even syntax.
+
+Jinja's philosophy is that while application logic belongs in Python if
+possible, it shouldn't make the template designer's job difficult by
+restricting functionality too much.
+
+
+Installing
+----------
+
+Install and update using `pip`_:
+
+.. code-block:: text
+
+    $ pip install -U Jinja2
+
+.. _pip: https://pip.pypa.io/en/stable/getting-started/
+
+
+In A Nutshell
+-------------
+
+.. code-block:: jinja
+
+    {% extends "base.html" %}
+    {% block title %}Members{% endblock %}
+    {% block content %}
+      <ul>
+      {% for user in users %}
+        <li><a href="{{ user.url }}">{{ user.username }}</a></li>
+      {% endfor %}
+      </ul>
+    {% endblock %}
+
+
+Donate
+------
+
+The Pallets organization develops and supports Jinja and other popular
+packages. In order to grow the community of contributors and users, and
+allow the maintainers to devote more time to the projects, `please
+donate today`_.
+
+.. _please donate today: https://palletsprojects.com/donate
+
+
+Links
+-----
+
+-   Documentation: https://jinja.palletsprojects.com/
+-   Changes: https://jinja.palletsprojects.com/changes/
+-   PyPI Releases: https://pypi.org/project/Jinja2/
+-   Source Code: https://github.com/pallets/jinja/
+-   Issue Tracker: https://github.com/pallets/jinja/issues/
+-   Website: https://palletsprojects.com/p/jinja/
+-   Twitter: https://twitter.com/PalletsTeam
+-   Chat: https://discord.gg/pallets
+
+

+ 58 - 0
rest_api_venv/Lib/site-packages/Jinja2-3.1.2.dist-info/RECORD

@@ -0,0 +1,58 @@
+Jinja2-3.1.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+Jinja2-3.1.2.dist-info/LICENSE.rst,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475
+Jinja2-3.1.2.dist-info/METADATA,sha256=PZ6v2SIidMNixR7MRUX9f7ZWsPwtXanknqiZUmRbh4U,3539
+Jinja2-3.1.2.dist-info/RECORD,,
+Jinja2-3.1.2.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
+Jinja2-3.1.2.dist-info/entry_points.txt,sha256=zRd62fbqIyfUpsRtU7EVIFyiu1tPwfgO7EvPErnxgTE,59
+Jinja2-3.1.2.dist-info/top_level.txt,sha256=PkeVWtLb3-CqjWi1fO29OCbj55EhX_chhKrCdrVe_zs,7
+jinja2/__init__.py,sha256=8vGduD8ytwgD6GDSqpYc2m3aU-T7PKOAddvVXgGr_Fs,1927
+jinja2/__pycache__/__init__.cpython-310.pyc,,
+jinja2/__pycache__/_identifier.cpython-310.pyc,,
+jinja2/__pycache__/async_utils.cpython-310.pyc,,
+jinja2/__pycache__/bccache.cpython-310.pyc,,
+jinja2/__pycache__/compiler.cpython-310.pyc,,
+jinja2/__pycache__/constants.cpython-310.pyc,,
+jinja2/__pycache__/debug.cpython-310.pyc,,
+jinja2/__pycache__/defaults.cpython-310.pyc,,
+jinja2/__pycache__/environment.cpython-310.pyc,,
+jinja2/__pycache__/exceptions.cpython-310.pyc,,
+jinja2/__pycache__/ext.cpython-310.pyc,,
+jinja2/__pycache__/filters.cpython-310.pyc,,
+jinja2/__pycache__/idtracking.cpython-310.pyc,,
+jinja2/__pycache__/lexer.cpython-310.pyc,,
+jinja2/__pycache__/loaders.cpython-310.pyc,,
+jinja2/__pycache__/meta.cpython-310.pyc,,
+jinja2/__pycache__/nativetypes.cpython-310.pyc,,
+jinja2/__pycache__/nodes.cpython-310.pyc,,
+jinja2/__pycache__/optimizer.cpython-310.pyc,,
+jinja2/__pycache__/parser.cpython-310.pyc,,
+jinja2/__pycache__/runtime.cpython-310.pyc,,
+jinja2/__pycache__/sandbox.cpython-310.pyc,,
+jinja2/__pycache__/tests.cpython-310.pyc,,
+jinja2/__pycache__/utils.cpython-310.pyc,,
+jinja2/__pycache__/visitor.cpython-310.pyc,,
+jinja2/_identifier.py,sha256=_zYctNKzRqlk_murTNlzrju1FFJL7Va_Ijqqd7ii2lU,1958
+jinja2/async_utils.py,sha256=dHlbTeaxFPtAOQEYOGYh_PHcDT0rsDaUJAFDl_0XtTg,2472
+jinja2/bccache.py,sha256=mhz5xtLxCcHRAa56azOhphIAe19u1we0ojifNMClDio,14061
+jinja2/compiler.py,sha256=Gs-N8ThJ7OWK4-reKoO8Wh1ZXz95MVphBKNVf75qBr8,72172
+jinja2/constants.py,sha256=GMoFydBF_kdpaRKPoM5cl5MviquVRLVyZtfp5-16jg0,1433
+jinja2/debug.py,sha256=iWJ432RadxJNnaMOPrjIDInz50UEgni3_HKuFXi2vuQ,6299
+jinja2/defaults.py,sha256=boBcSw78h-lp20YbaXSJsqkAI2uN_mD_TtCydpeq5wU,1267
+jinja2/environment.py,sha256=6uHIcc7ZblqOMdx_uYNKqRnnwAF0_nzbyeMP9FFtuh4,61349
+jinja2/exceptions.py,sha256=ioHeHrWwCWNaXX1inHmHVblvc4haO7AXsjCp3GfWvx0,5071
+jinja2/ext.py,sha256=ivr3P7LKbddiXDVez20EflcO3q2aHQwz9P_PgWGHVqE,31502
+jinja2/filters.py,sha256=9js1V-h2RlyW90IhLiBGLM2U-k6SCy2F4BUUMgB3K9Q,53509
+jinja2/idtracking.py,sha256=GfNmadir4oDALVxzn3DL9YInhJDr69ebXeA2ygfuCGA,10704
+jinja2/lexer.py,sha256=DW2nX9zk-6MWp65YR2bqqj0xqCvLtD-u9NWT8AnFRxQ,29726
+jinja2/loaders.py,sha256=BfptfvTVpClUd-leMkHczdyPNYFzp_n7PKOJ98iyHOg,23207
+jinja2/meta.py,sha256=GNPEvifmSaU3CMxlbheBOZjeZ277HThOPUTf1RkppKQ,4396
+jinja2/nativetypes.py,sha256=DXgORDPRmVWgy034H0xL8eF7qYoK3DrMxs-935d0Fzk,4226
+jinja2/nodes.py,sha256=i34GPRAZexXMT6bwuf5SEyvdmS-bRCy9KMjwN5O6pjk,34550
+jinja2/optimizer.py,sha256=tHkMwXxfZkbfA1KmLcqmBMSaz7RLIvvItrJcPoXTyD8,1650
+jinja2/parser.py,sha256=nHd-DFHbiygvfaPtm9rcQXJChZG7DPsWfiEsqfwKerY,39595
+jinja2/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+jinja2/runtime.py,sha256=5CmD5BjbEJxSiDNTFBeKCaq8qU4aYD2v6q2EluyExms,33476
+jinja2/sandbox.py,sha256=Y0xZeXQnH6EX5VjaV2YixESxoepnRbW_3UeQosaBU3M,14584
+jinja2/tests.py,sha256=Am5Z6Lmfr2XaH_npIfJJ8MdXtWsbLjMULZJulTAj30E,5905
+jinja2/utils.py,sha256=u9jXESxGn8ATZNVolwmkjUVu4SA-tLgV0W7PcSfPfdQ,23965
+jinja2/visitor.py,sha256=MH14C6yq24G_KVtWzjwaI7Wg14PCJIYlWW1kpkxYak0,3568

+ 5 - 0
rest_api_venv/Lib/site-packages/Jinja2-3.1.2.dist-info/WHEEL

@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.37.1)
+Root-Is-Purelib: true
+Tag: py3-none-any
+

+ 2 - 0
rest_api_venv/Lib/site-packages/Jinja2-3.1.2.dist-info/entry_points.txt

@@ -0,0 +1,2 @@
+[babel.extractors]
+jinja2 = jinja2.ext:babel_extract[i18n]

+ 1 - 0
rest_api_venv/Lib/site-packages/Jinja2-3.1.2.dist-info/top_level.txt

@@ -0,0 +1 @@
+jinja2

+ 1 - 0
rest_api_venv/Lib/site-packages/MarkupSafe-2.1.1.dist-info/INSTALLER

@@ -0,0 +1 @@
+pip

+ 28 - 0
rest_api_venv/Lib/site-packages/MarkupSafe-2.1.1.dist-info/LICENSE.rst

@@ -0,0 +1,28 @@
+Copyright 2010 Pallets
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+1.  Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+
+2.  Redistributions in binary form must reproduce the above copyright
+    notice, this list of conditions and the following disclaimer in the
+    documentation and/or other materials provided with the distribution.
+
+3.  Neither the name of the copyright holder nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

+ 101 - 0
rest_api_venv/Lib/site-packages/MarkupSafe-2.1.1.dist-info/METADATA

@@ -0,0 +1,101 @@
+Metadata-Version: 2.1
+Name: MarkupSafe
+Version: 2.1.1
+Summary: Safely add untrusted strings to HTML/XML markup.
+Home-page: https://palletsprojects.com/p/markupsafe/
+Author: Armin Ronacher
+Author-email: armin.ronacher@active-4.com
+Maintainer: Pallets
+Maintainer-email: contact@palletsprojects.com
+License: BSD-3-Clause
+Project-URL: Donate, https://palletsprojects.com/donate
+Project-URL: Documentation, https://markupsafe.palletsprojects.com/
+Project-URL: Changes, https://markupsafe.palletsprojects.com/changes/
+Project-URL: Source Code, https://github.com/pallets/markupsafe/
+Project-URL: Issue Tracker, https://github.com/pallets/markupsafe/issues/
+Project-URL: Twitter, https://twitter.com/PalletsTeam
+Project-URL: Chat, https://discord.gg/pallets
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
+Classifier: Topic :: Text Processing :: Markup :: HTML
+Requires-Python: >=3.7
+Description-Content-Type: text/x-rst
+License-File: LICENSE.rst
+
+MarkupSafe
+==========
+
+MarkupSafe implements a text object that escapes characters so it is
+safe to use in HTML and XML. Characters that have special meanings are
+replaced so that they display as the actual characters. This mitigates
+injection attacks, meaning untrusted user input can safely be displayed
+on a page.
+
+
+Installing
+----------
+
+Install and update using `pip`_:
+
+.. code-block:: text
+
+    pip install -U MarkupSafe
+
+.. _pip: https://pip.pypa.io/en/stable/getting-started/
+
+
+Examples
+--------
+
+.. code-block:: pycon
+
+    >>> from markupsafe import Markup, escape
+
+    >>> # escape replaces special characters and wraps in Markup
+    >>> escape("<script>alert(document.cookie);</script>")
+    Markup('&lt;script&gt;alert(document.cookie);&lt;/script&gt;')
+
+    >>> # wrap in Markup to mark text "safe" and prevent escaping
+    >>> Markup("<strong>Hello</strong>")
+    Markup('<strong>hello</strong>')
+
+    >>> escape(Markup("<strong>Hello</strong>"))
+    Markup('<strong>hello</strong>')
+
+    >>> # Markup is a str subclass
+    >>> # methods and operators escape their arguments
+    >>> template = Markup("Hello <em>{name}</em>")
+    >>> template.format(name='"World"')
+    Markup('Hello <em>&#34;World&#34;</em>')
+
+
+Donate
+------
+
+The Pallets organization develops and supports MarkupSafe and other
+popular packages. In order to grow the community of contributors and
+users, and allow the maintainers to devote more time to the projects,
+`please donate today`_.
+
+.. _please donate today: https://palletsprojects.com/donate
+
+
+Links
+-----
+
+-   Documentation: https://markupsafe.palletsprojects.com/
+-   Changes: https://markupsafe.palletsprojects.com/changes/
+-   PyPI Releases: https://pypi.org/project/MarkupSafe/
+-   Source Code: https://github.com/pallets/markupsafe/
+-   Issue Tracker: https://github.com/pallets/markupsafe/issues/
+-   Website: https://palletsprojects.com/p/markupsafe/
+-   Twitter: https://twitter.com/PalletsTeam
+-   Chat: https://discord.gg/pallets
+
+

+ 14 - 0
rest_api_venv/Lib/site-packages/MarkupSafe-2.1.1.dist-info/RECORD

@@ -0,0 +1,14 @@
+MarkupSafe-2.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+MarkupSafe-2.1.1.dist-info/LICENSE.rst,sha256=RjHsDbX9kKVH4zaBcmTGeYIUM4FG-KyUtKV_lu6MnsQ,1503
+MarkupSafe-2.1.1.dist-info/METADATA,sha256=DC93VszmzjLQcrVChRUjtW4XbUwjTdbaplpgdlbFdbs,3242
+MarkupSafe-2.1.1.dist-info/RECORD,,
+MarkupSafe-2.1.1.dist-info/WHEEL,sha256=C6CHup2HLC2Rld8AL5u9w89MYULjdaP5k0k7SG83CcI,102
+MarkupSafe-2.1.1.dist-info/top_level.txt,sha256=qy0Plje5IJuvsCBjejJyhDCjEAdcDLK_2agVcex8Z6U,11
+markupsafe/__init__.py,sha256=XGdbhy_OLrsQ5ZoFV3V6HQ3GeJ_ojcabRl_8yqehISk,9579
+markupsafe/__pycache__/__init__.cpython-310.pyc,,
+markupsafe/__pycache__/_native.cpython-310.pyc,,
+markupsafe/_native.py,sha256=_Q7UsXCOvgdonCgqG3l5asANI6eo50EKnDM-mlwEC5M,1776
+markupsafe/_speedups.c,sha256=n3jzzaJwXcoN8nTFyA53f3vSqsWK2vujI-v6QYifjhQ,7403
+markupsafe/_speedups.cp310-win_amd64.pyd,sha256=Mh72D6F52No2JwGW5GRZfeQKwRrES4u8uZFnw_nN4vk,15872
+markupsafe/_speedups.pyi,sha256=f5QtwIOP0eLrxh2v5p6SmaYmlcHIGIfmz0DovaqL0OU,238
+markupsafe/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0

+ 5 - 0
rest_api_venv/Lib/site-packages/MarkupSafe-2.1.1.dist-info/WHEEL

@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.37.0)
+Root-Is-Purelib: false
+Tag: cp310-cp310-win_amd64
+

+ 1 - 0
rest_api_venv/Lib/site-packages/MarkupSafe-2.1.1.dist-info/top_level.txt

@@ -0,0 +1 @@
+markupsafe

+ 1 - 0
rest_api_venv/Lib/site-packages/PyNaCl-1.5.0.dist-info/INSTALLER

@@ -0,0 +1 @@
+pip

+ 174 - 0
rest_api_venv/Lib/site-packages/PyNaCl-1.5.0.dist-info/LICENSE

@@ -0,0 +1,174 @@
+                              Apache License
+                        Version 2.0, January 2004
+                     http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+   "License" shall mean the terms and conditions for use, reproduction,
+   and distribution as defined by Sections 1 through 9 of this document.
+
+   "Licensor" shall mean the copyright owner or entity authorized by
+   the copyright owner that is granting the License.
+
+   "Legal Entity" shall mean the union of the acting entity and all
+   other entities that control, are controlled by, or are under common
+   control with that entity. For the purposes of this definition,
+   "control" means (i) the power, direct or indirect, to cause the
+   direction or management of such entity, whether by contract or
+   otherwise, or (ii) ownership of fifty percent (50%) or more of the
+   outstanding shares, or (iii) beneficial ownership of such entity.
+
+   "You" (or "Your") shall mean an individual or Legal Entity
+   exercising permissions granted by this License.
+
+   "Source" form shall mean the preferred form for making modifications,
+   including but not limited to software source code, documentation
+   source, and configuration files.
+
+   "Object" form shall mean any form resulting from mechanical
+   transformation or translation of a Source form, including but
+   not limited to compiled object code, generated documentation,
+   and conversions to other media types.
+
+   "Work" shall mean the work of authorship, whether in Source or
+   Object form, made available under the License, as indicated by a
+   copyright notice that is included in or attached to the work
+   (an example is provided in the Appendix below).
+
+   "Derivative Works" shall mean any work, whether in Source or Object
+   form, that is based on (or derived from) the Work and for which the
+   editorial revisions, annotations, elaborations, or other modifications
+   represent, as a whole, an original work of authorship. For the purposes
+   of this License, Derivative Works shall not include works that remain
+   separable from, or merely link (or bind by name) to the interfaces of,
+   the Work and Derivative Works thereof.
+
+   "Contribution" shall mean any work of authorship, including
+   the original version of the Work and any modifications or additions
+   to that Work or Derivative Works thereof, that is intentionally
+   submitted to Licensor for inclusion in the Work by the copyright owner
+   or by an individual or Legal Entity authorized to submit on behalf of
+   the copyright owner. For the purposes of this definition, "submitted"
+   means any form of electronic, verbal, or written communication sent
+   to the Licensor or its representatives, including but not limited to
+   communication on electronic mailing lists, source code control systems,
+   and issue tracking systems that are managed by, or on behalf of, the
+   Licensor for the purpose of discussing and improving the Work, but
+   excluding communication that is conspicuously marked or otherwise
+   designated in writing by the copyright owner as "Not a Contribution."
+
+   "Contributor" shall mean Licensor and any individual or Legal Entity
+   on behalf of whom a Contribution has been received by Licensor and
+   subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+   this License, each Contributor hereby grants to You a perpetual,
+   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+   copyright license to reproduce, prepare Derivative Works of,
+   publicly display, publicly perform, sublicense, and distribute the
+   Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+   this License, each Contributor hereby grants to You a perpetual,
+   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+   (except as stated in this section) patent license to make, have made,
+   use, offer to sell, sell, import, and otherwise transfer the Work,
+   where such license applies only to those patent claims licensable
+   by such Contributor that are necessarily infringed by their
+   Contribution(s) alone or by combination of their Contribution(s)
+   with the Work to which such Contribution(s) was submitted. If You
+   institute patent litigation against any entity (including a
+   cross-claim or counterclaim in a lawsuit) alleging that the Work
+   or a Contribution incorporated within the Work constitutes direct
+   or contributory patent infringement, then any patent licenses
+   granted to You under this License for that Work shall terminate
+   as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+   Work or Derivative Works thereof in any medium, with or without
+   modifications, and in Source or Object form, provided that You
+   meet the following conditions:
+
+   (a) You must give any other recipients of the Work or
+       Derivative Works a copy of this License; and
+
+   (b) You must cause any modified files to carry prominent notices
+       stating that You changed the files; and
+
+   (c) You must retain, in the Source form of any Derivative Works
+       that You distribute, all copyright, patent, trademark, and
+       attribution notices from the Source form of the Work,
+       excluding those notices that do not pertain to any part of
+       the Derivative Works; and
+
+   (d) If the Work includes a "NOTICE" text file as part of its
+       distribution, then any Derivative Works that You distribute must
+       include a readable copy of the attribution notices contained
+       within such NOTICE file, excluding those notices that do not
+       pertain to any part of the Derivative Works, in at least one
+       of the following places: within a NOTICE text file distributed
+       as part of the Derivative Works; within the Source form or
+       documentation, if provided along with the Derivative Works; or,
+       within a display generated by the Derivative Works, if and
+       wherever such third-party notices normally appear. The contents
+       of the NOTICE file are for informational purposes only and
+       do not modify the License. You may add Your own attribution
+       notices within Derivative Works that You distribute, alongside
+       or as an addendum to the NOTICE text from the Work, provided
+       that such additional attribution notices cannot be construed
+       as modifying the License.
+
+   You may add Your own copyright statement to Your modifications and
+   may provide additional or different license terms and conditions
+   for use, reproduction, or distribution of Your modifications, or
+   for any such Derivative Works as a whole, provided Your use,
+   reproduction, and distribution of the Work otherwise complies with
+   the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+   any Contribution intentionally submitted for inclusion in the Work
+   by You to the Licensor shall be under the terms and conditions of
+   this License, without any additional terms or conditions.
+   Notwithstanding the above, nothing herein shall supersede or modify
+   the terms of any separate license agreement you may have executed
+   with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+   names, trademarks, service marks, or product names of the Licensor,
+   except as required for reasonable and customary use in describing the
+   origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+   agreed to in writing, Licensor provides the Work (and each
+   Contributor provides its Contributions) on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+   implied, including, without limitation, any warranties or conditions
+   of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+   PARTICULAR PURPOSE. You are solely responsible for determining the
+   appropriateness of using or redistributing the Work and assume any
+   risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+   whether in tort (including negligence), contract, or otherwise,
+   unless required by applicable law (such as deliberate and grossly
+   negligent acts) or agreed to in writing, shall any Contributor be
+   liable to You for damages, including any direct, indirect, special,
+   incidental, or consequential damages of any character arising as a
+   result of this License or out of the use or inability to use the
+   Work (including but not limited to damages for loss of goodwill,
+   work stoppage, computer failure or malfunction, or any and all
+   other commercial damages or losses), even if such Contributor
+   has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+   the Work or Derivative Works thereof, You may choose to offer,
+   and charge a fee for, acceptance of support, warranty, indemnity,
+   or other liability obligations and/or rights consistent with this
+   License. However, in accepting such obligations, You may act only
+   on Your own behalf and on Your sole responsibility, not on behalf
+   of any other Contributor, and only if You agree to indemnify,
+   defend, and hold each Contributor harmless for any liability
+   incurred by, or claims asserted against, such Contributor by reason
+   of your accepting any such warranty or additional liability.

+ 246 - 0
rest_api_venv/Lib/site-packages/PyNaCl-1.5.0.dist-info/METADATA

@@ -0,0 +1,246 @@
+Metadata-Version: 2.1
+Name: PyNaCl
+Version: 1.5.0
+Summary: Python binding to the Networking and Cryptography (NaCl) library
+Home-page: https://github.com/pyca/pynacl/
+Author: The PyNaCl developers
+Author-email: cryptography-dev@python.org
+License: Apache License 2.0
+Platform: UNKNOWN
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Requires-Python: >=3.6
+License-File: LICENSE
+Requires-Dist: cffi (>=1.4.1)
+Provides-Extra: docs
+Requires-Dist: sphinx (>=1.6.5) ; extra == 'docs'
+Requires-Dist: sphinx-rtd-theme ; extra == 'docs'
+Provides-Extra: tests
+Requires-Dist: pytest (!=3.3.0,>=3.2.1) ; extra == 'tests'
+Requires-Dist: hypothesis (>=3.27.0) ; extra == 'tests'
+
+===============================================
+PyNaCl: Python binding to the libsodium library
+===============================================
+
+.. image:: https://img.shields.io/pypi/v/pynacl.svg
+    :target: https://pypi.org/project/PyNaCl/
+    :alt: Latest Version
+
+.. image:: https://codecov.io/github/pyca/pynacl/coverage.svg?branch=main
+    :target: https://codecov.io/github/pyca/pynacl?branch=main
+
+.. image:: https://img.shields.io/pypi/pyversions/pynacl.svg
+    :target: https://pypi.org/project/PyNaCl/
+    :alt: Compatible Python Versions
+
+PyNaCl is a Python binding to `libsodium`_, which is a fork of the
+`Networking and Cryptography library`_. These libraries have a stated goal of
+improving usability, security and speed. It supports Python 3.6+ as well as
+PyPy 3.
+
+.. _libsodium: https://github.com/jedisct1/libsodium
+.. _Networking and Cryptography library: https://nacl.cr.yp.to/
+
+Features
+--------
+
+* Digital signatures
+* Secret-key encryption
+* Public-key encryption
+* Hashing and message authentication
+* Password based key derivation and password hashing
+
+`Changelog`_
+------------
+
+.. _Changelog: https://pynacl.readthedocs.io/en/stable/changelog/
+
+Installation
+============
+
+Binary wheel install
+--------------------
+
+PyNaCl ships as a binary wheel on macOS, Windows and Linux ``manylinux1`` [#many]_ ,
+so all dependencies are included. Make sure you have an up-to-date pip
+and run:
+
+.. code-block:: console
+
+    $ pip install pynacl
+
+Faster wheel build
+------------------
+
+You can define the environment variable ``LIBSODIUM_MAKE_ARGS`` to pass arguments to ``make``
+and enable `parallelization`_:
+
+.. code-block:: console
+
+    $ LIBSODIUM_MAKE_ARGS=-j4 pip install pynacl
+
+Linux source build
+------------------
+
+PyNaCl relies on `libsodium`_, a portable C library. A copy is bundled
+with PyNaCl so to install you can run:
+
+.. code-block:: console
+
+    $ pip install pynacl
+
+If you'd prefer to use the version of ``libsodium`` provided by your
+distribution, you can disable the bundled copy during install by running:
+
+.. code-block:: console
+
+    $ SODIUM_INSTALL=system pip install pynacl
+
+.. warning:: Usage of the legacy ``easy_install`` command provided by setuptools
+   is generally discouraged, and is completely unsupported in PyNaCl's case.
+
+.. _parallelization: https://www.gnu.org/software/make/manual/html_node/Parallel.html
+
+.. _libsodium: https://github.com/jedisct1/libsodium
+
+.. [#many] `manylinux1 wheels <https://www.python.org/dev/peps/pep-0513/>`_
+    are built on a baseline linux environment based on Centos 5.11
+    and should work on most x86 and x86_64 glibc based linux environments.
+
+Changelog
+=========
+
+1.5.0 (2022-01-07)
+------------------
+
+* **BACKWARDS INCOMPATIBLE:** Removed support for Python 2.7 and Python 3.5.
+* **BACKWARDS INCOMPATIBLE:** We no longer distribute ``manylinux1``
+  wheels.
+* Added ``manylinux2014``, ``manylinux_2_24``, ``musllinux``, and macOS
+  ``universal2`` wheels (the latter supports macOS ``arm64``).
+* Update ``libsodium`` to 1.0.18-stable (July 25, 2021 release).
+* Add inline type hints.
+
+1.4.0 (2020-05-25)
+------------------
+
+* Update ``libsodium`` to 1.0.18.
+* **BACKWARDS INCOMPATIBLE:** We no longer distribute 32-bit ``manylinux1``
+  wheels. Continuing to produce them was a maintenance burden.
+* Added support for Python 3.8, and removed support for Python 3.4.
+* Add low level bindings for extracting the seed and the public key
+  from crypto_sign_ed25519 secret key
+* Add low level bindings for deterministic random generation.
+* Add ``wheel`` and ``setuptools`` setup_requirements in ``setup.py`` (#485)
+* Fix checks on very slow builders (#481, #495)
+* Add low-level bindings to ed25519 arithmetic functions
+* Update low-level blake2b state implementation
+* Fix wrong short-input behavior of SealedBox.decrypt() (#517)
+* Raise CryptPrefixError exception instead of InvalidkeyError when trying
+  to check a password against a verifier stored in a unknown format (#519)
+* Add support for minimal builds of libsodium. Trying to call functions
+  not available in a minimal build will raise an UnavailableError
+  exception. To compile a minimal build of the bundled libsodium, set
+  the SODIUM_INSTALL_MINIMAL environment variable to any non-empty
+  string (e.g. ``SODIUM_INSTALL_MINIMAL=1``) for setup.
+
+1.3.0 2018-09-26
+----------------
+
+* Added support for Python 3.7.
+* Update ``libsodium`` to 1.0.16.
+* Run and test all code examples in PyNaCl docs through sphinx's
+  doctest builder.
+* Add low-level bindings for chacha20-poly1305 AEAD constructions.
+* Add low-level bindings for the chacha20-poly1305 secretstream constructions.
+* Add low-level bindings for ed25519ph pre-hashed signing construction.
+* Add low-level bindings for constant-time increment and addition
+  on fixed-precision big integers represented as little-endian
+  byte sequences.
+* Add low-level bindings for the ISO/IEC 7816-4 compatible padding API.
+* Add low-level bindings for libsodium's crypto_kx... key exchange
+  construction.
+* Set the hypothesis deadline to None in tests/test_pwhash.py to avoid
+  incorrect test failures on slower processor architectures (GitHub
+  issue #370).
+
+1.2.1 - 2017-12-04
+------------------
+
+* Update hypothesis minimum allowed version.
+* Infrastructure: add proper configuration for readthedocs builder
+  runtime environment.
+
+1.2.0 - 2017-11-01
+------------------
+
+* Update ``libsodium`` to 1.0.15.
+* Infrastructure: add Jenkins support for automatic builds of
+  ``manylinux1`` binary wheels.
+* Added support for ``SealedBox`` construction.
+* Added support for ``argon2i`` and ``argon2id`` password hashing constructs
+  and restructured high-level password hashing implementation to expose
+  the same interface for all hashers.
+* Added support for 128 bit ``siphashx24`` variant of ``siphash24``.
+* Added support for ``from_seed`` APIs for X25519 keypair generation.
+* Dropped support for Python 3.3.
+
+1.1.2 - 2017-03-31
+------------------
+
+* Reorder the link-time library search path when using the bundled
+  libsodium.
+
+1.1.1 - 2017-03-15
+------------------
+
+* Fixed a circular import bug in ``nacl.utils``.
+
+1.1.0 - 2017-03-14
+------------------
+
+* Dropped support for Python 2.6.
+* Added ``shared_key()`` method on ``Box``.
+* You can now pass ``None`` as the ``nonce`` when encrypting with ``Box`` or
+  ``SecretBox`` and a random nonce will be generated automatically (see the
+  sketch after this list).
+* Added support for ``siphash24``.
+* Added support for ``blake2b``.
+* Added support for ``scrypt``.
+* Update ``libsodium`` to 1.0.11.
+* Default to the bundled ``libsodium`` when compiling.
+* All raised exceptions are defined by mixing in
+  ``nacl.exceptions.CryptoError``.
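+
+A minimal sketch of the automatic-nonce behaviour described above, assuming
+the high-level ``nacl.secret.SecretBox`` API:
+
+.. code-block:: python
+
+    import nacl.secret
+    import nacl.utils
+
+    # generate a random 32-byte key and build a SecretBox around it
+    key = nacl.utils.random(nacl.secret.SecretBox.KEY_SIZE)
+    box = nacl.secret.SecretBox(key)
+
+    # no nonce is supplied, so a random one is generated and prepended
+    encrypted = box.encrypt(b"hello")
+    assert box.decrypt(encrypted) == b"hello"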
+
+1.0.1 - 2016-01-24
+------------------
+
+* Fix an issue with absolute paths that prevented the creation of wheels.
+
+1.0 - 2016-01-23
+----------------
+
+* PyNaCl has been ported to use the new APIs available in cffi 1.0+.
+  Due to this change we no longer support PyPy releases older than 2.6.
+* Python 3.2 support has been dropped.
+* Functions to convert between Ed25519 and Curve25519 keys have been added.
+
+0.3.0 - 2015-03-04
+------------------
+
+* The low-level API (``nacl.c.*``) has been changed to match the
+  upstream NaCl C/C++ conventions (as well as those of other NaCl bindings).
+  The order of arguments and return values has changed significantly. To
+  avoid silent failures, ``nacl.c`` has been removed, and replaced with
+  ``nacl.bindings`` (with the new argument ordering). If you have code which
+  calls these functions (e.g. ``nacl.c.crypto_box_keypair()``), you must review
+  the new docstrings and update your code/imports to match the new
+  conventions.
+
+

+ 68 - 0
rest_api_venv/Lib/site-packages/PyNaCl-1.5.0.dist-info/RECORD

@@ -0,0 +1,68 @@
+PyNaCl-1.5.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+PyNaCl-1.5.0.dist-info/LICENSE,sha256=d69bve2VkRS216XupRiyvjZOBPT0qV-eh9mHDCdxPSQ,9868
+PyNaCl-1.5.0.dist-info/METADATA,sha256=U9PLLkcKk_YC_Tl5OSoMiAmblalKU9qFsRtxiwa-TiM,8656
+PyNaCl-1.5.0.dist-info/RECORD,,
+PyNaCl-1.5.0.dist-info/WHEEL,sha256=nYCSW5p8tLyDU-wbqo3uRlCluAzwxLmyyRK2pVs4-Ag,100
+PyNaCl-1.5.0.dist-info/top_level.txt,sha256=wfdEOI_G2RIzmzsMyhpqP17HUh6Jcqi99to9aHLEslo,13
+nacl/__init__.py,sha256=W0JhVbFcnhlrEtK0tdRGDcX8aucbKCQmRBmf9hGFWQY,1155
+nacl/__pycache__/__init__.cpython-310.pyc,,
+nacl/__pycache__/encoding.cpython-310.pyc,,
+nacl/__pycache__/exceptions.cpython-310.pyc,,
+nacl/__pycache__/hash.cpython-310.pyc,,
+nacl/__pycache__/hashlib.cpython-310.pyc,,
+nacl/__pycache__/public.cpython-310.pyc,,
+nacl/__pycache__/secret.cpython-310.pyc,,
+nacl/__pycache__/signing.cpython-310.pyc,,
+nacl/__pycache__/utils.cpython-310.pyc,,
+nacl/_sodium.pyd,sha256=1zPCPGpLIWJaT_B_ZWK6iCvL2w9QgmJpQZ2N4FdPiM0,348672
+nacl/bindings/__init__.py,sha256=erC7ta0z7XQSmfeYcnCVhFKqBtkkK6wJq8PKsEnjqSg,17448
+nacl/bindings/__pycache__/__init__.cpython-310.pyc,,
+nacl/bindings/__pycache__/crypto_aead.cpython-310.pyc,,
+nacl/bindings/__pycache__/crypto_box.cpython-310.pyc,,
+nacl/bindings/__pycache__/crypto_core.cpython-310.pyc,,
+nacl/bindings/__pycache__/crypto_generichash.cpython-310.pyc,,
+nacl/bindings/__pycache__/crypto_hash.cpython-310.pyc,,
+nacl/bindings/__pycache__/crypto_kx.cpython-310.pyc,,
+nacl/bindings/__pycache__/crypto_pwhash.cpython-310.pyc,,
+nacl/bindings/__pycache__/crypto_scalarmult.cpython-310.pyc,,
+nacl/bindings/__pycache__/crypto_secretbox.cpython-310.pyc,,
+nacl/bindings/__pycache__/crypto_secretstream.cpython-310.pyc,,
+nacl/bindings/__pycache__/crypto_shorthash.cpython-310.pyc,,
+nacl/bindings/__pycache__/crypto_sign.cpython-310.pyc,,
+nacl/bindings/__pycache__/randombytes.cpython-310.pyc,,
+nacl/bindings/__pycache__/sodium_core.cpython-310.pyc,,
+nacl/bindings/__pycache__/utils.cpython-310.pyc,,
+nacl/bindings/crypto_aead.py,sha256=fxSgpeI48HkxwsZEk9cTxigdJ5k7PvMh1mxr7M0LK9A,16156
+nacl/bindings/crypto_box.py,sha256=6M7qSZu806POBtEoK5e9Ui-JcSYaFDI6tTnllVJ0F7U,10463
+nacl/bindings/crypto_core.py,sha256=KccKcEGpoEsQjkYRcwpsBse-g_iGXNgQHeb1gCdZR4E,14148
+nacl/bindings/crypto_generichash.py,sha256=8nEG1ntZSqVOTAV6-_EZg-gNbV5UJc-SbDUa2GdkxRs,9133
+nacl/bindings/crypto_hash.py,sha256=8GO8el_rZgD2ITJyEbiejHXZimfrxckXc9cUppv1vpk,2238
+nacl/bindings/crypto_kx.py,sha256=8gmFRbU7NKHqCLZ1HsObilmeTboYFW6tEJCEUucje9Q,6923
+nacl/bindings/crypto_pwhash.py,sha256=sIugQ9Rx8KTy3vnOsQwz_kV2PhqwqVOsfbkR_sKnudc,19448
+nacl/bindings/crypto_scalarmult.py,sha256=gMNDgWl-P6ua8I3kAM2yt4dNzK9gY742C424z-yBMUU,8484
+nacl/bindings/crypto_secretbox.py,sha256=WEX2_Ea79pQkUshr_sVlslA28Zx_66tYFawW1qipleY,3000
+nacl/bindings/crypto_secretstream.py,sha256=3uZN9XET8AeFKn365K2gfcZKYjkOFfyLr8GCUw8Wx6g,11522
+nacl/bindings/crypto_shorthash.py,sha256=NDmmvG77ZhGLmbK0USoLW2dOveUcGdUFqL1fi5ozcDc,2684
+nacl/bindings/crypto_sign.py,sha256=IMKXZCAb8oF0tLQR6UcpJk0zTRFDeW4IWNvo9XdZfgw,10669
+nacl/bindings/randombytes.py,sha256=JmR-o2Bpj1ssJp8GDj4KgutPknEZGgogUSdnW4kRI5Q,1614
+nacl/bindings/sodium_core.py,sha256=8o4mDDahmmAIf4Xk3hJE5C-B6Ms5mbeu64_ylW9kY6I,1072
+nacl/bindings/utils.py,sha256=2WefZr1MOFTWKn6h6ODuw4u8D9PEfDWMaayXiPUm6RA,4439
+nacl/encoding.py,sha256=xKwjFb5F1jEfmazetwtaQpsUPvzYLh3RU6Wkiq56NzI,3020
+nacl/exceptions.py,sha256=gM8gN01HFGCUw8L1VjmEhBPf00r-igNJo-nAEsrhEBU,2539
+nacl/hash.py,sha256=_H8JOPecwna4GP8CaMorghfetykbuO9q4oteek9bfVo,6574
+nacl/hashlib.py,sha256=x74Z29ExS3e5yFV0Mzij9gnU_eRKWwkD6YPYfI5eUFw,4543
+nacl/public.py,sha256=kVoz1R9zkNnczDUsBDnYYXL_6C9jsga6ph3rk32uFNo,15215
+nacl/pwhash/__init__.py,sha256=2vX9OivKZsrMVjh2vjfbdBVDsUml8AEnIRsa1d6ZzG0,2750
+nacl/pwhash/__pycache__/__init__.cpython-310.pyc,,
+nacl/pwhash/__pycache__/_argon2.cpython-310.pyc,,
+nacl/pwhash/__pycache__/argon2i.cpython-310.pyc,,
+nacl/pwhash/__pycache__/argon2id.cpython-310.pyc,,
+nacl/pwhash/__pycache__/scrypt.cpython-310.pyc,,
+nacl/pwhash/_argon2.py,sha256=uR1Y_DnX8RZIfmTHborlXWcJ-re_wDryOIhVYssjj8I,1828
+nacl/pwhash/argon2i.py,sha256=dJmM_bVP0LALK9I4TfaZsWqEah7z7sprd3mw8TRPr-4,4537
+nacl/pwhash/argon2id.py,sha256=dJ7kpU-b07YzMPlrsMW9K_tV-xnA0bE7pj-ZFo-z0wc,4568
+nacl/pwhash/scrypt.py,sha256=_qbeDojJzzE42RvwwUsELA8pGgXDInb1ht6ec1w3_1Y,7197
+nacl/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+nacl/secret.py,sha256=YShCAvKF7sHAGOBRSnWKMJvJChQ7SuhlQXRCz32MdrM,12413
+nacl/signing.py,sha256=CR9ZRXYSrdZFkNiIQTIplFS8UQ0Z_OzXqaaQ_HdhSQ4,8587
+nacl/utils.py,sha256=iEQ0LtPtyV7Cqo4wpn9CvQunwsM5ShEZlO9IvT1r4dk,2429

+ 5 - 0
rest_api_venv/Lib/site-packages/PyNaCl-1.5.0.dist-info/WHEEL

@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.37.1)
+Root-Is-Purelib: false
+Tag: cp36-abi3-win_amd64
+

+ 2 - 0
rest_api_venv/Lib/site-packages/PyNaCl-1.5.0.dist-info/top_level.txt

@@ -0,0 +1,2 @@
+_sodium
+nacl

+ 1 - 0
rest_api_venv/Lib/site-packages/Werkzeug-2.1.2.dist-info/INSTALLER

@@ -0,0 +1 @@
+pip

+ 28 - 0
rest_api_venv/Lib/site-packages/Werkzeug-2.1.2.dist-info/LICENSE.rst

@@ -0,0 +1,28 @@
+Copyright 2007 Pallets
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+1.  Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+
+2.  Redistributions in binary form must reproduce the above copyright
+    notice, this list of conditions and the following disclaimer in the
+    documentation and/or other materials provided with the distribution.
+
+3.  Neither the name of the copyright holder nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

+ 128 - 0
rest_api_venv/Lib/site-packages/Werkzeug-2.1.2.dist-info/METADATA

@@ -0,0 +1,128 @@
+Metadata-Version: 2.1
+Name: Werkzeug
+Version: 2.1.2
+Summary: The comprehensive WSGI web application library.
+Home-page: https://palletsprojects.com/p/werkzeug/
+Author: Armin Ronacher
+Author-email: armin.ronacher@active-4.com
+Maintainer: Pallets
+Maintainer-email: contact@palletsprojects.com
+License: BSD-3-Clause
+Project-URL: Donate, https://palletsprojects.com/donate
+Project-URL: Documentation, https://werkzeug.palletsprojects.com/
+Project-URL: Changes, https://werkzeug.palletsprojects.com/changes/
+Project-URL: Source Code, https://github.com/pallets/werkzeug/
+Project-URL: Issue Tracker, https://github.com/pallets/werkzeug/issues/
+Project-URL: Twitter, https://twitter.com/PalletsTeam
+Project-URL: Chat, https://discord.gg/pallets
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
+Classifier: Topic :: Internet :: WWW/HTTP :: WSGI
+Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application
+Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware
+Classifier: Topic :: Software Development :: Libraries :: Application Frameworks
+Requires-Python: >=3.7
+Description-Content-Type: text/x-rst
+License-File: LICENSE.rst
+Provides-Extra: watchdog
+Requires-Dist: watchdog ; extra == 'watchdog'
+
+Werkzeug
+========
+
+*werkzeug* German noun: "tool". Etymology: *werk* ("work"), *zeug* ("stuff")
+
+Werkzeug is a comprehensive `WSGI`_ web application library. It began as
+a simple collection of various utilities for WSGI applications and has
+become one of the most advanced WSGI utility libraries.
+
+It includes:
+
+-   An interactive debugger that allows inspecting stack traces and
+    source code in the browser with an interactive interpreter for any
+    frame in the stack.
+-   A full-featured request object with objects to interact with
+    headers, query args, form data, files, and cookies.
+-   A response object that can wrap other WSGI applications and handle
+    streaming data.
+-   A routing system for matching URLs to endpoints and generating URLs
+    for endpoints, with an extensible system for capturing variables
+    from URLs.
+-   HTTP utilities to handle entity tags, cache control, dates, user
+    agents, cookies, files, and more.
+-   A threaded WSGI server for use while developing applications
+    locally.
+-   A test client for simulating HTTP requests during testing without
+    requiring a running server.
+
+Werkzeug doesn't enforce any dependencies. It is up to the developer to
+choose a template engine, database adapter, and even how to handle
+requests. It can be used to build all sorts of end user applications
+such as blogs, wikis, or bulletin boards.
+
+`Flask`_ wraps Werkzeug, using it to handle the details of WSGI while
+providing more structure and patterns for defining powerful
+applications.
+
+.. _WSGI: https://wsgi.readthedocs.io/en/latest/
+.. _Flask: https://www.palletsprojects.com/p/flask/
+
+
+Installing
+----------
+
+Install and update using `pip`_:
+
+.. code-block:: text
+
+    pip install -U Werkzeug
+
+.. _pip: https://pip.pypa.io/en/stable/getting-started/
+
+
+A Simple Example
+----------------
+
+.. code-block:: python
+
+    from werkzeug.wrappers import Request, Response
+
+    @Request.application
+    def application(request):
+        return Response('Hello, World!')
+
+    if __name__ == '__main__':
+        from werkzeug.serving import run_simple
+        run_simple('localhost', 4000, application)
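+
+The routing system mentioned in the feature list above can be sketched roughly
+as follows (a minimal, hypothetical example; the ``"item"`` endpoint name is
+purely illustrative):
+
+.. code-block:: python
+
+    from werkzeug.routing import Map, Rule
+
+    # declare URL rules and the endpoints they map to
+    url_map = Map([Rule("/item/<int:item_id>", endpoint="item")])
+
+    # bind the map to a host so URLs can be matched and built
+    urls = url_map.bind("example.com")
+
+    print(urls.match("/item/42"))               # ('item', {'item_id': 42})
+    print(urls.build("item", {"item_id": 7}))   # '/item/7'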
+
+
+Donate
+------
+
+The Pallets organization develops and supports Werkzeug and other
+popular packages. In order to grow the community of contributors and
+users, and allow the maintainers to devote more time to the projects,
+`please donate today`_.
+
+.. _please donate today: https://palletsprojects.com/donate
+
+
+Links
+-----
+
+-   Documentation: https://werkzeug.palletsprojects.com/
+-   Changes: https://werkzeug.palletsprojects.com/changes/
+-   PyPI Releases: https://pypi.org/project/Werkzeug/
+-   Source Code: https://github.com/pallets/werkzeug/
+-   Issue Tracker: https://github.com/pallets/werkzeug/issues/
+-   Website: https://palletsprojects.com/p/werkzeug/
+-   Twitter: https://twitter.com/PalletsTeam
+-   Chat: https://discord.gg/pallets
+
+

+ 86 - 0
rest_api_venv/Lib/site-packages/Werkzeug-2.1.2.dist-info/RECORD

@@ -0,0 +1,86 @@
+Werkzeug-2.1.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+Werkzeug-2.1.2.dist-info/LICENSE.rst,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475
+Werkzeug-2.1.2.dist-info/METADATA,sha256=vWBYPD9d_Qzl4WAupfJ5Fy_ep7pqMPnGvkSLYiCi4B0,4400
+Werkzeug-2.1.2.dist-info/RECORD,,
+Werkzeug-2.1.2.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
+Werkzeug-2.1.2.dist-info/top_level.txt,sha256=QRyj2VjwJoQkrwjwFIOlB8Xg3r9un0NtqVHQF-15xaw,9
+werkzeug/__init__.py,sha256=BtM-3LM8iob4OwPY693-LqZ5RcnDS4iftOqBK28uZ2k,188
+werkzeug/__pycache__/__init__.cpython-310.pyc,,
+werkzeug/__pycache__/_internal.cpython-310.pyc,,
+werkzeug/__pycache__/_reloader.cpython-310.pyc,,
+werkzeug/__pycache__/datastructures.cpython-310.pyc,,
+werkzeug/__pycache__/exceptions.cpython-310.pyc,,
+werkzeug/__pycache__/formparser.cpython-310.pyc,,
+werkzeug/__pycache__/http.cpython-310.pyc,,
+werkzeug/__pycache__/local.cpython-310.pyc,,
+werkzeug/__pycache__/routing.cpython-310.pyc,,
+werkzeug/__pycache__/security.cpython-310.pyc,,
+werkzeug/__pycache__/serving.cpython-310.pyc,,
+werkzeug/__pycache__/test.cpython-310.pyc,,
+werkzeug/__pycache__/testapp.cpython-310.pyc,,
+werkzeug/__pycache__/urls.cpython-310.pyc,,
+werkzeug/__pycache__/user_agent.cpython-310.pyc,,
+werkzeug/__pycache__/utils.cpython-310.pyc,,
+werkzeug/__pycache__/wsgi.cpython-310.pyc,,
+werkzeug/_internal.py,sha256=g8PHJz2z39I3x0vwTvTKbXIg0eUQqGF9UtUzDMWT0Qw,16222
+werkzeug/_reloader.py,sha256=lYStlIDduTxBOB8BSozy_44HQ7YT5fup-x3uuac1-2o,14331
+werkzeug/datastructures.py,sha256=Sk5gYGJbgvwpM-5IursyEWwo815RB5NAs2wFcTjHG0M,97018
+werkzeug/datastructures.pyi,sha256=L7MfJjHrEjKuAZ57w5d2eaiUIWYya52crapklFnKUz0,34493
+werkzeug/debug/__init__.py,sha256=Qds7CmReDr13XUaKYvcwnGNBQp6d86ooGV_to2Uw0C0,17730
+werkzeug/debug/__pycache__/__init__.cpython-310.pyc,,
+werkzeug/debug/__pycache__/console.cpython-310.pyc,,
+werkzeug/debug/__pycache__/repr.cpython-310.pyc,,
+werkzeug/debug/__pycache__/tbtools.cpython-310.pyc,,
+werkzeug/debug/console.py,sha256=08mKGZLMsrd2E-0qD82J5knUbI2DomHXUQ5z0550a_o,6082
+werkzeug/debug/repr.py,sha256=Mp911LMRzZUoNvrCLQfKKpQZbNKdIM8VbjzJQjBkdsM,9481
+werkzeug/debug/shared/ICON_LICENSE.md,sha256=DhA6Y1gUl5Jwfg0NFN9Rj4VWITt8tUx0IvdGf0ux9-s,222
+werkzeug/debug/shared/console.png,sha256=bxax6RXXlvOij_KeqvSNX0ojJf83YbnZ7my-3Gx9w2A,507
+werkzeug/debug/shared/debugger.js,sha256=tg42SZs1SVmYWZ-_Fj5ELK5-FLHnGNQrei0K2By8Bw8,10521
+werkzeug/debug/shared/less.png,sha256=-4-kNRaXJSONVLahrQKUxMwXGm9R4OnZ9SxDGpHlIR4,191
+werkzeug/debug/shared/more.png,sha256=GngN7CioHQoV58rH6ojnkYi8c_qED2Aka5FO5UXrReY,200
+werkzeug/debug/shared/style.css,sha256=-xSxzUEZGw_IqlDR5iZxitNl8LQUjBM-_Y4UAvXVH8g,6078
+werkzeug/debug/tbtools.py,sha256=cBsPKCrB0FRT8i5EUdGo0A8MStWSj7O3Jk40r7Ll3ok,12633
+werkzeug/exceptions.py,sha256=5nzjr4AN_J-jtkT2FgDIm8SUiC0tjzWcROXse06H6a8,26498
+werkzeug/formparser.py,sha256=rLEu_ZwVpvqshZg6E4Qiv36QsmzmCytTijBeGX3dDGk,16056
+werkzeug/http.py,sha256=RUwj0JM1Em3LHyqyXSJOkdtBOT24mJlGFbklqo3PWDY,44602
+werkzeug/local.py,sha256=cj0M4BzMGdg_CD-H3osv9Zf9by4qY-BzAD68bxp979Q,18343
+werkzeug/middleware/__init__.py,sha256=qfqgdT5npwG9ses3-FXQJf3aB95JYP1zchetH_T3PUw,500
+werkzeug/middleware/__pycache__/__init__.cpython-310.pyc,,
+werkzeug/middleware/__pycache__/dispatcher.cpython-310.pyc,,
+werkzeug/middleware/__pycache__/http_proxy.cpython-310.pyc,,
+werkzeug/middleware/__pycache__/lint.cpython-310.pyc,,
+werkzeug/middleware/__pycache__/profiler.cpython-310.pyc,,
+werkzeug/middleware/__pycache__/proxy_fix.cpython-310.pyc,,
+werkzeug/middleware/__pycache__/shared_data.cpython-310.pyc,,
+werkzeug/middleware/dispatcher.py,sha256=Fh_w-KyWnTSYF-Lfv5dimQ7THSS7afPAZMmvc4zF1gg,2580
+werkzeug/middleware/http_proxy.py,sha256=HE8VyhS7CR-E1O6_9b68huv8FLgGGR1DLYqkS3Xcp3Q,7558
+werkzeug/middleware/lint.py,sha256=L4ISeRPhFbrMWt8CFHHExyvuWxE3CyqbfD5hTQKkVjA,13966
+werkzeug/middleware/profiler.py,sha256=QkXk7cqnaPnF8wQu-5SyPCIOT3_kdABUBorQOghVNOA,4899
+werkzeug/middleware/proxy_fix.py,sha256=l7LC_LDu0Yd4SvUxS5SFigAJMzcIOGm6LNKl9IXJBSU,6974
+werkzeug/middleware/shared_data.py,sha256=fXjrEkuqxUVLG1DLrOdQLc96QQdjftCBZ1oM5oK89h4,9528
+werkzeug/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+werkzeug/routing.py,sha256=zopf1P3MG-atd33YdBwIO49AnJ7nem5SKQig5FIhKEI,84346
+werkzeug/sansio/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+werkzeug/sansio/__pycache__/__init__.cpython-310.pyc,,
+werkzeug/sansio/__pycache__/multipart.cpython-310.pyc,,
+werkzeug/sansio/__pycache__/request.cpython-310.pyc,,
+werkzeug/sansio/__pycache__/response.cpython-310.pyc,,
+werkzeug/sansio/__pycache__/utils.cpython-310.pyc,,
+werkzeug/sansio/multipart.py,sha256=BRjBk_mCPjSJzwNVvBgmrJGk3QxA9pYfsgzFki28bxc,8751
+werkzeug/sansio/request.py,sha256=6xhrNJAqScdbBF5i7HN-Y_1XjJ04wQtBKOsZuCy0AYw,20176
+werkzeug/sansio/response.py,sha256=zvCq9HSBBZGBd5Gg412BY9RZIwnKsJl5Kzfd3Kl9sSo,26098
+werkzeug/sansio/utils.py,sha256=V5v-UUnX8pm4RehP9Tt_NiUSOJGJGUvKjlW0eOIQldM,4164
+werkzeug/security.py,sha256=vrBofh4WZZoUo1eAdJ6F1DrzVRlYauGS2CUDYpbQKj8,4658
+werkzeug/serving.py,sha256=aL-dIwzwO_-UuUs0cKwYFOynUWVmYcaDjz713Wy_BHE,38337
+werkzeug/test.py,sha256=7Ur4IinGCk9k5WCNk6x-mr2JrnupvKRXt6n-qNfo9oE,47841
+werkzeug/testapp.py,sha256=p-2lMyvaHXzP1lau0tUAJTbW4STogoMpXFyCkeRBkAI,9397
+werkzeug/urls.py,sha256=Q9Si-eVh7yxk3rwkzrwGRm146FXVXgg9lBP3k0HUfVM,36600
+werkzeug/user_agent.py,sha256=WclZhpvgLurMF45hsioSbS75H1Zb4iMQGKN3_yZ2oKo,1420
+werkzeug/utils.py,sha256=5HGm_5WSKBTVVl8IgvA-b-jL7gjT-LHWXH0ZKzCCI0I,24932
+werkzeug/wrappers/__init__.py,sha256=kGyK7rOud3qCxll_jFyW15YarJhj1xtdf3ocx9ZheB8,120
+werkzeug/wrappers/__pycache__/__init__.cpython-310.pyc,,
+werkzeug/wrappers/__pycache__/request.cpython-310.pyc,,
+werkzeug/wrappers/__pycache__/response.cpython-310.pyc,,
+werkzeug/wrappers/request.py,sha256=UQ559KkGS0Po6HTBgvKMlk1_AsNw5zstzm8o_dRrfdQ,23415
+werkzeug/wrappers/response.py,sha256=c2HUXrrt5Sf8-XEB1fUXxm6jp7Lu80KR0A_tbQFvw1Q,34750
+werkzeug/wsgi.py,sha256=L7s5-Rlt7BRVEZ1m81MaenGfMDP7yL3p1Kxt9Yssqzg,33727

+ 5 - 0
rest_api_venv/Lib/site-packages/Werkzeug-2.1.2.dist-info/WHEEL

@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.37.1)
+Root-Is-Purelib: true
+Tag: py3-none-any
+

+ 1 - 0
rest_api_venv/Lib/site-packages/Werkzeug-2.1.2.dist-info/top_level.txt

@@ -0,0 +1 @@
+werkzeug

BIN
rest_api_venv/Lib/site-packages/__pycache__/_virtualenv.cpython-310.pyc


BIN
rest_api_venv/Lib/site-packages/__pycache__/gnupg.cpython-310.pyc


BIN
rest_api_venv/Lib/site-packages/__pycache__/six.cpython-310.pyc


BIN
rest_api_venv/Lib/site-packages/__pycache__/sshtunnel.cpython-310.pyc


BIN
rest_api_venv/Lib/site-packages/_cffi_backend.cp310-win_amd64.pyd


+ 187 - 0
rest_api_venv/Lib/site-packages/_distutils_hack/__init__.py

@@ -0,0 +1,187 @@
+# don't import any costly modules
+import sys
+import os
+
+
+is_pypy = '__pypy__' in sys.builtin_module_names
+
+
+def warn_distutils_present():
+    if 'distutils' not in sys.modules:
+        return
+    if is_pypy and sys.version_info < (3, 7):
+        # PyPy for 3.6 unconditionally imports distutils, so bypass the warning
+        # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250
+        return
+    import warnings
+    warnings.warn(
+        "Distutils was imported before Setuptools, but importing Setuptools "
+        "also replaces the `distutils` module in `sys.modules`. This may lead "
+        "to undesirable behaviors or errors. To avoid these issues, avoid "
+        "using distutils directly, ensure that setuptools is installed in the "
+        "traditional way (e.g. not an editable install), and/or make sure "
+        "that setuptools is always imported before distutils.")
+
+
+def clear_distutils():
+    if 'distutils' not in sys.modules:
+        return
+    import warnings
+    warnings.warn("Setuptools is replacing distutils.")
+    mods = [
+        name for name in sys.modules
+        if name == "distutils" or name.startswith("distutils.")
+    ]
+    for name in mods:
+        del sys.modules[name]
+
+
+def enabled():
+    """
+    Allow selection of distutils by environment variable.
+    """
+    which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'local')
+    return which == 'local'
+
+
+def ensure_local_distutils():
+    import importlib
+    clear_distutils()
+
+    # With the DistutilsMetaFinder in place,
+    # perform an import to cause distutils to be
+    # loaded from setuptools._distutils. Ref #2906.
+    with shim():
+        importlib.import_module('distutils')
+
+    # check that submodules load as expected
+    core = importlib.import_module('distutils.core')
+    assert '_distutils' in core.__file__, core.__file__
+    assert 'setuptools._distutils.log' not in sys.modules
+
+
+def do_override():
+    """
+    Ensure that the local copy of distutils is preferred over stdlib.
+
+    See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401
+    for more motivation.
+    """
+    if enabled():
+        warn_distutils_present()
+        ensure_local_distutils()
+
+
+class _TrivialRe:
+    def __init__(self, *patterns):
+        self._patterns = patterns
+
+    def match(self, string):
+        return all(pat in string for pat in self._patterns)
+
+
+class DistutilsMetaFinder:
+    def find_spec(self, fullname, path, target=None):
+        if path is not None:
+            return
+
+        method_name = 'spec_for_{fullname}'.format(**locals())
+        method = getattr(self, method_name, lambda: None)
+        return method()
+
+    def spec_for_distutils(self):
+        if self.is_cpython():
+            return
+
+        import importlib
+        import importlib.abc
+        import importlib.util
+
+        try:
+            mod = importlib.import_module('setuptools._distutils')
+        except Exception:
+            # There are a couple of cases where setuptools._distutils
+            # may not be present:
+            # - An older Setuptools without a local distutils is
+            #   taking precedence. Ref #2957.
+            # - Path manipulation during sitecustomize removes
+            #   setuptools from the path but only after the hook
+            #   has been loaded. Ref #2980.
+            # In either case, fall back to stdlib behavior.
+            return
+
+        class DistutilsLoader(importlib.abc.Loader):
+
+            def create_module(self, spec):
+                mod.__name__ = 'distutils'
+                return mod
+
+            def exec_module(self, module):
+                pass
+
+        return importlib.util.spec_from_loader(
+            'distutils', DistutilsLoader(), origin=mod.__file__
+        )
+
+    @staticmethod
+    def is_cpython():
+        """
+        Suppress supplying distutils for CPython (build and tests).
+        Ref #2965 and #3007.
+        """
+        return os.path.isfile('pybuilddir.txt')
+
+    def spec_for_pip(self):
+        """
+        Ensure stdlib distutils when running under pip.
+        See pypa/pip#8761 for rationale.
+        """
+        if self.pip_imported_during_build():
+            return
+        clear_distutils()
+        self.spec_for_distutils = lambda: None
+
+    @classmethod
+    def pip_imported_during_build(cls):
+        """
+        Detect if pip is being imported in a build script. Ref #2355.
+        """
+        import traceback
+        return any(
+            cls.frame_file_is_setup(frame)
+            for frame, line in traceback.walk_stack(None)
+        )
+
+    @staticmethod
+    def frame_file_is_setup(frame):
+        """
+        Return True if the indicated frame suggests a setup.py file.
+        """
+        # some frames may not have __file__ (#2940)
+        return frame.f_globals.get('__file__', '').endswith('setup.py')
+
+
+DISTUTILS_FINDER = DistutilsMetaFinder()
+
+
+def add_shim():
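+    # insert the finder only if it is not already on sys.meta_path (idempotent)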
+    DISTUTILS_FINDER in sys.meta_path or insert_shim()
+
+
+class shim:
+    def __enter__(self):
+        insert_shim()
+
+    def __exit__(self, exc, value, tb):
+        remove_shim()
+
+
+def insert_shim():
+    sys.meta_path.insert(0, DISTUTILS_FINDER)
+
+
+def remove_shim():
+    try:
+        sys.meta_path.remove(DISTUTILS_FINDER)
+    except ValueError:
+        pass

BIN
rest_api_venv/Lib/site-packages/_distutils_hack/__pycache__/__init__.cpython-310.pyc


BIN
rest_api_venv/Lib/site-packages/_distutils_hack/__pycache__/override.cpython-310.pyc


+ 1 - 0
rest_api_venv/Lib/site-packages/_distutils_hack/override.py

@@ -0,0 +1 @@
+__import__('_distutils_hack').do_override()

+ 1 - 0
rest_api_venv/Lib/site-packages/_virtualenv.pth

@@ -0,0 +1 @@
+import _virtualenv

+ 130 - 0
rest_api_venv/Lib/site-packages/_virtualenv.py

@@ -0,0 +1,130 @@
+"""Patches that are applied at runtime to the virtual environment"""
+# -*- coding: utf-8 -*-
+
+import os
+import sys
+
+VIRTUALENV_PATCH_FILE = os.path.join(__file__)
+
+
+def patch_dist(dist):
+    """
+    Distutils allows user to configure some arguments via a configuration file:
+    https://docs.python.org/3/install/index.html#distutils-configuration-files
+
+    Some of this arguments though don't make sense in context of the virtual environment files, let's fix them up.
+    """
+    # we cannot allow some install config as that would get packages installed outside of the virtual environment
+    old_parse_config_files = dist.Distribution.parse_config_files
+
+    def parse_config_files(self, *args, **kwargs):
+        result = old_parse_config_files(self, *args, **kwargs)
+        install = self.get_option_dict("install")
+
+        if "prefix" in install:  # the prefix governs where to install the libraries
+            install["prefix"] = VIRTUALENV_PATCH_FILE, os.path.abspath(sys.prefix)
+        for base in ("purelib", "platlib", "headers", "scripts", "data"):
+            key = "install_{}".format(base)
+            if key in install:  # do not allow global configs to hijack venv paths
+                install.pop(key, None)
+        return result
+
+    dist.Distribution.parse_config_files = parse_config_files
+
+
+# Import hook that patches some modules to ignore configuration values that break package installation in case
+# of virtual environments.
+_DISTUTILS_PATCH = "distutils.dist", "setuptools.dist"
+if sys.version_info > (3, 4):
+    # https://docs.python.org/3/library/importlib.html#setting-up-an-importer
+
+    class _Finder:
+        """A meta path finder that allows patching the imported distutils modules"""
+
+        fullname = None
+
+        # lock[0] is threading.Lock(), but initialized lazily to avoid importing threading very early at startup,
+        # because there are gevent-based applications that need to be first to import threading by themselves.
+        # See https://github.com/pypa/virtualenv/issues/1895 for details.
+        lock = []
+
+        def find_spec(self, fullname, path, target=None):
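+            # self.fullname acts as a re-entrancy guard: the find_spec call below re-triggers this finder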
+            if fullname in _DISTUTILS_PATCH and self.fullname is None:
+                # initialize lock[0] lazily
+                if len(self.lock) == 0:
+                    import threading
+
+                    lock = threading.Lock()
+                    # there is a possibility that two threads T1 and T2 run into find_spec simultaneously,
+                    # observe .lock as empty, and both enter this initialization. However, due to the GIL,
+                    # the list.append() operation is atomic, so only one of the threads will "win" and put the lock
+                    # (which every thread will then use) into .lock[0].
+                    # https://docs.python.org/3/faq/library.html#what-kinds-of-global-value-mutation-are-thread-safe
+                    self.lock.append(lock)
+
+                from functools import partial
+                from importlib.util import find_spec
+
+                with self.lock[0]:
+                    self.fullname = fullname
+                    try:
+                        spec = find_spec(fullname, path)
+                        if spec is not None:
+                            # https://www.python.org/dev/peps/pep-0451/#how-loading-will-work
+                            is_new_api = hasattr(spec.loader, "exec_module")
+                            func_name = "exec_module" if is_new_api else "load_module"
+                            old = getattr(spec.loader, func_name)
+                            func = self.exec_module if is_new_api else self.load_module
+                            if old is not func:
+                                try:
+                                    setattr(spec.loader, func_name, partial(func, old))
+                                except AttributeError:
+                                    pass  # C-extension loaders (e.g. zipimporter on Python < 3.7) are read-only
+                            return spec
+                    finally:
+                        self.fullname = None
+
+        @staticmethod
+        def exec_module(old, module):
+            old(module)
+            if module.__name__ in _DISTUTILS_PATCH:
+                patch_dist(module)
+
+        @staticmethod
+        def load_module(old, name):
+            module = old(name)
+            if module.__name__ in _DISTUTILS_PATCH:
+                patch_dist(module)
+            return module
+
+    sys.meta_path.insert(0, _Finder())
+else:
+    # https://www.python.org/dev/peps/pep-0302/
+    from imp import find_module
+    from pkgutil import ImpImporter, ImpLoader
+
+    class _VirtualenvImporter(object, ImpImporter):
+        def __init__(self, path=None):
+            object.__init__(self)
+            ImpImporter.__init__(self, path)
+
+        def find_module(self, fullname, path=None):
+            if fullname in _DISTUTILS_PATCH:
+                try:
+                    return _VirtualenvLoader(fullname, *find_module(fullname.split(".")[-1], path))
+                except ImportError:
+                    pass
+            return None
+
+    class _VirtualenvLoader(object, ImpLoader):
+        def __init__(self, fullname, file, filename, etc):
+            object.__init__(self)
+            ImpLoader.__init__(self, fullname, file, filename, etc)
+
+        def load_module(self, fullname):
+            module = super(_VirtualenvLoader, self).load_module(fullname)
+            patch_dist(module)
+            module.__loader__ = None  # distlib fallback
+            return module
+
+    sys.meta_path.append(_VirtualenvImporter())

+ 1 - 0
rest_api_venv/Lib/site-packages/bcrypt-3.2.2.dist-info/INSTALLER

@@ -0,0 +1 @@
+pip

+ 201 - 0
rest_api_venv/Lib/site-packages/bcrypt-3.2.2.dist-info/LICENSE

@@ -0,0 +1,201 @@
+                              Apache License
+                        Version 2.0, January 2004
+                     http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+   "License" shall mean the terms and conditions for use, reproduction,
+   and distribution as defined by Sections 1 through 9 of this document.
+
+   "Licensor" shall mean the copyright owner or entity authorized by
+   the copyright owner that is granting the License.
+
+   "Legal Entity" shall mean the union of the acting entity and all
+   other entities that control, are controlled by, or are under common
+   control with that entity. For the purposes of this definition,
+   "control" means (i) the power, direct or indirect, to cause the
+   direction or management of such entity, whether by contract or
+   otherwise, or (ii) ownership of fifty percent (50%) or more of the
+   outstanding shares, or (iii) beneficial ownership of such entity.
+
+   "You" (or "Your") shall mean an individual or Legal Entity
+   exercising permissions granted by this License.
+
+   "Source" form shall mean the preferred form for making modifications,
+   including but not limited to software source code, documentation
+   source, and configuration files.
+
+   "Object" form shall mean any form resulting from mechanical
+   transformation or translation of a Source form, including but
+   not limited to compiled object code, generated documentation,
+   and conversions to other media types.
+
+   "Work" shall mean the work of authorship, whether in Source or
+   Object form, made available under the License, as indicated by a
+   copyright notice that is included in or attached to the work
+   (an example is provided in the Appendix below).
+
+   "Derivative Works" shall mean any work, whether in Source or Object
+   form, that is based on (or derived from) the Work and for which the
+   editorial revisions, annotations, elaborations, or other modifications
+   represent, as a whole, an original work of authorship. For the purposes
+   of this License, Derivative Works shall not include works that remain
+   separable from, or merely link (or bind by name) to the interfaces of,
+   the Work and Derivative Works thereof.
+
+   "Contribution" shall mean any work of authorship, including
+   the original version of the Work and any modifications or additions
+   to that Work or Derivative Works thereof, that is intentionally
+   submitted to Licensor for inclusion in the Work by the copyright owner
+   or by an individual or Legal Entity authorized to submit on behalf of
+   the copyright owner. For the purposes of this definition, "submitted"
+   means any form of electronic, verbal, or written communication sent
+   to the Licensor or its representatives, including but not limited to
+   communication on electronic mailing lists, source code control systems,
+   and issue tracking systems that are managed by, or on behalf of, the
+   Licensor for the purpose of discussing and improving the Work, but
+   excluding communication that is conspicuously marked or otherwise
+   designated in writing by the copyright owner as "Not a Contribution."
+
+   "Contributor" shall mean Licensor and any individual or Legal Entity
+   on behalf of whom a Contribution has been received by Licensor and
+   subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+   this License, each Contributor hereby grants to You a perpetual,
+   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+   copyright license to reproduce, prepare Derivative Works of,
+   publicly display, publicly perform, sublicense, and distribute the
+   Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+   this License, each Contributor hereby grants to You a perpetual,
+   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+   (except as stated in this section) patent license to make, have made,
+   use, offer to sell, sell, import, and otherwise transfer the Work,
+   where such license applies only to those patent claims licensable
+   by such Contributor that are necessarily infringed by their
+   Contribution(s) alone or by combination of their Contribution(s)
+   with the Work to which such Contribution(s) was submitted. If You
+   institute patent litigation against any entity (including a
+   cross-claim or counterclaim in a lawsuit) alleging that the Work
+   or a Contribution incorporated within the Work constitutes direct
+   or contributory patent infringement, then any patent licenses
+   granted to You under this License for that Work shall terminate
+   as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+   Work or Derivative Works thereof in any medium, with or without
+   modifications, and in Source or Object form, provided that You
+   meet the following conditions:
+
+   (a) You must give any other recipients of the Work or
+       Derivative Works a copy of this License; and
+
+   (b) You must cause any modified files to carry prominent notices
+       stating that You changed the files; and
+
+   (c) You must retain, in the Source form of any Derivative Works
+       that You distribute, all copyright, patent, trademark, and
+       attribution notices from the Source form of the Work,
+       excluding those notices that do not pertain to any part of
+       the Derivative Works; and
+
+   (d) If the Work includes a "NOTICE" text file as part of its
+       distribution, then any Derivative Works that You distribute must
+       include a readable copy of the attribution notices contained
+       within such NOTICE file, excluding those notices that do not
+       pertain to any part of the Derivative Works, in at least one
+       of the following places: within a NOTICE text file distributed
+       as part of the Derivative Works; within the Source form or
+       documentation, if provided along with the Derivative Works; or,
+       within a display generated by the Derivative Works, if and
+       wherever such third-party notices normally appear. The contents
+       of the NOTICE file are for informational purposes only and
+       do not modify the License. You may add Your own attribution
+       notices within Derivative Works that You distribute, alongside
+       or as an addendum to the NOTICE text from the Work, provided
+       that such additional attribution notices cannot be construed
+       as modifying the License.
+
+   You may add Your own copyright statement to Your modifications and
+   may provide additional or different license terms and conditions
+   for use, reproduction, or distribution of Your modifications, or
+   for any such Derivative Works as a whole, provided Your use,
+   reproduction, and distribution of the Work otherwise complies with
+   the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+   any Contribution intentionally submitted for inclusion in the Work
+   by You to the Licensor shall be under the terms and conditions of
+   this License, without any additional terms or conditions.
+   Notwithstanding the above, nothing herein shall supersede or modify
+   the terms of any separate license agreement you may have executed
+   with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+   names, trademarks, service marks, or product names of the Licensor,
+   except as required for reasonable and customary use in describing the
+   origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+   agreed to in writing, Licensor provides the Work (and each
+   Contributor provides its Contributions) on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+   implied, including, without limitation, any warranties or conditions
+   of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+   PARTICULAR PURPOSE. You are solely responsible for determining the
+   appropriateness of using or redistributing the Work and assume any
+   risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+   whether in tort (including negligence), contract, or otherwise,
+   unless required by applicable law (such as deliberate and grossly
+   negligent acts) or agreed to in writing, shall any Contributor be
+   liable to You for damages, including any direct, indirect, special,
+   incidental, or consequential damages of any character arising as a
+   result of this License or out of the use or inability to use the
+   Work (including but not limited to damages for loss of goodwill,
+   work stoppage, computer failure or malfunction, or any and all
+   other commercial damages or losses), even if such Contributor
+   has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+   the Work or Derivative Works thereof, You may choose to offer,
+   and charge a fee for, acceptance of support, warranty, indemnity,
+   or other liability obligations and/or rights consistent with this
+   License. However, in accepting such obligations, You may act only
+   on Your own behalf and on Your sole responsibility, not on behalf
+   of any other Contributor, and only if You agree to indemnify,
+   defend, and hold each Contributor harmless for any liability
+   incurred by, or claims asserted against, such Contributor by reason
+   of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+   To apply the Apache License to your work, attach the following
+   boilerplate notice, with the fields enclosed by brackets "[]"
+   replaced with your own identifying information. (Don't include
+   the brackets!)  The text should be enclosed in the appropriate
+   comment syntax for the file format. We also recommend that a
+   file or class name and description of purpose be included on the
+   same "printed page" as the copyright notice for easier
+   identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.

+ 271 - 0
rest_api_venv/Lib/site-packages/bcrypt-3.2.2.dist-info/METADATA

@@ -0,0 +1,271 @@
+Metadata-Version: 2.1
+Name: bcrypt
+Version: 3.2.2
+Summary: Modern password hashing for your software and your servers
+Home-page: https://github.com/pyca/bcrypt/
+Author: The Python Cryptographic Authority developers
+Author-email: cryptography-dev@python.org
+License: Apache License, Version 2.0
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Requires-Python: >=3.6
+Description-Content-Type: text/x-rst
+Requires-Dist: cffi (>=1.1)
+Provides-Extra: tests
+Requires-Dist: pytest (!=3.3.0,>=3.2.1) ; extra == 'tests'
+Provides-Extra: typecheck
+Requires-Dist: mypy ; extra == 'typecheck'
+
+bcrypt
+======
+
+.. image:: https://img.shields.io/pypi/v/bcrypt.svg
+    :target: https://pypi.org/project/bcrypt/
+    :alt: Latest Version
+
+.. image:: https://github.com/pyca/bcrypt/workflows/CI/badge.svg?branch=main
+    :target: https://github.com/pyca/bcrypt/actions?query=workflow%3ACI+branch%3Amain
+
+Good password hashing for your software and your servers
+
+
+Installation
+============
+
+To install bcrypt, simply:
+
+.. code:: bash
+
+    $ pip install bcrypt
+
+Note that bcrypt should build very easily on Linux provided you have a C compiler, headers for Python (if you're not using pypy), and headers for the libffi libraries available on your system.
+
+For Debian and Ubuntu, the following command will ensure that the required dependencies are installed:
+
+.. code:: bash
+
+    $ sudo apt-get install build-essential libffi-dev python-dev
+
+For Fedora and RHEL-derivatives, the following command will ensure that the required dependencies are installed:
+
+.. code:: bash
+
+    $ sudo yum install gcc libffi-devel python-devel
+
+For Alpine, the following command will ensure that the required dependencies are installed:
+
+.. code:: bash
+
+    $ apk add --update musl-dev gcc libffi-dev
+
+
+Alternatives
+============
+
+While bcrypt remains a good choice for password storage, depending on your specific use case you may also want to consider using scrypt (either via the `standard library`_ or `cryptography`_) or argon2id via `argon2_cffi`_.
+
+Changelog
+=========
+
+3.2.2
+-----
+
+* Fixed packaging of ``py.typed`` files in wheels so that ``mypy`` works.
+
+3.2.1
+-----
+
+* Added support for compilation on z/OS
+* The next release of ``bcrypt`` will be 4.0 and will require Rust at
+  compile time for users building from source. There will be no additional
+  requirement for users who are installing from wheels. Users on most
+  platforms will be able to obtain a wheel by making sure they have an up to
+  date ``pip``. The minimum supported Rust version will be 1.56.0.
+* This will be the final release for which we ship ``manylinux2010`` wheels.
+  Going forward the minimum supported manylinux ABI for our wheels will be
+  ``manylinux2014``. The vast majority of users will continue to receive
+  ``manylinux`` wheels provided they have an up to date ``pip``.
+
+
+3.2.0
+-----
+
+* Added typehints for library functions.
+* Dropped support for Python versions less than 3.6 (2.7, 3.4, 3.5).
+* Shipped ``abi3`` Windows wheels (requires pip >= 20).
+
+3.1.7
+-----
+
+* Set a ``setuptools`` lower bound for PEP517 wheel building.
+* We no longer distribute 32-bit ``manylinux1`` wheels. Continuing to produce
+  them was a maintenance burden.
+
+3.1.6
+-----
+
+* Added support for compilation on Haiku.
+
+3.1.5
+-----
+
+* Added support for compilation on AIX.
+* Dropped Python 2.6 and 3.3 support.
+* Switched to using ``abi3`` wheels for Python 3. If you are not getting a
+  wheel on a compatible platform please upgrade your ``pip`` version.
+
+3.1.4
+-----
+
+* Fixed compilation with mingw and on illumos.
+
+3.1.3
+-----
+* Fixed a compilation issue on Solaris.
+* Added a warning when using too few rounds with ``kdf``.
+
+3.1.2
+-----
+* Fixed a compile issue affecting big endian platforms.
+* Fixed invalid escape sequence warnings on Python 3.6.
+* Fixed building in non-UTF8 environments on Python 2.
+
+3.1.1
+-----
+* Resolved a ``UserWarning`` when used with ``cffi`` 1.8.3.
+
+3.1.0
+-----
+* Added support for ``checkpw``, a convenience method for verifying a password.
+* Ensure that you get a ``$2y$`` hash when you input a ``$2y$`` salt.
+* Fixed a regression where ``$2a`` hashes were vulnerable to a wraparound bug.
+* Fixed compilation under Alpine Linux.
+
+3.0.0
+-----
+* Switched the C backend to code obtained from the OpenBSD project rather than
+  openwall.
+* Added support for ``bcrypt_pbkdf`` via the ``kdf`` function.
+
+2.0.0
+-----
+* Added support for an adjustable prefix when calling ``gensalt``.
+* Switched to CFFI 1.0+
+
+Usage
+-----
+
+Password Hashing
+~~~~~~~~~~~~~~~~
+
+Hashing and then later checking that a password matches the previous hashed
+password is very simple:
+
+.. code:: pycon
+
+    >>> import bcrypt
+    >>> password = b"super secret password"
+    >>> # Hash a password for the first time, with a randomly-generated salt
+    >>> hashed = bcrypt.hashpw(password, bcrypt.gensalt())
+    >>> # Check that an unhashed password matches one that has previously been
+    >>> # hashed
+    >>> if bcrypt.checkpw(password, hashed):
+    ...     print("It Matches!")
+    ... else:
+    ...     print("It Does not Match :(")
+
+KDF
+~~~
+
+As of 3.0.0 ``bcrypt`` now offers a ``kdf`` function which does ``bcrypt_pbkdf``.
+This KDF is used in OpenSSH's newer encrypted private key format.
+
+.. code:: pycon
+
+    >>> import bcrypt
+    >>> key = bcrypt.kdf(
+    ...     password=b'password',
+    ...     salt=b'salt',
+    ...     desired_key_bytes=32,
+    ...     rounds=100)
+
+
+Adjustable Work Factor
+~~~~~~~~~~~~~~~~~~~~~~
+One of bcrypt's features is an adjustable logarithmic work factor. To adjust
+the work factor, pass the desired number of rounds to
+``bcrypt.gensalt(rounds=12)`` (``rounds`` defaults to 12):
+
+.. code:: pycon
+
+    >>> import bcrypt
+    >>> password = b"super secret password"
+    >>> # Hash a password for the first time, with a certain number of rounds
+    >>> hashed = bcrypt.hashpw(password, bcrypt.gensalt(14))
+    >>> # Check that an unhashed password matches one that has previously been
+    >>> #   hashed
+    >>> if bcrypt.checkpw(password, hashed):
+    ...     print("It Matches!")
+    ... else:
+    ...     print("It Does not Match :(")
+
+
+Adjustable Prefix
+~~~~~~~~~~~~~~~~~
+
+Another one of bcrypt's features is an adjustable prefix that lets you define which
+libraries you'll remain compatible with. To adjust it, pass either ``b"2a"`` or
+``b"2b"`` (the default) as the ``prefix`` argument to ``bcrypt.gensalt()``.
+
+As of 3.0.0 the ``$2y$`` prefix is still supported in ``hashpw`` but deprecated.
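+
+A minimal sketch of the prefix behaviour described above:
+
+.. code:: pycon
+
+    >>> import bcrypt
+    >>> password = b"super secret password"
+    >>> # request a $2a$ salt instead of the default $2b$
+    >>> salt = bcrypt.gensalt(prefix=b"2a")
+    >>> salt.startswith(b"$2a$")
+    True
+    >>> hashed = bcrypt.hashpw(password, salt)
+    >>> hashed.startswith(b"$2a$")
+    True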
+
+Maximum Password Length
+~~~~~~~~~~~~~~~~~~~~~~~
+
+The bcrypt algorithm only handles passwords up to 72 characters; any characters
+beyond that are ignored. To work around this, a common approach is to hash a
+password with a cryptographic hash (such as ``sha256``) and then base64
+encode it to prevent NULL byte problems before hashing the result with
+``bcrypt``:
+
+.. code:: pycon
+
+    >>> password = b"an incredibly long password" * 10
+    >>> hashed = bcrypt.hashpw(
+    ...     base64.b64encode(hashlib.sha256(password).digest()),
+    ...     bcrypt.gensalt()
+    ... )
+
+Compatibility
+-------------
+
+This library should be compatible with py-bcrypt and it will run on Python
+3.6+ and PyPy 3.
+
+C Code
+------
+
+This library uses code from OpenBSD.
+
+Security
+--------
+
+``bcrypt`` follows the `same security policy as cryptography`_; if you
+identify a vulnerability, we ask you to contact us privately.
+
+.. _`same security policy as cryptography`: https://cryptography.io/en/latest/security.html
+.. _`standard library`: https://docs.python.org/3/library/hashlib.html#hashlib.scrypt
+.. _`argon2_cffi`: https://argon2-cffi.readthedocs.io
+.. _`cryptography`: https://cryptography.io/en/latest/hazmat/primitives/key-derivation-functions/#cryptography.hazmat.primitives.kdf.scrypt.Scrypt
+
+

+ 13 - 0
rest_api_venv/Lib/site-packages/bcrypt-3.2.2.dist-info/RECORD

@@ -0,0 +1,13 @@
+bcrypt-3.2.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+bcrypt-3.2.2.dist-info/LICENSE,sha256=GsUKWr1c_1Mx1Zko2DNptmsAmaiPARHcIXVSazxmbew,11051
+bcrypt-3.2.2.dist-info/METADATA,sha256=8N_0mlqmjxBLT_9IIrIJ52PjeZfif62oFT6gtLc7BWc,8320
+bcrypt-3.2.2.dist-info/RECORD,,
+bcrypt-3.2.2.dist-info/WHEEL,sha256=nYCSW5p8tLyDU-wbqo3uRlCluAzwxLmyyRK2pVs4-Ag,100
+bcrypt-3.2.2.dist-info/top_level.txt,sha256=igJttN6fNWPEzk4lnCMzlitVT_1PlLVJzxzogMWGARU,15
+bcrypt/__about__.py,sha256=iAISuy3kRAR94iNKmiK1giadfXjdfyAU5Px8IB1diHQ,1361
+bcrypt/__init__.py,sha256=PsRRCygPdTjKbtD7VvpVCdmxGvVML_Urqt9C-f8bWiE,5758
+bcrypt/__pycache__/__about__.cpython-310.pyc,,
+bcrypt/__pycache__/__init__.cpython-310.pyc,,
+bcrypt/_bcrypt.pyd,sha256=vATnUn-Ys4vvto6W_qHSXrYeNgOYU50m2M_Ne5EOCmE,31744
+bcrypt/_bcrypt.pyi,sha256=SReALz_knzTvhNibAfUsTBvXylYzlDL94uT_kZCPLqg,51
+bcrypt/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0

+ 5 - 0
rest_api_venv/Lib/site-packages/bcrypt-3.2.2.dist-info/WHEEL

@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.37.1)
+Root-Is-Purelib: false
+Tag: cp36-abi3-win_amd64
+

+ 2 - 0
rest_api_venv/Lib/site-packages/bcrypt-3.2.2.dist-info/top_level.txt

@@ -0,0 +1,2 @@
+_bcrypt
+bcrypt

+ 41 - 0
rest_api_venv/Lib/site-packages/bcrypt/__about__.py

@@ -0,0 +1,41 @@
+# Author:: Donald Stufft (<donald@stufft.io>)
+# Copyright:: Copyright (c) 2013 Donald Stufft
+# License:: Apache License, Version 2.0
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import unicode_literals
+
+__all__ = [
+    "__title__",
+    "__summary__",
+    "__uri__",
+    "__version__",
+    "__author__",
+    "__email__",
+    "__license__",
+    "__copyright__",
+]
+
+__title__ = "bcrypt"
+__summary__ = "Modern password hashing for your software and your servers"
+__uri__ = "https://github.com/pyca/bcrypt/"
+
+__version__ = "3.2.2"
+
+__author__ = "The Python Cryptographic Authority developers"
+__email__ = "cryptography-dev@python.org"
+
+__license__ = "Apache License, Version 2.0"
+__copyright__ = "Copyright 2013-2022 {0}".format(__author__)

+ 171 - 0
rest_api_venv/Lib/site-packages/bcrypt/__init__.py

@@ -0,0 +1,171 @@
+# Author:: Donald Stufft (<donald@stufft.io>)
+# Copyright:: Copyright (c) 2013 Donald Stufft
+# License:: Apache License, Version 2.0
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import absolute_import
+from __future__ import division
+
+import hmac
+import os
+import re
+import warnings
+
+from .__about__ import (
+    __author__,
+    __copyright__,
+    __email__,
+    __license__,
+    __summary__,
+    __title__,
+    __uri__,
+    __version__,
+)
+from . import _bcrypt  # noqa: I100
+
+
+__all__ = [
+    "__title__",
+    "__summary__",
+    "__uri__",
+    "__version__",
+    "__author__",
+    "__email__",
+    "__license__",
+    "__copyright__",
+    "gensalt",
+    "hashpw",
+    "kdf",
+    "checkpw",
+]
+
+
+_normalize_re = re.compile(rb"^\$2y\$")
+
+
+def gensalt(rounds: int = 12, prefix: bytes = b"2b") -> bytes:
+    if prefix not in (b"2a", b"2b"):
+        raise ValueError("Supported prefixes are b'2a' or b'2b'")
+
+    if rounds < 4 or rounds > 31:
+        raise ValueError("Invalid rounds")
+
+    salt = os.urandom(16)
+    output = _bcrypt.ffi.new("char[]", 30)
+    _bcrypt.lib.encode_base64(output, salt, len(salt))
+
+    return (
+        b"$"
+        + prefix
+        + b"$"
+        + ("%2.2u" % rounds).encode("ascii")
+        + b"$"
+        + _bcrypt.ffi.string(output)
+    )
+
+
+def hashpw(password: bytes, salt: bytes) -> bytes:
+    if isinstance(password, str) or isinstance(salt, str):
+        raise TypeError("Strings must be encoded before hashing")
+
+    if b"\x00" in password:
+        raise ValueError("password may not contain NUL bytes")
+
+    # bcrypt originally suffered from a wraparound bug:
+    # http://www.openwall.com/lists/oss-security/2012/01/02/4
+    # This bug was corrected in the OpenBSD source by truncating inputs to 72
+    # bytes on the updated prefix $2b$, but leaving $2a$ unchanged for
+    # compatibility. However, pyca/bcrypt 2.0.0 *did* correctly truncate inputs
+    # on $2a$, so we do it here to preserve compatibility with 2.0.0
+    password = password[:72]
+
+    # When the original 8-bit bug was found, the original library we supported
+    # added a new prefix, $2y$, that fixes it. This prefix is exactly the same
+    # as the $2b$ prefix added by OpenBSD other than the name. Since the
+    # OpenBSD library does not support the $2y$ prefix, if the salt given to us
+    # is for the $2y$ prefix, we'll just munge it so that it's a $2b$ prior to
+    # passing it into the C library.
+    original_salt, salt = salt, _normalize_re.sub(b"$2b$", salt)
+
+    hashed = _bcrypt.ffi.new("char[]", 128)
+    retval = _bcrypt.lib.bcrypt_hashpass(password, salt, hashed, len(hashed))
+
+    if retval != 0:
+        raise ValueError("Invalid salt")
+
+    # Now that we've gotten our hashed password, we want to ensure that the
+    # prefix we return is the one that was passed in, so we'll use the prefix
+    # from the original salt and concatenate that with the return value (minus
+    # the return value's prefix). This will ensure that if someone passed in a
+    # salt with a $2y$ prefix, that they get back a hash with a $2y$ prefix
+    # even though we munged it to $2b$.
+    return original_salt[:4] + _bcrypt.ffi.string(hashed)[4:]
+
+
+def checkpw(password: bytes, hashed_password: bytes) -> bool:
+    if isinstance(password, str) or isinstance(hashed_password, str):
+        raise TypeError("Strings must be encoded before checking")
+
+    if b"\x00" in password or b"\x00" in hashed_password:
+        raise ValueError(
+            "password and hashed_password may not contain NUL bytes"
+        )
+
+    ret = hashpw(password, hashed_password)
+    return hmac.compare_digest(ret, hashed_password)
+
+
+def kdf(
+    password: bytes,
+    salt: bytes,
+    desired_key_bytes: int,
+    rounds: int,
+    ignore_few_rounds: bool = False,
+) -> bytes:
+    if isinstance(password, str) or isinstance(salt, str):
+        raise TypeError("Strings must be encoded before hashing")
+
+    if len(password) == 0 or len(salt) == 0:
+        raise ValueError("password and salt must not be empty")
+
+    if desired_key_bytes <= 0 or desired_key_bytes > 512:
+        raise ValueError("desired_key_bytes must be 1-512")
+
+    if rounds < 1:
+        raise ValueError("rounds must be 1 or more")
+
+    if rounds < 50 and not ignore_few_rounds:
+        # They probably think bcrypt.kdf()'s rounds parameter is logarithmic,
+        # expecting this value to be slow enough (it probably would be if this
+        # were bcrypt). Emit a warning.
+        warnings.warn(
+            (
+                "Warning: bcrypt.kdf() called with only {0} round(s). "
+                "This few is not secure: the parameter is linear, like PBKDF2."
+            ).format(rounds),
+            UserWarning,
+            stacklevel=2,
+        )
+
+    key = _bcrypt.ffi.new("uint8_t[]", desired_key_bytes)
+    res = _bcrypt.lib.bcrypt_pbkdf(
+        password, len(password), salt, len(salt), key, len(key), rounds
+    )
+    _bcrypt_assert(res == 0)
+
+    return _bcrypt.ffi.buffer(key, desired_key_bytes)[:]
+
+
+def _bcrypt_assert(ok: bool) -> None:
+    if not ok:
+        raise SystemError("bcrypt assertion failed")

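For reference, the vendored bcrypt module added above is normally driven through gensalt(), hashpw() and checkpw(). A minimal usage sketch, assuming the default 12-round work factor is acceptable (this snippet is illustrative only and is not part of the committed files):

    import bcrypt

    # Inputs must be bytes -- hashpw()/checkpw() raise TypeError on str, as seen above.
    password = b"correct horse battery staple"
    hashed = bcrypt.hashpw(password, bcrypt.gensalt(rounds=12))

    # checkpw() re-hashes and compares with hmac.compare_digest (constant time).
    assert bcrypt.checkpw(password, hashed)
    assert not bcrypt.checkpw(b"wrong guess", hashed)

kdf() is a separate key-derivation helper whose rounds parameter is linear rather than logarithmic, which is why the module emits a UserWarning below 50 rounds.
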
BIN
rest_api_venv/Lib/site-packages/bcrypt/__pycache__/__about__.cpython-310.pyc


BIN
rest_api_venv/Lib/site-packages/bcrypt/__pycache__/__init__.cpython-310.pyc


BIN
rest_api_venv/Lib/site-packages/bcrypt/_bcrypt.pyd


+ 4 - 0
rest_api_venv/Lib/site-packages/bcrypt/_bcrypt.pyi

@@ -0,0 +1,4 @@
+import typing
+
+ffi: typing.Any
+lib: typing.Any

+ 0 - 0
rest_api_venv/Lib/site-packages/bcrypt/py.typed


+ 1 - 0
rest_api_venv/Lib/site-packages/cffi-1.15.0.dist-info/INSTALLER

@@ -0,0 +1 @@
+pip

+ 26 - 0
rest_api_venv/Lib/site-packages/cffi-1.15.0.dist-info/LICENSE

@@ -0,0 +1,26 @@
+
+Except when otherwise stated (look for LICENSE files in directories or
+information at the beginning of each file) all software and
+documentation is licensed as follows: 
+
+    The MIT License
+
+    Permission is hereby granted, free of charge, to any person 
+    obtaining a copy of this software and associated documentation 
+    files (the "Software"), to deal in the Software without 
+    restriction, including without limitation the rights to use, 
+    copy, modify, merge, publish, distribute, sublicense, and/or 
+    sell copies of the Software, and to permit persons to whom the 
+    Software is furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included 
+    in all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS 
+    OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL 
+    THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 
+    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER 
+    DEALINGS IN THE SOFTWARE.
+

+ 37 - 0
rest_api_venv/Lib/site-packages/cffi-1.15.0.dist-info/METADATA

@@ -0,0 +1,37 @@
+Metadata-Version: 2.1
+Name: cffi
+Version: 1.15.0
+Summary: Foreign Function Interface for Python calling C code.
+Home-page: http://cffi.readthedocs.org
+Author: Armin Rigo, Maciej Fijalkowski
+Author-email: python-cffi@googlegroups.com
+License: MIT
+Platform: UNKNOWN
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: License :: OSI Approved :: MIT License
+License-File: LICENSE
+Requires-Dist: pycparser
+
+
+CFFI
+====
+
+Foreign Function Interface for Python calling C code.
+Please see the `Documentation <http://cffi.readthedocs.org/>`_.
+
+Contact
+-------
+
+`Mailing list <https://groups.google.com/forum/#!forum/python-cffi>`_
+
+

+ 44 - 0
rest_api_venv/Lib/site-packages/cffi-1.15.0.dist-info/RECORD

@@ -0,0 +1,44 @@
+_cffi_backend.cp310-win_amd64.pyd,sha256=V2PB0pkDVnzeTUY1XTpzgNEBQ1Q5hspO6_yk0i2ZHj4,183296
+cffi-1.15.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+cffi-1.15.0.dist-info/LICENSE,sha256=esEZUOct9bRcUXFqeyLnuzSzJNZ_Bl4pOBUt1HLEgV8,1320
+cffi-1.15.0.dist-info/METADATA,sha256=eo1521k3Cf0aEkta5nFxpMpLkd-kufSatsH1IAJqd-Y,1164
+cffi-1.15.0.dist-info/RECORD,,
+cffi-1.15.0.dist-info/WHEEL,sha256=C6CHup2HLC2Rld8AL5u9w89MYULjdaP5k0k7SG83CcI,102
+cffi-1.15.0.dist-info/entry_points.txt,sha256=Q9f5C9IpjYxo0d2PK9eUcnkgxHc9pHWwjEMaANPKNCI,76
+cffi-1.15.0.dist-info/top_level.txt,sha256=rE7WR3rZfNKxWI9-jn6hsHCAl7MDkB-FmuQbxWjFehQ,19
+cffi/__init__.py,sha256=fUtCZsANzF8L4lqN3ntzr_g_gJYpJd7gLKBMXnIeFBs,527
+cffi/__pycache__/__init__.cpython-310.pyc,,
+cffi/__pycache__/api.cpython-310.pyc,,
+cffi/__pycache__/backend_ctypes.cpython-310.pyc,,
+cffi/__pycache__/cffi_opcode.cpython-310.pyc,,
+cffi/__pycache__/commontypes.cpython-310.pyc,,
+cffi/__pycache__/cparser.cpython-310.pyc,,
+cffi/__pycache__/error.cpython-310.pyc,,
+cffi/__pycache__/ffiplatform.cpython-310.pyc,,
+cffi/__pycache__/lock.cpython-310.pyc,,
+cffi/__pycache__/model.cpython-310.pyc,,
+cffi/__pycache__/pkgconfig.cpython-310.pyc,,
+cffi/__pycache__/recompiler.cpython-310.pyc,,
+cffi/__pycache__/setuptools_ext.cpython-310.pyc,,
+cffi/__pycache__/vengine_cpy.cpython-310.pyc,,
+cffi/__pycache__/vengine_gen.cpython-310.pyc,,
+cffi/__pycache__/verifier.cpython-310.pyc,,
+cffi/_cffi_errors.h,sha256=G0bGOb-6SNIO0UY8KEN3cM40Yd1JuR5bETQ8Ni5PxWY,4057
+cffi/_cffi_include.h,sha256=H7cgdZR-POwmUFrIup4jOGzmje8YoQHhN99gVFg7w08,15185
+cffi/_embedding.h,sha256=chQESHjLOleuOiPVeOtKsofXhG44yFyZkIfOCevMAAg,18108
+cffi/api.py,sha256=Xs_dAN5x1ehfnn_F9ZTdA3Ce0bmPrqeIOkO4Ya1tfbQ,43029
+cffi/backend_ctypes.py,sha256=BHN3q2giL2_Y8wMDST2CIcc_qoMrs65qV9Ob5JvxBZ4,43575
+cffi/cffi_opcode.py,sha256=57P2NHLZkuTWueZybu5iosWljb6ocQmUXzGrCplrnyE,5911
+cffi/commontypes.py,sha256=mEZD4g0qtadnv6O6CEXvMQaJ1K6SRbG5S1h4YvVZHOU,2769
+cffi/cparser.py,sha256=CwVk2V3ATYlCoywG6zN35w6UQ7zj2EWX68KjoJp2Mzk,45237
+cffi/error.py,sha256=Bka7fSV22aIglTQDPIDfpnxTc1aWZLMQdQOJY-h_PUA,908
+cffi/ffiplatform.py,sha256=qioydJeC63dEvrQ3ht5_BPmSs7wzzzuWnZAJtfhic7I,4173
+cffi/lock.py,sha256=vnbsel7392Ib8gGBifIfAfc7MHteSwd3nP725pvc25Q,777
+cffi/model.py,sha256=HRD0WEYHF2Vr6RjS-4wyncElrZxU2256zY0fbMkSKec,22385
+cffi/parse_c_type.h,sha256=fKYNqWNX5f9kZNNhbXcRLTOlpRGRhh8eCLyHmTXIZnQ,6157
+cffi/pkgconfig.py,sha256=9zDcDf0XKIJaxFHLg7e-W8-Xb8Yq5hdhqH7kLg-ugRo,4495
+cffi/recompiler.py,sha256=LmEalHqs90dgp5od-BiZizsu2M2WJV7S6ctNSxj3FsA,66149
+cffi/setuptools_ext.py,sha256=8y14TOlRAkgdczmwtPOahyFXJHNyIqhLjUHMYQmjOHs,9150
+cffi/vengine_cpy.py,sha256=ukugKCIsURxJzHxlxS265tGjQfPTFDbThwsqBrwKh-A,44396
+cffi/vengine_gen.py,sha256=mykUhLFJIcV6AyQ5cMJ3n_7dbqw0a9WEjXW0E-WfgiI,27359
+cffi/verifier.py,sha256=AZuuR7MxjMYZc8IsZjGsF8mdGajCsOY60AZLwZZ_Z2Y,11560

+ 5 - 0
rest_api_venv/Lib/site-packages/cffi-1.15.0.dist-info/WHEEL

@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.37.0)
+Root-Is-Purelib: false
+Tag: cp310-cp310-win_amd64
+

+ 3 - 0
rest_api_venv/Lib/site-packages/cffi-1.15.0.dist-info/entry_points.txt

@@ -0,0 +1,3 @@
+[distutils.setup_keywords]
+cffi_modules = cffi.setuptools_ext:cffi_modules
+

+ 2 - 0
rest_api_venv/Lib/site-packages/cffi-1.15.0.dist-info/top_level.txt

@@ -0,0 +1,2 @@
+_cffi_backend
+cffi

+ 14 - 0
rest_api_venv/Lib/site-packages/cffi/__init__.py

@@ -0,0 +1,14 @@
+__all__ = ['FFI', 'VerificationError', 'VerificationMissing', 'CDefError',
+           'FFIError']
+
+from .api import FFI
+from .error import CDefError, FFIError, VerificationError, VerificationMissing
+from .error import PkgConfigError
+
+__version__ = "1.15.0"
+__version_info__ = (1, 15, 0)
+
+# The verifier module file names are based on the CRC32 of a string that
+# contains the following version number.  It may be older than __version__
+# if nothing is clearly incompatible.
+__version_verifier_modules__ = "0.8.6"

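cffi is the FFI layer the _bcrypt extension above is generated with. At the ABI level, the package exported by this __init__.py is used roughly as follows; the strlen() declaration is only an illustration, and dlopen(None) resolves the C standard library on POSIX (on Windows an explicit DLL name would be needed):

    from cffi import FFI

    ffi = FFI()
    ffi.cdef("size_t strlen(const char *);")   # declare the C signature we want to call
    C = ffi.dlopen(None)                       # load the C standard library (POSIX)
    n = C.strlen(ffi.new("char[]", b"hello"))  # ffi.new() builds a NUL-terminated char array
    print(n)                                   # -> 5
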
BIN
rest_api_venv/Lib/site-packages/cffi/__pycache__/__init__.cpython-310.pyc


BIN
rest_api_venv/Lib/site-packages/cffi/__pycache__/api.cpython-310.pyc


BIN
rest_api_venv/Lib/site-packages/cffi/__pycache__/backend_ctypes.cpython-310.pyc


BIN
rest_api_venv/Lib/site-packages/cffi/__pycache__/cffi_opcode.cpython-310.pyc


BIN
rest_api_venv/Lib/site-packages/cffi/__pycache__/commontypes.cpython-310.pyc


BIN
rest_api_venv/Lib/site-packages/cffi/__pycache__/cparser.cpython-310.pyc


BIN
rest_api_venv/Lib/site-packages/cffi/__pycache__/error.cpython-310.pyc


BIN
rest_api_venv/Lib/site-packages/cffi/__pycache__/ffiplatform.cpython-310.pyc


BIN
rest_api_venv/Lib/site-packages/cffi/__pycache__/lock.cpython-310.pyc


BIN
rest_api_venv/Lib/site-packages/cffi/__pycache__/model.cpython-310.pyc


BIN
rest_api_venv/Lib/site-packages/cffi/__pycache__/pkgconfig.cpython-310.pyc


BIN
rest_api_venv/Lib/site-packages/cffi/__pycache__/recompiler.cpython-310.pyc


BIN
rest_api_venv/Lib/site-packages/cffi/__pycache__/setuptools_ext.cpython-310.pyc


BIN
rest_api_venv/Lib/site-packages/cffi/__pycache__/vengine_cpy.cpython-310.pyc


BIN
rest_api_venv/Lib/site-packages/cffi/__pycache__/vengine_gen.cpython-310.pyc


BIN
rest_api_venv/Lib/site-packages/cffi/__pycache__/verifier.cpython-310.pyc


+ 149 - 0
rest_api_venv/Lib/site-packages/cffi/_cffi_errors.h

@@ -0,0 +1,149 @@
+#ifndef CFFI_MESSAGEBOX
+# ifdef _MSC_VER
+#  define CFFI_MESSAGEBOX  1
+# else
+#  define CFFI_MESSAGEBOX  0
+# endif
+#endif
+
+
+#if CFFI_MESSAGEBOX
+/* Windows only: logic to take the Python-CFFI embedding logic
+   initialization errors and display them in a background thread
+   with MessageBox.  The idea is that if the whole program closes
+   as a result of this problem, then likely it is already a console
+   program and you can read the stderr output in the console too.
+   If it is not a console program, then it will likely show its own
+   dialog to complain, or generally not abruptly close, and for this
+   case the background thread should stay alive.
+*/
+static void *volatile _cffi_bootstrap_text;
+
+static PyObject *_cffi_start_error_capture(void)
+{
+    PyObject *result = NULL;
+    PyObject *x, *m, *bi;
+
+    if (InterlockedCompareExchangePointer(&_cffi_bootstrap_text,
+            (void *)1, NULL) != NULL)
+        return (PyObject *)1;
+
+    m = PyImport_AddModule("_cffi_error_capture");
+    if (m == NULL)
+        goto error;
+
+    result = PyModule_GetDict(m);
+    if (result == NULL)
+        goto error;
+
+#if PY_MAJOR_VERSION >= 3
+    bi = PyImport_ImportModule("builtins");
+#else
+    bi = PyImport_ImportModule("__builtin__");
+#endif
+    if (bi == NULL)
+        goto error;
+    PyDict_SetItemString(result, "__builtins__", bi);
+    Py_DECREF(bi);
+
+    x = PyRun_String(
+        "import sys\n"
+        "class FileLike:\n"
+        "  def write(self, x):\n"
+        "    try:\n"
+        "      of.write(x)\n"
+        "    except: pass\n"
+        "    self.buf += x\n"
+        "  def flush(self):\n"
+        "    pass\n"
+        "fl = FileLike()\n"
+        "fl.buf = ''\n"
+        "of = sys.stderr\n"
+        "sys.stderr = fl\n"
+        "def done():\n"
+        "  sys.stderr = of\n"
+        "  return fl.buf\n",   /* make sure the returned value stays alive */
+        Py_file_input,
+        result, result);
+    Py_XDECREF(x);
+
+ error:
+    if (PyErr_Occurred())
+    {
+        PyErr_WriteUnraisable(Py_None);
+        PyErr_Clear();
+    }
+    return result;
+}
+
+#pragma comment(lib, "user32.lib")
+
+static DWORD WINAPI _cffi_bootstrap_dialog(LPVOID ignored)
+{
+    Sleep(666);    /* may be interrupted if the whole process is closing */
+#if PY_MAJOR_VERSION >= 3
+    MessageBoxW(NULL, (wchar_t *)_cffi_bootstrap_text,
+                L"Python-CFFI error",
+                MB_OK | MB_ICONERROR);
+#else
+    MessageBoxA(NULL, (char *)_cffi_bootstrap_text,
+                "Python-CFFI error",
+                MB_OK | MB_ICONERROR);
+#endif
+    _cffi_bootstrap_text = NULL;
+    return 0;
+}
+
+static void _cffi_stop_error_capture(PyObject *ecap)
+{
+    PyObject *s;
+    void *text;
+
+    if (ecap == (PyObject *)1)
+        return;
+
+    if (ecap == NULL)
+        goto error;
+
+    s = PyRun_String("done()", Py_eval_input, ecap, ecap);
+    if (s == NULL)
+        goto error;
+
+    /* Show a dialog box, but in a background thread, and
+       never show multiple dialog boxes at once. */
+#if PY_MAJOR_VERSION >= 3
+    text = PyUnicode_AsWideCharString(s, NULL);
+#else
+    text = PyString_AsString(s);
+#endif
+
+    _cffi_bootstrap_text = text;
+
+    if (text != NULL)
+    {
+        HANDLE h;
+        h = CreateThread(NULL, 0, _cffi_bootstrap_dialog,
+                         NULL, 0, NULL);
+        if (h != NULL)
+            CloseHandle(h);
+    }
+    /* decref the string, but it should stay alive as 'fl.buf'
+       in the small module above.  It will really be freed only if
+       we later get another similar error.  So it's a leak of at
+       most one copy of the small module.  That's fine for this
+       situation which is usually a "fatal error" anyway. */
+    Py_DECREF(s);
+    PyErr_Clear();
+    return;
+
+  error:
+    _cffi_bootstrap_text = NULL;
+    PyErr_Clear();
+}
+
+#else
+
+static PyObject *_cffi_start_error_capture(void) { return NULL; }
+static void _cffi_stop_error_capture(PyObject *ecap) { }
+
+#endif

+ 385 - 0
rest_api_venv/Lib/site-packages/cffi/_cffi_include.h

@@ -0,0 +1,385 @@
+#define _CFFI_
+
+/* We try to define Py_LIMITED_API before including Python.h.
+
+   Mess: we can only define it if Py_DEBUG, Py_TRACE_REFS and
+   Py_REF_DEBUG are not defined.  This is a best-effort approximation:
+   we can learn about Py_DEBUG from pyconfig.h, but it is unclear if
+   the same works for the other two macros.  Py_DEBUG implies them,
+   but not the other way around.
+
+   The implementation is messy (issue #350): on Windows, with _MSC_VER,
+   we have to define Py_LIMITED_API even before including pyconfig.h.
+   In that case, we guess what pyconfig.h will do to the macros above,
+   and check our guess after the #include.
+
+   Note that on Windows, with CPython 3.x, you need >= 3.5 and virtualenv
+   version >= 16.0.0.  With older versions of either, you don't get a
+   copy of PYTHON3.DLL in the virtualenv.  We can't check the version of
+   CPython *before* we even include pyconfig.h.  ffi.set_source() puts
+   a ``#define _CFFI_NO_LIMITED_API'' at the start of this file if it is
+   running on Windows < 3.5, as an attempt at fixing it, but that's
+   arguably wrong because it may not be the target version of Python.
+   Still better than nothing I guess.  As another workaround, you can
+   remove the definition of Py_LIMITED_API here.
+
+   See also 'py_limited_api' in cffi/setuptools_ext.py.
+*/
+#if !defined(_CFFI_USE_EMBEDDING) && !defined(Py_LIMITED_API)
+#  ifdef _MSC_VER
+#    if !defined(_DEBUG) && !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG) && !defined(_CFFI_NO_LIMITED_API)
+#      define Py_LIMITED_API
+#    endif
+#    include <pyconfig.h>
+     /* sanity-check: Py_LIMITED_API will cause crashes if any of these
+        are also defined.  Normally, the Python file PC/pyconfig.h does not
+        cause any of these to be defined, with the exception that _DEBUG
+        causes Py_DEBUG.  Double-check that. */
+#    ifdef Py_LIMITED_API
+#      if defined(Py_DEBUG)
+#        error "pyconfig.h unexpectedly defines Py_DEBUG, but Py_LIMITED_API is set"
+#      endif
+#      if defined(Py_TRACE_REFS)
+#        error "pyconfig.h unexpectedly defines Py_TRACE_REFS, but Py_LIMITED_API is set"
+#      endif
+#      if defined(Py_REF_DEBUG)
+#        error "pyconfig.h unexpectedly defines Py_REF_DEBUG, but Py_LIMITED_API is set"
+#      endif
+#    endif
+#  else
+#    include <pyconfig.h>
+#    if !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG) && !defined(_CFFI_NO_LIMITED_API)
+#      define Py_LIMITED_API
+#    endif
+#  endif
+#endif
+
+#include <Python.h>
+#ifdef __cplusplus
+extern "C" {
+#endif
+#include <stddef.h>
+#include "parse_c_type.h"
+
+/* this block of #ifs should be kept exactly identical between
+   c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py
+   and cffi/_cffi_include.h */
+#if defined(_MSC_VER)
+# include <malloc.h>   /* for alloca() */
+# if _MSC_VER < 1600   /* MSVC < 2010 */
+   typedef __int8 int8_t;
+   typedef __int16 int16_t;
+   typedef __int32 int32_t;
+   typedef __int64 int64_t;
+   typedef unsigned __int8 uint8_t;
+   typedef unsigned __int16 uint16_t;
+   typedef unsigned __int32 uint32_t;
+   typedef unsigned __int64 uint64_t;
+   typedef __int8 int_least8_t;
+   typedef __int16 int_least16_t;
+   typedef __int32 int_least32_t;
+   typedef __int64 int_least64_t;
+   typedef unsigned __int8 uint_least8_t;
+   typedef unsigned __int16 uint_least16_t;
+   typedef unsigned __int32 uint_least32_t;
+   typedef unsigned __int64 uint_least64_t;
+   typedef __int8 int_fast8_t;
+   typedef __int16 int_fast16_t;
+   typedef __int32 int_fast32_t;
+   typedef __int64 int_fast64_t;
+   typedef unsigned __int8 uint_fast8_t;
+   typedef unsigned __int16 uint_fast16_t;
+   typedef unsigned __int32 uint_fast32_t;
+   typedef unsigned __int64 uint_fast64_t;
+   typedef __int64 intmax_t;
+   typedef unsigned __int64 uintmax_t;
+# else
+#  include <stdint.h>
+# endif
+# if _MSC_VER < 1800   /* MSVC < 2013 */
+#  ifndef __cplusplus
+    typedef unsigned char _Bool;
+#  endif
+# endif
+#else
+# include <stdint.h>
+# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux)
+#  include <alloca.h>
+# endif
+#endif
+
+#ifdef __GNUC__
+# define _CFFI_UNUSED_FN  __attribute__((unused))
+#else
+# define _CFFI_UNUSED_FN  /* nothing */
+#endif
+
+#ifdef __cplusplus
+# ifndef _Bool
+   typedef bool _Bool;   /* semi-hackish: C++ has no _Bool; bool is builtin */
+# endif
+#endif
+
+/**********  CPython-specific section  **********/
+#ifndef PYPY_VERSION
+
+
+#if PY_MAJOR_VERSION >= 3
+# define PyInt_FromLong PyLong_FromLong
+#endif
+
+#define _cffi_from_c_double PyFloat_FromDouble
+#define _cffi_from_c_float PyFloat_FromDouble
+#define _cffi_from_c_long PyInt_FromLong
+#define _cffi_from_c_ulong PyLong_FromUnsignedLong
+#define _cffi_from_c_longlong PyLong_FromLongLong
+#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong
+#define _cffi_from_c__Bool PyBool_FromLong
+
+#define _cffi_to_c_double PyFloat_AsDouble
+#define _cffi_to_c_float PyFloat_AsDouble
+
+#define _cffi_from_c_int(x, type)                                        \
+    (((type)-1) > 0 ? /* unsigned */                                     \
+        (sizeof(type) < sizeof(long) ?                                   \
+            PyInt_FromLong((long)x) :                                    \
+         sizeof(type) == sizeof(long) ?                                  \
+            PyLong_FromUnsignedLong((unsigned long)x) :                  \
+            PyLong_FromUnsignedLongLong((unsigned long long)x)) :        \
+        (sizeof(type) <= sizeof(long) ?                                  \
+            PyInt_FromLong((long)x) :                                    \
+            PyLong_FromLongLong((long long)x)))
+
+#define _cffi_to_c_int(o, type)                                          \
+    ((type)(                                                             \
+     sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o)        \
+                                         : (type)_cffi_to_c_i8(o)) :     \
+     sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o)       \
+                                         : (type)_cffi_to_c_i16(o)) :    \
+     sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o)       \
+                                         : (type)_cffi_to_c_i32(o)) :    \
+     sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o)       \
+                                         : (type)_cffi_to_c_i64(o)) :    \
+     (Py_FatalError("unsupported size for type " #type), (type)0)))
+
+#define _cffi_to_c_i8                                                    \
+                 ((int(*)(PyObject *))_cffi_exports[1])
+#define _cffi_to_c_u8                                                    \
+                 ((int(*)(PyObject *))_cffi_exports[2])
+#define _cffi_to_c_i16                                                   \
+                 ((int(*)(PyObject *))_cffi_exports[3])
+#define _cffi_to_c_u16                                                   \
+                 ((int(*)(PyObject *))_cffi_exports[4])
+#define _cffi_to_c_i32                                                   \
+                 ((int(*)(PyObject *))_cffi_exports[5])
+#define _cffi_to_c_u32                                                   \
+                 ((unsigned int(*)(PyObject *))_cffi_exports[6])
+#define _cffi_to_c_i64                                                   \
+                 ((long long(*)(PyObject *))_cffi_exports[7])
+#define _cffi_to_c_u64                                                   \
+                 ((unsigned long long(*)(PyObject *))_cffi_exports[8])
+#define _cffi_to_c_char                                                  \
+                 ((int(*)(PyObject *))_cffi_exports[9])
+#define _cffi_from_c_pointer                                             \
+    ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[10])
+#define _cffi_to_c_pointer                                               \
+    ((char *(*)(PyObject *, struct _cffi_ctypedescr *))_cffi_exports[11])
+#define _cffi_get_struct_layout                                          \
+    not used any more
+#define _cffi_restore_errno                                              \
+    ((void(*)(void))_cffi_exports[13])
+#define _cffi_save_errno                                                 \
+    ((void(*)(void))_cffi_exports[14])
+#define _cffi_from_c_char                                                \
+    ((PyObject *(*)(char))_cffi_exports[15])
+#define _cffi_from_c_deref                                               \
+    ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[16])
+#define _cffi_to_c                                                       \
+    ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[17])
+#define _cffi_from_c_struct                                              \
+    ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[18])
+#define _cffi_to_c_wchar_t                                               \
+    ((_cffi_wchar_t(*)(PyObject *))_cffi_exports[19])
+#define _cffi_from_c_wchar_t                                             \
+    ((PyObject *(*)(_cffi_wchar_t))_cffi_exports[20])
+#define _cffi_to_c_long_double                                           \
+    ((long double(*)(PyObject *))_cffi_exports[21])
+#define _cffi_to_c__Bool                                                 \
+    ((_Bool(*)(PyObject *))_cffi_exports[22])
+#define _cffi_prepare_pointer_call_argument                              \
+    ((Py_ssize_t(*)(struct _cffi_ctypedescr *,                           \
+                    PyObject *, char **))_cffi_exports[23])
+#define _cffi_convert_array_from_object                                  \
+    ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[24])
+#define _CFFI_CPIDX  25
+#define _cffi_call_python                                                \
+    ((void(*)(struct _cffi_externpy_s *, char *))_cffi_exports[_CFFI_CPIDX])
+#define _cffi_to_c_wchar3216_t                                           \
+    ((int(*)(PyObject *))_cffi_exports[26])
+#define _cffi_from_c_wchar3216_t                                         \
+    ((PyObject *(*)(int))_cffi_exports[27])
+#define _CFFI_NUM_EXPORTS 28
+
+struct _cffi_ctypedescr;
+
+static void *_cffi_exports[_CFFI_NUM_EXPORTS];
+
+#define _cffi_type(index)   (                           \
+    assert((((uintptr_t)_cffi_types[index]) & 1) == 0), \
+    (struct _cffi_ctypedescr *)_cffi_types[index])
+
+static PyObject *_cffi_init(const char *module_name, Py_ssize_t version,
+                            const struct _cffi_type_context_s *ctx)
+{
+    PyObject *module, *o_arg, *new_module;
+    void *raw[] = {
+        (void *)module_name,
+        (void *)version,
+        (void *)_cffi_exports,
+        (void *)ctx,
+    };
+
+    module = PyImport_ImportModule("_cffi_backend");
+    if (module == NULL)
+        goto failure;
+
+    o_arg = PyLong_FromVoidPtr((void *)raw);
+    if (o_arg == NULL)
+        goto failure;
+
+    new_module = PyObject_CallMethod(
+        module, (char *)"_init_cffi_1_0_external_module", (char *)"O", o_arg);
+
+    Py_DECREF(o_arg);
+    Py_DECREF(module);
+    return new_module;
+
+  failure:
+    Py_XDECREF(module);
+    return NULL;
+}
+
+
+#ifdef HAVE_WCHAR_H
+typedef wchar_t _cffi_wchar_t;
+#else
+typedef uint16_t _cffi_wchar_t;   /* same random pick as _cffi_backend.c */
+#endif
+
+_CFFI_UNUSED_FN static uint16_t _cffi_to_c_char16_t(PyObject *o)
+{
+    if (sizeof(_cffi_wchar_t) == 2)
+        return (uint16_t)_cffi_to_c_wchar_t(o);
+    else
+        return (uint16_t)_cffi_to_c_wchar3216_t(o);
+}
+
+_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char16_t(uint16_t x)
+{
+    if (sizeof(_cffi_wchar_t) == 2)
+        return _cffi_from_c_wchar_t((_cffi_wchar_t)x);
+    else
+        return _cffi_from_c_wchar3216_t((int)x);
+}
+
+_CFFI_UNUSED_FN static int _cffi_to_c_char32_t(PyObject *o)
+{
+    if (sizeof(_cffi_wchar_t) == 4)
+        return (int)_cffi_to_c_wchar_t(o);
+    else
+        return (int)_cffi_to_c_wchar3216_t(o);
+}
+
+_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char32_t(unsigned int x)
+{
+    if (sizeof(_cffi_wchar_t) == 4)
+        return _cffi_from_c_wchar_t((_cffi_wchar_t)x);
+    else
+        return _cffi_from_c_wchar3216_t((int)x);
+}
+
+union _cffi_union_alignment_u {
+    unsigned char m_char;
+    unsigned short m_short;
+    unsigned int m_int;
+    unsigned long m_long;
+    unsigned long long m_longlong;
+    float m_float;
+    double m_double;
+    long double m_longdouble;
+};
+
+struct _cffi_freeme_s {
+    struct _cffi_freeme_s *next;
+    union _cffi_union_alignment_u alignment;
+};
+
+_CFFI_UNUSED_FN static int
+_cffi_convert_array_argument(struct _cffi_ctypedescr *ctptr, PyObject *arg,
+                             char **output_data, Py_ssize_t datasize,
+                             struct _cffi_freeme_s **freeme)
+{
+    char *p;
+    if (datasize < 0)
+        return -1;
+
+    p = *output_data;
+    if (p == NULL) {
+        struct _cffi_freeme_s *fp = (struct _cffi_freeme_s *)PyObject_Malloc(
+            offsetof(struct _cffi_freeme_s, alignment) + (size_t)datasize);
+        if (fp == NULL)
+            return -1;
+        fp->next = *freeme;
+        *freeme = fp;
+        p = *output_data = (char *)&fp->alignment;
+    }
+    memset((void *)p, 0, (size_t)datasize);
+    return _cffi_convert_array_from_object(p, ctptr, arg);
+}
+
+_CFFI_UNUSED_FN static void
+_cffi_free_array_arguments(struct _cffi_freeme_s *freeme)
+{
+    do {
+        void *p = (void *)freeme;
+        freeme = freeme->next;
+        PyObject_Free(p);
+    } while (freeme != NULL);
+}
+
+/**********  end CPython-specific section  **********/
+#else
+_CFFI_UNUSED_FN
+static void (*_cffi_call_python_org)(struct _cffi_externpy_s *, char *);
+# define _cffi_call_python  _cffi_call_python_org
+#endif
+
+
+#define _cffi_array_len(array)   (sizeof(array) / sizeof((array)[0]))
+
+#define _cffi_prim_int(size, sign)                                      \
+    ((size) == 1 ? ((sign) ? _CFFI_PRIM_INT8  : _CFFI_PRIM_UINT8)  :    \
+     (size) == 2 ? ((sign) ? _CFFI_PRIM_INT16 : _CFFI_PRIM_UINT16) :    \
+     (size) == 4 ? ((sign) ? _CFFI_PRIM_INT32 : _CFFI_PRIM_UINT32) :    \
+     (size) == 8 ? ((sign) ? _CFFI_PRIM_INT64 : _CFFI_PRIM_UINT64) :    \
+     _CFFI__UNKNOWN_PRIM)
+
+#define _cffi_prim_float(size)                                          \
+    ((size) == sizeof(float) ? _CFFI_PRIM_FLOAT :                       \
+     (size) == sizeof(double) ? _CFFI_PRIM_DOUBLE :                     \
+     (size) == sizeof(long double) ? _CFFI__UNKNOWN_LONG_DOUBLE :       \
+     _CFFI__UNKNOWN_FLOAT_PRIM)
+
+#define _cffi_check_int(got, got_nonpos, expected)      \
+    ((got_nonpos) == (expected <= 0) &&                 \
+     (got) == (unsigned long long)expected)
+
+#ifdef MS_WIN32
+# define _cffi_stdcall  __stdcall
+#else
+# define _cffi_stdcall  /* nothing */
+#endif
+
+#ifdef __cplusplus
+}
+#endif

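_cffi_include.h is the header cffi copies into every C source it generates in out-of-line API mode (ffi.set_source() + ffi.compile()). A hedged build-script sketch under assumed names (_demo is a placeholder module, not something this commit builds):

    # build_demo.py -- run once; cffi emits _demo.c and compiles it into an extension module
    from cffi import FFI

    ffibuilder = FFI()
    ffibuilder.cdef("double cos(double x);")            # declaration checked by the C compiler
    ffibuilder.set_source("_demo", "#include <math.h>")

    if __name__ == "__main__":
        ffibuilder.compile(verbose=True)

    # afterwards:  from _demo import lib;  lib.cos(0.0)  ->  1.0
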
+ 527 - 0
rest_api_venv/Lib/site-packages/cffi/_embedding.h

@@ -0,0 +1,527 @@
+
+/***** Support code for embedding *****/
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+#if defined(_WIN32)
+#  define CFFI_DLLEXPORT  __declspec(dllexport)
+#elif defined(__GNUC__)
+#  define CFFI_DLLEXPORT  __attribute__((visibility("default")))
+#else
+#  define CFFI_DLLEXPORT  /* nothing */
+#endif
+
+
+/* There are two global variables of type _cffi_call_python_fnptr:
+
+   * _cffi_call_python, which we declare just below, is the one called
+     by ``extern "Python"`` implementations.
+
+   * _cffi_call_python_org, which on CPython is actually part of the
+     _cffi_exports[] array, is the function pointer copied from
+     _cffi_backend.
+
+   After initialization is complete, both are equal.  However, the
+   first one remains equal to &_cffi_start_and_call_python until the
+   very end of initialization, when we are (or should be) sure that
+   concurrent threads also see a completely initialized world, and
+   only then is it changed.
+*/
+#undef _cffi_call_python
+typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *);
+static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *);
+static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python;
+
+
+#ifndef _MSC_VER
+   /* --- Assuming a GCC not infinitely old --- */
+# define cffi_compare_and_swap(l,o,n)  __sync_bool_compare_and_swap(l,o,n)
+# define cffi_write_barrier()          __sync_synchronize()
+# if !defined(__amd64__) && !defined(__x86_64__) &&   \
+     !defined(__i386__) && !defined(__i386)
+#   define cffi_read_barrier()         __sync_synchronize()
+# else
+#   define cffi_read_barrier()         (void)0
+# endif
+#else
+   /* --- Windows threads version --- */
+# include <Windows.h>
+# define cffi_compare_and_swap(l,o,n) \
+                               (InterlockedCompareExchangePointer(l,n,o) == (o))
+# define cffi_write_barrier()       InterlockedCompareExchange(&_cffi_dummy,0,0)
+# define cffi_read_barrier()           (void)0
+static volatile LONG _cffi_dummy;
+#endif
+
+#ifdef WITH_THREAD
+# ifndef _MSC_VER
+#  include <pthread.h>
+   static pthread_mutex_t _cffi_embed_startup_lock;
+# else
+   static CRITICAL_SECTION _cffi_embed_startup_lock;
+# endif
+  static char _cffi_embed_startup_lock_ready = 0;
+#endif
+
+static void _cffi_acquire_reentrant_mutex(void)
+{
+    static void *volatile lock = NULL;
+
+    while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) {
+        /* should ideally do a spin loop instruction here, but
+           hard to do it portably and doesn't really matter I
+           think: pthread_mutex_init() should be very fast, and
+           this is only run at start-up anyway. */
+    }
+
+#ifdef WITH_THREAD
+    if (!_cffi_embed_startup_lock_ready) {
+# ifndef _MSC_VER
+        pthread_mutexattr_t attr;
+        pthread_mutexattr_init(&attr);
+        pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE);
+        pthread_mutex_init(&_cffi_embed_startup_lock, &attr);
+# else
+        InitializeCriticalSection(&_cffi_embed_startup_lock);
+# endif
+        _cffi_embed_startup_lock_ready = 1;
+    }
+#endif
+
+    while (!cffi_compare_and_swap(&lock, (void *)1, NULL))
+        ;
+
+#ifndef _MSC_VER
+    pthread_mutex_lock(&_cffi_embed_startup_lock);
+#else
+    EnterCriticalSection(&_cffi_embed_startup_lock);
+#endif
+}
+
+static void _cffi_release_reentrant_mutex(void)
+{
+#ifndef _MSC_VER
+    pthread_mutex_unlock(&_cffi_embed_startup_lock);
+#else
+    LeaveCriticalSection(&_cffi_embed_startup_lock);
+#endif
+}
+
+
+/**********  CPython-specific section  **********/
+#ifndef PYPY_VERSION
+
+#include "_cffi_errors.h"
+
+
+#define _cffi_call_python_org  _cffi_exports[_CFFI_CPIDX]
+
+PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void);   /* forward */
+
+static void _cffi_py_initialize(void)
+{
+    /* XXX use initsigs=0, which "skips initialization registration of
+       signal handlers, which might be useful when Python is
+       embedded" according to the Python docs.  But review and think
+       if it should be a user-controllable setting.
+
+       XXX we should also give a way to write errors to a buffer
+       instead of to stderr.
+
+       XXX if importing 'site' fails, CPython (any version) calls
+       exit().  Should we try to work around this behavior here?
+    */
+    Py_InitializeEx(0);
+}
+
+static int _cffi_initialize_python(void)
+{
+    /* This initializes Python, imports _cffi_backend, and then the
+       present .dll/.so is set up as a CPython C extension module.
+    */
+    int result;
+    PyGILState_STATE state;
+    PyObject *pycode=NULL, *global_dict=NULL, *x;
+    PyObject *builtins;
+
+    state = PyGILState_Ensure();
+
+    /* Call the initxxx() function from the present module.  It will
+       create and initialize us as a CPython extension module, instead
+       of letting the startup Python code do it---it might reimport
+       the same .dll/.so and get maybe confused on some platforms.
+       It might also have troubles locating the .dll/.so again for all
+       I know.
+    */
+    (void)_CFFI_PYTHON_STARTUP_FUNC();
+    if (PyErr_Occurred())
+        goto error;
+
+    /* Now run the Python code provided to ffi.embedding_init_code().
+     */
+    pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE,
+                              "<init code for '" _CFFI_MODULE_NAME "'>",
+                              Py_file_input);
+    if (pycode == NULL)
+        goto error;
+    global_dict = PyDict_New();
+    if (global_dict == NULL)
+        goto error;
+    builtins = PyEval_GetBuiltins();
+    if (builtins == NULL)
+        goto error;
+    if (PyDict_SetItemString(global_dict, "__builtins__", builtins) < 0)
+        goto error;
+    x = PyEval_EvalCode(
+#if PY_MAJOR_VERSION < 3
+                        (PyCodeObject *)
+#endif
+                        pycode, global_dict, global_dict);
+    if (x == NULL)
+        goto error;
+    Py_DECREF(x);
+
+    /* Done!  Now if we've been called from
+       _cffi_start_and_call_python() in an ``extern "Python"``, we can
+       only hope that the Python code did correctly set up the
+       corresponding @ffi.def_extern() function.  Otherwise, the
+       general logic of ``extern "Python"`` functions (inside the
+       _cffi_backend module) will find that the reference is still
+       missing and print an error.
+     */
+    result = 0;
+ done:
+    Py_XDECREF(pycode);
+    Py_XDECREF(global_dict);
+    PyGILState_Release(state);
+    return result;
+
+ error:;
+    {
+        /* Print as much information as potentially useful.
+           Debugging load-time failures with embedding is not fun
+        */
+        PyObject *ecap;
+        PyObject *exception, *v, *tb, *f, *modules, *mod;
+        PyErr_Fetch(&exception, &v, &tb);
+        ecap = _cffi_start_error_capture();
+        f = PySys_GetObject((char *)"stderr");
+        if (f != NULL && f != Py_None) {
+            PyFile_WriteString(
+                "Failed to initialize the Python-CFFI embedding logic:\n\n", f);
+        }
+
+        if (exception != NULL) {
+            PyErr_NormalizeException(&exception, &v, &tb);
+            PyErr_Display(exception, v, tb);
+        }
+        Py_XDECREF(exception);
+        Py_XDECREF(v);
+        Py_XDECREF(tb);
+
+        if (f != NULL && f != Py_None) {
+            PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME
+                               "\ncompiled with cffi version: 1.15.0"
+                               "\n_cffi_backend module: ", f);
+            modules = PyImport_GetModuleDict();
+            mod = PyDict_GetItemString(modules, "_cffi_backend");
+            if (mod == NULL) {
+                PyFile_WriteString("not loaded", f);
+            }
+            else {
+                v = PyObject_GetAttrString(mod, "__file__");
+                PyFile_WriteObject(v, f, 0);
+                Py_XDECREF(v);
+            }
+            PyFile_WriteString("\nsys.path: ", f);
+            PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0);
+            PyFile_WriteString("\n\n", f);
+        }
+        _cffi_stop_error_capture(ecap);
+    }
+    result = -1;
+    goto done;
+}
+
+#if PY_VERSION_HEX < 0x03080000
+PyAPI_DATA(char *) _PyParser_TokenNames[];  /* from CPython */
+#endif
+
+static int _cffi_carefully_make_gil(void)
+{
+    /* This does the basic initialization of Python.  It can be called
+       completely concurrently from unrelated threads.  It assumes
+       that we don't hold the GIL before (if it exists), and we don't
+       hold it afterwards.
+
+       (What it really does used to be completely different in Python 2
+       and Python 3, with the Python 2 solution avoiding the spin-lock
+       around the Py_InitializeEx() call.  However, after recent changes
+       to CPython 2.7 (issue #358) it no longer works.  So we use the
+       Python 3 solution everywhere.)
+
+       This initializes Python by calling Py_InitializeEx().
+       Important: this must not be called concurrently at all.
+       So we use a global variable as a simple spin lock.  This global
+       variable must be from 'libpythonX.Y.so', not from this
+       cffi-based extension module, because it must be shared from
+       different cffi-based extension modules.
+
+       In Python < 3.8, we choose
+       _PyParser_TokenNames[0] as a completely arbitrary pointer value
+       that is never written to.  The default is to point to the
+       string "ENDMARKER".  We change it temporarily to point to the
+       next character in that string.  (Yes, I know it's REALLY
+       obscure.)
+
+       In Python >= 3.8, this string array is no longer writable, so
+       instead we pick PyCapsuleType.tp_version_tag.  We can't change
+       Python < 3.8 because someone might use a mixture of cffi
+       embedded modules, some of which were compiled before this file
+       changed.
+    */
+
+#ifdef WITH_THREAD
+# if PY_VERSION_HEX < 0x03080000
+    char *volatile *lock = (char *volatile *)_PyParser_TokenNames;
+    char *old_value, *locked_value;
+
+    while (1) {    /* spin loop */
+        old_value = *lock;
+        locked_value = old_value + 1;
+        if (old_value[0] == 'E') {
+            assert(old_value[1] == 'N');
+            if (cffi_compare_and_swap(lock, old_value, locked_value))
+                break;
+        }
+        else {
+            assert(old_value[0] == 'N');
+            /* should ideally do a spin loop instruction here, but
+               hard to do it portably and doesn't really matter I
+               think: PyEval_InitThreads() should be very fast, and
+               this is only run at start-up anyway. */
+        }
+    }
+# else
+    int volatile *lock = (int volatile *)&PyCapsule_Type.tp_version_tag;
+    int old_value, locked_value;
+    assert(!(PyCapsule_Type.tp_flags & Py_TPFLAGS_HAVE_VERSION_TAG));
+
+    while (1) {    /* spin loop */
+        old_value = *lock;
+        locked_value = -42;
+        if (old_value == 0) {
+            if (cffi_compare_and_swap(lock, old_value, locked_value))
+                break;
+        }
+        else {
+            assert(old_value == locked_value);
+            /* should ideally do a spin loop instruction here, but
+               hard to do it portably and doesn't really matter I
+               think: PyEval_InitThreads() should be very fast, and
+               this is only run at start-up anyway. */
+        }
+    }
+# endif
+#endif
+
+    /* call Py_InitializeEx() */
+    if (!Py_IsInitialized()) {
+        _cffi_py_initialize();
+#if PY_VERSION_HEX < 0x03070000
+        PyEval_InitThreads();
+#endif
+        PyEval_SaveThread();  /* release the GIL */
+        /* the returned tstate must be the one that has been stored into the
+           autoTLSkey by _PyGILState_Init() called from Py_Initialize(). */
+    }
+    else {
+#if PY_VERSION_HEX < 0x03070000
+        /* PyEval_InitThreads() is always a no-op from CPython 3.7 */
+        PyGILState_STATE state = PyGILState_Ensure();
+        PyEval_InitThreads();
+        PyGILState_Release(state);
+#endif
+    }
+
+#ifdef WITH_THREAD
+    /* release the lock */
+    while (!cffi_compare_and_swap(lock, locked_value, old_value))
+        ;
+#endif
+
+    return 0;
+}
+
+/**********  end CPython-specific section  **********/
+
+
+#else
+
+
+/**********  PyPy-specific section  **********/
+
+PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]);   /* forward */
+
+static struct _cffi_pypy_init_s {
+    const char *name;
+    void *func;    /* function pointer */
+    const char *code;
+} _cffi_pypy_init = {
+    _CFFI_MODULE_NAME,
+    _CFFI_PYTHON_STARTUP_FUNC,
+    _CFFI_PYTHON_STARTUP_CODE,
+};
+
+extern int pypy_carefully_make_gil(const char *);
+extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *);
+
+static int _cffi_carefully_make_gil(void)
+{
+    return pypy_carefully_make_gil(_CFFI_MODULE_NAME);
+}
+
+static int _cffi_initialize_python(void)
+{
+    return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init);
+}
+
+/**********  end PyPy-specific section  **********/
+
+
+#endif
+
+
+#ifdef __GNUC__
+__attribute__((noinline))
+#endif
+static _cffi_call_python_fnptr _cffi_start_python(void)
+{
+    /* Delicate logic to initialize Python.  This function can be
+       called multiple times concurrently, e.g. when the process calls
+       its first ``extern "Python"`` functions in multiple threads at
+       once.  It can also be called recursively, in which case we must
+       ignore it.  We also have to consider what occurs if several
+       different cffi-based extensions reach this code in parallel
+       threads---it is a different copy of the code, then, and we
+       can't have any shared global variable unless it comes from
+       'libpythonX.Y.so'.
+
+       Idea:
+
+       * _cffi_carefully_make_gil(): "carefully" call
+         PyEval_InitThreads() (possibly with Py_InitializeEx() first).
+
+       * then we use a (local) custom lock to make sure that a call to this
+         cffi-based extension will wait if another call to the *same*
+         extension is running the initialization in another thread.
+         It is reentrant, so that a recursive call will not block, but
+         only one from a different thread.
+
+       * then we grab the GIL and (Python 2) we call Py_InitializeEx().
+         At this point, concurrent calls to Py_InitializeEx() are not
+         possible: we have the GIL.
+
+       * do the rest of the specific initialization, which may
+         temporarily release the GIL but not the custom lock.
+         Only release the custom lock when we are done.
+    */
+    static char called = 0;
+
+    if (_cffi_carefully_make_gil() != 0)
+        return NULL;
+
+    _cffi_acquire_reentrant_mutex();
+
+    /* Here the GIL exists, but we don't have it.  We're only protected
+       from concurrency by the reentrant mutex. */
+
+    /* This file only initializes the embedded module once, the first
+       time this is called, even if there are subinterpreters. */
+    if (!called) {
+        called = 1;  /* invoke _cffi_initialize_python() only once,
+                        but don't set '_cffi_call_python' right now,
+                        otherwise concurrent threads won't call
+                        this function at all (we need them to wait) */
+        if (_cffi_initialize_python() == 0) {
+            /* now initialization is finished.  Switch to the fast-path. */
+
+            /* We would like nobody to see the new value of
+               '_cffi_call_python' without also seeing the rest of the
+               data initialized.  However, this is not possible.  But
+               the new value of '_cffi_call_python' is the function
+               'cffi_call_python()' from _cffi_backend.  So:  */
+            cffi_write_barrier();
+            /* ^^^ we put a write barrier here, and a corresponding
+               read barrier at the start of cffi_call_python().  This
+               ensures that after that read barrier, we see everything
+               done here before the write barrier.
+            */
+
+            assert(_cffi_call_python_org != NULL);
+            _cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org;
+        }
+        else {
+            /* initialization failed.  Reset this to NULL, even if it was
+               already set to some other value.  Future calls to
+               _cffi_start_python() are still forced to occur, and will
+               always return NULL from now on. */
+            _cffi_call_python_org = NULL;
+        }
+    }
+
+    _cffi_release_reentrant_mutex();
+
+    return (_cffi_call_python_fnptr)_cffi_call_python_org;
+}
+
+static
+void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args)
+{
+    _cffi_call_python_fnptr fnptr;
+    int current_err = errno;
+#ifdef _MSC_VER
+    int current_lasterr = GetLastError();
+#endif
+    fnptr = _cffi_start_python();
+    if (fnptr == NULL) {
+        fprintf(stderr, "function %s() called, but initialization code "
+                        "failed.  Returning 0.\n", externpy->name);
+        memset(args, 0, externpy->size_of_result);
+    }
+#ifdef _MSC_VER
+    SetLastError(current_lasterr);
+#endif
+    errno = current_err;
+
+    if (fnptr != NULL)
+        fnptr(externpy, args);
+}
+
+
+/* The cffi_start_python() function makes sure Python is initialized
+   and our cffi module is set up.  It can be called manually from the
+   user C code.  The same effect is obtained automatically from any
+   dll-exported ``extern "Python"`` function.  This function returns
+   -1 if initialization failed, 0 if all is OK.  */
+_CFFI_UNUSED_FN
+static int cffi_start_python(void)
+{
+    if (_cffi_call_python == &_cffi_start_and_call_python) {
+        if (_cffi_start_python() == NULL)
+            return -1;
+    }
+    cffi_read_barrier();
+    return 0;
+}
+
+#undef cffi_compare_and_swap
+#undef cffi_write_barrier
+#undef cffi_read_barrier
+
+#ifdef __cplusplus
+}
+#endif

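_embedding.h is the support code linked into shared libraries produced by cffi's embedding mode, where ``extern "Python"`` functions become dll-exported entry points that lazily start the interpreter (the logic above). On the Python side such a library is typically produced like this; the module and function names are placeholders and not part of this repository:

    from cffi import FFI

    ffi = FFI()
    ffi.embedding_api("int add(int, int);")            # exported from the resulting DLL/.so
    ffi.set_source("_embedded_demo", "/* no extra C needed for this sketch */")
    ffi.embedding_init_code("""
        from _embedded_demo import ffi

        @ffi.def_extern()
        def add(x, y):
            return x + y
    """)
    ffi.compile(target="embedded_demo.*", verbose=True)  # '*' picks .dll/.so/.dylib
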
+ 965 - 0
rest_api_venv/Lib/site-packages/cffi/api.py

@@ -0,0 +1,965 @@
+import sys, types
+from .lock import allocate_lock
+from .error import CDefError
+from . import model
+
+try:
+    callable
+except NameError:
+    # Python 3.1
+    from collections import Callable
+    callable = lambda x: isinstance(x, Callable)
+
+try:
+    basestring
+except NameError:
+    # Python 3.x
+    basestring = str
+
+_unspecified = object()
+
+
+
+class FFI(object):
+    r'''
+    The main top-level class that you instantiate once, or once per module.
+
+    Example usage:
+
+        ffi = FFI()
+        ffi.cdef("""
+            int printf(const char *, ...);
+        """)
+
+        C = ffi.dlopen(None)   # standard library
+        -or-
+        C = ffi.verify()  # use a C compiler: verify the decl above is right
+
+        C.printf("hello, %s!\n", ffi.new("char[]", "world"))
+    '''
+
+    def __init__(self, backend=None):
+        """Create an FFI instance.  The 'backend' argument is used to
+        select a non-default backend, mostly for tests.
+        """
+        if backend is None:
+            # You need PyPy (>= 2.0 beta), or a CPython (>= 2.6) with
+            # _cffi_backend.so compiled.
+            import _cffi_backend as backend
+            from . import __version__
+            if backend.__version__ != __version__:
+                # bad version!  Try to be as explicit as possible.
+                if hasattr(backend, '__file__'):
+                    # CPython
+                    raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r.  When we import the top-level '_cffi_backend' extension module, we get version %s, located in %r.  The two versions should be equal; check your installation." % (
+                        __version__, __file__,
+                        backend.__version__, backend.__file__))
+                else:
+                    # PyPy
+                    raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r.  This interpreter comes with a built-in '_cffi_backend' module, which is version %s.  The two versions should be equal; check your installation." % (
+                        __version__, __file__, backend.__version__))
+            # (If you insist you can also try to pass the option
+            # 'backend=backend_ctypes.CTypesBackend()', but don't
+            # rely on it!  It's probably not going to work well.)
+
+        from . import cparser
+        self._backend = backend
+        self._lock = allocate_lock()
+        self._parser = cparser.Parser()
+        self._cached_btypes = {}
+        self._parsed_types = types.ModuleType('parsed_types').__dict__
+        self._new_types = types.ModuleType('new_types').__dict__
+        self._function_caches = []
+        self._libraries = []
+        self._cdefsources = []
+        self._included_ffis = []
+        self._windows_unicode = None
+        self._init_once_cache = {}
+        self._cdef_version = None
+        self._embedding = None
+        self._typecache = model.get_typecache(backend)
+        if hasattr(backend, 'set_ffi'):
+            backend.set_ffi(self)
+        for name in list(backend.__dict__):
+            if name.startswith('RTLD_'):
+                setattr(self, name, getattr(backend, name))
+        #
+        with self._lock:
+            self.BVoidP = self._get_cached_btype(model.voidp_type)
+            self.BCharA = self._get_cached_btype(model.char_array_type)
+        if isinstance(backend, types.ModuleType):
+            # _cffi_backend: attach these constants to the class
+            if not hasattr(FFI, 'NULL'):
+                FFI.NULL = self.cast(self.BVoidP, 0)
+                FFI.CData, FFI.CType = backend._get_types()
+        else:
+            # ctypes backend: attach these constants to the instance
+            self.NULL = self.cast(self.BVoidP, 0)
+            self.CData, self.CType = backend._get_types()
+        self.buffer = backend.buffer
+
+    def cdef(self, csource, override=False, packed=False, pack=None):
+        """Parse the given C source.  This registers all declared functions,
+        types, and global variables.  The functions and global variables can
+        then be accessed via either 'ffi.dlopen()' or 'ffi.verify()'.
+        The types can be used in 'ffi.new()' and other functions.
+        If 'packed' is specified as True, all structs declared inside this
+        cdef are packed, i.e. laid out without any field alignment at all.
+        Alternatively, 'pack' can be a small integer, and requests for
+        alignment greater than that are ignored (pack=1 is equivalent to
+        packed=True).
+        """
+        self._cdef(csource, override=override, packed=packed, pack=pack)
+
+    def embedding_api(self, csource, packed=False, pack=None):
+        self._cdef(csource, packed=packed, pack=pack, dllexport=True)
+        if self._embedding is None:
+            self._embedding = ''
+
+    def _cdef(self, csource, override=False, **options):
+        if not isinstance(csource, str):    # unicode, on Python 2
+            if not isinstance(csource, basestring):
+                raise TypeError("cdef() argument must be a string")
+            csource = csource.encode('ascii')
+        with self._lock:
+            self._cdef_version = object()
+            self._parser.parse(csource, override=override, **options)
+            self._cdefsources.append(csource)
+            if override:
+                for cache in self._function_caches:
+                    cache.clear()
+            finishlist = self._parser._recomplete
+            if finishlist:
+                self._parser._recomplete = []
+                for tp in finishlist:
+                    tp.finish_backend_type(self, finishlist)
+
+    def dlopen(self, name, flags=0):
+        """Load and return a dynamic library identified by 'name'.
+        The standard C library can be loaded by passing None.
+        Note that functions and types declared by 'ffi.cdef()' are not
+        linked to a particular library, just like C headers; in the
+        library we only look for the actual (untyped) symbols.
+        """
+        if not (isinstance(name, basestring) or
+                name is None or
+                isinstance(name, self.CData)):
+            raise TypeError("dlopen(name): name must be a file name, None, "
+                            "or an already-opened 'void *' handle")
+        with self._lock:
+            lib, function_cache = _make_ffi_library(self, name, flags)
+            self._function_caches.append(function_cache)
+            self._libraries.append(lib)
+        return lib
+
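A usage sketch of dlopen(), assuming a POSIX system where passing None loads the standard C library (see the Windows caveat handled further below):

    from cffi import FFI

    ffi = FFI()
    ffi.cdef("int printf(const char *format, ...);")
    C = ffi.dlopen(None)                      # the standard C library (POSIX)
    C.printf(b"hello, %s!\n", ffi.new("char[]", b"world"))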
+    def dlclose(self, lib):
+        """Close a library obtained with ffi.dlopen().  After this call,
+        access to functions or variables from the library will fail
+        (possibly with a segmentation fault).
+        """
+        type(lib).__cffi_close__(lib)
+
+    def _typeof_locked(self, cdecl):
+        # call me with the lock!
+        key = cdecl
+        if key in self._parsed_types:
+            return self._parsed_types[key]
+        #
+        if not isinstance(cdecl, str):    # unicode, on Python 2
+            cdecl = cdecl.encode('ascii')
+        #
+        type = self._parser.parse_type(cdecl)
+        really_a_function_type = type.is_raw_function
+        if really_a_function_type:
+            type = type.as_function_pointer()
+        btype = self._get_cached_btype(type)
+        result = btype, really_a_function_type
+        self._parsed_types[key] = result
+        return result
+
+    def _typeof(self, cdecl, consider_function_as_funcptr=False):
+        # string -> ctype object
+        try:
+            result = self._parsed_types[cdecl]
+        except KeyError:
+            with self._lock:
+                result = self._typeof_locked(cdecl)
+        #
+        btype, really_a_function_type = result
+        if really_a_function_type and not consider_function_as_funcptr:
+            raise CDefError("the type %r is a function type, not a "
+                            "pointer-to-function type" % (cdecl,))
+        return btype
+
+    def typeof(self, cdecl):
+        """Parse the C type given as a string and return the
+        corresponding <ctype> object.
+        It can also be used on a 'cdata' instance to get its C type.
+        """
+        if isinstance(cdecl, basestring):
+            return self._typeof(cdecl)
+        if isinstance(cdecl, self.CData):
+            return self._backend.typeof(cdecl)
+        if isinstance(cdecl, types.BuiltinFunctionType):
+            res = _builtin_function_type(cdecl)
+            if res is not None:
+                return res
+        if (isinstance(cdecl, types.FunctionType)
+                and hasattr(cdecl, '_cffi_base_type')):
+            with self._lock:
+                return self._get_cached_btype(cdecl._cffi_base_type)
+        raise TypeError(type(cdecl))
+
+    def sizeof(self, cdecl):
+        """Return the size in bytes of the argument.  It can be a
+        string naming a C type, or a 'cdata' instance.
+        """
+        if isinstance(cdecl, basestring):
+            BType = self._typeof(cdecl)
+            return self._backend.sizeof(BType)
+        else:
+            return self._backend.sizeof(cdecl)
+
+    def alignof(self, cdecl):
+        """Return the natural alignment size in bytes of the C type
+        given as a string.
+        """
+        if isinstance(cdecl, basestring):
+            cdecl = self._typeof(cdecl)
+        return self._backend.alignof(cdecl)
+
+    def offsetof(self, cdecl, *fields_or_indexes):
+        """Return the offset of the named field inside the given
+        structure or array, which must be given as a C type name.
+        You can give several field names in case of nested structures.
+        You can also give numeric values which correspond to array
+        items, in case of an array type.
+        """
+        if isinstance(cdecl, basestring):
+            cdecl = self._typeof(cdecl)
+        return self._typeoffsetof(cdecl, *fields_or_indexes)[1]
+
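A short sketch covering sizeof(), alignof() and offsetof() together; the printed values assume a typical ABI with 4-byte ints:

    from cffi import FFI

    ffi = FFI()
    ffi.cdef("struct point { int x; int y; };")
    print(ffi.sizeof("struct point"))          # typically 8
    print(ffi.alignof("int"))                  # typically 4
    print(ffi.offsetof("struct point", "y"))   # typically 4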
+    def new(self, cdecl, init=None):
+        """Allocate an instance according to the specified C type and
+        return a pointer to it.  The specified C type must be either a
+        pointer or an array: ``new('X *')`` allocates an X and returns
+        a pointer to it, whereas ``new('X[n]')`` allocates an array of
+        n X'es and returns an array referencing it (which works
+        mostly like a pointer, like in C).  You can also use
+        ``new('X[]', n)`` to allocate an array of a non-constant
+        length n.
+
+        The memory is initialized following the rules of declaring a
+        global variable in C: by default it is zero-initialized, but
+        an explicit initializer can be given which can be used to
+        fill all or part of the memory.
+
+        When the returned <cdata> object goes out of scope, the memory
+        is freed.  In other words the returned <cdata> object has
+        ownership of the value of type 'cdecl' that it points to.  This
+        means that the raw data can be used as long as this object is
+        kept alive, but must not be used for a longer time.  Be careful
+        about that when copying the pointer to the memory somewhere
+        else, e.g. into another structure.
+        """
+        if isinstance(cdecl, basestring):
+            cdecl = self._typeof(cdecl)
+        return self._backend.newp(cdecl, init)
+
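A sketch of the three forms of new() mentioned in the docstring, assuming cffi is installed:

    from cffi import FFI

    ffi = FFI()
    p = ffi.new("int *", 42)           # one int, initialized to 42
    a = ffi.new("int[8]")              # eight ints, zero-initialized
    s = ffi.new("char[]", b"hello")    # length taken from the initializer
    print(p[0], a[3], ffi.string(s))   # memory is released when p/a/s die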
+    def new_allocator(self, alloc=None, free=None,
+                      should_clear_after_alloc=True):
+        """Return a new allocator, i.e. a function that behaves like ffi.new()
+        but uses the provided low-level 'alloc' and 'free' functions.
+
+        'alloc' is called with the size as argument.  If it returns NULL, a
+        MemoryError is raised.  'free' is called with the result of 'alloc'
+        as argument.  Both can be either Python functions or directly C
+        functions.  If 'free' is None, then no free function is called.
+        If both 'alloc' and 'free' are None, the default is used.
+
+        If 'should_clear_after_alloc' is set to False, then the memory
+        returned by 'alloc' is assumed to be already cleared (or you are
+        fine with garbage); otherwise CFFI will clear it.
+        """
+        compiled_ffi = self._backend.FFI()
+        allocator = compiled_ffi.new_allocator(alloc, free,
+                                               should_clear_after_alloc)
+        def allocate(cdecl, init=None):
+            if isinstance(cdecl, basestring):
+                cdecl = self._typeof(cdecl)
+            return allocator(cdecl, init)
+        return allocate
+
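A sketch of new_allocator() backed by the C library's malloc()/free(), assuming a POSIX system where dlopen(None) works:

    from cffi import FFI

    ffi = FFI()
    ffi.cdef("void *malloc(size_t); void free(void *);")
    C = ffi.dlopen(None)
    new_from_malloc = ffi.new_allocator(C.malloc, C.free)
    p = new_from_malloc("int[10]")     # like ffi.new(), but using malloc()/free()
    p[3] = 42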
+    def cast(self, cdecl, source):
+        """Similar to a C cast: returns an instance of the named C
+        type initialized with the given 'source'.  The source is
+        cast between integers or pointers of any type.
+        """
+        if isinstance(cdecl, basestring):
+            cdecl = self._typeof(cdecl)
+        return self._backend.cast(cdecl, source)
+
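A sketch of cast() for both the integer and the pointer case; the byte read through 'q' depends on endianness:

    from cffi import FFI

    ffi = FFI()
    x = ffi.cast("long", 3.7)               # like a C cast: truncates to 3
    p = ffi.new("int[4]", [1, 2, 3, 4])
    q = ffi.cast("unsigned char *", p)      # reinterpret the same memory
    print(int(x), q[0])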
+    def string(self, cdata, maxlen=-1):
+        """Return a Python string (or unicode string) from the 'cdata'.
+        If 'cdata' is a pointer or array of characters or bytes, returns
+        the null-terminated string.  The returned string extends until
+        the first null character, or at most 'maxlen' characters.  If
+        'cdata' is an array then 'maxlen' defaults to its length.
+
+        If 'cdata' is a pointer or array of wchar_t, returns a unicode
+        string following the same rules.
+
+        If 'cdata' is a single character or byte or a wchar_t, returns
+        it as a string or unicode string.
+
+        If 'cdata' is an enum, returns the value of the enumerator as a
+        string, or 'NUMBER' if the value is out of range.
+        """
+        return self._backend.string(cdata, maxlen)
+
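A sketch of string() showing the NUL-termination and 'maxlen' rules described above:

    from cffi import FFI

    ffi = FFI()
    buf = ffi.new("char[]", b"hello\x00world")
    print(ffi.string(buf))       # b'hello' -- stops at the first NUL byte
    print(ffi.string(buf, 3))    # b'hel'   -- at most 'maxlen' characters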
+    def unpack(self, cdata, length):
+        """Unpack an array of C data of the given length,
+        returning a Python string/unicode/list.
+
+        If 'cdata' is a pointer to 'char', returns a byte string.
+        It does not stop at the first null.  This is equivalent to:
+        ffi.buffer(cdata, length)[:]
+
+        If 'cdata' is a pointer to 'wchar_t', returns a unicode string.
+        'length' is measured in wchar_t's; it is not the size in bytes.
+
+        If 'cdata' is a pointer to anything else, returns a list of
+        'length' items.  This is a faster equivalent to:
+        [cdata[i] for i in range(length)]
+        """
+        return self._backend.unpack(cdata, length)
+
+   #def buffer(self, cdata, size=-1):
+   #    """Return a read-write buffer object that references the raw C data
+   #    pointed to by the given 'cdata'.  The 'cdata' must be a pointer or
+   #    an array.  Can be passed to functions expecting a buffer, or directly
+   #    manipulated with:
+   #
+   #        buf[:]          get a copy of it in a regular string, or
+   #        buf[idx]        as a single character
+   #        buf[:] = ...
+   #        buf[idx] = ...  change the content
+   #    """
+   #    note that 'buffer' is a type, set on this instance by __init__
+
+    def from_buffer(self, cdecl, python_buffer=_unspecified,
+                    require_writable=False):
+        """Return a cdata of the given type pointing to the data of the
+        given Python object, which must support the buffer interface.
+        Note that this is not meant to be used on the built-in types
+        str or unicode (you can build 'char[]' arrays explicitly)
+        but only on objects containing large quantities of raw data
+        in some other format, like 'array.array' or numpy arrays.
+
+        The first argument is optional and defaults to 'char[]'.
+        """
+        if python_buffer is _unspecified:
+            cdecl, python_buffer = self.BCharA, cdecl
+        elif isinstance(cdecl, basestring):
+            cdecl = self._typeof(cdecl)
+        return self._backend.from_buffer(cdecl, python_buffer,
+                                         require_writable)
+
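A sketch of the typed from_buffer() form (first argument a C type), using array.array as the buffer-providing object:

    import array
    from cffi import FFI

    ffi = FFI()
    a = array.array("i", [1, 2, 3, 4])
    p = ffi.from_buffer("int[]", a, require_writable=True)   # no copy is made
    p[0] = 99
    print(a[0])                                               # 99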
+    def memmove(self, dest, src, n):
+        """ffi.memmove(dest, src, n) copies n bytes of memory from src to dest.
+
+        Like the C function memmove(), the memory areas may overlap;
+        apart from that it behaves like the C function memcpy().
+
+        'src' can be any cdata ptr or array, or any Python buffer object.
+        'dest' can be any cdata ptr or array, or a writable Python buffer
+        object.  The size to copy, 'n', is always measured in bytes.
+
+        Unlike other methods, this one supports all Python buffers, including
+        byte strings and bytearrays---but it still does not support
+        non-contiguous buffers.
+        """
+        return self._backend.memmove(dest, src, n)
+
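A sketch of memmove() in both directions, copying between cdata and plain Python buffers:

    from cffi import FFI

    ffi = FFI()
    dest = ffi.new("char[10]")
    ffi.memmove(dest, b"abcdef", 6)     # src may be any Python buffer, even bytes
    print(ffi.string(dest))             # b'abcdef'
    ba = bytearray(6)
    ffi.memmove(ba, dest, 6)            # dest may be a writable Python buffer
    print(ba)                           # bytearray(b'abcdef')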
+    def callback(self, cdecl, python_callable=None, error=None, onerror=None):
+        """Return a callback object or a decorator making such a
+        callback object.  'cdecl' must name a C function pointer type.
+        The callback invokes the specified 'python_callable' (which may
+        be provided either directly or via a decorator).  Important: the
+        callback object must be manually kept alive for as long as the
+        callback may be invoked from the C level.
+        """
+        def callback_decorator_wrap(python_callable):
+            if not callable(python_callable):
+                raise TypeError("the 'python_callable' argument "
+                                "is not callable")
+            return self._backend.callback(cdecl, python_callable,
+                                          error, onerror)
+        if isinstance(cdecl, basestring):
+            cdecl = self._typeof(cdecl, consider_function_as_funcptr=True)
+        if python_callable is None:
+            return callback_decorator_wrap                # decorator mode
+        else:
+            return callback_decorator_wrap(python_callable)  # direct mode
+
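A sketch of callback() in decorator mode; as the docstring stresses, the resulting object must stay referenced while C code may still call it:

    from cffi import FFI

    ffi = FFI()

    @ffi.callback("int(int, int)")
    def add(x, y):
        return x + y

    print(add(2, 3))    # the callback cdata is also callable from Python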
+    def getctype(self, cdecl, replace_with=''):
+        """Return a string giving the C type 'cdecl', which may be itself
+        a string or a <ctype> object.  If 'replace_with' is given, it gives
+        extra text to append (or insert for more complicated C types), like
+        a variable name, or '*' to actually get the C type 'pointer-to-cdecl'.
+        """
+        if isinstance(cdecl, basestring):
+            cdecl = self._typeof(cdecl)
+        replace_with = replace_with.strip()
+        if (replace_with.startswith('*')
+                and '&[' in self._backend.getcname(cdecl, '&')):
+            replace_with = '(%s)' % replace_with
+        elif replace_with and not replace_with[0] in '[(':
+            replace_with = ' ' + replace_with
+        return self._backend.getcname(cdecl, replace_with)
+
+    def gc(self, cdata, destructor, size=0):
+        """Return a new cdata object that points to the same
+        data.  Later, when this new cdata object is garbage-collected,
+        'destructor(old_cdata_object)' will be called.
+
+        The optional 'size' gives an estimate of the size, used to
+        trigger the garbage collection more eagerly.  So far only used
+        on PyPy.  It tells the GC that the returned object keeps alive
+        roughly 'size' bytes of external memory.
+        """
+        return self._backend.gcp(cdata, destructor, size)
+
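A sketch of gc() with a Python destructor; on CPython the destructor typically runs as soon as the wrapper is collected, while on PyPy it may be delayed:

    from cffi import FFI

    ffi = FFI()
    released = []
    p = ffi.new("int[16]")
    q = ffi.gc(p, released.append)   # destructor receives the original cdata
    del q                            # once 'q' is collected, released == [p]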
+    def _get_cached_btype(self, type):
+        assert self._lock.acquire(False) is False
+        # call me with the lock!
+        try:
+            BType = self._cached_btypes[type]
+        except KeyError:
+            finishlist = []
+            BType = type.get_cached_btype(self, finishlist)
+            for type in finishlist:
+                type.finish_backend_type(self, finishlist)
+        return BType
+
+    def verify(self, source='', tmpdir=None, **kwargs):
+        """Verify that the current ffi signatures compile on this
+        machine, and return a dynamic library object.  The dynamic
+        library can be used to call functions and access global
+        variables declared in this 'ffi'.  The library is compiled
+        by the C compiler: it gives you C-level API compatibility
+        (including calling macros).  This is unlike 'ffi.dlopen()',
+        which requires binary compatibility in the signatures.
+        """
+        from .verifier import Verifier, _caller_dir_pycache
+        #
+        # If set_unicode(True) was called, insert the UNICODE and
+        # _UNICODE macro declarations
+        if self._windows_unicode:
+            self._apply_windows_unicode(kwargs)
+        #
+        # Set the tmpdir here, and not in Verifier.__init__: it picks
+        # up the caller's directory, which we want to be the caller of
+        # ffi.verify(), as opposed to the caller of Verifier().
+        tmpdir = tmpdir or _caller_dir_pycache()
+        #
+        # Make a Verifier() and use it to load the library.
+        self.verifier = Verifier(self, source, tmpdir, **kwargs)
+        lib = self.verifier.load_library()
+        #
+        # Save the loaded library for keep-alive purposes, even
+        # if the caller doesn't keep it alive itself (it should).
+        self._libraries.append(lib)
+        return lib
+
+    def _get_errno(self):
+        return self._backend.get_errno()
+    def _set_errno(self, errno):
+        self._backend.set_errno(errno)
+    errno = property(_get_errno, _set_errno, None,
+                     "the value of 'errno' from/to the C calls")
+
+    def getwinerror(self, code=-1):
+        return self._backend.getwinerror(code)
+
+    def _pointer_to(self, ctype):
+        with self._lock:
+            return model.pointer_cache(self, ctype)
+
+    def addressof(self, cdata, *fields_or_indexes):
+        """Return the address of a <cdata 'struct-or-union'>.
+        If 'fields_or_indexes' are given, returns the address of that
+        field or array item in the structure or array, recursively in
+        case of nested structures.
+        """
+        try:
+            ctype = self._backend.typeof(cdata)
+        except TypeError:
+            if '__addressof__' in type(cdata).__dict__:
+                return type(cdata).__addressof__(cdata, *fields_or_indexes)
+            raise
+        if fields_or_indexes:
+            ctype, offset = self._typeoffsetof(ctype, *fields_or_indexes)
+        else:
+            if ctype.kind == "pointer":
+                raise TypeError("addressof(pointer)")
+            offset = 0
+        ctypeptr = self._pointer_to(ctype)
+        return self._backend.rawaddressof(ctypeptr, cdata, offset)
+
+    def _typeoffsetof(self, ctype, field_or_index, *fields_or_indexes):
+        ctype, offset = self._backend.typeoffsetof(ctype, field_or_index)
+        for field1 in fields_or_indexes:
+            ctype, offset1 = self._backend.typeoffsetof(ctype, field1, 1)
+            offset += offset1
+        return ctype, offset
+
+    def include(self, ffi_to_include):
+        """Includes the typedefs, structs, unions and enums defined
+        in another FFI instance.  Usage is similar to a #include in C,
+        where a part of the program might include types defined in
+        another part for its own usage.  Note that the include()
+        method has no effect on functions, constants and global
+        variables, which must anyway be accessed directly from the
+        lib object returned by the original FFI instance.
+        """
+        if not isinstance(ffi_to_include, FFI):
+            raise TypeError("ffi.include() expects an argument that is also of"
+                            " type cffi.FFI, not %r" % (
+                                type(ffi_to_include).__name__,))
+        if ffi_to_include is self:
+            raise ValueError("self.include(self)")
+        with ffi_to_include._lock:
+            with self._lock:
+                self._parser.include(ffi_to_include._parser)
+                self._cdefsources.append('[')
+                self._cdefsources.extend(ffi_to_include._cdefsources)
+                self._cdefsources.append(']')
+                self._included_ffis.append(ffi_to_include)
+
+    def new_handle(self, x):
+        return self._backend.newp_handle(self.BVoidP, x)
+
+    def from_handle(self, x):
+        return self._backend.from_handle(x)
+
+    def release(self, x):
+        self._backend.release(x)
+
+    def set_unicode(self, enabled_flag):
+        """Windows: if 'enabled_flag' is True, enable the UNICODE and
+        _UNICODE defines in C, and declare the types like TCHAR and LPTCSTR
+        to be (pointers to) wchar_t.  If 'enabled_flag' is False,
+        declare these types to be (pointers to) plain 8-bit characters.
+        This is mostly for backward compatibility; you usually want True.
+        """
+        if self._windows_unicode is not None:
+            raise ValueError("set_unicode() can only be called once")
+        enabled_flag = bool(enabled_flag)
+        if enabled_flag:
+            self.cdef("typedef wchar_t TBYTE;"
+                      "typedef wchar_t TCHAR;"
+                      "typedef const wchar_t *LPCTSTR;"
+                      "typedef const wchar_t *PCTSTR;"
+                      "typedef wchar_t *LPTSTR;"
+                      "typedef wchar_t *PTSTR;"
+                      "typedef TBYTE *PTBYTE;"
+                      "typedef TCHAR *PTCHAR;")
+        else:
+            self.cdef("typedef char TBYTE;"
+                      "typedef char TCHAR;"
+                      "typedef const char *LPCTSTR;"
+                      "typedef const char *PCTSTR;"
+                      "typedef char *LPTSTR;"
+                      "typedef char *PTSTR;"
+                      "typedef TBYTE *PTBYTE;"
+                      "typedef TCHAR *PTCHAR;")
+        self._windows_unicode = enabled_flag
+
+    def _apply_windows_unicode(self, kwds):
+        defmacros = kwds.get('define_macros', ())
+        if not isinstance(defmacros, (list, tuple)):
+            raise TypeError("'define_macros' must be a list or tuple")
+        defmacros = list(defmacros) + [('UNICODE', '1'),
+                                       ('_UNICODE', '1')]
+        kwds['define_macros'] = defmacros
+
+    def _apply_embedding_fix(self, kwds):
+        # must include an argument like "-lpython2.7" for the compiler
+        def ensure(key, value):
+            lst = kwds.setdefault(key, [])
+            if value not in lst:
+                lst.append(value)
+        #
+        if '__pypy__' in sys.builtin_module_names:
+            import os
+            if sys.platform == "win32":
+                # we need 'libpypy-c.lib'.  Current distributions of
+                # pypy (>= 4.1) contain it as 'libs/python27.lib'.
+                pythonlib = "python{0[0]}{0[1]}".format(sys.version_info)
+                if hasattr(sys, 'prefix'):
+                    ensure('library_dirs', os.path.join(sys.prefix, 'libs'))
+            else:
+                # we need 'libpypy-c.{so,dylib}', which should be by
+                # default located in 'sys.prefix/bin' for installed
+                # systems.
+                if sys.version_info < (3,):
+                    pythonlib = "pypy-c"
+                else:
+                    pythonlib = "pypy3-c"
+                if hasattr(sys, 'prefix'):
+                    ensure('library_dirs', os.path.join(sys.prefix, 'bin'))
+            # On uninstalled pypy's, the libpypy-c is typically found in
+            # .../pypy/goal/.
+            if hasattr(sys, 'prefix'):
+                ensure('library_dirs', os.path.join(sys.prefix, 'pypy', 'goal'))
+        else:
+            if sys.platform == "win32":
+                template = "python%d%d"
+                if hasattr(sys, 'gettotalrefcount'):
+                    template += '_d'
+            else:
+                try:
+                    import sysconfig
+                except ImportError:    # 2.6
+                    from distutils import sysconfig
+                template = "python%d.%d"
+                if sysconfig.get_config_var('DEBUG_EXT'):
+                    template += sysconfig.get_config_var('DEBUG_EXT')
+            pythonlib = (template %
+                    (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
+            if hasattr(sys, 'abiflags'):
+                pythonlib += sys.abiflags
+        ensure('libraries', pythonlib)
+        if sys.platform == "win32":
+            ensure('extra_link_args', '/MANIFEST')
+
+    def set_source(self, module_name, source, source_extension='.c', **kwds):
+        import os
+        if hasattr(self, '_assigned_source'):
+            raise ValueError("set_source() cannot be called several times "
+                             "per ffi object")
+        if not isinstance(module_name, basestring):
+            raise TypeError("'module_name' must be a string")
+        if os.sep in module_name or (os.altsep and os.altsep in module_name):
+            raise ValueError("'module_name' must not contain '/': use a dotted "
+                             "name to make a 'package.module' location")
+        self._assigned_source = (str(module_name), source,
+                                 source_extension, kwds)
+
+    def set_source_pkgconfig(self, module_name, pkgconfig_libs, source,
+                             source_extension='.c', **kwds):
+        from . import pkgconfig
+        if not isinstance(pkgconfig_libs, list):
+            raise TypeError("the pkgconfig_libs argument must be a list "
+                            "of package names")
+        kwds2 = pkgconfig.flags_from_pkgconfig(pkgconfig_libs)
+        pkgconfig.merge_flags(kwds, kwds2)
+        self.set_source(module_name, source, source_extension, **kwds)
+
+    def distutils_extension(self, tmpdir='build', verbose=True):
+        from distutils.dir_util import mkpath
+        from .recompiler import recompile
+        #
+        if not hasattr(self, '_assigned_source'):
+            if hasattr(self, 'verifier'):     # fallback, 'tmpdir' ignored
+                return self.verifier.get_extension()
+            raise ValueError("set_source() must be called before"
+                             " distutils_extension()")
+        module_name, source, source_extension, kwds = self._assigned_source
+        if source is None:
+            raise TypeError("distutils_extension() is only for C extension "
+                            "modules, not for dlopen()-style pure Python "
+                            "modules")
+        mkpath(tmpdir)
+        ext, updated = recompile(self, module_name,
+                                 source, tmpdir=tmpdir, extradir=tmpdir,
+                                 source_extension=source_extension,
+                                 call_c_compiler=False, **kwds)
+        if verbose:
+            if updated:
+                sys.stderr.write("regenerated: %r\n" % (ext.sources[0],))
+            else:
+                sys.stderr.write("not modified: %r\n" % (ext.sources[0],))
+        return ext
+
+    def emit_c_code(self, filename):
+        from .recompiler import recompile
+        #
+        if not hasattr(self, '_assigned_source'):
+            raise ValueError("set_source() must be called before emit_c_code()")
+        module_name, source, source_extension, kwds = self._assigned_source
+        if source is None:
+            raise TypeError("emit_c_code() is only for C extension modules, "
+                            "not for dlopen()-style pure Python modules")
+        recompile(self, module_name, source,
+                  c_file=filename, call_c_compiler=False, **kwds)
+
+    def emit_python_code(self, filename):
+        from .recompiler import recompile
+        #
+        if not hasattr(self, '_assigned_source'):
+            raise ValueError("set_source() must be called before emit_c_code()")
+        module_name, source, source_extension, kwds = self._assigned_source
+        if source is not None:
+            raise TypeError("emit_python_code() is only for dlopen()-style "
+                            "pure Python modules, not for C extension modules")
+        recompile(self, module_name, source,
+                  c_file=filename, call_c_compiler=False, **kwds)
+
+    def compile(self, tmpdir='.', verbose=0, target=None, debug=None):
+        """The 'target' argument gives the final file name of the
+        compiled DLL.  Use '*' to force distutils' choice, suitable for
+        regular CPython C API modules.  Use a file name ending in '.*'
+        to ask for the system's default extension for dynamic libraries
+        (.so/.dll/.dylib).
+
+        The default is '*' when building a non-embedded C API extension,
+        and (module_name + '.*') when building an embedded library.
+        """
+        from .recompiler import recompile
+        #
+        if not hasattr(self, '_assigned_source'):
+            raise ValueError("set_source() must be called before compile()")
+        module_name, source, source_extension, kwds = self._assigned_source
+        return recompile(self, module_name, source, tmpdir=tmpdir,
+                         target=target, source_extension=source_extension,
+                         compiler_verbose=verbose, debug=debug, **kwds)
+
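An out-of-line API-mode build sketch tying set_source() and compile() together; '_example_cffi' is an illustrative module name and a C compiler must be available:

    from cffi import FFI

    ffibuilder = FFI()
    ffibuilder.cdef("int add(int, int);")
    ffibuilder.set_source("_example_cffi",
        "static int add(int a, int b) { return a + b; }")

    if __name__ == "__main__":
        ffibuilder.compile(verbose=True)   # writes _example_cffi.c and builds it
    # afterwards:  from _example_cffi import ffi, lib;  lib.add(2, 3) == 5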
+    def init_once(self, func, tag):
+        # Read _init_once_cache[tag], which is either (False, lock) if
+        # we're calling the function now in some thread, or (True, result).
+        # Don't call setdefault() in most cases, to avoid allocating and
+        # immediately freeing a lock; but still use setdefault() to avoid
+        # races.
+        try:
+            x = self._init_once_cache[tag]
+        except KeyError:
+            x = self._init_once_cache.setdefault(tag, (False, allocate_lock()))
+        # Common case: we got (True, result), so we return the result.
+        if x[0]:
+            return x[1]
+        # Else, it's a lock.  Acquire it to serialize the following tests.
+        with x[1]:
+            # Read again from _init_once_cache the current status.
+            x = self._init_once_cache[tag]
+            if x[0]:
+                return x[1]
+            # Call the function and store the result back.
+            result = func()
+            self._init_once_cache[tag] = (True, result)
+        return result
+
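A sketch of init_once(); the function runs once per tag even if several threads race on the first call:

    from cffi import FFI

    ffi = FFI()

    def _setup():
        print("initializing...")            # printed only once
        return {"ready": True}

    state = ffi.init_once(_setup, "setup")  # first call: runs _setup()
    state = ffi.init_once(_setup, "setup")  # later calls: cached result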
+    def embedding_init_code(self, pysource):
+        if self._embedding:
+            raise ValueError("embedding_init_code() can only be called once")
+        # fix 'pysource' before it gets dumped into the C file:
+        # - remove empty lines at the beginning, so it starts at "line 1"
+        # - dedent, if all non-empty lines are indented
+        # - check for SyntaxErrors
+        import re
+        match = re.match(r'\s*\n', pysource)
+        if match:
+            pysource = pysource[match.end():]
+        lines = pysource.splitlines() or ['']
+        prefix = re.match(r'\s*', lines[0]).group()
+        for i in range(1, len(lines)):
+            line = lines[i]
+            if line.rstrip():
+                while not line.startswith(prefix):
+                    prefix = prefix[:-1]
+        i = len(prefix)
+        lines = [line[i:]+'\n' for line in lines]
+        pysource = ''.join(lines)
+        #
+        compile(pysource, "cffi_init", "exec")
+        #
+        self._embedding = pysource
+
+    def def_extern(self, *args, **kwds):
+        raise ValueError("ffi.def_extern() is only available on API-mode FFI "
+                         "objects")
+
+    def list_types(self):
+        """Returns the user type names known to this FFI instance.
+        This returns a tuple containing three lists of names:
+        (typedef_names, names_of_structs, names_of_unions)
+        """
+        typedefs = []
+        structs = []
+        unions = []
+        for key in self._parser._declarations:
+            if key.startswith('typedef '):
+                typedefs.append(key[8:])
+            elif key.startswith('struct '):
+                structs.append(key[7:])
+            elif key.startswith('union '):
+                unions.append(key[6:])
+        typedefs.sort()
+        structs.sort()
+        unions.sort()
+        return (typedefs, structs, unions)
+
+
+def _load_backend_lib(backend, name, flags):
+    import os
+    if not isinstance(name, basestring):
+        if sys.platform != "win32" or name is not None:
+            return backend.load_library(name, flags)
+        name = "c"    # Windows: load_library(None) fails, but this works
+                      # on Python 2 (backward compatibility hack only)
+    first_error = None
+    if '.' in name or '/' in name or os.sep in name:
+        try:
+            return backend.load_library(name, flags)
+        except OSError as e:
+            first_error = e
+    import ctypes.util
+    path = ctypes.util.find_library(name)
+    if path is None:
+        if name == "c" and sys.platform == "win32" and sys.version_info >= (3,):
+            raise OSError("dlopen(None) cannot work on Windows for Python 3 "
+                          "(see http://bugs.python.org/issue23606)")
+        msg = ("ctypes.util.find_library() did not manage "
+               "to locate a library called %r" % (name,))
+        if first_error is not None:
+            msg = "%s.  Additionally, %s" % (first_error, msg)
+        raise OSError(msg)
+    return backend.load_library(path, flags)
+
+def _make_ffi_library(ffi, libname, flags):
+    backend = ffi._backend
+    backendlib = _load_backend_lib(backend, libname, flags)
+    #
+    def accessor_function(name):
+        key = 'function ' + name
+        tp, _ = ffi._parser._declarations[key]
+        BType = ffi._get_cached_btype(tp)
+        value = backendlib.load_function(BType, name)
+        library.__dict__[name] = value
+    #
+    def accessor_variable(name):
+        key = 'variable ' + name
+        tp, _ = ffi._parser._declarations[key]
+        BType = ffi._get_cached_btype(tp)
+        read_variable = backendlib.read_variable
+        write_variable = backendlib.write_variable
+        setattr(FFILibrary, name, property(
+            lambda self: read_variable(BType, name),
+            lambda self, value: write_variable(BType, name, value)))
+    #
+    def addressof_var(name):
+        try:
+            return addr_variables[name]
+        except KeyError:
+            with ffi._lock:
+                if name not in addr_variables:
+                    key = 'variable ' + name
+                    tp, _ = ffi._parser._declarations[key]
+                    BType = ffi._get_cached_btype(tp)
+                    if BType.kind != 'array':
+                        BType = model.pointer_cache(ffi, BType)
+                    p = backendlib.load_function(BType, name)
+                    addr_variables[name] = p
+            return addr_variables[name]
+    #
+    def accessor_constant(name):
+        raise NotImplementedError("non-integer constant '%s' cannot be "
+                                  "accessed from a dlopen() library" % (name,))
+    #
+    def accessor_int_constant(name):
+        library.__dict__[name] = ffi._parser._int_constants[name]
+    #
+    accessors = {}
+    accessors_version = [False]
+    addr_variables = {}
+    #
+    def update_accessors():
+        if accessors_version[0] is ffi._cdef_version:
+            return
+        #
+        for key, (tp, _) in ffi._parser._declarations.items():
+            if not isinstance(tp, model.EnumType):
+                tag, name = key.split(' ', 1)
+                if tag == 'function':
+                    accessors[name] = accessor_function
+                elif tag == 'variable':
+                    accessors[name] = accessor_variable
+                elif tag == 'constant':
+                    accessors[name] = accessor_constant
+            else:
+                for i, enumname in enumerate(tp.enumerators):
+                    def accessor_enum(name, tp=tp, i=i):
+                        tp.check_not_partial()
+                        library.__dict__[name] = tp.enumvalues[i]
+                    accessors[enumname] = accessor_enum
+        for name in ffi._parser._int_constants:
+            accessors.setdefault(name, accessor_int_constant)
+        accessors_version[0] = ffi._cdef_version
+    #
+    def make_accessor(name):
+        with ffi._lock:
+            if name in library.__dict__ or name in FFILibrary.__dict__:
+                return    # added by another thread while waiting for the lock
+            if name not in accessors:
+                update_accessors()
+                if name not in accessors:
+                    raise AttributeError(name)
+            accessors[name](name)
+    #
+    class FFILibrary(object):
+        def __getattr__(self, name):
+            make_accessor(name)
+            return getattr(self, name)
+        def __setattr__(self, name, value):
+            try:
+                property = getattr(self.__class__, name)
+            except AttributeError:
+                make_accessor(name)
+                setattr(self, name, value)
+            else:
+                property.__set__(self, value)
+        def __dir__(self):
+            with ffi._lock:
+                update_accessors()
+                return accessors.keys()
+        def __addressof__(self, name):
+            if name in library.__dict__:
+                return library.__dict__[name]
+            if name in FFILibrary.__dict__:
+                return addressof_var(name)
+            make_accessor(name)
+            if name in library.__dict__:
+                return library.__dict__[name]
+            if name in FFILibrary.__dict__:
+                return addressof_var(name)
+            raise AttributeError("cffi library has no function or "
+                                 "global variable named '%s'" % (name,))
+        def __cffi_close__(self):
+            backendlib.close_lib()
+            self.__dict__.clear()
+    #
+    if isinstance(libname, basestring):
+        try:
+            if not isinstance(libname, str):    # unicode, on Python 2
+                libname = libname.encode('utf-8')
+            FFILibrary.__name__ = 'FFILibrary_%s' % libname
+        except UnicodeError:
+            pass
+    library = FFILibrary()
+    return library, library.__dict__
+
+def _builtin_function_type(func):
+    # a hack to make at least ffi.typeof(builtin_function) work,
+    # if the builtin function was obtained by 'vengine_cpy'.
+    import sys
+    try:
+        module = sys.modules[func.__module__]
+        ffi = module._cffi_original_ffi
+        types_of_builtin_funcs = module._cffi_types_of_builtin_funcs
+        tp = types_of_builtin_funcs[func]
+    except (KeyError, AttributeError, TypeError):
+        return None
+    else:
+        with ffi._lock:
+            return ffi._get_cached_btype(tp)

+ 1121 - 0
rest_api_venv/Lib/site-packages/cffi/backend_ctypes.py

@@ -0,0 +1,1121 @@
+import ctypes, ctypes.util, operator, sys
+from . import model
+
+if sys.version_info < (3,):
+    bytechr = chr
+else:
+    unicode = str
+    long = int
+    xrange = range
+    bytechr = lambda num: bytes([num])
+
+class CTypesType(type):
+    pass
+
+class CTypesData(object):
+    __metaclass__ = CTypesType
+    __slots__ = ['__weakref__']
+    __name__ = '<cdata>'
+
+    def __init__(self, *args):
+        raise TypeError("cannot instantiate %r" % (self.__class__,))
+
+    @classmethod
+    def _newp(cls, init):
+        raise TypeError("expected a pointer or array ctype, got '%s'"
+                        % (cls._get_c_name(),))
+
+    @staticmethod
+    def _to_ctypes(value):
+        raise TypeError
+
+    @classmethod
+    def _arg_to_ctypes(cls, *value):
+        try:
+            ctype = cls._ctype
+        except AttributeError:
+            raise TypeError("cannot create an instance of %r" % (cls,))
+        if value:
+            res = cls._to_ctypes(*value)
+            if not isinstance(res, ctype):
+                res = cls._ctype(res)
+        else:
+            res = cls._ctype()
+        return res
+
+    @classmethod
+    def _create_ctype_obj(cls, init):
+        if init is None:
+            return cls._arg_to_ctypes()
+        else:
+            return cls._arg_to_ctypes(init)
+
+    @staticmethod
+    def _from_ctypes(ctypes_value):
+        raise TypeError
+
+    @classmethod
+    def _get_c_name(cls, replace_with=''):
+        return cls._reftypename.replace(' &', replace_with)
+
+    @classmethod
+    def _fix_class(cls):
+        cls.__name__ = 'CData<%s>' % (cls._get_c_name(),)
+        cls.__qualname__ = 'CData<%s>' % (cls._get_c_name(),)
+        cls.__module__ = 'ffi'
+
+    def _get_own_repr(self):
+        raise NotImplementedError
+
+    def _addr_repr(self, address):
+        if address == 0:
+            return 'NULL'
+        else:
+            if address < 0:
+                address += 1 << (8*ctypes.sizeof(ctypes.c_void_p))
+            return '0x%x' % address
+
+    def __repr__(self, c_name=None):
+        own = self._get_own_repr()
+        return '<cdata %r %s>' % (c_name or self._get_c_name(), own)
+
+    def _convert_to_address(self, BClass):
+        if BClass is None:
+            raise TypeError("cannot convert %r to an address" % (
+                self._get_c_name(),))
+        else:
+            raise TypeError("cannot convert %r to %r" % (
+                self._get_c_name(), BClass._get_c_name()))
+
+    @classmethod
+    def _get_size(cls):
+        return ctypes.sizeof(cls._ctype)
+
+    def _get_size_of_instance(self):
+        return ctypes.sizeof(self._ctype)
+
+    @classmethod
+    def _cast_from(cls, source):
+        raise TypeError("cannot cast to %r" % (cls._get_c_name(),))
+
+    def _cast_to_integer(self):
+        return self._convert_to_address(None)
+
+    @classmethod
+    def _alignment(cls):
+        return ctypes.alignment(cls._ctype)
+
+    def __iter__(self):
+        raise TypeError("cdata %r does not support iteration" % (
+            self._get_c_name()),)
+
+    def _make_cmp(name):
+        cmpfunc = getattr(operator, name)
+        def cmp(self, other):
+            v_is_ptr = not isinstance(self, CTypesGenericPrimitive)
+            w_is_ptr = (isinstance(other, CTypesData) and
+                           not isinstance(other, CTypesGenericPrimitive))
+            if v_is_ptr and w_is_ptr:
+                return cmpfunc(self._convert_to_address(None),
+                               other._convert_to_address(None))
+            elif v_is_ptr or w_is_ptr:
+                return NotImplemented
+            else:
+                if isinstance(self, CTypesGenericPrimitive):
+                    self = self._value
+                if isinstance(other, CTypesGenericPrimitive):
+                    other = other._value
+                return cmpfunc(self, other)
+        cmp.func_name = name
+        return cmp
+
+    __eq__ = _make_cmp('__eq__')
+    __ne__ = _make_cmp('__ne__')
+    __lt__ = _make_cmp('__lt__')
+    __le__ = _make_cmp('__le__')
+    __gt__ = _make_cmp('__gt__')
+    __ge__ = _make_cmp('__ge__')
+
+    def __hash__(self):
+        return hash(self._convert_to_address(None))
+
+    def _to_string(self, maxlen):
+        raise TypeError("string(): %r" % (self,))
+
+
+class CTypesGenericPrimitive(CTypesData):
+    __slots__ = []
+
+    def __hash__(self):
+        return hash(self._value)
+
+    def _get_own_repr(self):
+        return repr(self._from_ctypes(self._value))
+
+
+class CTypesGenericArray(CTypesData):
+    __slots__ = []
+
+    @classmethod
+    def _newp(cls, init):
+        return cls(init)
+
+    def __iter__(self):
+        for i in xrange(len(self)):
+            yield self[i]
+
+    def _get_own_repr(self):
+        return self._addr_repr(ctypes.addressof(self._blob))
+
+
+class CTypesGenericPtr(CTypesData):
+    __slots__ = ['_address', '_as_ctype_ptr']
+    _automatic_casts = False
+    kind = "pointer"
+
+    @classmethod
+    def _newp(cls, init):
+        return cls(init)
+
+    @classmethod
+    def _cast_from(cls, source):
+        if source is None:
+            address = 0
+        elif isinstance(source, CTypesData):
+            address = source._cast_to_integer()
+        elif isinstance(source, (int, long)):
+            address = source
+        else:
+            raise TypeError("bad type for cast to %r: %r" %
+                            (cls, type(source).__name__))
+        return cls._new_pointer_at(address)
+
+    @classmethod
+    def _new_pointer_at(cls, address):
+        self = cls.__new__(cls)
+        self._address = address
+        self._as_ctype_ptr = ctypes.cast(address, cls._ctype)
+        return self
+
+    def _get_own_repr(self):
+        try:
+            return self._addr_repr(self._address)
+        except AttributeError:
+            return '???'
+
+    def _cast_to_integer(self):
+        return self._address
+
+    def __nonzero__(self):
+        return bool(self._address)
+    __bool__ = __nonzero__
+
+    @classmethod
+    def _to_ctypes(cls, value):
+        if not isinstance(value, CTypesData):
+            raise TypeError("unexpected %s object" % type(value).__name__)
+        address = value._convert_to_address(cls)
+        return ctypes.cast(address, cls._ctype)
+
+    @classmethod
+    def _from_ctypes(cls, ctypes_ptr):
+        address = ctypes.cast(ctypes_ptr, ctypes.c_void_p).value or 0
+        return cls._new_pointer_at(address)
+
+    @classmethod
+    def _initialize(cls, ctypes_ptr, value):
+        if value:
+            ctypes_ptr.contents = cls._to_ctypes(value).contents
+
+    def _convert_to_address(self, BClass):
+        if (BClass in (self.__class__, None) or BClass._automatic_casts
+            or self._automatic_casts):
+            return self._address
+        else:
+            return CTypesData._convert_to_address(self, BClass)
+
+
+class CTypesBaseStructOrUnion(CTypesData):
+    __slots__ = ['_blob']
+
+    @classmethod
+    def _create_ctype_obj(cls, init):
+        # may be overridden
+        raise TypeError("cannot instantiate opaque type %s" % (cls,))
+
+    def _get_own_repr(self):
+        return self._addr_repr(ctypes.addressof(self._blob))
+
+    @classmethod
+    def _offsetof(cls, fieldname):
+        return getattr(cls._ctype, fieldname).offset
+
+    def _convert_to_address(self, BClass):
+        if getattr(BClass, '_BItem', None) is self.__class__:
+            return ctypes.addressof(self._blob)
+        else:
+            return CTypesData._convert_to_address(self, BClass)
+
+    @classmethod
+    def _from_ctypes(cls, ctypes_struct_or_union):
+        self = cls.__new__(cls)
+        self._blob = ctypes_struct_or_union
+        return self
+
+    @classmethod
+    def _to_ctypes(cls, value):
+        return value._blob
+
+    def __repr__(self, c_name=None):
+        return CTypesData.__repr__(self, c_name or self._get_c_name(' &'))
+
+
+class CTypesBackend(object):
+
+    PRIMITIVE_TYPES = {
+        'char': ctypes.c_char,
+        'short': ctypes.c_short,
+        'int': ctypes.c_int,
+        'long': ctypes.c_long,
+        'long long': ctypes.c_longlong,
+        'signed char': ctypes.c_byte,
+        'unsigned char': ctypes.c_ubyte,
+        'unsigned short': ctypes.c_ushort,
+        'unsigned int': ctypes.c_uint,
+        'unsigned long': ctypes.c_ulong,
+        'unsigned long long': ctypes.c_ulonglong,
+        'float': ctypes.c_float,
+        'double': ctypes.c_double,
+        '_Bool': ctypes.c_bool,
+        }
+
+    for _name in ['unsigned long long', 'unsigned long',
+                  'unsigned int', 'unsigned short', 'unsigned char']:
+        _size = ctypes.sizeof(PRIMITIVE_TYPES[_name])
+        PRIMITIVE_TYPES['uint%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name]
+        if _size == ctypes.sizeof(ctypes.c_void_p):
+            PRIMITIVE_TYPES['uintptr_t'] = PRIMITIVE_TYPES[_name]
+        if _size == ctypes.sizeof(ctypes.c_size_t):
+            PRIMITIVE_TYPES['size_t'] = PRIMITIVE_TYPES[_name]
+
+    for _name in ['long long', 'long', 'int', 'short', 'signed char']:
+        _size = ctypes.sizeof(PRIMITIVE_TYPES[_name])
+        PRIMITIVE_TYPES['int%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name]
+        if _size == ctypes.sizeof(ctypes.c_void_p):
+            PRIMITIVE_TYPES['intptr_t'] = PRIMITIVE_TYPES[_name]
+            PRIMITIVE_TYPES['ptrdiff_t'] = PRIMITIVE_TYPES[_name]
+        if _size == ctypes.sizeof(ctypes.c_size_t):
+            PRIMITIVE_TYPES['ssize_t'] = PRIMITIVE_TYPES[_name]
+
+
+    def __init__(self):
+        self.RTLD_LAZY = 0   # not supported anyway by ctypes
+        self.RTLD_NOW  = 0
+        self.RTLD_GLOBAL = ctypes.RTLD_GLOBAL
+        self.RTLD_LOCAL = ctypes.RTLD_LOCAL
+
+    def set_ffi(self, ffi):
+        self.ffi = ffi
+
+    def _get_types(self):
+        return CTypesData, CTypesType
+
+    def load_library(self, path, flags=0):
+        cdll = ctypes.CDLL(path, flags)
+        return CTypesLibrary(self, cdll)
+
+    def new_void_type(self):
+        class CTypesVoid(CTypesData):
+            __slots__ = []
+            _reftypename = 'void &'
+            @staticmethod
+            def _from_ctypes(novalue):
+                return None
+            @staticmethod
+            def _to_ctypes(novalue):
+                if novalue is not None:
+                    raise TypeError("None expected, got %s object" %
+                                    (type(novalue).__name__,))
+                return None
+        CTypesVoid._fix_class()
+        return CTypesVoid
+
+    def new_primitive_type(self, name):
+        if name == 'wchar_t':
+            raise NotImplementedError(name)
+        ctype = self.PRIMITIVE_TYPES[name]
+        if name == 'char':
+            kind = 'char'
+        elif name in ('float', 'double'):
+            kind = 'float'
+        else:
+            if name in ('signed char', 'unsigned char'):
+                kind = 'byte'
+            elif name == '_Bool':
+                kind = 'bool'
+            else:
+                kind = 'int'
+            is_signed = (ctype(-1).value == -1)
+        #
+        def _cast_source_to_int(source):
+            if isinstance(source, (int, long, float)):
+                source = int(source)
+            elif isinstance(source, CTypesData):
+                source = source._cast_to_integer()
+            elif isinstance(source, bytes):
+                source = ord(source)
+            elif source is None:
+                source = 0
+            else:
+                raise TypeError("bad type for cast to %r: %r" %
+                                (CTypesPrimitive, type(source).__name__))
+            return source
+        #
+        kind1 = kind
+        class CTypesPrimitive(CTypesGenericPrimitive):
+            __slots__ = ['_value']
+            _ctype = ctype
+            _reftypename = '%s &' % name
+            kind = kind1
+
+            def __init__(self, value):
+                self._value = value
+
+            @staticmethod
+            def _create_ctype_obj(init):
+                if init is None:
+                    return ctype()
+                return ctype(CTypesPrimitive._to_ctypes(init))
+
+            if kind == 'int' or kind == 'byte':
+                @classmethod
+                def _cast_from(cls, source):
+                    source = _cast_source_to_int(source)
+                    source = ctype(source).value     # cast within range
+                    return cls(source)
+                def __int__(self):
+                    return self._value
+
+            if kind == 'bool':
+                @classmethod
+                def _cast_from(cls, source):
+                    if not isinstance(source, (int, long, float)):
+                        source = _cast_source_to_int(source)
+                    return cls(bool(source))
+                def __int__(self):
+                    return int(self._value)
+
+            if kind == 'char':
+                @classmethod
+                def _cast_from(cls, source):
+                    source = _cast_source_to_int(source)
+                    source = bytechr(source & 0xFF)
+                    return cls(source)
+                def __int__(self):
+                    return ord(self._value)
+
+            if kind == 'float':
+                @classmethod
+                def _cast_from(cls, source):
+                    if isinstance(source, float):
+                        pass
+                    elif isinstance(source, CTypesGenericPrimitive):
+                        if hasattr(source, '__float__'):
+                            source = float(source)
+                        else:
+                            source = int(source)
+                    else:
+                        source = _cast_source_to_int(source)
+                    source = ctype(source).value     # fix precision
+                    return cls(source)
+                def __int__(self):
+                    return int(self._value)
+                def __float__(self):
+                    return self._value
+
+            _cast_to_integer = __int__
+
+            if kind == 'int' or kind == 'byte' or kind == 'bool':
+                @staticmethod
+                def _to_ctypes(x):
+                    if not isinstance(x, (int, long)):
+                        if isinstance(x, CTypesData):
+                            x = int(x)
+                        else:
+                            raise TypeError("integer expected, got %s" %
+                                            type(x).__name__)
+                    if ctype(x).value != x:
+                        if not is_signed and x < 0:
+                            raise OverflowError("%s: negative integer" % name)
+                        else:
+                            raise OverflowError("%s: integer out of bounds"
+                                                % name)
+                    return x
+
+            if kind == 'char':
+                @staticmethod
+                def _to_ctypes(x):
+                    if isinstance(x, bytes) and len(x) == 1:
+                        return x
+                    if isinstance(x, CTypesPrimitive):    # <CData <char>>
+                        return x._value
+                    raise TypeError("character expected, got %s" %
+                                    type(x).__name__)
+                def __nonzero__(self):
+                    return ord(self._value) != 0
+            else:
+                def __nonzero__(self):
+                    return self._value != 0
+            __bool__ = __nonzero__
+
+            if kind == 'float':
+                @staticmethod
+                def _to_ctypes(x):
+                    if not isinstance(x, (int, long, float, CTypesData)):
+                        raise TypeError("float expected, got %s" %
+                                        type(x).__name__)
+                    return ctype(x).value
+
+            @staticmethod
+            def _from_ctypes(value):
+                return getattr(value, 'value', value)
+
+            @staticmethod
+            def _initialize(blob, init):
+                blob.value = CTypesPrimitive._to_ctypes(init)
+
+            if kind == 'char':
+                def _to_string(self, maxlen):
+                    return self._value
+            if kind == 'byte':
+                def _to_string(self, maxlen):
+                    return chr(self._value & 0xff)
+        #
+        CTypesPrimitive._fix_class()
+        return CTypesPrimitive
+
+    def new_pointer_type(self, BItem):
+        getbtype = self.ffi._get_cached_btype
+        if BItem is getbtype(model.PrimitiveType('char')):
+            kind = 'charp'
+        elif BItem in (getbtype(model.PrimitiveType('signed char')),
+                       getbtype(model.PrimitiveType('unsigned char'))):
+            kind = 'bytep'
+        elif BItem is getbtype(model.void_type):
+            kind = 'voidp'
+        else:
+            kind = 'generic'
+        #
+        class CTypesPtr(CTypesGenericPtr):
+            __slots__ = ['_own']
+            if kind == 'charp':
+                __slots__ += ['__as_strbuf']
+            _BItem = BItem
+            if hasattr(BItem, '_ctype'):
+                _ctype = ctypes.POINTER(BItem._ctype)
+                _bitem_size = ctypes.sizeof(BItem._ctype)
+            else:
+                _ctype = ctypes.c_void_p
+            if issubclass(BItem, CTypesGenericArray):
+                _reftypename = BItem._get_c_name('(* &)')
+            else:
+                _reftypename = BItem._get_c_name(' * &')
+
+            def __init__(self, init):
+                ctypeobj = BItem._create_ctype_obj(init)
+                if kind == 'charp':
+                    self.__as_strbuf = ctypes.create_string_buffer(
+                        ctypeobj.value + b'\x00')
+                    self._as_ctype_ptr = ctypes.cast(
+                        self.__as_strbuf, self._ctype)
+                else:
+                    self._as_ctype_ptr = ctypes.pointer(ctypeobj)
+                self._address = ctypes.cast(self._as_ctype_ptr,
+                                            ctypes.c_void_p).value
+                self._own = True
+
+            def __add__(self, other):
+                if isinstance(other, (int, long)):
+                    return self._new_pointer_at(self._address +
+                                                other * self._bitem_size)
+                else:
+                    return NotImplemented
+
+            def __sub__(self, other):
+                if isinstance(other, (int, long)):
+                    return self._new_pointer_at(self._address -
+                                                other * self._bitem_size)
+                elif type(self) is type(other):
+                    return (self._address - other._address) // self._bitem_size
+                else:
+                    return NotImplemented
+
+            def __getitem__(self, index):
+                if getattr(self, '_own', False) and index != 0:
+                    raise IndexError
+                return BItem._from_ctypes(self._as_ctype_ptr[index])
+
+            def __setitem__(self, index, value):
+                self._as_ctype_ptr[index] = BItem._to_ctypes(value)
+
+            if kind == 'charp' or kind == 'voidp':
+                @classmethod
+                def _arg_to_ctypes(cls, *value):
+                    if value and isinstance(value[0], bytes):
+                        return ctypes.c_char_p(value[0])
+                    else:
+                        return super(CTypesPtr, cls)._arg_to_ctypes(*value)
+
+            if kind == 'charp' or kind == 'bytep':
+                def _to_string(self, maxlen):
+                    if maxlen < 0:
+                        maxlen = sys.maxsize
+                    p = ctypes.cast(self._as_ctype_ptr,
+                                    ctypes.POINTER(ctypes.c_char))
+                    n = 0
+                    while n < maxlen and p[n] != b'\x00':
+                        n += 1
+                    return b''.join([p[i] for i in range(n)])
+
+            def _get_own_repr(self):
+                if getattr(self, '_own', False):
+                    return 'owning %d bytes' % (
+                        ctypes.sizeof(self._as_ctype_ptr.contents),)
+                return super(CTypesPtr, self)._get_own_repr()
+        #
+        if (BItem is self.ffi._get_cached_btype(model.void_type) or
+            BItem is self.ffi._get_cached_btype(model.PrimitiveType('char'))):
+            CTypesPtr._automatic_casts = True
+        #
+        CTypesPtr._fix_class()
+        return CTypesPtr
+
+    def new_array_type(self, CTypesPtr, length):
+        if length is None:
+            brackets = ' &[]'
+        else:
+            brackets = ' &[%d]' % length
+        BItem = CTypesPtr._BItem
+        getbtype = self.ffi._get_cached_btype
+        if BItem is getbtype(model.PrimitiveType('char')):
+            kind = 'char'
+        elif BItem in (getbtype(model.PrimitiveType('signed char')),
+                       getbtype(model.PrimitiveType('unsigned char'))):
+            kind = 'byte'
+        else:
+            kind = 'generic'
+        #
+        class CTypesArray(CTypesGenericArray):
+            __slots__ = ['_blob', '_own']
+            if length is not None:
+                _ctype = BItem._ctype * length
+            else:
+                __slots__.append('_ctype')
+            _reftypename = BItem._get_c_name(brackets)
+            _declared_length = length
+            _CTPtr = CTypesPtr
+
+            def __init__(self, init):
+                if length is None:
+                    if isinstance(init, (int, long)):
+                        len1 = init
+                        init = None
+                    elif kind == 'char' and isinstance(init, bytes):
+                        len1 = len(init) + 1    # extra null
+                    else:
+                        init = tuple(init)
+                        len1 = len(init)
+                    self._ctype = BItem._ctype * len1
+                self._blob = self._ctype()
+                self._own = True
+                if init is not None:
+                    self._initialize(self._blob, init)
+
+            @staticmethod
+            def _initialize(blob, init):
+                if isinstance(init, bytes):
+                    init = [init[i:i+1] for i in range(len(init))]
+                else:
+                    if isinstance(init, CTypesGenericArray):
+                        if (len(init) != len(blob) or
+                            not isinstance(init, CTypesArray)):
+                            raise TypeError("length/type mismatch: %s" % (init,))
+                    init = tuple(init)
+                if len(init) > len(blob):
+                    raise IndexError("too many initializers")
+                addr = ctypes.cast(blob, ctypes.c_void_p).value
+                PTR = ctypes.POINTER(BItem._ctype)
+                itemsize = ctypes.sizeof(BItem._ctype)
+                for i, value in enumerate(init):
+                    p = ctypes.cast(addr + i * itemsize, PTR)
+                    BItem._initialize(p.contents, value)
+
+            def __len__(self):
+                return len(self._blob)
+
+            def __getitem__(self, index):
+                if not (0 <= index < len(self._blob)):
+                    raise IndexError
+                return BItem._from_ctypes(self._blob[index])
+
+            def __setitem__(self, index, value):
+                if not (0 <= index < len(self._blob)):
+                    raise IndexError
+                self._blob[index] = BItem._to_ctypes(value)
+
+            if kind == 'char' or kind == 'byte':
+                def _to_string(self, maxlen):
+                    if maxlen < 0:
+                        maxlen = len(self._blob)
+                    p = ctypes.cast(self._blob,
+                                    ctypes.POINTER(ctypes.c_char))
+                    n = 0
+                    while n < maxlen and p[n] != b'\x00':
+                        n += 1
+                    return b''.join([p[i] for i in range(n)])
+
+            def _get_own_repr(self):
+                if getattr(self, '_own', False):
+                    return 'owning %d bytes' % (ctypes.sizeof(self._blob),)
+                return super(CTypesArray, self)._get_own_repr()
+
+            def _convert_to_address(self, BClass):
+                if BClass in (CTypesPtr, None) or BClass._automatic_casts:
+                    return ctypes.addressof(self._blob)
+                else:
+                    return CTypesData._convert_to_address(self, BClass)
+
+            @staticmethod
+            def _from_ctypes(ctypes_array):
+                self = CTypesArray.__new__(CTypesArray)
+                self._blob = ctypes_array
+                return self
+
+            @staticmethod
+            def _arg_to_ctypes(value):
+                return CTypesPtr._arg_to_ctypes(value)
+
+            def __add__(self, other):
+                if isinstance(other, (int, long)):
+                    return CTypesPtr._new_pointer_at(
+                        ctypes.addressof(self._blob) +
+                        other * ctypes.sizeof(BItem._ctype))
+                else:
+                    return NotImplemented
+
+            @classmethod
+            def _cast_from(cls, source):
+                raise NotImplementedError("casting to %r" % (
+                    cls._get_c_name(),))
+        #
+        CTypesArray._fix_class()
+        return CTypesArray
+
+    def _new_struct_or_union(self, kind, name, base_ctypes_class):
+        #
+        class struct_or_union(base_ctypes_class):
+            pass
+        struct_or_union.__name__ = '%s_%s' % (kind, name)
+        kind1 = kind
+        #
+        class CTypesStructOrUnion(CTypesBaseStructOrUnion):
+            __slots__ = ['_blob']
+            _ctype = struct_or_union
+            _reftypename = '%s &' % (name,)
+            _kind = kind = kind1
+        #
+        CTypesStructOrUnion._fix_class()
+        return CTypesStructOrUnion
+
+    def new_struct_type(self, name):
+        return self._new_struct_or_union('struct', name, ctypes.Structure)
+
+    def new_union_type(self, name):
+        return self._new_struct_or_union('union', name, ctypes.Union)
+
+    def complete_struct_or_union(self, CTypesStructOrUnion, fields, tp,
+                                 totalsize=-1, totalalignment=-1, sflags=0,
+                                 pack=0):
+        if totalsize >= 0 or totalalignment >= 0:
+            raise NotImplementedError("the ctypes backend of CFFI does not support "
+                                      "structures completed by verify(); please "
+                                      "compile and install the _cffi_backend module.")
+        struct_or_union = CTypesStructOrUnion._ctype
+        fnames = [fname for (fname, BField, bitsize) in fields]
+        btypes = [BField for (fname, BField, bitsize) in fields]
+        bitfields = [bitsize for (fname, BField, bitsize) in fields]
+        #
+        bfield_types = {}
+        cfields = []
+        for (fname, BField, bitsize) in fields:
+            if bitsize < 0:
+                cfields.append((fname, BField._ctype))
+                bfield_types[fname] = BField
+            else:
+                cfields.append((fname, BField._ctype, bitsize))
+                bfield_types[fname] = Ellipsis
+        if sflags & 8:
+            struct_or_union._pack_ = 1
+        elif pack:
+            struct_or_union._pack_ = pack
+        struct_or_union._fields_ = cfields
+        CTypesStructOrUnion._bfield_types = bfield_types
+        #
+        @staticmethod
+        def _create_ctype_obj(init):
+            result = struct_or_union()
+            if init is not None:
+                initialize(result, init)
+            return result
+        CTypesStructOrUnion._create_ctype_obj = _create_ctype_obj
+        #
+        def initialize(blob, init):
+            if is_union:
+                if len(init) > 1:
+                    raise ValueError("union initializer: %d items given, but "
+                                    "only one supported (use a dict if needed)"
+                                     % (len(init),))
+            if not isinstance(init, dict):
+                if isinstance(init, (bytes, unicode)):
+                    raise TypeError("union initializer: got a str")
+                init = tuple(init)
+                if len(init) > len(fnames):
+                    raise ValueError("too many values for %s initializer" %
+                                     CTypesStructOrUnion._get_c_name())
+                init = dict(zip(fnames, init))
+            addr = ctypes.addressof(blob)
+            for fname, value in init.items():
+                BField, bitsize = name2fieldtype[fname]
+                assert bitsize < 0, \
+                       "not implemented: initializer with bit fields"
+                offset = CTypesStructOrUnion._offsetof(fname)
+                PTR = ctypes.POINTER(BField._ctype)
+                p = ctypes.cast(addr + offset, PTR)
+                BField._initialize(p.contents, value)
+        is_union = CTypesStructOrUnion._kind == 'union'
+        name2fieldtype = dict(zip(fnames, zip(btypes, bitfields)))
+        #
+        for fname, BField, bitsize in fields:
+            if fname == '':
+                raise NotImplementedError("nested anonymous structs/unions")
+            if hasattr(CTypesStructOrUnion, fname):
+                raise ValueError("the field name %r conflicts in "
+                                 "the ctypes backend" % fname)
+            if bitsize < 0:
+                def getter(self, fname=fname, BField=BField,
+                           offset=CTypesStructOrUnion._offsetof(fname),
+                           PTR=ctypes.POINTER(BField._ctype)):
+                    addr = ctypes.addressof(self._blob)
+                    p = ctypes.cast(addr + offset, PTR)
+                    return BField._from_ctypes(p.contents)
+                def setter(self, value, fname=fname, BField=BField):
+                    setattr(self._blob, fname, BField._to_ctypes(value))
+                #
+                if issubclass(BField, CTypesGenericArray):
+                    setter = None
+                    if BField._declared_length == 0:
+                        def getter(self, fname=fname, BFieldPtr=BField._CTPtr,
+                                   offset=CTypesStructOrUnion._offsetof(fname),
+                                   PTR=ctypes.POINTER(BField._ctype)):
+                            addr = ctypes.addressof(self._blob)
+                            p = ctypes.cast(addr + offset, PTR)
+                            return BFieldPtr._from_ctypes(p)
+                #
+            else:
+                def getter(self, fname=fname, BField=BField):
+                    return BField._from_ctypes(getattr(self._blob, fname))
+                def setter(self, value, fname=fname, BField=BField):
+                    # xxx obscure workaround
+                    value = BField._to_ctypes(value)
+                    oldvalue = getattr(self._blob, fname)
+                    setattr(self._blob, fname, value)
+                    if value != getattr(self._blob, fname):
+                        setattr(self._blob, fname, oldvalue)
+                        raise OverflowError("value too large for bitfield")
+            setattr(CTypesStructOrUnion, fname, property(getter, setter))
+        #
+        CTypesPtr = self.ffi._get_cached_btype(model.PointerType(tp))
+        for fname in fnames:
+            if hasattr(CTypesPtr, fname):
+                raise ValueError("the field name %r conflicts in "
+                                 "the ctypes backend" % fname)
+            def getter(self, fname=fname):
+                return getattr(self[0], fname)
+            def setter(self, value, fname=fname):
+                setattr(self[0], fname, value)
+            setattr(CTypesPtr, fname, property(getter, setter))
+
+    def new_function_type(self, BArgs, BResult, has_varargs):
+        nameargs = [BArg._get_c_name() for BArg in BArgs]
+        if has_varargs:
+            nameargs.append('...')
+        nameargs = ', '.join(nameargs)
+        #
+        class CTypesFunctionPtr(CTypesGenericPtr):
+            __slots__ = ['_own_callback', '_name']
+            _ctype = ctypes.CFUNCTYPE(getattr(BResult, '_ctype', None),
+                                      *[BArg._ctype for BArg in BArgs],
+                                      use_errno=True)
+            _reftypename = BResult._get_c_name('(* &)(%s)' % (nameargs,))
+
+            def __init__(self, init, error=None):
+                # create a callback to the Python callable init()
+                import traceback
+                assert not has_varargs, "varargs not supported for callbacks"
+                if getattr(BResult, '_ctype', None) is not None:
+                    error = BResult._from_ctypes(
+                        BResult._create_ctype_obj(error))
+                else:
+                    error = None
+                def callback(*args):
+                    args2 = []
+                    for arg, BArg in zip(args, BArgs):
+                        args2.append(BArg._from_ctypes(arg))
+                    try:
+                        res2 = init(*args2)
+                        res2 = BResult._to_ctypes(res2)
+                    except:
+                        traceback.print_exc()
+                        res2 = error
+                    if issubclass(BResult, CTypesGenericPtr):
+                        if res2:
+                            res2 = ctypes.cast(res2, ctypes.c_void_p).value
+                                # .value: http://bugs.python.org/issue1574593
+                        else:
+                            res2 = None
+                    #print repr(res2)
+                    return res2
+                if issubclass(BResult, CTypesGenericPtr):
+                    # The only pointers callbacks can return are void*s:
+                    # http://bugs.python.org/issue5710
+                    callback_ctype = ctypes.CFUNCTYPE(
+                        ctypes.c_void_p,
+                        *[BArg._ctype for BArg in BArgs],
+                        use_errno=True)
+                else:
+                    callback_ctype = CTypesFunctionPtr._ctype
+                self._as_ctype_ptr = callback_ctype(callback)
+                self._address = ctypes.cast(self._as_ctype_ptr,
+                                            ctypes.c_void_p).value
+                self._own_callback = init
+
+            @staticmethod
+            def _initialize(ctypes_ptr, value):
+                if value:
+                    raise NotImplementedError("ctypes backend: not supported: "
+                                          "initializers for function pointers")
+
+            def __repr__(self):
+                c_name = getattr(self, '_name', None)
+                if c_name:
+                    i = self._reftypename.index('(* &)')
+                    if self._reftypename[i-1] not in ' )*':
+                        c_name = ' ' + c_name
+                    c_name = self._reftypename.replace('(* &)', c_name)
+                return CTypesData.__repr__(self, c_name)
+
+            def _get_own_repr(self):
+                if getattr(self, '_own_callback', None) is not None:
+                    return 'calling %r' % (self._own_callback,)
+                return super(CTypesFunctionPtr, self)._get_own_repr()
+
+            def __call__(self, *args):
+                if has_varargs:
+                    assert len(args) >= len(BArgs)
+                    extraargs = args[len(BArgs):]
+                    args = args[:len(BArgs)]
+                else:
+                    assert len(args) == len(BArgs)
+                ctypes_args = []
+                for arg, BArg in zip(args, BArgs):
+                    ctypes_args.append(BArg._arg_to_ctypes(arg))
+                if has_varargs:
+                    for i, arg in enumerate(extraargs):
+                        if arg is None:
+                            ctypes_args.append(ctypes.c_void_p(0))  # NULL
+                            continue
+                        if not isinstance(arg, CTypesData):
+                            raise TypeError(
+                                "argument %d passed in the variadic part "
+                                "needs to be a cdata object (got %s)" %
+                                (1 + len(BArgs) + i, type(arg).__name__))
+                        ctypes_args.append(arg._arg_to_ctypes(arg))
+                result = self._as_ctype_ptr(*ctypes_args)
+                return BResult._from_ctypes(result)
+        #
+        CTypesFunctionPtr._fix_class()
+        return CTypesFunctionPtr
+
+    def new_enum_type(self, name, enumerators, enumvalues, CTypesInt):
+        assert isinstance(name, str)
+        reverse_mapping = dict(zip(reversed(enumvalues),
+                                   reversed(enumerators)))
+        #
+        class CTypesEnum(CTypesInt):
+            __slots__ = []
+            _reftypename = '%s &' % name
+
+            def _get_own_repr(self):
+                value = self._value
+                try:
+                    return '%d: %s' % (value, reverse_mapping[value])
+                except KeyError:
+                    return str(value)
+
+            def _to_string(self, maxlen):
+                value = self._value
+                try:
+                    return reverse_mapping[value]
+                except KeyError:
+                    return str(value)
+        #
+        CTypesEnum._fix_class()
+        return CTypesEnum
+
+    def get_errno(self):
+        return ctypes.get_errno()
+
+    def set_errno(self, value):
+        ctypes.set_errno(value)
+
+    def string(self, b, maxlen=-1):
+        return b._to_string(maxlen)
+
+    def buffer(self, bptr, size=-1):
+        raise NotImplementedError("buffer() with ctypes backend")
+
+    def sizeof(self, cdata_or_BType):
+        if isinstance(cdata_or_BType, CTypesData):
+            return cdata_or_BType._get_size_of_instance()
+        else:
+            assert issubclass(cdata_or_BType, CTypesData)
+            return cdata_or_BType._get_size()
+
+    def alignof(self, BType):
+        assert issubclass(BType, CTypesData)
+        return BType._alignment()
+
+    def newp(self, BType, source):
+        if not issubclass(BType, CTypesData):
+            raise TypeError
+        return BType._newp(source)
+
+    def cast(self, BType, source):
+        return BType._cast_from(source)
+
+    def callback(self, BType, source, error, onerror):
+        assert onerror is None   # XXX not implemented
+        return BType(source, error)
+
+    _weakref_cache_ref = None
+
+    def gcp(self, cdata, destructor, size=0):
+        if self._weakref_cache_ref is None:
+            import weakref
+            class MyRef(weakref.ref):
+                def __eq__(self, other):
+                    myref = self()
+                    return self is other or (
+                        myref is not None and myref is other())
+                def __ne__(self, other):
+                    return not (self == other)
+                def __hash__(self):
+                    try:
+                        return self._hash
+                    except AttributeError:
+                        self._hash = hash(self())
+                        return self._hash
+            self._weakref_cache_ref = {}, MyRef
+        weak_cache, MyRef = self._weakref_cache_ref
+
+        if destructor is None:
+            try:
+                del weak_cache[MyRef(cdata)]
+            except KeyError:
+                raise TypeError("Can remove destructor only on an object "
+                                "previously returned by ffi.gc()")
+            return None
+
+        def remove(k):
+            cdata, destructor = weak_cache.pop(k, (None, None))
+            if destructor is not None:
+                destructor(cdata)
+
+        new_cdata = self.cast(self.typeof(cdata), cdata)
+        assert new_cdata is not cdata
+        weak_cache[MyRef(new_cdata, remove)] = (cdata, destructor)
+        return new_cdata
+
+    typeof = type
+
+    def getcname(self, BType, replace_with):
+        return BType._get_c_name(replace_with)
+
+    def typeoffsetof(self, BType, fieldname, num=0):
+        if isinstance(fieldname, str):
+            if num == 0 and issubclass(BType, CTypesGenericPtr):
+                BType = BType._BItem
+            if not issubclass(BType, CTypesBaseStructOrUnion):
+                raise TypeError("expected a struct or union ctype")
+            BField = BType._bfield_types[fieldname]
+            if BField is Ellipsis:
+                raise TypeError("not supported for bitfields")
+            return (BField, BType._offsetof(fieldname))
+        elif isinstance(fieldname, (int, long)):
+            if issubclass(BType, CTypesGenericArray):
+                BType = BType._CTPtr
+            if not issubclass(BType, CTypesGenericPtr):
+                raise TypeError("expected an array or ptr ctype")
+            BItem = BType._BItem
+            offset = BItem._get_size() * fieldname
+            if offset > sys.maxsize:
+                raise OverflowError
+            return (BItem, offset)
+        else:
+            raise TypeError(type(fieldname))
+
+    def rawaddressof(self, BTypePtr, cdata, offset=None):
+        if isinstance(cdata, CTypesBaseStructOrUnion):
+            ptr = ctypes.pointer(type(cdata)._to_ctypes(cdata))
+        elif isinstance(cdata, CTypesGenericPtr):
+            if offset is None or not issubclass(type(cdata)._BItem,
+                                                CTypesBaseStructOrUnion):
+                raise TypeError("unexpected cdata type")
+            ptr = type(cdata)._to_ctypes(cdata)
+        elif isinstance(cdata, CTypesGenericArray):
+            ptr = type(cdata)._to_ctypes(cdata)
+        else:
+            raise TypeError("expected a <cdata 'struct-or-union'>")
+        if offset:
+            ptr = ctypes.cast(
+                ctypes.c_void_p(
+                    ctypes.cast(ptr, ctypes.c_void_p).value + offset),
+                type(ptr))
+        return BTypePtr._from_ctypes(ptr)
+
+
+class CTypesLibrary(object):
+
+    def __init__(self, backend, cdll):
+        self.backend = backend
+        self.cdll = cdll
+
+    def load_function(self, BType, name):
+        c_func = getattr(self.cdll, name)
+        funcobj = BType._from_ctypes(c_func)
+        funcobj._name = name
+        return funcobj
+
+    def read_variable(self, BType, name):
+        try:
+            ctypes_obj = BType._ctype.in_dll(self.cdll, name)
+        except AttributeError as e:
+            raise NotImplementedError(e)
+        return BType._from_ctypes(ctypes_obj)
+
+    def write_variable(self, BType, name, value):
+        new_ctypes_obj = BType._to_ctypes(value)
+        ctypes_obj = BType._ctype.in_dll(self.cdll, name)
+        ctypes.memmove(ctypes.addressof(ctypes_obj),
+                       ctypes.addressof(new_ctypes_obj),
+                       ctypes.sizeof(BType._ctype))
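
The classes above form cffi's pure-Python backend built on ctypes; the compiled _cffi_backend extension implements the same interface and is what cffi normally uses. A minimal sketch of the API this backend serves, assuming a POSIX system where dlopen(None) resolves to the C library and that the call stays within the subset this backend supports:

    from cffi import FFI
    from cffi.backend_ctypes import CTypesBackend

    ffi = FFI(backend=CTypesBackend())          # explicitly select the backend above
    ffi.cdef("size_t strlen(const char *s);")   # declaration parsed by cparser.py, further below
    lib = ffi.dlopen(None)                      # ctypes.CDLL wrapped in the CTypesLibrary above
    print(lib.strlen(b"hello"))                 # 5, dispatched through CTypesFunctionPtr.__call__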

+ 187 - 0
rest_api_venv/Lib/site-packages/cffi/cffi_opcode.py

@@ -0,0 +1,187 @@
+from .error import VerificationError
+
+class CffiOp(object):
+    def __init__(self, op, arg):
+        self.op = op
+        self.arg = arg
+
+    def as_c_expr(self):
+        if self.op is None:
+            assert isinstance(self.arg, str)
+            return '(_cffi_opcode_t)(%s)' % (self.arg,)
+        classname = CLASS_NAME[self.op]
+        return '_CFFI_OP(_CFFI_OP_%s, %s)' % (classname, self.arg)
+
+    def as_python_bytes(self):
+        if self.op is None and self.arg.isdigit():
+            value = int(self.arg)     # non-negative: '-' not in self.arg
+            if value >= 2**31:
+                raise OverflowError("cannot emit %r: limited to 2**31-1"
+                                    % (self.arg,))
+            return format_four_bytes(value)
+        if isinstance(self.arg, str):
+            raise VerificationError("cannot emit to Python: %r" % (self.arg,))
+        return format_four_bytes((self.arg << 8) | self.op)
+
+    def __str__(self):
+        classname = CLASS_NAME.get(self.op, self.op)
+        return '(%s %s)' % (classname, self.arg)
+
+def format_four_bytes(num):
+    return '\\x%02X\\x%02X\\x%02X\\x%02X' % (
+        (num >> 24) & 0xFF,
+        (num >> 16) & 0xFF,
+        (num >>  8) & 0xFF,
+        (num      ) & 0xFF)
+
+OP_PRIMITIVE       = 1
+OP_POINTER         = 3
+OP_ARRAY           = 5
+OP_OPEN_ARRAY      = 7
+OP_STRUCT_UNION    = 9
+OP_ENUM            = 11
+OP_FUNCTION        = 13
+OP_FUNCTION_END    = 15
+OP_NOOP            = 17
+OP_BITFIELD        = 19
+OP_TYPENAME        = 21
+OP_CPYTHON_BLTN_V  = 23   # varargs
+OP_CPYTHON_BLTN_N  = 25   # noargs
+OP_CPYTHON_BLTN_O  = 27   # O  (i.e. a single arg)
+OP_CONSTANT        = 29
+OP_CONSTANT_INT    = 31
+OP_GLOBAL_VAR      = 33
+OP_DLOPEN_FUNC     = 35
+OP_DLOPEN_CONST    = 37
+OP_GLOBAL_VAR_F    = 39
+OP_EXTERN_PYTHON   = 41
+
+PRIM_VOID          = 0
+PRIM_BOOL          = 1
+PRIM_CHAR          = 2
+PRIM_SCHAR         = 3
+PRIM_UCHAR         = 4
+PRIM_SHORT         = 5
+PRIM_USHORT        = 6
+PRIM_INT           = 7
+PRIM_UINT          = 8
+PRIM_LONG          = 9
+PRIM_ULONG         = 10
+PRIM_LONGLONG      = 11
+PRIM_ULONGLONG     = 12
+PRIM_FLOAT         = 13
+PRIM_DOUBLE        = 14
+PRIM_LONGDOUBLE    = 15
+
+PRIM_WCHAR         = 16
+PRIM_INT8          = 17
+PRIM_UINT8         = 18
+PRIM_INT16         = 19
+PRIM_UINT16        = 20
+PRIM_INT32         = 21
+PRIM_UINT32        = 22
+PRIM_INT64         = 23
+PRIM_UINT64        = 24
+PRIM_INTPTR        = 25
+PRIM_UINTPTR       = 26
+PRIM_PTRDIFF       = 27
+PRIM_SIZE          = 28
+PRIM_SSIZE         = 29
+PRIM_INT_LEAST8    = 30
+PRIM_UINT_LEAST8   = 31
+PRIM_INT_LEAST16   = 32
+PRIM_UINT_LEAST16  = 33
+PRIM_INT_LEAST32   = 34
+PRIM_UINT_LEAST32  = 35
+PRIM_INT_LEAST64   = 36
+PRIM_UINT_LEAST64  = 37
+PRIM_INT_FAST8     = 38
+PRIM_UINT_FAST8    = 39
+PRIM_INT_FAST16    = 40
+PRIM_UINT_FAST16   = 41
+PRIM_INT_FAST32    = 42
+PRIM_UINT_FAST32   = 43
+PRIM_INT_FAST64    = 44
+PRIM_UINT_FAST64   = 45
+PRIM_INTMAX        = 46
+PRIM_UINTMAX       = 47
+PRIM_FLOATCOMPLEX  = 48
+PRIM_DOUBLECOMPLEX = 49
+PRIM_CHAR16        = 50
+PRIM_CHAR32        = 51
+
+_NUM_PRIM          = 52
+_UNKNOWN_PRIM          = -1
+_UNKNOWN_FLOAT_PRIM    = -2
+_UNKNOWN_LONG_DOUBLE   = -3
+
+_IO_FILE_STRUCT        = -1
+
+PRIMITIVE_TO_INDEX = {
+    'char':               PRIM_CHAR,
+    'short':              PRIM_SHORT,
+    'int':                PRIM_INT,
+    'long':               PRIM_LONG,
+    'long long':          PRIM_LONGLONG,
+    'signed char':        PRIM_SCHAR,
+    'unsigned char':      PRIM_UCHAR,
+    'unsigned short':     PRIM_USHORT,
+    'unsigned int':       PRIM_UINT,
+    'unsigned long':      PRIM_ULONG,
+    'unsigned long long': PRIM_ULONGLONG,
+    'float':              PRIM_FLOAT,
+    'double':             PRIM_DOUBLE,
+    'long double':        PRIM_LONGDOUBLE,
+    'float _Complex':     PRIM_FLOATCOMPLEX,
+    'double _Complex':    PRIM_DOUBLECOMPLEX,
+    '_Bool':              PRIM_BOOL,
+    'wchar_t':            PRIM_WCHAR,
+    'char16_t':           PRIM_CHAR16,
+    'char32_t':           PRIM_CHAR32,
+    'int8_t':             PRIM_INT8,
+    'uint8_t':            PRIM_UINT8,
+    'int16_t':            PRIM_INT16,
+    'uint16_t':           PRIM_UINT16,
+    'int32_t':            PRIM_INT32,
+    'uint32_t':           PRIM_UINT32,
+    'int64_t':            PRIM_INT64,
+    'uint64_t':           PRIM_UINT64,
+    'intptr_t':           PRIM_INTPTR,
+    'uintptr_t':          PRIM_UINTPTR,
+    'ptrdiff_t':          PRIM_PTRDIFF,
+    'size_t':             PRIM_SIZE,
+    'ssize_t':            PRIM_SSIZE,
+    'int_least8_t':       PRIM_INT_LEAST8,
+    'uint_least8_t':      PRIM_UINT_LEAST8,
+    'int_least16_t':      PRIM_INT_LEAST16,
+    'uint_least16_t':     PRIM_UINT_LEAST16,
+    'int_least32_t':      PRIM_INT_LEAST32,
+    'uint_least32_t':     PRIM_UINT_LEAST32,
+    'int_least64_t':      PRIM_INT_LEAST64,
+    'uint_least64_t':     PRIM_UINT_LEAST64,
+    'int_fast8_t':        PRIM_INT_FAST8,
+    'uint_fast8_t':       PRIM_UINT_FAST8,
+    'int_fast16_t':       PRIM_INT_FAST16,
+    'uint_fast16_t':      PRIM_UINT_FAST16,
+    'int_fast32_t':       PRIM_INT_FAST32,
+    'uint_fast32_t':      PRIM_UINT_FAST32,
+    'int_fast64_t':       PRIM_INT_FAST64,
+    'uint_fast64_t':      PRIM_UINT_FAST64,
+    'intmax_t':           PRIM_INTMAX,
+    'uintmax_t':          PRIM_UINTMAX,
+    }
+
+F_UNION         = 0x01
+F_CHECK_FIELDS  = 0x02
+F_PACKED        = 0x04
+F_EXTERNAL      = 0x08
+F_OPAQUE        = 0x10
+
+G_FLAGS = dict([('_CFFI_' + _key, globals()[_key])
+                for _key in ['F_UNION', 'F_CHECK_FIELDS', 'F_PACKED',
+                             'F_EXTERNAL', 'F_OPAQUE']])
+
+CLASS_NAME = {}
+for _name, _value in list(globals().items()):
+    if _name.startswith('OP_') and isinstance(_value, int):
+        CLASS_NAME[_value] = _name[3:]
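
The OP_* opcodes above are how cffi's recompiler encodes parsed types, either as C expressions or as compact byte strings. A small hedged illustration (CffiOp and the constants can be imported from cffi.cffi_opcode; the type index 7 is arbitrary):

    from cffi.cffi_opcode import CffiOp, OP_POINTER

    op = CffiOp(OP_POINTER, 7)     # "pointer to the type stored at index 7"
    print(op)                      # (POINTER 7)
    print(op.as_c_expr())          # _CFFI_OP(_CFFI_OP_POINTER, 7)
    print(op.as_python_bytes())    # \x00\x00\x07\x03, i.e. (7 << 8) | OP_POINTER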

+ 80 - 0
rest_api_venv/Lib/site-packages/cffi/commontypes.py

@@ -0,0 +1,80 @@
+import sys
+from . import model
+from .error import FFIError
+
+
+COMMON_TYPES = {}
+
+try:
+    # fetch "bool" and all simple Windows types
+    from _cffi_backend import _get_common_types
+    _get_common_types(COMMON_TYPES)
+except ImportError:
+    pass
+
+COMMON_TYPES['FILE'] = model.unknown_type('FILE', '_IO_FILE')
+COMMON_TYPES['bool'] = '_Bool'    # in case we got ImportError above
+
+for _type in model.PrimitiveType.ALL_PRIMITIVE_TYPES:
+    if _type.endswith('_t'):
+        COMMON_TYPES[_type] = _type
+del _type
+
+_CACHE = {}
+
+def resolve_common_type(parser, commontype):
+    try:
+        return _CACHE[commontype]
+    except KeyError:
+        cdecl = COMMON_TYPES.get(commontype, commontype)
+        if not isinstance(cdecl, str):
+            result, quals = cdecl, 0    # cdecl is already a BaseType
+        elif cdecl in model.PrimitiveType.ALL_PRIMITIVE_TYPES:
+            result, quals = model.PrimitiveType(cdecl), 0
+        elif cdecl == 'set-unicode-needed':
+            raise FFIError("The Windows type %r is only available after "
+                           "you call ffi.set_unicode()" % (commontype,))
+        else:
+            if commontype == cdecl:
+                raise FFIError(
+                    "Unsupported type: %r.  Please look at "
+        "http://cffi.readthedocs.io/en/latest/cdef.html#ffi-cdef-limitations "
+                    "and file an issue if you think this type should really "
+                    "be supported." % (commontype,))
+            result, quals = parser.parse_type_and_quals(cdecl)   # recursive
+
+        assert isinstance(result, model.BaseTypeByIdentity)
+        _CACHE[commontype] = result, quals
+        return result, quals
+
+
+# ____________________________________________________________
+# extra types for Windows (most of them are in commontypes.c)
+
+
+def win_common_types():
+    return {
+        "UNICODE_STRING": model.StructType(
+            "_UNICODE_STRING",
+            ["Length",
+             "MaximumLength",
+             "Buffer"],
+            [model.PrimitiveType("unsigned short"),
+             model.PrimitiveType("unsigned short"),
+             model.PointerType(model.PrimitiveType("wchar_t"))],
+            [-1, -1, -1]),
+        "PUNICODE_STRING": "UNICODE_STRING *",
+        "PCUNICODE_STRING": "const UNICODE_STRING *",
+
+        "TBYTE": "set-unicode-needed",
+        "TCHAR": "set-unicode-needed",
+        "LPCTSTR": "set-unicode-needed",
+        "PCTSTR": "set-unicode-needed",
+        "LPTSTR": "set-unicode-needed",
+        "PTSTR": "set-unicode-needed",
+        "PTBYTE": "set-unicode-needed",
+        "PTCHAR": "set-unicode-needed",
+        }
+
+if sys.platform == 'win32':
+    COMMON_TYPES.update(win_common_types())
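
A short sketch of how a lookup through this table behaves (it assumes pycparser is installed so that cffi.cparser.Parser can be instantiated; primitive names such as uint32_t are resolved directly from the table and never reach the parser):

    from cffi.cparser import Parser
    from cffi.commontypes import resolve_common_type

    tp, quals = resolve_common_type(Parser(), "uint32_t")
    print(tp.get_c_name(), quals)  # uint32_t 0   (the result is now cached in _CACHE)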

+ 1006 - 0
rest_api_venv/Lib/site-packages/cffi/cparser.py

@@ -0,0 +1,1006 @@
+from . import model
+from .commontypes import COMMON_TYPES, resolve_common_type
+from .error import FFIError, CDefError
+try:
+    from . import _pycparser as pycparser
+except ImportError:
+    import pycparser
+import weakref, re, sys
+
+try:
+    if sys.version_info < (3,):
+        import thread as _thread
+    else:
+        import _thread
+    lock = _thread.allocate_lock()
+except ImportError:
+    lock = None
+
+def _workaround_for_static_import_finders():
+    # Issue #392: packaging tools like cx_Freeze cannot find these
+    # because pycparser imports them dynamically via exec.  This is an
+    # obscure workaround.  This function is never called.
+    import pycparser.yacctab
+    import pycparser.lextab
+
+CDEF_SOURCE_STRING = "<cdef source string>"
+_r_comment = re.compile(r"/\*.*?\*/|//([^\n\\]|\\.)*?$",
+                        re.DOTALL | re.MULTILINE)
+_r_define  = re.compile(r"^\s*#\s*define\s+([A-Za-z_][A-Za-z_0-9]*)"
+                        r"\b((?:[^\n\\]|\\.)*?)$",
+                        re.DOTALL | re.MULTILINE)
+_r_line_directive = re.compile(r"^[ \t]*#[ \t]*(?:line|\d+)\b.*$", re.MULTILINE)
+_r_partial_enum = re.compile(r"=\s*\.\.\.\s*[,}]|\.\.\.\s*\}")
+_r_enum_dotdotdot = re.compile(r"__dotdotdot\d+__$")
+_r_partial_array = re.compile(r"\[\s*\.\.\.\s*\]")
+_r_words = re.compile(r"\w+|\S")
+_parser_cache = None
+_r_int_literal = re.compile(r"-?0?x?[0-9a-f]+[lu]*$", re.IGNORECASE)
+_r_stdcall1 = re.compile(r"\b(__stdcall|WINAPI)\b")
+_r_stdcall2 = re.compile(r"[(]\s*(__stdcall|WINAPI)\b")
+_r_cdecl = re.compile(r"\b__cdecl\b")
+_r_extern_python = re.compile(r'\bextern\s*"'
+                              r'(Python|Python\s*\+\s*C|C\s*\+\s*Python)"\s*.')
+_r_star_const_space = re.compile(       # matches "* const "
+    r"[*]\s*((const|volatile|restrict)\b\s*)+")
+_r_int_dotdotdot = re.compile(r"(\b(int|long|short|signed|unsigned|char)\s*)+"
+                              r"\.\.\.")
+_r_float_dotdotdot = re.compile(r"\b(double|float)\s*\.\.\.")
+
+def _get_parser():
+    global _parser_cache
+    if _parser_cache is None:
+        _parser_cache = pycparser.CParser()
+    return _parser_cache
+
+def _workaround_for_old_pycparser(csource):
+    # Workaround for a pycparser issue (fixed between pycparser 2.10 and
+    # 2.14): "char*const***" gives us a wrong syntax tree, the same as
+    # for "char***(*const)".  This means we can't tell the difference
+    # afterwards.  But "char(*const(***))" gives us the right syntax
+    # tree.  The issue only occurs if there are several stars in
+    # sequence with no parentheses in between, just possibly qualifiers.
+    # Attempt to fix it by adding some parentheses in the source: each
+    # time we see "* const" or "* const *", we add an opening
+    # parenthesis before each star---the hard part is figuring out where
+    # to close them.
+    parts = []
+    while True:
+        match = _r_star_const_space.search(csource)
+        if not match:
+            break
+        #print repr(''.join(parts)+csource), '=>',
+        parts.append(csource[:match.start()])
+        parts.append('('); closing = ')'
+        parts.append(match.group())   # e.g. "* const "
+        endpos = match.end()
+        if csource.startswith('*', endpos):
+            parts.append('('); closing += ')'
+        level = 0
+        i = endpos
+        while i < len(csource):
+            c = csource[i]
+            if c == '(':
+                level += 1
+            elif c == ')':
+                if level == 0:
+                    break
+                level -= 1
+            elif c in ',;=':
+                if level == 0:
+                    break
+            i += 1
+        csource = csource[endpos:i] + closing + csource[i:]
+        #print repr(''.join(parts)+csource)
+    parts.append(csource)
+    return ''.join(parts)
+
+def _preprocess_extern_python(csource):
+    # input: `extern "Python" int foo(int);` or
+    #        `extern "Python" { int foo(int); }`
+    # output:
+    #     void __cffi_extern_python_start;
+    #     int foo(int);
+    #     void __cffi_extern_python_stop;
+    #
+    # input: `extern "Python+C" int foo(int);`
+    # output:
+    #     void __cffi_extern_python_plus_c_start;
+    #     int foo(int);
+    #     void __cffi_extern_python_stop;
+    parts = []
+    while True:
+        match = _r_extern_python.search(csource)
+        if not match:
+            break
+        endpos = match.end() - 1
+        #print
+        #print ''.join(parts)+csource
+        #print '=>'
+        parts.append(csource[:match.start()])
+        if 'C' in match.group(1):
+            parts.append('void __cffi_extern_python_plus_c_start; ')
+        else:
+            parts.append('void __cffi_extern_python_start; ')
+        if csource[endpos] == '{':
+            # grouping variant
+            closing = csource.find('}', endpos)
+            if closing < 0:
+                raise CDefError("'extern \"Python\" {': no '}' found")
+            if csource.find('{', endpos + 1, closing) >= 0:
+                raise NotImplementedError("cannot use { } inside a block "
+                                          "'extern \"Python\" { ... }'")
+            parts.append(csource[endpos+1:closing])
+            csource = csource[closing+1:]
+        else:
+            # non-grouping variant
+            semicolon = csource.find(';', endpos)
+            if semicolon < 0:
+                raise CDefError("'extern \"Python\": no ';' found")
+            parts.append(csource[endpos:semicolon+1])
+            csource = csource[semicolon+1:]
+        parts.append(' void __cffi_extern_python_stop;')
+        #print ''.join(parts)+csource
+        #print
+    parts.append(csource)
+    return ''.join(parts)
+
+def _warn_for_string_literal(csource):
+    if '"' not in csource:
+        return
+    for line in csource.splitlines():
+        if '"' in line and not line.lstrip().startswith('#'):
+            import warnings
+            warnings.warn("String literal found in cdef() or type source. "
+                          "String literals are ignored here, but you should "
+                          "remove them anyway because some character sequences "
+                          "confuse pre-parsing.")
+            break
+
+def _warn_for_non_extern_non_static_global_variable(decl):
+    if not decl.storage:
+        import warnings
+        warnings.warn("Global variable '%s' in cdef(): for consistency "
+                      "with C it should have a storage class specifier "
+                      "(usually 'extern')" % (decl.name,))
+
+def _remove_line_directives(csource):
+    # _r_line_directive matches whole lines, without the final \n, if they
+    # start with '#line' with some spacing allowed, or '#NUMBER'.  This
+    # function stores them away and replaces them with exactly the string
+    # '#line@N', where N is the index in the list 'line_directives'.
+    line_directives = []
+    def replace(m):
+        i = len(line_directives)
+        line_directives.append(m.group())
+        return '#line@%d' % i
+    csource = _r_line_directive.sub(replace, csource)
+    return csource, line_directives
+
+def _put_back_line_directives(csource, line_directives):
+    def replace(m):
+        s = m.group()
+        if not s.startswith('#line@'):
+            raise AssertionError("unexpected #line directive "
+                                 "(should have been processed and removed)")
+        return line_directives[int(s[6:])]
+    return _r_line_directive.sub(replace, csource)
+
+def _preprocess(csource):
+    # First, remove the lines of the form '#line N "filename"' because
+    # the "filename" part could confuse the rest
+    csource, line_directives = _remove_line_directives(csource)
+    # Remove comments.  NOTE: this only works because the cdef() section
+    # should not contain any string literals (except in line directives)!
+    def replace_keeping_newlines(m):
+        return ' ' + m.group().count('\n') * '\n'
+    csource = _r_comment.sub(replace_keeping_newlines, csource)
+    # Remove the "#define FOO x" lines
+    macros = {}
+    for match in _r_define.finditer(csource):
+        macroname, macrovalue = match.groups()
+        macrovalue = macrovalue.replace('\\\n', '').strip()
+        macros[macroname] = macrovalue
+    csource = _r_define.sub('', csource)
+    #
+    if pycparser.__version__ < '2.14':
+        csource = _workaround_for_old_pycparser(csource)
+    #
+    # BIG HACK: replace WINAPI or __stdcall with "volatile const".
+    # It doesn't make sense for the return type of a function to be
+    # "volatile volatile const", so we abuse it to detect __stdcall...
+    # Hack number 2 is that "int(volatile *fptr)();" is not valid C
+    # syntax, so we place the "volatile" before the opening parenthesis.
+    csource = _r_stdcall2.sub(' volatile volatile const(', csource)
+    csource = _r_stdcall1.sub(' volatile volatile const ', csource)
+    csource = _r_cdecl.sub(' ', csource)
+    #
+    # Replace `extern "Python"` with start/end markers
+    csource = _preprocess_extern_python(csource)
+    #
+    # Now there should not be any string literal left; warn if we get one
+    _warn_for_string_literal(csource)
+    #
+    # Replace "[...]" with "[__dotdotdotarray__]"
+    csource = _r_partial_array.sub('[__dotdotdotarray__]', csource)
+    #
+    # Replace "...}" with "__dotdotdotNUM__}".  This construction should
+    # occur only at the end of enums; at the end of structs we have "...;}"
+    # and at the end of vararg functions "...);".  Also replace "=...[,}]"
+    # with ",__dotdotdotNUM__[,}]": this occurs in the enums too, when
+    # giving an unknown value.
+    matches = list(_r_partial_enum.finditer(csource))
+    for number, match in enumerate(reversed(matches)):
+        p = match.start()
+        if csource[p] == '=':
+            p2 = csource.find('...', p, match.end())
+            assert p2 > p
+            csource = '%s,__dotdotdot%d__ %s' % (csource[:p], number,
+                                                 csource[p2+3:])
+        else:
+            assert csource[p:p+3] == '...'
+            csource = '%s __dotdotdot%d__ %s' % (csource[:p], number,
+                                                 csource[p+3:])
+    # Replace "int ..." or "unsigned long int..." with "__dotdotdotint__"
+    csource = _r_int_dotdotdot.sub(' __dotdotdotint__ ', csource)
+    # Replace "float ..." or "double..." with "__dotdotdotfloat__"
+    csource = _r_float_dotdotdot.sub(' __dotdotdotfloat__ ', csource)
+    # Replace all remaining "..." with the same name, "__dotdotdot__",
+    # which is declared with a typedef for the purpose of C parsing.
+    csource = csource.replace('...', ' __dotdotdot__ ')
+    # Finally, put back the line directives
+    csource = _put_back_line_directives(csource, line_directives)
+    return csource, macros
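+
+# Illustrative sketch of the net effect of _preprocess() on a small cdef
+# fragment (the names FOO and f below are arbitrary):
+#
+#     csource, macros = _preprocess("#define FOO 42\nint f(int, ...);")
+#     # macros  == {'FOO': '42'}
+#     # csource has the "#define" line stripped out and the "..." rewritten to
+#     # the placeholder identifier __dotdotdot__, which _parse() below declares
+#     # as a typedef so that pycparser accepts it.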
+
+def _common_type_names(csource):
+    # Look in the source for what looks like usages of types from the
+    # list of common types.  A "usage" is approximated here as the
+    # appearance of the word, minus a "definition" of the type, which
+    # is the last word in a "typedef" statement.  Approximative only
+    # but should be fine for all the common types.
+    look_for_words = set(COMMON_TYPES)
+    look_for_words.add(';')
+    look_for_words.add(',')
+    look_for_words.add('(')
+    look_for_words.add(')')
+    look_for_words.add('typedef')
+    words_used = set()
+    is_typedef = False
+    paren = 0
+    previous_word = ''
+    for word in _r_words.findall(csource):
+        if word in look_for_words:
+            if word == ';':
+                if is_typedef:
+                    words_used.discard(previous_word)
+                    look_for_words.discard(previous_word)
+                    is_typedef = False
+            elif word == 'typedef':
+                is_typedef = True
+                paren = 0
+            elif word == '(':
+                paren += 1
+            elif word == ')':
+                paren -= 1
+            elif word == ',':
+                if is_typedef and paren == 0:
+                    words_used.discard(previous_word)
+                    look_for_words.discard(previous_word)
+            else:   # word in COMMON_TYPES
+                words_used.add(word)
+        previous_word = word
+    return words_used
+
+
+class Parser(object):
+
+    def __init__(self):
+        self._declarations = {}
+        self._included_declarations = set()
+        self._anonymous_counter = 0
+        self._structnode2type = weakref.WeakKeyDictionary()
+        self._options = {}
+        self._int_constants = {}
+        self._recomplete = []
+        self._uses_new_feature = None
+
+    def _parse(self, csource):
+        csource, macros = _preprocess(csource)
+        # XXX: for more efficiency we would need to poke into the
+        # internals of CParser...  the following registers the
+        # typedefs, because their presence or absence influences the
+        # parsing itself (but what they are typedef'ed to plays no role)
+        ctn = _common_type_names(csource)
+        typenames = []
+        for name in sorted(self._declarations):
+            if name.startswith('typedef '):
+                name = name[8:]
+                typenames.append(name)
+                ctn.discard(name)
+        typenames += sorted(ctn)
+        #
+        csourcelines = []
+        csourcelines.append('# 1 "<cdef automatic initialization code>"')
+        for typename in typenames:
+            csourcelines.append('typedef int %s;' % typename)
+        csourcelines.append('typedef int __dotdotdotint__, __dotdotdotfloat__,'
+                            ' __dotdotdot__;')
+        # this forces pycparser to consider the following in the file
+        # called <cdef source string> from line 1
+        csourcelines.append('# 1 "%s"' % (CDEF_SOURCE_STRING,))
+        csourcelines.append(csource)
+        fullcsource = '\n'.join(csourcelines)
+        if lock is not None:
+            lock.acquire()     # pycparser is not thread-safe...
+        try:
+            ast = _get_parser().parse(fullcsource)
+        except pycparser.c_parser.ParseError as e:
+            self.convert_pycparser_error(e, csource)
+        finally:
+            if lock is not None:
+                lock.release()
+        # csource will be used to find buggy source text
+        return ast, macros, csource
+
+    def _convert_pycparser_error(self, e, csource):
+        # xxx look for "<cdef source string>:NUM:" at the start of str(e)
+        # and interpret that as a line number.  This will not work if
+        # the user gives explicit ``# NUM "FILE"`` directives.
+        line = None
+        msg = str(e)
+        match = re.match(r"%s:(\d+):" % (CDEF_SOURCE_STRING,), msg)
+        if match:
+            linenum = int(match.group(1), 10)
+            csourcelines = csource.splitlines()
+            if 1 <= linenum <= len(csourcelines):
+                line = csourcelines[linenum-1]
+        return line
+
+    def convert_pycparser_error(self, e, csource):
+        line = self._convert_pycparser_error(e, csource)
+
+        msg = str(e)
+        if line:
+            msg = 'cannot parse "%s"\n%s' % (line.strip(), msg)
+        else:
+            msg = 'parse error\n%s' % (msg,)
+        raise CDefError(msg)
+
+    def parse(self, csource, override=False, packed=False, pack=None,
+                    dllexport=False):
+        if packed:
+            if packed != True:
+                raise ValueError("'packed' should be False or True; use "
+                                 "'pack' to give another value")
+            if pack:
+                raise ValueError("cannot give both 'pack' and 'packed'")
+            pack = 1
+        elif pack:
+            if pack & (pack - 1):
+                raise ValueError("'pack' must be a power of two, not %r" %
+                    (pack,))
+        else:
+            pack = 0
+        prev_options = self._options
+        try:
+            self._options = {'override': override,
+                             'packed': pack,
+                             'dllexport': dllexport}
+            self._internal_parse(csource)
+        finally:
+            self._options = prev_options
+
+    def _internal_parse(self, csource):
+        ast, macros, csource = self._parse(csource)
+        # add the macros
+        self._process_macros(macros)
+        # find the first "__dotdotdot__" and use that as a separator
+        # between the repeated typedefs and the real csource
+        iterator = iter(ast.ext)
+        for decl in iterator:
+            if decl.name == '__dotdotdot__':
+                break
+        else:
+            assert 0
+        current_decl = None
+        #
+        try:
+            self._inside_extern_python = '__cffi_extern_python_stop'
+            for decl in iterator:
+                current_decl = decl
+                if isinstance(decl, pycparser.c_ast.Decl):
+                    self._parse_decl(decl)
+                elif isinstance(decl, pycparser.c_ast.Typedef):
+                    if not decl.name:
+                        raise CDefError("typedef does not declare any name",
+                                        decl)
+                    quals = 0
+                    if (isinstance(decl.type.type, pycparser.c_ast.IdentifierType) and
+                            decl.type.type.names[-1].startswith('__dotdotdot')):
+                        realtype = self._get_unknown_type(decl)
+                    elif (isinstance(decl.type, pycparser.c_ast.PtrDecl) and
+                          isinstance(decl.type.type, pycparser.c_ast.TypeDecl) and
+                          isinstance(decl.type.type.type,
+                                     pycparser.c_ast.IdentifierType) and
+                          decl.type.type.type.names[-1].startswith('__dotdotdot')):
+                        realtype = self._get_unknown_ptr_type(decl)
+                    else:
+                        realtype, quals = self._get_type_and_quals(
+                            decl.type, name=decl.name, partial_length_ok=True,
+                            typedef_example="*(%s *)0" % (decl.name,))
+                    self._declare('typedef ' + decl.name, realtype, quals=quals)
+                elif decl.__class__.__name__ == 'Pragma':
+                    pass    # skip pragma, only in pycparser 2.15
+                else:
+                    raise CDefError("unexpected <%s>: this construct is valid "
+                                    "C but not valid in cdef()" %
+                                    decl.__class__.__name__, decl)
+        except CDefError as e:
+            if len(e.args) == 1:
+                e.args = e.args + (current_decl,)
+            raise
+        except FFIError as e:
+            msg = self._convert_pycparser_error(e, csource)
+            if msg:
+                e.args = (e.args[0] + "\n    *** Err: %s" % msg,)
+            raise
+
+    def _add_constants(self, key, val):
+        if key in self._int_constants:
+            if self._int_constants[key] == val:
+                return     # ignore identical double declarations
+            raise FFIError(
+                "multiple declarations of constant: %s" % (key,))
+        self._int_constants[key] = val
+
+    def _add_integer_constant(self, name, int_str):
+        int_str = int_str.lower().rstrip("ul")
+        neg = int_str.startswith('-')
+        if neg:
+            int_str = int_str[1:]
+        # "010" is not valid oct in py3
+        if (int_str.startswith("0") and int_str != '0'
+                and not int_str.startswith("0x")):
+            int_str = "0o" + int_str[1:]
+        pyvalue = int(int_str, 0)
+        if neg:
+            pyvalue = -pyvalue
+        self._add_constants(name, pyvalue)
+        self._declare('macro ' + name, pyvalue)
+
+    def _process_macros(self, macros):
+        for key, value in macros.items():
+            value = value.strip()
+            if _r_int_literal.match(value):
+                self._add_integer_constant(key, value)
+            elif value == '...':
+                self._declare('macro ' + key, value)
+            else:
+                raise CDefError(
+                    'only supports one of the following syntax:\n'
+                    '  #define %s ...     (literally dot-dot-dot)\n'
+                    '  #define %s NUMBER  (with NUMBER an integer'
+                                    ' constant, decimal/hex/octal)\n'
+                    'got:\n'
+                    '  #define %s %s'
+                    % (key, key, key, value))
+
+    def _declare_function(self, tp, quals, decl):
+        tp = self._get_type_pointer(tp, quals)
+        if self._options.get('dllexport'):
+            tag = 'dllexport_python '
+        elif self._inside_extern_python == '__cffi_extern_python_start':
+            tag = 'extern_python '
+        elif self._inside_extern_python == '__cffi_extern_python_plus_c_start':
+            tag = 'extern_python_plus_c '
+        else:
+            tag = 'function '
+        self._declare(tag + decl.name, tp)
+
+    def _parse_decl(self, decl):
+        node = decl.type
+        if isinstance(node, pycparser.c_ast.FuncDecl):
+            tp, quals = self._get_type_and_quals(node, name=decl.name)
+            assert isinstance(tp, model.RawFunctionType)
+            self._declare_function(tp, quals, decl)
+        else:
+            if isinstance(node, pycparser.c_ast.Struct):
+                self._get_struct_union_enum_type('struct', node)
+            elif isinstance(node, pycparser.c_ast.Union):
+                self._get_struct_union_enum_type('union', node)
+            elif isinstance(node, pycparser.c_ast.Enum):
+                self._get_struct_union_enum_type('enum', node)
+            elif not decl.name:
+                raise CDefError("construct does not declare any variable",
+                                decl)
+            #
+            if decl.name:
+                tp, quals = self._get_type_and_quals(node,
+                                                     partial_length_ok=True)
+                if tp.is_raw_function:
+                    self._declare_function(tp, quals, decl)
+                elif (tp.is_integer_type() and
+                        hasattr(decl, 'init') and
+                        hasattr(decl.init, 'value') and
+                        _r_int_literal.match(decl.init.value)):
+                    self._add_integer_constant(decl.name, decl.init.value)
+                elif (tp.is_integer_type() and
+                        isinstance(decl.init, pycparser.c_ast.UnaryOp) and
+                        decl.init.op == '-' and
+                        hasattr(decl.init.expr, 'value') and
+                        _r_int_literal.match(decl.init.expr.value)):
+                    self._add_integer_constant(decl.name,
+                                               '-' + decl.init.expr.value)
+                elif (tp is model.void_type and
+                      decl.name.startswith('__cffi_extern_python_')):
+                    # hack: `extern "Python"` in the C source is replaced
+                    # with "void __cffi_extern_python_start;" and
+                    # "void __cffi_extern_python_stop;"
+                    self._inside_extern_python = decl.name
+                else:
+                    if self._inside_extern_python !='__cffi_extern_python_stop':
+                        raise CDefError(
+                            "cannot declare constants or "
+                            "variables with 'extern \"Python\"'")
+                    if (quals & model.Q_CONST) and not tp.is_array_type:
+                        self._declare('constant ' + decl.name, tp, quals=quals)
+                    else:
+                        _warn_for_non_extern_non_static_global_variable(decl)
+                        self._declare('variable ' + decl.name, tp, quals=quals)
+
+    def parse_type(self, cdecl):
+        return self.parse_type_and_quals(cdecl)[0]
+
+    def parse_type_and_quals(self, cdecl):
+        ast, macros = self._parse('void __dummy(\n%s\n);' % cdecl)[:2]
+        assert not macros
+        exprnode = ast.ext[-1].type.args.params[0]
+        if isinstance(exprnode, pycparser.c_ast.ID):
+            raise CDefError("unknown identifier '%s'" % (exprnode.name,))
+        return self._get_type_and_quals(exprnode.type)
+
+    def _declare(self, name, obj, included=False, quals=0):
+        if name in self._declarations:
+            prevobj, prevquals = self._declarations[name]
+            if prevobj is obj and prevquals == quals:
+                return
+            if not self._options.get('override'):
+                raise FFIError(
+                    "multiple declarations of %s (for interactive usage, "
+                    "try cdef(xx, override=True))" % (name,))
+        assert '__dotdotdot__' not in name.split()
+        self._declarations[name] = (obj, quals)
+        if included:
+            self._included_declarations.add(obj)
+
+    def _extract_quals(self, type):
+        quals = 0
+        if isinstance(type, (pycparser.c_ast.TypeDecl,
+                             pycparser.c_ast.PtrDecl)):
+            if 'const' in type.quals:
+                quals |= model.Q_CONST
+            if 'volatile' in type.quals:
+                quals |= model.Q_VOLATILE
+            if 'restrict' in type.quals:
+                quals |= model.Q_RESTRICT
+        return quals
+
+    def _get_type_pointer(self, type, quals, declname=None):
+        if isinstance(type, model.RawFunctionType):
+            return type.as_function_pointer()
+        if (isinstance(type, model.StructOrUnionOrEnum) and
+                type.name.startswith('$') and type.name[1:].isdigit() and
+                type.forcename is None and declname is not None):
+            return model.NamedPointerType(type, declname, quals)
+        return model.PointerType(type, quals)
+
+    def _get_type_and_quals(self, typenode, name=None, partial_length_ok=False,
+                            typedef_example=None):
+        # first, dereference typedefs; if we have it already parsed, we're good
+        if (isinstance(typenode, pycparser.c_ast.TypeDecl) and
+            isinstance(typenode.type, pycparser.c_ast.IdentifierType) and
+            len(typenode.type.names) == 1 and
+            ('typedef ' + typenode.type.names[0]) in self._declarations):
+            tp, quals = self._declarations['typedef ' + typenode.type.names[0]]
+            quals |= self._extract_quals(typenode)
+            return tp, quals
+        #
+        if isinstance(typenode, pycparser.c_ast.ArrayDecl):
+            # array type
+            if typenode.dim is None:
+                length = None
+            else:
+                length = self._parse_constant(
+                    typenode.dim, partial_length_ok=partial_length_ok)
+            # a hack: in 'typedef int foo_t[...][...];', don't use '...' as
+            # the length but use directly the C expression that would be
+            # generated by recompiler.py.  This lets the typedef be used in
+            # many more places within recompiler.py
+            if typedef_example is not None:
+                if length == '...':
+                    length = '_cffi_array_len(%s)' % (typedef_example,)
+                typedef_example = "*" + typedef_example
+            #
+            tp, quals = self._get_type_and_quals(typenode.type,
+                                partial_length_ok=partial_length_ok,
+                                typedef_example=typedef_example)
+            return model.ArrayType(tp, length), quals
+        #
+        if isinstance(typenode, pycparser.c_ast.PtrDecl):
+            # pointer type
+            itemtype, itemquals = self._get_type_and_quals(typenode.type)
+            tp = self._get_type_pointer(itemtype, itemquals, declname=name)
+            quals = self._extract_quals(typenode)
+            return tp, quals
+        #
+        if isinstance(typenode, pycparser.c_ast.TypeDecl):
+            quals = self._extract_quals(typenode)
+            type = typenode.type
+            if isinstance(type, pycparser.c_ast.IdentifierType):
+                # assume a primitive type.  get it from .names, but reduce
+                # synonyms to a single chosen combination
+                names = list(type.names)
+                if names != ['signed', 'char']:    # keep this unmodified
+                    prefixes = {}
+                    while names:
+                        name = names[0]
+                        if name in ('short', 'long', 'signed', 'unsigned'):
+                            prefixes[name] = prefixes.get(name, 0) + 1
+                            del names[0]
+                        else:
+                            break
+                    # ignore the 'signed' prefix below, and reorder the others
+                    newnames = []
+                    for prefix in ('unsigned', 'short', 'long'):
+                        for i in range(prefixes.get(prefix, 0)):
+                            newnames.append(prefix)
+                    if not names:
+                        names = ['int']    # implicitly
+                    if names == ['int']:   # but kill it if 'short' or 'long'
+                        if 'short' in prefixes or 'long' in prefixes:
+                            names = []
+                    names = newnames + names
+                ident = ' '.join(names)
+                if ident == 'void':
+                    return model.void_type, quals
+                if ident == '__dotdotdot__':
+                    raise FFIError(':%d: bad usage of "..."' %
+                            typenode.coord.line)
+                tp0, quals0 = resolve_common_type(self, ident)
+                return tp0, (quals | quals0)
+            #
+            if isinstance(type, pycparser.c_ast.Struct):
+                # 'struct foobar'
+                tp = self._get_struct_union_enum_type('struct', type, name)
+                return tp, quals
+            #
+            if isinstance(type, pycparser.c_ast.Union):
+                # 'union foobar'
+                tp = self._get_struct_union_enum_type('union', type, name)
+                return tp, quals
+            #
+            if isinstance(type, pycparser.c_ast.Enum):
+                # 'enum foobar'
+                tp = self._get_struct_union_enum_type('enum', type, name)
+                return tp, quals
+        #
+        if isinstance(typenode, pycparser.c_ast.FuncDecl):
+            # a function type
+            return self._parse_function_type(typenode, name), 0
+        #
+        # nested anonymous structs or unions end up here
+        if isinstance(typenode, pycparser.c_ast.Struct):
+            return self._get_struct_union_enum_type('struct', typenode, name,
+                                                    nested=True), 0
+        if isinstance(typenode, pycparser.c_ast.Union):
+            return self._get_struct_union_enum_type('union', typenode, name,
+                                                    nested=True), 0
+        #
+        raise FFIError(":%d: bad or unsupported type declaration" %
+                typenode.coord.line)
+
+    def _parse_function_type(self, typenode, funcname=None):
+        params = list(getattr(typenode.args, 'params', []))
+        for i, arg in enumerate(params):
+            if not hasattr(arg, 'type'):
+                raise CDefError("%s arg %d: unknown type '%s'"
+                    " (if you meant to use the old C syntax of giving"
+                    " untyped arguments, it is not supported)"
+                    % (funcname or 'in expression', i + 1,
+                       getattr(arg, 'name', '?')))
+        ellipsis = (
+            len(params) > 0 and
+            isinstance(params[-1].type, pycparser.c_ast.TypeDecl) and
+            isinstance(params[-1].type.type,
+                       pycparser.c_ast.IdentifierType) and
+            params[-1].type.type.names == ['__dotdotdot__'])
+        if ellipsis:
+            params.pop()
+            if not params:
+                raise CDefError(
+                    "%s: a function with only '(...)' as argument"
+                    " is not correct C" % (funcname or 'in expression'))
+        args = [self._as_func_arg(*self._get_type_and_quals(argdeclnode.type))
+                for argdeclnode in params]
+        if not ellipsis and args == [model.void_type]:
+            args = []
+        result, quals = self._get_type_and_quals(typenode.type)
+        # the 'quals' on the result type are ignored.  HACK: we abuse them
+        # to detect __stdcall functions: we textually replace "__stdcall"
+        # with "volatile volatile const" above.
+        abi = None
+        if hasattr(typenode.type, 'quals'): # else, probable syntax error anyway
+            if typenode.type.quals[-3:] == ['volatile', 'volatile', 'const']:
+                abi = '__stdcall'
+        return model.RawFunctionType(tuple(args), result, ellipsis, abi)
+
+    def _as_func_arg(self, type, quals):
+        if isinstance(type, model.ArrayType):
+            return model.PointerType(type.item, quals)
+        elif isinstance(type, model.RawFunctionType):
+            return type.as_function_pointer()
+        else:
+            return type
+
+    def _get_struct_union_enum_type(self, kind, type, name=None, nested=False):
+        # First, a level of caching on the exact 'type' node of the AST.
+        # This is obscure, but needed because pycparser "unrolls" declarations
+        # such as "typedef struct { } foo_t, *foo_p" and we end up with
+        # an AST that is not a tree, but a DAG, with the "type" node of the
+        # two branches foo_t and foo_p of the trees being the same node.
+        # It's a bit silly but detecting "DAG-ness" in the AST tree seems
+        # to be the only way to distinguish this case from two independent
+        # structs.  See test_struct_with_two_usages.
+        try:
+            return self._structnode2type[type]
+        except KeyError:
+            pass
+        #
+        # Note that this must handle parsing "struct foo" any number of
+        # times and always return the same StructType object.  Additionally,
+        # one of these times (not necessarily the first), the fields of
+        # the struct can be specified with "struct foo { ...fields... }".
+        # If no name is given, then we have to create a new anonymous struct
+        # with no caching; in this case, the fields are either specified
+        # right now or never.
+        #
+        force_name = name
+        name = type.name
+        #
+        # get the type or create it if needed
+        if name is None:
+            # 'force_name' is used to guess a more readable name for
+            # anonymous structs, for the common case "typedef struct { } foo".
+            if force_name is not None:
+                explicit_name = '$%s' % force_name
+            else:
+                self._anonymous_counter += 1
+                explicit_name = '$%d' % self._anonymous_counter
+            tp = None
+        else:
+            explicit_name = name
+            key = '%s %s' % (kind, name)
+            tp, _ = self._declarations.get(key, (None, None))
+        #
+        if tp is None:
+            if kind == 'struct':
+                tp = model.StructType(explicit_name, None, None, None)
+            elif kind == 'union':
+                tp = model.UnionType(explicit_name, None, None, None)
+            elif kind == 'enum':
+                if explicit_name == '__dotdotdot__':
+                    raise CDefError("Enums cannot be declared with ...")
+                tp = self._build_enum_type(explicit_name, type.values)
+            else:
+                raise AssertionError("kind = %r" % (kind,))
+            if name is not None:
+                self._declare(key, tp)
+        else:
+            if kind == 'enum' and type.values is not None:
+                raise NotImplementedError(
+                    "enum %s: the '{}' declaration should appear on the first "
+                    "time the enum is mentioned, not later" % explicit_name)
+        if not tp.forcename:
+            tp.force_the_name(force_name)
+        if tp.forcename and '$' in tp.name:
+            self._declare('anonymous %s' % tp.forcename, tp)
+        #
+        self._structnode2type[type] = tp
+        #
+        # enums: done here
+        if kind == 'enum':
+            return tp
+        #
+        # is there a 'type.decls'?  If yes, then this is the place in the
+        # C sources that declare the fields.  If no, then just return the
+        # existing type, possibly still incomplete.
+        if type.decls is None:
+            return tp
+        #
+        if tp.fldnames is not None:
+            raise CDefError("duplicate declaration of struct %s" % name)
+        fldnames = []
+        fldtypes = []
+        fldbitsize = []
+        fldquals = []
+        for decl in type.decls:
+            if (isinstance(decl.type, pycparser.c_ast.IdentifierType) and
+                    ''.join(decl.type.names) == '__dotdotdot__'):
+                # XXX pycparser is inconsistent: 'names' should be a list
+                # of strings, but is sometimes just one string.  Use
+                # str.join() as a way to cope with both.
+                self._make_partial(tp, nested)
+                continue
+            if decl.bitsize is None:
+                bitsize = -1
+            else:
+                bitsize = self._parse_constant(decl.bitsize)
+            self._partial_length = False
+            type, fqual = self._get_type_and_quals(decl.type,
+                                                   partial_length_ok=True)
+            if self._partial_length:
+                self._make_partial(tp, nested)
+            if isinstance(type, model.StructType) and type.partial:
+                self._make_partial(tp, nested)
+            fldnames.append(decl.name or '')
+            fldtypes.append(type)
+            fldbitsize.append(bitsize)
+            fldquals.append(fqual)
+        tp.fldnames = tuple(fldnames)
+        tp.fldtypes = tuple(fldtypes)
+        tp.fldbitsize = tuple(fldbitsize)
+        tp.fldquals = tuple(fldquals)
+        if fldbitsize != [-1] * len(fldbitsize):
+            if isinstance(tp, model.StructType) and tp.partial:
+                raise NotImplementedError("%s: using both bitfields and '...;'"
+                                          % (tp,))
+        tp.packed = self._options.get('packed')
+        if tp.completed:    # must be re-completed: it is not opaque any more
+            tp.completed = 0
+            self._recomplete.append(tp)
+        return tp
+
+    def _make_partial(self, tp, nested):
+        if not isinstance(tp, model.StructOrUnion):
+            raise CDefError("%s cannot be partial" % (tp,))
+        if not tp.has_c_name() and not nested:
+            raise NotImplementedError("%s is partial but has no C name" %(tp,))
+        tp.partial = True
+
+    def _parse_constant(self, exprnode, partial_length_ok=False):
+        # for now, limited to expressions that are an immediate number
+        # or positive/negative number
+        if isinstance(exprnode, pycparser.c_ast.Constant):
+            s = exprnode.value
+            if '0' <= s[0] <= '9':
+                s = s.rstrip('uUlL')
+                try:
+                    if s.startswith('0'):
+                        return int(s, 8)
+                    else:
+                        return int(s, 10)
+                except ValueError:
+                    if len(s) > 1:
+                        if s.lower()[0:2] == '0x':
+                            return int(s, 16)
+                        elif s.lower()[0:2] == '0b':
+                            return int(s, 2)
+                raise CDefError("invalid constant %r" % (s,))
+            elif s[0] == "'" and s[-1] == "'" and (
+                    len(s) == 3 or (len(s) == 4 and s[1] == "\\")):
+                return ord(s[-2])
+            else:
+                raise CDefError("invalid constant %r" % (s,))
+        #
+        if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and
+                exprnode.op == '+'):
+            return self._parse_constant(exprnode.expr)
+        #
+        if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and
+                exprnode.op == '-'):
+            return -self._parse_constant(exprnode.expr)
+        # load previously defined int constant
+        if (isinstance(exprnode, pycparser.c_ast.ID) and
+                exprnode.name in self._int_constants):
+            return self._int_constants[exprnode.name]
+        #
+        if (isinstance(exprnode, pycparser.c_ast.ID) and
+                    exprnode.name == '__dotdotdotarray__'):
+            if partial_length_ok:
+                self._partial_length = True
+                return '...'
+            raise FFIError(":%d: unsupported '[...]' here, cannot derive "
+                           "the actual array length in this context"
+                           % exprnode.coord.line)
+        #
+        if isinstance(exprnode, pycparser.c_ast.BinaryOp):
+            left = self._parse_constant(exprnode.left)
+            right = self._parse_constant(exprnode.right)
+            if exprnode.op == '+':
+                return left + right
+            elif exprnode.op == '-':
+                return left - right
+            elif exprnode.op == '*':
+                return left * right
+            elif exprnode.op == '/':
+                return self._c_div(left, right)
+            elif exprnode.op == '%':
+                return left - self._c_div(left, right) * right
+            elif exprnode.op == '<<':
+                return left << right
+            elif exprnode.op == '>>':
+                return left >> right
+            elif exprnode.op == '&':
+                return left & right
+            elif exprnode.op == '|':
+                return left | right
+            elif exprnode.op == '^':
+                return left ^ right
+        #
+        raise FFIError(":%d: unsupported expression: expected a "
+                       "simple numeric constant" % exprnode.coord.line)
+
+    def _c_div(self, a, b):
+        result = a // b
+        if ((a < 0) ^ (b < 0)) and (a % b) != 0:
+            result += 1
+        return result
+
+    def _build_enum_type(self, explicit_name, decls):
+        if decls is not None:
+            partial = False
+            enumerators = []
+            enumvalues = []
+            nextenumvalue = 0
+            for enum in decls.enumerators:
+                if _r_enum_dotdotdot.match(enum.name):
+                    partial = True
+                    continue
+                if enum.value is not None:
+                    nextenumvalue = self._parse_constant(enum.value)
+                enumerators.append(enum.name)
+                enumvalues.append(nextenumvalue)
+                self._add_constants(enum.name, nextenumvalue)
+                nextenumvalue += 1
+            enumerators = tuple(enumerators)
+            enumvalues = tuple(enumvalues)
+            tp = model.EnumType(explicit_name, enumerators, enumvalues)
+            tp.partial = partial
+        else:   # opaque enum
+            tp = model.EnumType(explicit_name, (), ())
+        return tp
+
+    def include(self, other):
+        for name, (tp, quals) in other._declarations.items():
+            if name.startswith('anonymous $enum_$'):
+                continue   # fix for test_anonymous_enum_include
+            kind = name.split(' ', 1)[0]
+            if kind in ('struct', 'union', 'enum', 'anonymous', 'typedef'):
+                self._declare(name, tp, included=True, quals=quals)
+        for k, v in other._int_constants.items():
+            self._add_constants(k, v)
+
+    def _get_unknown_type(self, decl):
+        typenames = decl.type.type.names
+        if typenames == ['__dotdotdot__']:
+            return model.unknown_type(decl.name)
+
+        if typenames == ['__dotdotdotint__']:
+            if self._uses_new_feature is None:
+                self._uses_new_feature = "'typedef int... %s'" % decl.name
+            return model.UnknownIntegerType(decl.name)
+
+        if typenames == ['__dotdotdotfloat__']:
+            # note: not for 'long double' so far
+            if self._uses_new_feature is None:
+                self._uses_new_feature = "'typedef float... %s'" % decl.name
+            return model.UnknownFloatType(decl.name)
+
+        raise FFIError(':%d: unsupported usage of "..." in typedef'
+                       % decl.coord.line)
+
+    def _get_unknown_ptr_type(self, decl):
+        if decl.type.type.type.names == ['__dotdotdot__']:
+            return model.unknown_ptr_type(decl.name)
+        raise FFIError(':%d: unsupported usage of "..." in typedef'
+                       % decl.coord.line)
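
The hunk above vendors the tail of cffi's cparser.py, the module that turns the C-like declarations passed to ffi.cdef() into cffi model objects. As a minimal illustrative sketch (not part of this commit; the struct, typedef and function names are made up), the following cdef exercises several of the constructs handled here: an integer #define, a 'typedef int...' of unknown size, a partial struct, and an extern "Python" declaration. Only parsing happens at this point; actually resolving the '...' parts requires the API mode (ffi.set_source() plus a compile step).

    import cffi

    ffi = cffi.FFI()
    ffi.cdef("""
        #define MAX_USERS 64                 // collected as an integer constant
        typedef int... my_time_t;            // integer typedef of unknown size (_get_unknown_type)
        struct user { int id; ...; };        // partial struct: remaining fields resolved at build time
        extern "Python" int on_event(int);   // tagged 'extern_python ' by _declare_function
    """)
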

+ 31 - 0
rest_api_venv/Lib/site-packages/cffi/error.py

@@ -0,0 +1,31 @@
+
+class FFIError(Exception):
+    __module__ = 'cffi'
+
+class CDefError(Exception):
+    __module__ = 'cffi'
+    def __str__(self):
+        try:
+            current_decl = self.args[1]
+            filename = current_decl.coord.file
+            linenum = current_decl.coord.line
+            prefix = '%s:%d: ' % (filename, linenum)
+        except (AttributeError, TypeError, IndexError):
+            prefix = ''
+        return '%s%s' % (prefix, self.args[0])
+
+class VerificationError(Exception):
+    """ An error raised when verification fails
+    """
+    __module__ = 'cffi'
+
+class VerificationMissing(Exception):
+    """ An error raised when incomplete structures are passed into
+    cdef, but no verification has been done
+    """
+    __module__ = 'cffi'
+
+class PkgConfigError(Exception):
+    """ An error raised for missing modules in pkg-config
+    """
+    __module__ = 'cffi'
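
error.py defines the exception hierarchy used by the parser above. CDefError.__str__ optionally prefixes the message with "file:line: " taken from a second argument carrying a pycparser-style .coord; below is a rough sketch (not part of this commit, with hypothetical _FakeCoord/_FakeDecl stand-ins) of the two output forms.

    from cffi.error import CDefError

    class _FakeCoord:        # hypothetical stand-in for pycparser's Coord
        file = "example.h"
        line = 12

    class _FakeDecl:         # hypothetical stand-in for a pycparser decl node
        coord = _FakeCoord()

    print(CDefError("construct does not declare any variable"))
    # -> construct does not declare any variable
    print(CDefError("construct does not declare any variable", _FakeDecl()))
    # -> example.h:12: construct does not declare any variable
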

+ 127 - 0
rest_api_venv/Lib/site-packages/cffi/ffiplatform.py

@@ -0,0 +1,127 @@
+import sys, os
+from .error import VerificationError
+
+
+LIST_OF_FILE_NAMES = ['sources', 'include_dirs', 'library_dirs',
+                      'extra_objects', 'depends']
+
+def get_extension(srcfilename, modname, sources=(), **kwds):
+    _hack_at_distutils()
+    from distutils.core import Extension
+    allsources = [srcfilename]
+    for src in sources:
+        allsources.append(os.path.normpath(src))
+    return Extension(name=modname, sources=allsources, **kwds)
+
+def compile(tmpdir, ext, compiler_verbose=0, debug=None):
+    """Compile a C extension module using distutils."""
+
+    _hack_at_distutils()
+    saved_environ = os.environ.copy()
+    try:
+        outputfilename = _build(tmpdir, ext, compiler_verbose, debug)
+        outputfilename = os.path.abspath(outputfilename)
+    finally:
+        # workaround for a distutils bug where some env vars can
+        # become longer and longer every time distutils is used
+        for key, value in saved_environ.items():
+            if os.environ.get(key) != value:
+                os.environ[key] = value
+    return outputfilename
+
+def _build(tmpdir, ext, compiler_verbose=0, debug=None):
+    # XXX compact but horrible :-(
+    from distutils.core import Distribution
+    import distutils.errors, distutils.log
+    #
+    dist = Distribution({'ext_modules': [ext]})
+    dist.parse_config_files()
+    options = dist.get_option_dict('build_ext')
+    if debug is None:
+        debug = sys.flags.debug
+    options['debug'] = ('ffiplatform', debug)
+    options['force'] = ('ffiplatform', True)
+    options['build_lib'] = ('ffiplatform', tmpdir)
+    options['build_temp'] = ('ffiplatform', tmpdir)
+    #
+    try:
+        old_level = distutils.log.set_threshold(0) or 0
+        try:
+            distutils.log.set_verbosity(compiler_verbose)
+            dist.run_command('build_ext')
+            cmd_obj = dist.get_command_obj('build_ext')
+            [soname] = cmd_obj.get_outputs()
+        finally:
+            distutils.log.set_threshold(old_level)
+    except (distutils.errors.CompileError,
+            distutils.errors.LinkError) as e:
+        raise VerificationError('%s: %s' % (e.__class__.__name__, e))
+    #
+    return soname
+
+try:
+    from os.path import samefile
+except ImportError:
+    def samefile(f1, f2):
+        return os.path.abspath(f1) == os.path.abspath(f2)
+
+def maybe_relative_path(path):
+    if not os.path.isabs(path):
+        return path      # already relative
+    dir = path
+    names = []
+    while True:
+        prevdir = dir
+        dir, name = os.path.split(prevdir)
+        if dir == prevdir or not dir:
+            return path     # failed to make it relative
+        names.append(name)
+        try:
+            if samefile(dir, os.curdir):
+                names.reverse()
+                return os.path.join(*names)
+        except OSError:
+            pass
+
+# ____________________________________________________________
+
+try:
+    int_or_long = (int, long)
+    import cStringIO
+except NameError:
+    int_or_long = int      # Python 3
+    import io as cStringIO
+
+def _flatten(x, f):
+    if isinstance(x, str):
+        f.write('%ds%s' % (len(x), x))
+    elif isinstance(x, dict):
+        keys = sorted(x.keys())
+        f.write('%dd' % len(keys))
+        for key in keys:
+            _flatten(key, f)
+            _flatten(x[key], f)
+    elif isinstance(x, (list, tuple)):
+        f.write('%dl' % len(x))
+        for value in x:
+            _flatten(value, f)
+    elif isinstance(x, int_or_long):
+        f.write('%di' % (x,))
+    else:
+        raise TypeError(
+            "the keywords to verify() contains unsupported object %r" % (x,))
+
+def flatten(x):
+    f = cStringIO.StringIO()
+    _flatten(x, f)
+    return f.getvalue()
+
+def _hack_at_distutils():
+    # Windows-only workaround for some configurations: see
+    # https://bugs.python.org/issue23246 (Python 2.7 with 
+    # a specific MS compiler suite download)
+    if sys.platform == "win32":
+        try:
+            import setuptools    # for side-effects, patches distutils
+        except ImportError:
+            pass
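
ffiplatform.py wraps the distutils build step used when cffi compiles extension modules, and serialises the keyword arguments given to verify() with flatten(), so that identical build options produce the same cache key. A small sketch (not part of this commit; the keyword values are arbitrary) of the encoding: strings become '<len>s<text>', ints '<value>i', lists/tuples '<n>l' plus their items, and dicts '<n>d' plus their sorted key/value pairs.

    from cffi.ffiplatform import flatten

    key = flatten({"libraries": ["m"], "define_macros": [("DEBUG", 1)]})
    print(key)
    # -> 2d13sdefine_macros1l2l5sDEBUG1i9slibraries1l1sm
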

Some files were not shown because too many files have changed