diff --git a/.venv/Lib/site-packages/Flask_Login-0.6.3.dist-info/INSTALLER b/.venv/Lib/site-packages/Flask_Login-0.6.3.dist-info/INSTALLER
new file mode 100644
index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68
--- /dev/null
+++ b/.venv/Lib/site-packages/Flask_Login-0.6.3.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/.venv/Lib/site-packages/Flask_Login-0.6.3.dist-info/LICENSE b/.venv/Lib/site-packages/Flask_Login-0.6.3.dist-info/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..04463812d8c9852f8b999e62b7fdbd6fd345c302
--- /dev/null
+++ b/.venv/Lib/site-packages/Flask_Login-0.6.3.dist-info/LICENSE
@@ -0,0 +1,22 @@
+Copyright (c) 2011 Matthew Frazier
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
diff --git a/.venv/Lib/site-packages/Flask_Login-0.6.3.dist-info/METADATA b/.venv/Lib/site-packages/Flask_Login-0.6.3.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..445a0b23ed2b7a814e8b29ac604d5b51f15c3640
--- /dev/null
+++ b/.venv/Lib/site-packages/Flask_Login-0.6.3.dist-info/METADATA
@@ -0,0 +1,183 @@
+Metadata-Version: 2.1
+Name: Flask-Login
+Version: 0.6.3
+Summary: User authentication and session management for Flask.
+Home-page: https://github.com/maxcountryman/flask-login
+Author: Matthew Frazier
+Author-email: leafstormrush@gmail.com
+Maintainer: Max Countryman
+License: MIT
+Project-URL: Documentation, https://flask-login.readthedocs.io/
+Project-URL: Changes, https://github.com/maxcountryman/flask-login/blob/main/CHANGES.md
+Project-URL: Source Code, https://github.com/maxcountryman/flask-login
+Project-URL: Issue Tracker, https://github.com/maxcountryman/flask-login/issues
+Classifier: Development Status :: 4 - Beta
+Classifier: Environment :: Web Environment
+Classifier: Framework :: Flask
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Requires-Python: >=3.7
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Requires-Dist: Flask >=1.0.4
+Requires-Dist: Werkzeug >=1.0.1
+
+# Flask-Login
+
+![Tests](https://github.com/maxcountryman/flask-login/workflows/Tests/badge.svg)
+[![coverage](https://coveralls.io/repos/maxcountryman/flask-login/badge.svg?branch=main&service=github)](https://coveralls.io/github/maxcountryman/flask-login?branch=main)
+[![Software License](https://img.shields.io/badge/license-MIT-brightgreen.svg)](LICENSE)
+
+Flask-Login provides user session management for Flask. It handles the common
+tasks of logging in, logging out, and remembering your users' sessions over
+extended periods of time.
+
+Flask-Login is not bound to any particular database system or permissions
+model. The only requirement is that your user objects implement a few methods,
+and that you provide a callback to the extension capable of loading users from
+their ID.
+
+## Installation
+
+Install the extension with pip:
+
+```sh
+$ pip install flask-login
+```
+
+## Usage
+
+Once installed, the Flask-Login is easy to use. Let's walk through setting up
+a basic application. Also please note that this is a very basic guide: we will
+be taking shortcuts here that you should never take in a real application.
+
+To begin we'll set up a Flask app:
+
+```python
+import flask
+
+app = flask.Flask(__name__)
+app.secret_key = 'super secret string'  # Change this!
+```
+
+Flask-Login works via a login manager. To kick things off, we'll set up the
+login manager by instantiating it and telling it about our Flask app:
+
+```python
+import flask_login
+
+login_manager = flask_login.LoginManager()
+
+login_manager.init_app(app)
+```
+
+To keep things simple we're going to use a dictionary to represent a database
+of users. In a real application, this would be an actual persistence layer.
+However it's important to point out this is a feature of Flask-Login: it
+doesn't care how your data is stored so long as you tell it how to retrieve it!
+
+```python
+# Our mock database.
+users = {'foo@bar.tld': {'password': 'secret'}}
+```
+
+We also need to tell Flask-Login how to load a user from a Flask request and
+from its session. To do this we need to define our user object, a
+`user_loader` callback, and a `request_loader` callback.
+
+```python
+class User(flask_login.UserMixin):
+    pass
+
+
+@login_manager.user_loader
+def user_loader(email):
+    if email not in users:
+        return
+
+    user = User()
+    user.id = email
+    return user
+
+
+@login_manager.request_loader
+def request_loader(request):
+    email = request.form.get('email')
+    if email not in users:
+        return
+
+    user = User()
+    user.id = email
+    return user
+```
+
+Now we're ready to define our views. We can start with a login view, which will
+populate the session with authentication bits. After that we can define a view
+that requires authentication.
+
+```python
+@app.route('/login', methods=['GET', 'POST'])
+def login():
+    if flask.request.method == 'GET':
+        return '''
+               <form action='login' method='POST'>
+                <input type='text' name='email' id='email' placeholder='email'/>
+                <input type='password' name='password' id='password' placeholder='password'/>
+                <input type='submit' name='submit'/>
+               </form>
+               '''
+
+    email = flask.request.form['email']
+    if email in users and flask.request.form['password'] == users[email]['password']:
+        user = User()
+        user.id = email
+        flask_login.login_user(user)
+        return flask.redirect(flask.url_for('protected'))
+
+    return 'Bad login'
+
+
+@app.route('/protected')
+@flask_login.login_required
+def protected():
+    return 'Logged in as: ' + flask_login.current_user.id
+```
+
+Finally we can define a view to clear the session and log users out:
+
+```python
+@app.route('/logout')
+def logout():
+    flask_login.logout_user()
+    return 'Logged out'
+```
+
+We now have a basic working application that makes use of session-based
+authentication. To round things off, we should provide a callback for login
+failures:
+
+```python
+@login_manager.unauthorized_handler
+def unauthorized_handler():
+    return 'Unauthorized', 401
+```
+
+Documentation for Flask-Login is available on [ReadTheDocs](https://flask-login.readthedocs.io/en/latest/).
+For complete understanding of available configuration, please refer to the [source code](https://github.com/maxcountryman/flask-login).
+
+
+## Contributing
+
+We welcome contributions! If you would like to hack on Flask-Login, please
+follow these steps:
+
+1. Fork this repository
+2. Make your changes
+3. Install the dev requirements with `pip install -r requirements/dev.txt`
+4. Submit a pull request after running `tox` (ensure it does not error!)
+
+Please give us adequate time to review your submission. Thanks!
diff --git a/.venv/Lib/site-packages/Flask_Login-0.6.3.dist-info/RECORD b/.venv/Lib/site-packages/Flask_Login-0.6.3.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..399f08f170fb6c8a99a02bb085b8cf8fe8065462
--- /dev/null
+++ b/.venv/Lib/site-packages/Flask_Login-0.6.3.dist-info/RECORD
@@ -0,0 +1,23 @@
+Flask_Login-0.6.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+Flask_Login-0.6.3.dist-info/LICENSE,sha256=ep37nF2iBO0TcPO2LBPimSoS2h2nB_R-FWiX7rQ0Tls,1059
+Flask_Login-0.6.3.dist-info/METADATA,sha256=AUSHR5Po6-Cwmz1KBrAZbTzR-iVVFvtb2NQKYl7UuAU,5799
+Flask_Login-0.6.3.dist-info/RECORD,,
+Flask_Login-0.6.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+Flask_Login-0.6.3.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPEjma1nZyJ0,92
+Flask_Login-0.6.3.dist-info/top_level.txt,sha256=OuXmIpiFnXLvW-iBbW2km7ZIy5EZvwSBnYaOC3Kt7j8,12
+flask_login/__about__.py,sha256=Kkp5e9mV9G7vK_FqZof-g9RFmyyBzq1gge5aKXgvilE,389
+flask_login/__init__.py,sha256=wYQiQCikT_Ndp3PhOD-1gRTGCrUPIE-FrjQUrT9aVAg,2681
+flask_login/__pycache__/__about__.cpython-310.pyc,,
+flask_login/__pycache__/__init__.cpython-310.pyc,,
+flask_login/__pycache__/config.cpython-310.pyc,,
+flask_login/__pycache__/login_manager.cpython-310.pyc,,
+flask_login/__pycache__/mixins.cpython-310.pyc,,
+flask_login/__pycache__/signals.cpython-310.pyc,,
+flask_login/__pycache__/test_client.cpython-310.pyc,,
+flask_login/__pycache__/utils.cpython-310.pyc,,
+flask_login/config.py,sha256=YAocv18La7YGQyNY5aT7rU1GQIZnX6pvchwqx3kA9p8,1813
+flask_login/login_manager.py,sha256=h20F_iv3mqc6rIJ4-V6_XookzOUl8Rcpasua-dCByQY,20073
+flask_login/mixins.py,sha256=gPd7otMRljxw0eUhUMbHsnEBc_jK2cYdxg5KFLuJcoI,1528
+flask_login/signals.py,sha256=xCMoFHKU1RTVt1NY-Gfl0OiVKpiyNt6YJw_PsgkjY3w,2464
+flask_login/test_client.py,sha256=6mrjiBRLGJpgvvFlLypXPTBLiMp0BAN-Ft-uogqC81g,517
+flask_login/utils.py,sha256=Y1wxjCVxpYohBaQJ0ADLypQ-VvBNycwG-gVXFF7k99I,14021
diff --git a/.venv/Lib/site-packages/Flask_Login-0.6.3.dist-info/REQUESTED b/.venv/Lib/site-packages/Flask_Login-0.6.3.dist-info/REQUESTED
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/.venv/Lib/site-packages/Flask_Login-0.6.3.dist-info/WHEEL b/.venv/Lib/site-packages/Flask_Login-0.6.3.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..ba48cbcf9275ac6d88fe25821695e14d0a822e79
--- /dev/null
+++ b/.venv/Lib/site-packages/Flask_Login-0.6.3.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.41.3)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/.venv/Lib/site-packages/Flask_Login-0.6.3.dist-info/top_level.txt b/.venv/Lib/site-packages/Flask_Login-0.6.3.dist-info/top_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..31514bd20ce166ec4ec252051f2836a8200a2457
--- /dev/null
+++ b/.venv/Lib/site-packages/Flask_Login-0.6.3.dist-info/top_level.txt
@@ -0,0 +1 @@
+flask_login
diff --git a/.venv/Lib/site-packages/flask_login/__about__.py b/.venv/Lib/site-packages/flask_login/__about__.py
new file mode 100644
index 0000000000000000000000000000000000000000..1918d54f750466ebdbb723b6b03edaee46a36af7
--- /dev/null
+++ b/.venv/Lib/site-packages/flask_login/__about__.py
@@ -0,0 +1,10 @@
+__title__ = "Flask-Login"
+__description__ = "User session management for Flask"
+__url__ = "https://github.com/maxcountryman/flask-login"
+__version_info__ = ("0", "6", "3")
+__version__ = ".".join(__version_info__)
+__author__ = "Matthew Frazier"
+__author_email__ = "leafstormrush@gmail.com"
+__maintainer__ = "Max Countryman"
+__license__ = "MIT"
+__copyright__ = "(c) 2011 by Matthew Frazier"
diff --git a/.venv/Lib/site-packages/flask_login/__init__.py b/.venv/Lib/site-packages/flask_login/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..fbe9c3e774e2bfae66550522b405612a585a9a2a
--- /dev/null
+++ b/.venv/Lib/site-packages/flask_login/__init__.py
@@ -0,0 +1,94 @@
+from .__about__ import __version__
+from .config import AUTH_HEADER_NAME
+from .config import COOKIE_DURATION
+from .config import COOKIE_HTTPONLY
+from .config import COOKIE_NAME
+from .config import COOKIE_SECURE
+from .config import ID_ATTRIBUTE
+from .config import LOGIN_MESSAGE
+from .config import LOGIN_MESSAGE_CATEGORY
+from .config import REFRESH_MESSAGE
+from .config import REFRESH_MESSAGE_CATEGORY
+from .login_manager import LoginManager
+from .mixins import AnonymousUserMixin
+from .mixins import UserMixin
+from .signals import session_protected
+from .signals import user_accessed
+from .signals import user_loaded_from_cookie
+from .signals import user_loaded_from_request
+from .signals import user_logged_in
+from .signals import user_logged_out
+from .signals import user_login_confirmed
+from .signals import user_needs_refresh
+from .signals import user_unauthorized
+from .test_client import FlaskLoginClient
+from .utils import confirm_login
+from .utils import current_user
+from .utils import decode_cookie
+from .utils import encode_cookie
+from .utils import fresh_login_required
+from .utils import login_fresh
+from .utils import login_remembered
+from .utils import login_required
+from .utils import login_url
+from .utils import login_user
+from .utils import logout_user
+from .utils import make_next_param
+from .utils import set_login_view
+
+__all__ = [
+    "__version__",
+    "AUTH_HEADER_NAME",
+    "COOKIE_DURATION",
+    "COOKIE_HTTPONLY",
+    "COOKIE_NAME",
+    "COOKIE_SECURE",
+    "ID_ATTRIBUTE",
+    "LOGIN_MESSAGE",
+    "LOGIN_MESSAGE_CATEGORY",
+    "REFRESH_MESSAGE",
+    "REFRESH_MESSAGE_CATEGORY",
+    "LoginManager",
+    "AnonymousUserMixin",
+    "UserMixin",
+    "session_protected",
+    "user_accessed",
+    "user_loaded_from_cookie",
+    "user_loaded_from_request",
+    "user_logged_in",
+    "user_logged_out",
+    "user_login_confirmed",
+    "user_needs_refresh",
+    "user_unauthorized",
+    "FlaskLoginClient",
+    "confirm_login",
+    "current_user",
+    "decode_cookie",
+    "encode_cookie",
+    "fresh_login_required",
+    "login_fresh",
+    "login_remembered",
+    "login_required",
+    "login_url",
+    "login_user",
+    "logout_user",
+    "make_next_param",
+    "set_login_view",
+]
+
+
+def __getattr__(name):
+    if name == "user_loaded_from_header":
+        import warnings
+        from .signals import _user_loaded_from_header
+
+        warnings.warn(
+            "'user_loaded_from_header' is deprecated and will be"
+            " removed in Flask-Login 0.7. Use"
+            " 'user_loaded_from_request' instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        return _user_loaded_from_header
+
+    raise AttributeError(name)
diff --git a/.venv/Lib/site-packages/flask_login/__pycache__/__about__.cpython-310.pyc b/.venv/Lib/site-packages/flask_login/__pycache__/__about__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0773706312d55329b2dea0c4e137008b2860dd4a
Binary files /dev/null and b/.venv/Lib/site-packages/flask_login/__pycache__/__about__.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/flask_login/__pycache__/__init__.cpython-310.pyc b/.venv/Lib/site-packages/flask_login/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..313a8fc65bc89b386e1956b31a412b6a705c8907
Binary files /dev/null and b/.venv/Lib/site-packages/flask_login/__pycache__/__init__.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/flask_login/__pycache__/config.cpython-310.pyc b/.venv/Lib/site-packages/flask_login/__pycache__/config.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..bf8223b678a3d8fa7c75560927d2d75dcaedec11
Binary files /dev/null and b/.venv/Lib/site-packages/flask_login/__pycache__/config.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/flask_login/__pycache__/login_manager.cpython-310.pyc b/.venv/Lib/site-packages/flask_login/__pycache__/login_manager.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3babc385b605c369873627826ad1f3da23c4176a
Binary files /dev/null and b/.venv/Lib/site-packages/flask_login/__pycache__/login_manager.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/flask_login/__pycache__/mixins.cpython-310.pyc b/.venv/Lib/site-packages/flask_login/__pycache__/mixins.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d364451625f54da7c0caae206130d359d200f99a
Binary files /dev/null and b/.venv/Lib/site-packages/flask_login/__pycache__/mixins.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/flask_login/__pycache__/signals.cpython-310.pyc b/.venv/Lib/site-packages/flask_login/__pycache__/signals.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..538e5955bbfbf46cd6d3058dcf51539877d618a1
Binary files /dev/null and b/.venv/Lib/site-packages/flask_login/__pycache__/signals.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/flask_login/__pycache__/test_client.cpython-310.pyc b/.venv/Lib/site-packages/flask_login/__pycache__/test_client.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c51b8b9a78b335319c860961ca8a553d2b7bfea9
Binary files /dev/null and b/.venv/Lib/site-packages/flask_login/__pycache__/test_client.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/flask_login/__pycache__/utils.cpython-310.pyc b/.venv/Lib/site-packages/flask_login/__pycache__/utils.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..5cf82e4704fc8dbd419a319d4ff5e9cc75a4326a
Binary files /dev/null and b/.venv/Lib/site-packages/flask_login/__pycache__/utils.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/flask_login/config.py b/.venv/Lib/site-packages/flask_login/config.py
new file mode 100644
index 0000000000000000000000000000000000000000..fe2db2c5c8b3a6577fde3c4983a2ed3a8c515a1e
--- /dev/null
+++ b/.venv/Lib/site-packages/flask_login/config.py
@@ -0,0 +1,55 @@
+from datetime import timedelta
+
+#: The default name of the "remember me" cookie (``remember_token``)
+COOKIE_NAME = "remember_token"
+
+#: The default time before the "remember me" cookie expires (365 days).
+COOKIE_DURATION = timedelta(days=365)
+
+#: Whether the "remember me" cookie requires Secure; defaults to ``False``
+COOKIE_SECURE = False
+
+#: Whether the "remember me" cookie uses HttpOnly or not; defaults to ``True``
+COOKIE_HTTPONLY = True
+
+#: Whether the "remember me" cookie requires same origin; defaults to ``None``
+COOKIE_SAMESITE = None
+
+#: The default flash message to display when users need to log in.
+LOGIN_MESSAGE = "Please log in to access this page."
+
+#: The default flash message category to display when users need to log in.
+LOGIN_MESSAGE_CATEGORY = "message"
+
+#: The default flash message to display when users need to reauthenticate.
+REFRESH_MESSAGE = "Please reauthenticate to access this page."
+
+#: The default flash message category to display when users need to
+#: reauthenticate.
+REFRESH_MESSAGE_CATEGORY = "message"
+
+#: The default attribute to retreive the str id of the user
+ID_ATTRIBUTE = "get_id"
+
+#: Default name of the auth header (``Authorization``)
+AUTH_HEADER_NAME = "Authorization"
+
+#: A set of session keys that are populated by Flask-Login. Use this set to
+#: purge keys safely and accurately.
+SESSION_KEYS = {
+    "_user_id",
+    "_remember",
+    "_remember_seconds",
+    "_id",
+    "_fresh",
+    "next",
+}
+
+#: A set of HTTP methods which are exempt from `login_required` and
+#: `fresh_login_required`. By default, this is just ``OPTIONS``.
+EXEMPT_METHODS = {"OPTIONS"}
+
+#: If true, the page the user is attempting to access is stored in the session
+#: rather than a url parameter when redirecting to the login view; defaults to
+#: ``False``.
+USE_SESSION_FOR_NEXT = False
diff --git a/.venv/Lib/site-packages/flask_login/login_manager.py b/.venv/Lib/site-packages/flask_login/login_manager.py
new file mode 100644
index 0000000000000000000000000000000000000000..49d0844fa50bbb060f1f5d2d16ee89aeaa213b69
--- /dev/null
+++ b/.venv/Lib/site-packages/flask_login/login_manager.py
@@ -0,0 +1,543 @@
+from datetime import datetime
+from datetime import timedelta
+
+from flask import abort
+from flask import current_app
+from flask import flash
+from flask import g
+from flask import has_app_context
+from flask import redirect
+from flask import request
+from flask import session
+
+from .config import AUTH_HEADER_NAME
+from .config import COOKIE_DURATION
+from .config import COOKIE_HTTPONLY
+from .config import COOKIE_NAME
+from .config import COOKIE_SAMESITE
+from .config import COOKIE_SECURE
+from .config import ID_ATTRIBUTE
+from .config import LOGIN_MESSAGE
+from .config import LOGIN_MESSAGE_CATEGORY
+from .config import REFRESH_MESSAGE
+from .config import REFRESH_MESSAGE_CATEGORY
+from .config import SESSION_KEYS
+from .config import USE_SESSION_FOR_NEXT
+from .mixins import AnonymousUserMixin
+from .signals import session_protected
+from .signals import user_accessed
+from .signals import user_loaded_from_cookie
+from .signals import user_loaded_from_request
+from .signals import user_needs_refresh
+from .signals import user_unauthorized
+from .utils import _create_identifier
+from .utils import _user_context_processor
+from .utils import decode_cookie
+from .utils import encode_cookie
+from .utils import expand_login_view
+from .utils import login_url as make_login_url
+from .utils import make_next_param
+
+
+class LoginManager:
+    """This object is used to hold the settings used for logging in. Instances
+    of :class:`LoginManager` are *not* bound to specific apps, so you can
+    create one in the main body of your code and then bind it to your
+    app in a factory function.
+    """
+
+    def __init__(self, app=None, add_context_processor=True):
+        #: A class or factory function that produces an anonymous user, which
+        #: is used when no one is logged in.
+        self.anonymous_user = AnonymousUserMixin
+
+        #: The name of the view to redirect to when the user needs to log in.
+        #: (This can be an absolute URL as well, if your authentication
+        #: machinery is external to your application.)
+        self.login_view = None
+
+        #: Names of views to redirect to when the user needs to log in,
+        #: per blueprint. If the key value is set to None the value of
+        #: :attr:`login_view` will be used instead.
+        self.blueprint_login_views = {}
+
+        #: The message to flash when a user is redirected to the login page.
+        self.login_message = LOGIN_MESSAGE
+
+        #: The message category to flash when a user is redirected to the login
+        #: page.
+        self.login_message_category = LOGIN_MESSAGE_CATEGORY
+
+        #: The name of the view to redirect to when the user needs to
+        #: reauthenticate.
+        self.refresh_view = None
+
+        #: The message to flash when a user is redirected to the 'needs
+        #: refresh' page.
+        self.needs_refresh_message = REFRESH_MESSAGE
+
+        #: The message category to flash when a user is redirected to the
+        #: 'needs refresh' page.
+        self.needs_refresh_message_category = REFRESH_MESSAGE_CATEGORY
+
+        #: The mode to use session protection in. This can be either
+        #: ``'basic'`` (the default) or ``'strong'``, or ``None`` to disable
+        #: it.
+        self.session_protection = "basic"
+
+        #: If present, used to translate flash messages ``self.login_message``
+        #: and ``self.needs_refresh_message``
+        self.localize_callback = None
+
+        self.unauthorized_callback = None
+
+        self.needs_refresh_callback = None
+
+        self.id_attribute = ID_ATTRIBUTE
+
+        self._user_callback = None
+
+        self._header_callback = None
+
+        self._request_callback = None
+
+        self._session_identifier_generator = _create_identifier
+
+        if app is not None:
+            self.init_app(app, add_context_processor)
+
+    def setup_app(self, app, add_context_processor=True):  # pragma: no cover
+        """
+        This method has been deprecated. Please use
+        :meth:`LoginManager.init_app` instead.
+        """
+        import warnings
+
+        warnings.warn(
+            "'setup_app' is deprecated and will be removed in"
+            " Flask-Login 0.7. Use 'init_app' instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        self.init_app(app, add_context_processor)
+
+    def init_app(self, app, add_context_processor=True):
+        """
+        Configures an application. This registers an `after_request` call, and
+        attaches this `LoginManager` to it as `app.login_manager`.
+
+        :param app: The :class:`flask.Flask` object to configure.
+        :type app: :class:`flask.Flask`
+        :param add_context_processor: Whether to add a context processor to
+            the app that adds a `current_user` variable to the template.
+            Defaults to ``True``.
+        :type add_context_processor: bool
+        """
+        app.login_manager = self
+        app.after_request(self._update_remember_cookie)
+
+        if add_context_processor:
+            app.context_processor(_user_context_processor)
+
+    def unauthorized(self):
+        """
+        This is called when the user is required to log in. If you register a
+        callback with :meth:`LoginManager.unauthorized_handler`, then it will
+        be called. Otherwise, it will take the following actions:
+
+            - Flash :attr:`LoginManager.login_message` to the user.
+
+            - If the app is using blueprints find the login view for
+              the current blueprint using `blueprint_login_views`. If the app
+              is not using blueprints or the login view for the current
+              blueprint is not specified use the value of `login_view`.
+
+            - Redirect the user to the login view. (The page they were
+              attempting to access will be passed in the ``next`` query
+              string variable, so you can redirect there if present instead
+              of the homepage. Alternatively, it will be added to the session
+              as ``next`` if USE_SESSION_FOR_NEXT is set.)
+
+        If :attr:`LoginManager.login_view` is not defined, then it will simply
+        raise a HTTP 401 (Unauthorized) error instead.
+
+        This should be returned from a view or before/after_request function,
+        otherwise the redirect will have no effect.
+        """
+        user_unauthorized.send(current_app._get_current_object())
+
+        if self.unauthorized_callback:
+            return self.unauthorized_callback()
+
+        if request.blueprint in self.blueprint_login_views:
+            login_view = self.blueprint_login_views[request.blueprint]
+        else:
+            login_view = self.login_view
+
+        if not login_view:
+            abort(401)
+
+        if self.login_message:
+            if self.localize_callback is not None:
+                flash(
+                    self.localize_callback(self.login_message),
+                    category=self.login_message_category,
+                )
+            else:
+                flash(self.login_message, category=self.login_message_category)
+
+        config = current_app.config
+        if config.get("USE_SESSION_FOR_NEXT", USE_SESSION_FOR_NEXT):
+            login_url = expand_login_view(login_view)
+            session["_id"] = self._session_identifier_generator()
+            session["next"] = make_next_param(login_url, request.url)
+            redirect_url = make_login_url(login_view)
+        else:
+            redirect_url = make_login_url(login_view, next_url=request.url)
+
+        return redirect(redirect_url)
+
+    def user_loader(self, callback):
+        """
+        This sets the callback for reloading a user from the session. The
+        function you set should take a user ID (a ``str``) and return a
+        user object, or ``None`` if the user does not exist.
+
+        :param callback: The callback for retrieving a user object.
+        :type callback: callable
+        """
+        self._user_callback = callback
+        return self.user_callback
+
+    @property
+    def user_callback(self):
+        """Gets the user_loader callback set by user_loader decorator."""
+        return self._user_callback
+
+    def request_loader(self, callback):
+        """
+        This sets the callback for loading a user from a Flask request.
+        The function you set should take Flask request object and
+        return a user object, or `None` if the user does not exist.
+
+        :param callback: The callback for retrieving a user object.
+        :type callback: callable
+        """
+        self._request_callback = callback
+        return self.request_callback
+
+    @property
+    def request_callback(self):
+        """Gets the request_loader callback set by request_loader decorator."""
+        return self._request_callback
+
+    def unauthorized_handler(self, callback):
+        """
+        This will set the callback for the `unauthorized` method, which among
+        other things is used by `login_required`. It takes no arguments, and
+        should return a response to be sent to the user instead of their
+        normal view.
+
+        :param callback: The callback for unauthorized users.
+        :type callback: callable
+        """
+        self.unauthorized_callback = callback
+        return callback
+
+    def needs_refresh_handler(self, callback):
+        """
+        This will set the callback for the `needs_refresh` method, which among
+        other things is used by `fresh_login_required`. It takes no arguments,
+        and should return a response to be sent to the user instead of their
+        normal view.
+
+        :param callback: The callback for unauthorized users.
+        :type callback: callable
+        """
+        self.needs_refresh_callback = callback
+        return callback
+
+    def needs_refresh(self):
+        """
+        This is called when the user is logged in, but they need to be
+        reauthenticated because their session is stale. If you register a
+        callback with `needs_refresh_handler`, then it will be called.
+        Otherwise, it will take the following actions:
+
+            - Flash :attr:`LoginManager.needs_refresh_message` to the user.
+
+            - Redirect the user to :attr:`LoginManager.refresh_view`. (The page
+              they were attempting to access will be passed in the ``next``
+              query string variable, so you can redirect there if present
+              instead of the homepage.)
+
+        If :attr:`LoginManager.refresh_view` is not defined, then it will
+        simply raise a HTTP 401 (Unauthorized) error instead.
+
+        This should be returned from a view or before/after_request function,
+        otherwise the redirect will have no effect.
+        """
+        user_needs_refresh.send(current_app._get_current_object())
+
+        if self.needs_refresh_callback:
+            return self.needs_refresh_callback()
+
+        if not self.refresh_view:
+            abort(401)
+
+        if self.needs_refresh_message:
+            if self.localize_callback is not None:
+                flash(
+                    self.localize_callback(self.needs_refresh_message),
+                    category=self.needs_refresh_message_category,
+                )
+            else:
+                flash(
+                    self.needs_refresh_message,
+                    category=self.needs_refresh_message_category,
+                )
+
+        config = current_app.config
+        if config.get("USE_SESSION_FOR_NEXT", USE_SESSION_FOR_NEXT):
+            login_url = expand_login_view(self.refresh_view)
+            session["_id"] = self._session_identifier_generator()
+            session["next"] = make_next_param(login_url, request.url)
+            redirect_url = make_login_url(self.refresh_view)
+        else:
+            login_url = self.refresh_view
+            redirect_url = make_login_url(login_url, next_url=request.url)
+
+        return redirect(redirect_url)
+
+    def header_loader(self, callback):
+        """
+        This function has been deprecated. Please use
+        :meth:`LoginManager.request_loader` instead.
+
+        This sets the callback for loading a user from a header value.
+        The function you set should take an authentication token and
+        return a user object, or `None` if the user does not exist.
+
+        :param callback: The callback for retrieving a user object.
+        :type callback: callable
+        """
+        import warnings
+
+        warnings.warn(
+            "'header_loader' is deprecated and will be removed in"
+            " Flask-Login 0.7. Use 'request_loader' instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        self._header_callback = callback
+        return callback
+
+    def _update_request_context_with_user(self, user=None):
+        """Store the given user as ctx.user."""
+
+        if user is None:
+            user = self.anonymous_user()
+
+        g._login_user = user
+
+    def _load_user(self):
+        """Loads user from session or remember_me cookie as applicable"""
+
+        if self._user_callback is None and self._request_callback is None:
+            raise Exception(
+                "Missing user_loader or request_loader. Refer to "
+                "http://flask-login.readthedocs.io/#how-it-works "
+                "for more info."
+            )
+
+        user_accessed.send(current_app._get_current_object())
+
+        # Check SESSION_PROTECTION
+        if self._session_protection_failed():
+            return self._update_request_context_with_user()
+
+        user = None
+
+        # Load user from Flask Session
+        user_id = session.get("_user_id")
+        if user_id is not None and self._user_callback is not None:
+            user = self._user_callback(user_id)
+
+        # Load user from Remember Me Cookie or Request Loader
+        if user is None:
+            config = current_app.config
+            cookie_name = config.get("REMEMBER_COOKIE_NAME", COOKIE_NAME)
+            header_name = config.get("AUTH_HEADER_NAME", AUTH_HEADER_NAME)
+            has_cookie = (
+                cookie_name in request.cookies and session.get("_remember") != "clear"
+            )
+            if has_cookie:
+                cookie = request.cookies[cookie_name]
+                user = self._load_user_from_remember_cookie(cookie)
+            elif self._request_callback:
+                user = self._load_user_from_request(request)
+            elif header_name in request.headers:
+                header = request.headers[header_name]
+                user = self._load_user_from_header(header)
+
+        return self._update_request_context_with_user(user)
+
+    def _session_protection_failed(self):
+        sess = session._get_current_object()
+        ident = self._session_identifier_generator()
+
+        app = current_app._get_current_object()
+        mode = app.config.get("SESSION_PROTECTION", self.session_protection)
+
+        if not mode or mode not in ["basic", "strong"]:
+            return False
+
+        # if the sess is empty, it's an anonymous user or just logged out
+        # so we can skip this
+        if sess and ident != sess.get("_id", None):
+            if mode == "basic" or sess.permanent:
+                if sess.get("_fresh") is not False:
+                    sess["_fresh"] = False
+                session_protected.send(app)
+                return False
+            elif mode == "strong":
+                for k in SESSION_KEYS:
+                    sess.pop(k, None)
+
+                sess["_remember"] = "clear"
+                session_protected.send(app)
+                return True
+
+        return False
+
+    def _load_user_from_remember_cookie(self, cookie):
+        user_id = decode_cookie(cookie)
+        if user_id is not None:
+            session["_user_id"] = user_id
+            session["_fresh"] = False
+            user = None
+            if self._user_callback:
+                user = self._user_callback(user_id)
+            if user is not None:
+                app = current_app._get_current_object()
+                user_loaded_from_cookie.send(app, user=user)
+                return user
+        return None
+
+    def _load_user_from_header(self, header):
+        if self._header_callback:
+            user = self._header_callback(header)
+            if user is not None:
+                app = current_app._get_current_object()
+
+                from .signals import _user_loaded_from_header
+
+                _user_loaded_from_header.send(app, user=user)
+                return user
+        return None
+
+    def _load_user_from_request(self, request):
+        if self._request_callback:
+            user = self._request_callback(request)
+            if user is not None:
+                app = current_app._get_current_object()
+                user_loaded_from_request.send(app, user=user)
+                return user
+        return None
+
+    def _update_remember_cookie(self, response):
+        # Don't modify the session unless there's something to do.
+        if "_remember" not in session and current_app.config.get(
+            "REMEMBER_COOKIE_REFRESH_EACH_REQUEST"
+        ):
+            session["_remember"] = "set"
+
+        if "_remember" in session:
+            operation = session.pop("_remember", None)
+
+            if operation == "set" and "_user_id" in session:
+                self._set_cookie(response)
+            elif operation == "clear":
+                self._clear_cookie(response)
+
+        return response
+
+    def _set_cookie(self, response):
+        # cookie settings
+        config = current_app.config
+        cookie_name = config.get("REMEMBER_COOKIE_NAME", COOKIE_NAME)
+        domain = config.get("REMEMBER_COOKIE_DOMAIN")
+        path = config.get("REMEMBER_COOKIE_PATH", "/")
+
+        secure = config.get("REMEMBER_COOKIE_SECURE", COOKIE_SECURE)
+        httponly = config.get("REMEMBER_COOKIE_HTTPONLY", COOKIE_HTTPONLY)
+        samesite = config.get("REMEMBER_COOKIE_SAMESITE", COOKIE_SAMESITE)
+
+        if "_remember_seconds" in session:
+            duration = timedelta(seconds=session["_remember_seconds"])
+        else:
+            duration = config.get("REMEMBER_COOKIE_DURATION", COOKIE_DURATION)
+
+        # prepare data
+        data = encode_cookie(str(session["_user_id"]))
+
+        if isinstance(duration, int):
+            duration = timedelta(seconds=duration)
+
+        try:
+            expires = datetime.utcnow() + duration
+        except TypeError as e:
+            raise Exception(
+                "REMEMBER_COOKIE_DURATION must be a datetime.timedelta,"
+                f" instead got: {duration}"
+            ) from e
+
+        # actually set it
+        response.set_cookie(
+            cookie_name,
+            value=data,
+            expires=expires,
+            domain=domain,
+            path=path,
+            secure=secure,
+            httponly=httponly,
+            samesite=samesite,
+        )
+
+    def _clear_cookie(self, response):
+        config = current_app.config
+        cookie_name = config.get("REMEMBER_COOKIE_NAME", COOKIE_NAME)
+        domain = config.get("REMEMBER_COOKIE_DOMAIN")
+        path = config.get("REMEMBER_COOKIE_PATH", "/")
+        response.delete_cookie(cookie_name, domain=domain, path=path)
+
+    @property
+    def _login_disabled(self):
+        """Legacy property, use app.config['LOGIN_DISABLED'] instead."""
+        import warnings
+
+        warnings.warn(
+            "'_login_disabled' is deprecated and will be removed in"
+            " Flask-Login 0.7. Use 'LOGIN_DISABLED' in 'app.config'"
+            " instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+
+        if has_app_context():
+            return current_app.config.get("LOGIN_DISABLED", False)
+        return False
+
+    @_login_disabled.setter
+    def _login_disabled(self, newvalue):
+        """Legacy property setter, use app.config['LOGIN_DISABLED'] instead."""
+        import warnings
+
+        warnings.warn(
+            "'_login_disabled' is deprecated and will be removed in"
+            " Flask-Login 0.7. Use 'LOGIN_DISABLED' in 'app.config'"
+            " instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        current_app.config["LOGIN_DISABLED"] = newvalue
diff --git a/.venv/Lib/site-packages/flask_login/mixins.py b/.venv/Lib/site-packages/flask_login/mixins.py
new file mode 100644
index 0000000000000000000000000000000000000000..0b3a71bbee1a6ad2b3f049f30f8f97004d769367
--- /dev/null
+++ b/.venv/Lib/site-packages/flask_login/mixins.py
@@ -0,0 +1,65 @@
+class UserMixin:
+    """
+    This provides default implementations for the methods that Flask-Login
+    expects user objects to have.
+    """
+
+    # Python 3 implicitly set __hash__ to None if we override __eq__
+    # We set it back to its default implementation
+    __hash__ = object.__hash__
+
+    @property
+    def is_active(self):
+        return True
+
+    @property
+    def is_authenticated(self):
+        return self.is_active
+
+    @property
+    def is_anonymous(self):
+        return False
+
+    def get_id(self):
+        try:
+            return str(self.id)
+        except AttributeError:
+            raise NotImplementedError("No `id` attribute - override `get_id`") from None
+
+    def __eq__(self, other):
+        """
+        Checks the equality of two `UserMixin` objects using `get_id`.
+        """
+        if isinstance(other, UserMixin):
+            return self.get_id() == other.get_id()
+        return NotImplemented
+
+    def __ne__(self, other):
+        """
+        Checks the inequality of two `UserMixin` objects using `get_id`.
+        """
+        equal = self.__eq__(other)
+        if equal is NotImplemented:
+            return NotImplemented
+        return not equal
+
+
+class AnonymousUserMixin:
+    """
+    This is the default object for representing an anonymous user.
+    """
+
+    @property
+    def is_authenticated(self):
+        return False
+
+    @property
+    def is_active(self):
+        return False
+
+    @property
+    def is_anonymous(self):
+        return True
+
+    def get_id(self):
+        return
diff --git a/.venv/Lib/site-packages/flask_login/signals.py b/.venv/Lib/site-packages/flask_login/signals.py
new file mode 100644
index 0000000000000000000000000000000000000000..cf9157f8b80120b06ce411c5b7085920aa34b174
--- /dev/null
+++ b/.venv/Lib/site-packages/flask_login/signals.py
@@ -0,0 +1,61 @@
+from flask.signals import Namespace
+
+_signals = Namespace()
+
+#: Sent when a user is logged in. In addition to the app (which is the
+#: sender), it is passed `user`, which is the user being logged in.
+user_logged_in = _signals.signal("logged-in")
+
+#: Sent when a user is logged out. In addition to the app (which is the
+#: sender), it is passed `user`, which is the user being logged out.
+user_logged_out = _signals.signal("logged-out")
+
+#: Sent when the user is loaded from the cookie. In addition to the app (which
+#: is the sender), it is passed `user`, which is the user being reloaded.
+user_loaded_from_cookie = _signals.signal("loaded-from-cookie")
+
+#: Sent when the user is loaded from the header. In addition to the app (which
+#: is the #: sender), it is passed `user`, which is the user being reloaded.
+_user_loaded_from_header = _signals.signal("loaded-from-header")
+
+#: Sent when the user is loaded from the request. In addition to the app (which
+#: is the #: sender), it is passed `user`, which is the user being reloaded.
+user_loaded_from_request = _signals.signal("loaded-from-request")
+
+#: Sent when a user's login is confirmed, marking it as fresh. (It is not
+#: called for a normal login.)
+#: It receives no additional arguments besides the app.
+user_login_confirmed = _signals.signal("login-confirmed")
+
+#: Sent when the `unauthorized` method is called on a `LoginManager`. It
+#: receives no additional arguments besides the app.
+user_unauthorized = _signals.signal("unauthorized")
+
+#: Sent when the `needs_refresh` method is called on a `LoginManager`. It
+#: receives no additional arguments besides the app.
+user_needs_refresh = _signals.signal("needs-refresh")
+
+#: Sent whenever the user is accessed/loaded
+#: receives no additional arguments besides the app.
+user_accessed = _signals.signal("accessed")
+
+#: Sent whenever session protection takes effect, and a session is either
+#: marked non-fresh or deleted. It receives no additional arguments besides
+#: the app.
+session_protected = _signals.signal("session-protected")
+
+
+def __getattr__(name):
+    if name == "user_loaded_from_header":
+        import warnings
+
+        warnings.warn(
+            "'user_loaded_from_header' is deprecated and will be"
+            " removed in Flask-Login 0.7. Use"
+            " 'user_loaded_from_request' instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        return _user_loaded_from_header
+
+    raise AttributeError(name)
diff --git a/.venv/Lib/site-packages/flask_login/test_client.py b/.venv/Lib/site-packages/flask_login/test_client.py
new file mode 100644
index 0000000000000000000000000000000000000000..be2a8bf6ab7541aca05035cbaa03497b7007b6d8
--- /dev/null
+++ b/.venv/Lib/site-packages/flask_login/test_client.py
@@ -0,0 +1,19 @@
+from flask.testing import FlaskClient
+
+
+class FlaskLoginClient(FlaskClient):
+    """
+    A Flask test client that knows how to log in users
+    using the Flask-Login extension.
+    """
+
+    def __init__(self, *args, **kwargs):
+        user = kwargs.pop("user", None)
+        fresh = kwargs.pop("fresh_login", True)
+
+        super().__init__(*args, **kwargs)
+
+        if user:
+            with self.session_transaction() as sess:
+                sess["_user_id"] = user.get_id()
+                sess["_fresh"] = fresh
diff --git a/.venv/Lib/site-packages/flask_login/utils.py b/.venv/Lib/site-packages/flask_login/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..37b20564f2469fbb188afcd61c4eaeb0610e5151
--- /dev/null
+++ b/.venv/Lib/site-packages/flask_login/utils.py
@@ -0,0 +1,415 @@
+import hmac
+from functools import wraps
+from hashlib import sha512
+from urllib.parse import parse_qs
+from urllib.parse import urlencode
+from urllib.parse import urlsplit
+from urllib.parse import urlunsplit
+
+from flask import current_app
+from flask import g
+from flask import has_request_context
+from flask import request
+from flask import session
+from flask import url_for
+from werkzeug.local import LocalProxy
+
+from .config import COOKIE_NAME
+from .config import EXEMPT_METHODS
+from .signals import user_logged_in
+from .signals import user_logged_out
+from .signals import user_login_confirmed
+
+#: A proxy for the current user. If no user is logged in, this will be an
+#: anonymous user
+current_user = LocalProxy(lambda: _get_user())
+
+
+def encode_cookie(payload, key=None):
+    """
+    This will encode a ``str`` value into a cookie, and sign that cookie
+    with the app's secret key.
+
+    :param payload: The value to encode, as `str`.
+    :type payload: str
+
+    :param key: The key to use when creating the cookie digest. If not
+                specified, the SECRET_KEY value from app config will be used.
+    :type key: str
+    """
+    return f"{payload}|{_cookie_digest(payload, key=key)}"
+
+
+def decode_cookie(cookie, key=None):
+    """
+    This decodes a cookie given by `encode_cookie`. If verification of the
+    cookie fails, ``None`` will be implicitly returned.
+
+    :param cookie: An encoded cookie.
+    :type cookie: str
+
+    :param key: The key to use when creating the cookie digest. If not
+                specified, the SECRET_KEY value from app config will be used.
+    :type key: str
+    """
+    try:
+        payload, digest = cookie.rsplit("|", 1)
+        if hasattr(digest, "decode"):
+            digest = digest.decode("ascii")  # pragma: no cover
+    except ValueError:
+        return
+
+    if hmac.compare_digest(_cookie_digest(payload, key=key), digest):
+        return payload
+
+
+def make_next_param(login_url, current_url):
+    """
+    Reduces the scheme and host from a given URL so it can be passed to
+    the given `login` URL more efficiently.
+
+    :param login_url: The login URL being redirected to.
+    :type login_url: str
+    :param current_url: The URL to reduce.
+    :type current_url: str
+    """
+    l_url = urlsplit(login_url)
+    c_url = urlsplit(current_url)
+
+    if (not l_url.scheme or l_url.scheme == c_url.scheme) and (
+        not l_url.netloc or l_url.netloc == c_url.netloc
+    ):
+        return urlunsplit(("", "", c_url.path, c_url.query, ""))
+    return current_url
+
+
+def expand_login_view(login_view):
+    """
+    Returns the url for the login view, expanding the view name to a url if
+    needed.
+
+    :param login_view: The name of the login view or a URL for the login view.
+    :type login_view: str
+    """
+    if login_view.startswith(("https://", "http://", "/")):
+        return login_view
+
+    return url_for(login_view)
+
+
+def login_url(login_view, next_url=None, next_field="next"):
+    """
+    Creates a URL for redirecting to a login page. If only `login_view` is
+    provided, this will just return the URL for it. If `next_url` is provided,
+    however, this will append a ``next=URL`` parameter to the query string
+    so that the login view can redirect back to that URL. Flask-Login's default
+    unauthorized handler uses this function when redirecting to your login url.
+    To force the host name used, set `FORCE_HOST_FOR_REDIRECTS` to a host. This
+    prevents from redirecting to external sites if request headers Host or
+    X-Forwarded-For are present.
+
+    :param login_view: The name of the login view. (Alternately, the actual
+                       URL to the login view.)
+    :type login_view: str
+    :param next_url: The URL to give the login view for redirection.
+    :type next_url: str
+    :param next_field: What field to store the next URL in. (It defaults to
+                       ``next``.)
+    :type next_field: str
+    """
+    base = expand_login_view(login_view)
+
+    if next_url is None:
+        return base
+
+    parsed_result = urlsplit(base)
+    md = parse_qs(parsed_result.query, keep_blank_values=True)
+    md[next_field] = make_next_param(base, next_url)
+    netloc = current_app.config.get("FORCE_HOST_FOR_REDIRECTS") or parsed_result.netloc
+    parsed_result = parsed_result._replace(
+        netloc=netloc, query=urlencode(md, doseq=True)
+    )
+    return urlunsplit(parsed_result)
+
+
+def login_fresh():
+    """
+    This returns ``True`` if the current login is fresh.
+    """
+    return session.get("_fresh", False)
+
+
+def login_remembered():
+    """
+    This returns ``True`` if the current login is remembered across sessions.
+    """
+    config = current_app.config
+    cookie_name = config.get("REMEMBER_COOKIE_NAME", COOKIE_NAME)
+    has_cookie = cookie_name in request.cookies and session.get("_remember") != "clear"
+    if has_cookie:
+        cookie = request.cookies[cookie_name]
+        user_id = decode_cookie(cookie)
+        return user_id is not None
+    return False
+
+
+def login_user(user, remember=False, duration=None, force=False, fresh=True):
+    """
+    Logs a user in. You should pass the actual user object to this. If the
+    user's `is_active` property is ``False``, they will not be logged in
+    unless `force` is ``True``.
+
+    This will return ``True`` if the log in attempt succeeds, and ``False`` if
+    it fails (i.e. because the user is inactive).
+
+    :param user: The user object to log in.
+    :type user: object
+    :param remember: Whether to remember the user after their session expires.
+        Defaults to ``False``.
+    :type remember: bool
+    :param duration: The amount of time before the remember cookie expires. If
+        ``None`` the value set in the settings is used. Defaults to ``None``.
+    :type duration: :class:`datetime.timedelta`
+    :param force: If the user is inactive, setting this to ``True`` will log
+        them in regardless. Defaults to ``False``.
+    :type force: bool
+    :param fresh: setting this to ``False`` will log in the user with a session
+        marked as not "fresh". Defaults to ``True``.
+    :type fresh: bool
+    """
+    if not force and not user.is_active:
+        return False
+
+    user_id = getattr(user, current_app.login_manager.id_attribute)()
+    session["_user_id"] = user_id
+    session["_fresh"] = fresh
+    session["_id"] = current_app.login_manager._session_identifier_generator()
+
+    if remember:
+        session["_remember"] = "set"
+        if duration is not None:
+            try:
+                # equal to timedelta.total_seconds() but works with Python 2.6
+                session["_remember_seconds"] = (
+                    duration.microseconds
+                    + (duration.seconds + duration.days * 24 * 3600) * 10**6
+                ) / 10.0**6
+            except AttributeError as e:
+                raise Exception(
+                    f"duration must be a datetime.timedelta, instead got: {duration}"
+                ) from e
+
+    current_app.login_manager._update_request_context_with_user(user)
+    user_logged_in.send(current_app._get_current_object(), user=_get_user())
+    return True
+
+
+def logout_user():
+    """
+    Logs a user out. (You do not need to pass the actual user.) This will
+    also clean up the remember me cookie if it exists.
+    """
+
+    user = _get_user()
+
+    if "_user_id" in session:
+        session.pop("_user_id")
+
+    if "_fresh" in session:
+        session.pop("_fresh")
+
+    if "_id" in session:
+        session.pop("_id")
+
+    cookie_name = current_app.config.get("REMEMBER_COOKIE_NAME", COOKIE_NAME)
+    if cookie_name in request.cookies:
+        session["_remember"] = "clear"
+        if "_remember_seconds" in session:
+            session.pop("_remember_seconds")
+
+    user_logged_out.send(current_app._get_current_object(), user=user)
+
+    current_app.login_manager._update_request_context_with_user()
+    return True
+
+
+def confirm_login():
+    """
+    This sets the current session as fresh. Sessions become stale when they
+    are reloaded from a cookie.
+    """
+    session["_fresh"] = True
+    session["_id"] = current_app.login_manager._session_identifier_generator()
+    user_login_confirmed.send(current_app._get_current_object())
+
+
+def login_required(func):
+    """
+    If you decorate a view with this, it will ensure that the current user is
+    logged in and authenticated before calling the actual view. (If they are
+    not, it calls the :attr:`LoginManager.unauthorized` callback.) For
+    example::
+
+        @app.route('/post')
+        @login_required
+        def post():
+            pass
+
+    If there are only certain times you need to require that your user is
+    logged in, you can do so with::
+
+        if not current_user.is_authenticated:
+            return current_app.login_manager.unauthorized()
+
+    ...which is essentially the code that this function adds to your views.
+
+    It can be convenient to globally turn off authentication when unit testing.
+    To enable this, if the application configuration variable `LOGIN_DISABLED`
+    is set to `True`, this decorator will be ignored.
+
+    .. Note ::
+
+        Per `W3 guidelines for CORS preflight requests
+        <http://www.w3.org/TR/cors/#cross-origin-request-with-preflight-0>`_,
+        HTTP ``OPTIONS`` requests are exempt from login checks.
+
+    :param func: The view function to decorate.
+    :type func: function
+    """
+
+    @wraps(func)
+    def decorated_view(*args, **kwargs):
+        if request.method in EXEMPT_METHODS or current_app.config.get("LOGIN_DISABLED"):
+            pass
+        elif not current_user.is_authenticated:
+            return current_app.login_manager.unauthorized()
+
+        # flask 1.x compatibility
+        # current_app.ensure_sync is only available in Flask >= 2.0
+        if callable(getattr(current_app, "ensure_sync", None)):
+            return current_app.ensure_sync(func)(*args, **kwargs)
+        return func(*args, **kwargs)
+
+    return decorated_view
+
+
+def fresh_login_required(func):
+    """
+    If you decorate a view with this, it will ensure that the current user's
+    login is fresh - i.e. their session was not restored from a 'remember me'
+    cookie. Sensitive operations, like changing a password or e-mail, should
+    be protected with this, to impede the efforts of cookie thieves.
+
+    If the user is not authenticated, :meth:`LoginManager.unauthorized` is
+    called as normal. If they are authenticated, but their session is not
+    fresh, it will call :meth:`LoginManager.needs_refresh` instead. (In that
+    case, you will need to provide a :attr:`LoginManager.refresh_view`.)
+
+    Behaves identically to the :func:`login_required` decorator with respect
+    to configuration variables.
+
+    .. Note ::
+
+        Per `W3 guidelines for CORS preflight requests
+        <http://www.w3.org/TR/cors/#cross-origin-request-with-preflight-0>`_,
+        HTTP ``OPTIONS`` requests are exempt from login checks.
+
+    :param func: The view function to decorate.
+    :type func: function
+    """
+
+    @wraps(func)
+    def decorated_view(*args, **kwargs):
+        if request.method in EXEMPT_METHODS or current_app.config.get("LOGIN_DISABLED"):
+            pass
+        elif not current_user.is_authenticated:
+            return current_app.login_manager.unauthorized()
+        elif not login_fresh():
+            return current_app.login_manager.needs_refresh()
+        try:
+            # current_app.ensure_sync available in Flask >= 2.0
+            return current_app.ensure_sync(func)(*args, **kwargs)
+        except AttributeError:  # pragma: no cover
+            return func(*args, **kwargs)
+
+    return decorated_view
+
+
+def set_login_view(login_view, blueprint=None):
+    """
+    Sets the login view for the app or blueprint. If a blueprint is passed,
+    the login view is set for this blueprint on ``blueprint_login_views``.
+
+    :param login_view: The user object to log in.
+    :type login_view: str
+    :param blueprint: The blueprint which this login view should be set on.
+        Defaults to ``None``.
+    :type blueprint: object
+    """
+
+    num_login_views = len(current_app.login_manager.blueprint_login_views)
+    if blueprint is not None or num_login_views != 0:
+        (current_app.login_manager.blueprint_login_views[blueprint.name]) = login_view
+
+        if (
+            current_app.login_manager.login_view is not None
+            and None not in current_app.login_manager.blueprint_login_views
+        ):
+            (
+                current_app.login_manager.blueprint_login_views[None]
+            ) = current_app.login_manager.login_view
+
+        current_app.login_manager.login_view = None
+    else:
+        current_app.login_manager.login_view = login_view
+
+
+def _get_user():
+    if has_request_context():
+        if "_login_user" not in g:
+            current_app.login_manager._load_user()
+
+        return g._login_user
+
+    return None
+
+
+def _cookie_digest(payload, key=None):
+    key = _secret_key(key)
+
+    return hmac.new(key, payload.encode("utf-8"), sha512).hexdigest()
+
+
+def _get_remote_addr():
+    address = request.headers.get("X-Forwarded-For", request.remote_addr)
+    if address is not None:
+        # An 'X-Forwarded-For' header includes a comma separated list of the
+        # addresses, the first address being the actual remote address.
+        address = address.encode("utf-8").split(b",")[0].strip()
+    return address
+
+
+def _create_identifier():
+    user_agent = request.headers.get("User-Agent")
+    if user_agent is not None:
+        user_agent = user_agent.encode("utf-8")
+    base = f"{_get_remote_addr()}|{user_agent}"
+    if str is bytes:
+        base = str(base, "utf-8", errors="replace")  # pragma: no cover
+    h = sha512()
+    h.update(base.encode("utf8"))
+    return h.hexdigest()
+
+
+def _user_context_processor():
+    return dict(current_user=_get_user())
+
+
+def _secret_key(key=None):
+    if key is None:
+        key = current_app.config["SECRET_KEY"]
+
+    if isinstance(key, str):  # pragma: no cover
+        key = key.encode("latin1")  # ensure bytes
+
+    return key
diff --git a/.venv/Lib/site-packages/gunicorn-23.0.0.dist-info/INSTALLER b/.venv/Lib/site-packages/gunicorn-23.0.0.dist-info/INSTALLER
new file mode 100644
index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68
--- /dev/null
+++ b/.venv/Lib/site-packages/gunicorn-23.0.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/.venv/Lib/site-packages/gunicorn-23.0.0.dist-info/LICENSE b/.venv/Lib/site-packages/gunicorn-23.0.0.dist-info/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..0dca6e18215ecca52c99b77eb35b199afccb0a6f
--- /dev/null
+++ b/.venv/Lib/site-packages/gunicorn-23.0.0.dist-info/LICENSE
@@ -0,0 +1,23 @@
+2009-2024 (c) Benoît Chesneau <benoitc@gunicorn.org>
+2009-2015 (c) Paul J. Davis <paul.joseph.davis@gmail.com>
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
diff --git a/.venv/Lib/site-packages/gunicorn-23.0.0.dist-info/METADATA b/.venv/Lib/site-packages/gunicorn-23.0.0.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..550aef249da0a8663bedbf69e91c18156aed74fe
--- /dev/null
+++ b/.venv/Lib/site-packages/gunicorn-23.0.0.dist-info/METADATA
@@ -0,0 +1,130 @@
+Metadata-Version: 2.1
+Name: gunicorn
+Version: 23.0.0
+Summary: WSGI HTTP Server for UNIX
+Author-email: Benoit Chesneau <benoitc@gunicorn.org>
+License: MIT
+Project-URL: Homepage, https://gunicorn.org
+Project-URL: Documentation, https://docs.gunicorn.org
+Project-URL: Issue tracker, https://github.com/benoitc/gunicorn/issues
+Project-URL: Source code, https://github.com/benoitc/gunicorn
+Project-URL: Changelog, https://docs.gunicorn.org/en/stable/news.html
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Other Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Operating System :: POSIX
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Internet
+Classifier: Topic :: Utilities
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Internet :: WWW/HTTP
+Classifier: Topic :: Internet :: WWW/HTTP :: WSGI
+Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Server
+Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
+Requires-Python: >=3.7
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+Requires-Dist: packaging
+Requires-Dist: importlib-metadata ; python_version < "3.8"
+Provides-Extra: eventlet
+Requires-Dist: eventlet !=0.36.0,>=0.24.1 ; extra == 'eventlet'
+Provides-Extra: gevent
+Requires-Dist: gevent >=1.4.0 ; extra == 'gevent'
+Provides-Extra: gthread
+Provides-Extra: setproctitle
+Requires-Dist: setproctitle ; extra == 'setproctitle'
+Provides-Extra: testing
+Requires-Dist: gevent ; extra == 'testing'
+Requires-Dist: eventlet ; extra == 'testing'
+Requires-Dist: coverage ; extra == 'testing'
+Requires-Dist: pytest ; extra == 'testing'
+Requires-Dist: pytest-cov ; extra == 'testing'
+Provides-Extra: tornado
+Requires-Dist: tornado >=0.2 ; extra == 'tornado'
+
+Gunicorn
+--------
+
+.. image:: https://img.shields.io/pypi/v/gunicorn.svg?style=flat
+    :alt: PyPI version
+    :target: https://pypi.python.org/pypi/gunicorn
+
+.. image:: https://img.shields.io/pypi/pyversions/gunicorn.svg
+    :alt: Supported Python versions
+    :target: https://pypi.python.org/pypi/gunicorn
+
+.. image:: https://github.com/benoitc/gunicorn/actions/workflows/tox.yml/badge.svg
+    :alt: Build Status
+    :target: https://github.com/benoitc/gunicorn/actions/workflows/tox.yml
+
+.. image:: https://github.com/benoitc/gunicorn/actions/workflows/lint.yml/badge.svg
+    :alt: Lint Status
+    :target: https://github.com/benoitc/gunicorn/actions/workflows/lint.yml
+
+Gunicorn 'Green Unicorn' is a Python WSGI HTTP Server for UNIX. It's a pre-fork
+worker model ported from Ruby's Unicorn_ project. The Gunicorn server is broadly
+compatible with various web frameworks, simply implemented, light on server
+resource usage, and fairly speedy.
+
+Feel free to join us in `#gunicorn`_ on `Libera.chat`_.
+
+Documentation
+-------------
+
+The documentation is hosted at https://docs.gunicorn.org.
+
+Installation
+------------
+
+Gunicorn requires **Python 3.x >= 3.7**.
+
+Install from PyPI::
+
+    $ pip install gunicorn
+
+
+Usage
+-----
+
+Basic usage::
+
+    $ gunicorn [OPTIONS] APP_MODULE
+
+Where ``APP_MODULE`` is of the pattern ``$(MODULE_NAME):$(VARIABLE_NAME)``. The
+module name can be a full dotted path. The variable name refers to a WSGI
+callable that should be found in the specified module.
+
+Example with test app::
+
+    $ cd examples
+    $ gunicorn --workers=2 test:app
+
+
+Contributing
+------------
+
+See `our complete contributor's guide <CONTRIBUTING.md>`_ for more details.
+
+
+License
+-------
+
+Gunicorn is released under the MIT License. See the LICENSE_ file for more
+details.
+
+.. _Unicorn: https://bogomips.org/unicorn/
+.. _`#gunicorn`: https://web.libera.chat/?channels=#gunicorn
+.. _`Libera.chat`: https://libera.chat/
+.. _LICENSE: https://github.com/benoitc/gunicorn/blob/master/LICENSE
diff --git a/.venv/Lib/site-packages/gunicorn-23.0.0.dist-info/RECORD b/.venv/Lib/site-packages/gunicorn-23.0.0.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..8304255723cf20f230106ae8127755ef94752fd9
--- /dev/null
+++ b/.venv/Lib/site-packages/gunicorn-23.0.0.dist-info/RECORD
@@ -0,0 +1,77 @@
+../../Scripts/gunicorn.exe,sha256=Qe77fVdA-TobLf7yeQA8Mcj2aHem9xUEpG2cmlbUqhg,108455
+gunicorn-23.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+gunicorn-23.0.0.dist-info/LICENSE,sha256=ZkbNu6LpnjQh3RjCIXNXmh_eNH6DHa5q3ugO7-Mx6VE,1136
+gunicorn-23.0.0.dist-info/METADATA,sha256=KhY-mRcAcWCLIbXIHihsUNKWB5fGDOrsbq-JKQTBHY4,4421
+gunicorn-23.0.0.dist-info/RECORD,,
+gunicorn-23.0.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+gunicorn-23.0.0.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91
+gunicorn-23.0.0.dist-info/entry_points.txt,sha256=bF8VNiG4H8W83JfEBcqcPMydv9hl04CS4kwh1KOYrFY,113
+gunicorn-23.0.0.dist-info/top_level.txt,sha256=cdMaa2yhxb8do-WioY9qRHUCfwf55YztjwQCncaInoE,9
+gunicorn/__init__.py,sha256=NaLW_JTiKLgqMXipjqzxFn-1wdiptlO2WxOB_KKwx94,257
+gunicorn/__main__.py,sha256=tviepyuwKyB6SPV28t2eZy_5PcCpT56z7QZjzbMpkQw,338
+gunicorn/__pycache__/__init__.cpython-310.pyc,,
+gunicorn/__pycache__/__main__.cpython-310.pyc,,
+gunicorn/__pycache__/arbiter.cpython-310.pyc,,
+gunicorn/__pycache__/config.cpython-310.pyc,,
+gunicorn/__pycache__/debug.cpython-310.pyc,,
+gunicorn/__pycache__/errors.cpython-310.pyc,,
+gunicorn/__pycache__/glogging.cpython-310.pyc,,
+gunicorn/__pycache__/pidfile.cpython-310.pyc,,
+gunicorn/__pycache__/reloader.cpython-310.pyc,,
+gunicorn/__pycache__/sock.cpython-310.pyc,,
+gunicorn/__pycache__/systemd.cpython-310.pyc,,
+gunicorn/__pycache__/util.cpython-310.pyc,,
+gunicorn/app/__init__.py,sha256=8m9lIbhRssnbGuBeQUA-vNSNbMeNju9Q_PUnnNfqOYU,105
+gunicorn/app/__pycache__/__init__.cpython-310.pyc,,
+gunicorn/app/__pycache__/base.cpython-310.pyc,,
+gunicorn/app/__pycache__/pasterapp.cpython-310.pyc,,
+gunicorn/app/__pycache__/wsgiapp.cpython-310.pyc,,
+gunicorn/app/base.py,sha256=KV2aIO50JTlakHL82q9zu3LhCJrDmUmaViwSy14Gk6U,7370
+gunicorn/app/pasterapp.py,sha256=BIa0mz_J86NuObUw2UIyjLYKUm8V3b034pJrTkvF-sA,2016
+gunicorn/app/wsgiapp.py,sha256=gVBgUc_3uSK0QzXYQ1XbutacEGjf44CgxAaYkgwfucY,1924
+gunicorn/arbiter.py,sha256=xcHpv8bsrYpIpu9q7YK4ue11f9kmz80dr7BUwKX3oxk,21470
+gunicorn/config.py,sha256=t3BChwMoBZwfV05Iy_n3oh232xvi1SORkOJfHFL_c-8,70318
+gunicorn/debug.py,sha256=c8cQv_g3d22JE6A4hv7FNmMhm4wq6iB_E-toorpqJcw,2263
+gunicorn/errors.py,sha256=iLTJQC4SVSRoygIGGHXvEp0d8UdzpeqmMRqUcF0JI14,897
+gunicorn/glogging.py,sha256=76MlUUc82FqdeD3R4qC8NeUHt8vxa3IBSxmeBtbZKtE,15273
+gunicorn/http/__init__.py,sha256=1k_WWvjT9eDDRDOutzXCebvYKm_qzaQA3GuLk0VkbJI,255
+gunicorn/http/__pycache__/__init__.cpython-310.pyc,,
+gunicorn/http/__pycache__/body.cpython-310.pyc,,
+gunicorn/http/__pycache__/errors.cpython-310.pyc,,
+gunicorn/http/__pycache__/message.cpython-310.pyc,,
+gunicorn/http/__pycache__/parser.cpython-310.pyc,,
+gunicorn/http/__pycache__/unreader.cpython-310.pyc,,
+gunicorn/http/__pycache__/wsgi.cpython-310.pyc,,
+gunicorn/http/body.py,sha256=sQgp_hJUjx8DK6LYzklMTl-xKcX8efsbreCKzowCGmo,7600
+gunicorn/http/errors.py,sha256=6tcG9pCvRiooXpfudQBILzUPx3ertuQ5utjZeUNMUqA,3437
+gunicorn/http/message.py,sha256=ok4xnqWhntIn21gcPa1KYZWRYTbwsECpot-Eac47qFs,17632
+gunicorn/http/parser.py,sha256=wayoAFjQYERSwE4YGwI2AYSNGZ2eTNbGUtoqqQFph5U,1334
+gunicorn/http/unreader.py,sha256=D7bluz62A1aLZQ9XbpX0-nDBal9KPtp_pjokk2YNY8E,1913
+gunicorn/http/wsgi.py,sha256=x-zTT7gvRF4wipmvoVePz1qO407JZCU_sNU8yjcl_R4,12811
+gunicorn/instrument/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+gunicorn/instrument/__pycache__/__init__.cpython-310.pyc,,
+gunicorn/instrument/__pycache__/statsd.cpython-310.pyc,,
+gunicorn/instrument/statsd.py,sha256=ghmaniNEjMMLvvdQkDPpB_u9a8z4FBfWUE_C9O1KIYQ,4750
+gunicorn/pidfile.py,sha256=HntiveG8eJmwB8_D3o5cBXRuGKnC0cvWxg90MWh1hUc,2327
+gunicorn/reloader.py,sha256=oDuK2PWGyIMm0_vc1y196Z1EggOvBi-Iz_2UbRY7PsQ,3761
+gunicorn/sock.py,sha256=VVF2eeoxQEJ2OEoZoek3BFZTqj7wXvQql7jpdFAjVTI,6834
+gunicorn/systemd.py,sha256=DmWbcqeRyHdAIy70UCEg2J93v6PpESp3EFTNm0Djgyg,2498
+gunicorn/util.py,sha256=YqC4E3RxhFNH-W4LOqy1RtxcHRy9hRyYND92ZSNXEwc,19095
+gunicorn/workers/__init__.py,sha256=Y0Z6WhXKY6PuTbFkOkeEBzIfhDDg5FeqVg8aJp6lIZA,572
+gunicorn/workers/__pycache__/__init__.cpython-310.pyc,,
+gunicorn/workers/__pycache__/base.cpython-310.pyc,,
+gunicorn/workers/__pycache__/base_async.cpython-310.pyc,,
+gunicorn/workers/__pycache__/geventlet.cpython-310.pyc,,
+gunicorn/workers/__pycache__/ggevent.cpython-310.pyc,,
+gunicorn/workers/__pycache__/gthread.cpython-310.pyc,,
+gunicorn/workers/__pycache__/gtornado.cpython-310.pyc,,
+gunicorn/workers/__pycache__/sync.cpython-310.pyc,,
+gunicorn/workers/__pycache__/workertmp.cpython-310.pyc,,
+gunicorn/workers/base.py,sha256=eM9MTLP9PdWL0Pm5V5byyBli-r8zF2MSEGjefr3y92M,9763
+gunicorn/workers/base_async.py,sha256=Oc-rSV81uHqvEqww2PM6tz75qNR07ChuqM6IkTOpzlk,5627
+gunicorn/workers/geventlet.py,sha256=s_I-gKYgDJnlAHdCxN_wfglODnDE1eJaZJZCJyNYg-4,6069
+gunicorn/workers/ggevent.py,sha256=OEhj-bFVBGQ-jbjr5S3gSvixJTa-YOQYht7fYTOCyt4,6030
+gunicorn/workers/gthread.py,sha256=moycCQoJS602u3U7gZEooYxqRP86Tq5bmQnipL4a4_c,12500
+gunicorn/workers/gtornado.py,sha256=zCHbxs5JeE9rtZa5mXlhftBlNlwp_tBWXuTQwqgv1so,5811
+gunicorn/workers/sync.py,sha256=mOY84VHbAx62lmo2DLuifkK9d6anEgvC7LAuYVJyRM4,7204
+gunicorn/workers/workertmp.py,sha256=bswGosCIDb_wBfdGaFqHopgxbmJ6rgVXYlVhJDWZKIc,1604
diff --git a/.venv/Lib/site-packages/gunicorn-23.0.0.dist-info/REQUESTED b/.venv/Lib/site-packages/gunicorn-23.0.0.dist-info/REQUESTED
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/.venv/Lib/site-packages/gunicorn-23.0.0.dist-info/WHEEL b/.venv/Lib/site-packages/gunicorn-23.0.0.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..1a9c53588bf958814188ba9a6e13d50b924fdccd
--- /dev/null
+++ b/.venv/Lib/site-packages/gunicorn-23.0.0.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: setuptools (72.1.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/.venv/Lib/site-packages/gunicorn-23.0.0.dist-info/entry_points.txt b/.venv/Lib/site-packages/gunicorn-23.0.0.dist-info/entry_points.txt
new file mode 100644
index 0000000000000000000000000000000000000000..fd14749b06662c41125a4dcce8afe3ca07fb8797
--- /dev/null
+++ b/.venv/Lib/site-packages/gunicorn-23.0.0.dist-info/entry_points.txt
@@ -0,0 +1,5 @@
+[console_scripts]
+gunicorn = gunicorn.app.wsgiapp:run
+
+[paste.server_runner]
+main = gunicorn.app.pasterapp:serve
diff --git a/.venv/Lib/site-packages/gunicorn-23.0.0.dist-info/top_level.txt b/.venv/Lib/site-packages/gunicorn-23.0.0.dist-info/top_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..8f22dccf99affb5ab9b1c65023ec083269269bca
--- /dev/null
+++ b/.venv/Lib/site-packages/gunicorn-23.0.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+gunicorn
diff --git a/.venv/Lib/site-packages/gunicorn/__init__.py b/.venv/Lib/site-packages/gunicorn/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..cdcd1352ee16f68e05473ecc130b2a4212a5c62e
--- /dev/null
+++ b/.venv/Lib/site-packages/gunicorn/__init__.py
@@ -0,0 +1,8 @@
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+
+version_info = (23, 0, 0)
+__version__ = ".".join([str(v) for v in version_info])
+SERVER = "gunicorn"
+SERVER_SOFTWARE = "%s/%s" % (SERVER, __version__)
diff --git a/.venv/Lib/site-packages/gunicorn/__main__.py b/.venv/Lib/site-packages/gunicorn/__main__.py
new file mode 100644
index 0000000000000000000000000000000000000000..ceb44d086cc17582034f3b8373cf359459ebaa61
--- /dev/null
+++ b/.venv/Lib/site-packages/gunicorn/__main__.py
@@ -0,0 +1,10 @@
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+
+from gunicorn.app.wsgiapp import run
+
+if __name__ == "__main__":
+    # see config.py - argparse defaults to basename(argv[0]) == "__main__.py"
+    # todo: let runpy.run_module take care of argv[0] rewriting
+    run(prog="gunicorn")
diff --git a/.venv/Lib/site-packages/gunicorn/__pycache__/__init__.cpython-310.pyc b/.venv/Lib/site-packages/gunicorn/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3c4ae497f4d1ecfe0deeb16ebc0f36e5adfd43d9
Binary files /dev/null and b/.venv/Lib/site-packages/gunicorn/__pycache__/__init__.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/gunicorn/__pycache__/__main__.cpython-310.pyc b/.venv/Lib/site-packages/gunicorn/__pycache__/__main__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..422985ff78acffec6abbcc460425596b925f0bf7
Binary files /dev/null and b/.venv/Lib/site-packages/gunicorn/__pycache__/__main__.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/gunicorn/__pycache__/arbiter.cpython-310.pyc b/.venv/Lib/site-packages/gunicorn/__pycache__/arbiter.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..31de687fa68bfa7bcfdbe5ddb91f5f6f8ffecb26
Binary files /dev/null and b/.venv/Lib/site-packages/gunicorn/__pycache__/arbiter.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/gunicorn/__pycache__/config.cpython-310.pyc b/.venv/Lib/site-packages/gunicorn/__pycache__/config.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8d873f775ac1688402e01cb9f05c822663bfaca2
Binary files /dev/null and b/.venv/Lib/site-packages/gunicorn/__pycache__/config.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/gunicorn/__pycache__/debug.cpython-310.pyc b/.venv/Lib/site-packages/gunicorn/__pycache__/debug.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2785c58cd916cc54fd919270df447b26ea8cb8a4
Binary files /dev/null and b/.venv/Lib/site-packages/gunicorn/__pycache__/debug.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/gunicorn/__pycache__/errors.cpython-310.pyc b/.venv/Lib/site-packages/gunicorn/__pycache__/errors.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..592b8bac3ba30c544988bbb21d0a9d37b506d034
Binary files /dev/null and b/.venv/Lib/site-packages/gunicorn/__pycache__/errors.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/gunicorn/__pycache__/glogging.cpython-310.pyc b/.venv/Lib/site-packages/gunicorn/__pycache__/glogging.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c442158d2d98ea6bef8d793d205efb519f32c9d5
Binary files /dev/null and b/.venv/Lib/site-packages/gunicorn/__pycache__/glogging.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/gunicorn/__pycache__/pidfile.cpython-310.pyc b/.venv/Lib/site-packages/gunicorn/__pycache__/pidfile.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a4f1924712e77e820f32f9787af772b4a6b7feb6
Binary files /dev/null and b/.venv/Lib/site-packages/gunicorn/__pycache__/pidfile.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/gunicorn/__pycache__/reloader.cpython-310.pyc b/.venv/Lib/site-packages/gunicorn/__pycache__/reloader.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..148cfafbdac572ca71bd0bed9e2779bba88af92d
Binary files /dev/null and b/.venv/Lib/site-packages/gunicorn/__pycache__/reloader.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/gunicorn/__pycache__/sock.cpython-310.pyc b/.venv/Lib/site-packages/gunicorn/__pycache__/sock.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e2769bbdafbcbf837b61193a2eac702afa4280bc
Binary files /dev/null and b/.venv/Lib/site-packages/gunicorn/__pycache__/sock.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/gunicorn/__pycache__/systemd.cpython-310.pyc b/.venv/Lib/site-packages/gunicorn/__pycache__/systemd.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d652a1584cb75a1274f1923254df1e587150943c
Binary files /dev/null and b/.venv/Lib/site-packages/gunicorn/__pycache__/systemd.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/gunicorn/__pycache__/util.cpython-310.pyc b/.venv/Lib/site-packages/gunicorn/__pycache__/util.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c968188ca722d0c336dbe35cbc3b7fe25ea49e61
Binary files /dev/null and b/.venv/Lib/site-packages/gunicorn/__pycache__/util.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/gunicorn/app/__init__.py b/.venv/Lib/site-packages/gunicorn/app/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..530e35ca491371b1cd868c074523da1a54ca8439
--- /dev/null
+++ b/.venv/Lib/site-packages/gunicorn/app/__init__.py
@@ -0,0 +1,3 @@
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
diff --git a/.venv/Lib/site-packages/gunicorn/app/__pycache__/__init__.cpython-310.pyc b/.venv/Lib/site-packages/gunicorn/app/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1d0427b426c9a7ff64eb67d73fb2ca3692a2b94c
Binary files /dev/null and b/.venv/Lib/site-packages/gunicorn/app/__pycache__/__init__.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/gunicorn/app/__pycache__/base.cpython-310.pyc b/.venv/Lib/site-packages/gunicorn/app/__pycache__/base.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..317f6ed8f79c587287ab496e2087cb4fa2863426
Binary files /dev/null and b/.venv/Lib/site-packages/gunicorn/app/__pycache__/base.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/gunicorn/app/__pycache__/pasterapp.cpython-310.pyc b/.venv/Lib/site-packages/gunicorn/app/__pycache__/pasterapp.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3b3627d9928d5b0c23b5313ebf8a4aed6b1f719c
Binary files /dev/null and b/.venv/Lib/site-packages/gunicorn/app/__pycache__/pasterapp.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/gunicorn/app/__pycache__/wsgiapp.cpython-310.pyc b/.venv/Lib/site-packages/gunicorn/app/__pycache__/wsgiapp.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..72d974f11106efb59d76695fc9fda5c74f7ef6bf
Binary files /dev/null and b/.venv/Lib/site-packages/gunicorn/app/__pycache__/wsgiapp.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/gunicorn/app/base.py b/.venv/Lib/site-packages/gunicorn/app/base.py
new file mode 100644
index 0000000000000000000000000000000000000000..9bf7a4f0f1a034d5fcedeaf1e8b5bea447ec5c07
--- /dev/null
+++ b/.venv/Lib/site-packages/gunicorn/app/base.py
@@ -0,0 +1,235 @@
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+import importlib.util
+import importlib.machinery
+import os
+import sys
+import traceback
+
+from gunicorn import util
+from gunicorn.arbiter import Arbiter
+from gunicorn.config import Config, get_default_config_file
+from gunicorn import debug
+
+
+class BaseApplication:
+    """
+    An application interface for configuring and loading
+    the various necessities for any given web framework.
+    """
+    def __init__(self, usage=None, prog=None):
+        self.usage = usage
+        self.cfg = None
+        self.callable = None
+        self.prog = prog
+        self.logger = None
+        self.do_load_config()
+
+    def do_load_config(self):
+        """
+        Loads the configuration
+        """
+        try:
+            self.load_default_config()
+            self.load_config()
+        except Exception as e:
+            print("\nError: %s" % str(e), file=sys.stderr)
+            sys.stderr.flush()
+            sys.exit(1)
+
+    def load_default_config(self):
+        # init configuration
+        self.cfg = Config(self.usage, prog=self.prog)
+
+    def init(self, parser, opts, args):
+        raise NotImplementedError
+
+    def load(self):
+        raise NotImplementedError
+
+    def load_config(self):
+        """
+        This method is used to load the configuration from one or several input(s).
+        Custom Command line, configuration file.
+        You have to override this method in your class.
+        """
+        raise NotImplementedError
+
+    def reload(self):
+        self.do_load_config()
+        if self.cfg.spew:
+            debug.spew()
+
+    def wsgi(self):
+        if self.callable is None:
+            self.callable = self.load()
+        return self.callable
+
+    def run(self):
+        try:
+            Arbiter(self).run()
+        except RuntimeError as e:
+            print("\nError: %s\n" % e, file=sys.stderr)
+            sys.stderr.flush()
+            sys.exit(1)
+
+
+class Application(BaseApplication):
+
+    # 'init' and 'load' methods are implemented by WSGIApplication.
+    # pylint: disable=abstract-method
+
+    def chdir(self):
+        # chdir to the configured path before loading,
+        # default is the current dir
+        os.chdir(self.cfg.chdir)
+
+        # add the path to sys.path
+        if self.cfg.chdir not in sys.path:
+            sys.path.insert(0, self.cfg.chdir)
+
+    def get_config_from_filename(self, filename):
+
+        if not os.path.exists(filename):
+            raise RuntimeError("%r doesn't exist" % filename)
+
+        ext = os.path.splitext(filename)[1]
+
+        try:
+            module_name = '__config__'
+            if ext in [".py", ".pyc"]:
+                spec = importlib.util.spec_from_file_location(module_name, filename)
+            else:
+                msg = "configuration file should have a valid Python extension.\n"
+                util.warn(msg)
+                loader_ = importlib.machinery.SourceFileLoader(module_name, filename)
+                spec = importlib.util.spec_from_file_location(module_name, filename, loader=loader_)
+            mod = importlib.util.module_from_spec(spec)
+            sys.modules[module_name] = mod
+            spec.loader.exec_module(mod)
+        except Exception:
+            print("Failed to read config file: %s" % filename, file=sys.stderr)
+            traceback.print_exc()
+            sys.stderr.flush()
+            sys.exit(1)
+
+        return vars(mod)
+
+    def get_config_from_module_name(self, module_name):
+        return vars(importlib.import_module(module_name))
+
+    def load_config_from_module_name_or_filename(self, location):
+        """
+        Loads the configuration file: the file is a python file, otherwise raise an RuntimeError
+        Exception or stop the process if the configuration file contains a syntax error.
+        """
+
+        if location.startswith("python:"):
+            module_name = location[len("python:"):]
+            cfg = self.get_config_from_module_name(module_name)
+        else:
+            if location.startswith("file:"):
+                filename = location[len("file:"):]
+            else:
+                filename = location
+            cfg = self.get_config_from_filename(filename)
+
+        for k, v in cfg.items():
+            # Ignore unknown names
+            if k not in self.cfg.settings:
+                continue
+            try:
+                self.cfg.set(k.lower(), v)
+            except Exception:
+                print("Invalid value for %s: %s\n" % (k, v), file=sys.stderr)
+                sys.stderr.flush()
+                raise
+
+        return cfg
+
+    def load_config_from_file(self, filename):
+        return self.load_config_from_module_name_or_filename(location=filename)
+
+    def load_config(self):
+        # parse console args
+        parser = self.cfg.parser()
+        args = parser.parse_args()
+
+        # optional settings from apps
+        cfg = self.init(parser, args, args.args)
+
+        # set up import paths and follow symlinks
+        self.chdir()
+
+        # Load up the any app specific configuration
+        if cfg:
+            for k, v in cfg.items():
+                self.cfg.set(k.lower(), v)
+
+        env_args = parser.parse_args(self.cfg.get_cmd_args_from_env())
+
+        if args.config:
+            self.load_config_from_file(args.config)
+        elif env_args.config:
+            self.load_config_from_file(env_args.config)
+        else:
+            default_config = get_default_config_file()
+            if default_config is not None:
+                self.load_config_from_file(default_config)
+
+        # Load up environment configuration
+        for k, v in vars(env_args).items():
+            if v is None:
+                continue
+            if k == "args":
+                continue
+            self.cfg.set(k.lower(), v)
+
+        # Lastly, update the configuration with any command line settings.
+        for k, v in vars(args).items():
+            if v is None:
+                continue
+            if k == "args":
+                continue
+            self.cfg.set(k.lower(), v)
+
+        # current directory might be changed by the config now
+        # set up import paths and follow symlinks
+        self.chdir()
+
+    def run(self):
+        if self.cfg.print_config:
+            print(self.cfg)
+
+        if self.cfg.print_config or self.cfg.check_config:
+            try:
+                self.load()
+            except Exception:
+                msg = "\nError while loading the application:\n"
+                print(msg, file=sys.stderr)
+                traceback.print_exc()
+                sys.stderr.flush()
+                sys.exit(1)
+            sys.exit(0)
+
+        if self.cfg.spew:
+            debug.spew()
+
+        if self.cfg.daemon:
+            if os.environ.get('NOTIFY_SOCKET'):
+                msg = "Warning: you shouldn't specify `daemon = True`" \
+                      " when launching by systemd with `Type = notify`"
+                print(msg, file=sys.stderr, flush=True)
+
+            util.daemonize(self.cfg.enable_stdio_inheritance)
+
+        # set python paths
+        if self.cfg.pythonpath:
+            paths = self.cfg.pythonpath.split(",")
+            for path in paths:
+                pythonpath = os.path.abspath(path)
+                if pythonpath not in sys.path:
+                    sys.path.insert(0, pythonpath)
+
+        super().run()
diff --git a/.venv/Lib/site-packages/gunicorn/app/pasterapp.py b/.venv/Lib/site-packages/gunicorn/app/pasterapp.py
new file mode 100644
index 0000000000000000000000000000000000000000..b1738f250099540c1947fe8d6b821d6b196ede29
--- /dev/null
+++ b/.venv/Lib/site-packages/gunicorn/app/pasterapp.py
@@ -0,0 +1,74 @@
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+
+import configparser
+import os
+
+from paste.deploy import loadapp
+
+from gunicorn.app.wsgiapp import WSGIApplication
+from gunicorn.config import get_default_config_file
+
+
+def get_wsgi_app(config_uri, name=None, defaults=None):
+    if ':' not in config_uri:
+        config_uri = "config:%s" % config_uri
+
+    return loadapp(
+        config_uri,
+        name=name,
+        relative_to=os.getcwd(),
+        global_conf=defaults,
+    )
+
+
+def has_logging_config(config_file):
+    parser = configparser.ConfigParser()
+    parser.read([config_file])
+    return parser.has_section('loggers')
+
+
+def serve(app, global_conf, **local_conf):
+    """\
+    A Paste Deployment server runner.
+
+    Example configuration:
+
+        [server:main]
+        use = egg:gunicorn#main
+        host = 127.0.0.1
+        port = 5000
+    """
+    config_file = global_conf['__file__']
+    gunicorn_config_file = local_conf.pop('config', None)
+
+    host = local_conf.pop('host', '')
+    port = local_conf.pop('port', '')
+    if host and port:
+        local_conf['bind'] = '%s:%s' % (host, port)
+    elif host:
+        local_conf['bind'] = host.split(',')
+
+    class PasterServerApplication(WSGIApplication):
+        def load_config(self):
+            self.cfg.set("default_proc_name", config_file)
+
+            if has_logging_config(config_file):
+                self.cfg.set("logconfig", config_file)
+
+            if gunicorn_config_file:
+                self.load_config_from_file(gunicorn_config_file)
+            else:
+                default_gunicorn_config_file = get_default_config_file()
+                if default_gunicorn_config_file is not None:
+                    self.load_config_from_file(default_gunicorn_config_file)
+
+            for k, v in local_conf.items():
+                if v is not None:
+                    self.cfg.set(k.lower(), v)
+
+        def load(self):
+            return app
+
+    PasterServerApplication().run()
diff --git a/.venv/Lib/site-packages/gunicorn/app/wsgiapp.py b/.venv/Lib/site-packages/gunicorn/app/wsgiapp.py
new file mode 100644
index 0000000000000000000000000000000000000000..1b0ba969d1561e82bbf8e877307b4fa0203bb0a2
--- /dev/null
+++ b/.venv/Lib/site-packages/gunicorn/app/wsgiapp.py
@@ -0,0 +1,70 @@
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+
+import os
+
+from gunicorn.errors import ConfigError
+from gunicorn.app.base import Application
+from gunicorn import util
+
+
+class WSGIApplication(Application):
+    def init(self, parser, opts, args):
+        self.app_uri = None
+
+        if opts.paste:
+            from .pasterapp import has_logging_config
+
+            config_uri = os.path.abspath(opts.paste)
+            config_file = config_uri.split('#')[0]
+
+            if not os.path.exists(config_file):
+                raise ConfigError("%r not found" % config_file)
+
+            self.cfg.set("default_proc_name", config_file)
+            self.app_uri = config_uri
+
+            if has_logging_config(config_file):
+                self.cfg.set("logconfig", config_file)
+
+            return
+
+        if len(args) > 0:
+            self.cfg.set("default_proc_name", args[0])
+            self.app_uri = args[0]
+
+    def load_config(self):
+        super().load_config()
+
+        if self.app_uri is None:
+            if self.cfg.wsgi_app is not None:
+                self.app_uri = self.cfg.wsgi_app
+            else:
+                raise ConfigError("No application module specified.")
+
+    def load_wsgiapp(self):
+        return util.import_app(self.app_uri)
+
+    def load_pasteapp(self):
+        from .pasterapp import get_wsgi_app
+        return get_wsgi_app(self.app_uri, defaults=self.cfg.paste_global_conf)
+
+    def load(self):
+        if self.cfg.paste is not None:
+            return self.load_pasteapp()
+        else:
+            return self.load_wsgiapp()
+
+
+def run(prog=None):
+    """\
+    The ``gunicorn`` command line runner for launching Gunicorn with
+    generic WSGI applications.
+    """
+    from gunicorn.app.wsgiapp import WSGIApplication
+    WSGIApplication("%(prog)s [OPTIONS] [APP_MODULE]", prog=prog).run()
+
+
+if __name__ == '__main__':
+    run()
diff --git a/.venv/Lib/site-packages/gunicorn/arbiter.py b/.venv/Lib/site-packages/gunicorn/arbiter.py
new file mode 100644
index 0000000000000000000000000000000000000000..1eaf453d5d7bbfad2f97179c242ca4b63845cd1d
--- /dev/null
+++ b/.venv/Lib/site-packages/gunicorn/arbiter.py
@@ -0,0 +1,671 @@
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+import errno
+import os
+import random
+import select
+import signal
+import sys
+import time
+import traceback
+
+from gunicorn.errors import HaltServer, AppImportError
+from gunicorn.pidfile import Pidfile
+from gunicorn import sock, systemd, util
+
+from gunicorn import __version__, SERVER_SOFTWARE
+
+
class Arbiter:
    """
    The Gunicorn master process.

    Keeps the worker processes alive, launching or killing them as
    needed, and manages application reloading (SIGHUP) and in-place
    binary upgrades (SIGUSR2).
    """

    # Exit code a worker uses when it fails to boot.  If a worker
    # process exits with this code, the arbiter terminates instead of
    # respawning, to avoid an infinite start/stop cycle.
    WORKER_BOOT_ERROR = 3

    # Exit code a worker uses when the application failed to be loaded.
    APP_LOAD_ERROR = 4

    # Snapshot of the original invocation (executable, argv, cwd),
    # used by reexec() to launch a fresh master on SIGUSR2.
    START_CTX = {}

    LISTENERS = []  # bound listening sockets inherited by the workers
    WORKERS = {}    # pid -> worker object for every live worker
    PIPE = []       # self-pipe (r, w) used to interrupt sleep() from handlers

    # I love dynamic languages
    SIG_QUEUE = []  # queued signals, processed one per main-loop turn (cap 5)
    SIGNALS = [getattr(signal, "SIG%s" % x)
               for x in "HUP QUIT INT TERM TTIN TTOU USR1 USR2 WINCH".split()]
    SIG_NAMES = dict(
        (getattr(signal, name), name[3:].lower()) for name in dir(signal)
        if name[:3] == "SIG" and name[3] != "_"
    )

    def __init__(self, app):
        os.environ["SERVER_SOFTWARE"] = SERVER_SOFTWARE

        self._num_workers = None
        self._last_logged_active_worker_count = None
        self.log = None

        self.setup(app)

        self.pidfile = None
        self.systemd = False
        self.worker_age = 0        # monotonically increasing worker counter
        self.reexec_pid = 0        # pid of a re-exec'ed child master, if any
        self.master_pid = 0        # pid of the old master during a USR2 upgrade
        self.master_name = "Master"

        cwd = util.getcwd()

        args = sys.argv[:]
        args.insert(0, sys.executable)

        # init start context
        self.START_CTX = {
            "args": args,
            "cwd": cwd,
            0: sys.executable
        }

    def _get_num_workers(self):
        return self._num_workers

    def _set_num_workers(self, value):
        # Fire the nworkers_changed hook whenever the target count changes.
        old_value = self._num_workers
        self._num_workers = value
        self.cfg.nworkers_changed(self, value, old_value)
    num_workers = property(_get_num_workers, _set_num_workers)

    def setup(self, app):
        """Pull configuration out of *app*; also called again on reload."""
        self.app = app
        self.cfg = app.cfg

        if self.log is None:
            self.log = self.cfg.logger_class(app.cfg)

        # reopen files when we are the new master of a USR2 upgrade
        if 'GUNICORN_PID' in os.environ:
            self.log.reopen_files()

        self.worker_class = self.cfg.worker_class
        self.address = self.cfg.address
        self.num_workers = self.cfg.workers
        self.timeout = self.cfg.timeout
        self.proc_name = self.cfg.proc_name

        self.log.debug('Current configuration:\n{0}'.format(
            '\n'.join(
                '  {0}: {1}'.format(config, value.value)
                for config, value
                in sorted(self.cfg.settings.items(),
                          key=lambda setting: setting[1]))))

        # export environment variables requested via the "env" setting
        if self.cfg.env:
            for k, v in self.cfg.env.items():
                os.environ[k] = v

        # with preload_app the application is imported once in the master
        # and shared with the workers through fork()
        if self.cfg.preload_app:
            self.app.wsgi()

    def start(self):
        """\
        Initialize the arbiter. Start listening and set pidfile if needed.
        """
        self.log.info("Starting gunicorn %s", __version__)

        if 'GUNICORN_PID' in os.environ:
            # We are the new master spawned by a USR2 upgrade; remember the
            # old master's pid so both can coexist until promotion.
            self.master_pid = int(os.environ.get('GUNICORN_PID'))
            self.proc_name = self.proc_name + ".2"
            self.master_name = "Master.2"

        self.pid = os.getpid()
        if self.cfg.pidfile is not None:
            pidname = self.cfg.pidfile
            if self.master_pid != 0:
                pidname += ".2"
            self.pidfile = Pidfile(pidname)
            self.pidfile.create(self.pid)
        self.cfg.on_starting(self)

        self.init_signals()

        if not self.LISTENERS:
            fds = None
            listen_fds = systemd.listen_fds()
            if listen_fds:
                # Sockets passed in by systemd socket activation.
                self.systemd = True
                fds = range(systemd.SD_LISTEN_FDS_START,
                            systemd.SD_LISTEN_FDS_START + listen_fds)

            elif self.master_pid:
                # Sockets inherited from the old master during a USR2 upgrade.
                fds = []
                for fd in os.environ.pop('GUNICORN_FD').split(','):
                    fds.append(int(fd))

            self.LISTENERS = sock.create_sockets(self.cfg, self.log, fds)

        listeners_str = ",".join([str(lnr) for lnr in self.LISTENERS])
        self.log.debug("Arbiter booted")
        self.log.info("Listening at: %s (%s)", listeners_str, self.pid)
        self.log.info("Using worker: %s", self.cfg.worker_class_str)
        systemd.sd_notify("READY=1\nSTATUS=Gunicorn arbiter booted", self.log)

        # check worker class requirements
        if hasattr(self.worker_class, "check_config"):
            self.worker_class.check_config(self.cfg, self.log)

        self.cfg.when_ready(self)

    def init_signals(self):
        """\
        Initialize master signal handling. Most of the signals
        are queued. Child signals only wake up the master.
        """
        # close old PIPE (left over from a previous init, e.g. on reload)
        for p in self.PIPE:
            os.close(p)

        # initialize the self-pipe used to wake sleep() from a handler
        self.PIPE = pair = os.pipe()
        for p in pair:
            util.set_non_blocking(p)
            util.close_on_exec(p)

        self.log.close_on_exec()

        # initialize all signals
        for s in self.SIGNALS:
            signal.signal(s, self.signal)
        signal.signal(signal.SIGCHLD, self.handle_chld)

    def signal(self, sig, frame):
        # Generic handler: queue the signal (bounded) and wake the loop.
        if len(self.SIG_QUEUE) < 5:
            self.SIG_QUEUE.append(sig)
            self.wakeup()

    def run(self):
        "Main master loop."
        self.start()
        util._setproctitle("master [%s]" % self.proc_name)

        try:
            self.manage_workers()

            while True:
                self.maybe_promote_master()

                sig = self.SIG_QUEUE.pop(0) if self.SIG_QUEUE else None
                if sig is None:
                    # No pending signal: idle, reap hung workers, rebalance.
                    self.sleep()
                    self.murder_workers()
                    self.manage_workers()
                    continue

                if sig not in self.SIG_NAMES:
                    self.log.info("Ignoring unknown signal: %s", sig)
                    continue

                # Dispatch to the matching handle_<signame> method.
                signame = self.SIG_NAMES.get(sig)
                handler = getattr(self, "handle_%s" % signame, None)
                if not handler:
                    self.log.error("Unhandled signal: %s", signame)
                    continue
                self.log.info("Handling signal: %s", signame)
                handler()
                self.wakeup()
        except (StopIteration, KeyboardInterrupt):
            # StopIteration is raised by the TERM/INT/QUIT handlers.
            self.halt()
        except HaltServer as inst:
            self.halt(reason=inst.reason, exit_status=inst.exit_status)
        except SystemExit:
            raise
        except Exception:
            self.log.error("Unhandled exception in main loop",
                           exc_info=True)
            self.stop(False)
            if self.pidfile is not None:
                self.pidfile.unlink()
            sys.exit(-1)

    def handle_chld(self, sig, frame):
        "SIGCHLD handling"
        self.reap_workers()
        self.wakeup()

    def handle_hup(self):
        """\
        HUP handling.
        - Reload configuration
        - Start the new worker processes with a new configuration
        - Gracefully shutdown the old worker processes
        """
        self.log.info("Hang up: %s", self.master_name)
        self.reload()

    def handle_term(self):
        "SIGTERM handling: graceful shutdown (workers finish requests)."
        raise StopIteration

    def handle_int(self):
        "SIGINT handling: quick shutdown."
        self.stop(False)
        raise StopIteration

    def handle_quit(self):
        "SIGQUIT handling: quick shutdown."
        self.stop(False)
        raise StopIteration

    def handle_ttin(self):
        """\
        SIGTTIN handling.
        Increases the number of workers by one.
        """
        self.num_workers += 1
        self.manage_workers()

    def handle_ttou(self):
        """\
        SIGTTOU handling.
        Decreases the number of workers by one.
        """
        if self.num_workers <= 1:
            return
        self.num_workers -= 1
        self.manage_workers()

    def handle_usr1(self):
        """\
        SIGUSR1 handling.
        Reopen log files and kill all workers by sending them a SIGUSR1
        so they reopen theirs too.
        """
        self.log.reopen_files()
        self.kill_workers(signal.SIGUSR1)

    def handle_usr2(self):
        """\
        SIGUSR2 handling.
        Creates a new arbiter/worker set as a fork of the current
        arbiter without affecting old workers. Use this to do live
        deployment with the ability to backout a change.
        """
        self.reexec()

    def handle_winch(self):
        """SIGWINCH handling: gracefully stop workers, daemon mode only."""
        if self.cfg.daemon:
            self.log.info("graceful stop of workers")
            self.num_workers = 0
            self.kill_workers(signal.SIGTERM)
        else:
            self.log.debug("SIGWINCH ignored. Not daemonized")

    def maybe_promote_master(self):
        """Become the primary master once the old (USR2) master is gone."""
        if self.master_pid == 0:
            return

        # If our parent changed, the old master has exited: promote.
        if self.master_pid != os.getppid():
            self.log.info("Master has been promoted.")
            # reset master infos
            self.master_name = "Master"
            self.master_pid = 0
            self.proc_name = self.cfg.proc_name
            del os.environ['GUNICORN_PID']
            # rename the pidfile
            if self.pidfile is not None:
                self.pidfile.rename(self.cfg.pidfile)
            # reset proctitle
            util._setproctitle("master [%s]" % self.proc_name)

    def wakeup(self):
        """\
        Wake up the arbiter by writing to the PIPE
        """
        try:
            os.write(self.PIPE[1], b'.')
        except OSError as e:
            # The pipe is non-blocking; a full pipe (EAGAIN) is harmless.
            if e.errno not in [errno.EAGAIN, errno.EINTR]:
                raise

    def halt(self, reason=None, exit_status=0):
        """ Stop workers, clean up, and exit the master process. """
        self.stop()

        log_func = self.log.info if exit_status == 0 else self.log.error
        log_func("Shutting down: %s", self.master_name)
        if reason is not None:
            log_func("Reason: %s", reason)

        if self.pidfile is not None:
            self.pidfile.unlink()
        self.cfg.on_exit(self)
        sys.exit(exit_status)

    def sleep(self):
        """\
        Sleep until PIPE is readable or we timeout.
        A readable PIPE means a signal occurred.
        """
        try:
            ready = select.select([self.PIPE[0]], [], [], 1.0)
            if not ready[0]:
                return
            # Drain the pipe so the next wakeup() byte is seen.
            while os.read(self.PIPE[0], 1):
                pass
        except OSError as e:
            # TODO: select.error is a subclass of OSError since Python 3.3.
            error_number = getattr(e, 'errno', e.args[0])
            if error_number not in [errno.EAGAIN, errno.EINTR]:
                raise
        except KeyboardInterrupt:
            sys.exit()

    def stop(self, graceful=True):
        """\
        Stop workers

        :attr graceful: boolean, If True (the default) workers will be
        killed gracefully  (ie. trying to wait for the current connection)
        """
        # Only unlink unix sockets when we own them exclusively: no
        # re-exec child, no old master, not systemd-activated, no
        # SO_REUSEPORT sharing.
        unlink = (
            self.reexec_pid == self.master_pid == 0
            and not self.systemd
            and not self.cfg.reuse_port
        )
        sock.close_sockets(self.LISTENERS, unlink)

        self.LISTENERS = []
        sig = signal.SIGTERM
        if not graceful:
            sig = signal.SIGQUIT
        limit = time.time() + self.cfg.graceful_timeout
        # instruct the workers to exit
        self.kill_workers(sig)
        # wait until the graceful timeout
        while self.WORKERS and time.time() < limit:
            time.sleep(0.1)

        # force-kill whatever is still alive
        self.kill_workers(signal.SIGKILL)

    def reexec(self):
        """\
        Relaunch the master and workers.
        """
        if self.reexec_pid != 0:
            self.log.warning("USR2 signal ignored. Child exists.")
            return

        if self.master_pid != 0:
            self.log.warning("USR2 signal ignored. Parent exists.")
            return

        master_pid = os.getpid()
        self.reexec_pid = os.fork()
        if self.reexec_pid != 0:
            # Parent: remember the child's pid and keep running.
            return

        # Child: exec a brand-new master that inherits our sockets.
        self.cfg.pre_exec(self)

        environ = self.cfg.env_orig.copy()
        environ['GUNICORN_PID'] = str(master_pid)

        if self.systemd:
            environ['LISTEN_PID'] = str(os.getpid())
            environ['LISTEN_FDS'] = str(len(self.LISTENERS))
        else:
            environ['GUNICORN_FD'] = ','.join(
                str(lnr.fileno()) for lnr in self.LISTENERS)

        os.chdir(self.START_CTX['cwd'])

        # exec the process using the original environment
        os.execvpe(self.START_CTX[0], self.START_CTX['args'], environ)

    def reload(self):
        """Reload configuration and respawn the workers (SIGHUP)."""
        old_address = self.cfg.address

        # reset old environment
        for k in self.cfg.env:
            if k in self.cfg.env_orig:
                # reset the key to the value it had before
                # we launched gunicorn
                os.environ[k] = self.cfg.env_orig[k]
            else:
                # delete the value set by gunicorn
                try:
                    del os.environ[k]
                except KeyError:
                    pass

        # reload conf
        self.app.reload()
        self.setup(self.app)

        # reopen log files
        self.log.reopen_files()

        # do we need to change listener ?
        if old_address != self.cfg.address:
            # close all listeners
            for lnr in self.LISTENERS:
                lnr.close()
            # init new listeners
            self.LISTENERS = sock.create_sockets(self.cfg, self.log)
            listeners_str = ",".join([str(lnr) for lnr in self.LISTENERS])
            self.log.info("Listening at: %s", listeners_str)

        # do some actions on reload
        self.cfg.on_reload(self)

        # unlink pidfile
        if self.pidfile is not None:
            self.pidfile.unlink()

        # create new pidfile
        if self.cfg.pidfile is not None:
            self.pidfile = Pidfile(self.cfg.pidfile)
            self.pidfile.create(self.pid)

        # set new proc_name
        util._setproctitle("master [%s]" % self.proc_name)

        # spawn new workers
        for _ in range(self.cfg.workers):
            self.spawn_worker()

        # manage workers
        self.manage_workers()

    def murder_workers(self):
        """\
        Kill unused/idle workers
        """
        if not self.timeout:
            return
        workers = list(self.WORKERS.items())
        for (pid, worker) in workers:
            try:
                # Workers heartbeat by touching a temp file; a stale
                # timestamp means the worker is stuck.
                if time.monotonic() - worker.tmp.last_update() <= self.timeout:
                    continue
            except (OSError, ValueError):
                continue

            if not worker.aborted:
                self.log.critical("WORKER TIMEOUT (pid:%s)", pid)
                worker.aborted = True
                self.kill_worker(pid, signal.SIGABRT)
            else:
                # Already asked to abort once; now kill outright.
                self.kill_worker(pid, signal.SIGKILL)

    def reap_workers(self):
        """\
        Reap workers to avoid zombie processes
        """
        try:
            while True:
                wpid, status = os.waitpid(-1, os.WNOHANG)
                if not wpid:
                    break
                if self.reexec_pid == wpid:
                    # The re-exec'ed master exited; forget it.
                    self.reexec_pid = 0
                else:
                    # A worker was terminated. If the termination reason was
                    # that it could not boot, we'll shut it down to avoid
                    # infinite start/stop cycles.
                    exitcode = status >> 8
                    if exitcode != 0:
                        self.log.error('Worker (pid:%s) exited with code %s', wpid, exitcode)
                    if exitcode == self.WORKER_BOOT_ERROR:
                        reason = "Worker failed to boot."
                        raise HaltServer(reason, self.WORKER_BOOT_ERROR)
                    if exitcode == self.APP_LOAD_ERROR:
                        reason = "App failed to load."
                        raise HaltServer(reason, self.APP_LOAD_ERROR)

                    # NOTE(review): a positive exitcode was already logged by
                    # the branch above, so this logs it a second time —
                    # confirm whether the duplication is intentional.
                    if exitcode > 0:
                        # If the exit code of the worker is greater than 0,
                        # let the user know.
                        self.log.error("Worker (pid:%s) exited with code %s.",
                                       wpid, exitcode)
                    elif status > 0:
                        # If the exit code of the worker is 0 and the status
                        # is greater than 0, then it was most likely killed
                        # via a signal.
                        try:
                            sig_name = signal.Signals(status).name
                        except ValueError:
                            sig_name = "code {}".format(status)
                        msg = "Worker (pid:{}) was sent {}!".format(
                            wpid, sig_name)

                        # Additional hint for SIGKILL
                        if status == signal.SIGKILL:
                            msg += " Perhaps out of memory?"
                        self.log.error(msg)

                    worker = self.WORKERS.pop(wpid, None)
                    if not worker:
                        continue
                    worker.tmp.close()
                    self.cfg.child_exit(self, worker)
        except OSError as e:
            # ECHILD simply means there are no children left to reap.
            if e.errno != errno.ECHILD:
                raise

    def manage_workers(self):
        """\
        Maintain the number of workers by spawning or killing
        as required.
        """
        if len(self.WORKERS) < self.num_workers:
            self.spawn_workers()

        workers = self.WORKERS.items()
        workers = sorted(workers, key=lambda w: w[1].age)
        # Retire the oldest workers first when over target.
        while len(workers) > self.num_workers:
            (pid, _) = workers.pop(0)
            self.kill_worker(pid, signal.SIGTERM)

        active_worker_count = len(workers)
        # Emit the gauge metric only when the count actually changed.
        if self._last_logged_active_worker_count != active_worker_count:
            self._last_logged_active_worker_count = active_worker_count
            self.log.debug("{0} workers".format(active_worker_count),
                           extra={"metric": "gunicorn.workers",
                                  "value": active_worker_count,
                                  "mtype": "gauge"})

    def spawn_worker(self):
        """Fork one worker; returns its pid in the master, never in the child."""
        self.worker_age += 1
        worker = self.worker_class(self.worker_age, self.pid, self.LISTENERS,
                                   self.app, self.timeout / 2.0,
                                   self.cfg, self.log)
        self.cfg.pre_fork(self, worker)
        pid = os.fork()
        if pid != 0:
            # Master process: register the new worker and return.
            worker.pid = pid
            self.WORKERS[pid] = worker
            return pid

        # Do not inherit the temporary files of other workers
        for sibling in self.WORKERS.values():
            sibling.tmp.close()

        # Process Child
        worker.pid = os.getpid()
        try:
            util._setproctitle("worker [%s]" % self.proc_name)
            self.log.info("Booting worker with pid: %s", worker.pid)
            self.cfg.post_fork(self, worker)
            worker.init_process()
            sys.exit(0)
        except SystemExit:
            raise
        except AppImportError as e:
            self.log.debug("Exception while loading the application",
                           exc_info=True)
            print("%s" % e, file=sys.stderr)
            sys.stderr.flush()
            sys.exit(self.APP_LOAD_ERROR)
        except Exception:
            self.log.exception("Exception in worker process")
            if not worker.booted:
                sys.exit(self.WORKER_BOOT_ERROR)
            sys.exit(-1)
        finally:
            self.log.info("Worker exiting (pid: %s)", worker.pid)
            try:
                worker.tmp.close()
                self.cfg.worker_exit(self, worker)
            except Exception:
                self.log.warning("Exception during worker exit:\n%s",
                                 traceback.format_exc())

    def spawn_workers(self):
        """\
        Spawn new workers as needed.

        This is where a worker process leaves the main loop
        of the master process.
        """

        for _ in range(self.num_workers - len(self.WORKERS)):
            self.spawn_worker()
            # Stagger the forks slightly to avoid a thundering start.
            time.sleep(0.1 * random.random())

    def kill_workers(self, sig):
        """\
        Kill all workers with the signal `sig`
        :attr sig: `signal.SIG*` value
        """
        worker_pids = list(self.WORKERS.keys())
        for pid in worker_pids:
            self.kill_worker(pid, sig)

    def kill_worker(self, pid, sig):
        """\
        Kill a worker

        :attr pid: int, worker pid
        :attr sig: `signal.SIG*` value
         """
        try:
            os.kill(pid, sig)
        except OSError as e:
            if e.errno == errno.ESRCH:
                # The process is already gone: clean up our bookkeeping.
                try:
                    worker = self.WORKERS.pop(pid)
                    worker.tmp.close()
                    self.cfg.worker_exit(self, worker)
                    return
                except (KeyError, OSError):
                    return
            raise
diff --git a/.venv/Lib/site-packages/gunicorn/config.py b/.venv/Lib/site-packages/gunicorn/config.py
new file mode 100644
index 0000000000000000000000000000000000000000..402a26b6828aef2255d0ab8824023acbc7405da4
--- /dev/null
+++ b/.venv/Lib/site-packages/gunicorn/config.py
@@ -0,0 +1,2442 @@
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+
+# Please remember to run "make -C docs html" after update "desc" attributes.
+
+import argparse
+import copy
+import grp
+import inspect
+import ipaddress
+import os
+import pwd
+import re
+import shlex
+import ssl
+import sys
+import textwrap
+
+from gunicorn import __version__, util
+from gunicorn.errors import ConfigError
+from gunicorn.reloader import reloader_engines
+
+KNOWN_SETTINGS = []
+PLATFORM = sys.platform
+
+
def make_settings(ignore=None):
    """Instantiate every registered Setting, skipping names in *ignore*.

    Returns a dict mapping setting name -> fresh Setting copy.
    """
    ignore = ignore or ()
    settings = {}
    for setting_class in KNOWN_SETTINGS:
        instance = setting_class()
        if instance.name not in ignore:
            settings[instance.name] = instance.copy()
    return settings
+
+
def auto_int(_, x):
    """argparse type hook: parse *x* as an int with base auto-detection.

    A legacy leading-zero octal literal (e.g. "0755") is rewritten to
    Python 3's "0o" form before parsing; the first argument (the
    setting) is ignored.
    """
    if re.match(r'0(\d)', x, re.IGNORECASE):
        x = '0o' + x[1:]
    return int(x, 0)
+
+
class Config:
    """Holds every gunicorn Setting plus derived convenience properties.

    Unknown attribute access is delegated to the underlying settings via
    __getattr__, so ``cfg.workers`` reads the "workers" setting; direct
    attribute *assignment* to a setting name is forbidden — use set().
    """

    def __init__(self, usage=None, prog=None):
        self.settings = make_settings()
        self.usage = usage
        self.prog = prog or os.path.basename(sys.argv[0])
        # Pristine copy of the environment, so reload()/re-exec can
        # restore variables that gunicorn itself modified.
        self.env_orig = os.environ.copy()

    def __str__(self):
        # Render "name = value" lines, aligned on the longest name.
        lines = []
        kmax = max(len(k) for k in self.settings)
        for k in sorted(self.settings):
            v = self.settings[k].value
            if callable(v):
                v = "<{}()>".format(v.__qualname__)
            lines.append("{k:{kmax}} = {v}".format(k=k, v=v, kmax=kmax))
        return "\n".join(lines)

    def __getattr__(self, name):
        # Only reached for attributes not found normally; resolve them
        # as setting values.
        if name not in self.settings:
            raise AttributeError("No configuration setting for: %s" % name)
        return self.settings[name].get()

    def __setattr__(self, name, value):
        # Forbid shadowing a setting with an instance attribute; callers
        # must go through set() so the validator runs.
        if name != "settings" and name in self.settings:
            raise AttributeError("Invalid access!")
        super().__setattr__(name, value)

    def set(self, name, value):
        """Set a setting's value (runs its validator)."""
        if name not in self.settings:
            raise AttributeError("No configuration setting for: %s" % name)
        self.settings[name].set(value)

    def get_cmd_args_from_env(self):
        """Return extra CLI args from $GUNICORN_CMD_ARGS (shell-split)."""
        if 'GUNICORN_CMD_ARGS' in self.env_orig:
            return shlex.split(self.env_orig['GUNICORN_CMD_ARGS'])
        return []

    def parser(self):
        """Build the argparse parser with one option per CLI-exposed setting."""
        kwargs = {
            "usage": self.usage,
            "prog": self.prog
        }
        parser = argparse.ArgumentParser(**kwargs)
        parser.add_argument("-v", "--version",
                            action="version", default=argparse.SUPPRESS,
                            version="%(prog)s (version " + __version__ + ")\n",
                            help="show program's version number and exit")
        parser.add_argument("args", nargs="*", help=argparse.SUPPRESS)

        # Settings sort by (section, declaration order) — see Setting.__lt__.
        keys = sorted(self.settings, key=self.settings.__getitem__)
        for k in keys:
            self.settings[k].add_option(parser)

        return parser

    @property
    def worker_class_str(self):
        """Display name of the worker class ("gthread" when threads apply)."""
        uri = self.settings['worker_class'].get()

        if isinstance(uri, str):
            # are we using a threaded worker?
            is_sync = uri.endswith('SyncWorker') or uri == 'sync'
            if is_sync and self.threads > 1:
                return "gthread"
            return uri
        return uri.__name__

    @property
    def worker_class(self):
        """Resolve and return the worker class object.

        A sync worker combined with threads > 1 is silently upgraded to
        the gthread worker.
        """
        uri = self.settings['worker_class'].get()

        # are we using a threaded worker?
        is_sync = isinstance(uri, str) and (uri.endswith('SyncWorker') or uri == 'sync')
        if is_sync and self.threads > 1:
            uri = "gunicorn.workers.gthread.ThreadWorker"

        worker_class = util.load_class(uri)
        if hasattr(worker_class, "setup"):
            worker_class.setup()
        return worker_class

    @property
    def address(self):
        # Parsed form of every "bind" entry.
        s = self.settings['bind'].get()
        return [util.parse_address(util.bytes_to_str(bind)) for bind in s]

    @property
    def uid(self):
        return self.settings['user'].get()

    @property
    def gid(self):
        return self.settings['group'].get()

    @property
    def proc_name(self):
        # Explicit proc_name wins; otherwise fall back to the default
        # derived from the app module.
        pn = self.settings['proc_name'].get()
        if pn is not None:
            return pn
        else:
            return self.settings['default_proc_name'].get()

    @property
    def logger_class(self):
        """Resolve and return the logger class, honoring the statsd switch."""
        uri = self.settings['logger_class'].get()
        if uri == "simple":
            # support the default
            uri = LoggerClass.default

        # if default logger is in use, and statsd is on, automagically switch
        # to the statsd logger
        if uri == LoggerClass.default:
            if 'statsd_host' in self.settings and self.settings['statsd_host'].value is not None:
                uri = "gunicorn.instrument.statsd.Statsd"

        logger_class = util.load_class(
            uri,
            default="gunicorn.glogging.Logger",
            section="gunicorn.loggers")

        if hasattr(logger_class, "install"):
            logger_class.install()
        return logger_class

    @property
    def is_ssl(self):
        # SSL is considered enabled as soon as either file is configured.
        return self.certfile or self.keyfile

    @property
    def ssl_options(self):
        """All settings from the SSL section as a plain dict."""
        opts = {}
        for name, value in self.settings.items():
            if value.section == 'SSL':
                opts[name] = value.get()
        return opts

    @property
    def env(self):
        """Parse raw_env ("KEY=value" strings) into a dict."""
        raw_env = self.settings['raw_env'].get()
        env = {}

        if not raw_env:
            return env

        for e in raw_env:
            s = util.bytes_to_str(e)
            try:
                k, v = s.split('=', 1)
            except ValueError:
                raise RuntimeError("environment setting %r invalid" % s)

            env[k] = v

        return env

    @property
    def sendfile(self):
        # NOTE(review): any explicitly configured value disables sendfile
        # here ("is not None" -> False) — confirm this inversion is the
        # intended semantics for the tri-state setting.
        if self.settings['sendfile'].get() is not None:
            return False

        if 'SENDFILE' in os.environ:
            sendfile = os.environ['SENDFILE'].lower()
            return sendfile in ['y', '1', 'yes', 'true']

        return True

    @property
    def reuse_port(self):
        return self.settings['reuse_port'].get()

    @property
    def paste_global_conf(self):
        """Parse raw_paste_global_conf into a dict, or None when unset.

        Entries split on the first unescaped "=", with "\\=" unescaped
        afterwards in both key and value.
        """
        raw_global_conf = self.settings['raw_paste_global_conf'].get()
        if raw_global_conf is None:
            return None

        global_conf = {}
        for e in raw_global_conf:
            s = util.bytes_to_str(e)
            try:
                k, v = re.split(r'(?<!\\)=', s, 1)
            except ValueError:
                raise RuntimeError("environment setting %r invalid" % s)
            k = k.replace('\\=', '=')
            v = v.replace('\\=', '=')
            global_conf[k] = v

        return global_conf
+
+
class SettingMeta(type):
    """Metaclass that registers every concrete Setting subclass.

    Each subclass gets a creation-order index (used for sorting), its
    ``validator`` wrapped as a staticmethod, its description formatted,
    and is appended to the module-level KNOWN_SETTINGS registry.
    """

    def __new__(cls, name, bases, attrs):
        super_new = super().__new__
        parents = [b for b in bases if isinstance(b, SettingMeta)]
        if not parents:
            # The Setting base class itself: create without registering.
            return super_new(cls, name, bases, attrs)

        attrs["order"] = len(KNOWN_SETTINGS)
        attrs["validator"] = staticmethod(attrs["validator"])

        new_class = super_new(cls, name, bases, attrs)
        new_class.fmt_desc(attrs.get("desc", ""))
        KNOWN_SETTINGS.append(new_class)
        return new_class

    def fmt_desc(cls, desc):
        # Store the dedented full description and its first line as the
        # short help text.  NOTE(review): an empty desc would make
        # splitlines()[0] raise IndexError — presumably every setting
        # defines one; confirm.
        desc = textwrap.dedent(desc).strip()
        setattr(cls, "desc", desc)
        setattr(cls, "short", desc.splitlines()[0])
+
+
class Setting:
    """Base class for a single configuration setting.

    Concrete settings (created through SettingMeta) override the class
    attributes below to describe their name, CLI exposure, validator,
    and default.
    """

    name = None       # configuration key, e.g. "workers"
    value = None      # current (validated) value
    section = None    # documentation/CLI section
    cli = None        # CLI option strings, e.g. ["-w", "--workers"]
    validator = None  # callable that validates/coerces assigned values
    type = None       # argparse "type" callable
    meta = None       # argparse metavar
    action = None     # argparse action
    default = None    # default value, validated through set() at init
    short = None      # one-line help text (filled in by SettingMeta)
    desc = None       # full description (filled in by SettingMeta)
    nargs = None      # argparse nargs
    const = None      # argparse const

    def __init__(self):
        # Run the default through the validator so value is normalized.
        if self.default is not None:
            self.set(self.default)

    def add_option(self, parser):
        """Register this setting's CLI option on *parser* (no-op if no cli)."""
        if not self.cli:
            return
        args = tuple(self.cli)

        help_txt = "%s [%s]" % (self.short, self.default)
        # argparse interpolates "%"; escape any literal percent signs.
        help_txt = help_txt.replace("%", "%%")

        kwargs = {
            "dest": self.name,
            "action": self.action or "store",
            "type": self.type or str,
            # default=None so we can tell "not given on the CLI" apart
            # from an explicit value.
            "default": None,
            "help": help_txt
        }

        if self.meta is not None:
            kwargs['metavar'] = self.meta

        # Non-"store" actions (store_true, append_const, ...) reject "type".
        if kwargs["action"] != "store":
            kwargs.pop("type")

        if self.nargs is not None:
            kwargs["nargs"] = self.nargs

        if self.const is not None:
            kwargs["const"] = self.const

        parser.add_argument(*args, **kwargs)

    def copy(self):
        # Shallow copy is sufficient: value is replaced wholesale by set().
        return copy.copy(self)

    def get(self):
        """Return the current value."""
        return self.value

    def set(self, val):
        """Validate *val* and store the result as the current value."""
        if not callable(self.validator):
            raise TypeError('Invalid validator: %s' % self.name)
        self.value = self.validator(val)

    def __lt__(self, other):
        # Orders settings by declaration order within the same section;
        # settings from different sections always compare False.
        return (self.section == other.section and
                self.order < other.order)
    __cmp__ = __lt__

    def __repr__(self):
        return "<%s.%s object at %x with value %r>" % (
            self.__class__.__module__,
            self.__class__.__name__,
            id(self),
            self.value,
        )
+
+
# Rebuild Setting with SettingMeta as its metaclass so every subclass
# defined below is auto-registered in KNOWN_SETTINGS.
Setting = SettingMeta('Setting', (Setting,), {})
+
+
def validate_bool(val):
    """Coerce *val* to a bool.

    None passes through unchanged; bools are returned as-is; strings
    "true"/"false" (any case, surrounding whitespace ignored) are
    parsed.  Anything else raises TypeError/ValueError.
    """
    if val is None:
        return

    if isinstance(val, bool):
        return val
    if not isinstance(val, str):
        raise TypeError("Invalid type for casting: %s" % val)

    normalized = val.lower().strip()
    if normalized == "true":
        return True
    if normalized == "false":
        return False
    raise ValueError("Invalid boolean: %s" % val)
+
+
def validate_dict(val):
    """Return *val* unchanged if it is a dict; raise ``TypeError`` otherwise."""
    if isinstance(val, dict):
        return val
    raise TypeError("Value is not a dictionary: %s " % val)
+
+
def validate_pos_int(val):
    """Convert *val* to a non-negative integer.

    Strings are parsed with ``int(val, 0)``, so ``0x``/``0o``/``0b``
    prefixes select the base.  Integers (including booleans) are
    normalized with plain ``int()``.  Raises ``ValueError`` for negative
    results.
    """
    # Booleans are ints, so plain int() covers them; only non-int inputs
    # go through base-autodetecting parsing.
    converted = int(val) if isinstance(val, int) else int(val, 0)
    if converted < 0:
        raise ValueError("Value must be positive: %s" % converted)
    return converted
+
+
def validate_ssl_version(val):
    """Accept any value for the deprecated ``ssl_version`` setting.

    The value is ignored by the server; a warning is written to stderr
    whenever it differs from ``SSLVersion.default``.  Use ``ssl_context``
    instead.
    """
    if val != SSLVersion.default:
        sys.stderr.write("Warning: option `ssl_version` is deprecated and it is ignored. Use ssl_context instead.\n")
    return val
+
+
def validate_string(val):
    """Return *val* stripped of surrounding whitespace; ``None`` passes through.

    Raises ``TypeError`` for non-string, non-None values.
    """
    if val is None:
        return None
    if isinstance(val, str):
        return val.strip()
    raise TypeError("Not a string: %s" % val)
+
+
def validate_file_exists(val):
    """Validate that *val* names an existing filesystem path.

    ``None`` passes through unchanged.  Raises ``ValueError`` when the
    path does not exist on disk.
    """
    if val is None:
        return None
    if not os.path.exists(val):
        # Fixed grammar in the user-facing message (was "does not exists.").
        raise ValueError("File %s does not exist." % val)
    return val
+
+
def validate_list_string(val):
    """Normalize *val* into a list of validated (stripped) strings.

    A falsy value yields ``[]``.  A bare string -- the legacy syntax --
    is treated as a single-element list.
    """
    if not val:
        return []

    items = [val] if isinstance(val, str) else val
    return [validate_string(item) for item in items]
+
+
def validate_list_of_existing_files(val):
    """Validate a list of path strings, requiring each one to exist on disk."""
    checked = []
    for path in validate_list_string(val):
        checked.append(validate_file_exists(path))
    return checked
+
+
def validate_string_to_addr_list(val):
    """Parse a comma-separated string into a list of IP-address strings.

    Each entry must either be the ``"*"`` wildcard or a valid IPv4/IPv6
    address; ``ipaddress.ip_address`` raises ``ValueError`` for anything
    else.  Returns the list of (stripped) address strings.
    """
    val = validate_string_to_list(val)

    for addr in val:
        if addr == "*":
            continue
        # Validate only; the parsed address object itself is not needed.
        # (Previously bound to an unused, typo-named local ``_vaid_ip``.)
        ipaddress.ip_address(addr)

    return val
+
+
def validate_string_to_list(val):
    """Split a comma-separated string into a list of stripped tokens.

    ``None`` or an empty string yields ``[]``.  Empty fields produced by
    adjacent commas are dropped before stripping.
    """
    val = validate_string(val)
    if not val:
        return []

    tokens = []
    for piece in val.split(","):
        if piece:
            tokens.append(piece.strip())
    return tokens
+
+
def validate_class(val):
    """Accept a class, a callable producing one, or an import string."""
    if inspect.isfunction(val) or inspect.ismethod(val):
        # A factory was supplied; call it to obtain the actual value.
        val = val()
    return val if inspect.isclass(val) else validate_string(val)
+
+
def validate_callable(arity):
    """Build a validator that ensures a value is callable with *arity* args.

    String values are interpreted as ``module[.submodules...].object``
    import paths and resolved first.  An *arity* of ``-1`` disables the
    arity check.
    """
    def _resolve_import(path):
        # Split "pkg.mod.obj" into a module path and an attribute name,
        # then import the module and fetch the attribute.
        try:
            mod_name, obj_name = path.rsplit(".", 1)
        except ValueError:
            raise TypeError("Value '%s' is not import string. "
                            "Format: module[.submodules...].object" % path)
        try:
            mod = __import__(mod_name, fromlist=[obj_name])
            return getattr(mod, obj_name)
        except ImportError as e:
            raise TypeError(str(e))
        except AttributeError:
            raise TypeError("Can not load '%s' from '%s'"
                            "" % (obj_name, mod_name))

    def _validate_callable(val):
        if isinstance(val, str):
            val = _resolve_import(val)
        if not callable(val):
            raise TypeError("Value is not callable: %s" % val)
        if arity != -1 and arity != util.get_arity(val):
            raise TypeError("Value must have an arity of: %s" % arity)
        return val

    return _validate_callable
+
+
def validate_user(val):
    """Resolve *val* to a numeric uid.

    ``None`` means the current effective uid.  Integers and digit-only
    strings are used directly; any other string is looked up as a user
    name via ``pwd``, raising ``ConfigError`` when it does not exist.
    """
    if val is None:
        return os.geteuid()
    if isinstance(val, int):
        return val
    if val.isdigit():
        return int(val)
    try:
        return pwd.getpwnam(val).pw_uid
    except KeyError:
        raise ConfigError("No such user: '%s'" % val)
+
+
def validate_group(val):
    """Resolve *val* to a numeric gid.

    ``None`` means the current effective gid.  Integers and digit-only
    strings are used directly; any other string is looked up as a group
    name via ``grp``, raising ``ConfigError`` when it does not exist.
    """
    if val is None:
        return os.getegid()
    if isinstance(val, int):
        return val
    if val.isdigit():
        return int(val)
    try:
        return grp.getgrnam(val).gr_gid
    except KeyError:
        raise ConfigError("No such group: '%s'" % val)
+
+
def validate_post_request(val):
    """Validate the ``post_request`` hook and adapt it to four arguments.

    Hooks written with the legacy 2- or 3-argument signatures are wrapped
    so the server can always invoke them as
    ``hook(worker, req, environ, resp)``.
    """
    val = validate_callable(-1)(val)

    largs = util.get_arity(val)
    if largs == 4:
        return val
    if largs == 3:
        return lambda worker, req, env, _r: val(worker, req, env)
    if largs == 2:
        return lambda worker, req, _e, _r: val(worker, req)
    raise TypeError("Value must have an arity of: 4")
+
+
def validate_chdir(val):
    """Validate a working-directory setting, returning its absolute path.

    Relative paths are resolved against the server's startup directory.
    Raises ``ConfigError`` when the resulting path does not exist.
    """
    val = validate_string(val)

    path = os.path.abspath(os.path.normpath(os.path.join(util.getcwd(), val)))

    if os.path.exists(path):
        return path
    raise ConfigError("can't chdir to %r" % val)
+
+
def validate_statsd_address(val):
    """Validate the statsd endpoint string, returning a parsed address.

    Accepts ``host:port`` or ``unix://PATH`` forms; ``None`` passes
    through.  Raises ``TypeError`` for unparseable values.
    """
    val = validate_string(val)
    if val is None:
        return None

    # As of major release 20, util.parse_address would recognize unix:PORT
    # as a UDS address, breaking backwards compatibility. We defend against
    # that regression here (this is also unit-tested).
    # Feel free to remove in the next major release.
    legacy_match = re.match(r'^unix:(\d+)$', val)
    if legacy_match is not None:
        return ('unix', int(legacy_match.group(1)))

    try:
        return util.parse_address(val, default_port='8125')
    except RuntimeError:
        raise TypeError("Value must be one of ('host:port', 'unix://PATH')")
+
+
def validate_reload_engine(val):
    """Ensure *val* names a known reloader implementation."""
    if val in reloader_engines:
        return val
    raise ConfigError("Invalid reload_engine: %r" % val)
+
+
def get_default_config_file():
    """Return the path to ``gunicorn.conf.py`` in the cwd, or ``None``."""
    candidate = os.path.join(os.path.abspath(os.getcwd()),
                             'gunicorn.conf.py')
    return candidate if os.path.exists(candidate) else None
+
+
class ConfigFile(Setting):
    # "config" (-c/--config): location of the Gunicorn config file.
    name = "config"
    section = "Config File"
    cli = ["-c", "--config"]
    meta = "CONFIG"
    validator = validate_string
    default = "./gunicorn.conf.py"
    desc = """\
        :ref:`The Gunicorn config file<configuration_file>`.

        A string of the form ``PATH``, ``file:PATH``, or ``python:MODULE_NAME``.

        Only has an effect when specified on the command line or as part of an
        application specific configuration.

        By default, a file named ``gunicorn.conf.py`` will be read from the same
        directory where gunicorn is being run.

        .. versionchanged:: 19.4
           Loading the config from a Python module requires the ``python:``
           prefix.
        """
+
+
class WSGIApp(Setting):
    # "wsgi_app": MODULE:VARIABLE path of the WSGI application (no CLI flag).
    name = "wsgi_app"
    section = "Config File"
    meta = "STRING"
    validator = validate_string
    default = None
    desc = """\
        A WSGI application path in pattern ``$(MODULE_NAME):$(VARIABLE_NAME)``.

        .. versionadded:: 20.1.0
        """
+
+
class Bind(Setting):
    # "bind" (-b/--bind): listening addresses; default honors $PORT if set.
    name = "bind"
    action = "append"
    section = "Server Socket"
    cli = ["-b", "--bind"]
    meta = "ADDRESS"
    validator = validate_list_string

    # Evaluated once at import time; $PORT is the PaaS convention.
    if 'PORT' in os.environ:
        default = ['0.0.0.0:{0}'.format(os.environ.get('PORT'))]
    else:
        default = ['127.0.0.1:8000']

    desc = """\
        The socket to bind.

        A string of the form: ``HOST``, ``HOST:PORT``, ``unix:PATH``,
        ``fd://FD``. An IP is a valid ``HOST``.

        .. versionchanged:: 20.0
           Support for ``fd://FD`` got added.

        Multiple addresses can be bound. ex.::

            $ gunicorn -b 127.0.0.1:8000 -b [::1]:8000 test:app

        will bind the `test:app` application on localhost both on ipv6
        and ipv4 interfaces.

        If the ``PORT`` environment variable is defined, the default
        is ``['0.0.0.0:$PORT']``. If it is not defined, the default
        is ``['127.0.0.1:8000']``.
        """
+
+
class Backlog(Setting):
    # "backlog" (--backlog): maximum number of pending connections.
    name = "backlog"
    section = "Server Socket"
    cli = ["--backlog"]
    meta = "INT"
    validator = validate_pos_int
    type = int
    default = 2048
    desc = """\
        The maximum number of pending connections.

        This refers to the number of clients that can be waiting to be served.
        Exceeding this number results in the client getting an error when
        attempting to connect. It should only affect servers under significant
        load.

        Must be a positive integer. Generally set in the 64-2048 range.
        """
+
+
class Workers(Setting):
    # "workers" (-w/--workers): worker process count; default from
    # $WEB_CONCURRENCY (falling back to 1).
    name = "workers"
    section = "Worker Processes"
    cli = ["-w", "--workers"]
    meta = "INT"
    validator = validate_pos_int
    type = int
    default = int(os.environ.get("WEB_CONCURRENCY", 1))
    desc = """\
        The number of worker processes for handling requests.

        A positive integer generally in the ``2-4 x $(NUM_CORES)`` range.
        You'll want to vary this a bit to find the best for your particular
        application's work load.

        By default, the value of the ``WEB_CONCURRENCY`` environment variable,
        which is set by some Platform-as-a-Service providers such as Heroku. If
        it is not defined, the default is ``1``.
        """
+
+
class WorkerClass(Setting):
    # "worker_class" (-k/--worker-class): worker implementation to use.
    name = "worker_class"
    section = "Worker Processes"
    cli = ["-k", "--worker-class"]
    meta = "STRING"
    validator = validate_class
    default = "sync"
    desc = """\
        The type of workers to use.

        The default class (``sync``) should handle most "normal" types of
        workloads. You'll want to read :doc:`design` for information on when
        you might want to choose one of the other worker classes. Required
        libraries may be installed using setuptools' ``extras_require`` feature.

        A string referring to one of the following bundled classes:

        * ``sync``
        * ``eventlet`` - Requires eventlet >= 0.24.1 (or install it via
          ``pip install gunicorn[eventlet]``)
        * ``gevent``   - Requires gevent >= 1.4 (or install it via
          ``pip install gunicorn[gevent]``)
        * ``tornado``  - Requires tornado >= 0.2 (or install it via
          ``pip install gunicorn[tornado]``)
        * ``gthread``  - Python 2 requires the futures package to be installed
          (or install it via ``pip install gunicorn[gthread]``)

        Optionally, you can provide your own worker by giving Gunicorn a
        Python path to a subclass of ``gunicorn.workers.base.Worker``.
        This alternative syntax will load the gevent class:
        ``gunicorn.workers.ggevent.GeventWorker``.
        """
+
+
class WorkerThreads(Setting):
    # "threads" (--threads): threads per worker (gthread worker type).
    name = "threads"
    section = "Worker Processes"
    cli = ["--threads"]
    meta = "INT"
    validator = validate_pos_int
    type = int
    default = 1
    desc = """\
        The number of worker threads for handling requests.

        Run each worker with the specified number of threads.

        A positive integer generally in the ``2-4 x $(NUM_CORES)`` range.
        You'll want to vary this a bit to find the best for your particular
        application's work load.

        If it is not defined, the default is ``1``.

        This setting only affects the Gthread worker type.

        .. note::
           If you try to use the ``sync`` worker type and set the ``threads``
           setting to more than 1, the ``gthread`` worker type will be used
           instead.
        """
+
+
class WorkerConnections(Setting):
    # "worker_connections" (--worker-connections): max simultaneous clients.
    name = "worker_connections"
    section = "Worker Processes"
    cli = ["--worker-connections"]
    meta = "INT"
    validator = validate_pos_int
    type = int
    default = 1000
    desc = """\
        The maximum number of simultaneous clients.

        This setting only affects the ``gthread``, ``eventlet`` and ``gevent`` worker types.
        """
+
+
class MaxRequests(Setting):
    # "max_requests" (--max-requests): recycle a worker after this many
    # requests; 0 disables automatic restarts.
    name = "max_requests"
    section = "Worker Processes"
    cli = ["--max-requests"]
    meta = "INT"
    validator = validate_pos_int
    type = int
    default = 0
    desc = """\
        The maximum number of requests a worker will process before restarting.

        Any value greater than zero will limit the number of requests a worker
        will process before automatically restarting. This is a simple method
        to help limit the damage of memory leaks.

        If this is set to zero (the default) then the automatic worker
        restarts are disabled.
        """
+
+
class MaxRequestsJitter(Setting):
    # "max_requests_jitter" (--max-requests-jitter): random spread added to
    # max_requests so workers do not all restart at once.
    name = "max_requests_jitter"
    section = "Worker Processes"
    cli = ["--max-requests-jitter"]
    meta = "INT"
    validator = validate_pos_int
    type = int
    default = 0
    desc = """\
        The maximum jitter to add to the *max_requests* setting.

        The jitter causes the restart per worker to be randomized by
        ``randint(0, max_requests_jitter)``. This is intended to stagger worker
        restarts to avoid all workers restarting at the same time.

        .. versionadded:: 19.2
        """
+
+
class Timeout(Setting):
    # "timeout" (-t/--timeout): seconds of worker silence before it is
    # killed and restarted; 0 disables the timeout.
    name = "timeout"
    section = "Worker Processes"
    cli = ["-t", "--timeout"]
    meta = "INT"
    validator = validate_pos_int
    type = int
    default = 30
    desc = """\
        Workers silent for more than this many seconds are killed and restarted.

        Value is a positive number or 0. Setting it to 0 has the effect of
        infinite timeouts by disabling timeouts for all workers entirely.

        Generally, the default of thirty seconds should suffice. Only set this
        noticeably higher if you're sure of the repercussions for sync workers.
        For the non sync workers it just means that the worker process is still
        communicating and is not tied to the length of time required to handle a
        single request.
        """
+
+
class GracefulTimeout(Setting):
    # "graceful_timeout" (--graceful-timeout): grace period (seconds) for
    # workers to finish in-flight requests after a restart signal.
    name = "graceful_timeout"
    section = "Worker Processes"
    cli = ["--graceful-timeout"]
    meta = "INT"
    validator = validate_pos_int
    type = int
    default = 30
    desc = """\
        Timeout for graceful workers restart.

        After receiving a restart signal, workers have this much time to finish
        serving requests. Workers still alive after the timeout (starting from
        the receipt of the restart signal) are force killed.
        """
+
+
class Keepalive(Setting):
    # "keepalive" (--keep-alive): seconds to hold a Keep-Alive connection
    # open waiting for the next request.
    name = "keepalive"
    section = "Worker Processes"
    cli = ["--keep-alive"]
    meta = "INT"
    validator = validate_pos_int
    type = int
    default = 2
    desc = """\
        The number of seconds to wait for requests on a Keep-Alive connection.

        Generally set in the 1-5 seconds range for servers with direct connection
        to the client (e.g. when you don't have separate load balancer). When
        Gunicorn is deployed behind a load balancer, it often makes sense to
        set this to a higher value.

        .. note::
           ``sync`` worker does not support persistent connections and will
           ignore this option.
        """
+
+
class LimitRequestLine(Setting):
    # "limit_request_line" (--limit-request-line): max size of the HTTP
    # request line in bytes (0 = unlimited, max 8190).
    name = "limit_request_line"
    section = "Security"
    cli = ["--limit-request-line"]
    meta = "INT"
    validator = validate_pos_int
    type = int
    default = 4094
    desc = """\
        The maximum size of HTTP request line in bytes.

        This parameter is used to limit the allowed size of a client's
        HTTP request-line. Since the request-line consists of the HTTP
        method, URI, and protocol version, this directive places a
        restriction on the length of a request-URI allowed for a request
        on the server. A server needs this value to be large enough to
        hold any of its resource names, including any information that
        might be passed in the query part of a GET request. Value is a number
        from 0 (unlimited) to 8190.

        This parameter can be used to prevent any DDOS attack.
        """
+
+
class LimitRequestFields(Setting):
    # "limit_request_fields" (--limit-request-fields): max number of header
    # fields accepted per request.
    name = "limit_request_fields"
    section = "Security"
    cli = ["--limit-request-fields"]
    meta = "INT"
    validator = validate_pos_int
    type = int
    default = 100
    desc = """\
        Limit the number of HTTP headers fields in a request.

        This parameter is used to limit the number of headers in a request to
        prevent DDOS attack. Used with the *limit_request_field_size* it allows
        more safety. By default this value is 100 and can't be larger than
        32768.
        """
+
+
class LimitRequestFieldSize(Setting):
    # "limit_request_field_size" (--limit-request-field_size): max size of
    # a single header field; 0 = unlimited. (The mixed-dash/underscore flag
    # spelling is the established public interface -- do not "fix" it.)
    name = "limit_request_field_size"
    section = "Security"
    cli = ["--limit-request-field_size"]
    meta = "INT"
    validator = validate_pos_int
    type = int
    default = 8190
    desc = """\
        Limit the allowed size of an HTTP request header field.

        Value is a positive number or 0. Setting it to 0 will allow unlimited
        header field sizes.

        .. warning::
           Setting this parameter to a very high or unlimited value can open
           up for DDOS attacks.
        """
+
+
class Reload(Setting):
    # "reload" (--reload): restart workers on code changes (development aid).
    name = "reload"
    section = 'Debugging'
    cli = ['--reload']
    validator = validate_bool
    action = 'store_true'
    default = False

    desc = '''\
        Restart workers when code changes.

        This setting is intended for development. It will cause workers to be
        restarted whenever application code changes.

        The reloader is incompatible with application preloading. When using a
        paste configuration be sure that the server block does not import any
        application code or the reload will not work as designed.

        The default behavior is to attempt inotify with a fallback to file
        system polling. Generally, inotify should be preferred if available
        because it consumes less system resources.

        .. note::
           In order to use the inotify reloader, you must have the ``inotify``
           package installed.
        '''
+
+
class ReloadEngine(Setting):
    # "reload_engine" (--reload-engine): implementation backing --reload.
    name = "reload_engine"
    section = "Debugging"
    cli = ["--reload-engine"]
    meta = "STRING"
    validator = validate_reload_engine
    default = "auto"
    desc = """\
        The implementation that should be used to power :ref:`reload`.

        Valid engines are:

        * ``'auto'``
        * ``'poll'``
        * ``'inotify'`` (requires inotify)

        .. versionadded:: 19.7
        """
+
+
class ReloadExtraFiles(Setting):
    # "reload_extra_files" (--reload-extra-file): extra paths watched by
    # the reloader; each must exist at validation time.
    name = "reload_extra_files"
    action = "append"
    section = "Debugging"
    cli = ["--reload-extra-file"]
    meta = "FILES"
    validator = validate_list_of_existing_files
    default = []
    desc = """\
        Extends :ref:`reload` option to also watch and reload on additional files
        (e.g., templates, configurations, specifications, etc.).

        .. versionadded:: 19.8
        """
+
+
class Spew(Setting):
    # "spew" (--spew): install a trace function printing every executed line.
    name = "spew"
    section = "Debugging"
    cli = ["--spew"]
    validator = validate_bool
    action = "store_true"
    default = False
    desc = """\
        Install a trace function that spews every line executed by the server.

        This is the nuclear option.
        """
+
+
class ConfigCheck(Setting):
    # "check_config" (--check-config): validate the configuration and exit.
    name = "check_config"
    section = "Debugging"
    cli = ["--check-config"]
    validator = validate_bool
    action = "store_true"
    default = False
    desc = """\
        Check the configuration and exit. The exit status is 0 if the
        configuration is correct, and 1 if the configuration is incorrect.
        """
+
+
class PrintConfig(Setting):
    # "print_config" (--print-config): print resolved settings; implies
    # check_config.
    name = "print_config"
    section = "Debugging"
    cli = ["--print-config"]
    validator = validate_bool
    action = "store_true"
    default = False
    desc = """\
        Print the configuration settings as fully resolved. Implies :ref:`check-config`.
        """
+
+
class PreloadApp(Setting):
    # "preload_app" (--preload): import the application before forking workers.
    name = "preload_app"
    section = "Server Mechanics"
    cli = ["--preload"]
    validator = validate_bool
    action = "store_true"
    default = False
    desc = """\
        Load application code before the worker processes are forked.

        By preloading an application you can save some RAM resources as well as
        speed up server boot times. Although, if you defer application loading
        to each worker process, you can reload your application code easily by
        restarting workers.
        """
+
+
class Sendfile(Setting):
    # "sendfile" (--no-sendfile): store_const stores False to disable
    # sendfile(); when unset, the SENDFILE env var decides (per desc).
    name = "sendfile"
    section = "Server Mechanics"
    cli = ["--no-sendfile"]
    validator = validate_bool
    action = "store_const"
    const = False

    desc = """\
        Disables the use of ``sendfile()``.

        If not set, the value of the ``SENDFILE`` environment variable is used
        to enable or disable its usage.

        .. versionadded:: 19.2
        .. versionchanged:: 19.4
           Swapped ``--sendfile`` with ``--no-sendfile`` to actually allow
           disabling.
        .. versionchanged:: 19.6
           added support for the ``SENDFILE`` environment variable
        """
+
+
class ReusePort(Setting):
    # "reuse_port" (--reuse-port): set SO_REUSEPORT on the listening socket.
    name = "reuse_port"
    section = "Server Mechanics"
    cli = ["--reuse-port"]
    validator = validate_bool
    action = "store_true"
    default = False

    desc = """\
        Set the ``SO_REUSEPORT`` flag on the listening socket.

        .. versionadded:: 19.8
        """
+
+
class Chdir(Setting):
    # "chdir" (--chdir): directory to change into before loading the app;
    # default_doc overrides the literal cwd default in generated docs.
    name = "chdir"
    section = "Server Mechanics"
    cli = ["--chdir"]
    validator = validate_chdir
    default = util.getcwd()
    default_doc = "``'.'``"
    desc = """\
        Change directory to specified directory before loading apps.
        """
+
+
class Daemon(Setting):
    # "daemon" (-D/--daemon): detach from the terminal and run in background.
    name = "daemon"
    section = "Server Mechanics"
    cli = ["-D", "--daemon"]
    validator = validate_bool
    action = "store_true"
    default = False
    desc = """\
        Daemonize the Gunicorn process.

        Detaches the server from the controlling terminal and enters the
        background.
        """
+
+
class Env(Setting):
    # "raw_env" (-e/--env): KEY=value pairs added to the execution environment.
    name = "raw_env"
    action = "append"
    section = "Server Mechanics"
    cli = ["-e", "--env"]
    meta = "ENV"
    validator = validate_list_string
    default = []

    desc = """\
        Set environment variables in the execution environment.

        Should be a list of strings in the ``key=value`` format.

        For example on the command line:

        .. code-block:: console

            $ gunicorn -b 127.0.0.1:8000 --env FOO=1 test:app

        Or in the configuration file:

        .. code-block:: python

            raw_env = ["FOO=1"]
        """
+
+
class Pidfile(Setting):
    # "pidfile" (-p/--pid): where to write the PID file (None = don't write).
    name = "pidfile"
    section = "Server Mechanics"
    cli = ["-p", "--pid"]
    meta = "FILE"
    validator = validate_string
    default = None
    desc = """\
        A filename to use for the PID file.

        If not set, no PID file will be written.
        """
+
+
class WorkerTmpDir(Setting):
    # "worker_tmp_dir" (--worker-tmp-dir): directory for the worker
    # heartbeat temporary file.
    name = "worker_tmp_dir"
    section = "Server Mechanics"
    cli = ["--worker-tmp-dir"]
    meta = "DIR"
    validator = validate_string
    default = None
    desc = """\
        A directory to use for the worker heartbeat temporary file.

        If not set, the default temporary directory will be used.

        .. note::
           The current heartbeat system involves calling ``os.fchmod`` on
           temporary file handlers and may block a worker for arbitrary time
           if the directory is on a disk-backed filesystem.

           See :ref:`blocking-os-fchmod` for more detailed information
           and a solution for avoiding this problem.
        """
+
+
class User(Setting):
    # "user" (-u/--user): uid (or user name) the worker processes run as.
    name = "user"
    section = "Server Mechanics"
    cli = ["-u", "--user"]
    meta = "USER"
    validator = validate_user
    default = os.geteuid()
    default_doc = "``os.geteuid()``"
    desc = """\
        Switch worker processes to run as this user.

        A valid user id (as an integer) or the name of a user that can be
        retrieved with a call to ``pwd.getpwnam(value)`` or ``None`` to not
        change the worker process user.
        """
+
+
class Group(Setting):
    # "group" (-g/--group): gid (or group name) the worker processes run as.
    name = "group"
    section = "Server Mechanics"
    cli = ["-g", "--group"]
    meta = "GROUP"
    validator = validate_group
    default = os.getegid()
    default_doc = "``os.getegid()``"
    desc = """\
        Switch worker process to run as this group.

        A valid group id (as an integer) or the name of a user that can be
        retrieved with a call to ``pwd.getgrnam(value)`` or ``None`` to not
        change the worker processes group.
        """
+
+
class Umask(Setting):
    # "umask" (-m/--umask): file-mode mask for files Gunicorn writes.
    # ``auto_int`` (defined elsewhere in this module) lets the CLI accept
    # hex/octal/decimal spellings.
    name = "umask"
    section = "Server Mechanics"
    cli = ["-m", "--umask"]
    meta = "INT"
    validator = validate_pos_int
    type = auto_int
    default = 0
    desc = """\
        A bit mask for the file mode on files written by Gunicorn.

        Note that this affects unix socket permissions.

        A valid value for the ``os.umask(mode)`` call or a string compatible
        with ``int(value, 0)`` (``0`` means Python guesses the base, so values
        like ``0``, ``0xFF``, ``0022`` are valid for decimal, hex, and octal
        representations)
        """
+
+
class Initgroups(Setting):
    # "initgroups" (--initgroups): also set the user's supplementary
    # group access list.
    name = "initgroups"
    section = "Server Mechanics"
    cli = ["--initgroups"]
    validator = validate_bool
    action = 'store_true'
    default = False

    desc = """\
        If true, set the worker process's group access list with all of the
        groups of which the specified username is a member, plus the specified
        group id.

        .. versionadded:: 19.7
        """
+
+
class TmpUploadDir(Setting):
    # "tmp_upload_dir": directory for temporary request data (no CLI flag).
    name = "tmp_upload_dir"
    section = "Server Mechanics"
    meta = "DIR"
    validator = validate_string
    default = None
    desc = """\
        Directory to store temporary request data as they are read.

        This may disappear in the near future.

        This path should be writable by the process permissions set for Gunicorn
        workers. If not specified, Gunicorn will choose a system generated
        temporary directory.
        """
+
+
class SecureSchemeHeader(Setting):
    # "secure_scheme_headers": header/value pairs a trusted proxy uses to
    # signal an HTTPS request (config-file only; no CLI flag).
    name = "secure_scheme_headers"
    section = "Server Mechanics"
    validator = validate_dict
    default = {
        "X-FORWARDED-PROTOCOL": "ssl",
        "X-FORWARDED-PROTO": "https",
        "X-FORWARDED-SSL": "on"
    }
    desc = """\

        A dictionary containing headers and values that the front-end proxy
        uses to indicate HTTPS requests. If the source IP is permitted by
        :ref:`forwarded-allow-ips` (below), *and* at least one request header matches
        a key-value pair listed in this dictionary, then Gunicorn will set
        ``wsgi.url_scheme`` to ``https``, so your application can tell that the
        request is secure.

        If the other headers listed in this dictionary are not present in the request, they will be ignored,
        but if the other headers are present and do not match the provided values, then
        the request will fail to parse. See the note below for more detailed examples of this behaviour.

        The dictionary should map upper-case header names to exact string
        values. The value comparisons are case-sensitive, unlike the header
        names, so make sure they're exactly what your front-end proxy sends
        when handling HTTPS requests.

        It is important that your front-end proxy configuration ensures that
        the headers defined here can not be passed directly from the client.
        """
+
+
class ForwardedAllowIPS(Setting):
    # "forwarded_allow_ips" (--forwarded-allow-ips): proxies trusted to set
    # the secure-scheme headers; default from $FORWARDED_ALLOW_IPS.
    name = "forwarded_allow_ips"
    section = "Server Mechanics"
    cli = ["--forwarded-allow-ips"]
    meta = "STRING"
    validator = validate_string_to_addr_list
    default = os.environ.get("FORWARDED_ALLOW_IPS", "127.0.0.1,::1")
    desc = """\
        Front-end's IPs from which allowed to handle set secure headers.
        (comma separated).

        Set to ``*`` to disable checking of front-end IPs. This is useful for setups
        where you don't know in advance the IP address of front-end, but
        instead have ensured via other means that only your
        authorized front-ends can access Gunicorn.

        By default, the value of the ``FORWARDED_ALLOW_IPS`` environment
        variable. If it is not defined, the default is ``"127.0.0.1,::1"``.

        .. note::

            This option does not affect UNIX socket connections. Connections not associated with
            an IP address are treated as allowed, unconditionally.

        .. note::

            The interplay between the request headers, the value of ``forwarded_allow_ips``, and the value of
            ``secure_scheme_headers`` is complex. Various scenarios are documented below to further elaborate.
            In each case, we have a request from the remote address 134.213.44.18, and the default value of
            ``secure_scheme_headers``:

            .. code::

                secure_scheme_headers = {
                    'X-FORWARDED-PROTOCOL': 'ssl',
                    'X-FORWARDED-PROTO': 'https',
                    'X-FORWARDED-SSL': 'on'
                }


            .. list-table::
                :header-rows: 1
                :align: center
                :widths: auto

                * - ``forwarded-allow-ips``
                  - Secure Request Headers
                  - Result
                  - Explanation
                * - .. code::

                        ["127.0.0.1"]
                  - .. code::

                        X-Forwarded-Proto: https
                  - .. code::

                        wsgi.url_scheme = "http"
                  - IP address was not allowed
                * - .. code::

                        "*"
                  - <none>
                  - .. code::

                        wsgi.url_scheme = "http"
                  - IP address allowed, but no secure headers provided
                * - .. code::

                        "*"
                  - .. code::

                        X-Forwarded-Proto: https
                  - .. code::

                        wsgi.url_scheme = "https"
                  - IP address allowed, one request header matched
                * - .. code::

                        ["134.213.44.18"]
                  - .. code::

                        X-Forwarded-Ssl: on
                        X-Forwarded-Proto: http
                  - ``InvalidSchemeHeaders()`` raised
                  - IP address allowed, but the two secure headers disagreed on if HTTPS was used


        """
+
+
class AccessLog(Setting):
    # "accesslog" (--access-logfile): access log destination ('-' = stdout).
    name = "accesslog"
    section = "Logging"
    cli = ["--access-logfile"]
    meta = "FILE"
    validator = validate_string
    default = None
    desc = """\
        The Access log file to write to.

        ``'-'`` means log to stdout.
        """
+
+
class DisableRedirectAccessToSyslog(Setting):
    # "disable_redirect_access_to_syslog": keep access logs out of syslog.
    name = "disable_redirect_access_to_syslog"
    section = "Logging"
    cli = ["--disable-redirect-access-to-syslog"]
    validator = validate_bool
    action = 'store_true'
    default = False
    desc = """\
    Disable redirect access logs to syslog.

    .. versionadded:: 19.8
    """
+
+
class AccessLogFormat(Setting):
    # "access_log_format" (--access-logformat): printf-style access log
    # format string; identifiers documented in ``desc``.
    name = "access_log_format"
    section = "Logging"
    cli = ["--access-logformat"]
    meta = "STRING"
    validator = validate_string
    default = '%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s"'
    desc = """\
        The access log format.

        ===========  ===========
        Identifier   Description
        ===========  ===========
        h            remote address
        l            ``'-'``
        u            user name (if HTTP Basic auth used)
        t            date of the request
        r            status line (e.g. ``GET / HTTP/1.1``)
        m            request method
        U            URL path without query string
        q            query string
        H            protocol
        s            status
        B            response length
        b            response length or ``'-'`` (CLF format)
        f            referrer (note: header is ``referer``)
        a            user agent
        T            request time in seconds
        M            request time in milliseconds
        D            request time in microseconds
        L            request time in decimal seconds
        p            process ID
        {header}i    request header
        {header}o    response header
        {variable}e  environment variable
        ===========  ===========

        Use lowercase for header and environment variable names, and put
        ``{...}x`` names inside ``%(...)s``. For example::

            %({x-forwarded-for}i)s
        """
+
+
class ErrorLog(Setting):
    # Logging setting: file to receive the error log; default "-" = stderr.
    name = "errorlog"
    section = "Logging"
    cli = ["--error-logfile", "--log-file"]
    meta = "FILE"
    validator = validate_string
    default = '-'
    desc = """\
        The Error log file to write to.

        Using ``'-'`` for FILE makes gunicorn log to stderr.

        .. versionchanged:: 19.2
           Log to stderr by default.

        """
+
+
class Loglevel(Setting):
    # Logging setting: minimum severity written to the error log.
    name = "loglevel"
    section = "Logging"
    cli = ["--log-level"]
    meta = "LEVEL"
    validator = validate_string
    default = "info"
    desc = """\
        The granularity of Error log outputs.

        Valid level names are:

        * ``'debug'``
        * ``'info'``
        * ``'warning'``
        * ``'error'``
        * ``'critical'``
        """
+
+
class CaptureOutput(Setting):
    # Logging flag: redirect the process's stdout/stderr into the error log.
    name = "capture_output"
    section = "Logging"
    cli = ["--capture-output"]
    validator = validate_bool
    action = 'store_true'
    default = False
    desc = """\
        Redirect stdout/stderr to specified file in :ref:`errorlog`.

        .. versionadded:: 19.6
        """
+
+
class LoggerClass(Setting):
    # Logging setting: dotted path to the logger implementation; validated
    # as an importable class.
    name = "logger_class"
    section = "Logging"
    cli = ["--logger-class"]
    meta = "STRING"
    validator = validate_class
    default = "gunicorn.glogging.Logger"
    desc = """\
        The logger you want to use to log events in Gunicorn.

        The default class (``gunicorn.glogging.Logger``) handles most
        normal usages in logging. It provides error and access logging.

        You can provide your own logger by giving Gunicorn a Python path to a
        class that quacks like ``gunicorn.glogging.Logger``.
        """
+
+
class LogConfig(Setting):
    # Logging setting: path to a logging config file in the stdlib
    # fileConfig format.
    name = "logconfig"
    section = "Logging"
    cli = ["--log-config"]
    meta = "FILE"
    validator = validate_string
    default = None
    desc = """\
    The log config file to use.
    Gunicorn uses the standard Python logging module's Configuration
    file format.
    """
+
+
class LogConfigDict(Setting):
    # Logging setting: dictConfig-style dict; config-file only (no cli
    # attribute, so it is not exposed as a command-line flag).
    name = "logconfig_dict"
    section = "Logging"
    validator = validate_dict
    default = {}
    desc = """\
    The log config dictionary to use, using the standard Python
    logging module's dictionary configuration format. This option
    takes precedence over the :ref:`logconfig` and :ref:`logconfig-json` options,
    which uses the older file configuration format and JSON
    respectively.

    Format: https://docs.python.org/3/library/logging.config.html#logging.config.dictConfig

    For more context you can look at the default configuration dictionary for logging,
    which can be found at ``gunicorn.glogging.CONFIG_DEFAULTS``.

    .. versionadded:: 19.8
    """
+
+
class LogConfigJson(Setting):
    # Logging setting: path to a JSON file holding a dictConfig-style
    # logging configuration.
    name = "logconfig_json"
    section = "Logging"
    cli = ["--log-config-json"]
    meta = "FILE"
    validator = validate_string
    default = None
    desc = """\
    The log config to read config from a JSON file

    Format: https://docs.python.org/3/library/logging.config.html#logging.config.jsonConfig

    .. versionadded:: 20.0
    """
+
+
class SyslogTo(Setting):
    # Logging setting: destination address for syslog messages.
    name = "syslog_addr"
    section = "Logging"
    cli = ["--log-syslog-to"]
    meta = "SYSLOG_ADDR"
    validator = validate_string

    # The local syslog socket lives in a different place per OS, so the
    # default is chosen at import time from PLATFORM; anything unknown
    # falls back to the standard UDP syslog port.
    if PLATFORM == "darwin":
        default = "unix:///var/run/syslog"
    elif PLATFORM in ('freebsd', 'dragonfly', ):
        default = "unix:///var/run/log"
    elif PLATFORM == "openbsd":
        default = "unix:///dev/log"
    else:
        default = "udp://localhost:514"

    desc = """\
    Address to send syslog messages.

    Address is a string of the form:

    * ``unix://PATH#TYPE`` : for unix domain socket. ``TYPE`` can be ``stream``
      for the stream driver or ``dgram`` for the dgram driver.
      ``stream`` is the default.
    * ``udp://HOST:PORT`` : for UDP sockets
    * ``tcp://HOST:PORT`` : for TCP sockets

    """
+
+
class Syslog(Setting):
    # Logging flag: send Gunicorn's own logs to syslog.
    name = "syslog"
    section = "Logging"
    cli = ["--log-syslog"]
    validator = validate_bool
    action = 'store_true'
    default = False
    desc = """\
    Send *Gunicorn* logs to syslog.

    .. versionchanged:: 19.8
       You can now disable sending access logs by using the
       :ref:`disable-redirect-access-to-syslog` setting.
    """
+
+
class SyslogPrefix(Setting):
    # Logging setting: program-name suffix used for syslog entries.
    name = "syslog_prefix"
    section = "Logging"
    cli = ["--log-syslog-prefix"]
    meta = "SYSLOG_PREFIX"
    validator = validate_string
    default = None
    desc = """\
    Makes Gunicorn use the parameter as program-name in the syslog entries.

    All entries will be prefixed by ``gunicorn.<prefix>``. By default the
    program name is the name of the process.
    """
+
+
class SyslogFacility(Setting):
    # Logging setting: syslog facility name (default "user").
    name = "syslog_facility"
    section = "Logging"
    cli = ["--log-syslog-facility"]
    meta = "SYSLOG_FACILITY"
    validator = validate_string
    default = "user"
    desc = """\
    Syslog facility name
    """
+
+
class EnableStdioInheritance(Setting):
    # Logging flag: keep stdio file descriptors inherited in daemon mode.
    name = "enable_stdio_inheritance"
    section = "Logging"
    cli = ["-R", "--enable-stdio-inheritance"]
    validator = validate_bool
    default = False
    action = "store_true"
    # Fix the user-visible help text: "you can to set" -> "you can set",
    # and drop the stray space before the period.
    desc = """\
    Enable stdio inheritance.

    Enable inheritance for stdio file descriptors in daemon mode.

    Note: To disable the Python stdout buffering, you can set the user
    environment variable ``PYTHONUNBUFFERED``.
    """
+
+
+# statsD monitoring
class StatsdHost(Setting):
    # Logging setting: StatsD server address (unix socket or HOST:PORT);
    # None disables StatsD emission.
    name = "statsd_host"
    section = "Logging"
    cli = ["--statsd-host"]
    meta = "STATSD_ADDR"
    default = None
    validator = validate_statsd_address
    desc = """\
    The address of the StatsD server to log to.

    Address is a string of the form:

    * ``unix://PATH`` : for a unix domain socket.
    * ``HOST:PORT`` : for a network address

    .. versionadded:: 19.1
    """
+
+
+# Datadog Statsd (dogstatsd) tags. https://docs.datadoghq.com/developers/dogstatsd/
class DogstatsdTags(Setting):
    # Logging setting: comma-delimited dogstatsd tags appended to metrics.
    name = "dogstatsd_tags"
    section = "Logging"
    cli = ["--dogstatsd-tags"]
    meta = "DOGSTATSD_TAGS"
    default = ""
    validator = validate_string
    desc = """\
    A comma-delimited list of datadog statsd (dogstatsd) tags to append to
    statsd metrics.

    .. versionadded:: 20
    """
+
+
class StatsdPrefix(Setting):
    # Logging setting: prefix prepended to emitted statsd metric names.
    name = "statsd_prefix"
    section = "Logging"
    cli = ["--statsd-prefix"]
    meta = "STATSD_PREFIX"
    default = ""
    validator = validate_string
    desc = """\
    Prefix to use when emitting statsd metrics (a trailing ``.`` is added,
    if not provided).

    .. versionadded:: 19.2
    """
+
+
class Procname(Setting):
    # Process-naming setting: base string for setproctitle; falls back to
    # default_proc_name when unset.
    name = "proc_name"
    section = "Process Naming"
    cli = ["-n", "--name"]
    meta = "STRING"
    validator = validate_string
    default = None
    desc = """\
        A base to use with setproctitle for process naming.

        This affects things like ``ps`` and ``top``. If you're going to be
        running more than one instance of Gunicorn you'll probably want to set a
        name to tell them apart. This requires that you install the setproctitle
        module.

        If not set, the *default_proc_name* setting will be used.
        """
+
+
class DefaultProcName(Setting):
    # Internal process-naming setting (no cli attribute): fallback process
    # name used when proc_name is not set.
    name = "default_proc_name"
    section = "Process Naming"
    validator = validate_string
    default = "gunicorn"
    desc = """\
        Internal setting that is adjusted for each type of application.
        """
+
+
class PythonPath(Setting):
    # Server-mechanics setting: extra directories (comma-separated) to add
    # to sys.path.
    name = "pythonpath"
    section = "Server Mechanics"
    cli = ["--pythonpath"]
    meta = "STRING"
    validator = validate_string
    default = None
    desc = """\
        A comma-separated list of directories to add to the Python path.

        e.g.
        ``'/home/djangoprojects/myproject,/home/python/mylibrary'``.
        """
+
+
class Paste(Setting):
    # Server-mechanics setting: PasteDeploy config file, optionally with a
    # "#section" suffix selecting an app section.
    name = "paste"
    section = "Server Mechanics"
    cli = ["--paste", "--paster"]
    meta = "STRING"
    validator = validate_string
    default = None
    desc = """\
        Load a PasteDeploy config file. The argument may contain a ``#``
        symbol followed by the name of an app section from the config file,
        e.g. ``production.ini#admin``.

        At this time, using alternate server blocks is not supported. Use the
        command line arguments to control server configuration instead.
        """
+
+
class OnStarting(Setting):
    # Server hook: callback fired before the master process initializes.
    name = "on_starting"
    section = "Server Hooks"
    validator = validate_callable(1)
    type = callable

    # Default no-op hook; wrapped in staticmethod below so attribute access
    # on the class does not produce a bound method.
    def on_starting(server):
        pass
    default = staticmethod(on_starting)
    desc = """\
        Called just before the master process is initialized.

        The callable needs to accept a single instance variable for the Arbiter.
        """
+
+
class OnReload(Setting):
    # Server hook: callback fired when workers are recycled on SIGHUP.
    name = "on_reload"
    section = "Server Hooks"
    validator = validate_callable(1)
    type = callable

    # Default no-op hook; staticmethod keeps it unbound on attribute access.
    def on_reload(server):
        pass
    default = staticmethod(on_reload)
    desc = """\
        Called to recycle workers during a reload via SIGHUP.

        The callable needs to accept a single instance variable for the Arbiter.
        """
+
+
class WhenReady(Setting):
    # Server hook: callback fired once the server has started.
    name = "when_ready"
    section = "Server Hooks"
    validator = validate_callable(1)
    type = callable

    # Default no-op hook; staticmethod keeps it unbound on attribute access.
    def when_ready(server):
        pass
    default = staticmethod(when_ready)
    desc = """\
        Called just after the server is started.

        The callable needs to accept a single instance variable for the Arbiter.
        """
+
+
class Prefork(Setting):
    # Server hook: callback fired just before each worker fork.
    name = "pre_fork"
    section = "Server Hooks"
    validator = validate_callable(2)
    type = callable

    # Default no-op hook; staticmethod keeps it unbound on attribute access.
    def pre_fork(server, worker):
        pass
    default = staticmethod(pre_fork)
    desc = """\
        Called just before a worker is forked.

        The callable needs to accept two instance variables for the Arbiter and
        new Worker.
        """
+
+
class Postfork(Setting):
    # Server hook: callback fired just after each worker fork.
    name = "post_fork"
    section = "Server Hooks"
    validator = validate_callable(2)
    type = callable

    # Default no-op hook; staticmethod keeps it unbound on attribute access.
    def post_fork(server, worker):
        pass
    default = staticmethod(post_fork)
    desc = """\
        Called just after a worker has been forked.

        The callable needs to accept two instance variables for the Arbiter and
        new Worker.
        """
+
+
class PostWorkerInit(Setting):
    # Server hook: callback fired after a worker initialized the app.
    name = "post_worker_init"
    section = "Server Hooks"
    validator = validate_callable(1)
    type = callable

    # Default no-op hook; staticmethod keeps it unbound on attribute access.
    def post_worker_init(worker):
        pass

    default = staticmethod(post_worker_init)
    desc = """\
        Called just after a worker has initialized the application.

        The callable needs to accept one instance variable for the initialized
        Worker.
        """
+
+
class WorkerInt(Setting):
    # Server hook: callback fired when a worker exits on SIGINT/SIGQUIT.
    name = "worker_int"
    section = "Server Hooks"
    validator = validate_callable(1)
    type = callable

    # Default no-op hook; staticmethod keeps it unbound on attribute access.
    def worker_int(worker):
        pass

    default = staticmethod(worker_int)
    desc = """\
        Called just after a worker exited on SIGINT or SIGQUIT.

        The callable needs to accept one instance variable for the initialized
        Worker.
        """
+
+
class WorkerAbort(Setting):
    # Server hook: callback fired when a worker receives SIGABRT.
    name = "worker_abort"
    section = "Server Hooks"
    validator = validate_callable(1)
    type = callable

    # Default no-op hook; staticmethod keeps it unbound on attribute access.
    def worker_abort(worker):
        pass

    default = staticmethod(worker_abort)
    desc = """\
        Called when a worker received the SIGABRT signal.

        This call generally happens on timeout.

        The callable needs to accept one instance variable for the initialized
        Worker.
        """
+
+
class PreExec(Setting):
    # Server hook: callback fired before a new master process is forked.
    name = "pre_exec"
    section = "Server Hooks"
    validator = validate_callable(1)
    type = callable

    # Default no-op hook; staticmethod keeps it unbound on attribute access.
    def pre_exec(server):
        pass
    default = staticmethod(pre_exec)
    desc = """\
        Called just before a new master process is forked.

        The callable needs to accept a single instance variable for the Arbiter.
        """
+
+
class PreRequest(Setting):
    # Server hook: callback fired before a worker handles a request; the
    # default logs the method and path at debug level.
    name = "pre_request"
    section = "Server Hooks"
    validator = validate_callable(2)
    type = callable

    def pre_request(worker, req):
        worker.log.debug("%s %s", req.method, req.path)
    default = staticmethod(pre_request)
    desc = """\
        Called just before a worker processes the request.

        The callable needs to accept two instance variables for the Worker and
        the Request.
        """
+
+
class PostRequest(Setting):
    # Server hook: callback fired after a worker handled a request.
    name = "post_request"
    section = "Server Hooks"
    validator = validate_post_request
    type = callable

    # Default no-op hook; staticmethod keeps it unbound on attribute access.
    def post_request(worker, req, environ, resp):
        pass
    default = staticmethod(post_request)
    # Help text corrected: the hook takes four arguments (the old text said
    # "two"), matching the default callable's signature above.
    desc = """\
        Called after a worker processes the request.

        The callable needs to accept four instance variables for the Worker,
        the Request, the WSGI environ and the Response.
        """
+
+
class ChildExit(Setting):
    # Server hook: callback fired in the master when a worker exits.
    name = "child_exit"
    section = "Server Hooks"
    validator = validate_callable(2)
    type = callable

    # Default no-op hook; staticmethod keeps it unbound on attribute access.
    def child_exit(server, worker):
        pass
    default = staticmethod(child_exit)
    desc = """\
        Called just after a worker has been exited, in the master process.

        The callable needs to accept two instance variables for the Arbiter and
        the just-exited Worker.

        .. versionadded:: 19.7
        """
+
+
class WorkerExit(Setting):
    # Server hook: callback fired in the worker process when it exits.
    name = "worker_exit"
    section = "Server Hooks"
    validator = validate_callable(2)
    type = callable

    # Default no-op hook; staticmethod keeps it unbound on attribute access.
    def worker_exit(server, worker):
        pass
    default = staticmethod(worker_exit)
    desc = """\
        Called just after a worker has been exited, in the worker process.

        The callable needs to accept two instance variables for the Arbiter and
        the just-exited Worker.
        """
+
+
class NumWorkersChanged(Setting):
    # Server hook: callback fired when the worker count changes.
    name = "nworkers_changed"
    section = "Server Hooks"
    validator = validate_callable(3)
    type = callable

    # Default no-op hook; staticmethod keeps it unbound on attribute access.
    def nworkers_changed(server, new_value, old_value):
        pass
    default = staticmethod(nworkers_changed)
    desc = """\
        Called just after *num_workers* has been changed.

        The callable needs to accept an instance variable of the Arbiter and
        two integers of number of workers after and before change.

        If the number of workers is set for the first time, *old_value* would
        be ``None``.
        """
+
+
class OnExit(Setting):
    # Server hook: callback fired just before Gunicorn exits.
    name = "on_exit"
    section = "Server Hooks"
    validator = validate_callable(1)
    # Declare the value type like every other hook setting in this file;
    # this was the only hook missing the attribute.
    type = callable

    # Default no-op hook; staticmethod keeps it unbound on attribute access.
    def on_exit(server):
        pass

    default = staticmethod(on_exit)
    desc = """\
        Called just before exiting Gunicorn.

        The callable needs to accept a single instance variable for the Arbiter.
        """
+
+
class NewSSLContext(Setting):
    # Server hook: factory invoked whenever an SSLContext is needed; the
    # default simply returns the context built by the provided factory.
    name = "ssl_context"
    section = "Server Hooks"
    validator = validate_callable(2)
    type = callable

    def ssl_context(config, default_ssl_context_factory):
        return default_ssl_context_factory()

    default = staticmethod(ssl_context)
    desc = """\
        Called when SSLContext is needed.

        Allows customizing SSL context.

        The callable needs to accept an instance variable for the Config and
        a factory function that returns default SSLContext which is initialized
        with certificates, private key, cert_reqs, and ciphers according to
        config and can be further customized by the callable.
        The callable needs to return SSLContext object.

        Following example shows a configuration file that sets the minimum TLS version to 1.3:

        .. code-block:: python

            def ssl_context(conf, default_ssl_context_factory):
                import ssl
                context = default_ssl_context_factory()
                context.minimum_version = ssl.TLSVersion.TLSv1_3
                return context

        .. versionadded:: 21.0
        """
+
+
class ProxyProtocol(Setting):
    # Server-mechanics flag: enable PROXY-protocol detection on connections.
    name = "proxy_protocol"
    section = "Server Mechanics"
    cli = ["--proxy-protocol"]
    validator = validate_bool
    default = False
    action = "store_true"
    desc = """\
        Enable detect PROXY protocol (PROXY mode).

        Allow using HTTP and Proxy together. It may be useful for work with
        stunnel as HTTPS frontend and Gunicorn as HTTP server.

        PROXY protocol: http://haproxy.1wt.eu/download/1.5/doc/proxy-protocol.txt

        Example for stunnel config::

            [https]
            protocol = proxy
            accept  = 443
            connect = 80
            cert = /etc/ssl/certs/stunnel.pem
            key = /etc/ssl/certs/stunnel.key
        """
+
+
class ProxyAllowFrom(Setting):
    # Server-mechanics setting: front-end IPs allowed to send PROXY
    # requests; "*" disables the check. Defaults to loopback (v4 and v6).
    name = "proxy_allow_ips"
    section = "Server Mechanics"
    cli = ["--proxy-allow-from"]
    validator = validate_string_to_addr_list
    default = "127.0.0.1,::1"
    desc = """\
        Front-end's IPs from which allowed accept proxy requests (comma separated).

        Set to ``*`` to disable checking of front-end IPs. This is useful for setups
        where you don't know in advance the IP address of front-end, but
        instead have ensured via other means that only your
        authorized front-ends can access Gunicorn.

        .. note::

            This option does not affect UNIX socket connections. Connections not associated with
            an IP address are treated as allowed, unconditionally.
        """
+
+
class KeyFile(Setting):
    # SSL setting: path to the private key file; None disables.
    name = "keyfile"
    section = "SSL"
    cli = ["--keyfile"]
    meta = "FILE"
    validator = validate_string
    default = None
    desc = """\
    SSL key file
    """
+
+
class CertFile(Setting):
    # SSL setting: path to the certificate file; None disables.
    name = "certfile"
    section = "SSL"
    cli = ["--certfile"]
    meta = "FILE"
    validator = validate_string
    default = None
    desc = """\
    SSL certificate file
    """
+
+
class SSLVersion(Setting):
    # SSL setting: protocol version. Deprecated and, per the help text
    # below, currently ignored in favor of the ssl_context hook.
    name = "ssl_version"
    section = "SSL"
    cli = ["--ssl-version"]
    validator = validate_ssl_version

    # Prefer the auto-negotiating PROTOCOL_TLS (its older spelling is
    # PROTOCOL_SSLv23).  The previous code unconditionally re-assigned
    # ``default = ssl.PROTOCOL_SSLv23`` right after this check, which made
    # the check dead code and contradicted the ``.. versionchanged::
    # 20.0.1`` note in the help text below; that override is removed.
    if hasattr(ssl, "PROTOCOL_TLS"):
        default = ssl.PROTOCOL_TLS
    else:
        default = ssl.PROTOCOL_SSLv23

    desc = """\
    SSL version to use (see stdlib ssl module's).

    .. deprecated:: 21.0
       The option is deprecated and it is currently ignored. Use :ref:`ssl-context` instead.

    ============= ============
    --ssl-version Description
    ============= ============
    SSLv3         SSLv3 is not-secure and is strongly discouraged.
    SSLv23        Alias for TLS. Deprecated in Python 3.6, use TLS.
    TLS           Negotiate highest possible version between client/server.
                  Can yield SSL. (Python 3.6+)
    TLSv1         TLS 1.0
    TLSv1_1       TLS 1.1 (Python 3.4+)
    TLSv1_2       TLS 1.2 (Python 3.4+)
    TLS_SERVER    Auto-negotiate the highest protocol version like TLS,
                  but only support server-side SSLSocket connections.
                  (Python 3.6+)
    ============= ============

    .. versionchanged:: 19.7
       The default value has been changed from ``ssl.PROTOCOL_TLSv1`` to
       ``ssl.PROTOCOL_SSLv23``.
    .. versionchanged:: 20.0
       This setting now accepts string names based on ``ssl.PROTOCOL_``
       constants.
    .. versionchanged:: 20.0.1
       The default value has been changed from ``ssl.PROTOCOL_SSLv23`` to
       ``ssl.PROTOCOL_TLS`` when Python >= 3.6 .
    """
+
+
class CertReqs(Setting):
    # SSL setting: client-certificate requirement level, mirroring the
    # stdlib ssl.CERT_* integer constants; default = no verification.
    name = "cert_reqs"
    section = "SSL"
    cli = ["--cert-reqs"]
    validator = validate_pos_int
    default = ssl.CERT_NONE
    desc = """\
    Whether client certificate is required (see stdlib ssl module's)

    ===========  ===========================
    --cert-reqs      Description
    ===========  ===========================
    `0`          no client verification
    `1`          ssl.CERT_OPTIONAL
    `2`          ssl.CERT_REQUIRED
    ===========  ===========================
    """
+
+
class CACerts(Setting):
    # SSL setting: path to the CA-certificates bundle; None disables.
    name = "ca_certs"
    section = "SSL"
    cli = ["--ca-certs"]
    meta = "FILE"
    validator = validate_string
    default = None
    desc = """\
    CA certificates file
    """
+
+
class SuppressRaggedEOFs(Setting):
    # SSL flag: suppress ragged EOFs (mirrors the stdlib ssl option);
    # note this one defaults to True unlike most boolean settings here.
    name = "suppress_ragged_eofs"
    section = "SSL"
    cli = ["--suppress-ragged-eofs"]
    action = "store_true"
    default = True
    validator = validate_bool
    desc = """\
    Suppress ragged EOFs (see stdlib ssl module's)
    """
+
+
class DoHandshakeOnConnect(Setting):
    # SSL flag: perform the TLS handshake immediately on socket connect.
    name = "do_handshake_on_connect"
    section = "SSL"
    cli = ["--do-handshake-on-connect"]
    validator = validate_bool
    action = "store_true"
    default = False
    desc = """\
    Whether to perform SSL handshake on socket connect (see stdlib ssl module's)
    """
+
+
class Ciphers(Setting):
    # SSL setting: OpenSSL cipher list string; None keeps Python's default.
    name = "ciphers"
    section = "SSL"
    cli = ["--ciphers"]
    validator = validate_string
    default = None
    desc = """\
    SSL Cipher suite to use, in the format of an OpenSSL cipher list.

    By default we use the default cipher list from Python's ``ssl`` module,
    which contains ciphers considered strong at the time of each Python
    release.

    As a recommended alternative, the Open Web App Security Project (OWASP)
    offers `a vetted set of strong cipher strings rated A+ to C-
    <https://www.owasp.org/index.php/TLS_Cipher_String_Cheat_Sheet>`_.
    OWASP provides details on user-agent compatibility at each security level.

    See the `OpenSSL Cipher List Format Documentation
    <https://www.openssl.org/docs/manmaster/man1/ciphers.html#CIPHER-LIST-FORMAT>`_
    for details on the format of an OpenSSL cipher list.
    """
+
+
class PasteGlobalConf(Setting):
    # Server-mechanics setting: repeatable key=value pairs forwarded to the
    # PasteDeploy entrypoint ("append" action collects multiple uses).
    name = "raw_paste_global_conf"
    action = "append"
    section = "Server Mechanics"
    cli = ["--paste-global"]
    meta = "CONF"
    validator = validate_list_string
    default = []

    desc = """\
        Set a PasteDeploy global config variable in ``key=value`` form.

        The option can be specified multiple times.

        The variables are passed to the PasteDeploy entrypoint. Example::

            $ gunicorn -b 127.0.0.1:8000 --paste development.ini --paste-global FOO=1 --paste-global BAR=2

        .. versionadded:: 19.7
        """
+
+
class PermitObsoleteFolding(Setting):
    # Server-mechanics flag: accept obsolete (RFC 7230-deprecated) header
    # line folding in requests.
    name = "permit_obsolete_folding"
    section = "Server Mechanics"
    cli = ["--permit-obsolete-folding"]
    validator = validate_bool
    action = "store_true"
    default = False
    desc = """\
        Permit requests employing obsolete HTTP line folding mechanism

        The folding mechanism was deprecated by rfc7230 Section 3.2.4 and will not be
         employed in HTTP request headers from standards-compliant HTTP clients.

        This option is provided to diagnose backwards-incompatible changes.
        Use with care and only if necessary. Temporary; the precise effect of this option may
        change in a future version, or it may be removed altogether.

        .. versionadded:: 23.0.0
        """
+
+
class StripHeaderSpaces(Setting):
    # Server-mechanics flag (deprecated): tolerate whitespace between a
    # header name and the colon.
    name = "strip_header_spaces"
    section = "Server Mechanics"
    cli = ["--strip-header-spaces"]
    validator = validate_bool
    action = "store_true"
    default = False
    # Help text corrected: duplicated word "the the" removed.
    desc = """\
        Strip spaces present between the header name and the ``:``.

        This is known to induce vulnerabilities and is not compliant with the HTTP/1.1 standard.
        See https://portswigger.net/research/http-desync-attacks-request-smuggling-reborn.

        Use with care and only if necessary. Deprecated; scheduled for removal in 25.0.0

        .. versionadded:: 20.0.1
        """
+
+
class PermitUnconventionalHTTPMethod(Setting):
    # Server-mechanics flag: relax the nonstandard restrictions on request
    # method tokens (length, case, characters).
    name = "permit_unconventional_http_method"
    section = "Server Mechanics"
    cli = ["--permit-unconventional-http-method"]
    validator = validate_bool
    action = "store_true"
    default = False
    desc = """\
        Permit HTTP methods not matching conventions, such as IANA registration guidelines

        This permits request methods of length less than 3 or more than 20,
        methods with lowercase characters or methods containing the # character.
        HTTP methods are case sensitive by definition, and merely uppercase by convention.

        If unset, Gunicorn will apply nonstandard restrictions and cause 400 response status
        in cases where otherwise 501 status is expected. While this option does modify that
        behaviour, it should not be depended upon to guarantee standards-compliant behaviour.
        Rather, it is provided temporarily, to assist in diagnosing backwards-incompatible
        changes around the incomplete application of those restrictions.

        Use with care and only if necessary. Temporary; scheduled for removal in 24.0.0

        .. versionadded:: 22.0.0
        """
+
+
class PermitUnconventionalHTTPVersion(Setting):
    # Server-mechanics flag: accept unusual HTTP/1 version strings instead
    # of refusing likely-malformed request lines.
    name = "permit_unconventional_http_version"
    section = "Server Mechanics"
    cli = ["--permit-unconventional-http-version"]
    validator = validate_bool
    action = "store_true"
    default = False
    desc = """\
        Permit HTTP version not matching conventions of 2023

        This disables the refusal of likely malformed request lines.
        It is unusual to specify HTTP 1 versions other than 1.0 and 1.1.

        This option is provided to diagnose backwards-incompatible changes.
        Use with care and only if necessary. Temporary; the precise effect of this option may
        change in a future version, or it may be removed altogether.

        .. versionadded:: 22.0.0
        """
+
+
class CasefoldHTTPMethod(Setting):
    # Server-mechanics flag (deprecated): uppercase incoming HTTP methods
    # to match older gunicorn behaviour.
    name = "casefold_http_method"
    section = "Server Mechanics"
    cli = ["--casefold-http-method"]
    validator = validate_bool
    action = "store_true"
    default = False
    desc = """\
         Transform received HTTP methods to uppercase

         HTTP methods are case sensitive by definition, and merely uppercase by convention.

         This option is provided because previous versions of gunicorn defaulted to this behaviour.

         Use with care and only if necessary. Deprecated; scheduled for removal in 24.0.0

         .. versionadded:: 22.0.0
         """
+
+
def validate_header_map_behaviour(val):
    """Validate a header-map behaviour value.

    Returns the canonical lowercase form (``"drop"``, ``"refuse"`` or
    ``"dangerous"``) of *val*, ignoring case and surrounding whitespace,
    or ``None`` when *val* is ``None``.

    Raises ``TypeError`` for non-string input and ``ValueError`` for an
    unrecognized behaviour name.
    """
    # FIXME: refactor all of this subclassing stdlib argparse

    if val is None:
        return None

    if not isinstance(val, str):
        raise TypeError("Invalid type for casting: %s" % val)

    # Normalize once instead of recomputing lower().strip() per comparison.
    behaviour = val.lower().strip()
    if behaviour in ("drop", "refuse", "dangerous"):
        return behaviour
    raise ValueError("Invalid header map behaviour: %s" % val)
+
+
class ForwarderHeaders(Setting):
    # Server-mechanics setting: comma-separated header names (upper-case)
    # a trusted proxy may set, copied into the WSGI environ.
    name = "forwarder_headers"
    section = "Server Mechanics"
    cli = ["--forwarder-headers"]
    validator = validate_string_to_list
    default = "SCRIPT_NAME,PATH_INFO"
    desc = """\

        A list containing upper-case header field names that the front-end proxy
        (see :ref:`forwarded-allow-ips`) sets, to be used in WSGI environment.

        This option has no effect for headers not present in the request.

        This option can be used to transfer ``SCRIPT_NAME``, ``PATH_INFO``
        and ``REMOTE_USER``.

        It is important that your front-end proxy configuration ensures that
        the headers defined here can not be passed directly from the client.
        """
+
+
class HeaderMap(Setting):
    # Server-mechanics setting: policy ("drop" / "refuse" / "dangerous")
    # for mapping ambiguous header field names into the environ.
    name = "header_map"
    section = "Server Mechanics"
    cli = ["--header-map"]
    validator = validate_header_map_behaviour
    default = "drop"
    desc = """\
        Configure how header field names are mapped into environ

        Headers containing underscores are permitted by RFC9110,
        but gunicorn joining headers of different names into
        the same environment variable will dangerously confuse applications as to which is which.

        The safe default ``drop`` is to silently drop headers that cannot be unambiguously mapped.
        The value ``refuse`` will return an error if a request contains *any* such header.
        The value ``dangerous`` matches the previous, not advisable, behaviour of mapping different
        header field names into the same environ name.

        If the source is permitted as explained in :ref:`forwarded-allow-ips`, *and* the header name is
        present in :ref:`forwarder-headers`, the header is mapped into environment regardless of
        the state of this setting.

        Use with care and only if necessary and after considering if your problem could
        instead be solved by specifically renaming or rewriting only the intended headers
        on a proxy in front of Gunicorn.

        .. versionadded:: 22.0.0
        """
diff --git a/.venv/Lib/site-packages/gunicorn/debug.py b/.venv/Lib/site-packages/gunicorn/debug.py
new file mode 100644
index 0000000000000000000000000000000000000000..5fae0b4dd7f8ff8360d76e8ae09f754ad16e06b0
--- /dev/null
+++ b/.venv/Lib/site-packages/gunicorn/debug.py
@@ -0,0 +1,68 @@
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+
+"""The debug module contains utilities and functions for better
+debugging Gunicorn."""
+
+import sys
+import linecache
+import re
+import inspect
+
+__all__ = ['spew', 'unspew']
+
+_token_spliter = re.compile(r'\W+')
+
+
+class Spew:
+
+    def __init__(self, trace_names=None, show_values=True):
+        self.trace_names = trace_names
+        self.show_values = show_values
+
+    def __call__(self, frame, event, arg):
+        if event == 'line':
+            lineno = frame.f_lineno
+            if '__file__' in frame.f_globals:
+                filename = frame.f_globals['__file__']
+                if (filename.endswith('.pyc') or
+                        filename.endswith('.pyo')):
+                    filename = filename[:-1]
+                name = frame.f_globals['__name__']
+                line = linecache.getline(filename, lineno)
+            else:
+                name = '[unknown]'
+                try:
+                    src = inspect.getsourcelines(frame)
+                    line = src[lineno]
+                except OSError:
+                    line = 'Unknown code named [%s].  VM instruction #%d' % (
+                        frame.f_code.co_name, frame.f_lasti)
+            if self.trace_names is None or name in self.trace_names:
+                print('%s:%s: %s' % (name, lineno, line.rstrip()))
+                if not self.show_values:
+                    return self
+                details = []
+                tokens = _token_spliter.split(line)
+                for tok in tokens:
+                    if tok in frame.f_globals:
+                        details.append('%s=%r' % (tok, frame.f_globals[tok]))
+                    if tok in frame.f_locals:
+                        details.append('%s=%r' % (tok, frame.f_locals[tok]))
+                if details:
+                    print("\t%s" % ' '.join(details))
+        return self
+
+
+def spew(trace_names=None, show_values=False):
+    """Install a trace hook which writes incredibly detailed logs
+    about what code is being executed to stdout.
+    """
+    sys.settrace(Spew(trace_names, show_values))
+
+
+def unspew():
+    """Remove the trace hook installed by spew.
+    """
+    sys.settrace(None)
diff --git a/.venv/Lib/site-packages/gunicorn/errors.py b/.venv/Lib/site-packages/gunicorn/errors.py
new file mode 100644
index 0000000000000000000000000000000000000000..1128380803170ebff35cef90c8fa718ad47fab8e
--- /dev/null
+++ b/.venv/Lib/site-packages/gunicorn/errors.py
@@ -0,0 +1,28 @@
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+
+# We don't need to call super() in __init__ methods of our
+# BaseException and Exception classes because we also define
+# our own __str__ methods so there is no need to pass 'message'
+# to the base class to get a meaningful output from 'str(exc)'.
+# pylint: disable=super-init-not-called
+
+
+# we inherit from BaseException here to make sure to not be caught
+# at application level
+class HaltServer(BaseException):
+    def __init__(self, reason, exit_status=1):
+        self.reason = reason
+        self.exit_status = exit_status
+
+    def __str__(self):
+        return "<HaltServer %r %d>" % (self.reason, self.exit_status)
+
+
+class ConfigError(Exception):
+    """ Exception raised on config error """
+
+
+class AppImportError(Exception):
+    """ Exception raised when loading an application """
diff --git a/.venv/Lib/site-packages/gunicorn/glogging.py b/.venv/Lib/site-packages/gunicorn/glogging.py
new file mode 100644
index 0000000000000000000000000000000000000000..e34fcd5f781df5f1c1de52c8a4c0c9defdf5d165
--- /dev/null
+++ b/.venv/Lib/site-packages/gunicorn/glogging.py
@@ -0,0 +1,473 @@
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+
+import base64
+import binascii
+import json
+import time
+import logging
+logging.Logger.manager.emittedNoHandlerWarning = 1  # noqa
+from logging.config import dictConfig
+from logging.config import fileConfig
+import os
+import socket
+import sys
+import threading
+import traceback
+
+from gunicorn import util
+
+
+# syslog facility codes
+SYSLOG_FACILITIES = {
+    "auth": 4,
+    "authpriv": 10,
+    "cron": 9,
+    "daemon": 3,
+    "ftp": 11,
+    "kern": 0,
+    "lpr": 6,
+    "mail": 2,
+    "news": 7,
+    "security": 4,  # DEPRECATED
+    "syslog": 5,
+    "user": 1,
+    "uucp": 8,
+    "local0": 16,
+    "local1": 17,
+    "local2": 18,
+    "local3": 19,
+    "local4": 20,
+    "local5": 21,
+    "local6": 22,
+    "local7": 23
+}
+
+CONFIG_DEFAULTS = {
+    "version": 1,
+    "disable_existing_loggers": False,
+    "root": {"level": "INFO", "handlers": ["console"]},
+    "loggers": {
+        "gunicorn.error": {
+            "level": "INFO",
+            "handlers": ["error_console"],
+            "propagate": True,
+            "qualname": "gunicorn.error"
+        },
+
+        "gunicorn.access": {
+            "level": "INFO",
+            "handlers": ["console"],
+            "propagate": True,
+            "qualname": "gunicorn.access"
+        }
+    },
+    "handlers": {
+        "console": {
+            "class": "logging.StreamHandler",
+            "formatter": "generic",
+            "stream": "ext://sys.stdout"
+        },
+        "error_console": {
+            "class": "logging.StreamHandler",
+            "formatter": "generic",
+            "stream": "ext://sys.stderr"
+        },
+    },
+    "formatters": {
+        "generic": {
+            "format": "%(asctime)s [%(process)d] [%(levelname)s] %(message)s",
+            "datefmt": "[%Y-%m-%d %H:%M:%S %z]",
+            "class": "logging.Formatter"
+        }
+    }
+}
+
+
+def loggers():
+    """ get list of all loggers """
+    root = logging.root
+    existing = list(root.manager.loggerDict.keys())
+    return [logging.getLogger(name) for name in existing]
+
+
+class SafeAtoms(dict):
+
+    def __init__(self, atoms):
+        dict.__init__(self)
+        for key, value in atoms.items():
+            if isinstance(value, str):
+                self[key] = value.replace('"', '\\"')
+            else:
+                self[key] = value
+
+    def __getitem__(self, k):
+        if k.startswith("{"):
+            kl = k.lower()
+            if kl in self:
+                return super().__getitem__(kl)
+            else:
+                return "-"
+        if k in self:
+            return super().__getitem__(k)
+        else:
+            return '-'
+
+
+def parse_syslog_address(addr):
+
+    # unix domain socket type depends on backend
+    # SysLogHandler will try both when given None
+    if addr.startswith("unix://"):
+        sock_type = None
+
+        # set socket type only if explicitly requested
+        parts = addr.split("#", 1)
+        if len(parts) == 2:
+            addr = parts[0]
+            if parts[1] == "dgram":
+                sock_type = socket.SOCK_DGRAM
+
+        return (sock_type, addr.split("unix://")[1])
+
+    if addr.startswith("udp://"):
+        addr = addr.split("udp://")[1]
+        socktype = socket.SOCK_DGRAM
+    elif addr.startswith("tcp://"):
+        addr = addr.split("tcp://")[1]
+        socktype = socket.SOCK_STREAM
+    else:
+        raise RuntimeError("invalid syslog address")
+
+    if '[' in addr and ']' in addr:
+        host = addr.split(']')[0][1:].lower()
+    elif ':' in addr:
+        host = addr.split(':')[0].lower()
+    elif addr == "":
+        host = "localhost"
+    else:
+        host = addr.lower()
+
+    addr = addr.split(']')[-1]
+    if ":" in addr:
+        port = addr.split(':', 1)[1]
+        if not port.isdigit():
+            raise RuntimeError("%r is not a valid port number." % port)
+        port = int(port)
+    else:
+        port = 514
+
+    return (socktype, (host, port))
+
+
+class Logger:
+
+    LOG_LEVELS = {
+        "critical": logging.CRITICAL,
+        "error": logging.ERROR,
+        "warning": logging.WARNING,
+        "info": logging.INFO,
+        "debug": logging.DEBUG
+    }
+    loglevel = logging.INFO
+
+    error_fmt = r"%(asctime)s [%(process)d] [%(levelname)s] %(message)s"
+    datefmt = r"[%Y-%m-%d %H:%M:%S %z]"
+
+    access_fmt = "%(message)s"
+    syslog_fmt = "[%(process)d] %(message)s"
+
+    atoms_wrapper_class = SafeAtoms
+
+    def __init__(self, cfg):
+        self.error_log = logging.getLogger("gunicorn.error")
+        self.error_log.propagate = False
+        self.access_log = logging.getLogger("gunicorn.access")
+        self.access_log.propagate = False
+        self.error_handlers = []
+        self.access_handlers = []
+        self.logfile = None
+        self.lock = threading.Lock()
+        self.cfg = cfg
+        self.setup(cfg)
+
+    def setup(self, cfg):
+        self.loglevel = self.LOG_LEVELS.get(cfg.loglevel.lower(), logging.INFO)
+        self.error_log.setLevel(self.loglevel)
+        self.access_log.setLevel(logging.INFO)
+
+        # set gunicorn.error handler
+        if self.cfg.capture_output and cfg.errorlog != "-":
+            for stream in sys.stdout, sys.stderr:
+                stream.flush()
+
+            self.logfile = open(cfg.errorlog, 'a+')
+            os.dup2(self.logfile.fileno(), sys.stdout.fileno())
+            os.dup2(self.logfile.fileno(), sys.stderr.fileno())
+
+        self._set_handler(self.error_log, cfg.errorlog,
+                          logging.Formatter(self.error_fmt, self.datefmt))
+
+        # set gunicorn.access handler
+        if cfg.accesslog is not None:
+            self._set_handler(
+                self.access_log, cfg.accesslog,
+                fmt=logging.Formatter(self.access_fmt), stream=sys.stdout
+            )
+
+        # set syslog handler
+        if cfg.syslog:
+            self._set_syslog_handler(
+                self.error_log, cfg, self.syslog_fmt, "error"
+            )
+            if not cfg.disable_redirect_access_to_syslog:
+                self._set_syslog_handler(
+                    self.access_log, cfg, self.syslog_fmt, "access"
+                )
+
+        if cfg.logconfig_dict:
+            config = CONFIG_DEFAULTS.copy()
+            config.update(cfg.logconfig_dict)
+            try:
+                dictConfig(config)
+            except (
+                    AttributeError,
+                    ImportError,
+                    ValueError,
+                    TypeError
+            ) as exc:
+                raise RuntimeError(str(exc))
+        elif cfg.logconfig_json:
+            config = CONFIG_DEFAULTS.copy()
+            if os.path.exists(cfg.logconfig_json):
+                try:
+                    config_json = json.load(open(cfg.logconfig_json))
+                    config.update(config_json)
+                    dictConfig(config)
+                except (
+                    json.JSONDecodeError,
+                    AttributeError,
+                    ImportError,
+                    ValueError,
+                    TypeError
+                ) as exc:
+                    raise RuntimeError(str(exc))
+        elif cfg.logconfig:
+            if os.path.exists(cfg.logconfig):
+                defaults = CONFIG_DEFAULTS.copy()
+                defaults['__file__'] = cfg.logconfig
+                defaults['here'] = os.path.dirname(cfg.logconfig)
+                fileConfig(cfg.logconfig, defaults=defaults,
+                           disable_existing_loggers=False)
+            else:
+                msg = "Error: log config '%s' not found"
+                raise RuntimeError(msg % cfg.logconfig)
+
+    def critical(self, msg, *args, **kwargs):
+        self.error_log.critical(msg, *args, **kwargs)
+
+    def error(self, msg, *args, **kwargs):
+        self.error_log.error(msg, *args, **kwargs)
+
+    def warning(self, msg, *args, **kwargs):
+        self.error_log.warning(msg, *args, **kwargs)
+
+    def info(self, msg, *args, **kwargs):
+        self.error_log.info(msg, *args, **kwargs)
+
+    def debug(self, msg, *args, **kwargs):
+        self.error_log.debug(msg, *args, **kwargs)
+
+    def exception(self, msg, *args, **kwargs):
+        self.error_log.exception(msg, *args, **kwargs)
+
+    def log(self, lvl, msg, *args, **kwargs):
+        if isinstance(lvl, str):
+            lvl = self.LOG_LEVELS.get(lvl.lower(), logging.INFO)
+        self.error_log.log(lvl, msg, *args, **kwargs)
+
+    def atoms(self, resp, req, environ, request_time):
+        """ Gets atoms for log formatting.
+        """
+        status = resp.status
+        if isinstance(status, str):
+            status = status.split(None, 1)[0]
+        atoms = {
+            'h': environ.get('REMOTE_ADDR', '-'),
+            'l': '-',
+            'u': self._get_user(environ) or '-',
+            't': self.now(),
+            'r': "%s %s %s" % (environ['REQUEST_METHOD'],
+                               environ['RAW_URI'],
+                               environ["SERVER_PROTOCOL"]),
+            's': status,
+            'm': environ.get('REQUEST_METHOD'),
+            'U': environ.get('PATH_INFO'),
+            'q': environ.get('QUERY_STRING'),
+            'H': environ.get('SERVER_PROTOCOL'),
+            'b': getattr(resp, 'sent', None) is not None and str(resp.sent) or '-',
+            'B': getattr(resp, 'sent', None),
+            'f': environ.get('HTTP_REFERER', '-'),
+            'a': environ.get('HTTP_USER_AGENT', '-'),
+            'T': request_time.seconds,
+            'D': (request_time.seconds * 1000000) + request_time.microseconds,
+            'M': (request_time.seconds * 1000) + int(request_time.microseconds / 1000),
+            'L': "%d.%06d" % (request_time.seconds, request_time.microseconds),
+            'p': "<%s>" % os.getpid()
+        }
+
+        # add request headers
+        if hasattr(req, 'headers'):
+            req_headers = req.headers
+        else:
+            req_headers = req
+
+        if hasattr(req_headers, "items"):
+            req_headers = req_headers.items()
+
+        atoms.update({"{%s}i" % k.lower(): v for k, v in req_headers})
+
+        resp_headers = resp.headers
+        if hasattr(resp_headers, "items"):
+            resp_headers = resp_headers.items()
+
+        # add response headers
+        atoms.update({"{%s}o" % k.lower(): v for k, v in resp_headers})
+
+        # add environ variables
+        environ_variables = environ.items()
+        atoms.update({"{%s}e" % k.lower(): v for k, v in environ_variables})
+
+        return atoms
+
+    def access(self, resp, req, environ, request_time):
+        """ See http://httpd.apache.org/docs/2.0/logs.html#combined
+        for format details
+        """
+
+        if not (self.cfg.accesslog or self.cfg.logconfig or
+           self.cfg.logconfig_dict or self.cfg.logconfig_json or
+           (self.cfg.syslog and not self.cfg.disable_redirect_access_to_syslog)):
+            return
+
+        # wrap atoms:
+        # - make sure atoms will be test case insensitively
+        # - if atom doesn't exist replace it by '-'
+        safe_atoms = self.atoms_wrapper_class(
+            self.atoms(resp, req, environ, request_time)
+        )
+
+        try:
+            self.access_log.info(self.cfg.access_log_format, safe_atoms)
+        except Exception:
+            self.error(traceback.format_exc())
+
+    def now(self):
+        """ return date in Apache Common Log Format """
+        return time.strftime('[%d/%b/%Y:%H:%M:%S %z]')
+
+    def reopen_files(self):
+        if self.cfg.capture_output and self.cfg.errorlog != "-":
+            for stream in sys.stdout, sys.stderr:
+                stream.flush()
+
+            with self.lock:
+                if self.logfile is not None:
+                    self.logfile.close()
+                self.logfile = open(self.cfg.errorlog, 'a+')
+                os.dup2(self.logfile.fileno(), sys.stdout.fileno())
+                os.dup2(self.logfile.fileno(), sys.stderr.fileno())
+
+        for log in loggers():
+            for handler in log.handlers:
+                if isinstance(handler, logging.FileHandler):
+                    handler.acquire()
+                    try:
+                        if handler.stream:
+                            handler.close()
+                            handler.stream = handler._open()
+                    finally:
+                        handler.release()
+
+    def close_on_exec(self):
+        for log in loggers():
+            for handler in log.handlers:
+                if isinstance(handler, logging.FileHandler):
+                    handler.acquire()
+                    try:
+                        if handler.stream:
+                            util.close_on_exec(handler.stream.fileno())
+                    finally:
+                        handler.release()
+
+    def _get_gunicorn_handler(self, log):
+        for h in log.handlers:
+            if getattr(h, "_gunicorn", False):
+                return h
+
+    def _set_handler(self, log, output, fmt, stream=None):
+        # remove previous gunicorn log handler
+        h = self._get_gunicorn_handler(log)
+        if h:
+            log.handlers.remove(h)
+
+        if output is not None:
+            if output == "-":
+                h = logging.StreamHandler(stream)
+            else:
+                util.check_is_writable(output)
+                h = logging.FileHandler(output)
+                # make sure the user can reopen the file
+                try:
+                    os.chown(h.baseFilename, self.cfg.user, self.cfg.group)
+                except OSError:
+                    # it's probably OK there, we assume the user has given
+                    # /dev/null as a parameter.
+                    pass
+
+            h.setFormatter(fmt)
+            h._gunicorn = True
+            log.addHandler(h)
+
+    def _set_syslog_handler(self, log, cfg, fmt, name):
+        # setup format
+        prefix = cfg.syslog_prefix or cfg.proc_name.replace(":", ".")
+
+        prefix = "gunicorn.%s.%s" % (prefix, name)
+
+        # set format
+        fmt = logging.Formatter(r"%s: %s" % (prefix, fmt))
+
+        # syslog facility
+        try:
+            facility = SYSLOG_FACILITIES[cfg.syslog_facility.lower()]
+        except KeyError:
+            raise RuntimeError("unknown facility name")
+
+        # parse syslog address
+        socktype, addr = parse_syslog_address(cfg.syslog_addr)
+
+        # finally setup the syslog handler
+        h = logging.handlers.SysLogHandler(address=addr,
+                                           facility=facility, socktype=socktype)
+
+        h.setFormatter(fmt)
+        h._gunicorn = True
+        log.addHandler(h)
+
+    def _get_user(self, environ):
+        user = None
+        http_auth = environ.get("HTTP_AUTHORIZATION")
+        if http_auth and http_auth.lower().startswith('basic'):
+            auth = http_auth.split(" ", 1)
+            if len(auth) == 2:
+                try:
+                    # b64decode doesn't accept unicode in Python < 3.3
+                    # so we need to convert it to a byte string
+                    auth = base64.b64decode(auth[1].strip().encode('utf-8'))
+                    # b64decode returns a byte string
+                    user = auth.split(b":", 1)[0].decode("UTF-8")
+                except (TypeError, binascii.Error, UnicodeDecodeError) as exc:
+                    self.debug("Couldn't get username: %s", exc)
+        return user
diff --git a/.venv/Lib/site-packages/gunicorn/http/__init__.py b/.venv/Lib/site-packages/gunicorn/http/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..11473bb0a64f27dfc9d78711c8a100bc10e57ae8
--- /dev/null
+++ b/.venv/Lib/site-packages/gunicorn/http/__init__.py
@@ -0,0 +1,8 @@
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+
+from gunicorn.http.message import Message, Request
+from gunicorn.http.parser import RequestParser
+
+__all__ = ['Message', 'Request', 'RequestParser']
diff --git a/.venv/Lib/site-packages/gunicorn/http/__pycache__/__init__.cpython-310.pyc b/.venv/Lib/site-packages/gunicorn/http/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..fe2189f99cfb4ba7d0bcf56217a2807440d1f21b
Binary files /dev/null and b/.venv/Lib/site-packages/gunicorn/http/__pycache__/__init__.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/gunicorn/http/__pycache__/body.cpython-310.pyc b/.venv/Lib/site-packages/gunicorn/http/__pycache__/body.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..fc5256b8ba8a5483bc666e85ff57c5da0da77da5
Binary files /dev/null and b/.venv/Lib/site-packages/gunicorn/http/__pycache__/body.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/gunicorn/http/__pycache__/errors.cpython-310.pyc b/.venv/Lib/site-packages/gunicorn/http/__pycache__/errors.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8c4333280e6cf692ca8b3bff26dee01ac52ca907
Binary files /dev/null and b/.venv/Lib/site-packages/gunicorn/http/__pycache__/errors.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/gunicorn/http/__pycache__/message.cpython-310.pyc b/.venv/Lib/site-packages/gunicorn/http/__pycache__/message.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c58c714db20483f4419a25a90a8287e848ec8518
Binary files /dev/null and b/.venv/Lib/site-packages/gunicorn/http/__pycache__/message.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/gunicorn/http/__pycache__/parser.cpython-310.pyc b/.venv/Lib/site-packages/gunicorn/http/__pycache__/parser.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7d5dd9001b27c4f8e2b306c49687e5a51f5f79a1
Binary files /dev/null and b/.venv/Lib/site-packages/gunicorn/http/__pycache__/parser.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/gunicorn/http/__pycache__/unreader.cpython-310.pyc b/.venv/Lib/site-packages/gunicorn/http/__pycache__/unreader.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a6c0117de8a09030dd226fee4739beb81113ab68
Binary files /dev/null and b/.venv/Lib/site-packages/gunicorn/http/__pycache__/unreader.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/gunicorn/http/__pycache__/wsgi.cpython-310.pyc b/.venv/Lib/site-packages/gunicorn/http/__pycache__/wsgi.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a63643a77cb8852e31f1c2403c505d1c28988b7d
Binary files /dev/null and b/.venv/Lib/site-packages/gunicorn/http/__pycache__/wsgi.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/gunicorn/http/body.py b/.venv/Lib/site-packages/gunicorn/http/body.py
new file mode 100644
index 0000000000000000000000000000000000000000..d7ee29e783cab1ae6bed5cfbd5fbd5837a11401b
--- /dev/null
+++ b/.venv/Lib/site-packages/gunicorn/http/body.py
@@ -0,0 +1,268 @@
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+
+import io
+import sys
+
+from gunicorn.http.errors import (NoMoreData, ChunkMissingTerminator,
+                                  InvalidChunkSize)
+
+
+class ChunkedReader:
+    def __init__(self, req, unreader):
+        self.req = req
+        self.parser = self.parse_chunked(unreader)
+        self.buf = io.BytesIO()
+
+    def read(self, size):
+        if not isinstance(size, int):
+            raise TypeError("size must be an integer type")
+        if size < 0:
+            raise ValueError("Size must be positive.")
+        if size == 0:
+            return b""
+
+        if self.parser:
+            while self.buf.tell() < size:
+                try:
+                    self.buf.write(next(self.parser))
+                except StopIteration:
+                    self.parser = None
+                    break
+
+        data = self.buf.getvalue()
+        ret, rest = data[:size], data[size:]
+        self.buf = io.BytesIO()
+        self.buf.write(rest)
+        return ret
+
+    def parse_trailers(self, unreader, data):
+        buf = io.BytesIO()
+        buf.write(data)
+
+        idx = buf.getvalue().find(b"\r\n\r\n")
+        done = buf.getvalue()[:2] == b"\r\n"
+        while idx < 0 and not done:
+            self.get_data(unreader, buf)
+            idx = buf.getvalue().find(b"\r\n\r\n")
+            done = buf.getvalue()[:2] == b"\r\n"
+        if done:
+            unreader.unread(buf.getvalue()[2:])
+            return b""
+        self.req.trailers = self.req.parse_headers(buf.getvalue()[:idx], from_trailer=True)
+        unreader.unread(buf.getvalue()[idx + 4:])
+
+    def parse_chunked(self, unreader):
+        (size, rest) = self.parse_chunk_size(unreader)
+        while size > 0:
+            while size > len(rest):
+                size -= len(rest)
+                yield rest
+                rest = unreader.read()
+                if not rest:
+                    raise NoMoreData()
+            yield rest[:size]
+            # Remove \r\n after chunk
+            rest = rest[size:]
+            while len(rest) < 2:
+                new_data = unreader.read()
+                if not new_data:
+                    break
+                rest += new_data
+            if rest[:2] != b'\r\n':
+                raise ChunkMissingTerminator(rest[:2])
+            (size, rest) = self.parse_chunk_size(unreader, data=rest[2:])
+
+    def parse_chunk_size(self, unreader, data=None):
+        buf = io.BytesIO()
+        if data is not None:
+            buf.write(data)
+
+        idx = buf.getvalue().find(b"\r\n")
+        while idx < 0:
+            self.get_data(unreader, buf)
+            idx = buf.getvalue().find(b"\r\n")
+
+        data = buf.getvalue()
+        line, rest_chunk = data[:idx], data[idx + 2:]
+
+        # RFC9112 7.1.1: BWS before chunk-ext - but ONLY then
+        chunk_size, *chunk_ext = line.split(b";", 1)
+        if chunk_ext:
+            chunk_size = chunk_size.rstrip(b" \t")
+        if any(n not in b"0123456789abcdefABCDEF" for n in chunk_size):
+            raise InvalidChunkSize(chunk_size)
+        if len(chunk_size) == 0:
+            raise InvalidChunkSize(chunk_size)
+        chunk_size = int(chunk_size, 16)
+
+        if chunk_size == 0:
+            try:
+                self.parse_trailers(unreader, rest_chunk)
+            except NoMoreData:
+                pass
+            return (0, None)
+        return (chunk_size, rest_chunk)
+
+    def get_data(self, unreader, buf):
+        data = unreader.read()
+        if not data:
+            raise NoMoreData()
+        buf.write(data)
+
+
+class LengthReader:
+    def __init__(self, unreader, length):
+        self.unreader = unreader
+        self.length = length
+
+    def read(self, size):
+        if not isinstance(size, int):
+            raise TypeError("size must be an integral type")
+
+        size = min(self.length, size)
+        if size < 0:
+            raise ValueError("Size must be positive.")
+        if size == 0:
+            return b""
+
+        buf = io.BytesIO()
+        data = self.unreader.read()
+        while data:
+            buf.write(data)
+            if buf.tell() >= size:
+                break
+            data = self.unreader.read()
+
+        buf = buf.getvalue()
+        ret, rest = buf[:size], buf[size:]
+        self.unreader.unread(rest)
+        self.length -= size
+        return ret
+
+
+class EOFReader:
+    def __init__(self, unreader):
+        self.unreader = unreader
+        self.buf = io.BytesIO()
+        self.finished = False
+
+    def read(self, size):
+        if not isinstance(size, int):
+            raise TypeError("size must be an integral type")
+        if size < 0:
+            raise ValueError("Size must be positive.")
+        if size == 0:
+            return b""
+
+        if self.finished:
+            data = self.buf.getvalue()
+            ret, rest = data[:size], data[size:]
+            self.buf = io.BytesIO()
+            self.buf.write(rest)
+            return ret
+
+        data = self.unreader.read()
+        while data:
+            self.buf.write(data)
+            if self.buf.tell() > size:
+                break
+            data = self.unreader.read()
+
+        if not data:
+            self.finished = True
+
+        data = self.buf.getvalue()
+        ret, rest = data[:size], data[size:]
+        self.buf = io.BytesIO()
+        self.buf.write(rest)
+        return ret
+
+
+class Body:
+    def __init__(self, reader):
+        self.reader = reader
+        self.buf = io.BytesIO()
+
+    def __iter__(self):
+        return self
+
+    def __next__(self):
+        ret = self.readline()
+        if not ret:
+            raise StopIteration()
+        return ret
+
+    next = __next__
+
+    def getsize(self, size):
+        if size is None:
+            return sys.maxsize
+        elif not isinstance(size, int):
+            raise TypeError("size must be an integral type")
+        elif size < 0:
+            return sys.maxsize
+        return size
+
+    def read(self, size=None):
+        size = self.getsize(size)
+        if size == 0:
+            return b""
+
+        if size < self.buf.tell():
+            data = self.buf.getvalue()
+            ret, rest = data[:size], data[size:]
+            self.buf = io.BytesIO()
+            self.buf.write(rest)
+            return ret
+
+        while size > self.buf.tell():
+            data = self.reader.read(1024)
+            if not data:
+                break
+            self.buf.write(data)
+
+        data = self.buf.getvalue()
+        ret, rest = data[:size], data[size:]
+        self.buf = io.BytesIO()
+        self.buf.write(rest)
+        return ret
+
+    def readline(self, size=None):
+        size = self.getsize(size)
+        if size == 0:
+            return b""
+
+        data = self.buf.getvalue()
+        self.buf = io.BytesIO()
+
+        ret = []
+        while 1:
+            idx = data.find(b"\n", 0, size)
+            idx = idx + 1 if idx >= 0 else size if len(data) >= size else 0
+            if idx:
+                ret.append(data[:idx])
+                self.buf.write(data[idx:])
+                break
+
+            ret.append(data)
+            size -= len(data)
+            data = self.reader.read(min(1024, size))
+            if not data:
+                break
+
+        return b"".join(ret)
+
+    def readlines(self, size=None):
+        ret = []
+        data = self.read()
+        while data:
+            pos = data.find(b"\n")
+            if pos < 0:
+                ret.append(data)
+                data = b""
+            else:
+                line, data = data[:pos + 1], data[pos + 1:]
+                ret.append(line)
+        return ret
diff --git a/.venv/Lib/site-packages/gunicorn/http/errors.py b/.venv/Lib/site-packages/gunicorn/http/errors.py
new file mode 100644
index 0000000000000000000000000000000000000000..bcb9700725075c396c5016da53be993270e63567
--- /dev/null
+++ b/.venv/Lib/site-packages/gunicorn/http/errors.py
@@ -0,0 +1,145 @@
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+
+# We don't need to call super() in __init__ methods of our
+# BaseException and Exception classes because we also define
+# our own __str__ methods so there is no need to pass 'message'
+# to the base class to get a meaningful output from 'str(exc)'.
+# pylint: disable=super-init-not-called
+
+
class ParseException(Exception):
    """Base class for errors raised while parsing an HTTP message."""
    pass
+
+
class NoMoreData(IOError):
    """Raised when the peer stopped sending mid-message."""

    def __init__(self, buf=None):
        # Bytes read so far, echoed in the error message for debugging.
        self.buf = buf

    def __str__(self):
        return "No more data after: %r" % self.buf
+
+
class ConfigurationProblem(ParseException):
    """A server-side misconfiguration detected while handling a request."""

    def __init__(self, info):
        self.info = info
        # Suggested HTTP status for the error response.
        self.code = 500

    def __str__(self):
        return "Configuration problem: %s" % self.info
+
+
class InvalidRequestLine(ParseException):
    """The request line is syntactically invalid."""

    def __init__(self, req):
        self.req = req
        # Suggested HTTP status for the error response.
        self.code = 400

    def __str__(self):
        return "Invalid HTTP request line: %r" % self.req
+
+
class InvalidRequestMethod(ParseException):
    """The request method is not a valid/acceptable HTTP token."""

    def __init__(self, method):
        self.method = method

    def __str__(self):
        return "Invalid HTTP method: %r" % self.method
+
+
class InvalidHTTPVersion(ParseException):
    """The HTTP-version field is malformed or unsupported."""

    def __init__(self, version):
        # Either the raw string or the parsed (major, minor) tuple.
        self.version = version

    def __str__(self):
        return "Invalid HTTP Version: %r" % (self.version,)
+
+
class InvalidHeader(ParseException):
    """A header field is malformed or violates framing rules."""

    def __init__(self, hdr, req=None):
        self.hdr = hdr
        # Optional back-reference to the message being parsed.
        self.req = req

    def __str__(self):
        return "Invalid HTTP Header: %r" % self.hdr
+
+
class ObsoleteFolding(ParseException):
    """RFC 7230 obs-fold (header continuation line) was used but not permitted."""

    def __init__(self, hdr):
        self.hdr = hdr

    def __str__(self):
        return "Obsolete line folding is unacceptable: %r" % (self.hdr, )
+
+
class InvalidHeaderName(ParseException):
    """A header field name is not a valid RFC 9110 token (or is refused)."""

    def __init__(self, hdr):
        self.hdr = hdr

    def __str__(self):
        return "Invalid HTTP header name: %r" % self.hdr
+
+
class UnsupportedTransferCoding(ParseException):
    """Transfer-Encoding listed a coding this server does not implement."""

    def __init__(self, hdr):
        self.hdr = hdr
        # 501 per RFC 9112: unrecognized transfer coding.
        self.code = 501

    def __str__(self):
        return "Unsupported transfer coding: %r" % self.hdr
+
+
class InvalidChunkSize(IOError):
    """A chunked-body chunk-size line could not be parsed."""

    def __init__(self, data):
        self.data = data

    def __str__(self):
        return "Invalid chunk size: %r" % self.data
+
+
class ChunkMissingTerminator(IOError):
    """A body chunk was not terminated by the required CRLF."""

    def __init__(self, term):
        self.term = term

    def __str__(self):
        return "Invalid chunk terminator is not '\\r\\n': %r" % self.term
+
+
class LimitRequestLine(ParseException):
    """The request line exceeded the configured size limit."""

    def __init__(self, size, max_size):
        self.size = size
        self.max_size = max_size

    def __str__(self):
        return "Request Line is too large (%s > %s)" % (self.size, self.max_size)
+
+
class LimitRequestHeaders(ParseException):
    """The header section exceeded a configured count or size limit."""

    def __init__(self, msg):
        self.msg = msg

    def __str__(self):
        return self.msg
+
+
class InvalidProxyLine(ParseException):
    """A PROXY-protocol v1 line is malformed."""

    def __init__(self, line):
        self.line = line
        # Suggested HTTP status for the error response.
        self.code = 400

    def __str__(self):
        return "Invalid PROXY line: %r" % self.line
+
+
class ForbiddenProxyRequest(ParseException):
    """A PROXY line arrived from a peer not in the allow list."""

    def __init__(self, host):
        self.host = host
        # Suggested HTTP status for the error response.
        self.code = 403

    def __str__(self):
        return "Proxy request from %r not allowed" % self.host
+
+
class InvalidSchemeHeaders(ParseException):
    """Multiple scheme-override headers disagreed about http vs https."""

    def __str__(self):
        return "Contradictory scheme headers"
diff --git a/.venv/Lib/site-packages/gunicorn/http/message.py b/.venv/Lib/site-packages/gunicorn/http/message.py
new file mode 100644
index 0000000000000000000000000000000000000000..59ce0bf4beed664b012b501dd874579a24973377
--- /dev/null
+++ b/.venv/Lib/site-packages/gunicorn/http/message.py
@@ -0,0 +1,463 @@
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+
+import io
+import re
+import socket
+
+from gunicorn.http.body import ChunkedReader, LengthReader, EOFReader, Body
+from gunicorn.http.errors import (
+    InvalidHeader, InvalidHeaderName, NoMoreData,
+    InvalidRequestLine, InvalidRequestMethod, InvalidHTTPVersion,
+    LimitRequestLine, LimitRequestHeaders,
+    UnsupportedTransferCoding, ObsoleteFolding,
+)
+from gunicorn.http.errors import InvalidProxyLine, ForbiddenProxyRequest
+from gunicorn.http.errors import InvalidSchemeHeaders
+from gunicorn.util import bytes_to_str, split_request_uri
+
+MAX_REQUEST_LINE = 8190
+MAX_HEADERS = 32768
+DEFAULT_MAX_HEADERFIELD_SIZE = 8190
+
+# verbosely on purpose, avoid backslash ambiguity
+RFC9110_5_6_2_TOKEN_SPECIALS = r"!#$%&'*+-.^_`|~"
+TOKEN_RE = re.compile(r"[%s0-9a-zA-Z]+" % (re.escape(RFC9110_5_6_2_TOKEN_SPECIALS)))
+METHOD_BADCHAR_RE = re.compile("[a-z#]")
+# usually 1.0 or 1.1 - RFC9112 permits restricting to single-digit versions
+VERSION_RE = re.compile(r"HTTP/(\d)\.(\d)")
+RFC9110_5_5_INVALID_AND_DANGEROUS = re.compile(r"[\0\r\n]")
+
+
class Message:
    """A single parsed HTTP message: version, headers and body reader.

    Parsing is eager: __init__ consumes the message head from *unreader*,
    pushes any over-read bytes back, then attaches the body reader.
    Subclasses implement parse() for the concrete start-line syntax.
    """

    def __init__(self, cfg, unreader, peer_addr):
        self.cfg = cfg
        self.unreader = unreader
        self.peer_addr = peer_addr
        self.remote_addr = peer_addr
        self.version = None
        self.headers = []
        self.trailers = []
        self.body = None
        self.scheme = "https" if cfg.is_ssl else "http"
        self.must_close = False

        # set headers limits
        self.limit_request_fields = cfg.limit_request_fields
        if (self.limit_request_fields <= 0
                or self.limit_request_fields > MAX_HEADERS):
            self.limit_request_fields = MAX_HEADERS
        self.limit_request_field_size = cfg.limit_request_field_size
        if self.limit_request_field_size < 0:
            self.limit_request_field_size = DEFAULT_MAX_HEADERFIELD_SIZE

        # set max header buffer size
        max_header_field_size = self.limit_request_field_size or DEFAULT_MAX_HEADERFIELD_SIZE
        self.max_buffer_headers = self.limit_request_fields * \
            (max_header_field_size + 2) + 4

        # parse() returns whatever was read past the end of the head;
        # push it back so the body reader sees those bytes.
        unused = self.parse(self.unreader)
        self.unreader.unread(unused)
        self.set_body_reader()

    def force_close(self):
        """Force the connection to close once this message is done."""
        self.must_close = True

    def parse(self, unreader):
        """Parse the message head; return over-read bytes (subclass hook)."""
        raise NotImplementedError()

    def parse_headers(self, data, from_trailer=False):
        """Parse a CRLF-separated header block into (NAME, value) pairs.

        Names must be RFC 9110 tokens and are upper-cased; values
        containing NUL/CR/LF are rejected.  Scheme-override and
        forwarder headers are only honoured from trusted peers, never
        from trailers.

        :raises: LimitRequestHeaders, InvalidHeader, InvalidHeaderName,
            ObsoleteFolding
        """
        cfg = self.cfg
        headers = []

        # Split lines on \r\n
        lines = [bytes_to_str(line) for line in data.split(b"\r\n")]

        # handle scheme headers
        scheme_header = False
        secure_scheme_headers = {}
        forwarder_headers = []
        if from_trailer:
            # nonsense. either a request is https from the beginning
            #  .. or we are just behind a proxy who does not remove conflicting trailers
            pass
        elif ('*' in cfg.forwarded_allow_ips or
              not isinstance(self.peer_addr, tuple)
              or self.peer_addr[0] in cfg.forwarded_allow_ips):
            secure_scheme_headers = cfg.secure_scheme_headers
            forwarder_headers = cfg.forwarder_headers

        # Parse headers into key/value pairs paying attention
        # to continuation lines.
        while lines:
            if len(headers) >= self.limit_request_fields:
                raise LimitRequestHeaders("limit request headers fields")

            # Parse initial header name: value pair.
            curr = lines.pop(0)
            header_length = len(curr) + len("\r\n")
            if curr.find(":") <= 0:
                raise InvalidHeader(curr)
            name, value = curr.split(":", 1)
            if self.cfg.strip_header_spaces:
                name = name.rstrip(" \t")
            if not TOKEN_RE.fullmatch(name):
                raise InvalidHeaderName(name)

            # this is still a dangerous place to do this
            #  but it is more correct than doing it before the pattern match:
            # after we entered Unicode wonderland, 8bits could case-shift into ASCII:
            # b"\xDF".decode("latin-1").upper().encode("ascii") == b"SS"
            name = name.upper()

            value = [value.strip(" \t")]

            # Consume value continuation lines..
            while lines and lines[0].startswith((" ", "\t")):
                # .. which is obsolete here, and no longer done by default
                if not self.cfg.permit_obsolete_folding:
                    raise ObsoleteFolding(name)
                curr = lines.pop(0)
                header_length += len(curr) + len("\r\n")
                if header_length > self.limit_request_field_size > 0:
                    raise LimitRequestHeaders("limit request headers "
                                              "fields size")
                value.append(curr.strip("\t "))
            value = " ".join(value)

            if RFC9110_5_5_INVALID_AND_DANGEROUS.search(value):
                raise InvalidHeader(name)

            if header_length > self.limit_request_field_size > 0:
                raise LimitRequestHeaders("limit request headers fields size")

            if name in secure_scheme_headers:
                secure = value == secure_scheme_headers[name]
                scheme = "https" if secure else "http"
                if scheme_header:
                    if scheme != self.scheme:
                        raise InvalidSchemeHeaders()
                else:
                    scheme_header = True
                    self.scheme = scheme

            # ambiguous mapping allows fooling downstream, e.g. merging non-identical headers:
            # X-Forwarded-For: 2001:db8::ha:cc:ed
            # X_Forwarded_For: 127.0.0.1,::1
            # HTTP_X_FORWARDED_FOR = 2001:db8::ha:cc:ed,127.0.0.1,::1
            # Only modify after fixing *ALL* header transformations; network to wsgi env
            if "_" in name:
                if name in forwarder_headers or "*" in forwarder_headers:
                    # This forwarder may override our environment
                    pass
                elif self.cfg.header_map == "dangerous":
                    # as if we did not know we cannot safely map this
                    pass
                elif self.cfg.header_map == "drop":
                    # almost as if it never had been there
                    # but still counts against resource limits
                    continue
                else:
                    # fail-safe fallthrough: refuse
                    raise InvalidHeaderName(name)

            headers.append((name, value))

        return headers

    def set_body_reader(self):
        """Attach the body reader implied by the framing headers.

        Enforces RFC 9112 framing: duplicate Content-Length, chunked
        combined with Content-Length, chunked on HTTP/1.0 and unknown
        transfer codings are rejected.

        :raises: InvalidHeader, UnsupportedTransferCoding
        """
        chunked = False
        content_length = None

        for (name, value) in self.headers:
            if name == "CONTENT-LENGTH":
                if content_length is not None:
                    raise InvalidHeader("CONTENT-LENGTH", req=self)
                content_length = value
            elif name == "TRANSFER-ENCODING":
                # T-E can be a list
                # https://datatracker.ietf.org/doc/html/rfc9112#name-transfer-encoding
                vals = [v.strip() for v in value.split(',')]
                for val in vals:
                    if val.lower() == "chunked":
                        # DANGER: transfer codings stack, and stacked chunking is never intended
                        if chunked:
                            raise InvalidHeader("TRANSFER-ENCODING", req=self)
                        chunked = True
                    elif val.lower() == "identity":
                        # does not do much, could still plausibly desync from what the proxy does
                        # safe option: nuke it, its never needed
                        if chunked:
                            raise InvalidHeader("TRANSFER-ENCODING", req=self)
                    elif val.lower() in ('compress', 'deflate', 'gzip'):
                        # chunked should be the last one
                        if chunked:
                            raise InvalidHeader("TRANSFER-ENCODING", req=self)
                        self.force_close()
                    else:
                        raise UnsupportedTransferCoding(value)

        if chunked:
            # two potentially dangerous cases:
            #  a) CL + TE (TE overrides CL.. only safe if the recipient sees it that way too)
            #  b) chunked HTTP/1.0 (always faulty)
            if self.version < (1, 1):
                # framing wonky, see RFC 9112 Section 6.1
                raise InvalidHeader("TRANSFER-ENCODING", req=self)
            if content_length is not None:
                # we cannot be certain the message framing we understood matches proxy intent
                #  -> whatever happens next, remaining input must not be trusted
                raise InvalidHeader("CONTENT-LENGTH", req=self)
            self.body = Body(ChunkedReader(self, self.unreader))
        elif content_length is not None:
            try:
                # NOTE(review): str.isnumeric() accepts non-ASCII digits that
                # int() also parses (e.g. Arabic-Indic); strict RFC 9110
                # 1*DIGIT would need isascii() + isdecimal() — confirm intent.
                if str(content_length).isnumeric():
                    content_length = int(content_length)
                else:
                    raise InvalidHeader("CONTENT-LENGTH", req=self)
            except ValueError:
                raise InvalidHeader("CONTENT-LENGTH", req=self)

            if content_length < 0:
                raise InvalidHeader("CONTENT-LENGTH", req=self)

            self.body = Body(LengthReader(self.unreader, content_length))
        else:
            self.body = Body(EOFReader(self.unreader))

    def should_close(self):
        """Return True when the connection must close after this message."""
        if self.must_close:
            return True
        for (h, v) in self.headers:
            if h == "CONNECTION":
                v = v.lower().strip(" \t")
                if v == "close":
                    return True
                elif v == "keep-alive":
                    return False
                break
        # HTTP/1.0 defaults to close; HTTP/1.1+ defaults to keep-alive.
        return self.version <= (1, 0)
+
+
class Request(Message):
    """An HTTP request parsed from a client connection.

    Extends Message with request-line parsing (method, URI, version)
    and optional PROXY-protocol handling on the first request of a
    connection.
    """

    def __init__(self, cfg, unreader, peer_addr, req_number=1):
        self.method = None
        self.uri = None
        self.path = None
        self.query = None
        self.fragment = None

        # get max request line size
        self.limit_request_line = cfg.limit_request_line
        if (self.limit_request_line < 0
                or self.limit_request_line >= MAX_REQUEST_LINE):
            self.limit_request_line = MAX_REQUEST_LINE

        self.req_number = req_number
        self.proxy_protocol_info = None
        super().__init__(cfg, unreader, peer_addr)

    def get_data(self, unreader, buf, stop=False):
        """Read one chunk from *unreader* into *buf*.

        :raises: StopIteration when *stop* is set and no data remains,
            NoMoreData otherwise.
        """
        data = unreader.read()
        if not data:
            if stop:
                raise StopIteration()
            raise NoMoreData(buf.getvalue())
        buf.write(data)

    def parse(self, unreader):
        """Parse request line and headers; return bytes read past the head."""
        buf = io.BytesIO()
        self.get_data(unreader, buf, stop=True)

        # get request line
        line, rbuf = self.read_line(unreader, buf, self.limit_request_line)

        # proxy protocol
        if self.proxy_protocol(bytes_to_str(line)):
            # get next request line
            buf = io.BytesIO()
            buf.write(rbuf)
            line, rbuf = self.read_line(unreader, buf, self.limit_request_line)

        self.parse_request_line(line)
        buf = io.BytesIO()
        buf.write(rbuf)

        # Headers: accumulate data until the blank line ending the head
        # (idx < 0 and done False -> still incomplete, keep reading).
        data = buf.getvalue()
        while True:
            idx = data.find(b"\r\n\r\n")
            done = data[:2] == b"\r\n"

            if idx < 0 and not done:
                self.get_data(unreader, buf)
                data = buf.getvalue()
                if len(data) > self.max_buffer_headers:
                    raise LimitRequestHeaders("max buffer headers")
            else:
                break

        if done:
            # Head ended immediately after the request line: no headers.
            self.unreader.unread(data[2:])
            return b""

        self.headers = self.parse_headers(data[:idx], from_trailer=False)

        ret = data[idx + 4:]
        buf = None
        return ret

    def read_line(self, unreader, buf, limit=0):
        """Read one CRLF-terminated line from *buf*, refilling as needed.

        :return: (line without CRLF, residue already read past it)
        :raises: LimitRequestLine when the line exceeds *limit* (> 0)
        """
        data = buf.getvalue()

        while True:
            idx = data.find(b"\r\n")
            if idx >= 0:
                # check if the request line is too large
                if idx > limit > 0:
                    raise LimitRequestLine(idx, limit)
                break
            if len(data) - 2 > limit > 0:
                raise LimitRequestLine(len(data), limit)
            self.get_data(unreader, buf)
            data = buf.getvalue()

        return (data[:idx],  # request line,
                data[idx + 2:])  # residue in the buffer, skip \r\n

    def proxy_protocol(self, line):
        """\
        Detect, check and parse proxy protocol.

        :raises: ForbiddenProxyRequest, InvalidProxyLine.
        :return: True for proxy protocol line else False
        """
        if not self.cfg.proxy_protocol:
            return False

        # A PROXY line is only valid as the very first line of a connection.
        if self.req_number != 1:
            return False

        if not line.startswith("PROXY"):
            return False

        self.proxy_protocol_access_check()
        self.parse_proxy_protocol(line)

        return True

    def proxy_protocol_access_check(self):
        """Reject PROXY lines from peers not in cfg.proxy_allow_ips."""
        # check in allow list
        if ("*" not in self.cfg.proxy_allow_ips and
            isinstance(self.peer_addr, tuple) and
                self.peer_addr[0] not in self.cfg.proxy_allow_ips):
            raise ForbiddenProxyRequest(self.peer_addr[0])

    def parse_proxy_protocol(self, line):
        """Validate a PROXY v1 line and record it in proxy_protocol_info.

        :raises: InvalidProxyLine on malformed protocol, address or port
        """
        bits = line.split(" ")

        if len(bits) != 6:
            raise InvalidProxyLine(line)

        # Extract data
        proto = bits[1]
        s_addr = bits[2]
        d_addr = bits[3]

        # Validation
        if proto not in ["TCP4", "TCP6"]:
            raise InvalidProxyLine("protocol '%s' not supported" % proto)
        if proto == "TCP4":
            try:
                socket.inet_pton(socket.AF_INET, s_addr)
                socket.inet_pton(socket.AF_INET, d_addr)
            except OSError:
                raise InvalidProxyLine(line)
        elif proto == "TCP6":
            try:
                socket.inet_pton(socket.AF_INET6, s_addr)
                socket.inet_pton(socket.AF_INET6, d_addr)
            except OSError:
                raise InvalidProxyLine(line)

        try:
            s_port = int(bits[4])
            d_port = int(bits[5])
        except ValueError:
            raise InvalidProxyLine("invalid port %s" % line)

        if not ((0 <= s_port <= 65535) and (0 <= d_port <= 65535)):
            raise InvalidProxyLine("invalid port %s" % line)

        # Set data
        self.proxy_protocol_info = {
            "proxy_protocol": proto,
            "client_addr": s_addr,
            "client_port": s_port,
            "proxy_addr": d_addr,
            "proxy_port": d_port
        }

    def parse_request_line(self, line_bytes):
        """Split and validate the request line, setting method, uri,
        path, query, fragment and version.

        :raises: InvalidRequestLine, InvalidRequestMethod,
            InvalidHTTPVersion
        """
        bits = [bytes_to_str(bit) for bit in line_bytes.split(b" ", 2)]
        if len(bits) != 3:
            raise InvalidRequestLine(bytes_to_str(line_bytes))

        # Method: RFC9110 Section 9
        self.method = bits[0]

        # nonstandard restriction, suitable for all IANA registered methods
        # partially enforced in previous gunicorn versions
        if not self.cfg.permit_unconventional_http_method:
            if METHOD_BADCHAR_RE.search(self.method):
                raise InvalidRequestMethod(self.method)
            if not 3 <= len(bits[0]) <= 20:
                raise InvalidRequestMethod(self.method)
        # standard restriction: RFC9110 token
        if not TOKEN_RE.fullmatch(self.method):
            raise InvalidRequestMethod(self.method)
        # nonstandard and dangerous
        # methods are merely uppercase by convention, no case-insensitive treatment is intended
        if self.cfg.casefold_http_method:
            self.method = self.method.upper()

        # URI
        self.uri = bits[1]

        # Python stdlib explicitly tells us it will not perform validation.
        # https://docs.python.org/3/library/urllib.parse.html#url-parsing-security
        # There are *four* `request-target` forms in rfc9112, none of them can be empty:
        # 1. origin-form, which starts with a slash
        # 2. absolute-form, which starts with a non-empty scheme
        # 3. authority-form, (for CONNECT) which contains a colon after the host
        # 4. asterisk-form, which is an asterisk (`\x2A`)
        # => manually reject one always invalid URI: empty
        if len(self.uri) == 0:
            raise InvalidRequestLine(bytes_to_str(line_bytes))

        try:
            parts = split_request_uri(self.uri)
        except ValueError:
            raise InvalidRequestLine(bytes_to_str(line_bytes))
        self.path = parts.path or ""
        self.query = parts.query or ""
        self.fragment = parts.fragment or ""

        # Version
        match = VERSION_RE.fullmatch(bits[2])
        if match is None:
            raise InvalidHTTPVersion(bits[2])
        self.version = (int(match.group(1)), int(match.group(2)))
        if not (1, 0) <= self.version < (2, 0):
            # if ever relaxing this, carefully review Content-Encoding processing
            if not self.cfg.permit_unconventional_http_version:
                raise InvalidHTTPVersion(self.version)

    def set_body_reader(self):
        """Like Message.set_body_reader, but a request without explicit
        framing gets a zero-length body instead of read-to-EOF."""
        super().set_body_reader()
        if isinstance(self.body.reader, EOFReader):
            self.body = Body(LengthReader(self.unreader, 0))
diff --git a/.venv/Lib/site-packages/gunicorn/http/parser.py b/.venv/Lib/site-packages/gunicorn/http/parser.py
new file mode 100644
index 0000000000000000000000000000000000000000..88da17ab06e19162c091431772d303d82921bb78
--- /dev/null
+++ b/.venv/Lib/site-packages/gunicorn/http/parser.py
@@ -0,0 +1,51 @@
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+
+from gunicorn.http.message import Request
+from gunicorn.http.unreader import SocketUnreader, IterUnreader
+
+
class Parser:
    """Iterator yielding parsed HTTP messages from a socket or an
    iterable of byte chunks."""

    # Concrete subclasses set the message class to instantiate.
    mesg_class = None

    def __init__(self, cfg, source, source_addr):
        self.cfg = cfg
        # A socket-like source (anything with recv) gets wrapped
        # differently from a plain iterable of byte strings.
        unreader_cls = SocketUnreader if hasattr(source, "recv") else IterUnreader
        self.unreader = unreader_cls(source)
        self.mesg = None
        self.source_addr = source_addr

        # request counter (for keepalive connections)
        self.req_count = 0

    def __iter__(self):
        return self

    def __next__(self):
        # Stop if HTTP dictates a stop.
        if self.mesg and self.mesg.should_close():
            raise StopIteration()

        # Discard any unread body of the previous message.
        if self.mesg:
            while self.mesg.body.read(8192):
                pass

        # Parse the next request.
        self.req_count += 1
        self.mesg = self.mesg_class(self.cfg, self.unreader,
                                    self.source_addr, self.req_count)
        if not self.mesg:
            raise StopIteration()
        return self.mesg

    # Python 2 style alias kept for backward compatibility.
    next = __next__
+
+
class RequestParser(Parser):
    """Parser specialized to yield Request messages from a connection."""

    mesg_class = Request
diff --git a/.venv/Lib/site-packages/gunicorn/http/unreader.py b/.venv/Lib/site-packages/gunicorn/http/unreader.py
new file mode 100644
index 0000000000000000000000000000000000000000..9aadfbcff83add527f2440ee996d37aebe2be274
--- /dev/null
+++ b/.venv/Lib/site-packages/gunicorn/http/unreader.py
@@ -0,0 +1,78 @@
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+
+import io
+import os
+
+# Classes that can undo reading data from
+# a given type of data source.
+
+
class Unreader:
    """Buffered reader supporting push-back of already-read data.

    Subclasses provide chunk(); an empty chunk signals end of input.
    """

    def __init__(self):
        self.buf = io.BytesIO()

    def chunk(self):
        """Fetch the next raw chunk from the source (subclass hook)."""
        raise NotImplementedError()

    def read(self, size=None):
        """Read *size* bytes (all available when None or negative).

        Buffered (unread) data is always consumed before the source.
        """
        if size is not None and not isinstance(size, int):
            raise TypeError("size parameter must be an int or long.")

        if size is not None:
            if size == 0:
                return b""
            if size < 0:
                # Negative sizes behave like "read whatever is available".
                size = None

        # Keep the write position at the end so tell() is the buffered length.
        self.buf.seek(0, os.SEEK_END)

        if size is None:
            if self.buf.tell():
                buffered = self.buf.getvalue()
                self.buf = io.BytesIO()
                return buffered
            return self.chunk()

        # Accumulate until *size* bytes are buffered or the source dries up.
        while self.buf.tell() < size:
            piece = self.chunk()
            if not piece:
                drained = self.buf.getvalue()
                self.buf = io.BytesIO()
                return drained
            self.buf.write(piece)

        everything = self.buf.getvalue()
        self.buf = io.BytesIO()
        self.buf.write(everything[size:])
        return everything[:size]

    def unread(self, data):
        """Push *data* back so subsequent read() calls return it first."""
        self.buf.seek(0, os.SEEK_END)
        self.buf.write(data)
+
+
class SocketUnreader(Unreader):
    """Unreader that pulls chunks from a connected socket."""

    def __init__(self, sock, max_chunk=8192):
        super().__init__()
        self.sock = sock
        # Maximum number of bytes fetched per recv() call.
        self.mxchunk = max_chunk

    def chunk(self):
        # recv() returning b"" means the peer closed the connection.
        return self.sock.recv(self.mxchunk)
+
+
class IterUnreader(Unreader):
    """Unreader that pulls chunks from an iterable of byte strings."""

    def __init__(self, iterable):
        super().__init__()
        self.iter = iter(iterable)

    def chunk(self):
        # Once exhausted the iterator is dropped so later calls
        # short-circuit to EOF (b"").
        if not self.iter:
            return b""
        try:
            return next(self.iter)
        except StopIteration:
            self.iter = None
            return b""
diff --git a/.venv/Lib/site-packages/gunicorn/http/wsgi.py b/.venv/Lib/site-packages/gunicorn/http/wsgi.py
new file mode 100644
index 0000000000000000000000000000000000000000..419ac503a4a7c4a564498ac570baf3151d51daf6
--- /dev/null
+++ b/.venv/Lib/site-packages/gunicorn/http/wsgi.py
@@ -0,0 +1,401 @@
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+
+import io
+import logging
+import os
+import re
+import sys
+
+from gunicorn.http.message import TOKEN_RE
+from gunicorn.http.errors import ConfigurationProblem, InvalidHeader, InvalidHeaderName
+from gunicorn import SERVER_SOFTWARE, SERVER
+from gunicorn import util
+
+# Send files in at most 1GB blocks as some operating systems can have problems
+# with sending files in blocks over 2GB.
+BLKSIZE = 0x3FFFFFFF
+
+# RFC9110 5.5: field-vchar = VCHAR / obs-text
+# RFC4234 B.1: VCHAR = 0x21-x07E = printable ASCII
+HEADER_VALUE_RE = re.compile(r'[ \t\x21-\x7e\x80-\xff]*')
+
+log = logging.getLogger(__name__)
+
+
class FileWrapper:
    """Minimal wsgi.file_wrapper: sequential block iteration over a file.

    Uses the legacy index-based iteration protocol: each __getitem__
    call returns the next block of at most *blksize* bytes and
    IndexError marks exhaustion.
    """

    def __init__(self, filelike, blksize=8192):
        self.filelike = filelike
        self.blksize = blksize
        # Re-export close() so the server can release the underlying file.
        if hasattr(filelike, 'close'):
            self.close = filelike.close

    def __getitem__(self, key):
        # The index is ignored; reads are strictly sequential.
        block = self.filelike.read(self.blksize)
        if not block:
            raise IndexError
        return block
+
+
class WSGIErrorsWrapper(io.RawIOBase):
    """wsgi.errors stream fanning writes out to the gunicorn error log."""

    def __init__(self, cfg):
        # RawIOBase has no public __init__, so super() is deliberately
        # not called here.
        # pylint: disable=super-init-not-called
        error_logger = logging.getLogger("gunicorn.error")
        handlers = error_logger.handlers
        self.streams = []

        if cfg.errorlog == "-":
            # Console logging: write to stderr directly and skip the
            # first (console) handler to avoid duplicating output.
            self.streams.append(sys.stderr)
            handlers = handlers[1:]

        self.streams.extend(
            handler.stream for handler in handlers
            if hasattr(handler, "stream"))

    def write(self, data):
        """Write *data* to every target stream, flushing each one."""
        for stream in self.streams:
            try:
                stream.write(data)
            except UnicodeError:
                # Byte-oriented stream: retry with an explicit encoding.
                stream.write(data.encode("UTF-8"))
            stream.flush()
+
+
def base_environ(cfg):
    """Return the request-independent base of the WSGI environ dict."""
    return {
        "wsgi.errors": WSGIErrorsWrapper(cfg),
        "wsgi.version": (1, 0),
        "wsgi.multithread": False,
        "wsgi.multiprocess": (cfg.workers > 1),
        "wsgi.run_once": False,
        "wsgi.file_wrapper": FileWrapper,
        "wsgi.input_terminated": True,
        "SERVER_SOFTWARE": SERVER_SOFTWARE,
    }
+
+
def default_environ(req, sock, cfg):
    """Return the per-request WSGI environ, before header translation."""
    env = base_environ(cfg)
    env.update({
        "wsgi.input": req.body,
        "gunicorn.socket": sock,
        "REQUEST_METHOD": req.method,
        "QUERY_STRING": req.query,
        "RAW_URI": req.uri,
        "SERVER_PROTOCOL": "HTTP/%s" % ".".join([str(v) for v in req.version])
    })
    return env
+
+
def proxy_environ(req):
    """Return PROXY-protocol environ variables for *req*, or {} when
    no PROXY line was recorded for the connection."""
    info = req.proxy_protocol_info

    if not info:
        return {}

    # Ports are stored as ints; CGI-style variables must be strings.
    mapping = (
        ("PROXY_PROTOCOL", info["proxy_protocol"]),
        ("REMOTE_ADDR", info["client_addr"]),
        ("REMOTE_PORT", str(info["client_port"])),
        ("PROXY_ADDR", info["proxy_addr"]),
        ("PROXY_PORT", str(info["proxy_port"])),
    )
    return dict(mapping)
+
+
def create(req, sock, client, server, cfg):
    """Build the (Response, environ) pair for a parsed request.

    Translates the request headers into CGI-style HTTP_* keys, answers
    Expect: 100-continue inline, and derives REMOTE_*/SERVER_* from the
    connection endpoints (falling back to the Host header for unix
    sockets).

    :raises: ConfigurationProblem when PATH_INFO does not start with a
        configured SCRIPT_NAME
    """
    resp = Response(req, sock, cfg)

    # set initial environ
    environ = default_environ(req, sock, cfg)

    # default variables
    host = None
    script_name = os.environ.get("SCRIPT_NAME", "")

    # add the headers to the environ
    for hdr_name, hdr_value in req.headers:
        if hdr_name == "EXPECT":
            # handle expect
            if hdr_value.lower() == "100-continue":
                sock.send(b"HTTP/1.1 100 Continue\r\n\r\n")
        elif hdr_name == 'HOST':
            host = hdr_value
        elif hdr_name == "SCRIPT_NAME":
            script_name = hdr_value
        elif hdr_name == "CONTENT-TYPE":
            environ['CONTENT_TYPE'] = hdr_value
            continue
        elif hdr_name == "CONTENT-LENGTH":
            environ['CONTENT_LENGTH'] = hdr_value
            continue

        # do not change lightly, this is a common source of security problems
        # RFC9110 Section 17.10 discourages ambiguous or incomplete mappings
        key = 'HTTP_' + hdr_name.replace('-', '_')
        if key in environ:
            hdr_value = "%s,%s" % (environ[key], hdr_value)
        environ[key] = hdr_value

    # set the url scheme
    environ['wsgi.url_scheme'] = req.scheme

    # set the REMOTE_* keys in environ
    # authors should be aware that REMOTE_HOST and REMOTE_ADDR
    # may not qualify the remote addr:
    # http://www.ietf.org/rfc/rfc3875
    if isinstance(client, str):
        environ['REMOTE_ADDR'] = client
    elif isinstance(client, bytes):
        environ['REMOTE_ADDR'] = client.decode()
    else:
        environ['REMOTE_ADDR'] = client[0]
        environ['REMOTE_PORT'] = str(client[1])

    # handle the SERVER_*
    # Normally only the application should use the Host header but since the
    # WSGI spec doesn't support unix sockets, we are using it to create
    # viable SERVER_* if possible.
    if isinstance(server, str):
        server = server.split(":")
        if len(server) == 1:
            # unix socket
            if host:
                server = host.split(':')
                if len(server) == 1:
                    # Default port is inferred from the scheme.
                    if req.scheme == "http":
                        server.append(80)
                    elif req.scheme == "https":
                        server.append(443)
                    else:
                        server.append('')
            else:
                # no host header given which means that we are not behind a
                # proxy, so append an empty port.
                server.append('')
    # NOTE(review): assumes *server* now has at least two elements;
    # a non-string server value must already be a (host, port) pair.
    environ['SERVER_NAME'] = server[0]
    environ['SERVER_PORT'] = str(server[1])

    # set the path and script name
    path_info = req.path
    if script_name:
        if not path_info.startswith(script_name):
            raise ConfigurationProblem(
                "Request path %r does not start with SCRIPT_NAME %r" %
                (path_info, script_name))
        path_info = path_info[len(script_name):]
    environ['PATH_INFO'] = util.unquote_to_wsgi_str(path_info)
    environ['SCRIPT_NAME'] = script_name

    # override the environ with the correct remote and server address if
    # we are behind a proxy using the proxy protocol.
    environ.update(proxy_environ(req))
    return resp, environ
+
+
class Response:
    """Server side of a single HTTP response.

    Collects the status line and headers handed over through the WSGI
    ``start_response`` callable, decides on connection persistence and
    chunked transfer coding, and streams body bytes to the client
    socket via ``gunicorn.util`` helpers.
    """

    def __init__(self, req, sock, cfg):
        self.req = req          # parsed request this response answers
        self.sock = sock        # client socket to write to
        self.version = SERVER   # value emitted in the Server: header
        self.status = None      # full status line, e.g. "200 OK"
        self.chunked = False    # True once chunked coding is selected
        self.must_close = False  # force "Connection: close"
        self.headers = []
        self.headers_sent = False
        self.response_length = None  # int Content-Length if declared
        self.sent = 0                # body bytes written so far
        self.upgrade = False         # protocol upgrade (websocket) pending
        self.cfg = cfg

    def force_close(self):
        """Mark the connection to be closed after this response."""
        self.must_close = True

    def should_close(self):
        """Return True when the connection must close after this response.

        Keep-alive is only safe when the body's end is delimited
        (Content-Length or chunked) or when no body is allowed at all
        (HEAD requests, 1xx/204/304 statuses).

        NOTE(review): ``self.status_code`` may be None if the status
        line was unparsable, in which case the ``<`` comparison below
        would raise — confirm upstream callers always set a numeric
        status first.
        """
        if self.must_close or self.req.should_close():
            return True
        if self.response_length is not None or self.chunked:
            return False
        if self.req.method == 'HEAD':
            return False
        if self.status_code < 200 or self.status_code in (204, 304):
            return False
        return True

    def start_response(self, status, headers, exc_info=None):
        """WSGI ``start_response`` implementation (PEP 3333).

        Returns the ``write`` callable as the spec requires.
        """
        if exc_info:
            try:
                # Per PEP 3333: if headers are already on the wire, the
                # error can no longer be reported -- re-raise it.
                if self.status and self.headers_sent:
                    util.reraise(exc_info[0], exc_info[1], exc_info[2])
            finally:
                exc_info = None  # break the traceback reference cycle
        elif self.status is not None:
            raise AssertionError("Response headers already set!")

        self.status = status

        # get the status code from the response here so we can use it to check
        # the need for the connection header later without parsing the string
        # each time.
        try:
            self.status_code = int(self.status.split()[0])
        except ValueError:
            self.status_code = None

        self.process_headers(headers)
        self.chunked = self.is_chunked()
        return self.write

    def process_headers(self, headers):
        """Validate and store application-supplied headers.

        Header names/values are checked against the token/value
        grammars; hop-by-hop headers are dropped because the server
        manages the connection itself.  Content-Length is remembered so
        the body can be capped in ``write``; Connection/Upgrade are
        inspected to detect a websocket upgrade.
        """
        for name, value in headers:
            if not isinstance(name, str):
                raise TypeError('%r is not a string' % name)

            if not TOKEN_RE.fullmatch(name):
                raise InvalidHeaderName('%r' % name)

            if not isinstance(value, str):
                raise TypeError('%r is not a string' % value)

            if not HEADER_VALUE_RE.fullmatch(value):
                raise InvalidHeader('%r' % value)

            # RFC9110 5.5
            value = value.strip(" \t")
            lname = name.lower()
            if lname == "content-length":
                # remember the declared body length (header still emitted
                # by the append below)
                self.response_length = int(value)
            elif util.is_hoppish(name):
                if lname == "connection":
                    # handle websocket
                    if value.lower() == "upgrade":
                        self.upgrade = True
                elif lname == "upgrade":
                    if value.lower() == "websocket":
                        # Upgrade: websocket is the one hop-by-hop header
                        # deliberately passed through
                        self.headers.append((name, value))

                # ignore hopbyhop headers
                continue
            self.headers.append((name, value))

    def is_chunked(self):
        """Return True if the body should use chunked transfer coding."""
        # Only use chunked responses when the client is
        # speaking HTTP/1.1 or newer and there was
        # no Content-Length header set.
        if self.response_length is not None:
            return False
        elif self.req.version <= (1, 0):
            return False
        elif self.req.method == 'HEAD':
            # Responses to a HEAD request MUST NOT contain a response body.
            return False
        elif self.status_code in (204, 304):
            # Do not use chunked responses when the response is guaranteed to
            # not have a response body.
            return False
        return True

    def default_headers(self):
        """Build the status line plus the server-managed headers."""
        # set the connection header
        if self.upgrade:
            connection = "upgrade"
        elif self.should_close():
            connection = "close"
        else:
            connection = "keep-alive"

        headers = [
            "HTTP/%s.%s %s\r\n" % (self.req.version[0],
                                   self.req.version[1], self.status),
            "Server: %s\r\n" % self.version,
            "Date: %s\r\n" % util.http_date(),
            "Connection: %s\r\n" % connection
        ]
        if self.chunked:
            headers.append("Transfer-Encoding: chunked\r\n")
        return headers

    def send_headers(self):
        """Write status line and headers to the socket exactly once."""
        if self.headers_sent:
            return
        tosend = self.default_headers()
        tosend.extend(["%s: %s\r\n" % (k, v) for k, v in self.headers])

        # header block is latin-1 on the wire per the WSGI spec
        header_str = "%s\r\n" % "".join(tosend)
        util.write(self.sock, util.to_bytestring(header_str, "latin-1"))
        self.headers_sent = True

    def write(self, arg):
        """WSGI ``write`` callable: send body bytes to the client.

        Never writes more than the declared Content-Length, and
        suppresses empty chunks (an empty chunk would terminate a
        chunked response prematurely).
        """
        self.send_headers()
        if not isinstance(arg, bytes):
            raise TypeError('%r is not a byte' % arg)
        arglen = len(arg)
        tosend = arglen
        if self.response_length is not None:
            if self.sent >= self.response_length:
                # Never write more than self.response_length bytes
                return

            tosend = min(self.response_length - self.sent, tosend)
            if tosend < arglen:
                arg = arg[:tosend]

        # Sending an empty chunk signals the end of the
        # response and prematurely closes the response
        if self.chunked and tosend == 0:
            return

        self.sent += tosend
        util.write(self.sock, arg, self.chunked)

    def can_sendfile(self):
        """Return True unless sendfile() is disabled by configuration."""
        return self.cfg.sendfile is not False

    def sendfile(self, respiter):
        """Try to send a file-like response body with os/socket sendfile.

        Returns True on success, False when the fast path cannot be
        used (SSL, disabled, no real fd, unseekable) so the caller can
        fall back to iterating the body.
        """
        if self.cfg.is_ssl or not self.can_sendfile():
            return False

        if not util.has_fileno(respiter.filelike):
            return False

        fileno = respiter.filelike.fileno()
        try:
            # send from the current offset; length defaults to the
            # remainder of the file when no Content-Length was declared
            offset = os.lseek(fileno, 0, os.SEEK_CUR)
            if self.response_length is None:
                filesize = os.fstat(fileno).st_size
                nbytes = filesize - offset
            else:
                nbytes = self.response_length
        except (OSError, io.UnsupportedOperation):
            return False

        self.send_headers()

        # a chunked body is emitted as one single chunk framing the file
        if self.is_chunked():
            chunk_size = "%X\r\n" % nbytes
            self.sock.sendall(chunk_size.encode('utf-8'))
        if nbytes > 0:
            self.sock.sendfile(respiter.filelike, offset=offset, count=nbytes)

        if self.is_chunked():
            self.sock.sendall(b"\r\n")

        # restore the file position sendfile() may have advanced
        os.lseek(fileno, offset, os.SEEK_SET)

        return True

    def write_file(self, respiter):
        """Send a wrapped file body, preferring the sendfile fast path."""
        if not self.sendfile(respiter):
            for item in respiter:
                self.write(item)

    def close(self):
        """Finish the response: flush headers and the final empty chunk."""
        if not self.headers_sent:
            self.send_headers()
        if self.chunked:
            util.write_chunk(self.sock, b"")
diff --git a/.venv/Lib/site-packages/gunicorn/instrument/__init__.py b/.venv/Lib/site-packages/gunicorn/instrument/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/.venv/Lib/site-packages/gunicorn/instrument/__pycache__/__init__.cpython-310.pyc b/.venv/Lib/site-packages/gunicorn/instrument/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..418affe88e6fddc99943f17afe315595d0fee433
Binary files /dev/null and b/.venv/Lib/site-packages/gunicorn/instrument/__pycache__/__init__.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/gunicorn/instrument/__pycache__/statsd.cpython-310.pyc b/.venv/Lib/site-packages/gunicorn/instrument/__pycache__/statsd.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..82bb3073ddf5e874cdb3c30bf75fbf384878079b
Binary files /dev/null and b/.venv/Lib/site-packages/gunicorn/instrument/__pycache__/statsd.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/gunicorn/instrument/statsd.py b/.venv/Lib/site-packages/gunicorn/instrument/statsd.py
new file mode 100644
index 0000000000000000000000000000000000000000..7bc4e6ffdfda6dd00ded1a2bfe907a9c0702671e
--- /dev/null
+++ b/.venv/Lib/site-packages/gunicorn/instrument/statsd.py
@@ -0,0 +1,134 @@
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+
+"Bare-bones implementation of statsD's protocol, client-side"
+
+import logging
+import socket
+from re import sub
+
+from gunicorn.glogging import Logger
+
+# Instrumentation constants
+METRIC_VAR = "metric"
+VALUE_VAR = "value"
+MTYPE_VAR = "mtype"
+GAUGE_TYPE = "gauge"
+COUNTER_TYPE = "counter"
+HISTOGRAM_TYPE = "histogram"
+
+
class Statsd(Logger):
    """statsD-based instrumentation, that passes as a logger

    Every log call is forwarded to the parent Logger; in addition,
    metrics carried in the ``extra`` dict (keys ``metric``, ``value``,
    ``mtype``) are emitted over a best-effort UDP/Unix datagram socket
    in the statsD wire format.
    """
    def __init__(self, cfg):
        Logger.__init__(self, cfg)
        # normalize the configured prefix so it ends with exactly one dot
        self.prefix = sub(r"^(.+[^.]+)\.*$", "\\g<1>.", cfg.statsd_prefix)

        # a string host designates a Unix domain socket path,
        # anything else (host, port tuple) is plain UDP
        if isinstance(cfg.statsd_host, str):
            address_family = socket.AF_UNIX
        else:
            address_family = socket.AF_INET

        try:
            self.sock = socket.socket(address_family, socket.SOCK_DGRAM)
            self.sock.connect(cfg.statsd_host)
        except Exception:
            # statsd is best-effort: with no socket every send is a no-op
            self.sock = None

        self.dogstatsd_tags = cfg.dogstatsd_tags

    # Log errors and warnings
    def critical(self, msg, *args, **kwargs):
        """Log at CRITICAL and count the event as a metric."""
        Logger.critical(self, msg, *args, **kwargs)
        self.increment("gunicorn.log.critical", 1)

    def error(self, msg, *args, **kwargs):
        """Log at ERROR and count the event as a metric."""
        Logger.error(self, msg, *args, **kwargs)
        self.increment("gunicorn.log.error", 1)

    def warning(self, msg, *args, **kwargs):
        """Log at WARNING and count the event as a metric."""
        Logger.warning(self, msg, *args, **kwargs)
        self.increment("gunicorn.log.warning", 1)

    def exception(self, msg, *args, **kwargs):
        """Log an exception and count the event as a metric."""
        Logger.exception(self, msg, *args, **kwargs)
        self.increment("gunicorn.log.exception", 1)

    # Special treatment for info, the most common log level
    def info(self, msg, *args, **kwargs):
        self.log(logging.INFO, msg, *args, **kwargs)

    # skip the run-of-the-mill logs
    def debug(self, msg, *args, **kwargs):
        self.log(logging.DEBUG, msg, *args, **kwargs)

    def log(self, lvl, msg, *args, **kwargs):
        """Log a given statistic if metric, value and type are present
        """
        try:
            extra = kwargs.get("extra", None)
            if extra is not None:
                metric = extra.get(METRIC_VAR, None)
                value = extra.get(VALUE_VAR, None)
                typ = extra.get(MTYPE_VAR, None)
                if metric and value and typ:
                    if typ == GAUGE_TYPE:
                        self.gauge(metric, value)
                    elif typ == COUNTER_TYPE:
                        self.increment(metric, value)
                    elif typ == HISTOGRAM_TYPE:
                        self.histogram(metric, value)
                    else:
                        # unknown metric type: silently ignored
                        pass

            # Log to parent logger only if there is something to say
            if msg:
                Logger.log(self, lvl, msg, *args, **kwargs)
        except Exception:
            Logger.warning(self, "Failed to log to statsd", exc_info=True)

    # access logging
    def access(self, resp, req, environ, request_time):
        """Measure request duration
        request_time is a datetime.timedelta
        """
        Logger.access(self, resp, req, environ, request_time)
        duration_in_ms = request_time.seconds * 1000 + float(request_time.microseconds) / 10 ** 3
        status = resp.status
        # resp.status may be bytes, a "200 OK" string, or already an int
        if isinstance(status, bytes):
            status = status.decode('utf-8')
        if isinstance(status, str):
            status = int(status.split(None, 1)[0])
        self.histogram("gunicorn.request.duration", duration_in_ms)
        self.increment("gunicorn.requests", 1)
        self.increment("gunicorn.request.status.%d" % status, 1)

    # statsD methods
    # you can use those directly if you want
    def gauge(self, name, value):
        """Emit a gauge (``|g``) datagram."""
        self._sock_send("{0}{1}:{2}|g".format(self.prefix, name, value))

    def increment(self, name, value, sampling_rate=1.0):
        """Emit a counter increment (``|c``) datagram."""
        self._sock_send("{0}{1}:{2}|c|@{3}".format(self.prefix, name, value, sampling_rate))

    def decrement(self, name, value, sampling_rate=1.0):
        """Emit a counter decrement (``|c`` with negated value) datagram."""
        self._sock_send("{0}{1}:-{2}|c|@{3}".format(self.prefix, name, value, sampling_rate))

    def histogram(self, name, value):
        """Emit a timing/histogram (``|ms``) datagram."""
        self._sock_send("{0}{1}:{2}|ms".format(self.prefix, name, value))

    def _sock_send(self, msg):
        """Best-effort send of one datagram; failures are only logged."""
        try:
            if isinstance(msg, str):
                msg = msg.encode("ascii")

            # http://docs.datadoghq.com/guides/dogstatsd/#datagram-format
            if self.dogstatsd_tags:
                msg = msg + b"|#" + self.dogstatsd_tags.encode('ascii')

            if self.sock:
                self.sock.send(msg)
        except Exception:
            Logger.warning(self, "Error sending message to statsd", exc_info=True)
diff --git a/.venv/Lib/site-packages/gunicorn/pidfile.py b/.venv/Lib/site-packages/gunicorn/pidfile.py
new file mode 100644
index 0000000000000000000000000000000000000000..b171f7d91a675972ce5b7ec744f7686cd45768be
--- /dev/null
+++ b/.venv/Lib/site-packages/gunicorn/pidfile.py
@@ -0,0 +1,85 @@
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+
+import errno
+import os
+import tempfile
+
+
class Pidfile:
    """\
    Manage a PID file. If a specific name is provided
    it and '"%s.oldpid" % name' will be used. Otherwise
    we create a temp file using os.mkstemp.
    """

    def __init__(self, fname):
        self.fname = fname
        self.pid = None

    def create(self, pid):
        """Record *pid* in the pidfile, refusing to clobber a live process."""
        running = self.validate()
        if running:
            if running == os.getpid():
                # our own pid is already recorded; nothing to do
                return
            raise RuntimeError(
                "Already running on PID %s (or pid file '%s' is stale)"
                % (running, self.fname))

        self.pid = pid

        # Write atomically: dump the pid to a temp file in the same
        # directory, then rename it into place.
        pid_dir = os.path.dirname(self.fname)
        if pid_dir and not os.path.isdir(pid_dir):
            raise RuntimeError("%s doesn't exist. Can't create pidfile." % pid_dir)
        fd, tmp_name = tempfile.mkstemp(dir=pid_dir)
        os.write(fd, ("%s\n" % self.pid).encode('utf-8'))
        if self.fname:
            os.rename(tmp_name, self.fname)
        else:
            # no name configured: keep the generated temp file
            self.fname = tmp_name
        os.close(fd)

        # set permissions to -rw-r--r--
        os.chmod(self.fname, 0o644)

    def rename(self, path):
        """Move the pidfile to *path*, rewriting the stored pid."""
        self.unlink()
        self.fname = path
        self.create(self.pid)

    def unlink(self):
        """Delete the pidfile, but only if it still records our own pid."""
        try:
            with open(self.fname) as fh:
                recorded = int(fh.read() or 0)
        except Exception:
            return
        if recorded == self.pid:
            try:
                os.unlink(self.fname)
            except Exception:
                pass

    def validate(self):
        """Return the live pid recorded in the file, or None if stale/absent."""
        if not self.fname:
            return
        try:
            fh = open(self.fname)
        except OSError as e:
            if e.args[0] == errno.ENOENT:
                # no pidfile at all
                return
            raise
        with fh:
            try:
                wpid = int(fh.read())
            except ValueError:
                # garbage content counts as stale
                return
            try:
                # signal 0 probes for existence without touching the process
                os.kill(wpid, 0)
            except OSError as e:
                if e.args[0] == errno.EPERM:
                    # alive, but owned by another user
                    return wpid
                if e.args[0] == errno.ESRCH:
                    # no such process: stale file
                    return
                raise
            return wpid
diff --git a/.venv/Lib/site-packages/gunicorn/reloader.py b/.venv/Lib/site-packages/gunicorn/reloader.py
new file mode 100644
index 0000000000000000000000000000000000000000..1c67f2a7dc3fd7f2983224dba889fd305da93686
--- /dev/null
+++ b/.venv/Lib/site-packages/gunicorn/reloader.py
@@ -0,0 +1,131 @@
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+# pylint: disable=no-else-continue
+
+import os
+import os.path
+import re
+import sys
+import time
+import threading
+
COMPILED_EXT_RE = re.compile(r'py[co]$')


class Reloader(threading.Thread):
    """Daemon thread that polls loaded source files for modification.

    When a watched file's mtime increases, ``callback(filename)`` is
    invoked.  The set of watched files is every imported module's
    source plus any explicitly registered extra files.
    """

    def __init__(self, extra_files=None, interval=1, callback=None):
        super().__init__()
        self.daemon = True
        self._extra_files = set(extra_files or ())
        self._interval = interval
        self._callback = callback

    def add_extra_file(self, filename):
        """Watch *filename* in addition to the imported modules."""
        self._extra_files.add(filename)

    def get_files(self):
        """Return source paths of all loaded modules plus the extras."""
        watched = [
            # map .pyc/.pyo cache paths back to the .py source
            COMPILED_EXT_RE.sub('py', mod.__file__)
            for mod in tuple(sys.modules.values())
            if getattr(mod, '__file__', None)
        ]
        watched.extend(self._extra_files)
        return watched

    def run(self):
        last_seen = {}
        while True:
            for path in self.get_files():
                try:
                    current = os.stat(path).st_mtime
                except OSError:
                    # file vanished or is unreadable; skip it this round
                    continue
                # first sighting just records the baseline mtime
                previous = last_seen.setdefault(path, current)
                if current > previous and self._callback:
                    self._callback(path)
            time.sleep(self._interval)
+
+
# Probe for the optional third-party `inotify` package; it is only
# usable on Linux, so skip the import attempt elsewhere.
has_inotify = False
if sys.platform.startswith('linux'):
    try:
        from inotify.adapters import Inotify
        import inotify.constants
        has_inotify = True
    except ImportError:
        pass


if has_inotify:

    class InotifyReloader(threading.Thread):
        """Daemon thread that reloads on kernel inotify events instead
        of polling.  Watches the directories containing every loaded
        module's source plus any registered extra files.
        """

        # any event that could change a source file's content or identity
        event_mask = (inotify.constants.IN_CREATE | inotify.constants.IN_DELETE
                      | inotify.constants.IN_DELETE_SELF | inotify.constants.IN_MODIFY
                      | inotify.constants.IN_MOVE_SELF | inotify.constants.IN_MOVED_FROM
                      | inotify.constants.IN_MOVED_TO)

        def __init__(self, extra_files=None, callback=None):
            super().__init__()
            self.daemon = True
            self._callback = callback
            self._dirs = set()
            self._watcher = Inotify()

            for extra_file in extra_files:
                self.add_extra_file(extra_file)

        def add_extra_file(self, filename):
            """Watch the directory containing *filename* (deduplicated)."""
            dirname = os.path.dirname(filename)

            if dirname in self._dirs:
                return

            self._watcher.add_watch(dirname, mask=self.event_mask)
            self._dirs.add(dirname)

        def get_dirs(self):
            """Return the set of directories holding loaded module sources."""
            fnames = [
                os.path.dirname(os.path.abspath(COMPILED_EXT_RE.sub('py', module.__file__)))
                for module in tuple(sys.modules.values())
                if getattr(module, '__file__', None)
            ]

            return set(fnames)

        def run(self):
            self._dirs = self.get_dirs()

            for dirname in self._dirs:
                if os.path.isdir(dirname):
                    self._watcher.add_watch(dirname, mask=self.event_mask)

            # event_gen() yields None on timeout; real events are tuples
            # whose fourth element is the affected filename
            for event in self._watcher.event_gen():
                if event is None:
                    continue

                filename = event[3]

                self._callback(filename)

else:

    class InotifyReloader:
        """Placeholder that fails loudly when inotify is unavailable."""
        def __init__(self, extra_files=None, callback=None):
            raise ImportError('You must have the inotify module installed to '
                              'use the inotify reloader')


# 'auto' prefers inotify when available, falling back to polling
preferred_reloader = InotifyReloader if has_inotify else Reloader

reloader_engines = {
    'auto': preferred_reloader,
    'poll': Reloader,
    'inotify': InotifyReloader,
}
diff --git a/.venv/Lib/site-packages/gunicorn/sock.py b/.venv/Lib/site-packages/gunicorn/sock.py
new file mode 100644
index 0000000000000000000000000000000000000000..eb2b6fa9c6b62bb9266954cd94d29d1cf58d5de3
--- /dev/null
+++ b/.venv/Lib/site-packages/gunicorn/sock.py
@@ -0,0 +1,231 @@
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+
+import errno
+import os
+import socket
+import ssl
+import stat
+import sys
+import time
+
+from gunicorn import util
+
+
class BaseSocket:
    """Common behaviour for gunicorn listener sockets.

    Subclasses must define ``FAMILY`` (the address family) and may
    override ``set_options``/``bind``.  Unknown attribute access is
    delegated to the wrapped ``socket.socket`` object.
    """

    def __init__(self, address, conf, log, fd=None):
        self.log = log
        self.conf = conf

        self.cfg_addr = address
        if fd is None:
            sock = socket.socket(self.FAMILY, socket.SOCK_STREAM)
            bound = False
        else:
            # inherited fd (systemd activation / re-exec): duplicate it
            # into a socket object and close the original descriptor
            sock = socket.fromfd(fd, self.FAMILY, socket.SOCK_STREAM)
            os.close(fd)
            bound = True

        self.sock = self.set_options(sock, bound=bound)

    def __str__(self):
        return "<socket %d>" % self.sock.fileno()

    def __getattr__(self, name):
        # delegate everything else (accept, fileno, ...) to the raw socket
        return getattr(self.sock, name)

    def set_options(self, sock, bound=False):
        """Apply listener options, bind if needed, and start listening.

        :param bound: True when the socket was inherited already bound,
            in which case bind() is skipped.
        :return: the configured socket
        """
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        if (self.conf.reuse_port
                and hasattr(socket, 'SO_REUSEPORT')):  # pragma: no cover
            try:
                sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
            except OSError as err:
                # some platforms expose the constant but reject the option
                if err.errno not in (errno.ENOPROTOOPT, errno.EINVAL):
                    raise
        if not bound:
            self.bind(sock)
        sock.setblocking(0)

        # make sure that the socket can be inherited
        if hasattr(sock, "set_inheritable"):
            sock.set_inheritable(True)

        sock.listen(self.conf.backlog)
        return sock

    def bind(self, sock):
        """Bind to the configured address (overridden by UnixSocket)."""
        sock.bind(self.cfg_addr)

    def close(self):
        """Close the listener; safe to call more than once."""
        if self.sock is None:
            return

        try:
            self.sock.close()
        except OSError as e:
            self.log.info("Error while closing socket %s", str(e))

        self.sock = None
+
+
class TCPSocket(BaseSocket):
    """IPv4 TCP listener."""

    FAMILY = socket.AF_INET

    def __str__(self):
        scheme = "https" if self.conf.is_ssl else "http"
        host, port = self.sock.getsockname()[:2]
        return "%s://%s:%d" % (scheme, host, port)

    def set_options(self, sock, bound=False):
        # disable Nagle's algorithm before applying the generic options
        sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
        return super().set_options(sock, bound=bound)
+
+
class TCP6Socket(TCPSocket):
    """IPv6 TCP listener; the address is bracketed in the display form."""

    FAMILY = socket.AF_INET6

    def __str__(self):
        host, port = self.sock.getsockname()[:2]
        return "http://[%s]:%d" % (host, port)
+
+
class UnixSocket(BaseSocket):
    """Unix domain socket listener bound to a filesystem path."""

    FAMILY = socket.AF_UNIX

    def __init__(self, addr, conf, log, fd=None):
        if fd is None:
            # Remove a stale socket file left by a previous run, but
            # refuse to clobber a path that is not a socket.
            try:
                st = os.stat(addr)
            except OSError as e:
                if e.args[0] != errno.ENOENT:
                    raise
            else:
                if stat.S_ISSOCK(st.st_mode):
                    os.remove(addr)
                else:
                    raise ValueError("%r is not a socket" % addr)
        super().__init__(addr, conf, log, fd=fd)

    def __str__(self):
        return "unix:%s" % self.cfg_addr

    def bind(self, sock):
        # create the socket file under the configured umask, then hand
        # ownership to the configured uid/gid
        old_umask = os.umask(self.conf.umask)
        sock.bind(self.cfg_addr)
        util.chown(self.cfg_addr, self.conf.uid, self.conf.gid)
        os.umask(old_umask)
+
+
def _sock_type(addr):
    """Map a bind address to the matching listener class.

    Tuples are TCP endpoints (v4 or v6 depending on the host part);
    str/bytes are Unix socket paths.  Anything else is rejected.
    """
    if isinstance(addr, tuple):
        return TCP6Socket if util.is_ipv6(addr[0]) else TCPSocket
    if isinstance(addr, (str, bytes)):
        return UnixSocket
    raise TypeError("Unable to create socket from: %r" % addr)
+
+
def create_sockets(conf, log, fds=None):
    """
    Create a new socket for the configured addresses or file descriptors.

    If a configured address is a tuple then a TCP socket is created.
    If it is a string, a Unix socket is created. Otherwise, a TypeError is
    raised.

    :param conf: gunicorn Config object supplying address/ssl settings
    :param log: logger used to report bind failures
    :param fds: optional iterable of already-bound file descriptors
        (e.g. inherited via systemd socket activation)
    :return: list of listener socket objects; exits the process when a
        configured address cannot be bound after 5 attempts
    """
    listeners = []

    # get it only once
    addr = conf.address
    # addresses given as plain ints are inherited, already-bound fds
    fdaddr = [bind for bind in addr if isinstance(bind, int)]
    if fds:
        fdaddr += list(fds)
    laddr = [bind for bind in addr if not isinstance(bind, int)]

    # check ssl config early to raise the error on startup
    # only the certfile is needed since it can contain the keyfile
    if conf.certfile and not os.path.exists(conf.certfile):
        raise ValueError('certfile "%s" does not exist' % conf.certfile)

    if conf.keyfile and not os.path.exists(conf.keyfile):
        raise ValueError('keyfile "%s" does not exist' % conf.keyfile)

    # sockets are already bound
    if fdaddr:
        for fd in fdaddr:
            sock = socket.fromfd(fd, socket.AF_UNIX, socket.SOCK_STREAM)
            sock_name = sock.getsockname()
            sock_type = _sock_type(sock_name)
            listener = sock_type(sock_name, conf, log, fd=fd)
            listeners.append(listener)

        return listeners

    # no sockets are bound, first initialization of gunicorn in this env.
    for addr in laddr:
        sock_type = _sock_type(addr)
        sock = None
        for i in range(5):
            try:
                sock = sock_type(addr, conf, log)
            except OSError as e:
                if e.args[0] == errno.EADDRINUSE:
                    log.error("Connection in use: %s", str(addr))
                if e.args[0] == errno.EADDRNOTAVAIL:
                    log.error("Invalid address: %s", str(addr))
                msg = "connection to {addr} failed: {error}"
                log.error(msg.format(addr=str(addr), error=str(e)))
                # Only sleep and announce a retry when another attempt
                # actually remains.  The previous `i < 5` was always true
                # inside `range(5)`, causing a useless 1 second sleep and
                # a misleading "Retrying" message after the final attempt.
                if i < 4:
                    log.debug("Retrying in 1 second.")
                    time.sleep(1)
            else:
                break

        if sock is None:
            log.error("Can't connect to %s", str(addr))
            sys.exit(1)

        listeners.append(sock)

    return listeners
+
+
def close_sockets(listeners, unlink=True):
    """Close every listener; optionally remove Unix socket files."""
    for listener in listeners:
        name = listener.getsockname()
        listener.close()
        if unlink and _sock_type(name) is UnixSocket:
            os.unlink(name)
+
+
def ssl_context(conf):
    """Build the server-side SSLContext for *conf*.

    The user-configurable ``conf.ssl_context`` hook receives the config
    and a default factory, so applications can customize or replace the
    context entirely.
    """
    def default_ssl_context_factory():
        ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH, cafile=conf.ca_certs)
        ctx.load_cert_chain(certfile=conf.certfile, keyfile=conf.keyfile)
        ctx.verify_mode = conf.cert_reqs
        if conf.ciphers:
            ctx.set_ciphers(conf.ciphers)
        return ctx

    return conf.ssl_context(conf, default_ssl_context_factory)
+
+
def ssl_wrap_socket(sock, conf):
    """Wrap an accepted client socket with the configured SSL context."""
    context = ssl_context(conf)
    return context.wrap_socket(
        sock,
        server_side=True,
        suppress_ragged_eofs=conf.suppress_ragged_eofs,
        do_handshake_on_connect=conf.do_handshake_on_connect,
    )
diff --git a/.venv/Lib/site-packages/gunicorn/systemd.py b/.venv/Lib/site-packages/gunicorn/systemd.py
new file mode 100644
index 0000000000000000000000000000000000000000..9b1855060c238be87f30e9d881e850c56c08aff8
--- /dev/null
+++ b/.venv/Lib/site-packages/gunicorn/systemd.py
@@ -0,0 +1,75 @@
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+
+import os
+import socket
+
+SD_LISTEN_FDS_START = 3
+
+
def listen_fds(unset_environment=True):
    """
    Get the number of sockets inherited from systemd socket activation.

    :param unset_environment: clear systemd environment variables unless False
    :type unset_environment: bool
    :return: the number of sockets to inherit from systemd socket activation
    :rtype: int

    Returns zero immediately if $LISTEN_PID is not set to the current pid.
    Otherwise, returns the number of systemd activation sockets specified by
    $LISTEN_FDS.

    When $LISTEN_PID matches the current pid, unsets the environment variables
    unless the ``unset_environment`` flag is ``False``.

    .. note::
        Unlike the sd_listen_fds C function, this implementation does not set
        the FD_CLOEXEC flag because the gunicorn arbiter never needs to do this.

    .. seealso::
        `<https://www.freedesktop.org/software/systemd/man/sd_listen_fds.html>`_

    """
    count = int(os.environ.get('LISTEN_FDS', 0))
    target_pid = int(os.environ.get('LISTEN_PID', 0))

    # the variables are addressed to a different (or no) process
    if target_pid != os.getpid():
        return 0

    if unset_environment:
        for var in ('LISTEN_PID', 'LISTEN_FDS'):
            os.environ.pop(var, None)

    return count
+
+
def sd_notify(state, logger, unset_environment=False):
    """Send a notification to systemd. state is a string; see
    the man page of sd_notify (http://www.freedesktop.org/software/systemd/man/sd_notify.html)
    for a description of the allowable values.

    If the unset_environment parameter is True, sd_notify() will unset
    the $NOTIFY_SOCKET environment variable before returning (regardless of
    whether the function call itself succeeded or not). Further calls to
    sd_notify() will then fail, but the variable is no longer inherited by
    child processes.

    :param state: notification string, e.g. "READY=1"
    :param logger: logger used to report (debug-level) send failures
    :param unset_environment: pop $NOTIFY_SOCKET before returning
    """

    addr = os.environ.get('NOTIFY_SOCKET')
    if addr is None:
        # not run in a service, just a noop
        return
    # Initialize before the try block: if socket() itself raises (e.g.
    # AF_UNIX or SOCK_CLOEXEC unavailable), the original code hit a
    # NameError on `sock.close()` in the finally clause.
    sock = None
    try:
        sock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM | socket.SOCK_CLOEXEC)
        if addr[0] == '@':
            # abstract namespace socket: a leading '@' maps to a NUL byte
            addr = '\0' + addr[1:]
        sock.connect(addr)
        sock.sendall(state.encode('utf-8'))
    except Exception:
        # notification is best-effort; never break the caller
        logger.debug("Exception while invoking sd_notify()", exc_info=True)
    finally:
        if unset_environment:
            os.environ.pop('NOTIFY_SOCKET')
        if sock is not None:
            sock.close()
diff --git a/.venv/Lib/site-packages/gunicorn/util.py b/.venv/Lib/site-packages/gunicorn/util.py
new file mode 100644
index 0000000000000000000000000000000000000000..ecd81747212f737a9a40bc6b78d96dbbfc83db66
--- /dev/null
+++ b/.venv/Lib/site-packages/gunicorn/util.py
@@ -0,0 +1,653 @@
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+import ast
+import email.utils
+import errno
+import fcntl
+import html
+import importlib
+import inspect
+import io
+import logging
+import os
+import pwd
+import random
+import re
+import socket
+import sys
+import textwrap
+import time
+import traceback
+import warnings
+
+try:
+    import importlib.metadata as importlib_metadata
+except (ModuleNotFoundError, ImportError):
+    import importlib_metadata
+
+from gunicorn.errors import AppImportError
+from gunicorn.workers import SUPPORTED_WORKERS
+import urllib.parse
+
# Target for detached stdio streams; os.devnull exists on all supported
# platforms, '/dev/null' is a defensive fallback.
REDIRECT_TO = getattr(os, 'devnull', '/dev/null')

# Server and Date aren't technically hop-by-hop
# headers, but they are in the purview of the
# origin server which the WSGI spec says we should
# act like. So we drop them and add our own.
#
# In the future, concatenation server header values
# might be better, but nothing else does it and
# dropping them is easier.
hop_headers = set("""
    connection keep-alive proxy-authenticate proxy-authorization
    te trailers transfer-encoding upgrade
    server date
    """.split())
+
try:
    from setproctitle import setproctitle

    def _setproctitle(title):
        # Prefix with "gunicorn: " so processes are easy to spot in ps/top.
        setproctitle("gunicorn: %s" % title)
except ImportError:
    # setproctitle is an optional dependency; without it this is a no-op.
    def _setproctitle(title):
        pass
+
+
def load_entry_point(distribution, group, name):
    """Load and return the entry point *name* from *group* of *distribution*.

    :raises ImportError: if no matching entry point exists.
    """
    dist_obj = importlib_metadata.distribution(distribution)
    matches = [
        ep for ep in dist_obj.entry_points
        if ep.group == group and ep.name == name
    ]
    if not matches:
        raise ImportError("Entry point %r not found" % ((group, name),))
    return matches[0].load()
+
+
def load_class(uri, default="gunicorn.workers.sync.SyncWorker",
               section="gunicorn.workers"):
    """Resolve *uri* to a class object.

    Accepts an already-imported class, an ``egg:dist#name`` entry-point
    reference, a bare worker alias or entry-point name (e.g. ``"gevent"``),
    or a dotted ``module.Class`` path.

    :raises RuntimeError: if the uri cannot be resolved.
    """
    if inspect.isclass(uri):
        return uri
    if uri.startswith("egg:"):
        # uses entry points
        entry_str = uri.split("egg:")[1]
        try:
            dist, name = entry_str.rsplit("#", 1)
        except ValueError:
            # no "#name" part given: fall back to the default worker path
            dist = entry_str
            name = default

        try:
            return load_entry_point(dist, section, name)
        except Exception:
            exc = traceback.format_exc()
            msg = "class uri %r invalid or not found: \n\n[%s]"
            raise RuntimeError(msg % (uri, exc))
    else:
        components = uri.split('.')
        if len(components) == 1:
            # single token: try the built-in worker aliases first, then
            # gunicorn's own entry points
            while True:
                if uri.startswith("#"):
                    uri = uri[1:]

                if uri in SUPPORTED_WORKERS:
                    components = SUPPORTED_WORKERS[uri].split(".")
                    break

                try:
                    return load_entry_point(
                        "gunicorn", section, uri
                    )
                except Exception:
                    exc = traceback.format_exc()
                    msg = "class uri %r invalid or not found: \n\n[%s]"
                    raise RuntimeError(msg % (uri, exc))

        klass = components.pop(-1)

        try:
            mod = importlib.import_module('.'.join(components))
        except Exception:
            exc = traceback.format_exc()
            msg = "class uri %r invalid or not found: \n\n[%s]"
            raise RuntimeError(msg % (uri, exc))
        return getattr(mod, klass)
+
+
# Parameter kinds that can be filled positionally.
positionals = (
    inspect.Parameter.POSITIONAL_ONLY,
    inspect.Parameter.POSITIONAL_OR_KEYWORD,
)


def get_arity(f):
    """Return the number of positional parameters accepted by *f*."""
    signature = inspect.signature(f)
    return sum(
        1 for param in signature.parameters.values()
        if param.kind in positionals
    )
+
+
def get_username(uid):
    """Return the username for user id *uid*.

    :raises KeyError: if the uid is not in the password database.
    """
    return pwd.getpwuid(uid).pw_name


def set_owner_process(uid, gid, initgroups=False):
    """Set the user and group of the worker processes.

    :param uid: numeric uid to switch to (falsy values mean "leave as-is")
    :param gid: numeric gid to switch to (falsy values mean "leave as-is")
    :param initgroups: also initialize supplementary groups for the user
    """

    if gid:
        # BUGFIX: track the resolved username explicitly. Previously, if
        # initgroups was True but uid was falsy (or the lookup failed),
        # `username` was never bound and os.initgroups raised NameError.
        username = None
        if uid:
            try:
                username = get_username(uid)
            except KeyError:
                pass

        # versions of python < 2.6.2 don't manage unsigned int for
        # groups like on osx or fedora
        gid = abs(gid) & 0x7FFFFFFF

        if initgroups and username is not None:
            os.initgroups(username, gid)
        elif gid != os.getgid():
            os.setgid(gid)

    if uid and uid != os.getuid():
        os.setuid(uid)
+
+
def chown(path, uid, gid):
    """Change ownership of *path* to *uid*:*gid* (thin os.chown wrapper)."""
    os.chown(path, uid, gid)
+
+
if sys.platform.startswith("win"):
    def _waitfor(func, pathname, waitall=False):
        """Call ``func(pathname)`` and wait for the deletion to be visible.

        On Windows a delete may stay pending while other handles to the
        file are open; poll the parent directory with exponential backoff
        (~1 second total) until the entry disappears.
        """
        # Perform the operation
        func(pathname)
        # Now setup the wait loop
        if waitall:
            dirname = pathname
        else:
            dirname, name = os.path.split(pathname)
            dirname = dirname or '.'
        # Check for `pathname` to be removed from the filesystem.
        # The exponential backoff of the timeout amounts to a total
        # of ~1 second after which the deletion is probably an error
        # anyway.
        # Testing on a i7@4.3GHz shows that usually only 1 iteration is
        # required when contention occurs.
        timeout = 0.001
        while timeout < 1.0:
            # Note we are only testing for the existence of the file(s) in
            # the contents of the directory regardless of any security or
            # access rights.  If we have made it this far, we have sufficient
            # permissions to do that much using Python's equivalent of the
            # Windows API FindFirstFile.
            # Other Windows APIs can fail or give incorrect results when
            # dealing with files that are pending deletion.
            L = os.listdir(dirname)
            # BUGFIX: parenthesize the conditional expression. Without the
            # parentheses this parsed as `(not L) if waitall else (name in L)`,
            # which in the non-waitall case returned while the entry was
            # still present instead of waiting for it to disappear.
            if not (L if waitall else name in L):
                return
            # Increase the timeout and try again
            time.sleep(timeout)
            timeout *= 2
        warnings.warn('tests may fail, delete still pending for ' + pathname,
                      RuntimeWarning, stacklevel=4)

    def _unlink(filename):
        _waitfor(os.unlink, filename)
else:
    # POSIX unlinks take effect immediately; no wait loop needed.
    _unlink = os.unlink


def unlink(filename):
    """Remove *filename*, ignoring the file already being absent."""
    try:
        _unlink(filename)
    except OSError as error:
        # The filename need not exist.
        if error.errno not in (errno.ENOENT, errno.ENOTDIR):
            raise
+
+
def is_ipv6(addr):
    """Return True if *addr* is a parseable IPv6 address."""
    try:
        socket.inet_pton(socket.AF_INET6, addr)
        return True
    # OSError: not a valid address; ValueError: ipv6 unsupported here
    except (OSError, ValueError):
        return False
+
+
def parse_address(netloc, default_port='8000'):
    """Parse a bind-address string.

    Returns a filesystem path for ``unix:`` addresses, an int for
    ``fd://`` addresses, and a ``(host, port)`` tuple (host lowercased,
    port as int) otherwise.

    :raises RuntimeError: for an unparseable fd or port value.
    """
    # unix:PATH or unix://PATH -> the socket path
    if re.match(r'unix:(//)?', netloc):
        return re.split(r'unix:(//)?', netloc)[-1]

    # fd://N -> an inherited file descriptor number
    if netloc.startswith("fd://"):
        raw = netloc[5:]
        try:
            return int(raw)
        except ValueError:
            raise RuntimeError("%r is not a valid file descriptor." % raw) from None

    if netloc.startswith("tcp://"):
        netloc = netloc.split("tcp://")[1]

    host, port = netloc, default_port
    if '[' in netloc and ']' in netloc:
        # bracketed IPv6 literal, optionally "[addr]:port"
        host = netloc.split(']')[0][1:]
        port = (netloc.split(']:') + [default_port])[1]
    elif ':' in netloc:
        host, port = (netloc.split(':') + [default_port])[:2]
    elif netloc == "":
        host, port = "0.0.0.0", default_port

    try:
        port = int(port)
    except ValueError:
        raise RuntimeError("%r is not a valid port number." % port)

    return host.lower(), port
+
+
def close_on_exec(fd):
    """Mark *fd* so it is closed automatically across exec()."""
    current = fcntl.fcntl(fd, fcntl.F_GETFD)
    fcntl.fcntl(fd, fcntl.F_SETFD, current | fcntl.FD_CLOEXEC)
+
+
def set_non_blocking(fd):
    """Put *fd* into non-blocking mode."""
    current = fcntl.fcntl(fd, fcntl.F_GETFL)
    fcntl.fcntl(fd, fcntl.F_SETFL, current | os.O_NONBLOCK)
+
+
def close(sock):
    """Close *sock*, swallowing OS-level errors (e.g. already closed)."""
    try:
        sock.close()
    except OSError:
        pass
+
+
try:
    from os import closerange
except ImportError:
    # Fallback for platforms whose os module lacks closerange().
    def closerange(fd_low, fd_high):
        # Iterate through and close all file descriptors.
        for fd in range(fd_low, fd_high):
            try:
                os.close(fd)
            except OSError:  # ERROR, fd wasn't open to begin with (ignored)
                pass
+
+
def write_chunk(sock, data):
    """Send *data* on *sock* as one HTTP/1.1 chunk (hex size, payload, CRLF)."""
    payload = data.encode('utf-8') if isinstance(data, str) else data
    size_line = ("%X\r\n" % len(payload)).encode('utf-8')
    sock.sendall(b"".join([size_line, payload, b"\r\n"]))
+
+
def write(sock, data, chunked=False):
    """Send *data* on *sock*, chunk-encoded when *chunked* is true."""
    if not chunked:
        sock.sendall(data)
        return
    return write_chunk(sock, data)
+
+
def write_nonblock(sock, data, chunked=False):
    """Write *data* to *sock* with blocking temporarily disabled.

    If the socket is not already non-blocking (timeout != 0.0), it is
    switched to non-blocking for the send and set back to blocking
    afterwards; otherwise the write happens directly.
    """
    timeout = sock.gettimeout()
    if timeout != 0.0:
        try:
            sock.setblocking(0)
            return write(sock, data, chunked)
        finally:
            sock.setblocking(1)
    else:
        return write(sock, data, chunked)
+
+
def write_error(sock, status_int, reason, mesg):
    """Write a complete minimal HTML error response directly to *sock*.

    *mesg* is HTML-escaped before being embedded in the response body.
    The response is sent non-blocking and always closes the connection.
    """
    html_error = textwrap.dedent("""\
    <html>
      <head>
        <title>%(reason)s</title>
      </head>
      <body>
        <h1><p>%(reason)s</p></h1>
        %(mesg)s
      </body>
    </html>
    """) % {"reason": reason, "mesg": html.escape(mesg)}

    http = textwrap.dedent("""\
    HTTP/1.1 %s %s\r
    Connection: close\r
    Content-Type: text/html\r
    Content-Length: %d\r
    \r
    %s""") % (str(status_int), reason, len(html_error), html_error)
    write_nonblock(sock, http.encode('latin1'))
+
+
+def _called_with_wrong_args(f):
+    """Check whether calling a function raised a ``TypeError`` because
+    the call failed or because something in the function raised the
+    error.
+
+    :param f: The function that was called.
+    :return: ``True`` if the call failed.
+    """
+    tb = sys.exc_info()[2]
+
+    try:
+        while tb is not None:
+            if tb.tb_frame.f_code is f.__code__:
+                # In the function, it was called successfully.
+                return False
+
+            tb = tb.tb_next
+
+        # Didn't reach the function.
+        return True
+    finally:
+        # Delete tb to break a circular reference in Python 2.
+        # https://docs.python.org/2/library/sys.html#sys.exc_info
+        del tb
+
+
def import_app(module):
    """Import and return the WSGI application referenced by *module*.

    *module* has the form ``"pkg.mod"`` or ``"pkg.mod:obj"`` where ``obj``
    may be a plain attribute name or a factory call with literal arguments,
    e.g. ``"app:create_app('prod')"``. The default attribute name is
    ``"application"``.

    :raises AppImportError: when the object cannot be resolved, called,
        or is not callable.
    """
    parts = module.split(":", 1)
    if len(parts) == 1:
        # no ":obj" part given: use the conventional attribute name
        obj = "application"
    else:
        module, obj = parts[0], parts[1]

    try:
        mod = importlib.import_module(module)
    except ImportError:
        # common mistake: passing a file path instead of a module path
        if module.endswith(".py") and os.path.exists(module):
            msg = "Failed to find application, did you mean '%s:%s'?"
            raise ImportError(msg % (module.rsplit(".", 1)[0], obj))
        raise

    # Parse obj as a single expression to determine if it's a valid
    # attribute name or function call.
    try:
        expression = ast.parse(obj, mode="eval").body
    except SyntaxError:
        raise AppImportError(
            "Failed to parse %r as an attribute name or function call." % obj
        )

    if isinstance(expression, ast.Name):
        name = expression.id
        args = kwargs = None
    elif isinstance(expression, ast.Call):
        # Ensure the function name is an attribute name only.
        if not isinstance(expression.func, ast.Name):
            raise AppImportError("Function reference must be a simple name: %r" % obj)

        name = expression.func.id

        # Parse the positional and keyword arguments as literals.
        try:
            args = [ast.literal_eval(arg) for arg in expression.args]
            kwargs = {kw.arg: ast.literal_eval(kw.value) for kw in expression.keywords}
        except ValueError:
            # literal_eval gives cryptic error messages, show a generic
            # message with the full expression instead.
            raise AppImportError(
                "Failed to parse arguments as literal values: %r" % obj
            )
    else:
        raise AppImportError(
            "Failed to parse %r as an attribute name or function call." % obj
        )

    is_debug = logging.root.level == logging.DEBUG
    try:
        app = getattr(mod, name)
    except AttributeError:
        if is_debug:
            traceback.print_exception(*sys.exc_info())
        raise AppImportError("Failed to find attribute %r in %r." % (name, module))

    # If the expression was a function call, call the retrieved object
    # to get the real application.
    if args is not None:
        try:
            app = app(*args, **kwargs)
        except TypeError as e:
            # If the TypeError was due to bad arguments to the factory
            # function, show Python's nice error message without a
            # traceback.
            if _called_with_wrong_args(app):
                raise AppImportError(
                    "".join(traceback.format_exception_only(TypeError, e)).strip()
                )

            # Otherwise it was raised from within the function, show the
            # full traceback.
            raise

    if app is None:
        raise AppImportError("Failed to find application object: %r" % obj)

    if not callable(app):
        raise AppImportError("Application object must be callable.")
    return app
+
+
def getcwd():
    """Return the current working directory.

    Prefers $PWD (which preserves symlinked paths) when it refers to the
    same directory as os.getcwd(); falls back to os.getcwd() otherwise.
    """
    cwd = os.getcwd()
    try:
        env_pwd = os.environ['PWD']
        pwd_stat = os.stat(env_pwd)
        cwd_stat = os.stat(cwd)
        if (pwd_stat.st_ino, pwd_stat.st_dev) == (cwd_stat.st_ino, cwd_stat.st_dev):
            cwd = env_pwd
    except Exception:
        # missing $PWD or unstat-able path: keep os.getcwd()
        pass
    return cwd
+
+
def http_date(timestamp=None):
    """Format *timestamp* (seconds since epoch; defaults to now) as an
    RFC 7231 HTTP date string, e.g. ``Thu, 01 Jan 1970 00:00:00 GMT``."""
    when = time.time() if timestamp is None else timestamp
    return email.utils.formatdate(when, localtime=False, usegmt=True)
+
+
def is_hoppish(header):
    """Return True if *header* is in the drop list (hop-by-hop headers plus
    Server/Date, which gunicorn sets itself)."""
    return header.lower().strip() in hop_headers
+
+
def daemonize(enable_stdio_inheritance=False):
    """\
    Standard daemonization of a process.
    http://www.faqs.org/faqs/unix-faq/programmer/faq/ section 1.7

    Double-forks, starts a new session and detaches stdio. With
    *enable_stdio_inheritance*, stdout/stderr shell redirections are
    preserved instead of being pointed at /dev/null. Skipped entirely when
    GUNICORN_FD is set (sockets were inherited — presumably a re-exec;
    the process is already detached).
    """
    if 'GUNICORN_FD' not in os.environ:
        # first fork: parent exits so the child is not a process group leader
        if os.fork():
            os._exit(0)
        os.setsid()

        # second fork: ensure the daemon can never re-acquire a terminal
        if os.fork():
            os._exit(0)

        os.umask(0o22)

        # In both the following any file descriptors above stdin
        # stdout and stderr are left untouched. The inheritance
        # option simply allows one to have output go to a file
        # specified by way of shell redirection when not wanting
        # to use --error-log option.

        if not enable_stdio_inheritance:
            # Remap all of stdin, stdout and stderr on to
            # /dev/null. The expectation is that users have
            # specified the --error-log option.

            closerange(0, 3)

            fd_null = os.open(REDIRECT_TO, os.O_RDWR)
            # PEP 446, make fd for /dev/null inheritable
            os.set_inheritable(fd_null, True)

            # expect fd_null to be always 0 here, but in-case not ...
            if fd_null != 0:
                os.dup2(fd_null, 0)

            os.dup2(fd_null, 1)
            os.dup2(fd_null, 2)

        else:
            fd_null = os.open(REDIRECT_TO, os.O_RDWR)

            # Always redirect stdin to /dev/null as we would
            # never expect to need to read interactive input.

            if fd_null != 0:
                os.close(0)
                os.dup2(fd_null, 0)

            # If stdout and stderr are still connected to
            # their original file descriptors we check to see
            # if they are associated with terminal devices.
            # When they are we map them to /dev/null so that
            # are still detached from any controlling terminal
            # properly. If not we preserve them as they are.
            #
            # If stdin and stdout were not hooked up to the
            # original file descriptors, then all bets are
            # off and all we can really do is leave them as
            # they were.
            #
            # This will allow 'gunicorn ... > output.log 2>&1'
            # to work with stdout/stderr going to the file
            # as expected.
            #
            # Note that if using --error-log option, the log
            # file specified through shell redirection will
            # only be used up until the log file specified
            # by the option takes over. As it replaces stdout
            # and stderr at the file descriptor level, then
            # anything using stdout or stderr, including having
            # cached a reference to them, will still work.

            def redirect(stream, fd_expect):
                # only remap a stream that still sits on its original fd
                # AND is attached to a terminal; otherwise leave it alone
                try:
                    fd = stream.fileno()
                    if fd == fd_expect and stream.isatty():
                        os.close(fd)
                        os.dup2(fd_null, fd)
                except AttributeError:
                    pass

            redirect(sys.stdout, 1)
            redirect(sys.stderr, 2)
+
+
def seed():
    """Reseed the PRNG from OS entropy, falling back to time and pid."""
    try:
        random.seed(os.urandom(64))
    except NotImplementedError:
        # no OS entropy source available on this platform
        random.seed('%s.%s' % (time.time(), os.getpid()))
+
+
def check_is_writable(path):
    """Raise RuntimeError if *path* cannot be opened for appending."""
    try:
        handle = open(path, 'a')
    except OSError as e:
        raise RuntimeError("Error: '%s' isn't writable [%r]" % (path, e))
    handle.close()
+
+
def to_bytestring(value, encoding="utf8"):
    """Converts a string argument to a byte string.

    :raises TypeError: if *value* is neither str nor bytes.
    """
    if isinstance(value, str):
        return value.encode(encoding)
    if isinstance(value, bytes):
        return value
    raise TypeError('%r is not a string' % value)
+
+
def has_fileno(obj):
    """Return True if *obj* exposes a fileno() that actually works."""
    if not hasattr(obj, "fileno"):
        return False

    # check BytesIO case and maybe others
    try:
        obj.fileno()
    except (AttributeError, OSError, io.UnsupportedOperation):
        return False
    return True
+
+
def warn(msg):
    """Print *msg* to stderr framed by '!!!' marker lines; the first line
    is prefixed with 'WARNING: '."""
    print("!!!", file=sys.stderr)

    for idx, line in enumerate(msg.splitlines()):
        prefix = "WARNING: " if idx == 0 else ""
        print("!!! %s%s" % (prefix, line), file=sys.stderr)

    print("!!!\n", file=sys.stderr)
    sys.stderr.flush()
+
+
def make_fail_app(msg):
    """Build a minimal WSGI app that always answers 500 with *msg* as body."""
    msg = to_bytestring(msg)

    def app(environ, start_response):
        headers = [
            ("Content-Type", "text/plain"),
            ("Content-Length", str(len(msg))),
        ]
        start_response("500 Internal Server Error", headers)
        return [msg]

    return app
+
+
def split_request_uri(uri):
    """urlsplit *uri*, keeping a leading ``//`` in the path (abs_path).

    urlsplit would treat a leading // as the start of an authority
    component, while RFC 2616 sec 5.1.2 says it is part of abs_path,
    so a temporary dot prefix keeps the whole string in the path.
    """
    if not uri.startswith("//"):
        return urllib.parse.urlsplit(uri)

    parts = urllib.parse.urlsplit("." + uri)
    return parts._replace(path=parts.path[1:])
+
+
# From six.reraise
def reraise(tp, value, tb=None):
    """Re-raise *value* (instantiating *tp* if value is None) with
    traceback *tb*.

    The locals are cleared in the ``finally`` block to break the
    reference cycle through the traceback object.
    """
    try:
        if value is None:
            value = tp()
        if value.__traceback__ is not tb:
            raise value.with_traceback(tb)
        raise value
    finally:
        value = None
        tb = None
+
+
def bytes_to_str(b):
    """Decode bytes as latin-1; pass str values through unchanged."""
    return b if isinstance(b, str) else str(b, 'latin1')
+
+
def unquote_to_wsgi_str(string):
    """Percent-decode *string* into a WSGI "native" str (raw bytes
    re-decoded as latin-1, per PEP 3333)."""
    return urllib.parse.unquote_to_bytes(string).decode('latin-1')
diff --git a/.venv/Lib/site-packages/gunicorn/workers/__init__.py b/.venv/Lib/site-packages/gunicorn/workers/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..3da5f85e890a4e000fb873d2ead9278dfe949210
--- /dev/null
+++ b/.venv/Lib/site-packages/gunicorn/workers/__init__.py
@@ -0,0 +1,14 @@
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+
# Supported gunicorn workers: maps the alias accepted by the worker-class
# setting to the dotted path of the implementing class.
SUPPORTED_WORKERS = {
    "sync": "gunicorn.workers.sync.SyncWorker",
    "eventlet": "gunicorn.workers.geventlet.EventletWorker",
    "gevent": "gunicorn.workers.ggevent.GeventWorker",
    "gevent_wsgi": "gunicorn.workers.ggevent.GeventPyWSGIWorker",
    "gevent_pywsgi": "gunicorn.workers.ggevent.GeventPyWSGIWorker",
    "tornado": "gunicorn.workers.gtornado.TornadoWorker",
    "gthread": "gunicorn.workers.gthread.ThreadWorker",
}
diff --git a/.venv/Lib/site-packages/gunicorn/workers/__pycache__/__init__.cpython-310.pyc b/.venv/Lib/site-packages/gunicorn/workers/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..577b8f2a42d8c5ef7412d3c247d371269e810207
Binary files /dev/null and b/.venv/Lib/site-packages/gunicorn/workers/__pycache__/__init__.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/gunicorn/workers/__pycache__/base.cpython-310.pyc b/.venv/Lib/site-packages/gunicorn/workers/__pycache__/base.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a50b57daf4ebc47aa840ed75df49e30f91c0b0c3
Binary files /dev/null and b/.venv/Lib/site-packages/gunicorn/workers/__pycache__/base.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/gunicorn/workers/__pycache__/base_async.cpython-310.pyc b/.venv/Lib/site-packages/gunicorn/workers/__pycache__/base_async.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d9bb404c7d2071a0200b5083cc0858d799b50457
Binary files /dev/null and b/.venv/Lib/site-packages/gunicorn/workers/__pycache__/base_async.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/gunicorn/workers/__pycache__/geventlet.cpython-310.pyc b/.venv/Lib/site-packages/gunicorn/workers/__pycache__/geventlet.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..133ec42c8ac2382a356522f49d80679a6aa5daf2
Binary files /dev/null and b/.venv/Lib/site-packages/gunicorn/workers/__pycache__/geventlet.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/gunicorn/workers/__pycache__/ggevent.cpython-310.pyc b/.venv/Lib/site-packages/gunicorn/workers/__pycache__/ggevent.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c8324c37cd9a5438c98fd5073563948def9e6238
Binary files /dev/null and b/.venv/Lib/site-packages/gunicorn/workers/__pycache__/ggevent.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/gunicorn/workers/__pycache__/gthread.cpython-310.pyc b/.venv/Lib/site-packages/gunicorn/workers/__pycache__/gthread.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..68f0f6302dc76cbfef772c80aa99c13340ae240e
Binary files /dev/null and b/.venv/Lib/site-packages/gunicorn/workers/__pycache__/gthread.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/gunicorn/workers/__pycache__/gtornado.cpython-310.pyc b/.venv/Lib/site-packages/gunicorn/workers/__pycache__/gtornado.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..39da0373ad68f5abcb65c09ff89db0242179d554
Binary files /dev/null and b/.venv/Lib/site-packages/gunicorn/workers/__pycache__/gtornado.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/gunicorn/workers/__pycache__/sync.cpython-310.pyc b/.venv/Lib/site-packages/gunicorn/workers/__pycache__/sync.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..fbc0dca1f4a72641f4e987f69fbb15d7808037df
Binary files /dev/null and b/.venv/Lib/site-packages/gunicorn/workers/__pycache__/sync.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/gunicorn/workers/__pycache__/workertmp.cpython-310.pyc b/.venv/Lib/site-packages/gunicorn/workers/__pycache__/workertmp.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f8e738afab2e9242091cc35cbf65bdba2cbe2c69
Binary files /dev/null and b/.venv/Lib/site-packages/gunicorn/workers/__pycache__/workertmp.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/gunicorn/workers/base.py b/.venv/Lib/site-packages/gunicorn/workers/base.py
new file mode 100644
index 0000000000000000000000000000000000000000..93c465c98e1e1552dfdf7e15dba8a8e378cb4b04
--- /dev/null
+++ b/.venv/Lib/site-packages/gunicorn/workers/base.py
@@ -0,0 +1,287 @@
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+
+import io
+import os
+import signal
+import sys
+import time
+import traceback
+from datetime import datetime
+from random import randint
+from ssl import SSLError
+
+from gunicorn import util
+from gunicorn.http.errors import (
+    ForbiddenProxyRequest, InvalidHeader,
+    InvalidHeaderName, InvalidHTTPVersion,
+    InvalidProxyLine, InvalidRequestLine,
+    InvalidRequestMethod, InvalidSchemeHeaders,
+    LimitRequestHeaders, LimitRequestLine,
+    UnsupportedTransferCoding,
+    ConfigurationProblem, ObsoleteFolding,
+)
+from gunicorn.http.wsgi import Response, default_environ
+from gunicorn.reloader import reloader_engines
+from gunicorn.workers.workertmp import WorkerTmp
+
+
class Worker:
    """Base class for gunicorn worker processes.

    Instantiated pre-fork by the arbiter; ``init_process()`` runs in the
    forked child, installs signal handlers, loads the WSGI application
    and enters the ``run()`` loop implemented by subclasses.
    """

    # Signals reset to SIG_DFL in init_signals() before the worker's own
    # handlers are installed.
    SIGNALS = [getattr(signal, "SIG%s" % x) for x in (
        "ABRT HUP QUIT INT TERM USR1 USR2 WINCH CHLD".split()
    )]

    # Self-pipe used to wake the worker; replaced with a real pipe pair in
    # init_process().
    PIPE = []

    def __init__(self, age, ppid, sockets, app, timeout, cfg, log):
        """\
        This is called pre-fork so it shouldn't do anything to the
        current process. If there's a need to make process wide
        changes you'll want to do that in ``self.init_process()``.
        """
        self.age = age
        self.pid = "[booting]"
        self.ppid = ppid
        self.sockets = sockets
        self.app = app
        self.timeout = timeout
        self.cfg = cfg
        self.booted = False
        self.aborted = False
        self.reloader = None

        # number of requests handled so far, compared against max_requests
        self.nr = 0

        if cfg.max_requests > 0:
            # add jitter so all workers don't recycle at the same moment
            jitter = randint(0, cfg.max_requests_jitter)
            self.max_requests = cfg.max_requests + jitter
        else:
            self.max_requests = sys.maxsize

        self.alive = True
        self.log = log
        self.tmp = WorkerTmp(cfg)

    def __str__(self):
        return "<Worker %s>" % self.pid

    def notify(self):
        """\
        Your worker subclass must arrange to have this method called
        once every ``self.timeout`` seconds. If you fail in accomplishing
        this task, the master process will murder your workers.
        """
        self.tmp.notify()

    def run(self):
        """\
        This is the mainloop of a worker process. You should override
        this method in a subclass to provide the intended behaviour
        for your particular evil schemes.
        """
        raise NotImplementedError()

    def init_process(self):
        """\
        If you override this method in a subclass, the last statement
        in the function should be to call this method with
        super().init_process() so that the ``run()`` loop is initiated.
        """

        # set environment' variables
        if self.cfg.env:
            for k, v in self.cfg.env.items():
                os.environ[k] = v

        util.set_owner_process(self.cfg.uid, self.cfg.gid,
                               initgroups=self.cfg.initgroups)

        # Reseed the random number generator
        util.seed()

        # For waking ourselves up
        self.PIPE = os.pipe()
        for p in self.PIPE:
            util.set_non_blocking(p)
            util.close_on_exec(p)

        # Prevent fd inheritance
        for s in self.sockets:
            util.close_on_exec(s)
        util.close_on_exec(self.tmp.fileno())

        # fds the worker waits on for readability: listeners + wake-up pipe
        self.wait_fds = self.sockets + [self.PIPE[0]]

        self.log.close_on_exec()

        self.init_signals()

        # start the reloader
        if self.cfg.reload:
            def changed(fname):
                self.log.info("Worker reloading: %s modified", fname)
                self.alive = False
                os.write(self.PIPE[1], b"1")
                self.cfg.worker_int(self)
                time.sleep(0.1)
                sys.exit(0)

            reloader_cls = reloader_engines[self.cfg.reload_engine]
            self.reloader = reloader_cls(extra_files=self.cfg.reload_extra_files,
                                         callback=changed)

        self.load_wsgi()
        if self.reloader:
            self.reloader.start()

        self.cfg.post_worker_init(self)

        # Enter main run loop
        self.booted = True
        self.run()

    def load_wsgi(self):
        """Load the WSGI application into ``self.wsgi``.

        Under ``--reload`` a SyntaxError in the app is tolerated: the
        offending file is added to the reloader watch list and a failing
        placeholder app is served instead of crashing the worker.
        """
        try:
            self.wsgi = self.app.wsgi()
        except SyntaxError as e:
            if not self.cfg.reload:
                raise

            self.log.exception(e)

            # fix from PR #1228
            # storing the traceback into exc_tb will create a circular reference.
            # per https://docs.python.org/2/library/sys.html#sys.exc_info warning,
            # delete the traceback after use.
            try:
                _, exc_val, exc_tb = sys.exc_info()
                self.reloader.add_extra_file(exc_val.filename)

                tb_string = io.StringIO()
                traceback.print_tb(exc_tb, file=tb_string)
                self.wsgi = util.make_fail_app(tb_string.getvalue())
            finally:
                del exc_tb

    def init_signals(self):
        """Install the worker's signal handlers (runs post-fork)."""
        # reset signaling
        for s in self.SIGNALS:
            signal.signal(s, signal.SIG_DFL)
        # init new signaling
        signal.signal(signal.SIGQUIT, self.handle_quit)
        signal.signal(signal.SIGTERM, self.handle_exit)
        signal.signal(signal.SIGINT, self.handle_quit)
        signal.signal(signal.SIGWINCH, self.handle_winch)
        signal.signal(signal.SIGUSR1, self.handle_usr1)
        signal.signal(signal.SIGABRT, self.handle_abort)

        # Don't let SIGTERM and SIGUSR1 disturb active requests
        # by interrupting system calls
        signal.siginterrupt(signal.SIGTERM, False)
        signal.siginterrupt(signal.SIGUSR1, False)

        if hasattr(signal, 'set_wakeup_fd'):
            signal.set_wakeup_fd(self.PIPE[1])

    def handle_usr1(self, sig, frame):
        """SIGUSR1: reopen log files (log rotation support)."""
        self.log.reopen_files()

    def handle_exit(self, sig, frame):
        """SIGTERM: graceful stop — let the run loop wind down."""
        self.alive = False

    def handle_quit(self, sig, frame):
        """SIGQUIT/SIGINT: quick stop — run the worker_int hook and exit 0."""
        self.alive = False
        # worker_int callback
        self.cfg.worker_int(self)
        time.sleep(0.1)
        sys.exit(0)

    def handle_abort(self, sig, frame):
        """SIGABRT: forced stop — run the worker_abort hook and exit 1."""
        self.alive = False
        self.cfg.worker_abort(self)
        sys.exit(1)

    def handle_error(self, req, client, addr, exc):
        """Report a request-handling failure.

        Known parse/protocol errors are mapped to 4xx/5xx statuses and
        logged as warnings; anything else is logged as an exception and
        answered with a 500. An access-log entry is written when a
        request object is available, and an HTML error page is sent to
        the client on a best-effort basis.
        """
        request_start = datetime.now()
        addr = addr or ('', -1)  # unix socket case
        if isinstance(exc, (
            InvalidRequestLine, InvalidRequestMethod,
            InvalidHTTPVersion, InvalidHeader, InvalidHeaderName,
            LimitRequestLine, LimitRequestHeaders,
            InvalidProxyLine, ForbiddenProxyRequest,
            InvalidSchemeHeaders, UnsupportedTransferCoding,
            ConfigurationProblem, ObsoleteFolding,
            SSLError,
        )):

            status_int = 400
            reason = "Bad Request"

            if isinstance(exc, InvalidRequestLine):
                mesg = "Invalid Request Line '%s'" % str(exc)
            elif isinstance(exc, InvalidRequestMethod):
                mesg = "Invalid Method '%s'" % str(exc)
            elif isinstance(exc, InvalidHTTPVersion):
                mesg = "Invalid HTTP Version '%s'" % str(exc)
            elif isinstance(exc, UnsupportedTransferCoding):
                mesg = "%s" % str(exc)
                status_int = 501
            elif isinstance(exc, ConfigurationProblem):
                mesg = "%s" % str(exc)
                status_int = 500
            elif isinstance(exc, ObsoleteFolding):
                mesg = "%s" % str(exc)
            elif isinstance(exc, (InvalidHeaderName, InvalidHeader,)):
                mesg = "%s" % str(exc)
                if not req and hasattr(exc, "req"):
                    req = exc.req  # for access log
            elif isinstance(exc, LimitRequestLine):
                mesg = "%s" % str(exc)
            elif isinstance(exc, LimitRequestHeaders):
                reason = "Request Header Fields Too Large"
                mesg = "Error parsing headers: '%s'" % str(exc)
                status_int = 431
            elif isinstance(exc, InvalidProxyLine):
                mesg = "'%s'" % str(exc)
            elif isinstance(exc, ForbiddenProxyRequest):
                reason = "Forbidden"
                mesg = "Request forbidden"
                status_int = 403
            elif isinstance(exc, InvalidSchemeHeaders):
                mesg = "%s" % str(exc)
            elif isinstance(exc, SSLError):
                reason = "Forbidden"
                mesg = "'%s'" % str(exc)
                status_int = 403

            msg = "Invalid request from ip={ip}: {error}"
            self.log.warning(msg.format(ip=addr[0], error=str(exc)))
        else:
            if hasattr(req, "uri"):
                self.log.exception("Error handling request %s", req.uri)
            else:
                self.log.exception("Error handling request (no URI read)")
            status_int = 500
            reason = "Internal Server Error"
            mesg = ""

        if req is not None:
            request_time = datetime.now() - request_start
            environ = default_environ(req, client, self.cfg)
            environ['REMOTE_ADDR'] = addr[0]
            environ['REMOTE_PORT'] = str(addr[1])
            resp = Response(req, client, self.cfg)
            resp.status = "%s %s" % (status_int, reason)
            resp.response_length = len(mesg)
            self.log.access(resp, req, environ, request_time)

        try:
            util.write_error(client, status_int, reason, mesg)
        except Exception:
            # client may already be gone; the error page is best-effort
            self.log.debug("Failed to send error message.")

    def handle_winch(self, sig, fname):
        # Ignore SIGWINCH in worker. Fixes a crash on OpenBSD.
        self.log.debug("worker: SIGWINCH ignored.")
diff --git a/.venv/Lib/site-packages/gunicorn/workers/base_async.py b/.venv/Lib/site-packages/gunicorn/workers/base_async.py
new file mode 100644
index 0000000000000000000000000000000000000000..9466d6aaafa584802f520521644d8054bbb5f2d7
--- /dev/null
+++ b/.venv/Lib/site-packages/gunicorn/workers/base_async.py
@@ -0,0 +1,147 @@
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+
+from datetime import datetime
+import errno
+import socket
+import ssl
+import sys
+
+from gunicorn import http
+from gunicorn.http import wsgi
+from gunicorn import util
+from gunicorn.workers import base
+
+ALREADY_HANDLED = object()
+
+
class AsyncWorker(base.Worker):
    """Base class for cooperative (green-thread) workers.

    Concrete subclasses (eventlet, gevent) supply the scheduling
    primitives: a ``timeout_ctx`` context manager for keepalive
    timeouts and their own ``run`` accept loops.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Maximum number of simultaneous client connections per worker.
        self.worker_connections = self.cfg.worker_connections

    def timeout_ctx(self):
        """Return a context manager enforcing the keepalive timeout.

        Must be implemented by subclasses.
        """
        raise NotImplementedError()

    def is_already_handled(self, respiter):
        """Return True when the hub already sent the response itself."""
        # some workers will need to overload this function to raise a StopIteration
        return respiter == ALREADY_HANDLED

    def handle(self, listener, client, addr):
        """Parse and serve requests arriving on an accepted connection.

        Runs the keepalive loop when enabled; maps parse, SSL and
        socket failures onto debug logging or ``handle_error``. The
        client socket is always closed on exit.
        """
        req = None
        try:
            parser = http.RequestParser(self.cfg, client, addr)
            try:
                listener_name = listener.getsockname()
                if not self.cfg.keepalive:
                    req = next(parser)
                    self.handle_request(listener_name, req, client, addr)
                else:
                    # keepalive loop: proxy protocol info arrives only on
                    # the first request, so carry it across later requests
                    # on the same connection.
                    proxy_protocol_info = {}
                    while True:
                        req = None
                        with self.timeout_ctx():
                            req = next(parser)
                        if not req:
                            break
                        if req.proxy_protocol_info:
                            proxy_protocol_info = req.proxy_protocol_info
                        else:
                            req.proxy_protocol_info = proxy_protocol_info
                        self.handle_request(listener_name, req, client, addr)
            except http.errors.NoMoreData as e:
                self.log.debug("Ignored premature client disconnection. %s", e)
            except StopIteration as e:
                self.log.debug("Closing connection. %s", e)
            except ssl.SSLError:
                # pass to next try-except level
                util.reraise(*sys.exc_info())
            except OSError:
                # pass to next try-except level
                util.reraise(*sys.exc_info())
            except Exception as e:
                self.handle_error(req, client, addr, e)
        except ssl.SSLError as e:
            if e.args[0] == ssl.SSL_ERROR_EOF:
                self.log.debug("ssl connection closed")
                client.close()
            else:
                self.log.debug("Error processing SSL request.")
                self.handle_error(req, client, addr, e)
        except OSError as e:
            # EPIPE/ECONNRESET/ENOTCONN are routine client disconnects.
            if e.errno not in (errno.EPIPE, errno.ECONNRESET, errno.ENOTCONN):
                self.log.exception("Socket error processing request.")
            else:
                if e.errno == errno.ECONNRESET:
                    self.log.debug("Ignoring connection reset")
                elif e.errno == errno.ENOTCONN:
                    self.log.debug("Ignoring socket not connected")
                else:
                    self.log.debug("Ignoring EPIPE")
        except BaseException as e:
            self.handle_error(req, client, addr, e)
        finally:
            util.close(client)

    def handle_request(self, listener_name, req, sock, addr):
        """Run the WSGI application for one parsed request.

        Returns True when the caller may keep the connection alive,
        False when the hub already handled the response; raises
        StopIteration when the connection must be closed.
        """
        request_start = datetime.now()
        environ = {}
        resp = None
        try:
            self.cfg.pre_request(self, req)
            resp, environ = wsgi.create(req, sock, addr,
                                        listener_name, self.cfg)
            environ["wsgi.multithread"] = True
            self.nr += 1
            if self.nr >= self.max_requests:
                if self.alive:
                    self.log.info("Autorestarting worker after current request.")
                    self.alive = False

            if not self.alive or not self.cfg.keepalive:
                resp.force_close()

            respiter = self.wsgi(environ, resp.start_response)
            if self.is_already_handled(respiter):
                return False
            try:
                if isinstance(respiter, environ['wsgi.file_wrapper']):
                    resp.write_file(respiter)
                else:
                    for item in respiter:
                        resp.write(item)
                resp.close()
            finally:
                # Access-log every request, even when writing failed.
                request_time = datetime.now() - request_start
                self.log.access(resp, req, environ, request_time)
                if hasattr(respiter, "close"):
                    respiter.close()
            if resp.should_close():
                raise StopIteration()
        except StopIteration:
            raise
        except OSError:
            # If the original exception was a socket.error we delegate
            # handling it to the caller (where handle() might ignore it)
            util.reraise(*sys.exc_info())
        except Exception:
            if resp and resp.headers_sent:
                # If the requests have already been sent, we should close the
                # connection to indicate the error.
                self.log.exception("Error handling request")
                try:
                    sock.shutdown(socket.SHUT_RDWR)
                    sock.close()
                except OSError:
                    pass
                raise StopIteration()
            raise
        finally:
            try:
                self.cfg.post_request(self, req, environ, resp)
            except Exception:
                self.log.exception("Exception in post_request hook")
        return True
diff --git a/.venv/Lib/site-packages/gunicorn/workers/geventlet.py b/.venv/Lib/site-packages/gunicorn/workers/geventlet.py
new file mode 100644
index 0000000000000000000000000000000000000000..087eb61ec75da1bbc214c9877ca4a0d72c6ceb1e
--- /dev/null
+++ b/.venv/Lib/site-packages/gunicorn/workers/geventlet.py
@@ -0,0 +1,186 @@
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+
+from functools import partial
+import sys
+
+try:
+    import eventlet
+except ImportError:
+    raise RuntimeError("eventlet worker requires eventlet 0.24.1 or higher")
+else:
+    from packaging.version import parse as parse_version
+    if parse_version(eventlet.__version__) < parse_version('0.24.1'):
+        raise RuntimeError("eventlet worker requires eventlet 0.24.1 or higher")
+
+from eventlet import hubs, greenthread
+from eventlet.greenio import GreenSocket
+import eventlet.wsgi
+import greenlet
+
+from gunicorn.workers.base_async import AsyncWorker
+from gunicorn.sock import ssl_wrap_socket
+
+# ALREADY_HANDLED is removed in 0.30.3+ now it's `WSGI_LOCAL.already_handled: bool`
+# https://github.com/eventlet/eventlet/pull/544
+EVENTLET_WSGI_LOCAL = getattr(eventlet.wsgi, "WSGI_LOCAL", None)
+EVENTLET_ALREADY_HANDLED = getattr(eventlet.wsgi, "ALREADY_HANDLED", None)
+
+
+def _eventlet_socket_sendfile(self, file, offset=0, count=None):
+    # Based on the implementation in gevent which in turn is slightly
+    # modified from the standard library implementation.
+    if self.gettimeout() == 0:
+        raise ValueError("non-blocking sockets are not supported")
+    if offset:
+        file.seek(offset)
+    blocksize = min(count, 8192) if count else 8192
+    total_sent = 0
+    # localize variable access to minimize overhead
+    file_read = file.read
+    sock_send = self.send
+    try:
+        while True:
+            if count:
+                blocksize = min(count - total_sent, blocksize)
+                if blocksize <= 0:
+                    break
+            data = memoryview(file_read(blocksize))
+            if not data:
+                break  # EOF
+            while True:
+                try:
+                    sent = sock_send(data)
+                except BlockingIOError:
+                    continue
+                else:
+                    total_sent += sent
+                    if sent < len(data):
+                        data = data[sent:]
+                    else:
+                        break
+        return total_sent
+    finally:
+        if total_sent > 0 and hasattr(file, 'seek'):
+            file.seek(offset + total_sent)
+
+
def _eventlet_serve(sock, handle, concurrency):
    """
    Serve requests forever.

    This code is nearly identical to ``eventlet.convenience.serve`` except
    that it attempts to join the pool at the end, which allows for gunicorn
    graceful shutdowns.
    """
    pool = eventlet.greenpool.GreenPool(concurrency)
    server_gt = eventlet.greenthread.getcurrent()

    while True:
        try:
            conn, addr = sock.accept()
            gt = pool.spawn(handle, conn, addr)
            gt.link(_eventlet_stop, server_gt, conn)
            # Drop local references so the connection and greenthread can
            # be collected as soon as the handler finishes with them.
            conn, addr, gt = None, None, None
        except eventlet.StopServe:
            # Raised into this greenthread by acceptor.kill() during
            # shutdown: stop accepting and drain in-flight handlers.
            sock.close()
            pool.waitall()
            return
+
+
def _eventlet_stop(client, server, conn):
    """
    Stop a greenlet handling a request and close its connection.

    This code is lifted from eventlet so as not to depend on undocumented
    functions in the library.
    """
    try:
        try:
            client.wait()
        finally:
            # Always close the connection, even if the handler died.
            conn.close()
    except greenlet.GreenletExit:
        # Normal greenthread teardown; nothing to report.
        pass
    except Exception:
        # Propagate unexpected handler errors into the server greenthread.
        greenthread.kill(server, *sys.exc_info())
+
+
def patch_sendfile():
    """Install a cooperative ``sendfile`` on GreenSocket when missing."""
    # As of eventlet 0.25.1, GreenSocket.sendfile doesn't exist,
    # meaning the native implementations of socket.sendfile will be used.
    # If os.sendfile exists, it will attempt to use that, failing explicitly
    # if the socket is in non-blocking mode, which the underlying
    # socket object /is/. Even the regular _sendfile_use_send will
    # fail in that way; plus, it would use the underlying socket.send which isn't
    # properly cooperative. So we have to monkey-patch a working socket.sendfile()
    # into GreenSocket; in this method, `self.send` will be the GreenSocket's
    # send method which is properly cooperative.
    if not hasattr(GreenSocket, 'sendfile'):
        GreenSocket.sendfile = _eventlet_socket_sendfile
+
+
class EventletWorker(AsyncWorker):
    """Async worker driving requests with eventlet green threads."""

    def patch(self):
        # Install the eventlet hub and monkey-patch the stdlib so
        # blocking calls cooperate with the event loop.
        hubs.use_hub()
        eventlet.monkey_patch()
        patch_sendfile()

    def is_already_handled(self, respiter):
        """Detect responses eventlet's wsgi layer already sent itself."""
        # eventlet >= 0.30.3
        if getattr(EVENTLET_WSGI_LOCAL, "already_handled", None):
            raise StopIteration()
        # eventlet < 0.30.3
        if respiter == EVENTLET_ALREADY_HANDLED:
            raise StopIteration()
        return super().is_already_handled(respiter)

    def init_process(self):
        # Patch before the base class loads the application.
        self.patch()
        super().init_process()

    def handle_quit(self, sig, frame):
        # Defer to a fresh greenthread so blocking calls are safe
        # outside the signal-handler context.
        eventlet.spawn(super().handle_quit, sig, frame)

    def handle_usr1(self, sig, frame):
        eventlet.spawn(super().handle_usr1, sig, frame)

    def timeout_ctx(self):
        # exception=False: per eventlet's Timeout docs this silently
        # aborts the with-block on expiry instead of raising.
        return eventlet.Timeout(self.cfg.keepalive or None, False)

    def handle(self, listener, client, addr):
        if self.cfg.is_ssl:
            client = ssl_wrap_socket(client, self.cfg)
        super().handle(listener, client, addr)

    def run(self):
        """Spawn one acceptor greenthread per listening socket, then
        heartbeat until shutdown.

        On shutdown each acceptor is asked to stop (StopServe) and
        waited on for up to graceful_timeout before being force-killed.
        """
        acceptors = []
        for sock in self.sockets:
            gsock = GreenSocket(sock)
            gsock.setblocking(1)
            hfun = partial(self.handle, gsock)
            acceptor = eventlet.spawn(_eventlet_serve, gsock, hfun,
                                      self.worker_connections)

            acceptors.append(acceptor)
            eventlet.sleep(0.0)

        while self.alive:
            self.notify()
            eventlet.sleep(1.0)

        self.notify()
        t = None
        try:
            with eventlet.Timeout(self.cfg.graceful_timeout) as t:
                for a in acceptors:
                    a.kill(eventlet.StopServe())
                for a in acceptors:
                    a.wait()
        except eventlet.Timeout as te:
            if te != t:
                # Not our graceful-shutdown timer; let it propagate.
                raise
            # Graceful period expired: hard-kill remaining acceptors.
            for a in acceptors:
                a.kill()
diff --git a/.venv/Lib/site-packages/gunicorn/workers/ggevent.py b/.venv/Lib/site-packages/gunicorn/workers/ggevent.py
new file mode 100644
index 0000000000000000000000000000000000000000..b9b9b440857e8cfe0a8432891aa840ee588506cd
--- /dev/null
+++ b/.venv/Lib/site-packages/gunicorn/workers/ggevent.py
@@ -0,0 +1,193 @@
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+
+import os
+import sys
+from datetime import datetime
+from functools import partial
+import time
+
+try:
+    import gevent
+except ImportError:
+    raise RuntimeError("gevent worker requires gevent 1.4 or higher")
+else:
+    from packaging.version import parse as parse_version
+    if parse_version(gevent.__version__) < parse_version('1.4'):
+        raise RuntimeError("gevent worker requires gevent 1.4 or higher")
+
+from gevent.pool import Pool
+from gevent.server import StreamServer
+from gevent import hub, monkey, socket, pywsgi
+
+import gunicorn
+from gunicorn.http.wsgi import base_environ
+from gunicorn.sock import ssl_context
+from gunicorn.workers.base_async import AsyncWorker
+
+VERSION = "gevent/%s gunicorn/%s" % (gevent.__version__, gunicorn.__version__)
+
+
class GeventWorker(AsyncWorker):
    """Async worker multiplexing requests with gevent greenlets."""

    # Overridden by GeventPyWSGIWorker to serve through gevent's pywsgi
    # stack instead of a plain StreamServer.
    server_class = None
    wsgi_handler = None

    def patch(self):
        monkey.patch_all()

        # patch sockets: rebuild listeners as gevent sockets sharing the
        # original file descriptors
        sockets = []
        for s in self.sockets:
            sockets.append(socket.socket(s.FAMILY, socket.SOCK_STREAM,
                                         fileno=s.sock.fileno()))
        self.sockets = sockets

    def notify(self):
        super().notify()
        # Exit if the arbiter died and we were re-parented.
        if self.ppid != os.getppid():
            self.log.info("Parent changed, shutting down: %s", self)
            sys.exit(0)

    def timeout_ctx(self):
        return gevent.Timeout(self.cfg.keepalive, False)

    def run(self):
        """Start one gevent server per listening socket and heartbeat
        until shutdown, then drain connections within graceful_timeout."""
        servers = []
        ssl_args = {}

        if self.cfg.is_ssl:
            ssl_args = {"ssl_context": ssl_context(self.cfg)}

        for s in self.sockets:
            s.setblocking(1)
            pool = Pool(self.worker_connections)
            if self.server_class is not None:
                environ = base_environ(self.cfg)
                environ.update({
                    "wsgi.multithread": True,
                    "SERVER_SOFTWARE": VERSION,
                })
                server = self.server_class(
                    s, application=self.wsgi, spawn=pool, log=self.log,
                    handler_class=self.wsgi_handler, environ=environ,
                    **ssl_args)
            else:
                hfun = partial(self.handle, s)
                server = StreamServer(s, handle=hfun, spawn=pool, **ssl_args)
                if self.cfg.workers > 1:
                    # Accept at most one connection per wakeup so accepts
                    # spread across multiple workers.
                    server.max_accept = 1

            server.start()
            servers.append(server)

        while self.alive:
            self.notify()
            gevent.sleep(1.0)

        try:
            # Stop accepting requests
            for server in servers:
                if hasattr(server, 'close'):  # gevent 1.0
                    server.close()
                if hasattr(server, 'kill'):  # gevent < 1.0
                    server.kill()

            # Handle current requests until graceful_timeout
            ts = time.time()
            while time.time() - ts <= self.cfg.graceful_timeout:
                accepting = 0
                for server in servers:
                    if server.pool.free_count() != server.pool.size:
                        accepting += 1

                # if no server is accepting a connection, we can exit
                if not accepting:
                    return

                self.notify()
                gevent.sleep(1.0)

            # Force kill all active the handlers
            self.log.warning("Worker graceful timeout (pid:%s)", self.pid)
            for server in servers:
                server.stop(timeout=1)
        except Exception:
            # NOTE(review): errors during shutdown are deliberately
            # swallowed so the worker always exits quietly.
            pass

    def handle(self, listener, client, addr):
        # Connected socket timeout defaults to socket.getdefaulttimeout().
        # This forces to blocking mode.
        client.setblocking(1)
        super().handle(listener, client, addr)

    def handle_request(self, listener_name, req, sock, addr):
        # Greenlet kill / sys.exit inside the app are normal shutdown
        # paths, not request errors.
        try:
            super().handle_request(listener_name, req, sock, addr)
        except gevent.GreenletExit:
            pass
        except SystemExit:
            pass

    def handle_quit(self, sig, frame):
        # Move this out of the signal handler so we can use
        # blocking calls. See #1126
        gevent.spawn(super().handle_quit, sig, frame)

    def handle_usr1(self, sig, frame):
        # Make the gevent workers handle the usr1 signal
        # by deferring to a new greenlet. See #1645
        gevent.spawn(super().handle_usr1, sig, frame)

    def init_process(self):
        self.patch()
        # Re-create the hub in this (forked) worker process.
        hub.reinit()
        super().init_process()
+
+
class GeventResponse:
    """Minimal response record handed to gunicorn's access logger.

    Mirrors the attributes the logger reads: status line, response
    headers and the number of bytes sent.
    """

    status = None
    headers = None
    sent = None

    def __init__(self, status, headers, clength):
        self.status, self.headers, self.sent = status, headers, clength
+
+
class PyWSGIHandler(pywsgi.WSGIHandler):
    """gevent pywsgi handler wired into gunicorn's access logging."""

    def log_request(self):
        """Emit one gunicorn access-log entry for the finished request."""
        start = datetime.fromtimestamp(self.time_start)
        finish = datetime.fromtimestamp(self.time_finish)
        response_time = finish - start
        resp_headers = getattr(self, 'response_headers', {})

        # Status is expected to be a string but is encoded to bytes in gevent for PY3
        # Except when it isn't because gevent uses hardcoded strings for network errors.
        status = self.status.decode() if isinstance(self.status, bytes) else self.status
        resp = GeventResponse(status, resp_headers, self.response_length)
        if hasattr(self, 'headers'):
            req_headers = self.headers.items()
        else:
            # No request headers were parsed (e.g. a failed request).
            req_headers = []
        self.server.log.access(resp, req_headers, self.environ, response_time)

    def get_environ(self):
        env = super().get_environ()
        # Expose the raw socket and the unparsed request path to the app.
        env['gunicorn.sock'] = self.socket
        env['RAW_URI'] = self.path
        return env
+
+
class PyWSGIServer(pywsgi.WSGIServer):
    """gevent pywsgi server; subclassed only as an extension point."""
    pass
+
+
class GeventPyWSGIWorker(GeventWorker):
    "GeventWorker variant served through gevent's pywsgi WSGIServer."
    server_class = PyWSGIServer
    wsgi_handler = PyWSGIHandler
diff --git a/.venv/Lib/site-packages/gunicorn/workers/gthread.py b/.venv/Lib/site-packages/gunicorn/workers/gthread.py
new file mode 100644
index 0000000000000000000000000000000000000000..7a23228cdd5d3a0f4d39833388a0f8780d1acbba
--- /dev/null
+++ b/.venv/Lib/site-packages/gunicorn/workers/gthread.py
@@ -0,0 +1,372 @@
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+
+# design:
+# A threaded worker accepts connections in the main loop, accepted
+# connections are added to the thread pool as a connection job.
+# Keepalive connections are put back in the loop waiting for an event.
+# If no event happen after the keep alive timeout, the connection is
+# closed.
+# pylint: disable=no-else-break
+
+from concurrent import futures
+import errno
+import os
+import selectors
+import socket
+import ssl
+import sys
+import time
+from collections import deque
+from datetime import datetime
+from functools import partial
+from threading import RLock
+
+from . import base
+from .. import http
+from .. import util
+from .. import sock
+from ..http import wsgi
+
+
class TConn:
    """A client connection tracked by the threaded worker.

    Holds the accepted socket plus the parser and keepalive state the
    worker needs to shuttle the connection between the poller and the
    thread pool.
    """

    def __init__(self, cfg, sock, client, server):
        self.cfg, self.sock = cfg, sock
        self.client, self.server = client, server

        self.timeout = self.parser = None
        self.initialized = False

        # Accepted sockets start out non-blocking so they can sit in
        # the worker's poller until data arrives.
        self.sock.setblocking(False)

    def init(self):
        """Prepare the connection for request handling on a thread."""
        self.initialized = True
        self.sock.setblocking(True)

        if self.parser is not None:
            return
        # First use: wrap the socket if SSL is configured, then build
        # the request parser.
        if self.cfg.is_ssl:
            self.sock = sock.ssl_wrap_socket(self.sock, self.cfg)
        self.parser = http.RequestParser(self.cfg, self.sock, self.client)

    def set_timeout(self):
        # Arm the keepalive deadline for this connection.
        self.timeout = time.time() + self.cfg.keepalive

    def close(self):
        util.close(self.sock)
+
+
class ThreadWorker(base.Worker):
    """Worker that accepts connections in its main loop and serves each
    request on a thread pool, parking keepalive connections in a
    selector until they become readable again."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.worker_connections = self.cfg.worker_connections
        # Connections beyond the active thread count may be kept alive.
        self.max_keepalived = self.cfg.worker_connections - self.cfg.threads
        # initialise the pool
        self.tpool = None
        self.poller = None
        self._lock = None
        self.futures = deque()
        self._keep = deque()
        self.nr_conns = 0

    @classmethod
    def check_config(cls, cfg, log):
        """Warn at startup when keepalive is enabled but no connection
        slots remain for it."""
        max_keepalived = cfg.worker_connections - cfg.threads

        if max_keepalived <= 0 and cfg.keepalive:
            log.warning("No keepalived connections can be handled. " +
                        "Check the number of worker connections and threads.")

    def init_process(self):
        # Create the pool, poller and lock in the worker process
        # (post-fork) before running the base initialization.
        self.tpool = self.get_thread_pool()
        self.poller = selectors.DefaultSelector()
        self._lock = RLock()
        super().init_process()

    def get_thread_pool(self):
        """Override this method to customize how the thread pool is created"""
        return futures.ThreadPoolExecutor(max_workers=self.cfg.threads)

    def handle_quit(self, sig, frame):
        self.alive = False
        # worker_int callback
        self.cfg.worker_int(self)
        # Non-blocking shutdown; give threads a brief moment, then exit.
        self.tpool.shutdown(False)
        time.sleep(0.1)
        sys.exit(0)

    def _wrap_future(self, fs, conn):
        # Remember the connection on the future so callbacks can reach it.
        fs.conn = conn
        self.futures.append(fs)
        fs.add_done_callback(self.finish_request)

    def enqueue_req(self, conn):
        conn.init()
        # submit the connection to a worker
        fs = self.tpool.submit(self.handle, conn)
        self._wrap_future(fs, conn)

    def accept(self, server, listener):
        """Accept one connection and register it for readability."""
        try:
            sock, client = listener.accept()
            # initialize the connection object
            conn = TConn(self.cfg, sock, client, server)

            self.nr_conns += 1
            # wait until socket is readable
            with self._lock:
                self.poller.register(conn.sock, selectors.EVENT_READ,
                                     partial(self.on_client_socket_readable, conn))
        except OSError as e:
            # Benign accept races and aborts are expected under load.
            if e.errno not in (errno.EAGAIN, errno.ECONNABORTED,
                               errno.EWOULDBLOCK):
                raise

    def on_client_socket_readable(self, conn, client):
        """Poller callback: the client sent data (or hung up); hand the
        connection to the thread pool."""
        with self._lock:
            # unregister the client from the poller
            self.poller.unregister(client)

            if conn.initialized:
                # remove the connection from keepalive
                try:
                    self._keep.remove(conn)
                except ValueError:
                    # race condition
                    return

        # submit the connection to a worker
        self.enqueue_req(conn)

    def murder_keepalived(self):
        """Close keepalive connections whose deadline has passed.

        The keepalive deque is ordered by arm time, so scanning stops at
        the first connection that has not yet expired.
        """
        now = time.time()
        while True:
            with self._lock:
                try:
                    # remove the connection from the queue
                    conn = self._keep.popleft()
                except IndexError:
                    break

            delta = conn.timeout - now
            if delta > 0:
                # add the connection back to the queue
                with self._lock:
                    self._keep.appendleft(conn)
                break
            else:
                self.nr_conns -= 1
                # remove the socket from the poller
                with self._lock:
                    try:
                        self.poller.unregister(conn.sock)
                    except OSError as e:
                        if e.errno != errno.EBADF:
                            raise
                    except KeyError:
                        # already removed by the system, continue
                        pass
                    except ValueError:
                        # already removed by the system continue
                        pass

                # close the socket
                conn.close()

    def is_parent_alive(self):
        # If our parent changed then we shut down.
        if self.ppid != os.getppid():
            self.log.info("Parent changed, shutting down: %s", self)
            return False
        return True

    def run(self):
        """Main loop: poll for accepts and readable clients, reap
        finished request futures, and expire idle keepalive sockets."""
        # init listeners, add them to the event loop
        for sock in self.sockets:
            sock.setblocking(False)
            # a race condition during graceful shutdown may make the listener
            # name unavailable in the request handler so capture it once here
            server = sock.getsockname()
            acceptor = partial(self.accept, server)
            self.poller.register(sock, selectors.EVENT_READ, acceptor)

        while self.alive:
            # notify the arbiter we are alive
            self.notify()

            # can we accept more connections?
            if self.nr_conns < self.worker_connections:
                # wait for an event
                events = self.poller.select(1.0)
                for key, _ in events:
                    callback = key.data
                    callback(key.fileobj)

                # check (but do not wait) for finished requests
                result = futures.wait(self.futures, timeout=0,
                                      return_when=futures.FIRST_COMPLETED)
            else:
                # wait for a request to finish
                result = futures.wait(self.futures, timeout=1.0,
                                      return_when=futures.FIRST_COMPLETED)

            # clean up finished requests
            for fut in result.done:
                self.futures.remove(fut)

            if not self.is_parent_alive():
                break

            # handle keepalive timeouts
            self.murder_keepalived()

        self.tpool.shutdown(False)
        self.poller.close()

        for s in self.sockets:
            s.close()

        # Let in-flight requests finish within the graceful timeout.
        futures.wait(self.futures, timeout=self.cfg.graceful_timeout)

    def finish_request(self, fs):
        """Future done-callback: recycle the connection into keepalive
        or close it."""
        if fs.cancelled():
            self.nr_conns -= 1
            fs.conn.close()
            return

        try:
            (keepalive, conn) = fs.result()
            # if the connection should be kept alived add it
            # to the eventloop and record it
            if keepalive and self.alive:
                # flag the socket as non blocked
                conn.sock.setblocking(False)

                # register the connection
                conn.set_timeout()
                with self._lock:
                    self._keep.append(conn)

                    # add the socket to the event loop
                    self.poller.register(conn.sock, selectors.EVENT_READ,
                                         partial(self.on_client_socket_readable, conn))
            else:
                self.nr_conns -= 1
                conn.close()
        except Exception:
            # an exception happened, make sure to close the
            # socket.
            self.nr_conns -= 1
            fs.conn.close()

    def handle(self, conn):
        """Thread-pool job: parse and serve one request.

        Returns ``(keepalive, conn)``; keepalive is True when the
        connection should go back into the keepalive poller.
        """
        keepalive = False
        req = None
        try:
            req = next(conn.parser)
            if not req:
                return (False, conn)

            # handle the request
            keepalive = self.handle_request(req, conn)
            if keepalive:
                return (keepalive, conn)
        except http.errors.NoMoreData as e:
            self.log.debug("Ignored premature client disconnection. %s", e)

        except StopIteration as e:
            self.log.debug("Closing connection. %s", e)
        except ssl.SSLError as e:
            if e.args[0] == ssl.SSL_ERROR_EOF:
                self.log.debug("ssl connection closed")
                conn.sock.close()
            else:
                self.log.debug("Error processing SSL request.")
                self.handle_error(req, conn.sock, conn.client, e)

        except OSError as e:
            # EPIPE/ECONNRESET/ENOTCONN are routine client disconnects.
            if e.errno not in (errno.EPIPE, errno.ECONNRESET, errno.ENOTCONN):
                self.log.exception("Socket error processing request.")
            else:
                if e.errno == errno.ECONNRESET:
                    self.log.debug("Ignoring connection reset")
                elif e.errno == errno.ENOTCONN:
                    self.log.debug("Ignoring socket not connected")
                else:
                    self.log.debug("Ignoring connection epipe")
        except Exception as e:
            self.handle_error(req, conn.sock, conn.client, e)

        return (False, conn)

    def handle_request(self, req, conn):
        """Run the WSGI app for one request; return True to keep the
        connection alive, False to close it."""
        environ = {}
        resp = None
        try:
            self.cfg.pre_request(self, req)
            request_start = datetime.now()
            resp, environ = wsgi.create(req, conn.sock, conn.client,
                                        conn.server, self.cfg)
            environ["wsgi.multithread"] = True
            self.nr += 1
            if self.nr >= self.max_requests:
                if self.alive:
                    self.log.info("Autorestarting worker after current request.")
                    self.alive = False
                resp.force_close()

            # No keepalive while shutting down, when keepalive is off,
            # or when the keepalive budget is exhausted.
            if not self.alive or not self.cfg.keepalive:
                resp.force_close()
            elif len(self._keep) >= self.max_keepalived:
                resp.force_close()

            respiter = self.wsgi(environ, resp.start_response)
            try:
                if isinstance(respiter, environ['wsgi.file_wrapper']):
                    resp.write_file(respiter)
                else:
                    for item in respiter:
                        resp.write(item)

                resp.close()
            finally:
                # Access-log every request, even when writing failed.
                request_time = datetime.now() - request_start
                self.log.access(resp, req, environ, request_time)
                if hasattr(respiter, "close"):
                    respiter.close()

            if resp.should_close():
                self.log.debug("Closing connection.")
                return False
        except OSError:
            # pass to next try-except level
            util.reraise(*sys.exc_info())
        except Exception:
            if resp and resp.headers_sent:
                # If the requests have already been sent, we should close the
                # connection to indicate the error.
                self.log.exception("Error handling request")
                try:
                    conn.sock.shutdown(socket.SHUT_RDWR)
                    conn.sock.close()
                except OSError:
                    pass
                raise StopIteration()
            raise
        finally:
            try:
                self.cfg.post_request(self, req, environ, resp)
            except Exception:
                self.log.exception("Exception in post_request hook")

        return True
diff --git a/.venv/Lib/site-packages/gunicorn/workers/gtornado.py b/.venv/Lib/site-packages/gunicorn/workers/gtornado.py
new file mode 100644
index 0000000000000000000000000000000000000000..544af7d09abb6398869a3ed4e209c4e32b111d70
--- /dev/null
+++ b/.venv/Lib/site-packages/gunicorn/workers/gtornado.py
@@ -0,0 +1,166 @@
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+
+import os
+import sys
+
+try:
+    import tornado
+except ImportError:
+    raise RuntimeError("You need tornado installed to use this worker.")
+import tornado.web
+import tornado.httpserver
+from tornado.ioloop import IOLoop, PeriodicCallback
+from tornado.wsgi import WSGIContainer
+from gunicorn.workers.base import Worker
+from gunicorn import __version__ as gversion
+from gunicorn.sock import ssl_context
+
+
+# Tornado 5.0 updated its IOLoop, and the `io_loop` arguments to many
+# Tornado functions have been removed in Tornado 5.0. Also, they no
+# longer store PeriodCallbacks in ioloop._callbacks. Instead we store
+# them on our side, and use stop() on them when stopping the worker.
+# See https://www.tornadoweb.org/en/stable/releases/v5.0.0.html#backwards-compatibility-notes
+# for more details.
+TORNADO5 = tornado.version_info >= (5, 0, 0)
+
+
+class TornadoWorker(Worker):
+
+    @classmethod
+    def setup(cls):
+        web = sys.modules.pop("tornado.web")
+        old_clear = web.RequestHandler.clear
+
+        def clear(self):
+            old_clear(self)
+            if "Gunicorn" not in self._headers["Server"]:
+                self._headers["Server"] += " (Gunicorn/%s)" % gversion
+        web.RequestHandler.clear = clear
+        sys.modules["tornado.web"] = web
+
+    def handle_exit(self, sig, frame):
+        if self.alive:
+            super().handle_exit(sig, frame)
+
+    def handle_request(self):
+        self.nr += 1
+        if self.alive and self.nr >= self.max_requests:
+            self.log.info("Autorestarting worker after current request.")
+            self.alive = False
+
+    def watchdog(self):
+        if self.alive:
+            self.notify()
+
+        if self.ppid != os.getppid():
+            self.log.info("Parent changed, shutting down: %s", self)
+            self.alive = False
+
+    def heartbeat(self):
+        if not self.alive:
+            if self.server_alive:
+                if hasattr(self, 'server'):
+                    try:
+                        self.server.stop()
+                    except Exception:
+                        pass
+                self.server_alive = False
+            else:
+                if TORNADO5:
+                    for callback in self.callbacks:
+                        callback.stop()
+                    self.ioloop.stop()
+                else:
+                    if not self.ioloop._callbacks:
+                        self.ioloop.stop()
+
+    def init_process(self):
+        # IOLoop cannot survive a fork or be shared across processes
+        # in any way. When multiple processes are being used, each process
+        # should create its own IOLoop. We should clear current IOLoop
+        # if exists before os.fork.
+        IOLoop.clear_current()
+        super().init_process()
+
+    def run(self):
+        self.ioloop = IOLoop.instance()
+        self.alive = True
+        self.server_alive = False
+
+        if TORNADO5:
+            self.callbacks = []
+            self.callbacks.append(PeriodicCallback(self.watchdog, 1000))
+            self.callbacks.append(PeriodicCallback(self.heartbeat, 1000))
+            for callback in self.callbacks:
+                callback.start()
+        else:
+            PeriodicCallback(self.watchdog, 1000, io_loop=self.ioloop).start()
+            PeriodicCallback(self.heartbeat, 1000, io_loop=self.ioloop).start()
+
+        # Assume the app is a WSGI callable if its not an
+        # instance of tornado.web.Application or is an
+        # instance of tornado.wsgi.WSGIApplication
+        app = self.wsgi
+
+        if tornado.version_info[0] < 6:
+            if not isinstance(app, tornado.web.Application) or \
+                    isinstance(app, tornado.wsgi.WSGIApplication):
+                app = WSGIContainer(app)
+        elif not isinstance(app, WSGIContainer) and \
+                not isinstance(app, tornado.web.Application):
+            app = WSGIContainer(app)
+
+        # Monkey-patching HTTPConnection.finish to count the
+        # number of requests being handled by Tornado. This
+        # will help gunicorn shutdown the worker if max_requests
+        # is exceeded.
+        httpserver = sys.modules["tornado.httpserver"]
+        if hasattr(httpserver, 'HTTPConnection'):
+            old_connection_finish = httpserver.HTTPConnection.finish
+
+            def finish(other):
+                self.handle_request()
+                old_connection_finish(other)
+            httpserver.HTTPConnection.finish = finish
+            sys.modules["tornado.httpserver"] = httpserver
+
+            server_class = tornado.httpserver.HTTPServer
+        else:
+
+            class _HTTPServer(tornado.httpserver.HTTPServer):
+
+                def on_close(instance, server_conn):
+                    self.handle_request()
+                    super().on_close(server_conn)
+
+            server_class = _HTTPServer
+
+        if self.cfg.is_ssl:
+            if TORNADO5:
+                server = server_class(app, ssl_options=ssl_context(self.cfg))
+            else:
+                server = server_class(app, io_loop=self.ioloop,
+                                      ssl_options=ssl_context(self.cfg))
+        else:
+            if TORNADO5:
+                server = server_class(app)
+            else:
+                server = server_class(app, io_loop=self.ioloop)
+
+        self.server = server
+        self.server_alive = True
+
+        for s in self.sockets:
+            s.setblocking(0)
+            if hasattr(server, "add_socket"):  # tornado > 2.0
+                server.add_socket(s)
+            elif hasattr(server, "_sockets"):  # tornado 2.0
+                server._sockets[s.fileno()] = s
+
+        server.no_keep_alive = self.cfg.keepalive <= 0
+        server.start(num_processes=1)
+
+        self.ioloop.start()
diff --git a/.venv/Lib/site-packages/gunicorn/workers/sync.py b/.venv/Lib/site-packages/gunicorn/workers/sync.py
new file mode 100644
index 0000000000000000000000000000000000000000..4c029f9128ac564505abdd4a4b3ee5fc9b579861
--- /dev/null
+++ b/.venv/Lib/site-packages/gunicorn/workers/sync.py
@@ -0,0 +1,209 @@
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+#
+
+from datetime import datetime
+import errno
+import os
+import select
+import socket
+import ssl
+import sys
+
+from gunicorn import http
+from gunicorn.http import wsgi
+from gunicorn import sock
+from gunicorn import util
+from gunicorn.workers import base
+
+
+class StopWaiting(Exception):
+    """ exception raised to stop waiting for a connection """
+
+
+class SyncWorker(base.Worker):
+
+    def accept(self, listener):
+        client, addr = listener.accept()
+        client.setblocking(1)
+        util.close_on_exec(client)
+        self.handle(listener, client, addr)
+
+    def wait(self, timeout):
+        try:
+            self.notify()
+            ret = select.select(self.wait_fds, [], [], timeout)
+            if ret[0]:
+                if self.PIPE[0] in ret[0]:
+                    os.read(self.PIPE[0], 1)
+                return ret[0]
+
+        except OSError as e:
+            if e.args[0] == errno.EINTR:
+                return self.sockets
+            if e.args[0] == errno.EBADF:
+                if self.nr < 0:
+                    return self.sockets
+                else:
+                    raise StopWaiting
+            raise
+
+    def is_parent_alive(self):
+        # If our parent changed then we shut down.
+        if self.ppid != os.getppid():
+            self.log.info("Parent changed, shutting down: %s", self)
+            return False
+        return True
+
+    def run_for_one(self, timeout):
+        listener = self.sockets[0]
+        while self.alive:
+            self.notify()
+
+            # Accept a connection. If we get an error telling us
+            # that no connection is waiting we fall down to the
+            # select which is where we'll wait for a bit for new
+            # workers to come give us some love.
+            try:
+                self.accept(listener)
+                # Keep processing clients until no one is waiting. This
+                # prevents the need to select() for every client that we
+                # process.
+                continue
+
+            except OSError as e:
+                if e.errno not in (errno.EAGAIN, errno.ECONNABORTED,
+                                   errno.EWOULDBLOCK):
+                    raise
+
+            if not self.is_parent_alive():
+                return
+
+            try:
+                self.wait(timeout)
+            except StopWaiting:
+                return
+
+    def run_for_multiple(self, timeout):
+        while self.alive:
+            self.notify()
+
+            try:
+                ready = self.wait(timeout)
+            except StopWaiting:
+                return
+
+            if ready is not None:
+                for listener in ready:
+                    if listener == self.PIPE[0]:
+                        continue
+
+                    try:
+                        self.accept(listener)
+                    except OSError as e:
+                        if e.errno not in (errno.EAGAIN, errno.ECONNABORTED,
+                                           errno.EWOULDBLOCK):
+                            raise
+
+            if not self.is_parent_alive():
+                return
+
+    def run(self):
+        # if no timeout is given the worker will never wait and will
+        # use the CPU for nothing. This minimal timeout prevent it.
+        timeout = self.timeout or 0.5
+
+        # self.socket appears to lose its blocking status after
+        # we fork in the arbiter. Reset it here.
+        for s in self.sockets:
+            s.setblocking(0)
+
+        if len(self.sockets) > 1:
+            self.run_for_multiple(timeout)
+        else:
+            self.run_for_one(timeout)
+
+    def handle(self, listener, client, addr):
+        req = None
+        try:
+            if self.cfg.is_ssl:
+                client = sock.ssl_wrap_socket(client, self.cfg)
+            parser = http.RequestParser(self.cfg, client, addr)
+            req = next(parser)
+            self.handle_request(listener, req, client, addr)
+        except http.errors.NoMoreData as e:
+            self.log.debug("Ignored premature client disconnection. %s", e)
+        except StopIteration as e:
+            self.log.debug("Closing connection. %s", e)
+        except ssl.SSLError as e:
+            if e.args[0] == ssl.SSL_ERROR_EOF:
+                self.log.debug("ssl connection closed")
+                client.close()
+            else:
+                self.log.debug("Error processing SSL request.")
+                self.handle_error(req, client, addr, e)
+        except OSError as e:
+            if e.errno not in (errno.EPIPE, errno.ECONNRESET, errno.ENOTCONN):
+                self.log.exception("Socket error processing request.")
+            else:
+                if e.errno == errno.ECONNRESET:
+                    self.log.debug("Ignoring connection reset")
+                elif e.errno == errno.ENOTCONN:
+                    self.log.debug("Ignoring socket not connected")
+                else:
+                    self.log.debug("Ignoring EPIPE")
+        except BaseException as e:
+            self.handle_error(req, client, addr, e)
+        finally:
+            util.close(client)
+
+    def handle_request(self, listener, req, client, addr):
+        environ = {}
+        resp = None
+        try:
+            self.cfg.pre_request(self, req)
+            request_start = datetime.now()
+            resp, environ = wsgi.create(req, client, addr,
+                                        listener.getsockname(), self.cfg)
+            # Force the connection closed until someone shows
+            # a buffering proxy that supports Keep-Alive to
+            # the backend.
+            resp.force_close()
+            self.nr += 1
+            if self.nr >= self.max_requests:
+                self.log.info("Autorestarting worker after current request.")
+                self.alive = False
+            respiter = self.wsgi(environ, resp.start_response)
+            try:
+                if isinstance(respiter, environ['wsgi.file_wrapper']):
+                    resp.write_file(respiter)
+                else:
+                    for item in respiter:
+                        resp.write(item)
+                resp.close()
+            finally:
+                request_time = datetime.now() - request_start
+                self.log.access(resp, req, environ, request_time)
+                if hasattr(respiter, "close"):
+                    respiter.close()
+        except OSError:
+            # pass to next try-except level
+            util.reraise(*sys.exc_info())
+        except Exception:
+            if resp and resp.headers_sent:
+                # If the requests have already been sent, we should close the
+                # connection to indicate the error.
+                self.log.exception("Error handling request")
+                try:
+                    client.shutdown(socket.SHUT_RDWR)
+                    client.close()
+                except OSError:
+                    pass
+                raise StopIteration()
+            raise
+        finally:
+            try:
+                self.cfg.post_request(self, req, environ, resp)
+            except Exception:
+                self.log.exception("Exception in post_request hook")
diff --git a/.venv/Lib/site-packages/gunicorn/workers/workertmp.py b/.venv/Lib/site-packages/gunicorn/workers/workertmp.py
new file mode 100644
index 0000000000000000000000000000000000000000..8ef00a560c4818c00895ed92db011d7682216ff3
--- /dev/null
+++ b/.venv/Lib/site-packages/gunicorn/workers/workertmp.py
@@ -0,0 +1,53 @@
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+
+import os
+import time
+import platform
+import tempfile
+
+from gunicorn import util
+
+PLATFORM = platform.system()
+IS_CYGWIN = PLATFORM.startswith('CYGWIN')
+
+
+class WorkerTmp:
+
+    def __init__(self, cfg):
+        old_umask = os.umask(cfg.umask)
+        fdir = cfg.worker_tmp_dir
+        if fdir and not os.path.isdir(fdir):
+            raise RuntimeError("%s doesn't exist. Can't create workertmp." % fdir)
+        fd, name = tempfile.mkstemp(prefix="wgunicorn-", dir=fdir)
+        os.umask(old_umask)
+
+        # change the owner and group of the file if the worker will run as
+        # a different user or group, so that the worker can modify the file
+        if cfg.uid != os.geteuid() or cfg.gid != os.getegid():
+            util.chown(name, cfg.uid, cfg.gid)
+
+        # unlink the file so we don't leak temporary files
+        try:
+            if not IS_CYGWIN:
+                util.unlink(name)
+            # In Python 3.8, open() emits RuntimeWarning if buffering=1 for binary mode.
+            # Because we never write to this file, pass 0 to switch buffering off.
+            self._tmp = os.fdopen(fd, 'w+b', 0)
+        except Exception:
+            os.close(fd)
+            raise
+
+    def notify(self):
+        new_time = time.monotonic()
+        os.utime(self._tmp.fileno(), (new_time, new_time))
+
+    def last_update(self):
+        return os.fstat(self._tmp.fileno()).st_mtime
+
+    def fileno(self):
+        return self._tmp.fileno()
+
+    def close(self):
+        return self._tmp.close()
diff --git a/.venv/Lib/site-packages/packaging-24.2.dist-info/INSTALLER b/.venv/Lib/site-packages/packaging-24.2.dist-info/INSTALLER
new file mode 100644
index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68
--- /dev/null
+++ b/.venv/Lib/site-packages/packaging-24.2.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/.venv/Lib/site-packages/packaging-24.2.dist-info/LICENSE b/.venv/Lib/site-packages/packaging-24.2.dist-info/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..6f62d44e4ef733c0e713afcd2371fed7f2b3de67
--- /dev/null
+++ b/.venv/Lib/site-packages/packaging-24.2.dist-info/LICENSE
@@ -0,0 +1,3 @@
+This software is made available under the terms of *either* of the licenses
+found in LICENSE.APACHE or LICENSE.BSD. Contributions to this software is made
+under the terms of *both* these licenses.
diff --git a/.venv/Lib/site-packages/packaging-24.2.dist-info/LICENSE.APACHE b/.venv/Lib/site-packages/packaging-24.2.dist-info/LICENSE.APACHE
new file mode 100644
index 0000000000000000000000000000000000000000..f433b1a53f5b830a205fd2df78e2b34974656c7b
--- /dev/null
+++ b/.venv/Lib/site-packages/packaging-24.2.dist-info/LICENSE.APACHE
@@ -0,0 +1,177 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
diff --git a/.venv/Lib/site-packages/packaging-24.2.dist-info/LICENSE.BSD b/.venv/Lib/site-packages/packaging-24.2.dist-info/LICENSE.BSD
new file mode 100644
index 0000000000000000000000000000000000000000..42ce7b75c92fb01a3f6ed17eea363f756b7da582
--- /dev/null
+++ b/.venv/Lib/site-packages/packaging-24.2.dist-info/LICENSE.BSD
@@ -0,0 +1,23 @@
+Copyright (c) Donald Stufft and individual contributors.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+    1. Redistributions of source code must retain the above copyright notice,
+       this list of conditions and the following disclaimer.
+
+    2. Redistributions in binary form must reproduce the above copyright
+       notice, this list of conditions and the following disclaimer in the
+       documentation and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/.venv/Lib/site-packages/packaging-24.2.dist-info/METADATA b/.venv/Lib/site-packages/packaging-24.2.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..1479c8694bfbd583a896dbe9bd33cdb6d7e7371e
--- /dev/null
+++ b/.venv/Lib/site-packages/packaging-24.2.dist-info/METADATA
@@ -0,0 +1,102 @@
+Metadata-Version: 2.3
+Name: packaging
+Version: 24.2
+Summary: Core utilities for Python packages
+Author-email: Donald Stufft <donald@stufft.io>
+Requires-Python: >=3.8
+Description-Content-Type: text/x-rst
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Typing :: Typed
+Project-URL: Documentation, https://packaging.pypa.io/
+Project-URL: Source, https://github.com/pypa/packaging
+
+packaging
+=========
+
+.. start-intro
+
+Reusable core utilities for various Python Packaging
+`interoperability specifications <https://packaging.python.org/specifications/>`_.
+
+This library provides utilities that implement the interoperability
+specifications which have clearly one correct behaviour (eg: :pep:`440`)
+or benefit greatly from having a single shared implementation (eg: :pep:`425`).
+
+.. end-intro
+
+The ``packaging`` project includes the following: version handling, specifiers,
+markers, requirements, tags, utilities.
+
+Documentation
+-------------
+
+The `documentation`_ provides information and the API for the following:
+
+- Version Handling
+- Specifiers
+- Markers
+- Requirements
+- Tags
+- Utilities
+
+Installation
+------------
+
+Use ``pip`` to install these utilities::
+
+    pip install packaging
+
+The ``packaging`` library uses calendar-based versioning (``YY.N``).
+
+Discussion
+----------
+
+If you run into bugs, you can file them in our `issue tracker`_.
+
+You can also join ``#pypa`` on Freenode to ask questions or get involved.
+
+
+.. _`documentation`: https://packaging.pypa.io/
+.. _`issue tracker`: https://github.com/pypa/packaging/issues
+
+
+Code of Conduct
+---------------
+
+Everyone interacting in the packaging project's codebases, issue trackers, chat
+rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_.
+
+.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md
+
+Contributing
+------------
+
+The ``CONTRIBUTING.rst`` file outlines how to contribute to this project as
+well as how to report a potential security issue. The documentation for this
+project also covers information about `project development`_ and `security`_.
+
+.. _`project development`: https://packaging.pypa.io/en/latest/development/
+.. _`security`: https://packaging.pypa.io/en/latest/security/
+
+Project History
+---------------
+
+Please review the ``CHANGELOG.rst`` file or the `Changelog documentation`_ for
+recent changes and project history.
+
+.. _`Changelog documentation`: https://packaging.pypa.io/en/latest/changelog/
+
diff --git a/.venv/Lib/site-packages/packaging-24.2.dist-info/RECORD b/.venv/Lib/site-packages/packaging-24.2.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..33b945fc177326f91ee9f812ec72095048945c54
--- /dev/null
+++ b/.venv/Lib/site-packages/packaging-24.2.dist-info/RECORD
@@ -0,0 +1,40 @@
+packaging-24.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+packaging-24.2.dist-info/LICENSE,sha256=ytHvW9NA1z4HS6YU0m996spceUDD2MNIUuZcSQlobEg,197
+packaging-24.2.dist-info/LICENSE.APACHE,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174
+packaging-24.2.dist-info/LICENSE.BSD,sha256=tw5-m3QvHMb5SLNMFqo5_-zpQZY2S8iP8NIYDwAo-sU,1344
+packaging-24.2.dist-info/METADATA,sha256=ohH86s6k5mIfQxY2TS0LcSfADeOFa4BiCC-bxZV-pNs,3204
+packaging-24.2.dist-info/RECORD,,
+packaging-24.2.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
+packaging/__init__.py,sha256=dk4Ta_vmdVJxYHDcfyhvQNw8V3PgSBomKNXqg-D2JDY,494
+packaging/__pycache__/__init__.cpython-310.pyc,,
+packaging/__pycache__/_elffile.cpython-310.pyc,,
+packaging/__pycache__/_manylinux.cpython-310.pyc,,
+packaging/__pycache__/_musllinux.cpython-310.pyc,,
+packaging/__pycache__/_parser.cpython-310.pyc,,
+packaging/__pycache__/_structures.cpython-310.pyc,,
+packaging/__pycache__/_tokenizer.cpython-310.pyc,,
+packaging/__pycache__/markers.cpython-310.pyc,,
+packaging/__pycache__/metadata.cpython-310.pyc,,
+packaging/__pycache__/requirements.cpython-310.pyc,,
+packaging/__pycache__/specifiers.cpython-310.pyc,,
+packaging/__pycache__/tags.cpython-310.pyc,,
+packaging/__pycache__/utils.cpython-310.pyc,,
+packaging/__pycache__/version.cpython-310.pyc,,
+packaging/_elffile.py,sha256=cflAQAkE25tzhYmq_aCi72QfbT_tn891tPzfpbeHOwE,3306
+packaging/_manylinux.py,sha256=vl5OCoz4kx80H5rwXKeXWjl9WNISGmr4ZgTpTP9lU9c,9612
+packaging/_musllinux.py,sha256=p9ZqNYiOItGee8KcZFeHF_YcdhVwGHdK6r-8lgixvGQ,2694
+packaging/_parser.py,sha256=s_TvTvDNK0NrM2QB3VKThdWFM4Nc0P6JnkObkl3MjpM,10236
+packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431
+packaging/_tokenizer.py,sha256=J6v5H7Jzvb-g81xp_2QACKwO7LxHQA6ikryMU7zXwN8,5273
+packaging/licenses/__init__.py,sha256=1x5M1nEYjcgwEbLt0dXwz2ukjr18DiCzC0sraQqJ-Ww,5715
+packaging/licenses/__pycache__/__init__.cpython-310.pyc,,
+packaging/licenses/__pycache__/_spdx.cpython-310.pyc,,
+packaging/licenses/_spdx.py,sha256=oAm1ztPFwlsmCKe7lAAsv_OIOfS1cWDu9bNBkeu-2ns,48398
+packaging/markers.py,sha256=c89TNzB7ZdGYhkovm6PYmqGyHxXlYVaLW591PHUNKD8,10561
+packaging/metadata.py,sha256=YJibM7GYe4re8-0a3OlXmGS-XDgTEoO4tlBt2q25Bng,34762
+packaging/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+packaging/requirements.py,sha256=gYyRSAdbrIyKDY66ugIDUQjRMvxkH2ALioTmX3tnL6o,2947
+packaging/specifiers.py,sha256=GG1wPNMcL0fMJO68vF53wKMdwnfehDcaI-r9NpTfilA,40074
+packaging/tags.py,sha256=CFqrJzAzc2XNGexerH__T-Y5Iwq7WbsYXsiLERLWxY0,21014
+packaging/utils.py,sha256=0F3Hh9OFuRgrhTgGZUl5K22Fv1YP2tZl1z_2gO6kJiA,5050
+packaging/version.py,sha256=olfyuk_DPbflNkJ4wBWetXQ17c74x3DB501degUv7DY,16676
diff --git a/.venv/Lib/site-packages/packaging-24.2.dist-info/WHEEL b/.venv/Lib/site-packages/packaging-24.2.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..e3c6feefa22927866e3fd5575379ea972b432aaf
--- /dev/null
+++ b/.venv/Lib/site-packages/packaging-24.2.dist-info/WHEEL
@@ -0,0 +1,4 @@
+Wheel-Version: 1.0
+Generator: flit 3.10.1
+Root-Is-Purelib: true
+Tag: py3-none-any
diff --git a/.venv/Lib/site-packages/packaging/__init__.py b/.venv/Lib/site-packages/packaging/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..d79f73c574ffc759ef5d2145b1ec742d85c2500b
--- /dev/null
+++ b/.venv/Lib/site-packages/packaging/__init__.py
@@ -0,0 +1,15 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+__title__ = "packaging"
+__summary__ = "Core utilities for Python packages"
+__uri__ = "https://github.com/pypa/packaging"
+
+__version__ = "24.2"
+
+__author__ = "Donald Stufft and individual contributors"
+__email__ = "donald@stufft.io"
+
+__license__ = "BSD-2-Clause or Apache-2.0"
+__copyright__ = f"2014 {__author__}"
diff --git a/.venv/Lib/site-packages/packaging/__pycache__/__init__.cpython-310.pyc b/.venv/Lib/site-packages/packaging/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..fd64248d9deea030590d0b03aa95311a2d0b20a9
Binary files /dev/null and b/.venv/Lib/site-packages/packaging/__pycache__/__init__.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/packaging/__pycache__/_elffile.cpython-310.pyc b/.venv/Lib/site-packages/packaging/__pycache__/_elffile.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..34b1b2fc6ccd606a1f7837fedabc0f7d1aaad8e0
Binary files /dev/null and b/.venv/Lib/site-packages/packaging/__pycache__/_elffile.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/packaging/__pycache__/_manylinux.cpython-310.pyc b/.venv/Lib/site-packages/packaging/__pycache__/_manylinux.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b5031dcef9a96774191e83596767ff014d8e709f
Binary files /dev/null and b/.venv/Lib/site-packages/packaging/__pycache__/_manylinux.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/packaging/__pycache__/_musllinux.cpython-310.pyc b/.venv/Lib/site-packages/packaging/__pycache__/_musllinux.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e6c54ec8e0917695519c09c863081accd756fdca
Binary files /dev/null and b/.venv/Lib/site-packages/packaging/__pycache__/_musllinux.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/packaging/__pycache__/_parser.cpython-310.pyc b/.venv/Lib/site-packages/packaging/__pycache__/_parser.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9411c0d53d4ed88d8d00ad9b41b7df10876a125f
Binary files /dev/null and b/.venv/Lib/site-packages/packaging/__pycache__/_parser.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/packaging/__pycache__/_structures.cpython-310.pyc b/.venv/Lib/site-packages/packaging/__pycache__/_structures.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..553fa77ca9df3a3f65936714b82f3c23d99c5c79
Binary files /dev/null and b/.venv/Lib/site-packages/packaging/__pycache__/_structures.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/packaging/__pycache__/_tokenizer.cpython-310.pyc b/.venv/Lib/site-packages/packaging/__pycache__/_tokenizer.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e93b7b75ab43e49468f92ce0fa266af17eed9d5e
Binary files /dev/null and b/.venv/Lib/site-packages/packaging/__pycache__/_tokenizer.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/packaging/__pycache__/markers.cpython-310.pyc b/.venv/Lib/site-packages/packaging/__pycache__/markers.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f28e0e4dc6137b8dbf587c1919e6fd078086d20f
Binary files /dev/null and b/.venv/Lib/site-packages/packaging/__pycache__/markers.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/packaging/__pycache__/metadata.cpython-310.pyc b/.venv/Lib/site-packages/packaging/__pycache__/metadata.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..69a3f82b9fd5937e2154430c30e7a20b3b2dea2b
Binary files /dev/null and b/.venv/Lib/site-packages/packaging/__pycache__/metadata.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/packaging/__pycache__/requirements.cpython-310.pyc b/.venv/Lib/site-packages/packaging/__pycache__/requirements.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2495a30a4bf767c15be0cc5732a773acd7ecccc2
Binary files /dev/null and b/.venv/Lib/site-packages/packaging/__pycache__/requirements.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/packaging/__pycache__/specifiers.cpython-310.pyc b/.venv/Lib/site-packages/packaging/__pycache__/specifiers.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a86e5ab11fad004d4890786fb8a71499cec0838d
Binary files /dev/null and b/.venv/Lib/site-packages/packaging/__pycache__/specifiers.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/packaging/__pycache__/tags.cpython-310.pyc b/.venv/Lib/site-packages/packaging/__pycache__/tags.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..dc5d3f4dd20cbdc0f633176985f2dfb61ce66e35
Binary files /dev/null and b/.venv/Lib/site-packages/packaging/__pycache__/tags.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/packaging/__pycache__/utils.cpython-310.pyc b/.venv/Lib/site-packages/packaging/__pycache__/utils.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..26c5d82f47470a478beb4e25e197fbabd463ab6e
Binary files /dev/null and b/.venv/Lib/site-packages/packaging/__pycache__/utils.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/packaging/__pycache__/version.cpython-310.pyc b/.venv/Lib/site-packages/packaging/__pycache__/version.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..419971d9f08e5952f6503988ba0974663b0a062c
Binary files /dev/null and b/.venv/Lib/site-packages/packaging/__pycache__/version.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/packaging/_elffile.py b/.venv/Lib/site-packages/packaging/_elffile.py
new file mode 100644
index 0000000000000000000000000000000000000000..25f4282cc29cb03d7be881f03dee841d7dbc215a
--- /dev/null
+++ b/.venv/Lib/site-packages/packaging/_elffile.py
@@ -0,0 +1,110 @@
+"""
+ELF file parser.
+
+This provides a class ``ELFFile`` that parses an ELF executable in a similar
+interface to ``ZipFile``. Only the read interface is implemented.
+
+Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca
+ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html
+"""
+
+from __future__ import annotations
+
+import enum
+import os
+import struct
+from typing import IO
+
+
+class ELFInvalid(ValueError):
+    pass
+
+
+class EIClass(enum.IntEnum):
+    C32 = 1
+    C64 = 2
+
+
+class EIData(enum.IntEnum):
+    Lsb = 1
+    Msb = 2
+
+
+class EMachine(enum.IntEnum):
+    I386 = 3
+    S390 = 22
+    Arm = 40
+    X8664 = 62
+    AArc64 = 183
+
+
+class ELFFile:
+    """
+    Representation of an ELF executable.
+    """
+
+    def __init__(self, f: IO[bytes]) -> None:
+        self._f = f
+
+        try:
+            ident = self._read("16B")
+        except struct.error as e:
+            raise ELFInvalid("unable to parse identification") from e
+        magic = bytes(ident[:4])
+        if magic != b"\x7fELF":
+            raise ELFInvalid(f"invalid magic: {magic!r}")
+
+        self.capacity = ident[4]  # Format for program header (bitness).
+        self.encoding = ident[5]  # Data structure encoding (endianness).
+
+        try:
+            # e_fmt: Format for program header.
+            # p_fmt: Format for section header.
+            # p_idx: Indexes to find p_type, p_offset, and p_filesz.
+            e_fmt, self._p_fmt, self._p_idx = {
+                (1, 1): ("<HHIIIIIHHH", "<IIIIIIII", (0, 1, 4)),  # 32-bit LSB.
+                (1, 2): (">HHIIIIIHHH", ">IIIIIIII", (0, 1, 4)),  # 32-bit MSB.
+                (2, 1): ("<HHIQQQIHHH", "<IIQQQQQQ", (0, 2, 5)),  # 64-bit LSB.
+                (2, 2): (">HHIQQQIHHH", ">IIQQQQQQ", (0, 2, 5)),  # 64-bit MSB.
+            }[(self.capacity, self.encoding)]
+        except KeyError as e:
+            raise ELFInvalid(
+                f"unrecognized capacity ({self.capacity}) or "
+                f"encoding ({self.encoding})"
+            ) from e
+
+        try:
+            (
+                _,
+                self.machine,  # Architecture type.
+                _,
+                _,
+                self._e_phoff,  # Offset of program header.
+                _,
+                self.flags,  # Processor-specific flags.
+                _,
+                self._e_phentsize,  # Size of section.
+                self._e_phnum,  # Number of sections.
+            ) = self._read(e_fmt)
+        except struct.error as e:
+            raise ELFInvalid("unable to parse machine and section information") from e
+
+    def _read(self, fmt: str) -> tuple[int, ...]:
+        return struct.unpack(fmt, self._f.read(struct.calcsize(fmt)))
+
+    @property
+    def interpreter(self) -> str | None:
+        """
+        The path recorded in the ``PT_INTERP`` section header.
+        """
+        for index in range(self._e_phnum):
+            self._f.seek(self._e_phoff + self._e_phentsize * index)
+            try:
+                data = self._read(self._p_fmt)
+            except struct.error:
+                continue
+            if data[self._p_idx[0]] != 3:  # Not PT_INTERP.
+                continue
+            self._f.seek(data[self._p_idx[1]])
+            return os.fsdecode(self._f.read(data[self._p_idx[2]])).strip("\0")
+        return None
diff --git a/.venv/Lib/site-packages/packaging/_manylinux.py b/.venv/Lib/site-packages/packaging/_manylinux.py
new file mode 100644
index 0000000000000000000000000000000000000000..61339a6fcc1b82803136f3bf980e0c8f574b2220
--- /dev/null
+++ b/.venv/Lib/site-packages/packaging/_manylinux.py
@@ -0,0 +1,263 @@
+from __future__ import annotations
+
+import collections
+import contextlib
+import functools
+import os
+import re
+import sys
+import warnings
+from typing import Generator, Iterator, NamedTuple, Sequence
+
+from ._elffile import EIClass, EIData, ELFFile, EMachine
+
+EF_ARM_ABIMASK = 0xFF000000
+EF_ARM_ABI_VER5 = 0x05000000
+EF_ARM_ABI_FLOAT_HARD = 0x00000400
+
+
+# `os.PathLike` not a generic type until Python 3.9, so sticking with `str`
+# as the type for `path` until then.
+@contextlib.contextmanager
+def _parse_elf(path: str) -> Generator[ELFFile | None, None, None]:
+    try:
+        with open(path, "rb") as f:
+            yield ELFFile(f)
+    except (OSError, TypeError, ValueError):
+        yield None
+
+
+def _is_linux_armhf(executable: str) -> bool:
+    # hard-float ABI can be detected from the ELF header of the running
+    # process
+    # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf
+    with _parse_elf(executable) as f:
+        return (
+            f is not None
+            and f.capacity == EIClass.C32
+            and f.encoding == EIData.Lsb
+            and f.machine == EMachine.Arm
+            and f.flags & EF_ARM_ABIMASK == EF_ARM_ABI_VER5
+            and f.flags & EF_ARM_ABI_FLOAT_HARD == EF_ARM_ABI_FLOAT_HARD
+        )
+
+
+def _is_linux_i686(executable: str) -> bool:
+    with _parse_elf(executable) as f:
+        return (
+            f is not None
+            and f.capacity == EIClass.C32
+            and f.encoding == EIData.Lsb
+            and f.machine == EMachine.I386
+        )
+
+
+def _have_compatible_abi(executable: str, archs: Sequence[str]) -> bool:
+    if "armv7l" in archs:
+        return _is_linux_armhf(executable)
+    if "i686" in archs:
+        return _is_linux_i686(executable)
+    allowed_archs = {
+        "x86_64",
+        "aarch64",
+        "ppc64",
+        "ppc64le",
+        "s390x",
+        "loongarch64",
+        "riscv64",
+    }
+    return any(arch in allowed_archs for arch in archs)
+
+
+# If glibc ever changes its major version, we need to know what the last
+# minor version was, so we can build the complete list of all versions.
+# For now, guess what the highest minor version might be, assume it will
+# be 50 for testing. Once this actually happens, update the dictionary
+# with the actual value.
+_LAST_GLIBC_MINOR: dict[int, int] = collections.defaultdict(lambda: 50)
+
+
+class _GLibCVersion(NamedTuple):
+    major: int
+    minor: int
+
+
+def _glibc_version_string_confstr() -> str | None:
+    """
+    Primary implementation of glibc_version_string using os.confstr.
+    """
+    # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
+    # to be broken or missing. This strategy is used in the standard library
+    # platform module.
+    # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
+    try:
+        # Should be a string like "glibc 2.17".
+        version_string: str | None = os.confstr("CS_GNU_LIBC_VERSION")
+        assert version_string is not None
+        _, version = version_string.rsplit()
+    except (AssertionError, AttributeError, OSError, ValueError):
+        # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
+        return None
+    return version
+
+
+def _glibc_version_string_ctypes() -> str | None:
+    """
+    Fallback implementation of glibc_version_string using ctypes.
+    """
+    try:
+        import ctypes
+    except ImportError:
+        return None
+
+    # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
+    # manpage says, "If filename is NULL, then the returned handle is for the
+    # main program". This way we can let the linker do the work to figure out
+    # which libc our process is actually using.
+    #
+    # We must also handle the special case where the executable is not a
+    # dynamically linked executable. This can occur when using musl libc,
+    # for example. In this situation, dlopen() will error, leading to an
+    # OSError. Interestingly, at least in the case of musl, there is no
+    # errno set on the OSError. The single string argument used to construct
+    # OSError comes from libc itself and is therefore not portable to
+    # hard code here. In any case, failure to call dlopen() means we
+    # can proceed, so we bail on our attempt.
+    try:
+        process_namespace = ctypes.CDLL(None)
+    except OSError:
+        return None
+
+    try:
+        gnu_get_libc_version = process_namespace.gnu_get_libc_version
+    except AttributeError:
+        # Symbol doesn't exist -> therefore, we are not linked to
+        # glibc.
+        return None
+
+    # Call gnu_get_libc_version, which returns a string like "2.5"
+    gnu_get_libc_version.restype = ctypes.c_char_p
+    version_str: str = gnu_get_libc_version()
+    # py2 / py3 compatibility:
+    if not isinstance(version_str, str):
+        version_str = version_str.decode("ascii")
+
+    return version_str
+
+
+def _glibc_version_string() -> str | None:
+    """Returns glibc version string, or None if not using glibc."""
+    return _glibc_version_string_confstr() or _glibc_version_string_ctypes()
+
+
+def _parse_glibc_version(version_str: str) -> tuple[int, int]:
+    """Parse glibc version.
+
+    We use a regexp instead of str.split because we want to discard any
+    random junk that might come after the minor version -- this might happen
+    in patched/forked versions of glibc (e.g. Linaro's version of glibc
+    uses version strings like "2.20-2014.11"). See gh-3588.
+    """
+    m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
+    if not m:
+        warnings.warn(
+            f"Expected glibc version with 2 components major.minor,"
+            f" got: {version_str}",
+            RuntimeWarning,
+            stacklevel=2,
+        )
+        return -1, -1
+    return int(m.group("major")), int(m.group("minor"))
+
+
+@functools.lru_cache
+def _get_glibc_version() -> tuple[int, int]:
+    version_str = _glibc_version_string()
+    if version_str is None:
+        return (-1, -1)
+    return _parse_glibc_version(version_str)
+
+
+# From PEP 513, PEP 600
+def _is_compatible(arch: str, version: _GLibCVersion) -> bool:
+    sys_glibc = _get_glibc_version()
+    if sys_glibc < version:
+        return False
+    # Check for presence of _manylinux module.
+    try:
+        import _manylinux
+    except ImportError:
+        return True
+    if hasattr(_manylinux, "manylinux_compatible"):
+        result = _manylinux.manylinux_compatible(version[0], version[1], arch)
+        if result is not None:
+            return bool(result)
+        return True
+    if version == _GLibCVersion(2, 5):
+        if hasattr(_manylinux, "manylinux1_compatible"):
+            return bool(_manylinux.manylinux1_compatible)
+    if version == _GLibCVersion(2, 12):
+        if hasattr(_manylinux, "manylinux2010_compatible"):
+            return bool(_manylinux.manylinux2010_compatible)
+    if version == _GLibCVersion(2, 17):
+        if hasattr(_manylinux, "manylinux2014_compatible"):
+            return bool(_manylinux.manylinux2014_compatible)
+    return True
+
+
+_LEGACY_MANYLINUX_MAP = {
+    # CentOS 7 w/ glibc 2.17 (PEP 599)
+    (2, 17): "manylinux2014",
+    # CentOS 6 w/ glibc 2.12 (PEP 571)
+    (2, 12): "manylinux2010",
+    # CentOS 5 w/ glibc 2.5 (PEP 513)
+    (2, 5): "manylinux1",
+}
+
+
+def platform_tags(archs: Sequence[str]) -> Iterator[str]:
+    """Generate manylinux tags compatible to the current platform.
+
+    :param archs: Sequence of compatible architectures.
+        The first one shall be the closest to the actual architecture and be the part of
+        platform tag after the ``linux_`` prefix, e.g. ``x86_64``.
+        The ``linux_`` prefix is assumed as a prerequisite for the current platform to
+        be manylinux-compatible.
+
+    :returns: An iterator of compatible manylinux tags.
+    """
+    if not _have_compatible_abi(sys.executable, archs):
+        return
+    # Oldest glibc to be supported regardless of architecture is (2, 17).
+    too_old_glibc2 = _GLibCVersion(2, 16)
+    if set(archs) & {"x86_64", "i686"}:
+        # On x86/i686 also oldest glibc to be supported is (2, 5).
+        too_old_glibc2 = _GLibCVersion(2, 4)
+    current_glibc = _GLibCVersion(*_get_glibc_version())
+    glibc_max_list = [current_glibc]
+    # We can assume compatibility across glibc major versions.
+    # https://sourceware.org/bugzilla/show_bug.cgi?id=24636
+    #
+    # Build a list of maximum glibc versions so that we can
+    # output the canonical list of all glibc from current_glibc
+    # down to too_old_glibc2, including all intermediary versions.
+    for glibc_major in range(current_glibc.major - 1, 1, -1):
+        glibc_minor = _LAST_GLIBC_MINOR[glibc_major]
+        glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor))
+    for arch in archs:
+        for glibc_max in glibc_max_list:
+            if glibc_max.major == too_old_glibc2.major:
+                min_minor = too_old_glibc2.minor
+            else:
+                # For other glibc major versions oldest supported is (x, 0).
+                min_minor = -1
+            for glibc_minor in range(glibc_max.minor, min_minor, -1):
+                glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)
+                tag = "manylinux_{}_{}".format(*glibc_version)
+                if _is_compatible(arch, glibc_version):
+                    yield f"{tag}_{arch}"
+                # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
+                if glibc_version in _LEGACY_MANYLINUX_MAP:
+                    legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
+                    if _is_compatible(arch, glibc_version):
+                        yield f"{legacy_tag}_{arch}"
diff --git a/.venv/Lib/site-packages/packaging/_musllinux.py b/.venv/Lib/site-packages/packaging/_musllinux.py
new file mode 100644
index 0000000000000000000000000000000000000000..d2bf30b56319ba862c5c9a1a39a87c6d1cb68718
--- /dev/null
+++ b/.venv/Lib/site-packages/packaging/_musllinux.py
@@ -0,0 +1,85 @@
+"""PEP 656 support.
+
+This module implements logic to detect if the currently running Python is
+linked against musl, and what musl version is used.
+"""
+
+from __future__ import annotations
+
+import functools
+import re
+import subprocess
+import sys
+from typing import Iterator, NamedTuple, Sequence
+
+from ._elffile import ELFFile
+
+
+class _MuslVersion(NamedTuple):
+    major: int
+    minor: int
+
+
+def _parse_musl_version(output: str) -> _MuslVersion | None:
+    lines = [n for n in (n.strip() for n in output.splitlines()) if n]
+    if len(lines) < 2 or lines[0][:4] != "musl":
+        return None
+    m = re.match(r"Version (\d+)\.(\d+)", lines[1])
+    if not m:
+        return None
+    return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2)))
+
+
+@functools.lru_cache
+def _get_musl_version(executable: str) -> _MuslVersion | None:
+    """Detect currently-running musl runtime version.
+
+    This is done by checking the specified executable's dynamic linking
+    information, and invoking the loader to parse its output for a version
+    string. If the loader is musl, the output would be something like::
+
+        musl libc (x86_64)
+        Version 1.2.2
+        Dynamic Program Loader
+    """
+    try:
+        with open(executable, "rb") as f:
+            ld = ELFFile(f).interpreter
+    except (OSError, TypeError, ValueError):
+        return None
+    if ld is None or "musl" not in ld:
+        return None
+    proc = subprocess.run([ld], stderr=subprocess.PIPE, text=True)
+    return _parse_musl_version(proc.stderr)
+
+
+def platform_tags(archs: Sequence[str]) -> Iterator[str]:
+    """Generate musllinux tags compatible to the current platform.
+
+    :param archs: Sequence of compatible architectures.
+        The first one shall be the closest to the actual architecture and be the part of
+        platform tag after the ``linux_`` prefix, e.g. ``x86_64``.
+        The ``linux_`` prefix is assumed as a prerequisite for the current platform to
+        be musllinux-compatible.
+
+    :returns: An iterator of compatible musllinux tags.
+    """
+    sys_musl = _get_musl_version(sys.executable)
+    if sys_musl is None:  # Python not dynamically linked against musl.
+        return
+    for arch in archs:
+        for minor in range(sys_musl.minor, -1, -1):
+            yield f"musllinux_{sys_musl.major}_{minor}_{arch}"
+
+
+if __name__ == "__main__":  # pragma: no cover
+    import sysconfig
+
+    plat = sysconfig.get_platform()
+    assert plat.startswith("linux-"), "not linux"
+
+    print("plat:", plat)
+    print("musl:", _get_musl_version(sys.executable))
+    print("tags:", end=" ")
+    for t in platform_tags(re.sub(r"[.-]", "_", plat.split("-", 1)[-1])):
+        print(t, end="\n      ")
diff --git a/.venv/Lib/site-packages/packaging/_parser.py b/.venv/Lib/site-packages/packaging/_parser.py
new file mode 100644
index 0000000000000000000000000000000000000000..c1238c06eab95f8c90c393383a703aa3b8c366a5
--- /dev/null
+++ b/.venv/Lib/site-packages/packaging/_parser.py
@@ -0,0 +1,354 @@
+"""Handwritten parser of dependency specifiers.
+
+The docstring for each __parse_* function contains EBNF-inspired grammar representing
+the implementation.
+"""
+
+from __future__ import annotations
+
+import ast
+from typing import NamedTuple, Sequence, Tuple, Union
+
+from ._tokenizer import DEFAULT_RULES, Tokenizer
+
+
+class Node:
+    def __init__(self, value: str) -> None:
+        self.value = value
+
+    def __str__(self) -> str:
+        return self.value
+
+    def __repr__(self) -> str:
+        return f"<{self.__class__.__name__}('{self}')>"
+
+    def serialize(self) -> str:
+        raise NotImplementedError
+
+
+class Variable(Node):
+    def serialize(self) -> str:
+        return str(self)
+
+
+class Value(Node):
+    def serialize(self) -> str:
+        return f'"{self}"'
+
+
+class Op(Node):
+    def serialize(self) -> str:
+        return str(self)
+
+
+MarkerVar = Union[Variable, Value]
+MarkerItem = Tuple[MarkerVar, Op, MarkerVar]
+MarkerAtom = Union[MarkerItem, Sequence["MarkerAtom"]]
+MarkerList = Sequence[Union["MarkerList", MarkerAtom, str]]
+
+
+class ParsedRequirement(NamedTuple):
+    name: str
+    url: str
+    extras: list[str]
+    specifier: str
+    marker: MarkerList | None
+
+
+# --------------------------------------------------------------------------------------
+# Recursive descent parser for dependency specifier
+# --------------------------------------------------------------------------------------
+def parse_requirement(source: str) -> ParsedRequirement:
+    return _parse_requirement(Tokenizer(source, rules=DEFAULT_RULES))
+
+
+def _parse_requirement(tokenizer: Tokenizer) -> ParsedRequirement:
+    """
+    requirement = WS? IDENTIFIER WS? extras WS? requirement_details
+    """
+    tokenizer.consume("WS")
+
+    name_token = tokenizer.expect(
+        "IDENTIFIER", expected="package name at the start of dependency specifier"
+    )
+    name = name_token.text
+    tokenizer.consume("WS")
+
+    extras = _parse_extras(tokenizer)
+    tokenizer.consume("WS")
+
+    url, specifier, marker = _parse_requirement_details(tokenizer)
+    tokenizer.expect("END", expected="end of dependency specifier")
+
+    return ParsedRequirement(name, url, extras, specifier, marker)
+
+
+def _parse_requirement_details(
+    tokenizer: Tokenizer,
+) -> tuple[str, str, MarkerList | None]:
+    """
+    requirement_details = AT URL (WS requirement_marker?)?
+                        | specifier WS? (requirement_marker)?
+    """
+
+    specifier = ""
+    url = ""
+    marker = None
+
+    if tokenizer.check("AT"):
+        tokenizer.read()
+        tokenizer.consume("WS")
+
+        url_start = tokenizer.position
+        url = tokenizer.expect("URL", expected="URL after @").text
+        if tokenizer.check("END", peek=True):
+            return (url, specifier, marker)
+
+        tokenizer.expect("WS", expected="whitespace after URL")
+
+        # The input might end after whitespace.
+        if tokenizer.check("END", peek=True):
+            return (url, specifier, marker)
+
+        marker = _parse_requirement_marker(
+            tokenizer, span_start=url_start, after="URL and whitespace"
+        )
+    else:
+        specifier_start = tokenizer.position
+        specifier = _parse_specifier(tokenizer)
+        tokenizer.consume("WS")
+
+        if tokenizer.check("END", peek=True):
+            return (url, specifier, marker)
+
+        marker = _parse_requirement_marker(
+            tokenizer,
+            span_start=specifier_start,
+            after=(
+                "version specifier"
+                if specifier
+                else "name and no valid version specifier"
+            ),
+        )
+
+    return (url, specifier, marker)
+
+
+def _parse_requirement_marker(
+    tokenizer: Tokenizer, *, span_start: int, after: str
+) -> MarkerList:
+    """
+    requirement_marker = SEMICOLON marker WS?
+    """
+
+    if not tokenizer.check("SEMICOLON"):
+        tokenizer.raise_syntax_error(
+            f"Expected end or semicolon (after {after})",
+            span_start=span_start,
+        )
+    tokenizer.read()
+
+    marker = _parse_marker(tokenizer)
+    tokenizer.consume("WS")
+
+    return marker
+
+
+def _parse_extras(tokenizer: Tokenizer) -> list[str]:
+    """
+    extras = (LEFT_BRACKET wsp* extras_list? wsp* RIGHT_BRACKET)?
+    """
+    if not tokenizer.check("LEFT_BRACKET", peek=True):
+        return []
+
+    with tokenizer.enclosing_tokens(
+        "LEFT_BRACKET",
+        "RIGHT_BRACKET",
+        around="extras",
+    ):
+        tokenizer.consume("WS")
+        extras = _parse_extras_list(tokenizer)
+        tokenizer.consume("WS")
+
+    return extras
+
+
+def _parse_extras_list(tokenizer: Tokenizer) -> list[str]:
+    """
+    extras_list = identifier (wsp* ',' wsp* identifier)*
+    """
+    extras: list[str] = []
+
+    if not tokenizer.check("IDENTIFIER"):
+        return extras
+
+    extras.append(tokenizer.read().text)
+
+    while True:
+        tokenizer.consume("WS")
+        if tokenizer.check("IDENTIFIER", peek=True):
+            tokenizer.raise_syntax_error("Expected comma between extra names")
+        elif not tokenizer.check("COMMA"):
+            break
+
+        tokenizer.read()
+        tokenizer.consume("WS")
+
+        extra_token = tokenizer.expect("IDENTIFIER", expected="extra name after comma")
+        extras.append(extra_token.text)
+
+    return extras
+
+
def _parse_specifier(tokenizer: Tokenizer) -> str:
    """Parse a (possibly parenthesised) version-specifier group.

    specifier = LEFT_PARENTHESIS WS? version_many WS? RIGHT_PARENTHESIS
              | WS? version_many WS?
    """
    # enclosing_tokens handles both grammar alternatives: it only demands a
    # closing parenthesis if an opening one was actually present.
    with tokenizer.enclosing_tokens(
        "LEFT_PARENTHESIS",
        "RIGHT_PARENTHESIS",
        around="version specifier",
    ):
        tokenizer.consume("WS")
        result = _parse_version_many(tokenizer)
        tokenizer.consume("WS")
    return result
+
+
def _parse_version_many(tokenizer: Tokenizer) -> str:
    """Concatenate zero or more comma-separated version specifiers into a string.

    version_many = (SPECIFIER (WS? COMMA WS? SPECIFIER)*)?
    """
    collected = ""
    while tokenizer.check("SPECIFIER"):
        start = tokenizer.position
        collected += tokenizer.read().text
        # The SPECIFIER rule itself never matches a trailing `.*` or local
        # version label for operators other than ==/!=, so finding one here
        # means the user combined it with an operator that forbids it.
        if tokenizer.check("VERSION_PREFIX_TRAIL", peek=True):
            tokenizer.raise_syntax_error(
                ".* suffix can only be used with `==` or `!=` operators",
                span_start=start,
                span_end=tokenizer.position + 1,
            )
        if tokenizer.check("VERSION_LOCAL_LABEL_TRAIL", peek=True):
            tokenizer.raise_syntax_error(
                "Local version label can only be used with `==` or `!=` operators",
                span_start=start,
                span_end=tokenizer.position,
            )
        tokenizer.consume("WS")
        if not tokenizer.check("COMMA"):
            break
        collected += tokenizer.read().text
        tokenizer.consume("WS")
    return collected
+
+
+# --------------------------------------------------------------------------------------
+# Recursive descent parser for marker expression
+# --------------------------------------------------------------------------------------
def parse_marker(source: str) -> MarkerList:
    """Parse *source* as a complete environment-marker expression."""
    tokenizer = Tokenizer(source, rules=DEFAULT_RULES)
    return _parse_full_marker(tokenizer)
+
+
def _parse_full_marker(tokenizer: Tokenizer) -> MarkerList:
    """Parse a marker and require that the whole input has been consumed."""
    parsed = _parse_marker(tokenizer)
    # Any leftover characters are a syntax error, not silently ignored.
    tokenizer.expect("END", expected="end of marker expression")
    return parsed
+
+
def _parse_marker(tokenizer: Tokenizer) -> MarkerList:
    """Parse a chain of marker atoms joined by boolean operators.

    marker = marker_atom (BOOLOP marker_atom)+
    """
    # Result is a flat list: [atom, "and"/"or", atom, ...]; operator
    # precedence is resolved by the evaluator, not here.
    result: MarkerList = [_parse_marker_atom(tokenizer)]
    while tokenizer.check("BOOLOP"):
        op_token = tokenizer.read()
        result.append(op_token.text)
        result.append(_parse_marker_atom(tokenizer))
    return result
+
+
def _parse_marker_atom(tokenizer: Tokenizer) -> MarkerAtom:
    """Parse one atom: either a parenthesised sub-marker or a single item.

    marker_atom = WS? LEFT_PARENTHESIS WS? marker WS? RIGHT_PARENTHESIS WS?
                | WS? marker_item WS?
    """
    tokenizer.consume("WS")
    if not tokenizer.check("LEFT_PARENTHESIS", peek=True):
        atom: MarkerAtom = _parse_marker_item(tokenizer)
    else:
        # Parenthesised group: recurse for the inner marker expression.
        with tokenizer.enclosing_tokens(
            "LEFT_PARENTHESIS",
            "RIGHT_PARENTHESIS",
            around="marker expression",
        ):
            tokenizer.consume("WS")
            atom = _parse_marker(tokenizer)
            tokenizer.consume("WS")
    tokenizer.consume("WS")
    return atom
+
+
def _parse_marker_item(tokenizer: Tokenizer) -> MarkerItem:
    """Parse a single comparison: variable/string, operator, variable/string.

    marker_item = WS? marker_var WS? marker_op WS? marker_var WS?
    """
    tokenizer.consume("WS")
    lhs = _parse_marker_var(tokenizer)
    tokenizer.consume("WS")
    operator = _parse_marker_op(tokenizer)
    tokenizer.consume("WS")
    rhs = _parse_marker_var(tokenizer)
    tokenizer.consume("WS")
    return (lhs, operator, rhs)
+
+
def _parse_marker_var(tokenizer: Tokenizer) -> MarkerVar:
    """Parse either side of a marker comparison.

    marker_var = VARIABLE | QUOTED_STRING
    """
    if tokenizer.check("VARIABLE"):
        # Normalise dotted spellings such as `os.name` to `os_name`.
        name = tokenizer.read().text.replace(".", "_")
        return process_env_var(name)
    if tokenizer.check("QUOTED_STRING"):
        return process_python_str(tokenizer.read().text)
    tokenizer.raise_syntax_error(
        message="Expected a marker variable or quoted string"
    )
+
+
def process_env_var(env_var: str) -> Variable:
    """Wrap a marker variable name in a Variable, canonicalising aliases."""
    # `python_implementation` is accepted as an alias for the canonical
    # `platform_python_implementation` marker variable.
    if env_var in ("platform_python_implementation", "python_implementation"):
        env_var = "platform_python_implementation"
    return Variable(env_var)
+
+
def process_python_str(python_str: str) -> Value:
    """Evaluate a quoted-string token (via literal_eval) and wrap it as a Value."""
    literal = ast.literal_eval(python_str)
    return Value(str(literal))
+
+
def _parse_marker_op(tokenizer: Tokenizer) -> Op:
    """Parse a marker comparison operator.

    marker_op = IN | NOT IN | OP
    """
    if tokenizer.check("IN"):
        tokenizer.read()
        return Op("in")
    if tokenizer.check("NOT"):
        # "not" must be followed by whitespace and then "in".
        tokenizer.read()
        tokenizer.expect("WS", expected="whitespace after 'not'")
        tokenizer.expect("IN", expected="'in' after 'not'")
        return Op("not in")
    if tokenizer.check("OP"):
        return Op(tokenizer.read().text)
    return tokenizer.raise_syntax_error(
        "Expected marker operator, one of "
        "<=, <, !=, ==, >=, >, ~=, ===, in, not in"
    )
diff --git a/.venv/Lib/site-packages/packaging/_structures.py b/.venv/Lib/site-packages/packaging/_structures.py
new file mode 100644
index 0000000000000000000000000000000000000000..90a6465f9682c886363eea5327dac64bf623a6ff
--- /dev/null
+++ b/.venv/Lib/site-packages/packaging/_structures.py
@@ -0,0 +1,61 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
class InfinityType:
    """Sentinel that compares greater than every other value."""

    def __repr__(self) -> str:
        return "Infinity"

    def __eq__(self, other: object) -> bool:
        # All instances of this type are interchangeable.
        return isinstance(other, self.__class__)

    def __hash__(self) -> int:
        return hash(repr(self))

    # Infinity is greater-or-equal to everything and less than nothing.
    def __lt__(self, other: object) -> bool:
        return False

    def __le__(self, other: object) -> bool:
        return False

    def __gt__(self, other: object) -> bool:
        return True

    def __ge__(self, other: object) -> bool:
        return True

    def __neg__(self: object) -> "NegativeInfinityType":
        return NegativeInfinity
+
+
# Module-level singleton; compare against this instance rather than the type.
Infinity = InfinityType()
+
+
+class NegativeInfinityType:
+    def __repr__(self) -> str:
+        return "-Infinity"
+
+    def __hash__(self) -> int:
+        return hash(repr(self))
+
+    def __lt__(self, other: object) -> bool:
+        return True
+
+    def __le__(self, other: object) -> bool:
+        return True
+
+    def __eq__(self, other: object) -> bool:
+        return isinstance(other, self.__class__)
+
+    def __gt__(self, other: object) -> bool:
+        return False
+
+    def __ge__(self, other: object) -> bool:
+        return False
+
+    def __neg__(self: object) -> InfinityType:
+        return Infinity
+
+
# Module-level singleton; compare against this instance rather than the type.
NegativeInfinity = NegativeInfinityType()
diff --git a/.venv/Lib/site-packages/packaging/_tokenizer.py b/.venv/Lib/site-packages/packaging/_tokenizer.py
new file mode 100644
index 0000000000000000000000000000000000000000..89d041605c006e326a67f399a58a1fec8eb24acf
--- /dev/null
+++ b/.venv/Lib/site-packages/packaging/_tokenizer.py
@@ -0,0 +1,194 @@
+from __future__ import annotations
+
+import contextlib
+import re
+from dataclasses import dataclass
+from typing import Iterator, NoReturn
+
+from .specifiers import Specifier
+
+
@dataclass
class Token:
    """One lexical token: the rule that matched, its text, and its offset."""

    name: str  # key into the tokenizer's rules dict (e.g. "SPECIFIER")
    text: str  # exact substring of the source matched by the rule
    position: int  # 0-based offset of the token's first character in the source
+
+
class ParserSyntaxError(Exception):
    """The provided source text could not be parsed correctly."""

    def __init__(
        self,
        message: str,
        *,
        source: str,
        span: tuple[int, int],
    ) -> None:
        # Keep the raw pieces so callers can re-format the error themselves.
        self.span = span
        self.message = message
        self.source = source
        super().__init__()

    def __str__(self) -> str:
        # Underline the offending span with "~" characters and a "^" caret.
        start, end = self.span
        pointer = " " * start + "~" * (end - start) + "^"
        return "\n    ".join([self.message, self.source, pointer])
+
+
# Lexer rules shared by the requirement and marker parsers. Values may be raw
# pattern strings or pre-compiled patterns; the Tokenizer compiles them all.
DEFAULT_RULES: dict[str, str | re.Pattern[str]] = {
    "LEFT_PARENTHESIS": r"\(",
    "RIGHT_PARENTHESIS": r"\)",
    "LEFT_BRACKET": r"\[",
    "RIGHT_BRACKET": r"\]",
    "SEMICOLON": r";",
    "COMMA": r",",
    # Single- or double-quoted string with no escapes (markers don't need them).
    "QUOTED_STRING": re.compile(
        r"""
            (
                ('[^']*')
                |
                ("[^"]*")
            )
        """,
        re.VERBOSE,
    ),
    "OP": r"(===|==|~=|!=|<=|>=|<|>)",
    "BOOLOP": r"\b(or|and)\b",
    "IN": r"\bin\b",
    "NOT": r"\bnot\b",
    # The full set of PEP 508 environment-marker variable names.
    "VARIABLE": re.compile(
        r"""
            \b(
                python_version
                |python_full_version
                |os[._]name
                |sys[._]platform
                |platform_(release|system)
                |platform[._](version|machine|python_implementation)
                |python_implementation
                |implementation_(name|version)
                |extra
            )\b
        """,
        re.VERBOSE,
    ),
    # Reuses the operator + version grammar fragments defined on Specifier.
    "SPECIFIER": re.compile(
        Specifier._operator_regex_str + Specifier._version_regex_str,
        re.VERBOSE | re.IGNORECASE,
    ),
    "AT": r"\@",
    "URL": r"[^ \t]+",
    "IDENTIFIER": r"\b[a-zA-Z0-9][a-zA-Z0-9._-]*\b",
    # Trailing fragments that are only legal after ==/!= (checked by the parser).
    "VERSION_PREFIX_TRAIL": r"\.\*",
    "VERSION_LOCAL_LABEL_TRAIL": r"\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*",
    "WS": r"[ \t]+",
    "END": r"$",
}
+
+
class Tokenizer:
    """Context-sensitive token parsing.

    Provides methods to examine the input stream to check whether the next token
    matches.
    """

    def __init__(
        self,
        source: str,
        *,
        rules: dict[str, str | re.Pattern[str]],
    ) -> None:
        self.source = source
        # Compile every rule up front; re.compile() passes through patterns
        # that are already compiled, so mixed dicts are fine.
        self.rules: dict[str, re.Pattern[str]] = {
            rule_name: re.compile(rule_pattern)
            for rule_name, rule_pattern in rules.items()
        }
        # Holds the token loaded by a successful non-peek check() until read().
        self.next_token: Token | None = None
        self.position = 0

    def consume(self, name: str) -> None:
        """Move beyond provided token name, if at current position."""
        if self.check(name):
            self.read()

    def check(self, name: str, *, peek: bool = False) -> bool:
        """Check whether the next token has the provided name.

        By default, if the check succeeds, the token *must* be read before
        another check. If `peek` is set to `True`, the token is not loaded and
        would need to be checked again.
        """
        assert (
            self.next_token is None
        ), f"Cannot check for {name!r}, already have {self.next_token!r}"
        assert name in self.rules, f"Unknown token name: {name!r}"

        matched = self.rules[name].match(self.source, self.position)
        if matched is None:
            return False
        if not peek:
            # Stash the token so the following read() can consume it.
            self.next_token = Token(name, matched[0], self.position)
        return True

    def expect(self, name: str, *, expected: str) -> Token:
        """Expect a certain token name next, failing with a syntax error otherwise.

        The token is *not* read.
        """
        if not self.check(name):
            raise self.raise_syntax_error(f"Expected {expected}")
        return self.read()

    def read(self) -> Token:
        """Consume the next token and return it."""
        token = self.next_token
        assert token is not None

        self.position += len(token.text)
        self.next_token = None
        return token

    def raise_syntax_error(
        self,
        message: str,
        *,
        span_start: int | None = None,
        span_end: int | None = None,
    ) -> NoReturn:
        """Raise ParserSyntaxError at the given position."""
        # Default both ends of the span to the current position.
        start = self.position if span_start is None else span_start
        end = self.position if span_end is None else span_end
        raise ParserSyntaxError(message, source=self.source, span=(start, end))

    @contextlib.contextmanager
    def enclosing_tokens(
        self, open_token: str, close_token: str, *, around: str
    ) -> Iterator[None]:
        # Record where the opener sat (if present) so errors can point at it.
        open_position: int | None = None
        if self.check(open_token):
            open_position = self.position
            self.read()

        yield

        # Only an actually-opened pair demands a matching closer.
        if open_position is None:
            return
        if not self.check(close_token):
            self.raise_syntax_error(
                f"Expected matching {close_token} for {open_token}, after {around}",
                span_start=open_position,
            )
        self.read()
diff --git a/.venv/Lib/site-packages/packaging/licenses/__init__.py b/.venv/Lib/site-packages/packaging/licenses/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..569156d6ca47719f49b753a4781a86a924de173b
--- /dev/null
+++ b/.venv/Lib/site-packages/packaging/licenses/__init__.py
@@ -0,0 +1,145 @@
+#######################################################################################
+#
+# Adapted from:
+#  https://github.com/pypa/hatch/blob/5352e44/backend/src/hatchling/licenses/parse.py
+#
+# MIT License
+#
+# Copyright (c) 2017-present Ofek Lev <oss@ofek.dev>
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy of this
+# software and associated documentation files (the "Software"), to deal in the Software
+# without restriction, including without limitation the rights to use, copy, modify,
+# merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to the following
+# conditions:
+#
+# The above copyright notice and this permission notice shall be included in all copies
+# or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
+# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+# PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
+# CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
+# OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+#
+# With additional allowance of arbitrary `LicenseRef-` identifiers, not just
+# `LicenseRef-Public-Domain` and `LicenseRef-Proprietary`.
+#
+#######################################################################################
+from __future__ import annotations
+
+import re
+from typing import NewType, cast
+
+from packaging.licenses._spdx import EXCEPTIONS, LICENSES
+
+__all__ = [
+    "NormalizedLicenseExpression",
+    "InvalidLicenseExpression",
+    "canonicalize_license_expression",
+]
+
# LicenseRef-<idstring> operands may only contain letters, digits, '.' and '-'.
license_ref_allowed = re.compile("^[A-Za-z0-9.-]*$")

# Distinct str subtype marking an expression that has passed normalization.
NormalizedLicenseExpression = NewType("NormalizedLicenseExpression", str)
+
+
class InvalidLicenseExpression(ValueError):
    """Raised when a license-expression string is invalid

    >>> canonicalize_license_expression("invalid")
    Traceback (most recent call last):
        ...
    packaging.licenses.InvalidLicenseExpression: Invalid license expression: 'invalid'
    """

    # Plain ValueError subclass used only as a distinct exception type; the
    # docstring above doubles as a doctest, so its wording must not change.
+
+
def canonicalize_license_expression(
    raw_license_expression: str,
) -> NormalizedLicenseExpression:
    """Validate an SPDX license expression and return its canonical spelling.

    Raises InvalidLicenseExpression for empty input, malformed boolean
    structure, unknown license or exception identifiers, or LicenseRef names
    containing disallowed characters.
    """
    if not raw_license_expression:
        message = f"Invalid license expression: {raw_license_expression!r}"
        raise InvalidLicenseExpression(message)

    # Pad any parentheses so tokenization can be achieved by merely splitting on
    # whitespace.
    license_expression = raw_license_expression.replace("(", " ( ").replace(")", " ) ")
    licenseref_prefix = "LicenseRef-"
    # Map lower-cased LicenseRef tokens back to their case-preserved suffix,
    # with the prefix itself normalized to exactly "LicenseRef-".
    license_refs = {
        ref.lower(): "LicenseRef-" + ref[len(licenseref_prefix) :]
        for ref in license_expression.split()
        if ref.lower().startswith(licenseref_prefix.lower())
    }

    # Normalize to lower case so we can look up licenses/exceptions
    # and so boolean operators are Python-compatible.
    license_expression = license_expression.lower()

    tokens = license_expression.split()

    # Rather than implementing boolean logic, we create an expression that Python can
    # parse. Everything that is not involved with the grammar itself is treated as
    # `False` and the expression should evaluate as such.
    python_tokens = []
    for token in tokens:
        if token not in {"or", "and", "with", "(", ")"}:
            python_tokens.append("False")
        elif token == "with":
            python_tokens.append("or")
        elif token == "(" and python_tokens and python_tokens[-1] not in {"or", "and"}:
            # An operand immediately followed by "(" (e.g. "MIT (BSD)") is
            # rejected here because Python would parse it as a call.
            message = f"Invalid license expression: {raw_license_expression!r}"
            raise InvalidLicenseExpression(message)
        else:
            python_tokens.append(token)

    python_expression = " ".join(python_tokens)
    # NOTE: eval() only ever sees "False", "or", "and" and parentheses here —
    # every other token was replaced above — so no untrusted code can execute.
    try:
        invalid = eval(python_expression, globals(), locals())
    except Exception:
        invalid = True

    # A structurally valid expression of all-False operands evaluates to False;
    # anything else indicates a malformed boolean structure.
    if invalid is not False:
        message = f"Invalid license expression: {raw_license_expression!r}"
        raise InvalidLicenseExpression(message) from None

    # Take a final pass to check for unknown licenses/exceptions.
    normalized_tokens = []
    for token in tokens:
        if token in {"or", "and", "with", "(", ")"}:
            normalized_tokens.append(token.upper())
            continue

        if normalized_tokens and normalized_tokens[-1] == "WITH":
            # The operand after WITH must be a known SPDX exception identifier.
            if token not in EXCEPTIONS:
                message = f"Unknown license exception: {token!r}"
                raise InvalidLicenseExpression(message)

            normalized_tokens.append(EXCEPTIONS[token]["id"])
        else:
            # A trailing "+" ("or later") is split off and re-appended to the
            # canonical identifier.
            if token.endswith("+"):
                final_token = token[:-1]
                suffix = "+"
            else:
                final_token = token
                suffix = ""

            if final_token.startswith("licenseref-"):
                if not license_ref_allowed.match(final_token):
                    message = f"Invalid licenseref: {final_token!r}"
                    raise InvalidLicenseExpression(message)
                normalized_tokens.append(license_refs[final_token] + suffix)
            else:
                if final_token not in LICENSES:
                    message = f"Unknown license: {final_token!r}"
                    raise InvalidLicenseExpression(message)
                normalized_tokens.append(LICENSES[final_token]["id"] + suffix)

    normalized_expression = " ".join(normalized_tokens)

    # Undo the parenthesis padding introduced for tokenization.
    return cast(
        NormalizedLicenseExpression,
        normalized_expression.replace("( ", "(").replace(" )", ")"),
    )
diff --git a/.venv/Lib/site-packages/packaging/licenses/__pycache__/__init__.cpython-310.pyc b/.venv/Lib/site-packages/packaging/licenses/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..07ec6f6fa7cff18098eab9a32d3dd2bbdc3210cc
Binary files /dev/null and b/.venv/Lib/site-packages/packaging/licenses/__pycache__/__init__.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/packaging/licenses/__pycache__/_spdx.cpython-310.pyc b/.venv/Lib/site-packages/packaging/licenses/__pycache__/_spdx.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..05445c25d43b3769e51df4c475c419a575d3354d
Binary files /dev/null and b/.venv/Lib/site-packages/packaging/licenses/__pycache__/_spdx.cpython-310.pyc differ
diff --git a/.venv/Lib/site-packages/packaging/licenses/_spdx.py b/.venv/Lib/site-packages/packaging/licenses/_spdx.py
new file mode 100644
index 0000000000000000000000000000000000000000..eac22276a34ccd73fc9d70c67ca318a49eb11e77
--- /dev/null
+++ b/.venv/Lib/site-packages/packaging/licenses/_spdx.py
@@ -0,0 +1,759 @@
+
+from __future__ import annotations
+
+from typing import TypedDict
+
class SPDXLicense(TypedDict):
    """Schema for one entry in the generated LICENSES table."""

    id: str  # canonical SPDX license identifier, e.g. 'Apache-2.0'
    deprecated: bool  # True if the SPDX list marks this identifier deprecated
+
class SPDXException(TypedDict):
    """Schema for one entry in the generated EXCEPTIONS table."""

    id: str  # canonical SPDX exception identifier
    deprecated: bool  # True if the SPDX list marks this identifier deprecated
+
+
# Version of the SPDX license list this data file was generated from.
VERSION = '3.25.0'
+
+LICENSES: dict[str, SPDXLicense] = {
+    '0bsd': {'id': '0BSD', 'deprecated': False},
+    '3d-slicer-1.0': {'id': '3D-Slicer-1.0', 'deprecated': False},
+    'aal': {'id': 'AAL', 'deprecated': False},
+    'abstyles': {'id': 'Abstyles', 'deprecated': False},
+    'adacore-doc': {'id': 'AdaCore-doc', 'deprecated': False},
+    'adobe-2006': {'id': 'Adobe-2006', 'deprecated': False},
+    'adobe-display-postscript': {'id': 'Adobe-Display-PostScript', 'deprecated': False},
+    'adobe-glyph': {'id': 'Adobe-Glyph', 'deprecated': False},
+    'adobe-utopia': {'id': 'Adobe-Utopia', 'deprecated': False},
+    'adsl': {'id': 'ADSL', 'deprecated': False},
+    'afl-1.1': {'id': 'AFL-1.1', 'deprecated': False},
+    'afl-1.2': {'id': 'AFL-1.2', 'deprecated': False},
+    'afl-2.0': {'id': 'AFL-2.0', 'deprecated': False},
+    'afl-2.1': {'id': 'AFL-2.1', 'deprecated': False},
+    'afl-3.0': {'id': 'AFL-3.0', 'deprecated': False},
+    'afmparse': {'id': 'Afmparse', 'deprecated': False},
+    'agpl-1.0': {'id': 'AGPL-1.0', 'deprecated': True},
+    'agpl-1.0-only': {'id': 'AGPL-1.0-only', 'deprecated': False},
+    'agpl-1.0-or-later': {'id': 'AGPL-1.0-or-later', 'deprecated': False},
+    'agpl-3.0': {'id': 'AGPL-3.0', 'deprecated': True},
+    'agpl-3.0-only': {'id': 'AGPL-3.0-only', 'deprecated': False},
+    'agpl-3.0-or-later': {'id': 'AGPL-3.0-or-later', 'deprecated': False},
+    'aladdin': {'id': 'Aladdin', 'deprecated': False},
+    'amd-newlib': {'id': 'AMD-newlib', 'deprecated': False},
+    'amdplpa': {'id': 'AMDPLPA', 'deprecated': False},
+    'aml': {'id': 'AML', 'deprecated': False},
+    'aml-glslang': {'id': 'AML-glslang', 'deprecated': False},
+    'ampas': {'id': 'AMPAS', 'deprecated': False},
+    'antlr-pd': {'id': 'ANTLR-PD', 'deprecated': False},
+    'antlr-pd-fallback': {'id': 'ANTLR-PD-fallback', 'deprecated': False},
+    'any-osi': {'id': 'any-OSI', 'deprecated': False},
+    'apache-1.0': {'id': 'Apache-1.0', 'deprecated': False},
+    'apache-1.1': {'id': 'Apache-1.1', 'deprecated': False},
+    'apache-2.0': {'id': 'Apache-2.0', 'deprecated': False},
+    'apafml': {'id': 'APAFML', 'deprecated': False},
+    'apl-1.0': {'id': 'APL-1.0', 'deprecated': False},
+    'app-s2p': {'id': 'App-s2p', 'deprecated': False},
+    'apsl-1.0': {'id': 'APSL-1.0', 'deprecated': False},
+    'apsl-1.1': {'id': 'APSL-1.1', 'deprecated': False},
+    'apsl-1.2': {'id': 'APSL-1.2', 'deprecated': False},
+    'apsl-2.0': {'id': 'APSL-2.0', 'deprecated': False},
+    'arphic-1999': {'id': 'Arphic-1999', 'deprecated': False},
+    'artistic-1.0': {'id': 'Artistic-1.0', 'deprecated': False},
+    'artistic-1.0-cl8': {'id': 'Artistic-1.0-cl8', 'deprecated': False},
+    'artistic-1.0-perl': {'id': 'Artistic-1.0-Perl', 'deprecated': False},
+    'artistic-2.0': {'id': 'Artistic-2.0', 'deprecated': False},
+    'aswf-digital-assets-1.0': {'id': 'ASWF-Digital-Assets-1.0', 'deprecated': False},
+    'aswf-digital-assets-1.1': {'id': 'ASWF-Digital-Assets-1.1', 'deprecated': False},
+    'baekmuk': {'id': 'Baekmuk', 'deprecated': False},
+    'bahyph': {'id': 'Bahyph', 'deprecated': False},
+    'barr': {'id': 'Barr', 'deprecated': False},
+    'bcrypt-solar-designer': {'id': 'bcrypt-Solar-Designer', 'deprecated': False},
+    'beerware': {'id': 'Beerware', 'deprecated': False},
+    'bitstream-charter': {'id': 'Bitstream-Charter', 'deprecated': False},
+    'bitstream-vera': {'id': 'Bitstream-Vera', 'deprecated': False},
+    'bittorrent-1.0': {'id': 'BitTorrent-1.0', 'deprecated': False},
+    'bittorrent-1.1': {'id': 'BitTorrent-1.1', 'deprecated': False},
+    'blessing': {'id': 'blessing', 'deprecated': False},
+    'blueoak-1.0.0': {'id': 'BlueOak-1.0.0', 'deprecated': False},
+    'boehm-gc': {'id': 'Boehm-GC', 'deprecated': False},
+    'borceux': {'id': 'Borceux', 'deprecated': False},
+    'brian-gladman-2-clause': {'id': 'Brian-Gladman-2-Clause', 'deprecated': False},
+    'brian-gladman-3-clause': {'id': 'Brian-Gladman-3-Clause', 'deprecated': False},
+    'bsd-1-clause': {'id': 'BSD-1-Clause', 'deprecated': False},
+    'bsd-2-clause': {'id': 'BSD-2-Clause', 'deprecated': False},
+    'bsd-2-clause-darwin': {'id': 'BSD-2-Clause-Darwin', 'deprecated': False},
+    'bsd-2-clause-first-lines': {'id': 'BSD-2-Clause-first-lines', 'deprecated': False},
+    'bsd-2-clause-freebsd': {'id': 'BSD-2-Clause-FreeBSD', 'deprecated': True},
+    'bsd-2-clause-netbsd': {'id': 'BSD-2-Clause-NetBSD', 'deprecated': True},
+    'bsd-2-clause-patent': {'id': 'BSD-2-Clause-Patent', 'deprecated': False},
+    'bsd-2-clause-views': {'id': 'BSD-2-Clause-Views', 'deprecated': False},
+    'bsd-3-clause': {'id': 'BSD-3-Clause', 'deprecated': False},
+    'bsd-3-clause-acpica': {'id': 'BSD-3-Clause-acpica', 'deprecated': False},
+    'bsd-3-clause-attribution': {'id': 'BSD-3-Clause-Attribution', 'deprecated': False},
+    'bsd-3-clause-clear': {'id': 'BSD-3-Clause-Clear', 'deprecated': False},
+    'bsd-3-clause-flex': {'id': 'BSD-3-Clause-flex', 'deprecated': False},
+    'bsd-3-clause-hp': {'id': 'BSD-3-Clause-HP', 'deprecated': False},
+    'bsd-3-clause-lbnl': {'id': 'BSD-3-Clause-LBNL', 'deprecated': False},
+    'bsd-3-clause-modification': {'id': 'BSD-3-Clause-Modification', 'deprecated': False},
+    'bsd-3-clause-no-military-license': {'id': 'BSD-3-Clause-No-Military-License', 'deprecated': False},
+    'bsd-3-clause-no-nuclear-license': {'id': 'BSD-3-Clause-No-Nuclear-License', 'deprecated': False},
+    'bsd-3-clause-no-nuclear-license-2014': {'id': 'BSD-3-Clause-No-Nuclear-License-2014', 'deprecated': False},
+    'bsd-3-clause-no-nuclear-warranty': {'id': 'BSD-3-Clause-No-Nuclear-Warranty', 'deprecated': False},
+    'bsd-3-clause-open-mpi': {'id': 'BSD-3-Clause-Open-MPI', 'deprecated': False},
+    'bsd-3-clause-sun': {'id': 'BSD-3-Clause-Sun', 'deprecated': False},
+    'bsd-4-clause': {'id': 'BSD-4-Clause', 'deprecated': False},
+    'bsd-4-clause-shortened': {'id': 'BSD-4-Clause-Shortened', 'deprecated': False},
+    'bsd-4-clause-uc': {'id': 'BSD-4-Clause-UC', 'deprecated': False},
+    'bsd-4.3reno': {'id': 'BSD-4.3RENO', 'deprecated': False},
+    'bsd-4.3tahoe': {'id': 'BSD-4.3TAHOE', 'deprecated': False},
+    'bsd-advertising-acknowledgement': {'id': 'BSD-Advertising-Acknowledgement', 'deprecated': False},
+    'bsd-attribution-hpnd-disclaimer': {'id': 'BSD-Attribution-HPND-disclaimer', 'deprecated': False},
+    'bsd-inferno-nettverk': {'id': 'BSD-Inferno-Nettverk', 'deprecated': False},
+    'bsd-protection': {'id': 'BSD-Protection', 'deprecated': False},
+    'bsd-source-beginning-file': {'id': 'BSD-Source-beginning-file', 'deprecated': False},
+    'bsd-source-code': {'id': 'BSD-Source-Code', 'deprecated': False},
+    'bsd-systemics': {'id': 'BSD-Systemics', 'deprecated': False},
+    'bsd-systemics-w3works': {'id': 'BSD-Systemics-W3Works', 'deprecated': False},
+    'bsl-1.0': {'id': 'BSL-1.0', 'deprecated': False},
+    'busl-1.1': {'id': 'BUSL-1.1', 'deprecated': False},
+    'bzip2-1.0.5': {'id': 'bzip2-1.0.5', 'deprecated': True},
+    'bzip2-1.0.6': {'id': 'bzip2-1.0.6', 'deprecated': False},
+    'c-uda-1.0': {'id': 'C-UDA-1.0', 'deprecated': False},
+    'cal-1.0': {'id': 'CAL-1.0', 'deprecated': False},
+    'cal-1.0-combined-work-exception': {'id': 'CAL-1.0-Combined-Work-Exception', 'deprecated': False},
+    'caldera': {'id': 'Caldera', 'deprecated': False},
+    'caldera-no-preamble': {'id': 'Caldera-no-preamble', 'deprecated': False},
+    'catharon': {'id': 'Catharon', 'deprecated': False},
+    'catosl-1.1': {'id': 'CATOSL-1.1', 'deprecated': False},
+    'cc-by-1.0': {'id': 'CC-BY-1.0', 'deprecated': False},
+    'cc-by-2.0': {'id': 'CC-BY-2.0', 'deprecated': False},
+    'cc-by-2.5': {'id': 'CC-BY-2.5', 'deprecated': False},
+    'cc-by-2.5-au': {'id': 'CC-BY-2.5-AU', 'deprecated': False},
+    'cc-by-3.0': {'id': 'CC-BY-3.0', 'deprecated': False},
+    'cc-by-3.0-at': {'id': 'CC-BY-3.0-AT', 'deprecated': False},
+    'cc-by-3.0-au': {'id': 'CC-BY-3.0-AU', 'deprecated': False},
+    'cc-by-3.0-de': {'id': 'CC-BY-3.0-DE', 'deprecated': False},
+    'cc-by-3.0-igo': {'id': 'CC-BY-3.0-IGO', 'deprecated': False},
+    'cc-by-3.0-nl': {'id': 'CC-BY-3.0-NL', 'deprecated': False},
+    'cc-by-3.0-us': {'id': 'CC-BY-3.0-US', 'deprecated': False},
+    'cc-by-4.0': {'id': 'CC-BY-4.0', 'deprecated': False},
+    'cc-by-nc-1.0': {'id': 'CC-BY-NC-1.0', 'deprecated': False},
+    'cc-by-nc-2.0': {'id': 'CC-BY-NC-2.0', 'deprecated': False},
+    'cc-by-nc-2.5': {'id': 'CC-BY-NC-2.5', 'deprecated': False},
+    'cc-by-nc-3.0': {'id': 'CC-BY-NC-3.0', 'deprecated': False},
+    'cc-by-nc-3.0-de': {'id': 'CC-BY-NC-3.0-DE', 'deprecated': False},
+    'cc-by-nc-4.0': {'id': 'CC-BY-NC-4.0', 'deprecated': False},
+    'cc-by-nc-nd-1.0': {'id': 'CC-BY-NC-ND-1.0', 'deprecated': False},
+    'cc-by-nc-nd-2.0': {'id': 'CC-BY-NC-ND-2.0', 'deprecated': False},
+    'cc-by-nc-nd-2.5': {'id': 'CC-BY-NC-ND-2.5', 'deprecated': False},
+    'cc-by-nc-nd-3.0': {'id': 'CC-BY-NC-ND-3.0', 'deprecated': False},
+    'cc-by-nc-nd-3.0-de': {'id': 'CC-BY-NC-ND-3.0-DE', 'deprecated': False},
+    'cc-by-nc-nd-3.0-igo': {'id': 'CC-BY-NC-ND-3.0-IGO', 'deprecated': False},
+    'cc-by-nc-nd-4.0': {'id': 'CC-BY-NC-ND-4.0', 'deprecated': False},
+    'cc-by-nc-sa-1.0': {'id': 'CC-BY-NC-SA-1.0', 'deprecated': False},
+    'cc-by-nc-sa-2.0': {'id': 'CC-BY-NC-SA-2.0', 'deprecated': False},
+    'cc-by-nc-sa-2.0-de': {'id': 'CC-BY-NC-SA-2.0-DE', 'deprecated': False},
+    'cc-by-nc-sa-2.0-fr': {'id': 'CC-BY-NC-SA-2.0-FR', 'deprecated': False},
+    'cc-by-nc-sa-2.0-uk': {'id': 'CC-BY-NC-SA-2.0-UK', 'deprecated': False},
+    'cc-by-nc-sa-2.5': {'id': 'CC-BY-NC-SA-2.5', 'deprecated': False},
+    'cc-by-nc-sa-3.0': {'id': 'CC-BY-NC-SA-3.0', 'deprecated': False},
+    'cc-by-nc-sa-3.0-de': {'id': 'CC-BY-NC-SA-3.0-DE', 'deprecated': False},
+    'cc-by-nc-sa-3.0-igo': {'id': 'CC-BY-NC-SA-3.0-IGO', 'deprecated': False},
+    'cc-by-nc-sa-4.0': {'id': 'CC-BY-NC-SA-4.0', 'deprecated': False},
+    'cc-by-nd-1.0': {'id': 'CC-BY-ND-1.0', 'deprecated': False},
+    'cc-by-nd-2.0': {'id': 'CC-BY-ND-2.0', 'deprecated': False},
+    'cc-by-nd-2.5': {'id': 'CC-BY-ND-2.5', 'deprecated': False},
+    'cc-by-nd-3.0': {'id': 'CC-BY-ND-3.0', 'deprecated': False},
+    'cc-by-nd-3.0-de': {'id': 'CC-BY-ND-3.0-DE', 'deprecated': False},
+    'cc-by-nd-4.0': {'id': 'CC-BY-ND-4.0', 'deprecated': False},
+    'cc-by-sa-1.0': {'id': 'CC-BY-SA-1.0', 'deprecated': False},
+    'cc-by-sa-2.0': {'id': 'CC-BY-SA-2.0', 'deprecated': False},
+    'cc-by-sa-2.0-uk': {'id': 'CC-BY-SA-2.0-UK', 'deprecated': False},
+    'cc-by-sa-2.1-jp': {'id': 'CC-BY-SA-2.1-JP', 'deprecated': False},
+    'cc-by-sa-2.5': {'id': 'CC-BY-SA-2.5', 'deprecated': False},
+    'cc-by-sa-3.0': {'id': 'CC-BY-SA-3.0', 'deprecated': False},
+    'cc-by-sa-3.0-at': {'id': 'CC-BY-SA-3.0-AT', 'deprecated': False},
+    'cc-by-sa-3.0-de': {'id': 'CC-BY-SA-3.0-DE', 'deprecated': False},
+    'cc-by-sa-3.0-igo': {'id': 'CC-BY-SA-3.0-IGO', 'deprecated': False},
+    'cc-by-sa-4.0': {'id': 'CC-BY-SA-4.0', 'deprecated': False},
+    'cc-pddc': {'id': 'CC-PDDC', 'deprecated': False},
+    'cc0-1.0': {'id': 'CC0-1.0', 'deprecated': False},
+    'cddl-1.0': {'id': 'CDDL-1.0', 'deprecated': False},
+    'cddl-1.1': {'id': 'CDDL-1.1', 'deprecated': False},
+    'cdl-1.0': {'id': 'CDL-1.0', 'deprecated': False},
+    'cdla-permissive-1.0': {'id': 'CDLA-Permissive-1.0', 'deprecated': False},
+    'cdla-permissive-2.0': {'id': 'CDLA-Permissive-2.0', 'deprecated': False},
+    'cdla-sharing-1.0': {'id': 'CDLA-Sharing-1.0', 'deprecated': False},
+    'cecill-1.0': {'id': 'CECILL-1.0', 'deprecated': False},
+    'cecill-1.1': {'id': 'CECILL-1.1', 'deprecated': False},
+    'cecill-2.0': {'id': 'CECILL-2.0', 'deprecated': False},
+    'cecill-2.1': {'id': 'CECILL-2.1', 'deprecated': False},
+    'cecill-b': {'id': 'CECILL-B', 'deprecated': False},
+    'cecill-c': {'id': 'CECILL-C', 'deprecated': False},
+    'cern-ohl-1.1': {'id': 'CERN-OHL-1.1', 'deprecated': False},
+    'cern-ohl-1.2': {'id': 'CERN-OHL-1.2', 'deprecated': False},
+    'cern-ohl-p-2.0': {'id': 'CERN-OHL-P-2.0', 'deprecated': False},
+    'cern-ohl-s-2.0': {'id': 'CERN-OHL-S-2.0', 'deprecated': False},
+    'cern-ohl-w-2.0': {'id': 'CERN-OHL-W-2.0', 'deprecated': False},
+    'cfitsio': {'id': 'CFITSIO', 'deprecated': False},
+    'check-cvs': {'id': 'check-cvs', 'deprecated': False},
+    'checkmk': {'id': 'checkmk', 'deprecated': False},
+    'clartistic': {'id': 'ClArtistic', 'deprecated': False},
+    'clips': {'id': 'Clips', 'deprecated': False},
+    'cmu-mach': {'id': 'CMU-Mach', 'deprecated': False},
+    'cmu-mach-nodoc': {'id': 'CMU-Mach-nodoc', 'deprecated': False},
+    'cnri-jython': {'id': 'CNRI-Jython', 'deprecated': False},
+    'cnri-python': {'id': 'CNRI-Python', 'deprecated': False},
+    'cnri-python-gpl-compatible': {'id': 'CNRI-Python-GPL-Compatible', 'deprecated': False},
+    'coil-1.0': {'id': 'COIL-1.0', 'deprecated': False},
+    'community-spec-1.0': {'id': 'Community-Spec-1.0', 'deprecated': False},
+    'condor-1.1': {'id': 'Condor-1.1', 'deprecated': False},
+    'copyleft-next-0.3.0': {'id': 'copyleft-next-0.3.0', 'deprecated': False},
+    'copyleft-next-0.3.1': {'id': 'copyleft-next-0.3.1', 'deprecated': False},
+    'cornell-lossless-jpeg': {'id': 'Cornell-Lossless-JPEG', 'deprecated': False},
+    'cpal-1.0': {'id': 'CPAL-1.0', 'deprecated': False},
+    'cpl-1.0': {'id': 'CPL-1.0', 'deprecated': False},
+    'cpol-1.02': {'id': 'CPOL-1.02', 'deprecated': False},
+    'cronyx': {'id': 'Cronyx', 'deprecated': False},
+    'crossword': {'id': 'Crossword', 'deprecated': False},
+    'crystalstacker': {'id': 'CrystalStacker', 'deprecated': False},
+    'cua-opl-1.0': {'id': 'CUA-OPL-1.0', 'deprecated': False},
+    'cube': {'id': 'Cube', 'deprecated': False},
+    'curl': {'id': 'curl', 'deprecated': False},
+    'cve-tou': {'id': 'cve-tou', 'deprecated': False},
+    'd-fsl-1.0': {'id': 'D-FSL-1.0', 'deprecated': False},
+    'dec-3-clause': {'id': 'DEC-3-Clause', 'deprecated': False},
+    'diffmark': {'id': 'diffmark', 'deprecated': False},
+    'dl-de-by-2.0': {'id': 'DL-DE-BY-2.0', 'deprecated': False},
+    'dl-de-zero-2.0': {'id': 'DL-DE-ZERO-2.0', 'deprecated': False},
+    'doc': {'id': 'DOC', 'deprecated': False},
+    'docbook-schema': {'id': 'DocBook-Schema', 'deprecated': False},
+    'docbook-xml': {'id': 'DocBook-XML', 'deprecated': False},
+    'dotseqn': {'id': 'Dotseqn', 'deprecated': False},
+    'drl-1.0': {'id': 'DRL-1.0', 'deprecated': False},
+    'drl-1.1': {'id': 'DRL-1.1', 'deprecated': False},
+    'dsdp': {'id': 'DSDP', 'deprecated': False},
+    'dtoa': {'id': 'dtoa', 'deprecated': False},
+    'dvipdfm': {'id': 'dvipdfm', 'deprecated': False},
+    'ecl-1.0': {'id': 'ECL-1.0', 'deprecated': False},
+    'ecl-2.0': {'id': 'ECL-2.0', 'deprecated': False},
+    'ecos-2.0': {'id': 'eCos-2.0', 'deprecated': True},
+    'efl-1.0': {'id': 'EFL-1.0', 'deprecated': False},
+    'efl-2.0': {'id': 'EFL-2.0', 'deprecated': False},
+    'egenix': {'id': 'eGenix', 'deprecated': False},
+    'elastic-2.0': {'id': 'Elastic-2.0', 'deprecated': False},
+    'entessa': {'id': 'Entessa', 'deprecated': False},
+    'epics': {'id': 'EPICS', 'deprecated': False},
+    'epl-1.0': {'id': 'EPL-1.0', 'deprecated': False},
+    'epl-2.0': {'id': 'EPL-2.0', 'deprecated': False},
+    'erlpl-1.1': {'id': 'ErlPL-1.1', 'deprecated': False},
+    'etalab-2.0': {'id': 'etalab-2.0', 'deprecated': False},
+    'eudatagrid': {'id': 'EUDatagrid', 'deprecated': False},
+    'eupl-1.0': {'id': 'EUPL-1.0', 'deprecated': False},
+    'eupl-1.1': {'id': 'EUPL-1.1', 'deprecated': False},
+    'eupl-1.2': {'id': 'EUPL-1.2', 'deprecated': False},
+    'eurosym': {'id': 'Eurosym', 'deprecated': False},
+    'fair': {'id': 'Fair', 'deprecated': False},
+    'fbm': {'id': 'FBM', 'deprecated': False},
+    'fdk-aac': {'id': 'FDK-AAC', 'deprecated': False},
+    'ferguson-twofish': {'id': 'Ferguson-Twofish', 'deprecated': False},
+    'frameworx-1.0': {'id': 'Frameworx-1.0', 'deprecated': False},
+    'freebsd-doc': {'id': 'FreeBSD-DOC', 'deprecated': False},
+    'freeimage': {'id': 'FreeImage', 'deprecated': False},
+    'fsfap': {'id': 'FSFAP', 'deprecated': False},
+    'fsfap-no-warranty-disclaimer': {'id': 'FSFAP-no-warranty-disclaimer', 'deprecated': False},
+    'fsful': {'id': 'FSFUL', 'deprecated': False},
+    'fsfullr': {'id': 'FSFULLR', 'deprecated': False},
+    'fsfullrwd': {'id': 'FSFULLRWD', 'deprecated': False},
+    'ftl': {'id': 'FTL', 'deprecated': False},
+    'furuseth': {'id': 'Furuseth', 'deprecated': False},
+    'fwlw': {'id': 'fwlw', 'deprecated': False},
+    'gcr-docs': {'id': 'GCR-docs', 'deprecated': False},
+    'gd': {'id': 'GD', 'deprecated': False},
+    'gfdl-1.1': {'id': 'GFDL-1.1', 'deprecated': True},
+    'gfdl-1.1-invariants-only': {'id': 'GFDL-1.1-invariants-only', 'deprecated': False},
+    'gfdl-1.1-invariants-or-later': {'id': 'GFDL-1.1-invariants-or-later', 'deprecated': False},
+    'gfdl-1.1-no-invariants-only': {'id': 'GFDL-1.1-no-invariants-only', 'deprecated': False},
+    'gfdl-1.1-no-invariants-or-later': {'id': 'GFDL-1.1-no-invariants-or-later', 'deprecated': False},
+    'gfdl-1.1-only': {'id': 'GFDL-1.1-only', 'deprecated': False},
+    'gfdl-1.1-or-later': {'id': 'GFDL-1.1-or-later', 'deprecated': False},
+    'gfdl-1.2': {'id': 'GFDL-1.2', 'deprecated': True},
+    'gfdl-1.2-invariants-only': {'id': 'GFDL-1.2-invariants-only', 'deprecated': False},
+    'gfdl-1.2-invariants-or-later': {'id': 'GFDL-1.2-invariants-or-later', 'deprecated': False},
+    'gfdl-1.2-no-invariants-only': {'id': 'GFDL-1.2-no-invariants-only', 'deprecated': False},
+    'gfdl-1.2-no-invariants-or-later': {'id': 'GFDL-1.2-no-invariants-or-later', 'deprecated': False},
+    'gfdl-1.2-only': {'id': 'GFDL-1.2-only', 'deprecated': False},
+    'gfdl-1.2-or-later': {'id': 'GFDL-1.2-or-later', 'deprecated': False},
+    'gfdl-1.3': {'id': 'GFDL-1.3', 'deprecated': True},
+    'gfdl-1.3-invariants-only': {'id': 'GFDL-1.3-invariants-only', 'deprecated': False},
+    'gfdl-1.3-invariants-or-later': {'id': 'GFDL-1.3-invariants-or-later', 'deprecated': False},
+    'gfdl-1.3-no-invariants-only': {'id': 'GFDL-1.3-no-invariants-only', 'deprecated': False},
+    'gfdl-1.3-no-invariants-or-later': {'id': 'GFDL-1.3-no-invariants-or-later', 'deprecated': False},
+    'gfdl-1.3-only': {'id': 'GFDL-1.3-only', 'deprecated': False},
+    'gfdl-1.3-or-later': {'id': 'GFDL-1.3-or-later', 'deprecated': False},
+    'giftware': {'id': 'Giftware', 'deprecated': False},
+    'gl2ps': {'id': 'GL2PS', 'deprecated': False},
+    'glide': {'id': 'Glide', 'deprecated': False},
+    'glulxe': {'id': 'Glulxe', 'deprecated': False},
+    'glwtpl': {'id': 'GLWTPL', 'deprecated': False},
+    'gnuplot': {'id': 'gnuplot', 'deprecated': False},
+    'gpl-1.0': {'id': 'GPL-1.0', 'deprecated': True},
+    'gpl-1.0+': {'id': 'GPL-1.0+', 'deprecated': True},
+    'gpl-1.0-only': {'id': 'GPL-1.0-only', 'deprecated': False},
+    'gpl-1.0-or-later': {'id': 'GPL-1.0-or-later', 'deprecated': False},
+    'gpl-2.0': {'id': 'GPL-2.0', 'deprecated': True},
+    'gpl-2.0+': {'id': 'GPL-2.0+', 'deprecated': True},
+    'gpl-2.0-only': {'id': 'GPL-2.0-only', 'deprecated': False},
+    'gpl-2.0-or-later': {'id': 'GPL-2.0-or-later', 'deprecated': False},
+    'gpl-2.0-with-autoconf-exception': {'id': 'GPL-2.0-with-autoconf-exception', 'deprecated': True},
+    'gpl-2.0-with-bison-exception': {'id': 'GPL-2.0-with-bison-exception', 'deprecated': True},
+    'gpl-2.0-with-classpath-exception': {'id': 'GPL-2.0-with-classpath-exception', 'deprecated': True},
+    'gpl-2.0-with-font-exception': {'id': 'GPL-2.0-with-font-exception', 'deprecated': True},
+    'gpl-2.0-with-gcc-exception': {'id': 'GPL-2.0-with-GCC-exception', 'deprecated': True},
+    'gpl-3.0': {'id': 'GPL-3.0', 'deprecated': True},
+    'gpl-3.0+': {'id': 'GPL-3.0+', 'deprecated': True},
+    'gpl-3.0-only': {'id': 'GPL-3.0-only', 'deprecated': False},
+    'gpl-3.0-or-later': {'id': 'GPL-3.0-or-later', 'deprecated': False},
+    'gpl-3.0-with-autoconf-exception': {'id': 'GPL-3.0-with-autoconf-exception', 'deprecated': True},
+    'gpl-3.0-with-gcc-exception': {'id': 'GPL-3.0-with-GCC-exception', 'deprecated': True},
+    'graphics-gems': {'id': 'Graphics-Gems', 'deprecated': False},
+    'gsoap-1.3b': {'id': 'gSOAP-1.3b', 'deprecated': False},
+    'gtkbook': {'id': 'gtkbook', 'deprecated': False},
+    'gutmann': {'id': 'Gutmann', 'deprecated': False},
+    'haskellreport': {'id': 'HaskellReport', 'deprecated': False},
+    'hdparm': {'id': 'hdparm', 'deprecated': False},
+    'hidapi': {'id': 'HIDAPI', 'deprecated': False},
+    'hippocratic-2.1': {'id': 'Hippocratic-2.1', 'deprecated': False},
+    'hp-1986': {'id': 'HP-1986', 'deprecated': False},
+    'hp-1989': {'id': 'HP-1989', 'deprecated': False},
+    'hpnd': {'id': 'HPND', 'deprecated': False},
+    'hpnd-dec': {'id': 'HPND-DEC', 'deprecated': False},
+    'hpnd-doc': {'id': 'HPND-doc', 'deprecated': False},
+    'hpnd-doc-sell': {'id': 'HPND-doc-sell', 'deprecated': False},
+    'hpnd-export-us': {'id': 'HPND-export-US', 'deprecated': False},
+    'hpnd-export-us-acknowledgement': {'id': 'HPND-export-US-acknowledgement', 'deprecated': False},
+    'hpnd-export-us-modify': {'id': 'HPND-export-US-modify', 'deprecated': False},
+    'hpnd-export2-us': {'id': 'HPND-export2-US', 'deprecated': False},
+    'hpnd-fenneberg-livingston': {'id': 'HPND-Fenneberg-Livingston', 'deprecated': False},
+    'hpnd-inria-imag': {'id': 'HPND-INRIA-IMAG', 'deprecated': False},
+    'hpnd-intel': {'id': 'HPND-Intel', 'deprecated': False},
+    'hpnd-kevlin-henney': {'id': 'HPND-Kevlin-Henney', 'deprecated': False},
+    'hpnd-markus-kuhn': {'id': 'HPND-Markus-Kuhn', 'deprecated': False},
+    'hpnd-merchantability-variant': {'id': 'HPND-merchantability-variant', 'deprecated': False},
+    'hpnd-mit-disclaimer': {'id': 'HPND-MIT-disclaimer', 'deprecated': False},
+    'hpnd-netrek': {'id': 'HPND-Netrek', 'deprecated': False},
+    'hpnd-pbmplus': {'id': 'HPND-Pbmplus', 'deprecated': False},
+    'hpnd-sell-mit-disclaimer-xserver': {'id': 'HPND-sell-MIT-disclaimer-xserver', 'deprecated': False},
+    'hpnd-sell-regexpr': {'id': 'HPND-sell-regexpr', 'deprecated': False},
+    'hpnd-sell-variant': {'id': 'HPND-sell-variant', 'deprecated': False},
+    'hpnd-sell-variant-mit-disclaimer': {'id': 'HPND-sell-variant-MIT-disclaimer', 'deprecated': False},
+    'hpnd-sell-variant-mit-disclaimer-rev': {'id': 'HPND-sell-variant-MIT-disclaimer-rev', 'deprecated': False},
+    'hpnd-uc': {'id': 'HPND-UC', 'deprecated': False},
+    'hpnd-uc-export-us': {'id': 'HPND-UC-export-US', 'deprecated': False},
+    'htmltidy': {'id': 'HTMLTIDY', 'deprecated': False},
+    'ibm-pibs': {'id': 'IBM-pibs', 'deprecated': False},
+    'icu': {'id': 'ICU', 'deprecated': False},
+    'iec-code-components-eula': {'id': 'IEC-Code-Components-EULA', 'deprecated': False},
+    'ijg': {'id': 'IJG', 'deprecated': False},
+    'ijg-short': {'id': 'IJG-short', 'deprecated': False},
+    'imagemagick': {'id': 'ImageMagick', 'deprecated': False},
+    'imatix': {'id': 'iMatix', 'deprecated': False},
+    'imlib2': {'id': 'Imlib2', 'deprecated': False},
+    'info-zip': {'id': 'Info-ZIP', 'deprecated': False},
+    'inner-net-2.0': {'id': 'Inner-Net-2.0', 'deprecated': False},
+    'intel': {'id': 'Intel', 'deprecated': False},
+    'intel-acpi': {'id': 'Intel-ACPI', 'deprecated': False},
+    'interbase-1.0': {'id': 'Interbase-1.0', 'deprecated': False},
+    'ipa': {'id': 'IPA', 'deprecated': False},
+    'ipl-1.0': {'id': 'IPL-1.0', 'deprecated': False},
+    'isc': {'id': 'ISC', 'deprecated': False},
+    'isc-veillard': {'id': 'ISC-Veillard', 'deprecated': False},
+    'jam': {'id': 'Jam', 'deprecated': False},
+    'jasper-2.0': {'id': 'JasPer-2.0', 'deprecated': False},
+    'jpl-image': {'id': 'JPL-image', 'deprecated': False},
+    'jpnic': {'id': 'JPNIC', 'deprecated': False},
+    'json': {'id': 'JSON', 'deprecated': False},
+    'kastrup': {'id': 'Kastrup', 'deprecated': False},
+    'kazlib': {'id': 'Kazlib', 'deprecated': False},
+    'knuth-ctan': {'id': 'Knuth-CTAN', 'deprecated': False},
+    'lal-1.2': {'id': 'LAL-1.2', 'deprecated': False},
+    'lal-1.3': {'id': 'LAL-1.3', 'deprecated': False},
+    'latex2e': {'id': 'Latex2e', 'deprecated': False},
+    'latex2e-translated-notice': {'id': 'Latex2e-translated-notice', 'deprecated': False},
+    'leptonica': {'id': 'Leptonica', 'deprecated': False},
+    'lgpl-2.0': {'id': 'LGPL-2.0', 'deprecated': True},
+    'lgpl-2.0+': {'id': 'LGPL-2.0+', 'deprecated': True},
+    'lgpl-2.0-only': {'id': 'LGPL-2.0-only', 'deprecated': False},
+    'lgpl-2.0-or-later': {'id': 'LGPL-2.0-or-later', 'deprecated': False},
+    'lgpl-2.1': {'id': 'LGPL-2.1', 'deprecated': True},
+    'lgpl-2.1+': {'id': 'LGPL-2.1+', 'deprecated': True},
+    'lgpl-2.1-only': {'id': 'LGPL-2.1-only', 'deprecated': False},
+    'lgpl-2.1-or-later': {'id': 'LGPL-2.1-or-later', 'deprecated': False},
+    'lgpl-3.0': {'id': 'LGPL-3.0', 'deprecated': True},
+    'lgpl-3.0+': {'id': 'LGPL-3.0+', 'deprecated': True},
+    'lgpl-3.0-only': {'id': 'LGPL-3.0-only', 'deprecated': False},
+    'lgpl-3.0-or-later': {'id': 'LGPL-3.0-or-later', 'deprecated': False},
+    'lgpllr': {'id': 'LGPLLR', 'deprecated': False},
+    'libpng': {'id': 'Libpng', 'deprecated': False},
+    'libpng-2.0': {'id': 'libpng-2.0', 'deprecated': False},
+    'libselinux-1.0': {'id': 'libselinux-1.0', 'deprecated': False},
+    'libtiff': {'id': 'libtiff', 'deprecated': False},
+    'libutil-david-nugent': {'id': 'libutil-David-Nugent', 'deprecated': False},
+    'liliq-p-1.1': {'id': 'LiLiQ-P-1.1', 'deprecated': False},
+    'liliq-r-1.1': {'id': 'LiLiQ-R-1.1', 'deprecated': False},
+    'liliq-rplus-1.1': {'id': 'LiLiQ-Rplus-1.1', 'deprecated': False},
+    'linux-man-pages-1-para': {'id': 'Linux-man-pages-1-para', 'deprecated': False},
+    'linux-man-pages-copyleft': {'id': 'Linux-man-pages-copyleft', 'deprecated': False},
+    'linux-man-pages-copyleft-2-para': {'id': 'Linux-man-pages-copyleft-2-para', 'deprecated': False},
+    'linux-man-pages-copyleft-var': {'id': 'Linux-man-pages-copyleft-var', 'deprecated': False},
+    'linux-openib': {'id': 'Linux-OpenIB', 'deprecated': False},
+    'loop': {'id': 'LOOP', 'deprecated': False},
+    'lpd-document': {'id': 'LPD-document', 'deprecated': False},
+    'lpl-1.0': {'id': 'LPL-1.0', 'deprecated': False},
+    'lpl-1.02': {'id': 'LPL-1.02', 'deprecated': False},
+    'lppl-1.0': {'id': 'LPPL-1.0', 'deprecated': False},
+    'lppl-1.1': {'id': 'LPPL-1.1', 'deprecated': False},
+    'lppl-1.2': {'id': 'LPPL-1.2', 'deprecated': False},
+    'lppl-1.3a': {'id': 'LPPL-1.3a', 'deprecated': False},
+    'lppl-1.3c': {'id': 'LPPL-1.3c', 'deprecated': False},
+    'lsof': {'id': 'lsof', 'deprecated': False},
+    'lucida-bitmap-fonts': {'id': 'Lucida-Bitmap-Fonts', 'deprecated': False},
+    'lzma-sdk-9.11-to-9.20': {'id': 'LZMA-SDK-9.11-to-9.20', 'deprecated': False},
+    'lzma-sdk-9.22': {'id': 'LZMA-SDK-9.22', 'deprecated': False},
+    'mackerras-3-clause': {'id': 'Mackerras-3-Clause', 'deprecated': False},
+    'mackerras-3-clause-acknowledgment': {'id': 'Mackerras-3-Clause-acknowledgment', 'deprecated': False},
+    'magaz': {'id': 'magaz', 'deprecated': False},
+    'mailprio': {'id': 'mailprio', 'deprecated': False},
+    'makeindex': {'id': 'MakeIndex', 'deprecated': False},
+    'martin-birgmeier': {'id': 'Martin-Birgmeier', 'deprecated': False},
+    'mcphee-slideshow': {'id': 'McPhee-slideshow', 'deprecated': False},
+    'metamail': {'id': 'metamail', 'deprecated': False},
+    'minpack': {'id': 'Minpack', 'deprecated': False},
+    'miros': {'id': 'MirOS', 'deprecated': False},
+    'mit': {'id': 'MIT', 'deprecated': False},
+    'mit-0': {'id': 'MIT-0', 'deprecated': False},
+    'mit-advertising': {'id': 'MIT-advertising', 'deprecated': False},
+    'mit-cmu': {'id': 'MIT-CMU', 'deprecated': False},
+    'mit-enna': {'id': 'MIT-enna', 'deprecated': False},
+    'mit-feh': {'id': 'MIT-feh', 'deprecated': False},
+    'mit-festival': {'id': 'MIT-Festival', 'deprecated': False},
+    'mit-khronos-old': {'id': 'MIT-Khronos-old', 'deprecated': False},
+    'mit-modern-variant': {'id': 'MIT-Modern-Variant', 'deprecated': False},
+    'mit-open-group': {'id': 'MIT-open-group', 'deprecated': False},
+    'mit-testregex': {'id': 'MIT-testregex', 'deprecated': False},
+    'mit-wu': {'id': 'MIT-Wu', 'deprecated': False},
+    'mitnfa': {'id': 'MITNFA', 'deprecated': False},
+    'mmixware': {'id': 'MMIXware', 'deprecated': False},
+    'motosoto': {'id': 'Motosoto', 'deprecated': False},
+    'mpeg-ssg': {'id': 'MPEG-SSG', 'deprecated': False},
+    'mpi-permissive': {'id': 'mpi-permissive', 'deprecated': False},
+    'mpich2': {'id': 'mpich2', 'deprecated': False},
+    'mpl-1.0': {'id': 'MPL-1.0', 'deprecated': False},
+    'mpl-1.1': {'id': 'MPL-1.1', 'deprecated': False},
+    'mpl-2.0': {'id': 'MPL-2.0', 'deprecated': False},
+    'mpl-2.0-no-copyleft-exception': {'id': 'MPL-2.0-no-copyleft-exception', 'deprecated': False},
+    'mplus': {'id': 'mplus', 'deprecated': False},
+    'ms-lpl': {'id': 'MS-LPL', 'deprecated': False},
+    'ms-pl': {'id': 'MS-PL', 'deprecated': False},
+    'ms-rl': {'id': 'MS-RL', 'deprecated': False},
+    'mtll': {'id': 'MTLL', 'deprecated': False},
+    'mulanpsl-1.0': {'id': 'MulanPSL-1.0', 'deprecated': False},
+    'mulanpsl-2.0': {'id': 'MulanPSL-2.0', 'deprecated': False},
+    'multics': {'id': 'Multics', 'deprecated': False},
+    'mup': {'id': 'Mup', 'deprecated': False},
+    'naist-2003': {'id': 'NAIST-2003', 'deprecated': False},
+    'nasa-1.3': {'id': 'NASA-1.3', 'deprecated': False},
+    'naumen': {'id': 'Naumen', 'deprecated': False},
+    'nbpl-1.0': {'id': 'NBPL-1.0', 'deprecated': False},
+    'ncbi-pd': {'id': 'NCBI-PD', 'deprecated': False},
+    'ncgl-uk-2.0': {'id': 'NCGL-UK-2.0', 'deprecated': False},
+    'ncl': {'id': 'NCL', 'deprecated': False},
+    'ncsa': {'id': 'NCSA', 'deprecated': False},
+    'net-snmp': {'id': 'Net-SNMP', 'deprecated': True},
+    'netcdf': {'id': 'NetCDF', 'deprecated': False},
+    'newsletr': {'id': 'Newsletr', 'deprecated': False},
+    'ngpl': {'id': 'NGPL', 'deprecated': False},
+    'nicta-1.0': {'id': 'NICTA-1.0', 'deprecated': False},
+    'nist-pd': {'id': 'NIST-PD', 'deprecated': False},
+    'nist-pd-fallback': {'id': 'NIST-PD-fallback', 'deprecated': False},
+    'nist-software': {'id': 'NIST-Software', 'deprecated': False},
+    'nlod-1.0': {'id': 'NLOD-1.0', 'deprecated': False},
+    'nlod-2.0': {'id': 'NLOD-2.0', 'deprecated': False},
+    'nlpl': {'id': 'NLPL', 'deprecated': False},
+    'nokia': {'id': 'Nokia', 'deprecated': False},
+    'nosl': {'id': 'NOSL', 'deprecated': False},
+    'noweb': {'id': 'Noweb', 'deprecated': False},
+    'npl-1.0': {'id': 'NPL-1.0', 'deprecated': False},
+    'npl-1.1': {'id': 'NPL-1.1', 'deprecated': False},
+    'nposl-3.0': {'id': 'NPOSL-3.0', 'deprecated': False},
+    'nrl': {'id': 'NRL', 'deprecated': False},
+    'ntp': {'id': 'NTP', 'deprecated': False},
+    'ntp-0': {'id': 'NTP-0', 'deprecated': False},
+    'nunit': {'id': 'Nunit', 'deprecated': True},
+    'o-uda-1.0': {'id': 'O-UDA-1.0', 'deprecated': False},
+    'oar': {'id': 'OAR', 'deprecated': False},
+    'occt-pl': {'id': 'OCCT-PL', 'deprecated': False},
+    'oclc-2.0': {'id': 'OCLC-2.0', 'deprecated': False},
+    'odbl-1.0': {'id': 'ODbL-1.0', 'deprecated': False},
+    'odc-by-1.0': {'id': 'ODC-By-1.0', 'deprecated': False},
+    'offis': {'id': 'OFFIS', 'deprecated': False},
+    'ofl-1.0': {'id': 'OFL-1.0', 'deprecated': False},
+    'ofl-1.0-no-rfn': {'id': 'OFL-1.0-no-RFN', 'deprecated': False},
+    'ofl-1.0-rfn': {'id': 'OFL-1.0-RFN', 'deprecated': False},
+    'ofl-1.1': {'id': 'OFL-1.1', 'deprecated': False},
+    'ofl-1.1-no-rfn': {'id': 'OFL-1.1-no-RFN', 'deprecated': False},
+    'ofl-1.1-rfn': {'id': 'OFL-1.1-RFN', 'deprecated': False},
+    'ogc-1.0': {'id': 'OGC-1.0', 'deprecated': False},
+    'ogdl-taiwan-1.0': {'id': 'OGDL-Taiwan-1.0', 'deprecated': False},
+    'ogl-canada-2.0': {'id': 'OGL-Canada-2.0', 'deprecated': False},
+    'ogl-uk-1.0': {'id': 'OGL-UK-1.0', 'deprecated': False},
+    'ogl-uk-2.0': {'id': 'OGL-UK-2.0', 'deprecated': False},
+    'ogl-uk-3.0': {'id': 'OGL-UK-3.0', 'deprecated': False},
+    'ogtsl': {'id': 'OGTSL', 'deprecated': False},
+    'oldap-1.1': {'id': 'OLDAP-1.1', 'deprecated': False},
+    'oldap-1.2': {'id': 'OLDAP-1.2', 'deprecated': False},
+    'oldap-1.3': {'id': 'OLDAP-1.3', 'deprecated': False},
+    'oldap-1.4': {'id': 'OLDAP-1.4', 'deprecated': False},
+    'oldap-2.0': {'id': 'OLDAP-2.0', 'deprecated': False},
+    'oldap-2.0.1': {'id': 'OLDAP-2.0.1', 'deprecated': False},
+    'oldap-2.1': {'id': 'OLDAP-2.1', 'deprecated': False},
+    'oldap-2.2': {'id': 'OLDAP-2.2', 'deprecated': False},
+    'oldap-2.2.1': {'id': 'OLDAP-2.2.1', 'deprecated': False},
+    'oldap-2.2.2': {'id': 'OLDAP-2.2.2', 'deprecated': False},
+    'oldap-2.3': {'id': 'OLDAP-2.3', 'deprecated': False},
+    'oldap-2.4': {'id': 'OLDAP-2.4', 'deprecated': False},
+    'oldap-2.5': {'id': 'OLDAP-2.5', 'deprecated': False},
+    'oldap-2.6': {'id': 'OLDAP-2.6', 'deprecated': False},
+    'oldap-2.7': {'id': 'OLDAP-2.7', 'deprecated': False},
+    'oldap-2.8': {'id': 'OLDAP-2.8', 'deprecated': False},
+    'olfl-1.3': {'id': 'OLFL-1.3', 'deprecated': False},
+    'oml': {'id': 'OML', 'deprecated': False},
+    'openpbs-2.3': {'id': 'OpenPBS-2.3', 'deprecated': False},
+    'openssl': {'id': 'OpenSSL', 'deprecated': False},
+    'openssl-standalone': {'id': 'OpenSSL-standalone', 'deprecated': False},
+    'openvision': {'id': 'OpenVision', 'deprecated': False},
+    'opl-1.0': {'id': 'OPL-1.0', 'deprecated': False},
+    'opl-uk-3.0': {'id': 'OPL-UK-3.0', 'deprecated': False},
+    'opubl-1.0': {'id': 'OPUBL-1.0', 'deprecated': False},
+    'oset-pl-2.1': {'id': 'OSET-PL-2.1', 'deprecated': False},
+    'osl-1.0': {'id': 'OSL-1.0', 'deprecated': False},
+    'osl-1.1': {'id': 'OSL-1.1', 'deprecated': False},
+    'osl-2.0': {'id': 'OSL-2.0', 'deprecated': False},
+    'osl-2.1': {'id': 'OSL-2.1', 'deprecated': False},
+    'osl-3.0': {'id': 'OSL-3.0', 'deprecated': False},
+    'padl': {'id': 'PADL', 'deprecated': False},
+    'parity-6.0.0': {'id': 'Parity-6.0.0', 'deprecated': False},
+    'parity-7.0.0': {'id': 'Parity-7.0.0', 'deprecated': False},
+    'pddl-1.0': {'id': 'PDDL-1.0', 'deprecated': False},
+    'php-3.0': {'id': 'PHP-3.0', 'deprecated': False},
+    'php-3.01': {'id': 'PHP-3.01', 'deprecated': False},
+    'pixar': {'id': 'Pixar', 'deprecated': False},
+    'pkgconf': {'id': 'pkgconf', 'deprecated': False},
+    'plexus': {'id': 'Plexus', 'deprecated': False},
+    'pnmstitch': {'id': 'pnmstitch', 'deprecated': False},
+    'polyform-noncommercial-1.0.0': {'id': 'PolyForm-Noncommercial-1.0.0', 'deprecated': False},
+    'polyform-small-business-1.0.0': {'id': 'PolyForm-Small-Business-1.0.0', 'deprecated': False},
+    'postgresql': {'id': 'PostgreSQL', 'deprecated': False},
+    'ppl': {'id': 'PPL', 'deprecated': False},
+    'psf-2.0': {'id': 'PSF-2.0', 'deprecated': False},
+    'psfrag': {'id': 'psfrag', 'deprecated': False},
+    'psutils': {'id': 'psutils', 'deprecated': False},
+    'python-2.0': {'id': 'Python-2.0', 'deprecated': False},
+    'python-2.0.1': {'id': 'Python-2.0.1', 'deprecated': False},
+    'python-ldap': {'id': 'python-ldap', 'deprecated': False},
+    'qhull': {'id': 'Qhull', 'deprecated': False},
+    'qpl-1.0': {'id': 'QPL-1.0', 'deprecated': False},
+    'qpl-1.0-inria-2004': {'id': 'QPL-1.0-INRIA-2004', 'deprecated': False},
+    'radvd': {'id': 'radvd', 'deprecated': False},
+    'rdisc': {'id': 'Rdisc', 'deprecated': False},
+    'rhecos-1.1': {'id': 'RHeCos-1.1', 'deprecated': False},
+    'rpl-1.1': {'id': 'RPL-1.1', 'deprecated': False},
+    'rpl-1.5': {'id': 'RPL-1.5', 'deprecated': False},
+    'rpsl-1.0': {'id': 'RPSL-1.0', 'deprecated': False},
+    'rsa-md': {'id': 'RSA-MD', 'deprecated': False},
+    'rscpl': {'id': 'RSCPL', 'deprecated': False},
+    'ruby': {'id': 'Ruby', 'deprecated': False},
+    'ruby-pty': {'id': 'Ruby-pty', 'deprecated': False},
+    'sax-pd': {'id': 'SAX-PD', 'deprecated': False},
+    'sax-pd-2.0': {'id': 'SAX-PD-2.0', 'deprecated': False},
+    'saxpath': {'id': 'Saxpath', 'deprecated': False},
+    'scea': {'id': 'SCEA', 'deprecated': False},
+    'schemereport': {'id': 'SchemeReport', 'deprecated': False},
+    'sendmail': {'id': 'Sendmail', 'deprecated': False},
+    'sendmail-8.23': {'id': 'Sendmail-8.23', 'deprecated': False},
+    'sgi-b-1.0': {'id': 'SGI-B-1.0', 'deprecated': False},
+    'sgi-b-1.1': {'id': 'SGI-B-1.1', 'deprecated': False},
+    'sgi-b-2.0': {'id': 'SGI-B-2.0', 'deprecated': False},
+    'sgi-opengl': {'id': 'SGI-OpenGL', 'deprecated': False},
+    'sgp4': {'id': 'SGP4', 'deprecated': False},
+    'shl-0.5': {'id': 'SHL-0.5', 'deprecated': False},
+    'shl-0.51': {'id': 'SHL-0.51', 'deprecated': False},
+    'simpl-2.0': {'id': 'SimPL-2.0', 'deprecated': False},
+    'sissl': {'id': 'SISSL', 'deprecated': False},
+    'sissl-1.2': {'id': 'SISSL-1.2', 'deprecated': False},
+    'sl': {'id': 'SL', 'deprecated': False},
+    'sleepycat': {'id': 'Sleepycat', 'deprecated': False},
+    'smlnj': {'id': 'SMLNJ', 'deprecated': False},
+    'smppl': {'id': 'SMPPL', 'deprecated': False},
+    'snia': {'id': 'SNIA', 'deprecated': False},
+    'snprintf': {'id': 'snprintf', 'deprecated': False},
+    'softsurfer': {'id': 'softSurfer', 'deprecated': False},
+    'soundex': {'id': 'Soundex', 'deprecated': False},
+    'spencer-86': {'id': 'Spencer-86', 'deprecated': False},
+    'spencer-94': {'id': 'Spencer-94', 'deprecated': False},
+    'spencer-99': {'id': 'Spencer-99', 'deprecated': False},
+    'spl-1.0': {'id': 'SPL-1.0', 'deprecated': False},
+    'ssh-keyscan': {'id': 'ssh-keyscan', 'deprecated': False},
+    'ssh-openssh': {'id': 'SSH-OpenSSH', 'deprecated': False},
+    'ssh-short': {'id': 'SSH-short', 'deprecated': False},
+    'ssleay-standalone': {'id': 'SSLeay-standalone', 'deprecated': False},
+    'sspl-1.0': {'id': 'SSPL-1.0', 'deprecated': False},
+    'standardml-nj': {'id': 'StandardML-NJ', 'deprecated': True},
+    'sugarcrm-1.1.3': {'id': 'SugarCRM-1.1.3', 'deprecated': False},
+    'sun-ppp': {'id': 'Sun-PPP', 'deprecated': False},
+    'sun-ppp-2000': {'id': 'Sun-PPP-2000', 'deprecated': False},
+    'sunpro': {'id': 'SunPro', 'deprecated': False},
+    'swl': {'id': 'SWL', 'deprecated': False},
+    'swrule': {'id': 'swrule', 'deprecated': False},
+    'symlinks': {'id': 'Symlinks', 'deprecated': False},
+    'tapr-ohl-1.0': {'id': 'TAPR-OHL-1.0', 'deprecated': False},
+    'tcl': {'id': 'TCL', 'deprecated': False},
+    'tcp-wrappers': {'id': 'TCP-wrappers', 'deprecated': False},
+    'termreadkey': {'id': 'TermReadKey', 'deprecated': False},
+    'tgppl-1.0': {'id': 'TGPPL-1.0', 'deprecated': False},
+    'threeparttable': {'id': 'threeparttable', 'deprecated': False},
+    'tmate': {'id': 'TMate', 'deprecated': False},
+    'torque-1.1': {'id': 'TORQUE-1.1', 'deprecated': False},
+    'tosl': {'id': 'TOSL', 'deprecated': False},
+    'tpdl': {'id': 'TPDL', 'deprecated': False},
+    'tpl-1.0': {'id': 'TPL-1.0', 'deprecated': False},
+    'ttwl': {'id': 'TTWL', 'deprecated': False},
+    'ttyp0': {'id': 'TTYP0', 'deprecated': False},
+    'tu-berlin-1.0': {'id': 'TU-Berlin-1.0', 'deprecated': False},
+    'tu-berlin-2.0': {'id': 'TU-Berlin-2.0', 'deprecated': False},
+    'ubuntu-font-1.0': {'id': 'Ubuntu-font-1.0', 'deprecated': False},
+    'ucar': {'id': 'UCAR', 'deprecated': False},
+    'ucl-1.0': {'id': 'UCL-1.0', 'deprecated': False},
+    'ulem': {'id': 'ulem', 'deprecated': False},
+    'umich-merit': {'id': 'UMich-Merit', 'deprecated': False},
+    'unicode-3.0': {'id': 'Unicode-3.0', 'deprecated': False},
+    'unicode-dfs-2015': {'id': 'Unicode-DFS-2015', 'deprecated': False},
+    'unicode-dfs-2016': {'id': 'Unicode-DFS-2016', 'deprecated': False},
+    'unicode-tou': {'id': 'Unicode-TOU', 'deprecated': False},
+    'unixcrypt': {'id': 'UnixCrypt', 'deprecated': False},
+    'unlicense': {'id': 'Unlicense', 'deprecated': False},
+    'upl-1.0': {'id': 'UPL-1.0', 'deprecated': False},
+    'urt-rle': {'id': 'URT-RLE', 'deprecated': False},
+    'vim': {'id': 'Vim', 'deprecated': False},
+    'vostrom': {'id': 'VOSTROM', 'deprecated': False},
+    'vsl-1.0': {'id': 'VSL-1.0', 'deprecated': False},
+    'w3c': {'id': 'W3C', 'deprecated': False},
+    'w3c-19980720': {'id': 'W3C-19980720', 'deprecated': False},
+    'w3c-20150513': {'id': 'W3C-20150513', 'deprecated': False},
+    'w3m': {'id': 'w3m', 'deprecated': False},
+    'watcom-1.0': {'id': 'Watcom-1.0', 'deprecated': False},
+    'widget-workshop': {'id': 'Widget-Workshop', 'deprecated': False},
+    'wsuipa': {'id': 'Wsuipa', 'deprecated': False},
+    'wtfpl': {'id': 'WTFPL', 'deprecated': False},
+    'wxwindows': {'id': 'wxWindows', 'deprecated': True},
+    'x11': {'id': 'X11', 'deprecated': False},
+    'x11-distribute-modifications-variant': {'id': 'X11-distribute-modifications-variant', 'deprecated': False},
+    'x11-swapped': {'id': 'X11-swapped', 'deprecated': False},
+    'xdebug-1.03': {'id': 'Xdebug-1.03', 'deprecated': False},
+    'xerox': {'id': 'Xerox', 'deprecated': False},
+    'xfig': {'id': 'Xfig', 'deprecated': False},
+    'xfree86-1.1': {'id': 'XFree86-1.1', 'deprecated': False},
+    'xinetd': {'id': 'xinetd', 'deprecated': False},
+    'xkeyboard-config-zinoviev': {'id': 'xkeyboard-config-Zinoviev', 'deprecated': False},
+    'xlock': {'id': 'xlock', 'deprecated': False},
+    'xnet': {'id': 'Xnet', 'deprecated': False},
+    'xpp': {'id': 'xpp', 'deprecated': False},
+    'xskat': {'id': 'XSkat', 'deprecated': False},
+    'xzoom': {'id': 'xzoom', 'deprecated': False},
+    'ypl-1.0': {'id': 'YPL-1.0', 'deprecated': False},
+    'ypl-1.1': {'id': 'YPL-1.1', 'deprecated': False},
+    'zed': {'id': 'Zed', 'deprecated': False},
+    'zeeff': {'id': 'Zeeff', 'deprecated': False},
+    'zend-2.0': {'id': 'Zend-2.0', 'deprecated': False},
+    'zimbra-1.3': {'id': 'Zimbra-1.3', 'deprecated': False},
+    'zimbra-1.4': {'id': 'Zimbra-1.4', 'deprecated': False},
+    'zlib': {'id': 'Zlib', 'deprecated': False},
+    'zlib-acknowledgement': {'id': 'zlib-acknowledgement', 'deprecated': False},
+    'zpl-1.1': {'id': 'ZPL-1.1', 'deprecated': False},
+    'zpl-2.0': {'id': 'ZPL-2.0', 'deprecated': False},
+    'zpl-2.1': {'id': 'ZPL-2.1', 'deprecated': False},
+}
+
+# Mapping from lowercase-normalized SPDX license-exception identifiers to their
+# canonical SPDX id and deprecation flag, enabling case-insensitive lookup.
+EXCEPTIONS: dict[str, SPDXException] = {
+    '389-exception': {'id': '389-exception', 'deprecated': False},
+    'asterisk-exception': {'id': 'Asterisk-exception', 'deprecated': False},
+    'asterisk-linking-protocols-exception': {'id': 'Asterisk-linking-protocols-exception', 'deprecated': False},
+    'autoconf-exception-2.0': {'id': 'Autoconf-exception-2.0', 'deprecated': False},
+    'autoconf-exception-3.0': {'id': 'Autoconf-exception-3.0', 'deprecated': False},
+    'autoconf-exception-generic': {'id': 'Autoconf-exception-generic', 'deprecated': False},
+    'autoconf-exception-generic-3.0': {'id': 'Autoconf-exception-generic-3.0', 'deprecated': False},
+    'autoconf-exception-macro': {'id': 'Autoconf-exception-macro', 'deprecated': False},
+    'bison-exception-1.24': {'id': 'Bison-exception-1.24', 'deprecated': False},
+    'bison-exception-2.2': {'id': 'Bison-exception-2.2', 'deprecated': False},
+    'bootloader-exception': {'id': 'Bootloader-exception', 'deprecated': False},
+    'classpath-exception-2.0': {'id': 'Classpath-exception-2.0', 'deprecated': False},
+    'clisp-exception-2.0': {'id': 'CLISP-exception-2.0', 'deprecated': False},
+    'cryptsetup-openssl-exception': {'id': 'cryptsetup-OpenSSL-exception', 'deprecated': False},
+    'digirule-foss-exception': {'id': 'DigiRule-FOSS-exception', 'deprecated': False},
+    'ecos-exception-2.0': {'id': 'eCos-exception-2.0', 'deprecated': False},
+    'erlang-otp-linking-exception': {'id': 'erlang-otp-linking-exception', 'deprecated': False},
+    'fawkes-runtime-exception': {'id': 'Fawkes-Runtime-exception', 'deprecated': False},
+    'fltk-exception': {'id': 'FLTK-exception', 'deprecated': False},
+    'fmt-exception': {'id': 'fmt-exception', 'deprecated': False},
+    'font-exception-2.0': {'id': 'Font-exception-2.0', 'deprecated': False},
+    'freertos-exception-2.0': {'id': 'freertos-exception-2.0', 'deprecated': False},
+    'gcc-exception-2.0': {'id': 'GCC-exception-2.0', 'deprecated': False},
+    'gcc-exception-2.0-note': {'id': 'GCC-exception-2.0-note', 'deprecated': False},
+    'gcc-exception-3.1': {'id': 'GCC-exception-3.1', 'deprecated': False},
+    'gmsh-exception': {'id': 'Gmsh-exception', 'deprecated': False},
+    'gnat-exception': {'id': 'GNAT-exception', 'deprecated': False},
+    'gnome-examples-exception': {'id': 'GNOME-examples-exception', 'deprecated': False},
+    'gnu-compiler-exception': {'id': 'GNU-compiler-exception', 'deprecated': False},
+    'gnu-javamail-exception': {'id': 'gnu-javamail-exception', 'deprecated': False},
+    'gpl-3.0-interface-exception': {'id': 'GPL-3.0-interface-exception', 'deprecated': False},
+    'gpl-3.0-linking-exception': {'id': 'GPL-3.0-linking-exception', 'deprecated': False},
+    'gpl-3.0-linking-source-exception': {'id': 'GPL-3.0-linking-source-exception', 'deprecated': False},
+    'gpl-cc-1.0': {'id': 'GPL-CC-1.0', 'deprecated': False},
+    'gstreamer-exception-2005': {'id': 'GStreamer-exception-2005', 'deprecated': False},
+    'gstreamer-exception-2008': {'id': 'GStreamer-exception-2008', 'deprecated': False},
+    'i2p-gpl-java-exception': {'id': 'i2p-gpl-java-exception', 'deprecated': False},
+    'kicad-libraries-exception': {'id': 'KiCad-libraries-exception', 'deprecated': False},
+    'lgpl-3.0-linking-exception': {'id': 'LGPL-3.0-linking-exception', 'deprecated': False},
+    'libpri-openh323-exception': {'id': 'libpri-OpenH323-exception', 'deprecated': False},
+    'libtool-exception': {'id': 'Libtool-exception', 'deprecated': False},
+    'linux-syscall-note': {'id': 'Linux-syscall-note', 'deprecated': False},
+    'llgpl': {'id': 'LLGPL', 'deprecated': False},
+    'llvm-exception': {'id': 'LLVM-exception', 'deprecated': False},
+    'lzma-exception': {'id': 'LZMA-exception', 'deprecated': False},
+    'mif-exception': {'id': 'mif-exception', 'deprecated': False},
+    'nokia-qt-exception-1.1': {'id': 'Nokia-Qt-exception-1.1', 'deprecated': True},
+    'ocaml-lgpl-linking-exception': {'id': 'OCaml-LGPL-linking-exception', 'deprecated': False},
+    'occt-exception-1.0': {'id': 'OCCT-exception-1.0', 'deprecated': False},
+    'openjdk-assembly-exception-1.0': {'id': 'OpenJDK-assembly-exception-1.0', 'deprecated': False},
+    'openvpn-openssl-exception': {'id': 'openvpn-openssl-exception', 'deprecated': False},
+    'pcre2-exception': {'id': 'PCRE2-exception', 'deprecated': False},
+    'ps-or-pdf-font-exception-20170817': {'id': 'PS-or-PDF-font-exception-20170817', 'deprecated': False},
+    'qpl-1.0-inria-2004-exception': {'id': 'QPL-1.0-INRIA-2004-exception', 'deprecated': False},
+    'qt-gpl-exception-1.0': {'id': 'Qt-GPL-exception-1.0', 'deprecated': False},
+    'qt-lgpl-exception-1.1': {'id': 'Qt-LGPL-exception-1.1', 'deprecated': False},
+    'qwt-exception-1.0': {'id': 'Qwt-exception-1.0', 'deprecated': False},
+    'romic-exception': {'id': 'romic-exception', 'deprecated': False},
+    'rrdtool-floss-exception-2.0': {'id': 'RRDtool-FLOSS-exception-2.0', 'deprecated': False},
+    'sane-exception': {'id': 'SANE-exception', 'deprecated': False},
+    'shl-2.0': {'id': 'SHL-2.0', 'deprecated': False},
+    'shl-2.1': {'id': 'SHL-2.1', 'deprecated': False},
+    'stunnel-exception': {'id': 'stunnel-exception', 'deprecated': False},
+    'swi-exception': {'id': 'SWI-exception', 'deprecated': False},
+    'swift-exception': {'id': 'Swift-exception', 'deprecated': False},
+    'texinfo-exception': {'id': 'Texinfo-exception', 'deprecated': False},
+    'u-boot-exception-2.0': {'id': 'u-boot-exception-2.0', 'deprecated': False},
+    'ubdl-exception': {'id': 'UBDL-exception', 'deprecated': False},
+    'universal-foss-exception-1.0': {'id': 'Universal-FOSS-exception-1.0', 'deprecated': False},
+    'vsftpd-openssl-exception': {'id': 'vsftpd-openssl-exception', 'deprecated': False},
+    'wxwindows-exception-3.1': {'id': 'WxWindows-exception-3.1', 'deprecated': False},
+    'x11vnc-openssl-exception': {'id': 'x11vnc-openssl-exception', 'deprecated': False},
+}
diff --git a/.venv/Lib/site-packages/packaging/markers.py b/.venv/Lib/site-packages/packaging/markers.py
new file mode 100644
index 0000000000000000000000000000000000000000..fb7f49cf8cd43ffae71e3e8d15174d7536f9da02
--- /dev/null
+++ b/.venv/Lib/site-packages/packaging/markers.py
@@ -0,0 +1,331 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import operator
+import os
+import platform
+import sys
+from typing import Any, Callable, TypedDict, cast
+
+from ._parser import MarkerAtom, MarkerList, Op, Value, Variable
+from ._parser import parse_marker as _parse_marker
+from ._tokenizer import ParserSyntaxError
+from .specifiers import InvalidSpecifier, Specifier
+from .utils import canonicalize_name
+
+# Public API of this module.
+__all__ = [
+    "InvalidMarker",
+    "Marker",
+    "UndefinedComparison",
+    "UndefinedEnvironmentName",
+    "default_environment",
+]
+
+# Signature of the fallback string-comparison callables stored in ``_operators``.
+Operator = Callable[[str, str], bool]
+
+
+# Public exception types raised while parsing or evaluating markers. All are
+# ValueError subclasses so existing callers catching ValueError still work.
+class InvalidMarker(ValueError):
+    """
+    An invalid marker was found, users should refer to PEP 508.
+    """
+
+
+class UndefinedComparison(ValueError):
+    """
+    An invalid operation was attempted on a value that doesn't support it.
+    """
+
+
+class UndefinedEnvironmentName(ValueError):
+    """
+    A name was attempted to be used that does not exist inside of the
+    environment.
+    """
+
+
+class Environment(TypedDict):
+    """Typed description of a marker evaluation environment.
+
+    Keys mirror the PEP 508 environment marker variables (with dots replaced
+    by underscores); ``default_environment()`` produces such a dict for the
+    running interpreter.
+    """
+
+    implementation_name: str
+    """The implementation's identifier, e.g. ``'cpython'``."""
+
+    implementation_version: str
+    """
+    The implementation's version, e.g. ``'3.13.0a2'`` for CPython 3.13.0a2, or
+    ``'7.3.13'`` for PyPy3.10 v7.3.13.
+    """
+
+    os_name: str
+    """
+    The value of :py:data:`os.name`. The name of the operating system dependent module
+    imported, e.g. ``'posix'``.
+    """
+
+    platform_machine: str
+    """
+    Returns the machine type, e.g. ``'i386'``.
+
+    An empty string if the value cannot be determined.
+    """
+
+    platform_release: str
+    """
+    The system's release, e.g. ``'2.2.0'`` or ``'NT'``.
+
+    An empty string if the value cannot be determined.
+    """
+
+    platform_system: str
+    """
+    The system/OS name, e.g. ``'Linux'``, ``'Windows'`` or ``'Java'``.
+
+    An empty string if the value cannot be determined.
+    """
+
+    platform_version: str
+    """
+    The system's release version, e.g. ``'#3 on degas'``.
+
+    An empty string if the value cannot be determined.
+    """
+
+    python_full_version: str
+    """
+    The Python version as string ``'major.minor.patchlevel'``.
+
+    Note that unlike the Python :py:data:`sys.version`, this value will always include
+    the patchlevel (it defaults to 0).
+    """
+
+    platform_python_implementation: str
+    """
+    A string identifying the Python implementation, e.g. ``'CPython'``.
+    """
+
+    python_version: str
+    """The Python version as string ``'major.minor'``."""
+
+    sys_platform: str
+    """
+    This string contains a platform identifier that can be used to append
+    platform-specific components to :py:data:`sys.path`, for instance.
+
+    For Unix systems, except on Linux and AIX, this is the lowercased OS name as
+    returned by ``uname -s`` with the first part of the version as returned by
+    ``uname -r`` appended, e.g. ``'sunos5'`` or ``'freebsd8'``, at the time when Python
+    was built.
+    """
+
+
+def _normalize_extra_values(results: Any) -> Any:
+    """
+    Normalize extra values.
+
+    If the first parsed expression compares the ``extra`` variable against a
+    literal, the literal is replaced with its ``canonicalize_name`` form so
+    later comparisons are normalization-insensitive. ``results`` is mutated
+    in place and also returned.
+    """
+    if isinstance(results[0], tuple):
+        lhs, op, rhs = results[0]
+        if isinstance(lhs, Variable) and lhs.value == "extra":
+            normalized_extra = canonicalize_name(rhs.value)
+            rhs = Value(normalized_extra)
+        elif isinstance(rhs, Variable) and rhs.value == "extra":
+            normalized_extra = canonicalize_name(lhs.value)
+            lhs = Value(normalized_extra)
+        results[0] = lhs, op, rhs
+    return results
+
+
+def _format_marker(
+    marker: list[str] | MarkerAtom | str, first: bool | None = True
+) -> str:
+    """Recursively serialize a parsed marker structure back into a string.
+
+    Nested sub-expressions are parenthesized except at the top level
+    (``first=True``); tuples (atoms) and bare strings are serialized as-is.
+    """
+    assert isinstance(marker, (list, tuple, str))
+
+    # Sometimes we have a structure like [[...]] which is a single item list
+    # where the single item is itself it's own list. In that case we want skip
+    # the rest of this function so that we don't get extraneous () on the
+    # outside.
+    if (
+        isinstance(marker, list)
+        and len(marker) == 1
+        and isinstance(marker[0], (list, tuple))
+    ):
+        return _format_marker(marker[0])
+
+    if isinstance(marker, list):
+        inner = (_format_marker(m, first=False) for m in marker)
+        if first:
+            return " ".join(inner)
+        else:
+            return "(" + " ".join(inner) + ")"
+    elif isinstance(marker, tuple):
+        return " ".join([m.serialize() for m in marker])
+    else:
+        return marker
+
+
+# Fallback plain-string comparison operators used by ``_eval_op`` when the
+# right-hand side is not a valid version specifier. Keys match Op.serialize().
+_operators: dict[str, Operator] = {
+    "in": lambda lhs, rhs: lhs in rhs,
+    "not in": lambda lhs, rhs: lhs not in rhs,
+    "<": operator.lt,
+    "<=": operator.le,
+    "==": operator.eq,
+    "!=": operator.ne,
+    ">=": operator.ge,
+    ">": operator.gt,
+}
+
+
+def _eval_op(lhs: str, op: Op, rhs: str) -> bool:
+    """Evaluate ``lhs <op> rhs`` for a single marker expression.
+
+    A version-specifier comparison is tried first (``Specifier`` with
+    prereleases allowed); if ``op + rhs`` is not a valid specifier, fall back
+    to the plain string operators in ``_operators``.
+
+    :raises UndefinedComparison: if the operator has no string fallback.
+    """
+    try:
+        spec = Specifier("".join([op.serialize(), rhs]))
+    except InvalidSpecifier:
+        pass
+    else:
+        return spec.contains(lhs, prereleases=True)
+
+    oper: Operator | None = _operators.get(op.serialize())
+    if oper is None:
+        raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.")
+
+    return oper(lhs, rhs)
+
+
+def _normalize(*values: str, key: str) -> tuple[str, ...]:
+    """Normalize comparison operands for the environment variable ``key``.
+
+    Only ``extra`` values are canonicalized; all other markers are returned
+    unchanged.
+    """
+    # PEP 685 – Comparison of extra names for optional distribution dependencies
+    # https://peps.python.org/pep-0685/
+    # > When comparing extra names, tools MUST normalize the names being
+    # > compared using the semantics outlined in PEP 503 for names
+    if key == "extra":
+        return tuple(canonicalize_name(v) for v in values)
+
+    # other environment markers don't have such standards
+    return values
+
+
+def _evaluate_markers(markers: MarkerList, environment: dict[str, str]) -> bool:
+    """Evaluate a parsed marker list against ``environment``.
+
+    ``groups`` accumulates one list of booleans per "or"-separated clause;
+    the final result is the disjunction (any) of each clause's conjunction
+    (all). Nested lists are evaluated recursively.
+    """
+    groups: list[list[bool]] = [[]]
+
+    for marker in markers:
+        assert isinstance(marker, (list, tuple, str))
+
+        if isinstance(marker, list):
+            groups[-1].append(_evaluate_markers(marker, environment))
+        elif isinstance(marker, tuple):
+            lhs, op, rhs = marker
+
+            # Exactly one side of an atom is a Variable; look its value up in
+            # the environment and compare against the literal side.
+            if isinstance(lhs, Variable):
+                environment_key = lhs.value
+                lhs_value = environment[environment_key]
+                rhs_value = rhs.value
+            else:
+                lhs_value = lhs.value
+                environment_key = rhs.value
+                rhs_value = environment[environment_key]
+
+            lhs_value, rhs_value = _normalize(lhs_value, rhs_value, key=environment_key)
+            groups[-1].append(_eval_op(lhs_value, op, rhs_value))
+        else:
+            assert marker in ["and", "or"]
+            if marker == "or":
+                groups.append([])
+
+    return any(all(item) for item in groups)
+
+
+def format_full_version(info: sys._version_info) -> str:
+    """Render a ``version_info``-like structure as a version string.
+
+    E.g. ``'3.13.0'`` for a final release, or ``'3.13.0a2'`` when the release
+    level is not ``'final'`` (first letter of the level plus the serial).
+    """
+    version = f"{info.major}.{info.minor}.{info.micro}"
+    kind = info.releaselevel
+    if kind != "final":
+        version += kind[0] + str(info.serial)
+    return version
+
+
+def default_environment() -> Environment:
+    """Return the marker environment describing the current Python process."""
+    iver = format_full_version(sys.implementation.version)
+    implementation_name = sys.implementation.name
+    return {
+        "implementation_name": implementation_name,
+        "implementation_version": iver,
+        "os_name": os.name,
+        "platform_machine": platform.machine(),
+        "platform_release": platform.release(),
+        "platform_system": platform.system(),
+        "platform_version": platform.version(),
+        "python_full_version": platform.python_version(),
+        "platform_python_implementation": platform.python_implementation(),
+        "python_version": ".".join(platform.python_version_tuple()[:2]),
+        "sys_platform": sys.platform,
+    }
+
+
+class Marker:
+    """A parsed PEP 508 environment marker.
+
+    Instances compare and hash by their string form; ``evaluate()`` tests the
+    marker against an environment dict.
+
+    :raises InvalidMarker: if the given string cannot be parsed as a marker.
+    """
+
+    def __init__(self, marker: str) -> None:
+        # Note: We create a Marker object without calling this constructor in
+        #       packaging.requirements.Requirement. If any additional logic is
+        #       added here, make sure to mirror/adapt Requirement.
+        try:
+            self._markers = _normalize_extra_values(_parse_marker(marker))
+            # The attribute `_markers` can be described in terms of a recursive type:
+            # MarkerList = List[Union[Tuple[Node, ...], str, MarkerList]]
+            #
+            # For example, the following expression:
+            # python_version > "3.6" or (python_version == "3.6" and os_name == "unix")
+            #
+            # is parsed into:
+            # [
+            #     (<Variable('python_version')>, <Op('>')>, <Value('3.6')>),
+            #     'and',
+            #     [
+            #         (<Variable('python_version')>, <Op('==')>, <Value('3.6')>),
+            #         'or',
+            #         (<Variable('os_name')>, <Op('==')>, <Value('unix')>)
+            #     ]
+            # ]
+        except ParserSyntaxError as e:
+            raise InvalidMarker(str(e)) from e
+
+    def __str__(self) -> str:
+        return _format_marker(self._markers)
+
+    def __repr__(self) -> str:
+        return f"<Marker('{self}')>"
+
+    def __hash__(self) -> int:
+        return hash((self.__class__.__name__, str(self)))
+
+    def __eq__(self, other: Any) -> bool:
+        if not isinstance(other, Marker):
+            return NotImplemented
+
+        return str(self) == str(other)
+
+    def evaluate(self, environment: dict[str, str] | None = None) -> bool:
+        """Evaluate a marker.
+
+        Return the boolean from evaluating the given marker against the
+        environment. environment is an optional argument to override all or
+        part of the determined environment.
+
+        The environment is determined from the current Python process.
+        """
+        current_environment = cast("dict[str, str]", default_environment())
+        # "extra" defaults to the empty string so markers like
+        # 'extra == "foo"' evaluate to False outside an extras context.
+        current_environment["extra"] = ""
+        if environment is not None:
+            current_environment.update(environment)
+            # The API used to allow setting extra to None. We need to handle this
+            # case for backwards compatibility.
+            if current_environment["extra"] is None:
+                current_environment["extra"] = ""
+
+        return _evaluate_markers(
+            self._markers, _repair_python_full_version(current_environment)
+        )
+        )
+
+
+def _repair_python_full_version(env: dict[str, str]) -> dict[str, str]:
+    """
+    Work around platform.python_version() returning something that is not PEP 440
+    compliant for non-tagged Python builds.
+
+    ``env`` is mutated in place and returned for convenience.
+    """
+    # A trailing "+" (e.g. "3.13.0+") is an empty local-version label, which
+    # PEP 440 forbids; append "local" to make the version parseable.
+    if env["python_full_version"].endswith("+"):
+        env["python_full_version"] += "local"
+    return env
diff --git a/.venv/Lib/site-packages/packaging/metadata.py b/.venv/Lib/site-packages/packaging/metadata.py
new file mode 100644
index 0000000000000000000000000000000000000000..721f411cfc44f6d24c13112e4246b5ad776a5e0b
--- /dev/null
+++ b/.venv/Lib/site-packages/packaging/metadata.py
@@ -0,0 +1,863 @@
+from __future__ import annotations
+
+import email.feedparser
+import email.header
+import email.message
+import email.parser
+import email.policy
+import pathlib
+import sys
+import typing
+from typing import (
+    Any,
+    Callable,
+    Generic,
+    Literal,
+    TypedDict,
+    cast,
+)
+
+from . import licenses, requirements, specifiers, utils
+from . import version as version_module
+from .licenses import NormalizedLicenseExpression
+
+T = typing.TypeVar("T")
+
+
+if sys.version_info >= (3, 11):  # pragma: no cover
+    # Re-export the builtin so the module always exposes the same name.
+    ExceptionGroup = ExceptionGroup
+else:  # pragma: no cover
+
+    class ExceptionGroup(Exception):
+        """A minimal implementation of :external:exc:`ExceptionGroup` from Python 3.11.
+
+        If :external:exc:`ExceptionGroup` is already defined by Python itself,
+        that version is used instead.
+        """
+
+        # Human-readable summary of the grouped failures.
+        message: str
+        # The individual exceptions collected into this group.
+        exceptions: list[Exception]
+
+        def __init__(self, message: str, exceptions: list[Exception]) -> None:
+            self.message = message
+            self.exceptions = exceptions
+
+        def __repr__(self) -> str:
+            return f"{self.__class__.__name__}({self.message!r}, {self.exceptions!r})"
+
+
+class InvalidMetadata(ValueError):
+    """A metadata field contains invalid data."""
+
+    field: str
+    """The name of the field that contains invalid data."""
+
+    def __init__(self, field: str, message: str) -> None:
+        """Record the offending *field* and pass *message* to ValueError."""
+        self.field = field
+        super().__init__(message)
+
+
+# The RawMetadata class attempts to make as few assumptions about the underlying
+# serialization formats as possible. The idea is that as long as a serialization
+# formats offer some very basic primitives in *some* way then we can support
+# serializing to and from that format.
+# The RawMetadata class attempts to make as few assumptions about the underlying
+# serialization formats as possible. The idea is that as long as a serialization
+# formats offer some very basic primitives in *some* way then we can support
+# serializing to and from that format.
+class RawMetadata(TypedDict, total=False):
+    """A dictionary of raw core metadata.
+
+    Each field in core metadata maps to a key of this dictionary (when data is
+    provided). The key is lower-case and underscores are used instead of dashes
+    compared to the equivalent core metadata field. Any core metadata field that
+    can be specified multiple times or can hold multiple values in a single
+    field have a key with a plural name. See :class:`Metadata` whose attributes
+    match the keys of this dictionary.
+
+    Core metadata fields that can be specified multiple times are stored as a
+    list or dict depending on which is appropriate for the field. Any fields
+    which hold multiple values in a single field are stored as a list.
+
+    Fields that cannot be parsed into these shapes are reported separately by
+    :func:`parse_email` rather than stored here.
+    """
+
+    # Metadata 1.0 - PEP 241
+    metadata_version: str
+    name: str
+    version: str
+    platforms: list[str]
+    summary: str
+    description: str
+    keywords: list[str]
+    home_page: str
+    author: str
+    author_email: str
+    license: str
+
+    # Metadata 1.1 - PEP 314
+    supported_platforms: list[str]
+    download_url: str
+    classifiers: list[str]
+    requires: list[str]
+    provides: list[str]
+    obsoletes: list[str]
+
+    # Metadata 1.2 - PEP 345
+    maintainer: str
+    maintainer_email: str
+    requires_dist: list[str]
+    provides_dist: list[str]
+    obsoletes_dist: list[str]
+    requires_python: str
+    requires_external: list[str]
+    project_urls: dict[str, str]
+
+    # Metadata 2.0
+    # PEP 426 attempted to completely revamp the metadata format
+    # but got stuck without ever being able to build consensus on
+    # it and ultimately ended up withdrawn.
+    #
+    # However, a number of tools had started emitting METADATA with
+    # `2.0` Metadata-Version, so for historical reasons, this version
+    # was skipped.
+
+    # Metadata 2.1 - PEP 566
+    description_content_type: str
+    provides_extra: list[str]
+
+    # Metadata 2.2 - PEP 643
+    dynamic: list[str]
+
+    # Metadata 2.3 - PEP 685
+    # No new fields were added in PEP 685, just some edge case were
+    # tightened up to provide better interoptability.
+
+    # Metadata 2.4 - PEP 639
+    license_expression: str
+    license_files: list[str]
+
+
+# RawMetadata keys that hold a single string value.
+_STRING_FIELDS = {
+    "author",
+    "author_email",
+    "description",
+    "description_content_type",
+    "download_url",
+    "home_page",
+    "license",
+    "license_expression",
+    "maintainer",
+    "maintainer_email",
+    "metadata_version",
+    "name",
+    "requires_python",
+    "summary",
+    "version",
+}
+
+# RawMetadata keys that may repeat and are stored as list[str].
+_LIST_FIELDS = {
+    "classifiers",
+    "dynamic",
+    "license_files",
+    "obsoletes",
+    "obsoletes_dist",
+    "platforms",
+    "provides",
+    "provides_dist",
+    "provides_extra",
+    "requires",
+    "requires_dist",
+    "requires_external",
+    "supported_platforms",
+}
+
+# RawMetadata keys stored as a mapping.
+_DICT_FIELDS = {
+    "project_urls",
+}
+
+
+def _parse_keywords(data: str) -> list[str]:
+    """Split a string of comma-separated keywords into a list of keywords.
+
+    Surrounding whitespace is stripped from each keyword.
+    """
+    return [k.strip() for k in data.split(",")]
+
+
+def _parse_project_urls(data: list[str]) -> dict[str, str]:
+    """Parse a list of label/URL string pairings separated by a comma.
+
+    A pair without a comma yields an empty-string URL for that label.
+
+    :raises KeyError: if the same label occurs more than once; the caller
+        treats the whole field as unparseable in that case.
+    """
+    urls = {}
+    for pair in data:
+        # Our logic is slightly tricky here as we want to try and do
+        # *something* reasonable with malformed data.
+        #
+        # The main thing that we have to worry about, is data that does
+        # not have a ',' at all to split the label from the Value. There
+        # isn't a singular right answer here, and we will fail validation
+        # later on (if the caller is validating) so it doesn't *really*
+        # matter, but since the missing value has to be an empty str
+        # and our return value is dict[str, str], if we let the key
+        # be the missing value, then they'd have multiple '' values that
+        # overwrite each other in a accumulating dict.
+        #
+        # The other potentional issue is that it's possible to have the
+        # same label multiple times in the metadata, with no solid "right"
+        # answer with what to do in that case. As such, we'll do the only
+        # thing we can, which is treat the field as unparseable and add it
+        # to our list of unparsed fields.
+        parts = [p.strip() for p in pair.split(",", 1)]
+        parts.extend([""] * (max(0, 2 - len(parts))))  # Ensure 2 items
+
+        # TODO: The spec doesn't say anything about if the keys should be
+        #       considered case sensitive or not... logically they should
+        #       be case-preserving and case-insensitive, but doing that
+        #       would open up more cases where we might have duplicate
+        #       entries.
+        label, url = parts
+        if label in urls:
+            # The label already exists in our set of urls, so this field
+            # is unparseable, and we can just add the whole thing to our
+            # unparseable data and stop processing it.
+            raise KeyError("duplicate labels in project urls")
+        urls[label] = url
+
+    return urls
+
+
+def _get_payload(msg: email.message.Message, source: bytes | str) -> str:
+    """Get the body of the message.
+
+    :raises ValueError: if a ``bytes`` payload is not valid UTF-8.
+    """
+    # If our source is a str, then our caller has managed encodings for us,
+    # and we don't need to deal with it.
+    if isinstance(source, str):
+        payload = msg.get_payload()
+        assert isinstance(payload, str)
+        return payload
+    # If our source is a bytes, then we're managing the encoding and we need
+    # to deal with it.
+    else:
+        bpayload = msg.get_payload(decode=True)
+        assert isinstance(bpayload, bytes)
+        try:
+            return bpayload.decode("utf8", "strict")
+        except UnicodeDecodeError as exc:
+            raise ValueError("payload in an invalid encoding") from exc
+
+
+# The various parse_FORMAT functions here are intended to be as lenient as
+# possible in their parsing, while still returning a correctly typed
+# RawMetadata.
+#
+# To aid in this, we also generally want to do as little touching of the
+# data as possible, except where there are possibly some historic holdovers
+# that make valid data awkward to work with.
+#
+# While this is a lower level, intermediate format than our ``Metadata``
+# class, some light touch ups can make a massive difference in usability.
+
+# Map METADATA fields to RawMetadata.
+_EMAIL_TO_RAW_MAPPING = {
+    "author": "author",
+    "author-email": "author_email",
+    "classifier": "classifiers",
+    "description": "description",
+    "description-content-type": "description_content_type",
+    "download-url": "download_url",
+    "dynamic": "dynamic",
+    "home-page": "home_page",
+    "keywords": "keywords",
+    "license": "license",
+    "license-expression": "license_expression",
+    "license-file": "license_files",
+    "maintainer": "maintainer",
+    "maintainer-email": "maintainer_email",
+    "metadata-version": "metadata_version",
+    "name": "name",
+    "obsoletes": "obsoletes",
+    "obsoletes-dist": "obsoletes_dist",
+    "platform": "platforms",
+    "project-url": "project_urls",
+    "provides": "provides",
+    "provides-dist": "provides_dist",
+    "provides-extra": "provides_extra",
+    "requires": "requires",
+    "requires-dist": "requires_dist",
+    "requires-external": "requires_external",
+    "requires-python": "requires_python",
+    "summary": "summary",
+    "supported-platform": "supported_platforms",
+    "version": "version",
+}
+# Inverse mapping (RawMetadata key -> lowercase email header name).
+_RAW_TO_EMAIL_MAPPING = {raw: email for email, raw in _EMAIL_TO_RAW_MAPPING.items()}
+
+
+def parse_email(data: bytes | str) -> tuple[RawMetadata, dict[str, list[str]]]:
+    """Parse a distribution's metadata stored as email headers (e.g. from ``METADATA``).
+
+    This function returns a two-item tuple of dicts. The first dict is of
+    recognized fields from the core metadata specification. Fields that can be
+    parsed and translated into Python's built-in types are converted
+    appropriately. All other fields are left as-is. Fields that are allowed to
+    appear multiple times are stored as lists.
+
+    The second dict contains all other fields from the metadata. This includes
+    any unrecognized fields. It also includes any fields which are expected to
+    be parsed into a built-in type but were not formatted appropriately. Finally,
+    any fields that are expected to appear only once but are repeated are
+    included in this dict.
+
+    """
+    raw: dict[str, str | list[str] | dict[str, str]] = {}
+    unparsed: dict[str, list[str]] = {}
+
+    if isinstance(data, str):
+        parsed = email.parser.Parser(policy=email.policy.compat32).parsestr(data)
+    else:
+        parsed = email.parser.BytesParser(policy=email.policy.compat32).parsebytes(data)
+
+    # We have to wrap parsed.keys() in a set, because in the case of multiple
+    # values for a key (a list), the key will appear multiple times in the
+    # list of keys, but we're avoiding that by using get_all().
+    for name in frozenset(parsed.keys()):
+        # Header names in RFC are case insensitive, so we'll normalize to all
+        # lower case to make comparisons easier.
+        name = name.lower()
+
+        # We use get_all() here, even for fields that aren't multiple use,
+        # because otherwise someone could have e.g. two Name fields, and we
+        # would just silently ignore it rather than doing something about it.
+        headers = parsed.get_all(name) or []
+
+        # The way the email module works when parsing bytes is that it
+        # unconditionally decodes the bytes as ascii using the surrogateescape
+        # handler. When you pull that data back out (such as with get_all() ),
+        # it looks to see if the str has any surrogate escapes, and if it does
+        # it wraps it in a Header object instead of returning the string.
+        #
+        # As such, we'll look for those Header objects, and fix up the encoding.
+        value = []
+        # Flag if we have run into any issues processing the headers, thus
+        # signalling that the data belongs in 'unparsed'.
+        valid_encoding = True
+        for h in headers:
+            # It's unclear if this can return more types than just a Header or
+            # a str, so we'll just assert here to make sure.
+            assert isinstance(h, (email.header.Header, str))
+
+            # If it's a header object, we need to do our little dance to get
+            # the real data out of it. In cases where there is invalid data
+            # we're going to end up with mojibake, but there's no obvious, good
+            # way around that without reimplementing parts of the Header object
+            # ourselves.
+            #
+            # That should be fine since, if mojibacked happens, this key is
+            # going into the unparsed dict anyways.
+            if isinstance(h, email.header.Header):
+                # The Header object stores it's data as chunks, and each chunk
+                # can be independently encoded, so we'll need to check each
+                # of them.
+                chunks: list[tuple[bytes, str | None]] = []
+                for bin, encoding in email.header.decode_header(h):
+                    try:
+                        bin.decode("utf8", "strict")
+                    except UnicodeDecodeError:
+                        # Enable mojibake.
+                        encoding = "latin1"
+                        valid_encoding = False
+                    else:
+                        encoding = "utf8"
+                    chunks.append((bin, encoding))
+
+                # Turn our chunks back into a Header object, then let that
+                # Header object do the right thing to turn them into a
+                # string for us.
+                value.append(str(email.header.make_header(chunks)))
+            # This is already a string, so just add it.
+            else:
+                value.append(h)
+
+        # We've processed all of our values to get them into a list of str,
+        # but we may have mojibake data, in which case this is an unparsed
+        # field.
+        if not valid_encoding:
+            unparsed[name] = value
+            continue
+
+        raw_name = _EMAIL_TO_RAW_MAPPING.get(name)
+        if raw_name is None:
+            # This is a bit of a weird situation, we've encountered a key that
+            # we don't know what it means, so we don't know whether it's meant
+            # to be a list or not.
+            #
+            # Since we can't really tell one way or another, we'll just leave it
+            # as a list, even though it may be a single item list, because that's
+            # what makes the most sense for email headers.
+            unparsed[name] = value
+            continue
+
+        # If this is one of our string fields, then we'll check to see if our
+        # value is a list of a single item. If it is then we'll assume that
+        # it was emitted as a single string, and unwrap the str from inside
+        # the list.
+        #
+        # If it's any other kind of data, then we haven't the faintest clue
+        # what we should parse it as, and we have to just add it to our list
+        # of unparsed stuff.
+        if raw_name in _STRING_FIELDS and len(value) == 1:
+            raw[raw_name] = value[0]
+        # If this is one of our list of string fields, then we can just assign
+        # the value, since email *only* has strings, and our get_all() call
+        # above ensures that this is a list.
+        elif raw_name in _LIST_FIELDS:
+            raw[raw_name] = value
+        # Special Case: Keywords
+        # The keywords field is implemented in the metadata spec as a str,
+        # but it conceptually is a list of strings, and is serialized using
+        # ", ".join(keywords), so we'll do some light data massaging to turn
+        # this into what it logically is.
+        elif raw_name == "keywords" and len(value) == 1:
+            raw[raw_name] = _parse_keywords(value[0])
+        # Special Case: Project-URL
+        # The project urls is implemented in the metadata spec as a list of
+        # specially-formatted strings that represent a key and a value, which
+        # is fundamentally a mapping, however the email format doesn't support
+        # mappings in a sane way, so it was crammed into a list of strings
+        # instead.
+        #
+        # We will do a little light data massaging to turn this into a map as
+        # it logically should be.
+        elif raw_name == "project_urls":
+            try:
+                raw[raw_name] = _parse_project_urls(value)
+            except KeyError:
+                unparsed[name] = value
+        # Nothing that we've done has managed to parse this, so it'll just
+        # throw it in our unparseable data and move on.
+        else:
+            unparsed[name] = value
+
+    # We need to support getting the Description from the message payload in
+    # addition to getting it from the the headers. This does mean, though, there
+    # is the possibility of it being set both ways, in which case we put both
+    # in 'unparsed' since we don't know which is right.
+    try:
+        payload = _get_payload(parsed, data)
+    except ValueError:
+        unparsed.setdefault("description", []).append(
+            parsed.get_payload(decode=isinstance(data, bytes))  # type: ignore[call-overload]
+        )
+    else:
+        if payload:
+            # Check to see if we've already got a description, if so then both
+            # it, and this body move to unparseable.
+            if "description" in raw:
+                description_header = cast(str, raw.pop("description"))
+                unparsed.setdefault("description", []).extend(
+                    [description_header, payload]
+                )
+            elif "description" in unparsed:
+                unparsed["description"].append(payload)
+            else:
+                raw["description"] = payload
+
+    # We need to cast our `raw` to a metadata, because a TypedDict only support
+    # literal key names, but we're computing our key names on purpose, but the
+    # way this function is implemented, our `TypedDict` can only have valid key
+    # names.
+    return cast(RawMetadata, raw), unparsed
+
+
# Sentinel object; presumably used to distinguish "value absent" from a
# legitimate None value — its consumers are not visible in this chunk.
_NOT_FOUND = object()


# Keep the two values in sync.
_VALID_METADATA_VERSIONS = ["1.0", "1.1", "1.2", "2.1", "2.2", "2.3", "2.4"]
_MetadataVersion = Literal["1.0", "1.1", "1.2", "2.1", "2.2", "2.3", "2.4"]

# Fields present in every metadata version; _Validator.__get__ runs their
# converters even when the raw value is missing so absence can be reported.
_REQUIRED_ATTRS = frozenset(["metadata_version", "name", "version"])
+
+
class _Validator(Generic[T]):
    """Validate a metadata field.

    All _process_*() methods correspond to a core metadata field. The method is
    called with the field's raw value. If the raw value is valid it is returned
    in its "enriched" form (e.g. ``version.Version`` for the ``Version`` field).
    If the raw value is invalid, :exc:`InvalidMetadata` is raised (with a cause
    as appropriate).
    """

    # Attribute name on the owning class (bound by __set_name__).
    name: str
    # Equivalent core-metadata (email header) field name.
    raw_name: str
    # Metadata version in which this field was introduced.
    added: _MetadataVersion

    def __init__(
        self,
        *,
        added: _MetadataVersion = "1.0",
    ) -> None:
        self.added = added

    def __set_name__(self, _owner: Metadata, name: str) -> None:
        self.name = name
        self.raw_name = _RAW_TO_EMAIL_MAPPING[name]

    def __get__(self, instance: Metadata, _owner: type[Metadata]) -> T:
        """Return the enriched value for this field, validating on first access."""
        # With Python 3.8, the caching can be replaced with functools.cached_property().
        # No need to check the cache as attribute lookup will resolve into the
        # instance's __dict__ before __get__ is called.
        cache = instance.__dict__
        value = instance._raw.get(self.name)

        # To make the _process_* methods easier, we'll check if the value is None
        # and if this field is NOT a required attribute, and if both of those
        # things are true, we'll skip the converter. This will mean that the
        # converters never have to deal with the None union.
        if self.name in _REQUIRED_ATTRS or value is not None:
            try:
                converter: Callable[[Any], T] = getattr(self, f"_process_{self.name}")
            except AttributeError:
                # No converter defined: the raw value is used as-is.
                pass
            else:
                value = converter(value)

        # Cache the enriched value and drop the raw entry so the two can never
        # disagree; subsequent attribute access hits the instance dict directly.
        cache[self.name] = value
        try:
            del instance._raw[self.name]  # type: ignore[misc]
        except KeyError:
            pass

        return cast(T, value)

    def _invalid_metadata(
        self, msg: str, cause: Exception | None = None
    ) -> InvalidMetadata:
        """Build (not raise) an InvalidMetadata, substituting ``{field}`` in *msg*."""
        exc = InvalidMetadata(
            self.raw_name, msg.format_map({"field": repr(self.raw_name)})
        )
        exc.__cause__ = cause
        return exc

    def _process_metadata_version(self, value: str) -> _MetadataVersion:
        # Implicitly makes Metadata-Version required.
        if value not in _VALID_METADATA_VERSIONS:
            raise self._invalid_metadata(f"{value!r} is not a valid metadata version")
        return cast(_MetadataVersion, value)

    def _process_name(self, value: str) -> str:
        if not value:
            raise self._invalid_metadata("{field} is a required field")
        # Validate the name as a side-effect.
        try:
            utils.canonicalize_name(value, validate=True)
        except utils.InvalidName as exc:
            raise self._invalid_metadata(
                f"{value!r} is invalid for {{field}}", cause=exc
            ) from exc
        else:
            # Return the original (non-canonicalized) name on purpose.
            return value

    def _process_version(self, value: str) -> version_module.Version:
        if not value:
            raise self._invalid_metadata("{field} is a required field")
        try:
            return version_module.parse(value)
        except version_module.InvalidVersion as exc:
            raise self._invalid_metadata(
                f"{value!r} is invalid for {{field}}", cause=exc
            ) from exc

    def _process_summary(self, value: str) -> str:
        """Check the field contains no newlines."""
        if "\n" in value:
            raise self._invalid_metadata("{field} must be a single line")
        return value

    def _process_description_content_type(self, value: str) -> str:
        """Validate the content type, charset, and (for Markdown) variant."""
        content_types = {"text/plain", "text/x-rst", "text/markdown"}
        message = email.message.EmailMessage()
        message["content-type"] = value

        content_type, parameters = (
            # Defaults to `text/plain` if parsing failed.
            message.get_content_type().lower(),
            message["content-type"].params,
        )
        # Check if content-type is valid or defaulted to `text/plain` and thus was
        # not parseable.
        if content_type not in content_types or content_type not in value.lower():
            raise self._invalid_metadata(
                f"{{field}} must be one of {list(content_types)}, not {value!r}"
            )

        charset = parameters.get("charset", "UTF-8")
        if charset != "UTF-8":
            # Fix: previously interpolated list(charset), which exploded the
            # charset string into individual characters in the error message.
            raise self._invalid_metadata(
                f"{{field}} can only specify the UTF-8 charset, not {charset!r}"
            )

        markdown_variants = {"GFM", "CommonMark"}
        variant = parameters.get("variant", "GFM")  # Use an acceptable default.
        if content_type == "text/markdown" and variant not in markdown_variants:
            raise self._invalid_metadata(
                f"valid Markdown variants for {{field}} are {list(markdown_variants)}, "
                f"not {variant!r}",
            )
        return value

    def _process_dynamic(self, value: list[str]) -> list[str]:
        """Validate Dynamic field names; returns the lowercased list."""
        # Lowercase once up front; the same list is validated and returned
        # (previously map(str.lower, value) was computed twice).
        lowered = [dynamic_field.lower() for dynamic_field in value]
        for dynamic_field in lowered:
            if dynamic_field in {"name", "version", "metadata-version"}:
                raise self._invalid_metadata(
                    f"{dynamic_field!r} is not allowed as a dynamic field"
                )
            elif dynamic_field not in _EMAIL_TO_RAW_MAPPING:
                raise self._invalid_metadata(
                    f"{dynamic_field!r} is not a valid dynamic field"
                )
        return lowered

    def _process_provides_extra(
        self,
        value: list[str],
    ) -> list[utils.NormalizedName]:
        normalized_names = []
        try:
            for name in value:
                normalized_names.append(utils.canonicalize_name(name, validate=True))
        except utils.InvalidName as exc:
            # `name` is the offending extra from the loop above.
            raise self._invalid_metadata(
                f"{name!r} is invalid for {{field}}", cause=exc
            ) from exc
        else:
            return normalized_names

    def _process_requires_python(self, value: str) -> specifiers.SpecifierSet:
        try:
            return specifiers.SpecifierSet(value)
        except specifiers.InvalidSpecifier as exc:
            raise self._invalid_metadata(
                f"{value!r} is invalid for {{field}}", cause=exc
            ) from exc

    def _process_requires_dist(
        self,
        value: list[str],
    ) -> list[requirements.Requirement]:
        reqs = []
        try:
            for req in value:
                reqs.append(requirements.Requirement(req))
        except requirements.InvalidRequirement as exc:
            # `req` is the offending requirement string from the loop above.
            raise self._invalid_metadata(
                f"{req!r} is invalid for {{field}}", cause=exc
            ) from exc
        else:
            return reqs

    def _process_license_expression(
        self, value: str
    ) -> NormalizedLicenseExpression | None:
        try:
            return licenses.canonicalize_license_expression(value)
        except ValueError as exc:
            raise self._invalid_metadata(
                f"{value!r} is invalid for {{field}}", cause=exc
            ) from exc

    def _process_license_files(self, value: list[str]) -> list[str]:
        """Validate License-File paths: relative, '/'-delimited, fully resolved."""
        paths = []
        for path in value:
            if ".." in path:
                raise self._invalid_metadata(
                    f"{path!r} is invalid for {{field}}, "
                    "parent directory indicators are not allowed"
                )
            if "*" in path:
                raise self._invalid_metadata(
                    f"{path!r} is invalid for {{field}}, paths must be resolved"
                )
            if (
                pathlib.PurePosixPath(path).is_absolute()
                or pathlib.PureWindowsPath(path).is_absolute()
            ):
                raise self._invalid_metadata(
                    f"{path!r} is invalid for {{field}}, paths must be relative"
                )
            if pathlib.PureWindowsPath(path).as_posix() != path:
                # Rejects backslash delimiters (and Windows drive forms).
                raise self._invalid_metadata(
                    f"{path!r} is invalid for {{field}}, "
                    "paths must use '/' delimiter"
                )
            paths.append(path)
        return paths
+
+
class Metadata:
    """Representation of distribution metadata.

    Compared to :class:`RawMetadata`, this class provides objects representing
    metadata fields instead of only using built-in types. Any invalid metadata
    will cause :exc:`InvalidMetadata` to be raised (with a
    :py:attr:`~BaseException.__cause__` attribute as appropriate).
    """

    # Raw, not-yet-enriched field values; _Validator.__get__ migrates entries
    # from here into the instance __dict__ as fields are first accessed.
    _raw: RawMetadata

    @classmethod
    def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> Metadata:
        """Create an instance from :class:`RawMetadata`.

        If *validate* is true, all metadata will be validated. All exceptions
        related to validation will be gathered and raised as an :class:`ExceptionGroup`.
        """
        ins = cls()
        ins._raw = data.copy()  # Mutations occur due to caching enriched values.

        if validate:
            exceptions: list[Exception] = []
            try:
                metadata_version = ins.metadata_version
                metadata_age = _VALID_METADATA_VERSIONS.index(metadata_version)
            except InvalidMetadata as metadata_version_exc:
                exceptions.append(metadata_version_exc)
                # NOTE: metadata_age is deliberately left unbound here; it is
                # only read under the `if metadata_version:` guard below.
                metadata_version = None

            # Make sure to check the fields that are present as well as the
            # required fields (so their absence can be reported).
            fields_to_check = frozenset(ins._raw) | _REQUIRED_ATTRS
            # Remove fields that have already been checked.
            fields_to_check -= {"metadata_version"}

            for key in fields_to_check:
                try:
                    if metadata_version:
                        # Can't use getattr() as that triggers descriptor protocol which
                        # will fail due to no value for the instance argument.
                        try:
                            field_metadata_version = cls.__dict__[key].added
                        except KeyError:
                            exc = InvalidMetadata(key, f"unrecognized field: {key!r}")
                            exceptions.append(exc)
                            continue
                        field_age = _VALID_METADATA_VERSIONS.index(
                            field_metadata_version
                        )
                        # Reject fields newer than the declared Metadata-Version.
                        if field_age > metadata_age:
                            field = _RAW_TO_EMAIL_MAPPING[key]
                            exc = InvalidMetadata(
                                field,
                                f"{field} introduced in metadata version "
                                f"{field_metadata_version}, not {metadata_version}",
                            )
                            exceptions.append(exc)
                            continue
                    # Attribute access triggers per-field validation via the
                    # _Validator descriptor.
                    getattr(ins, key)
                except InvalidMetadata as exc:
                    exceptions.append(exc)

            if exceptions:
                raise ExceptionGroup("invalid metadata", exceptions)

        return ins

    @classmethod
    def from_email(cls, data: bytes | str, *, validate: bool = True) -> Metadata:
        """Parse metadata from email headers.

        If *validate* is true, the metadata will be validated. All exceptions
        related to validation will be gathered and raised as an :class:`ExceptionGroup`.
        """
        raw, unparsed = parse_email(data)

        if validate:
            exceptions: list[Exception] = []
            # Anything parse_email() could not interpret is reported first:
            # either a known field with bad data or an unknown field entirely.
            for unparsed_key in unparsed:
                if unparsed_key in _EMAIL_TO_RAW_MAPPING:
                    message = f"{unparsed_key!r} has invalid data"
                else:
                    message = f"unrecognized field: {unparsed_key!r}"
                exceptions.append(InvalidMetadata(unparsed_key, message))

            if exceptions:
                raise ExceptionGroup("unparsed", exceptions)

        try:
            return cls.from_raw(raw, validate=validate)
        except ExceptionGroup as exc_group:
            # Re-label the group for this entry point; `from None` suppresses
            # the redundant chained context.
            raise ExceptionGroup(
                "invalid or unparsed metadata", exc_group.exceptions
            ) from None

    # Each attribute below is a _Validator descriptor; the annotation's type
    # parameter is the enriched type returned after validation.
    metadata_version: _Validator[_MetadataVersion] = _Validator()
    """:external:ref:`core-metadata-metadata-version`
    (required; validated to be a valid metadata version)"""
    # `name` is not normalized/typed to NormalizedName so as to provide access to
    # the original/raw name.
    name: _Validator[str] = _Validator()
    """:external:ref:`core-metadata-name`
    (required; validated using :func:`~packaging.utils.canonicalize_name` and its
    *validate* parameter)"""
    version: _Validator[version_module.Version] = _Validator()
    """:external:ref:`core-metadata-version` (required)"""
    dynamic: _Validator[list[str] | None] = _Validator(
        added="2.2",
    )
    """:external:ref:`core-metadata-dynamic`
    (validated against core metadata field names and lowercased)"""
    platforms: _Validator[list[str] | None] = _Validator()
    """:external:ref:`core-metadata-platform`"""
    supported_platforms: _Validator[list[str] | None] = _Validator(added="1.1")
    """:external:ref:`core-metadata-supported-platform`"""
    summary: _Validator[str | None] = _Validator()
    """:external:ref:`core-metadata-summary` (validated to contain no newlines)"""
    description: _Validator[str | None] = _Validator()  # TODO 2.1: can be in body
    """:external:ref:`core-metadata-description`"""
    description_content_type: _Validator[str | None] = _Validator(added="2.1")
    """:external:ref:`core-metadata-description-content-type` (validated)"""
    keywords: _Validator[list[str] | None] = _Validator()
    """:external:ref:`core-metadata-keywords`"""
    home_page: _Validator[str | None] = _Validator()
    """:external:ref:`core-metadata-home-page`"""
    download_url: _Validator[str | None] = _Validator(added="1.1")
    """:external:ref:`core-metadata-download-url`"""
    author: _Validator[str | None] = _Validator()
    """:external:ref:`core-metadata-author`"""
    author_email: _Validator[str | None] = _Validator()
    """:external:ref:`core-metadata-author-email`"""
    maintainer: _Validator[str | None] = _Validator(added="1.2")
    """:external:ref:`core-metadata-maintainer`"""
    maintainer_email: _Validator[str | None] = _Validator(added="1.2")
    """:external:ref:`core-metadata-maintainer-email`"""
    license: _Validator[str | None] = _Validator()
    """:external:ref:`core-metadata-license`"""
    license_expression: _Validator[NormalizedLicenseExpression | None] = _Validator(
        added="2.4"
    )
    """:external:ref:`core-metadata-license-expression`"""
    license_files: _Validator[list[str] | None] = _Validator(added="2.4")
    """:external:ref:`core-metadata-license-file`"""
    classifiers: _Validator[list[str] | None] = _Validator(added="1.1")
    """:external:ref:`core-metadata-classifier`"""
    requires_dist: _Validator[list[requirements.Requirement] | None] = _Validator(
        added="1.2"
    )
    """:external:ref:`core-metadata-requires-dist`"""
    requires_python: _Validator[specifiers.SpecifierSet | None] = _Validator(
        added="1.2"
    )
    """:external:ref:`core-metadata-requires-python`"""
    # Because `Requires-External` allows for non-PEP 440 version specifiers, we
    # don't do any processing on the values.
    requires_external: _Validator[list[str] | None] = _Validator(added="1.2")
    """:external:ref:`core-metadata-requires-external`"""
    project_urls: _Validator[dict[str, str] | None] = _Validator(added="1.2")
    """:external:ref:`core-metadata-project-url`"""
    # PEP 685 lets us raise an error if an extra doesn't pass `Name` validation
    # regardless of metadata version.
    provides_extra: _Validator[list[utils.NormalizedName] | None] = _Validator(
        added="2.1",
    )
    """:external:ref:`core-metadata-provides-extra`"""
    provides_dist: _Validator[list[str] | None] = _Validator(added="1.2")
    """:external:ref:`core-metadata-provides-dist`"""
    obsoletes_dist: _Validator[list[str] | None] = _Validator(added="1.2")
    """:external:ref:`core-metadata-obsoletes-dist`"""
    requires: _Validator[list[str] | None] = _Validator(added="1.1")
    """``Requires`` (deprecated)"""
    provides: _Validator[list[str] | None] = _Validator(added="1.1")
    """``Provides`` (deprecated)"""
    obsoletes: _Validator[list[str] | None] = _Validator(added="1.1")
    """``Obsoletes`` (deprecated)"""
diff --git a/.venv/Lib/site-packages/packaging/py.typed b/.venv/Lib/site-packages/packaging/py.typed
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/.venv/Lib/site-packages/packaging/requirements.py b/.venv/Lib/site-packages/packaging/requirements.py
new file mode 100644
index 0000000000000000000000000000000000000000..4e068c9567def3564f238a76fe7ab46b569f33e5
--- /dev/null
+++ b/.venv/Lib/site-packages/packaging/requirements.py
@@ -0,0 +1,91 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import annotations
+
+from typing import Any, Iterator
+
+from ._parser import parse_requirement as _parse_requirement
+from ._tokenizer import ParserSyntaxError
+from .markers import Marker, _normalize_extra_values
+from .specifiers import SpecifierSet
+from .utils import canonicalize_name
+
+
class InvalidRequirement(ValueError):
    """
    An invalid requirement was found, users should refer to PEP 508.

    Raised by :class:`Requirement` when the requirement string cannot be
    parsed; the underlying parser error is attached as ``__cause__``.
    Subclasses :exc:`ValueError`, so ``except ValueError`` also catches it.
    """
+
+
class Requirement:
    """A single PEP 508 dependency specification.

    Parses a requirement string into its constituent parts: name, extras,
    version specifier, URL, and environment marker. A malformed string
    raises :exc:`InvalidRequirement`.
    """

    # TODO: Can we test whether something is contained within a requirement?
    #       If so how do we do that? Do we need to test against the _name_ of
    #       the thing as well as the version? What about the markers?
    # TODO: Can we normalize the name and extra name?

    def __init__(self, requirement_string: str) -> None:
        try:
            parsed = _parse_requirement(requirement_string)
        except ParserSyntaxError as exc:
            raise InvalidRequirement(str(exc)) from exc

        self.name: str = parsed.name
        self.extras: set[str] = set(parsed.extras or [])
        self.url: str | None = parsed.url or None
        self.specifier: SpecifierSet = SpecifierSet(parsed.specifier)
        marker: Marker | None = None
        if parsed.marker is not None:
            # Bypass Marker.__init__ (which would re-parse a string) and
            # install the already-parsed marker expression directly.
            marker = Marker.__new__(Marker)
            marker._markers = _normalize_extra_values(parsed.marker)
        self.marker = marker

    def _iter_parts(self, name: str) -> Iterator[str]:
        """Yield the string fragments that, concatenated, form this requirement."""
        pieces: list[str] = [name]

        if self.extras:
            pieces.append(f"[{','.join(sorted(self.extras))}]")

        if self.specifier:
            pieces.append(str(self.specifier))

        if self.url:
            pieces.append(f"@ {self.url}")
            if self.marker:
                # A separating space keeps the URL from running into the
                # marker delimiter.
                pieces.append(" ")

        if self.marker:
            pieces.append(f"; {self.marker}")

        yield from pieces

    def __str__(self) -> str:
        fragments = self._iter_parts(self.name)
        return "".join(fragments)

    def __repr__(self) -> str:
        return f"<Requirement('{self}')>"

    def __hash__(self) -> int:
        # Hash on the canonicalized form so equal requirements hash equally.
        key = (
            self.__class__.__name__,
            *self._iter_parts(canonicalize_name(self.name)),
        )
        return hash(key)

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, Requirement):
            return NotImplemented

        if canonicalize_name(self.name) != canonicalize_name(other.name):
            return False
        return (
            self.extras == other.extras
            and self.specifier == other.specifier
            and self.url == other.url
            and self.marker == other.marker
        )
diff --git a/.venv/Lib/site-packages/packaging/specifiers.py b/.venv/Lib/site-packages/packaging/specifiers.py
new file mode 100644
index 0000000000000000000000000000000000000000..b30926af8bf4f47efe98eea44d5ded4cb6f7e07d
--- /dev/null
+++ b/.venv/Lib/site-packages/packaging/specifiers.py
@@ -0,0 +1,1020 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+"""
+.. testsetup::
+
+    from packaging.specifiers import Specifier, SpecifierSet, InvalidSpecifier
+    from packaging.version import Version
+"""
+
+from __future__ import annotations
+
+import abc
+import itertools
+import re
+from typing import Callable, Iterable, Iterator, TypeVar, Union
+
+from .utils import canonicalize_version
+from .version import Version
+
+UnparsedVersion = Union[Version, str]
+UnparsedVersionVar = TypeVar("UnparsedVersionVar", bound=UnparsedVersion)
+CallableOperator = Callable[[Version, str], bool]
+
+
def _coerce_version(version: UnparsedVersion) -> Version:
    """Return *version* as a :class:`Version`, parsing it when given a str."""
    if isinstance(version, Version):
        return version
    return Version(version)
+
+
class InvalidSpecifier(ValueError):
    """
    Raised when attempting to create a :class:`Specifier` with a specifier
    string that is invalid.

    Subclasses :exc:`ValueError`, so ``except ValueError`` also catches it.

    >>> Specifier("lolwat")
    Traceback (most recent call last):
        ...
    packaging.specifiers.InvalidSpecifier: Invalid specifier: 'lolwat'
    """
+
+
class BaseSpecifier(metaclass=abc.ABCMeta):
    """Abstract interface for Specifier-like objects.

    Defines the operations every specifier implementation must support:
    string conversion, hashing, equality, a ``prereleases`` property,
    single-item membership via :meth:`contains`, and bulk filtering via
    :meth:`filter`.
    """

    @abc.abstractmethod
    def __str__(self) -> str:
        """
        Returns the str representation of this Specifier-like object. This
        should be representative of the Specifier itself.
        """

    @abc.abstractmethod
    def __hash__(self) -> int:
        """
        Returns a hash value for this Specifier-like object.
        """

    @abc.abstractmethod
    def __eq__(self, other: object) -> bool:
        """
        Returns a boolean representing whether or not the two Specifier-like
        objects are equal.

        :param other: The other object to check against.
        """

    @property
    @abc.abstractmethod
    def prereleases(self) -> bool | None:
        """Whether or not pre-releases as a whole are allowed.

        This can be set to either ``True`` or ``False`` to explicitly enable or disable
        prereleases or it can be set to ``None`` (the default) to use default semantics.
        """

    @prereleases.setter
    def prereleases(self, value: bool) -> None:
        """Setter for :attr:`prereleases`.

        :param value: The value to set.
        """

    @abc.abstractmethod
    def contains(self, item: str, prereleases: bool | None = None) -> bool:
        """
        Determines if the given item is contained within this specifier.
        """

    @abc.abstractmethod
    def filter(
        self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None
    ) -> Iterator[UnparsedVersionVar]:
        """
        Takes an iterable of items and filters them so that only items which
        are contained within this specifier are allowed in it.
        """
+
+
+class Specifier(BaseSpecifier):
+    """This class abstracts handling of version specifiers.
+
+    .. tip::
+
+        It is generally not required to instantiate this manually. You should instead
+        prefer to work with :class:`SpecifierSet` instead, which can parse
+        comma-separated version specifiers (which is what package metadata contains).
+    """
+
+    _operator_regex_str = r"""
+        (?P<operator>(~=|==|!=|<=|>=|<|>|===))
+        """
+    _version_regex_str = r"""
+        (?P<version>
+            (?:
+                # The identity operators allow for an escape hatch that will
+                # do an exact string match of the version you wish to install.
+                # This will not be parsed by PEP 440 and we cannot determine
+                # any semantic meaning from it. This operator is discouraged
+                # but included entirely as an escape hatch.
+                (?<====)  # Only match for the identity operator
+                \s*
+                [^\s;)]*  # The arbitrary version can be just about anything,
+                          # we match everything except for whitespace, a
+                          # semi-colon for marker support, and a closing paren
+                          # since versions can be enclosed in them.
+            )
+            |
+            (?:
+                # The (non)equality operators allow for wild card and local
+                # versions to be specified so we have to define these two
+                # operators separately to enable that.
+                (?<===|!=)            # Only match for equals and not equals
+
+                \s*
+                v?
+                (?:[0-9]+!)?          # epoch
+                [0-9]+(?:\.[0-9]+)*   # release
+
+                # You cannot use a wild card and a pre-release, post-release, a dev or
+                # local version together so group them with a | and make them optional.
+                (?:
+                    \.\*  # Wild card syntax of .*
+                    |
+                    (?:                                  # pre release
+                        [-_\.]?
+                        (alpha|beta|preview|pre|a|b|c|rc)
+                        [-_\.]?
+                        [0-9]*
+                    )?
+                    (?:                                  # post release
+                        (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+                    )?
+                    (?:[-_\.]?dev[-_\.]?[0-9]*)?         # dev release
+                    (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
+                )?
+            )
+            |
+            (?:
+                # The compatible operator requires at least two digits in the
+                # release segment.
+                (?<=~=)               # Only match for the compatible operator
+
+                \s*
+                v?
+                (?:[0-9]+!)?          # epoch
+                [0-9]+(?:\.[0-9]+)+   # release  (We have a + instead of a *)
+                (?:                   # pre release
+                    [-_\.]?
+                    (alpha|beta|preview|pre|a|b|c|rc)
+                    [-_\.]?
+                    [0-9]*
+                )?
+                (?:                                   # post release
+                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+                )?
+                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
+            )
+            |
+            (?:
+                # All other operators only allow a sub set of what the
+                # (non)equality operators do. Specifically they do not allow
+                # local versions to be specified nor do they allow the prefix
+                # matching wild cards.
+                (?<!==|!=|~=)         # We have special cases for these
+                                      # operators so we want to make sure they
+                                      # don't match here.
+
+                \s*
+                v?
+                (?:[0-9]+!)?          # epoch
+                [0-9]+(?:\.[0-9]+)*   # release
+                (?:                   # pre release
+                    [-_\.]?
+                    (alpha|beta|preview|pre|a|b|c|rc)
+                    [-_\.]?
+                    [0-9]*
+                )?
+                (?:                                   # post release
+                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+                )?
+                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
+            )
+        )
+        """
+
+    _regex = re.compile(
+        r"^\s*" + _operator_regex_str + _version_regex_str + r"\s*$",
+        re.VERBOSE | re.IGNORECASE,
+    )
+
+    _operators = {
+        "~=": "compatible",
+        "==": "equal",
+        "!=": "not_equal",
+        "<=": "less_than_equal",
+        ">=": "greater_than_equal",
+        "<": "less_than",
+        ">": "greater_than",
+        "===": "arbitrary",
+    }
+
+    def __init__(self, spec: str = "", prereleases: bool | None = None) -> None:
+        """Initialize a Specifier instance.
+
+        :param spec:
+            The string representation of a specifier which will be parsed and
+            normalized before use.
+        :param prereleases:
+            This tells the specifier if it should accept prerelease versions if
+            applicable or not. The default of ``None`` will autodetect it from the
+            given specifiers.
+        :raises InvalidSpecifier:
+            If the given specifier is invalid (i.e. bad syntax).
+        """
+        # Validate the raw string against the specifier grammar; a non-match
+        # means the whole spec is syntactically invalid.
+        match = self._regex.search(spec)
+        if not match:
+            raise InvalidSpecifier(f"Invalid specifier: {spec!r}")
+
+        # Keep only the normalized (operator, version) pair; the original
+        # string is not retained.
+        self._spec: tuple[str, str] = (
+            match.group("operator").strip(),
+            match.group("version").strip(),
+        )
+
+        # Store whether or not this Specifier should accept prereleases
+        self._prereleases = prereleases
+
+    # https://github.com/python/mypy/pull/13475#pullrequestreview-1079784515
+    @property  # type: ignore[override]
+    def prereleases(self) -> bool:
+        """Whether this specifier should admit prerelease versions.
+
+        An explicitly-set value always wins; otherwise the value is inferred:
+        an inclusive operator whose version is itself a prerelease implies
+        ``True``.
+        """
+        # If there is an explicit prereleases set for this, then we'll just
+        # blindly use that.
+        if self._prereleases is not None:
+            return self._prereleases
+
+        # Look at all of our specifiers and determine if they are inclusive
+        # operators, and if they are if they are including an explicit
+        # prerelease.
+        operator, version = self._spec
+        if operator in ["==", ">=", "<=", "~=", "===", ">", "<"]:
+            # The == specifier can include a trailing .*, if it does we
+            # want to remove before parsing.
+            if operator == "==" and version.endswith(".*"):
+                version = version[:-2]
+
+            # Parse the version, and if it is a pre-release then this
+            # specifier allows pre-releases.
+            if Version(version).is_prerelease:
+                return True
+
+        return False
+
+    @prereleases.setter
+    def prereleases(self, value: bool) -> None:
+        # Overrides any autodetected value from the getter above.
+        self._prereleases = value
+
+    @property
+    def operator(self) -> str:
+        """The operator of this specifier.
+
+        >>> Specifier("==1.2.3").operator
+        '=='
+        """
+        return self._spec[0]
+
+    @property
+    def version(self) -> str:
+        """The version of this specifier.
+
+        >>> Specifier("==1.2.3").version
+        '1.2.3'
+        """
+        return self._spec[1]
+
+    def __repr__(self) -> str:
+        """A representation of the Specifier that shows all internal state.
+
+        >>> Specifier('>=1.0.0')
+        <Specifier('>=1.0.0')>
+        >>> Specifier('>=1.0.0', prereleases=False)
+        <Specifier('>=1.0.0', prereleases=False)>
+        >>> Specifier('>=1.0.0', prereleases=True)
+        <Specifier('>=1.0.0', prereleases=True)>
+        """
+        # The prereleases kwarg is only shown when it was explicitly set.
+        pre = (
+            f", prereleases={self.prereleases!r}"
+            if self._prereleases is not None
+            else ""
+        )
+
+        return f"<{self.__class__.__name__}({str(self)!r}{pre})>"
+
+    def __str__(self) -> str:
+        """A string representation of the Specifier that can be round-tripped.
+
+        >>> str(Specifier('>=1.0.0'))
+        '>=1.0.0'
+        >>> str(Specifier('>=1.0.0', prereleases=False))
+        '>=1.0.0'
+        """
+        return "{}{}".format(*self._spec)
+
+    @property
+    def _canonical_spec(self) -> tuple[str, str]:
+        # The (operator, canonicalized-version) pair used by __hash__/__eq__.
+        # Trailing zeros are only stripped for operators other than "~=",
+        # where they would change which release segments form the prefix.
+        canonical_version = canonicalize_version(
+            self._spec[1],
+            strip_trailing_zero=(self._spec[0] != "~="),
+        )
+        return self._spec[0], canonical_version
+
+    def __hash__(self) -> int:
+        # Hash on the canonical form so equal specifiers hash equally.
+        return hash(self._canonical_spec)
+
+    def __eq__(self, other: object) -> bool:
+        """Whether or not the two Specifier-like objects are equal.
+
+        :param other: The other object to check against.
+
+        The value of :attr:`prereleases` is ignored.
+
+        >>> Specifier("==1.2.3") == Specifier("== 1.2.3.0")
+        True
+        >>> (Specifier("==1.2.3", prereleases=False) ==
+        ...  Specifier("==1.2.3", prereleases=True))
+        True
+        >>> Specifier("==1.2.3") == "==1.2.3"
+        True
+        >>> Specifier("==1.2.3") == Specifier("==1.2.4")
+        False
+        >>> Specifier("==1.2.3") == Specifier("~=1.2.3")
+        False
+        """
+        # Strings are coerced to Specifier; anything else defers to the other
+        # operand via NotImplemented.
+        if isinstance(other, str):
+            try:
+                other = self.__class__(str(other))
+            except InvalidSpecifier:
+                return NotImplemented
+        elif not isinstance(other, self.__class__):
+            return NotImplemented
+
+        # Compare canonical forms so "==1.2.3" equals "== 1.2.3.0".
+        return self._canonical_spec == other._canonical_spec
+
+    def _get_operator(self, op: str) -> CallableOperator:
+        # Resolve an operator token (e.g. ">=") to its bound ``_compare_*``
+        # method through the ``_operators`` lookup table.
+        operator_callable: CallableOperator = getattr(
+            self, f"_compare_{self._operators[op]}"
+        )
+        return operator_callable
+
+    def _compare_compatible(self, prospective: Version, spec: str) -> bool:
+        # Compatible releases have an equivalent combination of >= and ==. That
+        # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
+        # implement this in terms of the other specifiers instead of
+        # implementing it ourselves. The only thing we need to do is construct
+        # the other specifiers.
+
+        # We want everything but the last item in the version, but we want to
+        # ignore suffix segments.
+        # NOTE(review): assumes the spec has at least two release segments, as
+        # PEP 440 requires for "~=" — confirm the grammar enforces this.
+        prefix = _version_join(
+            list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1]
+        )
+
+        # Add the prefix notation to the end of our string
+        prefix += ".*"
+
+        return self._get_operator(">=")(prospective, spec) and self._get_operator("==")(
+            prospective, prefix
+        )
+
+    def _compare_equal(self, prospective: Version, spec: str) -> bool:
+        # We need special logic to handle prefix matching
+        if spec.endswith(".*"):
+            # In the case of prefix matching we want to ignore local segment.
+            normalized_prospective = canonicalize_version(
+                prospective.public, strip_trailing_zero=False
+            )
+            # Get the normalized version string ignoring the trailing .*
+            normalized_spec = canonicalize_version(spec[:-2], strip_trailing_zero=False)
+            # Split the spec out by bangs and dots, and pretend that there is
+            # an implicit dot in between a release segment and a pre-release segment.
+            split_spec = _version_split(normalized_spec)
+
+            # Split the prospective version out by bangs and dots, and pretend
+            # that there is an implicit dot in between a release segment and
+            # a pre-release segment.
+            split_prospective = _version_split(normalized_prospective)
+
+            # 0-pad the prospective version before shortening it to get the correct
+            # shortened version.
+            padded_prospective, _ = _pad_version(split_prospective, split_spec)
+
+            # Shorten the prospective version to be the same length as the spec
+            # so that we can determine if the specifier is a prefix of the
+            # prospective version or not.
+            shortened_prospective = padded_prospective[: len(split_spec)]
+
+            # Prefix match: component-wise equality over the spec's length.
+            return shortened_prospective == split_spec
+        else:
+            # Convert our spec string into a Version
+            spec_version = Version(spec)
+
+            # If the specifier does not have a local segment, then we want to
+            # act as if the prospective version also does not have a local
+            # segment.
+            if not spec_version.local:
+                prospective = Version(prospective.public)
+
+            return prospective == spec_version
+
+    def _compare_not_equal(self, prospective: Version, spec: str) -> bool:
+        # "!=" is defined as the exact negation of "==", including the
+        # prefix-matching (".*") behavior.
+        return not self._compare_equal(prospective, spec)
+
+    def _compare_less_than_equal(self, prospective: Version, spec: str) -> bool:
+        # NB: Local version identifiers are NOT permitted in the version
+        # specifier, so local version labels can be universally removed from
+        # the prospective version.
+        return Version(prospective.public) <= Version(spec)
+
+    def _compare_greater_than_equal(self, prospective: Version, spec: str) -> bool:
+        # NB: Local version identifiers are NOT permitted in the version
+        # specifier, so local version labels can be universally removed from
+        # the prospective version.
+        return Version(prospective.public) >= Version(spec)
+
+    def _compare_less_than(self, prospective: Version, spec_str: str) -> bool:
+        # Convert our spec to a Version instance, since we'll want to work with
+        # it as a version.
+        spec = Version(spec_str)
+
+        # Check to see if the prospective version is less than the spec
+        # version. If it's not we can short circuit and just return False now
+        # instead of doing extra unneeded work.
+        if not prospective < spec:
+            return False
+
+        # This special case is here so that, unless the specifier itself is a
+        # pre-release version, we do not accept pre-release versions for the
+        # version mentioned in the specifier (e.g. <3.1 should not match
+        # 3.1.dev0, but should match 3.0.dev0).
+        if not spec.is_prerelease and prospective.is_prerelease:
+            if Version(prospective.base_version) == Version(spec.base_version):
+                return False
+
+        # If we've gotten to here, it means that prospective version is both
+        # less than the spec version *and* it's not a pre-release of the same
+        # version in the spec.
+        return True
+
+    def _compare_greater_than(self, prospective: Version, spec_str: str) -> bool:
+        # Convert our spec to a Version instance, since we'll want to work with
+        # it as a version.
+        spec = Version(spec_str)
+
+        # Check to see if the prospective version is greater than the spec
+        # version. If it's not we can short circuit and just return False now
+        # instead of doing extra unneeded work.
+        if not prospective > spec:
+            return False
+
+        # This special case is here so that, unless the specifier itself is a
+        # post-release version, we do not accept post-release versions for the
+        # version mentioned in the specifier
+        # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0).
+        if not spec.is_postrelease and prospective.is_postrelease:
+            if Version(prospective.base_version) == Version(spec.base_version):
+                return False
+
+        # Ensure that we do not allow a local version of the version mentioned
+        # in the specifier, which is technically greater than, to match.
+        if prospective.local is not None:
+            if Version(prospective.base_version) == Version(spec.base_version):
+                return False
+
+        # If we've gotten to here, it means that prospective version is both
+        # greater than the spec version *and* it's not a post-release or local
+        # version of the same version in the spec.
+        return True
+
+    def _compare_arbitrary(self, prospective: Version, spec: str) -> bool:
+        # "===" is a plain case-insensitive string comparison; no version
+        # semantics are applied at all.
+        return str(prospective).lower() == str(spec).lower()
+
+    def __contains__(self, item: str | Version) -> bool:
+        """Return whether or not the item is contained in this specifier.
+
+        :param item: The item to check for.
+
+        This is used for the ``in`` operator and behaves the same as
+        :meth:`contains` with no ``prereleases`` argument passed.
+
+        >>> "1.2.3" in Specifier(">=1.2.3")
+        True
+        >>> Version("1.2.3") in Specifier(">=1.2.3")
+        True
+        >>> "1.0.0" in Specifier(">=1.2.3")
+        False
+        >>> "1.3.0a1" in Specifier(">=1.2.3")
+        False
+        >>> "1.3.0a1" in Specifier(">=1.2.3", prereleases=True)
+        True
+        """
+        # Delegate to contains() with its default prerelease handling.
+        return self.contains(item)
+
+    def contains(self, item: UnparsedVersion, prereleases: bool | None = None) -> bool:
+        """Return whether or not the item is contained in this specifier.
+
+        :param item:
+            The item to check for, which can be a version string or a
+            :class:`Version` instance.
+        :param prereleases:
+            Whether or not to match prereleases with this Specifier. If set to
+            ``None`` (the default), it uses :attr:`prereleases` to determine
+            whether or not prereleases are allowed.
+
+        >>> Specifier(">=1.2.3").contains("1.2.3")
+        True
+        >>> Specifier(">=1.2.3").contains(Version("1.2.3"))
+        True
+        >>> Specifier(">=1.2.3").contains("1.0.0")
+        False
+        >>> Specifier(">=1.2.3").contains("1.3.0a1")
+        False
+        >>> Specifier(">=1.2.3", prereleases=True).contains("1.3.0a1")
+        True
+        >>> Specifier(">=1.2.3").contains("1.3.0a1", prereleases=True)
+        True
+        """
+
+        # Determine if prereleases are to be allowed or not.
+        if prereleases is None:
+            prereleases = self.prereleases
+
+        # Normalize item to a Version, this allows us to have a shortcut for
+        # "2.0" in Specifier(">=2")
+        normalized_item = _coerce_version(item)
+
+        # Determine if we should be supporting prereleases in this specifier
+        # or not, if we do not support prereleases then we can short circuit
+        # the logic if this version is a prerelease.
+        if normalized_item.is_prerelease and not prereleases:
+            return False
+
+        # Actually do the comparison to determine if this item is contained
+        # within this Specifier or not.
+        operator_callable: CallableOperator = self._get_operator(self.operator)
+        return operator_callable(normalized_item, self.version)
+
+    def filter(
+        self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None
+    ) -> Iterator[UnparsedVersionVar]:
+        """Filter items in the given iterable, that match the specifier.
+
+        :param iterable:
+            An iterable that can contain version strings and :class:`Version` instances.
+            The items in the iterable will be filtered according to the specifier.
+        :param prereleases:
+            Whether or not to allow prereleases in the returned iterator. If set to
+            ``None`` (the default), it will intelligently decide whether to allow
+            prereleases or not (based on the :attr:`prereleases` attribute, and
+            whether the only versions matching are prereleases).
+
+        This method is smarter than just ``filter(Specifier().contains, [...])``
+        because it implements the rule from :pep:`440` that a prerelease item
+        SHOULD be accepted if no other versions match the given specifier.
+
+        >>> list(Specifier(">=1.2.3").filter(["1.2", "1.3", "1.5a1"]))
+        ['1.3']
+        >>> list(Specifier(">=1.2.3").filter(["1.2", "1.2.3", "1.3", Version("1.4")]))
+        ['1.2.3', '1.3', <Version('1.4')>]
+        >>> list(Specifier(">=1.2.3").filter(["1.2", "1.5a1"]))
+        ['1.5a1']
+        >>> list(Specifier(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True))
+        ['1.3', '1.5a1']
+        >>> list(Specifier(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"]))
+        ['1.3', '1.5a1']
+        """
+
+        yielded = False
+        found_prereleases = []
+
+        # contains() is always called permissively here; the prerelease
+        # bookkeeping below decides whether prereleases are ultimately yielded.
+        kw = {"prereleases": prereleases if prereleases is not None else True}
+
+        # Attempt to iterate over all the values in the iterable and if any of
+        # them match, yield them.
+        for version in iterable:
+            parsed_version = _coerce_version(version)
+
+            if self.contains(parsed_version, **kw):
+                # If our version is a prerelease, and we were not set to allow
+                # prereleases, then we'll store it for later in case nothing
+                # else matches this specifier.
+                if parsed_version.is_prerelease and not (
+                    prereleases or self.prereleases
+                ):
+                    found_prereleases.append(version)
+                # Either this is not a prerelease, or we should have been
+                # accepting prereleases from the beginning.
+                else:
+                    yielded = True
+                    yield version
+
+        # Now that we've iterated over everything, determine if we've yielded
+        # any values, and if we have not and we have any prereleases stored up
+        # then we will go ahead and yield the prereleases.
+        if not yielded and found_prereleases:
+            for version in found_prereleases:
+                yield version
+
+
+# Matches a release segment fused to a pre-release marker (e.g. "2rc1") so it
+# can be split into two components ("2", "rc1") by _version_split below.
+_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
+
+
+def _version_split(version: str) -> list[str]:
+    """Split version into components.
+
+    The split components are intended for version comparison. The logic does
+    not attempt to retain the original version string, so joining the
+    components back with :func:`_version_join` may not produce the original
+    version string.
+    """
+    result: list[str] = []
+
+    # The first component is always the epoch, defaulting to "0" when the
+    # version string has no "!" separator.
+    epoch, _, rest = version.rpartition("!")
+    result.append(epoch or "0")
+
+    for item in rest.split("."):
+        match = _prefix_regex.search(item)
+        if match:
+            # A fused segment like "2rc1" becomes two components: "2", "rc1".
+            result.extend(match.groups())
+        else:
+            result.append(item)
+    return result
+
+
+def _version_join(components: list[str]) -> str:
+    """Join split version components into a version string.
+
+    This function assumes the input came from :func:`_version_split`, where the
+    first component must be the epoch (either empty or numeric), and the
+    remaining components are the dot-joined release (and suffix) segments
+    produced by the split.
+    """
+    epoch, *rest = components
+    # The epoch separator is always emitted, even for the default "0" epoch.
+    return f"{epoch}!{'.'.join(rest)}"
+
+
+def _is_not_suffix(segment: str) -> bool:
+    """Return True if *segment* is not a dev/pre/post suffix component.
+
+    Used by ``_compare_compatible`` to keep only release segments when
+    building the ``==X.Y.*`` prefix for a ``~=`` comparison.
+    """
+    return not any(
+        segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post")
+    )
+
+
+def _pad_version(left: list[str], right: list[str]) -> tuple[list[str], list[str]]:
+    """Zero-pad the shorter release segment of two split versions.
+
+    Both inputs are component lists from :func:`_version_split`. "0"
+    components are inserted after the purely-numeric leading run (the release
+    segment) of whichever side is shorter, so both release segments end up the
+    same length; non-numeric suffix components are left untouched.
+    """
+    left_split, right_split = [], []
+
+    # Get the release segment of our versions
+    left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
+    right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
+
+    # Get the rest of our versions
+    left_split.append(left[len(left_split[0]) :])
+    right_split.append(right[len(right_split[0]) :])
+
+    # Insert our padding
+    left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
+    right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))
+
+    return (
+        list(itertools.chain.from_iterable(left_split)),
+        list(itertools.chain.from_iterable(right_split)),
+    )
+
+
+class SpecifierSet(BaseSpecifier):
+    """This class abstracts handling of a set of version specifiers.
+
+    It can be passed a single specifier (``>=3.0``), a comma-separated list of
+    specifiers (``>=3.0,!=3.1``), or no specifier at all.
+    """
+
+    def __init__(
+        self,
+        specifiers: str | Iterable[Specifier] = "",
+        prereleases: bool | None = None,
+    ) -> None:
+        """Initialize a SpecifierSet instance.
+
+        :param specifiers:
+            The string representation of a specifier or a comma-separated list of
+            specifiers which will be parsed and normalized before use.
+            May also be an iterable of ``Specifier`` instances, which will be used
+            as is.
+        :param prereleases:
+            This tells the SpecifierSet if it should accept prerelease versions if
+            applicable or not. The default of ``None`` will autodetect it from the
+            given specifiers.
+
+        :raises InvalidSpecifier:
+            If the given ``specifiers`` are not parseable, then this exception
+            will be raised.
+        """
+
+        if isinstance(specifiers, str):
+            # Split on `,` to break each individual specifier into its own item, and
+            # strip each item to remove leading/trailing whitespace.
+            split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
+
+            # Make each individual specifier a Specifier and save in a frozen set
+            # for later.
+            self._specs = frozenset(map(Specifier, split_specifiers))
+        else:
+            # Save the supplied specifiers in a frozen set.
+            self._specs = frozenset(specifiers)
+
+        # Store our prereleases value so we can use it later to determine if
+        # we accept prereleases or not.
+        self._prereleases = prereleases
+
+    @property
+    def prereleases(self) -> bool | None:
+        # If we have been given an explicit prerelease modifier, then we'll
+        # pass that through here.
+        if self._prereleases is not None:
+            return self._prereleases
+
+        # If we don't have any specifiers, and we don't have a forced value,
+        # then we'll just return None since we don't know if this should have
+        # pre-releases or not.
+        if not self._specs:
+            return None
+
+        # Otherwise we'll see if any of the given specifiers accept
+        # prereleases, if any of them do we'll return True, otherwise False.
+        return any(s.prereleases for s in self._specs)
+
+    @prereleases.setter
+    def prereleases(self, value: bool) -> None:
+        self._prereleases = value
+
+    def __repr__(self) -> str:
+        """A representation of the specifier set that shows all internal state.
+
+        Note that the ordering of the individual specifiers within the set may not
+        match the input string.
+
+        >>> SpecifierSet('>=1.0.0,!=2.0.0')
+        <SpecifierSet('!=2.0.0,>=1.0.0')>
+        >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=False)
+        <SpecifierSet('!=2.0.0,>=1.0.0', prereleases=False)>
+        >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=True)
+        <SpecifierSet('!=2.0.0,>=1.0.0', prereleases=True)>
+        """
+        # Only show the prereleases kwarg when it was explicitly set.
+        pre = (
+            f", prereleases={self.prereleases!r}"
+            if self._prereleases is not None
+            else ""
+        )
+
+        return f"<SpecifierSet({str(self)!r}{pre})>"
+
+    def __str__(self) -> str:
+        """A string representation of the specifier set that can be round-tripped.
+
+        Note that the ordering of the individual specifiers within the set may not
+        match the input string.
+
+        >>> str(SpecifierSet(">=1.0.0,!=1.0.1"))
+        '!=1.0.1,>=1.0.0'
+        >>> str(SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False))
+        '!=1.0.1,>=1.0.0'
+        """
+        # Sorting makes the output deterministic despite the frozenset.
+        return ",".join(sorted(str(s) for s in self._specs))
+
+    def __hash__(self) -> int:
+        return hash(self._specs)
+
+    def __and__(self, other: SpecifierSet | str) -> SpecifierSet:
+        """Return a SpecifierSet which is a combination of the two sets.
+
+        :param other: The other object to combine with.
+
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") & '<=2.0.0,!=2.0.1'
+        <SpecifierSet('!=1.0.1,!=2.0.1,<=2.0.0,>=1.0.0')>
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") & SpecifierSet('<=2.0.0,!=2.0.1')
+        <SpecifierSet('!=1.0.1,!=2.0.1,<=2.0.0,>=1.0.0')>
+        """
+        if isinstance(other, str):
+            other = SpecifierSet(other)
+        elif not isinstance(other, SpecifierSet):
+            return NotImplemented
+
+        specifier = SpecifierSet()
+        specifier._specs = frozenset(self._specs | other._specs)
+
+        # An explicit prereleases value on either side carries over; two
+        # conflicting explicit values cannot be reconciled and raise.
+        if self._prereleases is None and other._prereleases is not None:
+            specifier._prereleases = other._prereleases
+        elif self._prereleases is not None and other._prereleases is None:
+            specifier._prereleases = self._prereleases
+        elif self._prereleases == other._prereleases:
+            specifier._prereleases = self._prereleases
+        else:
+            raise ValueError(
+                "Cannot combine SpecifierSets with True and False prerelease "
+                "overrides."
+            )
+
+        return specifier
+
+    def __eq__(self, other: object) -> bool:
+        """Whether or not the two SpecifierSet-like objects are equal.
+
+        :param other: The other object to check against.
+
+        The value of :attr:`prereleases` is ignored.
+
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.1")
+        True
+        >>> (SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False) ==
+        ...  SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True))
+        True
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") == ">=1.0.0,!=1.0.1"
+        True
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0")
+        False
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.2")
+        False
+        """
+        if isinstance(other, (str, Specifier)):
+            other = SpecifierSet(str(other))
+        elif not isinstance(other, SpecifierSet):
+            return NotImplemented
+
+        return self._specs == other._specs
+
+    def __len__(self) -> int:
+        """Returns the number of specifiers in this specifier set."""
+        return len(self._specs)
+
+    def __iter__(self) -> Iterator[Specifier]:
+        """
+        Returns an iterator over all the underlying :class:`Specifier` instances
+        in this specifier set.
+
+        >>> sorted(SpecifierSet(">=1.0.0,!=1.0.1"), key=str)
+        [<Specifier('!=1.0.1')>, <Specifier('>=1.0.0')>]
+        """
+        return iter(self._specs)
+
+    def __contains__(self, item: UnparsedVersion) -> bool:
+        """Return whether or not the item is contained in this specifier.
+
+        :param item: The item to check for.
+
+        This is used for the ``in`` operator and behaves the same as
+        :meth:`contains` with no ``prereleases`` argument passed.
+
+        >>> "1.2.3" in SpecifierSet(">=1.0.0,!=1.0.1")
+        True
+        >>> Version("1.2.3") in SpecifierSet(">=1.0.0,!=1.0.1")
+        True
+        >>> "1.0.1" in SpecifierSet(">=1.0.0,!=1.0.1")
+        False
+        >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1")
+        False
+        >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True)
+        True
+        """
+        return self.contains(item)
+
+    def contains(
+        self,
+        item: UnparsedVersion,
+        prereleases: bool | None = None,
+        installed: bool | None = None,
+    ) -> bool:
+        """Return whether or not the item is contained in this SpecifierSet.
+
+        :param item:
+            The item to check for, which can be a version string or a
+            :class:`Version` instance.
+        :param prereleases:
+            Whether or not to match prereleases with this SpecifierSet. If set to
+            ``None`` (the default), it uses :attr:`prereleases` to determine
+            whether or not prereleases are allowed.
+        :param installed:
+            If truthy and ``item`` is a prerelease, the comparison is done
+            using only ``item``'s base version (its prerelease suffix is
+            ignored).
+
+        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.2.3")
+        True
+        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains(Version("1.2.3"))
+        True
+        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.0.1")
+        False
+        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1")
+        False
+        >>> SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True).contains("1.3.0a1")
+        True
+        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1", prereleases=True)
+        True
+        """
+        # Ensure that our item is a Version instance.
+        if not isinstance(item, Version):
+            item = Version(item)
+
+        # Determine if we're forcing a prerelease or not, if we're not forcing
+        # one for this particular filter call, then we'll use whatever the
+        # SpecifierSet thinks for whether or not we should support prereleases.
+        if prereleases is None:
+            prereleases = self.prereleases
+
+        # We can determine if we're going to allow pre-releases by looking to
+        # see if any of the underlying items supports them. If none of them do
+        # and this item is a pre-release then we do not allow it and we can
+        # short circuit that here.
+        # Note: This means that 1.0.dev1 would not be contained in something
+        #       like >=1.0.devabc however it would be in >=1.0.devabc,>0.0.dev0
+        if not prereleases and item.is_prerelease:
+            return False
+
+        if installed and item.is_prerelease:
+            item = Version(item.base_version)
+
+        # We simply dispatch to the underlying specs here to make sure that the
+        # given version is contained within all of them.
+        # Note: This use of all() here means that an empty set of specifiers
+        #       will always return True, this is an explicit design decision.
+        return all(s.contains(item, prereleases=prereleases) for s in self._specs)
+
+    def filter(
+        self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None
+    ) -> Iterator[UnparsedVersionVar]:
+        """Filter items in the given iterable, that match the specifiers in this set.
+
+        :param iterable:
+            An iterable that can contain version strings and :class:`Version` instances.
+            The items in the iterable will be filtered according to the specifier.
+        :param prereleases:
+            Whether or not to allow prereleases in the returned iterator. If set to
+            ``None`` (the default), it will intelligently decide whether to allow
+            prereleases or not (based on the :attr:`prereleases` attribute, and
+            whether the only versions matching are prereleases).
+
+        This method is smarter than just ``filter(SpecifierSet(...).contains, [...])``
+        because it implements the rule from :pep:`440` that a prerelease item
+        SHOULD be accepted if no other versions match the given specifier.
+
+        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", "1.5a1"]))
+        ['1.3']
+        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", Version("1.4")]))
+        ['1.3', <Version('1.4')>]
+        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.5a1"]))
+        []
+        >>> list(SpecifierSet(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True))
+        ['1.3', '1.5a1']
+        >>> list(SpecifierSet(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"]))
+        ['1.3', '1.5a1']
+
+        An "empty" SpecifierSet will filter items based on the presence of prerelease
+        versions in the set.
+
+        >>> list(SpecifierSet("").filter(["1.3", "1.5a1"]))
+        ['1.3']
+        >>> list(SpecifierSet("").filter(["1.5a1"]))
+        ['1.5a1']
+        >>> list(SpecifierSet("", prereleases=True).filter(["1.3", "1.5a1"]))
+        ['1.3', '1.5a1']
+        >>> list(SpecifierSet("").filter(["1.3", "1.5a1"], prereleases=True))
+        ['1.3', '1.5a1']
+        """
+        # Determine if we're forcing a prerelease or not, if we're not forcing
+        # one for this particular filter call, then we'll use whatever the
+        # SpecifierSet thinks for whether or not we should support prereleases.
+        if prereleases is None:
+            prereleases = self.prereleases
+
+        # If we have any specifiers, then we want to wrap our iterable in the
+        # filter method for each one, this will act as a logical AND amongst
+        # each specifier.
+        if self._specs:
+            for spec in self._specs:
+                iterable = spec.filter(iterable, prereleases=bool(prereleases))
+            return iter(iterable)
+        # If we do not have any specifiers, then we need to have a rough filter
+        # which will filter out any pre-releases, unless there are no final
+        # releases.
+        else:
+            filtered: list[UnparsedVersionVar] = []
+            found_prereleases: list[UnparsedVersionVar] = []
+
+            for item in iterable:
+                parsed_version = _coerce_version(item)
+
+                # Store any item which is a pre-release for later unless we've
+                # already found a final version or we are accepting prereleases
+                if parsed_version.is_prerelease and not prereleases:
+                    if not filtered:
+                        found_prereleases.append(item)
+                else:
+                    filtered.append(item)
+
+            # If we've found no items except for pre-releases, then we'll go
+            # ahead and use the pre-releases
+            if not filtered and found_prereleases and prereleases is None:
+                return iter(found_prereleases)
+
+            return iter(filtered)
diff --git a/.venv/Lib/site-packages/packaging/tags.py b/.venv/Lib/site-packages/packaging/tags.py
new file mode 100644
index 0000000000000000000000000000000000000000..f5903402abb5a0aed37bb23914f678ef7e34a554
--- /dev/null
+++ b/.venv/Lib/site-packages/packaging/tags.py
@@ -0,0 +1,617 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import logging
+import platform
+import re
+import struct
+import subprocess
+import sys
+import sysconfig
+from importlib.machinery import EXTENSION_SUFFIXES
+from typing import (
+    Iterable,
+    Iterator,
+    Sequence,
+    Tuple,
+    cast,
+)
+
+from . import _manylinux, _musllinux
+
+logger = logging.getLogger(__name__)
+
+PythonVersion = Sequence[int]
+AppleVersion = Tuple[int, int]
+
+INTERPRETER_SHORT_NAMES: dict[str, str] = {
+    "python": "py",  # Generic.
+    "cpython": "cp",
+    "pypy": "pp",
+    "ironpython": "ip",
+    "jython": "jy",
+}
+
+
+_32_BIT_INTERPRETER = struct.calcsize("P") == 4
+
+
+class Tag:
+    """
+    A representation of the tag triple for a wheel.
+
+    Instances are considered immutable and thus are hashable. Equality checking
+    is also supported.
+    """
+
+    __slots__ = ["_abi", "_hash", "_interpreter", "_platform"]
+
+    def __init__(self, interpreter: str, abi: str, platform: str) -> None:
+        self._interpreter = interpreter.lower()
+        self._abi = abi.lower()
+        self._platform = platform.lower()
+        # The __hash__ of every single element in a Set[Tag] will be evaluated each time
+        # that a set calls its `.disjoint()` method, which may be called hundreds of
+        # times when scanning a page of links for packages with tags matching that
+        # Set[Tag]. Pre-computing the value here produces significant speedups for
+        # downstream consumers.
+        self._hash = hash((self._interpreter, self._abi, self._platform))
+
+    @property
+    def interpreter(self) -> str:
+        return self._interpreter
+
+    @property
+    def abi(self) -> str:
+        return self._abi
+
+    @property
+    def platform(self) -> str:
+        return self._platform
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, Tag):
+            return NotImplemented
+
+        return (
+            (self._hash == other._hash)  # Short-circuit ASAP for perf reasons.
+            and (self._platform == other._platform)
+            and (self._abi == other._abi)
+            and (self._interpreter == other._interpreter)
+        )
+
+    def __hash__(self) -> int:
+        return self._hash
+
+    def __str__(self) -> str:
+        return f"{self._interpreter}-{self._abi}-{self._platform}"
+
+    def __repr__(self) -> str:
+        return f"<{self} @ {id(self)}>"
+
+
+def parse_tag(tag: str) -> frozenset[Tag]:
+    """
+    Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances.
+
+    Returning a set is required due to the possibility that the tag is a
+    compressed tag set.
+    """
+    tags = set()
+    interpreters, abis, platforms = tag.split("-")
+    for interpreter in interpreters.split("."):
+        for abi in abis.split("."):
+            for platform_ in platforms.split("."):
+                tags.add(Tag(interpreter, abi, platform_))
+    return frozenset(tags)
+
+
+def _get_config_var(name: str, warn: bool = False) -> int | str | None:
+    value: int | str | None = sysconfig.get_config_var(name)
+    if value is None and warn:
+        logger.debug(
+            "Config variable '%s' is unset, Python ABI tag may be incorrect", name
+        )
+    return value
+
+
+def _normalize_string(string: str) -> str:
+    return string.replace(".", "_").replace("-", "_").replace(" ", "_")
+
+
+def _is_threaded_cpython(abis: list[str]) -> bool:
+    """
+    Determine if the ABI corresponds to a threaded (`--disable-gil`) build.
+
+    The threaded builds are indicated by a "t" in the abiflags.
+    """
+    if len(abis) == 0:
+        return False
+    # expect e.g., cp313
+    m = re.match(r"cp\d+(.*)", abis[0])
+    if not m:
+        return False
+    abiflags = m.group(1)
+    return "t" in abiflags
+
+
+def _abi3_applies(python_version: PythonVersion, threading: bool) -> bool:
+    """
+    Determine if the Python version supports abi3.
+
+    PEP 384 was first implemented in Python 3.2. The threaded (`--disable-gil`)
+    builds do not support abi3.
+    """
+    return len(python_version) > 1 and tuple(python_version) >= (3, 2) and not threading
+
+
+def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> list[str]:
+    py_version = tuple(py_version)  # To allow for version comparison.
+    abis = []
+    version = _version_nodot(py_version[:2])
+    threading = debug = pymalloc = ucs4 = ""
+    with_debug = _get_config_var("Py_DEBUG", warn)
+    has_refcount = hasattr(sys, "gettotalrefcount")
+    # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
+    # extension modules is the best option.
+    # https://github.com/pypa/pip/issues/3383#issuecomment-173267692
+    has_ext = "_d.pyd" in EXTENSION_SUFFIXES
+    if with_debug or (with_debug is None and (has_refcount or has_ext)):
+        debug = "d"
+    if py_version >= (3, 13) and _get_config_var("Py_GIL_DISABLED", warn):
+        threading = "t"
+    if py_version < (3, 8):
+        with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
+        if with_pymalloc or with_pymalloc is None:
+            pymalloc = "m"
+        if py_version < (3, 3):
+            unicode_size = _get_config_var("Py_UNICODE_SIZE", warn)
+            if unicode_size == 4 or (
+                unicode_size is None and sys.maxunicode == 0x10FFFF
+            ):
+                ucs4 = "u"
+    elif debug:
+        # Debug builds can also load "normal" extension modules.
+        # We can also assume no UCS-4 or pymalloc requirement.
+        abis.append(f"cp{version}{threading}")
+    abis.insert(0, f"cp{version}{threading}{debug}{pymalloc}{ucs4}")
+    return abis
+
+
+def cpython_tags(
+    python_version: PythonVersion | None = None,
+    abis: Iterable[str] | None = None,
+    platforms: Iterable[str] | None = None,
+    *,
+    warn: bool = False,
+) -> Iterator[Tag]:
+    """
+    Yields the tags for a CPython interpreter.
+
+    The tags consist of:
+    - cp<python_version>-<abi>-<platform>
+    - cp<python_version>-abi3-<platform>
+    - cp<python_version>-none-<platform>
+    - cp<less than python_version>-abi3-<platform>  # Older Python versions down to 3.2.
+
+    If python_version only specifies a major version then user-provided ABIs and
+    the 'none' ABItag will be used.
+
+    If 'abi3' or 'none' are specified in 'abis' then they will be yielded at
+    their normal position and not at the beginning.
+    """
+    if not python_version:
+        python_version = sys.version_info[:2]
+
+    interpreter = f"cp{_version_nodot(python_version[:2])}"
+
+    if abis is None:
+        if len(python_version) > 1:
+            abis = _cpython_abis(python_version, warn)
+        else:
+            abis = []
+    abis = list(abis)
+    # 'abi3' and 'none' are explicitly handled later.
+    for explicit_abi in ("abi3", "none"):
+        try:
+            abis.remove(explicit_abi)
+        except ValueError:
+            pass
+
+    platforms = list(platforms or platform_tags())
+    for abi in abis:
+        for platform_ in platforms:
+            yield Tag(interpreter, abi, platform_)
+
+    threading = _is_threaded_cpython(abis)
+    use_abi3 = _abi3_applies(python_version, threading)
+    if use_abi3:
+        yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms)
+    yield from (Tag(interpreter, "none", platform_) for platform_ in platforms)
+
+    if use_abi3:
+        for minor_version in range(python_version[1] - 1, 1, -1):
+            for platform_ in platforms:
+                version = _version_nodot((python_version[0], minor_version))
+                interpreter = f"cp{version}"
+                yield Tag(interpreter, "abi3", platform_)
+
+
+def _generic_abi() -> list[str]:
+    """
+    Return the ABI tag based on EXT_SUFFIX.
+    """
+    # The following are examples of `EXT_SUFFIX`.
+    # We want to keep the parts which are related to the ABI and remove the
+    # parts which are related to the platform:
+    # - linux:   '.cpython-310-x86_64-linux-gnu.so' => cp310
+    # - mac:     '.cpython-310-darwin.so'           => cp310
+    # - win:     '.cp310-win_amd64.pyd'             => cp310
+    # - win:     '.pyd'                             => cp37 (uses _cpython_abis())
+    # - pypy:    '.pypy38-pp73-x86_64-linux-gnu.so' => pypy38_pp73
+    # - graalpy: '.graalpy-38-native-x86_64-darwin.dylib'
+    #                                               => graalpy_38_native
+
+    ext_suffix = _get_config_var("EXT_SUFFIX", warn=True)
+    if not isinstance(ext_suffix, str) or ext_suffix[0] != ".":
+        raise SystemError("invalid sysconfig.get_config_var('EXT_SUFFIX')")
+    parts = ext_suffix.split(".")
+    if len(parts) < 3:
+        # CPython3.7 and earlier uses ".pyd" on Windows.
+        return _cpython_abis(sys.version_info[:2])
+    soabi = parts[1]
+    if soabi.startswith("cpython"):
+        # non-windows
+        abi = "cp" + soabi.split("-")[1]
+    elif soabi.startswith("cp"):
+        # windows
+        abi = soabi.split("-")[0]
+    elif soabi.startswith("pypy"):
+        abi = "-".join(soabi.split("-")[:2])
+    elif soabi.startswith("graalpy"):
+        abi = "-".join(soabi.split("-")[:3])
+    elif soabi:
+        # pyston, ironpython, others?
+        abi = soabi
+    else:
+        return []
+    return [_normalize_string(abi)]
+
+
+def generic_tags(
+    interpreter: str | None = None,
+    abis: Iterable[str] | None = None,
+    platforms: Iterable[str] | None = None,
+    *,
+    warn: bool = False,
+) -> Iterator[Tag]:
+    """
+    Yields the tags for a generic interpreter.
+
+    The tags consist of:
+    - <interpreter>-<abi>-<platform>
+
+    The "none" ABI will be added if it was not explicitly provided.
+    """
+    if not interpreter:
+        interp_name = interpreter_name()
+        interp_version = interpreter_version(warn=warn)
+        interpreter = "".join([interp_name, interp_version])
+    if abis is None:
+        abis = _generic_abi()
+    else:
+        abis = list(abis)
+    platforms = list(platforms or platform_tags())
+    if "none" not in abis:
+        abis.append("none")
+    for abi in abis:
+        for platform_ in platforms:
+            yield Tag(interpreter, abi, platform_)
+
+
+def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]:
+    """
+    Yields Python versions in descending order.
+
+    After the latest version, the major-only version will be yielded, and then
+    all previous versions of that major version.
+    """
+    if len(py_version) > 1:
+        yield f"py{_version_nodot(py_version[:2])}"
+    yield f"py{py_version[0]}"
+    if len(py_version) > 1:
+        for minor in range(py_version[1] - 1, -1, -1):
+            yield f"py{_version_nodot((py_version[0], minor))}"
+
+
+def compatible_tags(
+    python_version: PythonVersion | None = None,
+    interpreter: str | None = None,
+    platforms: Iterable[str] | None = None,
+) -> Iterator[Tag]:
+    """
+    Yields the sequence of tags that are compatible with a specific version of Python.
+
+    The tags consist of:
+    - py*-none-<platform>
+    - <interpreter>-none-any  # ... if `interpreter` is provided.
+    - py*-none-any
+    """
+    if not python_version:
+        python_version = sys.version_info[:2]
+    platforms = list(platforms or platform_tags())
+    for version in _py_interpreter_range(python_version):
+        for platform_ in platforms:
+            yield Tag(version, "none", platform_)
+    if interpreter:
+        yield Tag(interpreter, "none", "any")
+    for version in _py_interpreter_range(python_version):
+        yield Tag(version, "none", "any")
+
+
+def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str:
+    if not is_32bit:
+        return arch
+
+    if arch.startswith("ppc"):
+        return "ppc"
+
+    return "i386"
+
+
+def _mac_binary_formats(version: AppleVersion, cpu_arch: str) -> list[str]:
+    formats = [cpu_arch]
+    if cpu_arch == "x86_64":
+        if version < (10, 4):
+            return []
+        formats.extend(["intel", "fat64", "fat32"])
+
+    elif cpu_arch == "i386":
+        if version < (10, 4):
+            return []
+        formats.extend(["intel", "fat32", "fat"])
+
+    elif cpu_arch == "ppc64":
+        # TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
+        if version > (10, 5) or version < (10, 4):
+            return []
+        formats.append("fat64")
+
+    elif cpu_arch == "ppc":
+        if version > (10, 6):
+            return []
+        formats.extend(["fat32", "fat"])
+
+    if cpu_arch in {"arm64", "x86_64"}:
+        formats.append("universal2")
+
+    if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}:
+        formats.append("universal")
+
+    return formats
+
+
+def mac_platforms(
+    version: AppleVersion | None = None, arch: str | None = None
+) -> Iterator[str]:
+    """
+    Yields the platform tags for a macOS system.
+
+    The `version` parameter is a two-item tuple specifying the macOS version to
+    generate platform tags for. The `arch` parameter is the CPU architecture to
+    generate platform tags for. Both parameters default to the appropriate value
+    for the current system.
+    """
+    version_str, _, cpu_arch = platform.mac_ver()
+    if version is None:
+        version = cast("AppleVersion", tuple(map(int, version_str.split(".")[:2])))
+        if version == (10, 16):
+            # When built against an older macOS SDK, Python will report macOS 10.16
+            # instead of the real version.
+            version_str = subprocess.run(
+                [
+                    sys.executable,
+                    "-sS",
+                    "-c",
+                    "import platform; print(platform.mac_ver()[0])",
+                ],
+                check=True,
+                env={"SYSTEM_VERSION_COMPAT": "0"},
+                stdout=subprocess.PIPE,
+                text=True,
+            ).stdout
+            version = cast("AppleVersion", tuple(map(int, version_str.split(".")[:2])))
+    else:
+        version = version
+    if arch is None:
+        arch = _mac_arch(cpu_arch)
+    else:
+        arch = arch
+
+    if (10, 0) <= version and version < (11, 0):
+        # Prior to Mac OS 11, each yearly release of Mac OS bumped the
+        # "minor" version number.  The major version was always 10.
+        major_version = 10
+        for minor_version in range(version[1], -1, -1):
+            compat_version = major_version, minor_version
+            binary_formats = _mac_binary_formats(compat_version, arch)
+            for binary_format in binary_formats:
+                yield f"macosx_{major_version}_{minor_version}_{binary_format}"
+
+    if version >= (11, 0):
+        # Starting with Mac OS 11, each yearly release bumps the major version
+        # number.   The minor versions are now the midyear updates.
+        minor_version = 0
+        for major_version in range(version[0], 10, -1):
+            compat_version = major_version, minor_version
+            binary_formats = _mac_binary_formats(compat_version, arch)
+            for binary_format in binary_formats:
+                yield f"macosx_{major_version}_{minor_version}_{binary_format}"
+
+    if version >= (11, 0):
+        # Mac OS 11 on x86_64 is compatible with binaries from previous releases.
+        # Arm64 support was introduced in 11.0, so no Arm binaries from previous
+        # releases exist.
+        #
+        # However, the "universal2" binary format can have a
+        # macOS version earlier than 11.0 when the x86_64 part of the binary supports
+        # that version of macOS.
+        major_version = 10
+        if arch == "x86_64":
+            for minor_version in range(16, 3, -1):
+                compat_version = major_version, minor_version
+                binary_formats = _mac_binary_formats(compat_version, arch)
+                for binary_format in binary_formats:
+                    yield f"macosx_{major_version}_{minor_version}_{binary_format}"
+        else:
+            for minor_version in range(16, 3, -1):
+                compat_version = major_version, minor_version
+                binary_format = "universal2"
+                yield f"macosx_{major_version}_{minor_version}_{binary_format}"
+
+
+def ios_platforms(
+    version: AppleVersion | None = None, multiarch: str | None = None
+) -> Iterator[str]:
+    """
+    Yields the platform tags for an iOS system.
+
+    :param version: A two-item tuple specifying the iOS version to generate
+        platform tags for. Defaults to the current iOS version.
+    :param multiarch: The CPU architecture+ABI to generate platform tags for -
+        (the value used by `sys.implementation._multiarch` e.g.,
+        `arm64_iphoneos` or `x84_64_iphonesimulator`). Defaults to the current
+        multiarch value.
+    """
+    if version is None:
+        # if iOS is the current platform, ios_ver *must* be defined. However,
+        # it won't exist for CPython versions before 3.13, which causes a mypy
+        # error.
+        _, release, _, _ = platform.ios_ver()  # type: ignore[attr-defined, unused-ignore]
+        version = cast("AppleVersion", tuple(map(int, release.split(".")[:2])))
+
+    if multiarch is None:
+        multiarch = sys.implementation._multiarch
+    multiarch = multiarch.replace("-", "_")
+
+    ios_platform_template = "ios_{major}_{minor}_{multiarch}"
+
+    # Consider any iOS major.minor version from the version requested, down to
+    # 12.0. 12.0 is the first iOS version that is known to have enough features
+    # to support CPython. Consider every possible minor release up to X.9. There
+    # highest the minor has ever gone is 8 (14.8 and 15.8) but having some extra
+    # candidates that won't ever match doesn't really hurt, and it saves us from
+    # having to keep an explicit list of known iOS versions in the code. Return
+    # the results descending order of version number.
+
+    # If the requested major version is less than 12, there won't be any matches.
+    if version[0] < 12:
+        return
+
+    # Consider the actual X.Y version that was requested.
+    yield ios_platform_template.format(
+        major=version[0], minor=version[1], multiarch=multiarch
+    )
+
+    # Consider every minor version from X.0 to the minor version prior to the
+    # version requested by the platform.
+    for minor in range(version[1] - 1, -1, -1):
+        yield ios_platform_template.format(
+            major=version[0], minor=minor, multiarch=multiarch
+        )
+
+    for major in range(version[0] - 1, 11, -1):
+        for minor in range(9, -1, -1):
+            yield ios_platform_template.format(
+                major=major, minor=minor, multiarch=multiarch
+            )
+
+
+def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]:
+    linux = _normalize_string(sysconfig.get_platform())
+    if not linux.startswith("linux_"):
+        # we should never be here, just yield the sysconfig one and return
+        yield linux
+        return
+    if is_32bit:
+        if linux == "linux_x86_64":
+            linux = "linux_i686"
+        elif linux == "linux_aarch64":
+            linux = "linux_armv8l"
+    _, arch = linux.split("_", 1)
+    archs = {"armv8l": ["armv8l", "armv7l"]}.get(arch, [arch])
+    yield from _manylinux.platform_tags(archs)
+    yield from _musllinux.platform_tags(archs)
+    for arch in archs:
+        yield f"linux_{arch}"
+
+
+def _generic_platforms() -> Iterator[str]:
+    yield _normalize_string(sysconfig.get_platform())
+
+
+def platform_tags() -> Iterator[str]:
+    """
+    Provides the platform tags for this installation.
+    """
+    if platform.system() == "Darwin":
+        return mac_platforms()
+    elif platform.system() == "iOS":
+        return ios_platforms()
+    elif platform.system() == "Linux":
+        return _linux_platforms()
+    else:
+        return _generic_platforms()
+
+
+def interpreter_name() -> str:
+    """
+    Returns the name of the running interpreter.
+
+    Some implementations have a reserved, two-letter abbreviation which will
+    be returned when appropriate.
+    """
+    name = sys.implementation.name
+    return INTERPRETER_SHORT_NAMES.get(name) or name
+
+
+def interpreter_version(*, warn: bool = False) -> str:
+    """
+    Returns the version of the running interpreter.
+    """
+    version = _get_config_var("py_version_nodot", warn=warn)
+    if version:
+        version = str(version)
+    else:
+        version = _version_nodot(sys.version_info[:2])
+    return version
+
+
+def _version_nodot(version: PythonVersion) -> str:
+    return "".join(map(str, version))
+
+
+def sys_tags(*, warn: bool = False) -> Iterator[Tag]:
+    """
+    Returns the sequence of tag triples for the running interpreter.
+
+    The order of the sequence corresponds to priority order for the
+    interpreter, from most to least important.
+    """
+
+    interp_name = interpreter_name()
+    if interp_name == "cp":
+        yield from cpython_tags(warn=warn)
+    else:
+        yield from generic_tags()
+
+    if interp_name == "pp":
+        interp = "pp3"
+    elif interp_name == "cp":
+        interp = "cp" + interpreter_version(warn=warn)
+    else:
+        interp = None
+    yield from compatible_tags(interpreter=interp)
diff --git a/.venv/Lib/site-packages/packaging/utils.py b/.venv/Lib/site-packages/packaging/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..23450953df74eccd9c13cd2a955ce09d1f968565
--- /dev/null
+++ b/.venv/Lib/site-packages/packaging/utils.py
@@ -0,0 +1,163 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import functools
+import re
+from typing import NewType, Tuple, Union, cast
+
+from .tags import Tag, parse_tag
+from .version import InvalidVersion, Version, _TrimmedRelease
+
+BuildTag = Union[Tuple[()], Tuple[int, str]]
+NormalizedName = NewType("NormalizedName", str)
+
+
+class InvalidName(ValueError):
+    """
+    An invalid distribution name; users should refer to the packaging user guide.
+    """
+
+
+class InvalidWheelFilename(ValueError):
+    """
+    An invalid wheel filename was found, users should refer to PEP 427.
+    """
+
+
+class InvalidSdistFilename(ValueError):
+    """
+    An invalid sdist filename was found, users should refer to the packaging user guide.
+    """
+
+
+# Core metadata spec for `Name`
+_validate_regex = re.compile(
+    r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE
+)
+_canonicalize_regex = re.compile(r"[-_.]+")
+_normalized_regex = re.compile(r"^([a-z0-9]|[a-z0-9]([a-z0-9-](?!--))*[a-z0-9])$")
+# PEP 427: The build number must start with a digit.
+_build_tag_regex = re.compile(r"(\d+)(.*)")
+
+
+def canonicalize_name(name: str, *, validate: bool = False) -> NormalizedName:
+    if validate and not _validate_regex.match(name):
+        raise InvalidName(f"name is invalid: {name!r}")
+    # This is taken from PEP 503.
+    value = _canonicalize_regex.sub("-", name).lower()
+    return cast(NormalizedName, value)
+
+
+def is_normalized_name(name: str) -> bool:
+    return _normalized_regex.match(name) is not None
+
+
+@functools.singledispatch
+def canonicalize_version(
+    version: Version | str, *, strip_trailing_zero: bool = True
+) -> str:
+    """
+    Return a canonical form of a version as a string.
+
+    >>> canonicalize_version('1.0.1')
+    '1.0.1'
+
+    Per PEP 625, versions may have multiple canonical forms, differing
+    only by trailing zeros.
+
+    >>> canonicalize_version('1.0.0')
+    '1'
+    >>> canonicalize_version('1.0.0', strip_trailing_zero=False)
+    '1.0.0'
+
+    Invalid versions are returned unaltered.
+
+    >>> canonicalize_version('foo bar baz')
+    'foo bar baz'
+    """
+    return str(_TrimmedRelease(str(version)) if strip_trailing_zero else version)
+
+
+@canonicalize_version.register
+def _(version: str, *, strip_trailing_zero: bool = True) -> str:
+    try:
+        parsed = Version(version)
+    except InvalidVersion:
+        # Legacy versions cannot be normalized
+        return version
+    return canonicalize_version(parsed, strip_trailing_zero=strip_trailing_zero)
+
+
+def parse_wheel_filename(
+    filename: str,
+) -> tuple[NormalizedName, Version, BuildTag, frozenset[Tag]]:
+    if not filename.endswith(".whl"):
+        raise InvalidWheelFilename(
+            f"Invalid wheel filename (extension must be '.whl'): {filename!r}"
+        )
+
+    filename = filename[:-4]
+    dashes = filename.count("-")
+    if dashes not in (4, 5):
+        raise InvalidWheelFilename(
+            f"Invalid wheel filename (wrong number of parts): {filename!r}"
+        )
+
+    parts = filename.split("-", dashes - 2)
+    name_part = parts[0]
+    # See PEP 427 for the rules on escaping the project name.
+    if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None:
+        raise InvalidWheelFilename(f"Invalid project name: {filename!r}")
+    name = canonicalize_name(name_part)
+
+    try:
+        version = Version(parts[1])
+    except InvalidVersion as e:
+        raise InvalidWheelFilename(
+            f"Invalid wheel filename (invalid version): {filename!r}"
+        ) from e
+
+    if dashes == 5:
+        build_part = parts[2]
+        build_match = _build_tag_regex.match(build_part)
+        if build_match is None:
+            raise InvalidWheelFilename(
+                f"Invalid build number: {build_part} in {filename!r}"
+            )
+        build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2)))
+    else:
+        build = ()
+    tags = parse_tag(parts[-1])
+    return (name, version, build, tags)
+
+
+def parse_sdist_filename(filename: str) -> tuple[NormalizedName, Version]:
+    if filename.endswith(".tar.gz"):
+        file_stem = filename[: -len(".tar.gz")]
+    elif filename.endswith(".zip"):
+        file_stem = filename[: -len(".zip")]
+    else:
+        raise InvalidSdistFilename(
+            f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):"
+            f" {filename!r}"
+        )
+
+    # We are requiring a PEP 440 version, which cannot contain dashes,
+    # so we split on the last dash.
+    name_part, sep, version_part = file_stem.rpartition("-")
+    if not sep:
+        raise InvalidSdistFilename(f"Invalid sdist filename: {filename!r}")
+
+    name = canonicalize_name(name_part)
+
+    try:
+        version = Version(version_part)
+    except InvalidVersion as e:
+        raise InvalidSdistFilename(
+            f"Invalid sdist filename (invalid version): {filename!r}"
+        ) from e
+
+    return (name, version)
diff --git a/.venv/Lib/site-packages/packaging/version.py b/.venv/Lib/site-packages/packaging/version.py
new file mode 100644
index 0000000000000000000000000000000000000000..c9bbda20e463b8d9389ecd65f74af33810a02bdd
--- /dev/null
+++ b/.venv/Lib/site-packages/packaging/version.py
@@ -0,0 +1,582 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+"""
+.. testsetup::
+
+    from packaging.version import parse, Version
+"""
+
+from __future__ import annotations
+
+import itertools
+import re
+from typing import Any, Callable, NamedTuple, SupportsInt, Tuple, Union
+
+from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
+
+__all__ = ["VERSION_PATTERN", "InvalidVersion", "Version", "parse"]
+
+LocalType = Tuple[Union[int, str], ...]
+
+CmpPrePostDevType = Union[InfinityType, NegativeInfinityType, Tuple[str, int]]
+CmpLocalType = Union[
+    NegativeInfinityType,
+    Tuple[Union[Tuple[int, str], Tuple[NegativeInfinityType, Union[int, str]]], ...],
+]
+CmpKey = Tuple[
+    int,
+    Tuple[int, ...],
+    CmpPrePostDevType,
+    CmpPrePostDevType,
+    CmpPrePostDevType,
+    CmpLocalType,
+]
+VersionComparisonMethod = Callable[[CmpKey, CmpKey], bool]
+
+
+class _Version(NamedTuple):
+    epoch: int
+    release: tuple[int, ...]
+    dev: tuple[str, int] | None
+    pre: tuple[str, int] | None
+    post: tuple[str, int] | None
+    local: LocalType | None
+
+
+def parse(version: str) -> Version:
+    """Parse the given version string.
+
+    >>> parse('1.0.dev1')
+    <Version('1.0.dev1')>
+
+    :param version: The version string to parse.
+    :raises InvalidVersion: When the version string is not a valid version.
+    """
+    return Version(version)
+
+
+class InvalidVersion(ValueError):
+    """Raised when a version string is not a valid version.
+
+    >>> Version("invalid")
+    Traceback (most recent call last):
+        ...
+    packaging.version.InvalidVersion: Invalid version: 'invalid'
+    """
+
+
+class _BaseVersion:
+    _key: tuple[Any, ...]
+
+    def __hash__(self) -> int:
+        return hash(self._key)
+
+    # Please keep the duplicated `isinstance` check
+    # in the six comparisons hereunder
+    # unless you find a way to avoid adding overhead function calls.
+    def __lt__(self, other: _BaseVersion) -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key < other._key
+
+    def __le__(self, other: _BaseVersion) -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key <= other._key
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key == other._key
+
+    def __ge__(self, other: _BaseVersion) -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key >= other._key
+
+    def __gt__(self, other: _BaseVersion) -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key > other._key
+
+    def __ne__(self, other: object) -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key != other._key
+
+
+# Deliberately not anchored to the start and end of the string, to make it
+# easier for 3rd party code to reuse
+_VERSION_PATTERN = r"""
+    v?
+    (?:
+        (?:(?P<epoch>[0-9]+)!)?                           # epoch
+        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
+        (?P<pre>                                          # pre-release
+            [-_\.]?
+            (?P<pre_l>alpha|a|beta|b|preview|pre|c|rc)
+            [-_\.]?
+            (?P<pre_n>[0-9]+)?
+        )?
+        (?P<post>                                         # post release
+            (?:-(?P<post_n1>[0-9]+))
+            |
+            (?:
+                [-_\.]?
+                (?P<post_l>post|rev|r)
+                [-_\.]?
+                (?P<post_n2>[0-9]+)?
+            )
+        )?
+        (?P<dev>                                          # dev release
+            [-_\.]?
+            (?P<dev_l>dev)
+            [-_\.]?
+            (?P<dev_n>[0-9]+)?
+        )?
+    )
+    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
+"""
+
+VERSION_PATTERN = _VERSION_PATTERN
+"""
+A string containing the regular expression used to match a valid version.
+
+The pattern is not anchored at either end, and is intended for embedding in larger
+expressions (for example, matching a version number as part of a file name). The
+regular expression should be compiled with the ``re.VERBOSE`` and ``re.IGNORECASE``
+flags set.
+
+:meta hide-value:
+"""
+
+
+class Version(_BaseVersion):
+    """This class abstracts handling of a project's versions.
+
+    A :class:`Version` instance is comparison aware and can be compared and
+    sorted using the standard Python interfaces.
+
+    >>> v1 = Version("1.0a5")
+    >>> v2 = Version("1.0")
+    >>> v1
+    <Version('1.0a5')>
+    >>> v2
+    <Version('1.0')>
+    >>> v1 < v2
+    True
+    >>> v1 == v2
+    False
+    >>> v1 > v2
+    False
+    >>> v1 >= v2
+    False
+    >>> v1 <= v2
+    True
+    """
+
+    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
+    _key: CmpKey
+
+    def __init__(self, version: str) -> None:
+        """Initialize a Version object.
+
+        :param version:
+            The string representation of a version which will be parsed and normalized
+            before use.
+        :raises InvalidVersion:
+            If the ``version`` does not conform to PEP 440 in any way then this
+            exception will be raised.
+        """
+
+        # Validate the version and parse it into pieces
+        match = self._regex.search(version)
+        if not match:
+            raise InvalidVersion(f"Invalid version: {version!r}")
+
+        # Store the parsed out pieces of the version
+        self._version = _Version(
+            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
+            release=tuple(int(i) for i in match.group("release").split(".")),
+            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
+            post=_parse_letter_version(
+                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
+            ),
+            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
+            local=_parse_local_version(match.group("local")),
+        )
+
+        # Generate a key which will be used for sorting
+        self._key = _cmpkey(
+            self._version.epoch,
+            self._version.release,
+            self._version.pre,
+            self._version.post,
+            self._version.dev,
+            self._version.local,
+        )
+
+    def __repr__(self) -> str:
+        """A representation of the Version that shows all internal state.
+
+        >>> Version('1.0.0')
+        <Version('1.0.0')>
+        """
+        return f"<Version('{self}')>"
+
+    def __str__(self) -> str:
+        """A string representation of the version that can be round-tripped.
+
+        >>> str(Version("1.0a5"))
+        '1.0a5'
+        """
+        parts = []
+
+        # Epoch
+        if self.epoch != 0:
+            parts.append(f"{self.epoch}!")
+
+        # Release segment
+        parts.append(".".join(str(x) for x in self.release))
+
+        # Pre-release
+        if self.pre is not None:
+            parts.append("".join(str(x) for x in self.pre))
+
+        # Post-release
+        if self.post is not None:
+            parts.append(f".post{self.post}")
+
+        # Development release
+        if self.dev is not None:
+            parts.append(f".dev{self.dev}")
+
+        # Local version segment
+        if self.local is not None:
+            parts.append(f"+{self.local}")
+
+        return "".join(parts)
+
+    @property
+    def epoch(self) -> int:
+        """The epoch of the version.
+
+        >>> Version("2.0.0").epoch
+        0
+        >>> Version("1!2.0.0").epoch
+        1
+        """
+        return self._version.epoch
+
+    @property
+    def release(self) -> tuple[int, ...]:
+        """The components of the "release" segment of the version.
+
+        >>> Version("1.2.3").release
+        (1, 2, 3)
+        >>> Version("2.0.0").release
+        (2, 0, 0)
+        >>> Version("1!2.0.0.post0").release
+        (2, 0, 0)
+
+        Includes trailing zeroes but not the epoch or any pre-release / development /
+        post-release suffixes.
+        """
+        return self._version.release
+
+    @property
+    def pre(self) -> tuple[str, int] | None:
+        """The pre-release segment of the version.
+
+        >>> print(Version("1.2.3").pre)
+        None
+        >>> Version("1.2.3a1").pre
+        ('a', 1)
+        >>> Version("1.2.3b1").pre
+        ('b', 1)
+        >>> Version("1.2.3rc1").pre
+        ('rc', 1)
+        """
+        return self._version.pre
+
+    @property
+    def post(self) -> int | None:
+        """The post-release number of the version.
+
+        >>> print(Version("1.2.3").post)
+        None
+        >>> Version("1.2.3.post1").post
+        1
+        """
+        return self._version.post[1] if self._version.post else None
+
+    @property
+    def dev(self) -> int | None:
+        """The development number of the version.
+
+        >>> print(Version("1.2.3").dev)
+        None
+        >>> Version("1.2.3.dev1").dev
+        1
+        """
+        return self._version.dev[1] if self._version.dev else None
+
+    @property
+    def local(self) -> str | None:
+        """The local version segment of the version.
+
+        >>> print(Version("1.2.3").local)
+        None
+        >>> Version("1.2.3+abc").local
+        'abc'
+        """
+        if self._version.local:
+            return ".".join(str(x) for x in self._version.local)
+        else:
+            return None
+
+    @property
+    def public(self) -> str:
+        """The public portion of the version.
+
+        >>> Version("1.2.3").public
+        '1.2.3'
+        >>> Version("1.2.3+abc").public
+        '1.2.3'
+        >>> Version("1!1.2.3dev1+abc").public
+        '1!1.2.3.dev1'
+        """
+        return str(self).split("+", 1)[0]
+
+    @property
+    def base_version(self) -> str:
+        """The "base version" of the version.
+
+        >>> Version("1.2.3").base_version
+        '1.2.3'
+        >>> Version("1.2.3+abc").base_version
+        '1.2.3'
+        >>> Version("1!1.2.3dev1+abc").base_version
+        '1!1.2.3'
+
+        The "base version" is the public version of the project without any pre or post
+        release markers.
+        """
+        parts = []
+
+        # Epoch
+        if self.epoch != 0:
+            parts.append(f"{self.epoch}!")
+
+        # Release segment
+        parts.append(".".join(str(x) for x in self.release))
+
+        return "".join(parts)
+
+    @property
+    def is_prerelease(self) -> bool:
+        """Whether this version is a pre-release.
+
+        >>> Version("1.2.3").is_prerelease
+        False
+        >>> Version("1.2.3a1").is_prerelease
+        True
+        >>> Version("1.2.3b1").is_prerelease
+        True
+        >>> Version("1.2.3rc1").is_prerelease
+        True
+        >>> Version("1.2.3dev1").is_prerelease
+        True
+        """
+        return self.dev is not None or self.pre is not None
+
+    @property
+    def is_postrelease(self) -> bool:
+        """Whether this version is a post-release.
+
+        >>> Version("1.2.3").is_postrelease
+        False
+        >>> Version("1.2.3.post1").is_postrelease
+        True
+        """
+        return self.post is not None
+
+    @property
+    def is_devrelease(self) -> bool:
+        """Whether this version is a development release.
+
+        >>> Version("1.2.3").is_devrelease
+        False
+        >>> Version("1.2.3.dev1").is_devrelease
+        True
+        """
+        return self.dev is not None
+
+    @property
+    def major(self) -> int:
+        """The first item of :attr:`release` or ``0`` if unavailable.
+
+        >>> Version("1.2.3").major
+        1
+        """
+        return self.release[0] if len(self.release) >= 1 else 0
+
+    @property
+    def minor(self) -> int:
+        """The second item of :attr:`release` or ``0`` if unavailable.
+
+        >>> Version("1.2.3").minor
+        2
+        >>> Version("1").minor
+        0
+        """
+        return self.release[1] if len(self.release) >= 2 else 0
+
+    @property
+    def micro(self) -> int:
+        """The third item of :attr:`release` or ``0`` if unavailable.
+
+        >>> Version("1.2.3").micro
+        3
+        >>> Version("1").micro
+        0
+        """
+        return self.release[2] if len(self.release) >= 3 else 0
+
+
+class _TrimmedRelease(Version):
+    @property
+    def release(self) -> tuple[int, ...]:
+        """
+        Release segment without any trailing zeros.
+
+        >>> _TrimmedRelease('1.0.0').release
+        (1,)
+        >>> _TrimmedRelease('0.0').release
+        (0,)
+        """
+        rel = super().release
+        nonzeros = (index for index, val in enumerate(rel) if val)
+        last_nonzero = max(nonzeros, default=0)
+        return rel[: last_nonzero + 1]
+
+
+def _parse_letter_version(
+    letter: str | None, number: str | bytes | SupportsInt | None
+) -> tuple[str, int] | None:
+    if letter:
+        # We consider there to be an implicit 0 in a pre-release if there is
+        # not a numeral associated with it.
+        if number is None:
+            number = 0
+
+        # We normalize any letters to their lower case form
+        letter = letter.lower()
+
+        # We consider some words to be alternate spellings of other words and
+        # in those cases we want to normalize the spellings to our preferred
+        # spelling.
+        if letter == "alpha":
+            letter = "a"
+        elif letter == "beta":
+            letter = "b"
+        elif letter in ["c", "pre", "preview"]:
+            letter = "rc"
+        elif letter in ["rev", "r"]:
+            letter = "post"
+
+        return letter, int(number)
+
+    assert not letter
+    if number:
+        # We assume if we are given a number, but we are not given a letter
+        # then this is using the implicit post release syntax (e.g. 1.0-1)
+        letter = "post"
+
+        return letter, int(number)
+
+    return None
+
+
+_local_version_separators = re.compile(r"[\._-]")
+
+
+def _parse_local_version(local: str | None) -> LocalType | None:
+    """
+    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
+    """
+    if local is not None:
+        return tuple(
+            part.lower() if not part.isdigit() else int(part)
+            for part in _local_version_separators.split(local)
+        )
+    return None
+
+
+def _cmpkey(
+    epoch: int,
+    release: tuple[int, ...],
+    pre: tuple[str, int] | None,
+    post: tuple[str, int] | None,
+    dev: tuple[str, int] | None,
+    local: LocalType | None,
+) -> CmpKey:
+    # When we compare a release version, we want to compare it with all of the
+    # trailing zeros removed. So we'll use a reverse the list, drop all the now
+    # leading zeros until we come to something non zero, then take the rest
+    # re-reverse it back into the correct order and make it a tuple and use
+    # that for our sorting key.
+    _release = tuple(
+        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
+    )
+
+    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
+    # We'll do this by abusing the pre segment, but we _only_ want to do this
+    # if there is not a pre or a post segment. If we have one of those then
+    # the normal sorting rules will handle this case correctly.
+    if pre is None and post is None and dev is not None:
+        _pre: CmpPrePostDevType = NegativeInfinity
+    # Versions without a pre-release (except as noted above) should sort after
+    # those with one.
+    elif pre is None:
+        _pre = Infinity
+    else:
+        _pre = pre
+
+    # Versions without a post segment should sort before those with one.
+    if post is None:
+        _post: CmpPrePostDevType = NegativeInfinity
+
+    else:
+        _post = post
+
+    # Versions without a development segment should sort after those with one.
+    if dev is None:
+        _dev: CmpPrePostDevType = Infinity
+
+    else:
+        _dev = dev
+
+    if local is None:
+        # Versions without a local segment should sort before those with one.
+        _local: CmpLocalType = NegativeInfinity
+    else:
+        # Versions with a local segment need that segment parsed to implement
+        # the sorting rules in PEP440.
+        # - Alpha numeric segments sort before numeric segments
+        # - Alpha numeric segments sort lexicographically
+        # - Numeric segments sort numerically
+        # - Shorter versions sort before longer versions when the prefixes
+        #   match exactly
+        _local = tuple(
+            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
+        )
+
+    return epoch, _release, _pre, _post, _dev, _local
diff --git a/.venv/Scripts/gunicorn.exe b/.venv/Scripts/gunicorn.exe
new file mode 100644
index 0000000000000000000000000000000000000000..98ab21c82b3efcf76b802b84621fd655ad689da5
Binary files /dev/null and b/.venv/Scripts/gunicorn.exe differ
diff --git a/Layout.docx b/Layout.docx
deleted file mode 100644
index 46216a756e124bf90b1b2e53aa6258bdb5bd32ce..0000000000000000000000000000000000000000
Binary files a/Layout.docx and /dev/null differ
diff --git a/__pycache__/app.cpython-310.pyc b/__pycache__/app.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1ec2a5e5fec65e1c964a72c077f1536856fa497c
Binary files /dev/null and b/__pycache__/app.cpython-310.pyc differ
diff --git a/__pycache__/extensions.cpython-310.pyc b/__pycache__/extensions.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4efbb387f5d663acfe8d8c725968fd4f5a36cdd5
Binary files /dev/null and b/__pycache__/extensions.cpython-310.pyc differ
diff --git a/__pycache__/init.cpython-310.pyc b/__pycache__/init.cpython-310.pyc
index 70c0c8a56337358c5375c6a479abf98f3d879480..e9fecacebe42ff2347f55590f68866080ec235ef 100644
Binary files a/__pycache__/init.cpython-310.pyc and b/__pycache__/init.cpython-310.pyc differ
diff --git a/database b/database
new file mode 100644
index 0000000000000000000000000000000000000000..dc6d6607fc33d2977c736b79df6974887c9554d6
Binary files /dev/null and b/database differ
diff --git a/init.py b/init.py
index 2cc93c15bc4cd41db82d54360ac922fbaeba8c08..401b512ea3f21c21c4182a907daffece19a73a2f 100644
--- a/init.py
+++ b/init.py
@@ -1,21 +1,37 @@
-from flask import Flask, request
-from flask_sqlalchemy import SQLAlchemy
-from sqlalchemy import text
-import os
-from flask import url_for, render_template
-from sqlalchemy import Column, Integer, String, Table, ForeignKey
-from sqlalchemy.orm import Mapped, relationship
-from typing import List
-from flask import Flask, render_template, request, jsonify, redirect, url_for
+
 from werkzeug.utils import secure_filename
+from flask import Flask, request, render_template, request, jsonify, redirect, url_for, flash
+import os
+from flask_sqlalchemy import SQLAlchemy
+from flask_login import UserMixin , login_user, current_user, login_required, logout_user , LoginManager
+
 
+#Configs
 app = Flask(__name__, static_url_path='/static')
 
 basedir = os.path.abspath(os.path.dirname(__file__))
-app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + os.path.join(basedir, 'projects')
+app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + os.path.join(basedir, 'database')
+
+app.config["SECRET_KEY"] = "thisisase"
 
 db = SQLAlchemy(app)
 
+login_manager = LoginManager()
+login_manager.init_app(app)
+
+@login_manager.user_loader
+def loader_user(user_id):
+    return Users.query.get(user_id)
+
+
+#Models definition
+class Users(UserMixin, db.Model):
+    id = db.Column(db.Integer, primary_key=True)
+    username = db.Column(db.String(250), unique=True,
+                         nullable=False)
+    password = db.Column(db.String(250),
+                         nullable=False)
+
 class Projects(db.Model):  # Correctly inherit from db.Model
     __tablename__ = 'projects'  # Explicit table name
     id = db.Column(db.Integer, primary_key=True, autoincrement=True)
@@ -24,80 +40,82 @@ class Projects(db.Model):  # Correctly inherit from db.Model
     photo_url = db.Column(db.String(200), nullable=True)
     description = db.Column(db.String(500), nullable=True)
     link = db.Column(db.String(200), nullable=True)
-
-#upload folder
+    
+class Comments(db.Model):
+    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
+    text = db.Column(db.Text, nullable=False)
+    user_id = db.Column(db.Integer, db.ForeignKey('users.id'), nullable=False)
+    project_id = db.Column(db.Integer, db.ForeignKey('projects.id'), nullable=False)
+    
+    user = db.relationship('Users', backref='comments')
+    project = db.relationship('Projects', backref='comments')
+    
+with app.app_context():
+    db.create_all()
+#End of models definition
+#https://www.geeksforgeeks.org/how-to-add-authentication-to-your-app-with-flask-login/
 
 UPLOAD_FOLDER = os.path.join(basedir, 'static/uploads')
 ALLOWED_EXTENSIONS = {'png', 'jpg', 'jpeg', 'gif'}
 
 app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
-app.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024
-
-@app.route('/upload', methods=['POST']) #Photo upload
-def upload_photo():
-    if request.method =='POST':
-        mrx = request.files['photo_url']
-        file_path = os.path.join(app.config['UPLOAD_FOLDER'], secure_filename(mrx.filename))
-        mrx.save(file_path)
-        file_url = f'/static/uploads/{mrx.filename}'
-        return jsonify({'file_url': file_url})
-       
+app.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024 
 
 @app.route('/')
 #loads template for frontpage as well as display projects
 def frontpage():
-    class Projects:
-        def __init__(self, id, name, proj_type, photo_url, description, link):
-            self.id = id
-            self.name = name
-            self.proj_type = proj_type
-            self.photo_url = photo_url
-            self.description = description
-            self.link = link   
-    
-    sql = f"SELECT * FROM projects"
-    sql = text(sql)
-    
+    projects = Projects.query.all()
     
-            
-    result = db.engine.connect().execute(sql)
-    projects = []
-    
-    for row in result:
-        projects.append(Projects(row[0],row[1], row[2], row[3], row[4], row[5]))
-        
     return render_template('front-page.html', projects=projects)
 
+#loads template for about
+@app.route('/about')
+def about():
+    return render_template('about.html')
 
-@app.route('/projects', methods=['GET', 'POST'])
-
-    
+#loads template for projects
+@app.route('/projects', methods=['GET', 'POST']) 
 def projects():
-    class Projects:
-        def __init__(self, id, name, proj_type, photo_url, description, link):
-            self.id = id
-            self.name = name
-            self.proj_type = proj_type
-            self.photo_url = photo_url
-            self.description = description
-            self.link = link   
+    projects = Projects.query.all()
     
-    sql = f"SELECT * FROM projects"
-    sql = text(sql)
+    # Create a dictionary to store comments organized by project_id
+    comments_dict = {}
     
+     # Get all comments with their associated users
+    comments_query = db.session.query(Comments, Users)\
+        .join(Users, Comments.user_id == Users.id)\
+        .order_by(Comments.project_id)\
+        .all()
+    print(comments_query)
     
+    # Organize comments by project_id
+    for comment, user in comments_query:
+        if comment.project_id not in comments_dict:
+            comments_dict[comment.project_id] = []
             
-    result = db.engine.connect().execute(sql)
-    projects = []
-    
-    for row in result:
-        projects.append(Projects(row[0],row[1], row[2], row[3], row[4], row[5]))
+        comment_data = {
+            'id': comment.id,
+            'text': comment.text,
+            'username': user.username,
+            'user_id': comment.user_id,
+            'project_id': comment.project_id
+        }
+        comments_dict[comment.project_id].append(comment_data)
         
-    return render_template('projects.html' , projects=projects)
+    return render_template('projects.html' , projects=projects,comments=comments_dict)
 
-@app.route('/add_project', methods=['POST'])
+#Section for uploading photo
+@app.route('/upload', methods=['POST']) #Photo upload
+def upload_photo():
+    if request.method =='POST':
+        mrx = request.files['photo_url']
+        file_path = os.path.join(app.config['UPLOAD_FOLDER'], secure_filename(mrx.filename))
+        mrx.save(file_path)
+        file_url = f'/static/uploads/{mrx.filename}'
+        return jsonify({'file_url': file_url})
 
-    
+#Section for adding projects
+@app.route('/add_project', methods=['POST'])  
 def add_project():
     obj = request.json
     print(obj)
@@ -111,13 +129,90 @@ def add_project():
     db.session.add(new_project)
     db.session.commit()
     
-    return jsonify({'message': 'Project added successfully'}), 201
+    return jsonify({
+        'id': new_project.id,
+        'name': new_project.name,
+        'proj_type': new_project.proj_type,
+        'description': new_project.description,
+        'photo_url': new_project.photo_url,
+        'link': new_project.link
+    })
+
+#section to delete projects
+@app.route('/delete_project', methods=['POST'])
+def delete_project():
+    if request.method =='POST':
+        project_id = request.form['project_id']
+        project = Projects.query.get(project_id)
+        db.session.delete(project)
+        db.session.commit()
+        flash('Project deleted successfully!', 'success')
+    else:
+        flash('Project not deleted!', 'error')
+        return redirect(url_for('projects'))
     
+    return redirect(url_for('projects'))  
 
-@app.route('/login')
+#a section to add comments
+@app.route('/comment', methods=['POST'])
+def comment():
+    if request.method =='POST':
+        if not current_user.is_authenticated:
+            flash('You need to be logged in to comment','error')
+            return redirect(url_for('projects'))
+        
+        comment = Comments( 
+                            text = request.form['comment'],
+                            user_id = current_user.id,
+                            project_id = request.form['project_id'])
+        
+        db.session.add(comment)   
+        
+        db.session.commit()
+        flash('Comment added successfully!', 'success')
+    return redirect(url_for('projects'))
+
+#authentication
+
+@app.route('/register', methods=['GET', 'POST'])
+def register():
+    print(f"Request method: {request.method}")
+    if request.method == 'POST':
+        user = Users(username=request.form.get('username'),
+                     password=request.form.get('password'))
+        
+        db.session.add(user)
+        
+        db.session.commit()
+        
+        flash('User registered successfully!', 'success')
+        return redirect(url_for('login'))
+    return render_template('register.html')
+    
+@app.route('/login', methods=["GET", "POST"])
 def login():
+    if request.method == "POST":
+        user = Users.query.filter_by(
+            username=request.form.get('username')).first()
+        
+        if user.password == request.form.get('password'):
+            login_user(user)
+            flash('You were successfully logged in!', 'success')
+            print('Flashed: success')
+            return redirect(url_for('projects'))
+        else:
+            flash('Login failed! Please check your credentials and try again.', 'error')
+            print('Flashed: error')
+            return redirect(url_for('login'))
     return render_template('login.html')
 
+@app.route('/logout')
+def logout():
+    logout_user()
+    flash('You were successfully logged out!', 'success')
+    return redirect(url_for('login'))
+#end of authentication
+
 
         
 if __name__ == '__main__':
diff --git a/projects b/projects
deleted file mode 100644
index 33788f3fee5571a6cf93a2257b2a745c6af2186f..0000000000000000000000000000000000000000
Binary files a/projects and /dev/null differ
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..1e6c54b5f77c9197fa6c6ce76e538f65a7a8fef7
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,15 @@
+blinker==1.9.0
+click==8.1.7
+colorama==0.4.6
+Flask==3.1.0
+Flask-Login==0.6.3
+Flask-SQLAlchemy==3.1.1
+greenlet==3.1.1
+gunicorn==23.0.0
+itsdangerous==2.2.0
+Jinja2==3.1.4
+MarkupSafe==3.0.2
+packaging==24.2
+SQLAlchemy==2.0.36
+typing_extensions==4.12.2
+Werkzeug==3.1.3
diff --git a/sqlite/projects b/sqlite/projects
deleted file mode 100644
index d984962ed57bb3595cad57176119803906a67c98..0000000000000000000000000000000000000000
Binary files a/sqlite/projects and /dev/null differ
diff --git a/sqlite/sqldiff.exe b/sqlite/sqldiff.exe
deleted file mode 100644
index 2ca707f0961319446b974e97fe0da93cfc0a5b3b..0000000000000000000000000000000000000000
Binary files a/sqlite/sqldiff.exe and /dev/null differ
diff --git a/sqlite/sqlite3.exe b/sqlite/sqlite3.exe
deleted file mode 100644
index a6ae70626d4477fb330f4f7f748214c6b0e33139..0000000000000000000000000000000000000000
Binary files a/sqlite/sqlite3.exe and /dev/null differ
diff --git a/sqlite/sqlite3_analyzer.exe b/sqlite/sqlite3_analyzer.exe
deleted file mode 100644
index 06039a17cb397b0b3dd99e9333d5d41eb8c7f1df..0000000000000000000000000000000000000000
Binary files a/sqlite/sqlite3_analyzer.exe and /dev/null differ
diff --git a/sqlite/sqlite3_rsync.exe b/sqlite/sqlite3_rsync.exe
deleted file mode 100644
index be90ef3e215f54b13f212d2b63857a75acd65c6a..0000000000000000000000000000000000000000
Binary files a/sqlite/sqlite3_rsync.exe and /dev/null differ
diff --git a/static/CSS_styles/about.css b/static/CSS_styles/about.css
new file mode 100644
index 0000000000000000000000000000000000000000..7bfd4d9746acc4a909f290fc832f94c5ad913fcb
--- /dev/null
+++ b/static/CSS_styles/about.css
@@ -0,0 +1,14 @@
+.container {
+    max-width: 900px;
+    margin:0 auto;
+    padding: 20px;
+}
+.education-info {
+    margin-bottom: 3em;
+}
+.work-experience-info {
+    margin-bottom: 3em;
+}   
+.skills-info {
+    margin-bottom: 3em;
+}   
\ No newline at end of file
diff --git a/static/CSS_styles/front-page.css b/static/CSS_styles/front-page.css
index 503344854a0e9fa7a9d4b4487539c491885bc6da..44dc7fdaffd37ac9b0fe4528e95cc4e0878abdf6 100644
--- a/static/CSS_styles/front-page.css
+++ b/static/CSS_styles/front-page.css
@@ -1,8 +1,11 @@
 
 /*style for photo*/
 .main-page-image{
-    width: 17em;
-    height: 10%;
+    width: 25em;
+    height: 40em;
+}
+.main-img-cont{
+    height: 20%;
 }
 /* Grid container */
 .container {
@@ -19,7 +22,7 @@
 /* Main page text */
 .main-page-text {
     flex: 2;
-    margin-right: 20px;
+    margin-right: 10px;
 }
 
 /* Main page image */
@@ -39,8 +42,18 @@
     display: flex;
     align-items: center;
     margin-bottom: 30px;
+    gap: 0;
+    flex:1;
+
+
 }
 .project-image {
-    width: 5em;
-    height: 5em;
+    width: 15em;
+    height: 15em;
+    border-radius: 15px;
+    box-shadow: #555 0px 0px 10px;
+}
+p {
+    margin-bottom: 3em;
+    margin-top: 3em;
 }
\ No newline at end of file
diff --git a/static/CSS_styles/layout.css b/static/CSS_styles/layout.css
index a7ce0d700c8c8c848c07786b7a71dc78411ec704..bb62533bc0045e98ac13734ff21da0890f6c4063 100644
--- a/static/CSS_styles/layout.css
+++ b/static/CSS_styles/layout.css
@@ -1,9 +1,21 @@
+ 
+
 body {
+    font-family: 'Delius', serif;
     margin: 0;
+    color : #4c4949;
+    background-image: url('/static/images/fabric_1.png');
+    background-size:auto;
+    background-position: center;
+
+   
 }
+
+
+
 /* Header styling */
 .navbar {
-    background-color: #f1f1f1;
+    background-color : none;
     overflow: hidden;
     display: flex;
     justify-content: space-between; /* Add this line */
@@ -11,6 +23,7 @@ body {
     padding:1em 1em;
     margin: 0;
     position: sticky;
+    
 }
 .navbar_items{
     display: flex;
@@ -18,10 +31,12 @@ body {
     list-style:none;
     padding: 0;
     margin: 0;
+    color: #555;
+    font-size: 1.5em;
 }
 
 .navbar a {
-    color: #333;
+    color: #555;
     text-decoration: none;
 }
 
@@ -42,8 +57,7 @@ body {
 }
 /* Footer styling */
 .footer {
-    background-color: #f1f1f1; /* Dark background color */
-    color: black; /* White text color */
+    background:none ; /* Dark background color */
     text-align: center; /* Center align text */
     padding: 0.1em; /* Padding around the footer */
     position: static; /* Position the footer */
@@ -73,6 +87,18 @@ body {
     font-weight: bold;
     margin-bottom: 0.5em; /* Space between title and contact list */
   }
+.error {
+    color: red;
+    margin-bottom: 1em;
+    display: flex;
+    justify-content: center;
+}
+.success {
+    color: green;
+    margin-bottom: 1em;
+    display: flex;
+    justify-content: center;
+}
 
 
 
diff --git a/static/CSS_styles/login.css b/static/CSS_styles/login.css
index 0d7ac2393fcfa23d366acf0966abb463497302d8..2a0c47e42b775fe3f41c8b467ec0814fae7a7d2b 100644
--- a/static/CSS_styles/login.css
+++ b/static/CSS_styles/login.css
@@ -1,10 +1,17 @@
+body{
+    font-family: 'Delius', serif;
+    margin: 0;
+    color : #4c4949;
+    
+    background-size:auto;
+    background-position: center;
+}
 .login-header {
     width: 100%;
     height: 100%;
     display: flex;
     justify-content: center;
     align-items: center;
-    background-color: rgb(228, 219, 219);
     flex-direction: column;
 }
 .login-body {
@@ -13,8 +20,22 @@
     display: flex;
     justify-content: center;
     align-items: center;
-    background-color: rgb(228, 219, 219);
     flex-direction: column;
-    border-top: 1em solid rgb(228, 219, 219);
+    border-top: 1em;
     
-}
\ No newline at end of file
+}
+h1 {
+    font-size: 2em;
+    margin-top: 3em;
+    margin-bottom: 3em;
+}
+input {
+    margin-bottom: 2em;
+}
+a {
+    margin-bottom: 1em;
+}
+button {
+    margin-bottom: 2em;
+}
+
diff --git a/static/CSS_styles/projects.css b/static/CSS_styles/projects.css
index cf5bf189db2c981f2acdeb0fe633066430e7c39b..6437db8e60cc043a74c3427467a00afee3df74af 100644
--- a/static/CSS_styles/projects.css
+++ b/static/CSS_styles/projects.css
@@ -1,21 +1,28 @@
-.container {
+.body {
+    font-family: 'Gill Sans', 'Gill Sans MT', Calibri, 'Trebuchet MS', sans-serif;
+}
+.each-proj-container {
     display: flex;
     justify-content: space-between;
-    border: 1px solid black;
+    margin-bottom: 0.5em;
+    margin-top: 5em;
 }
 .main-body {
     width: 60%;
     margin: auto;
-    border: 1px solid red;
 }
 
 .project-image {
     width: 100%;
+    border-radius: 15px;
+    box-shadow: #555 0px 0px 10px;
 }
 
-.project-desc {
+.description {
     margin-bottom: 0.5em;
     font-size: 1em;
+    background-color: #E8E8E7;
+    background: none;
 }
 
 /* Add project button */
@@ -26,11 +33,11 @@
     margin-bottom: 3%;
 }
 figure p {
-    font-size: 9px;
-    text-align: center;
+    font-size: 0.5em;
+    text-align: left;
     vertical-align: middle;
 }
-/*pop up modal*/
+/*pop up modal for adding project*/
 .modal {
     display: none;
     position: fixed;
@@ -42,13 +49,13 @@ figure p {
     overflow: auto;
     background-color: rgb(0,0,0);
     background-color: rgba(0,0,0,0.4);
+    
 }
 .modal-content {
-    background-color: #fefefe;
+    
     margin: 15% auto;
     padding: 20px;
-    border: 1px solid #888;
-    width: 60%;
+    width: 50%;
     display: flex;
     flex-direction:row;
 }
@@ -64,4 +71,47 @@ figure p {
     color: black;
     text-decoration: none;
     cursor: pointer;
-  }
\ No newline at end of file
+  }
+/*comment section CSS*/
+#comments-header{
+    display: flex;
+    justify-content: left;
+}
+.comment-box{
+    display: flex;
+    flex-direction: row;
+}
+.user-comments{
+    display: flex;
+    flex-direction: column;
+    background: none;
+    border-radius: 4px;
+    box-shadow: #c02828 4px;
+    
+    
+}
+.each-comment{
+    margin-left: 0.1em;
+    margin-top: 0.4em;
+    border-bottom: #888 solid 1px;
+}
+
+
+form {
+    background-color: #E8E8E7;
+}
+
+.comments-section{
+    box-shadow: #555 0px 0px 6px;
+}
+.comments-section textarea{
+    width: 95%;
+    height: 80px;
+    background-color: #E8E8E7;
+    border-radius: 2px;
+}
+.delete-form{
+    display: flex;
+    justify-content: flex-end;
+}
+
diff --git a/static/CSS_styles/register.css b/static/CSS_styles/register.css
new file mode 100644
index 0000000000000000000000000000000000000000..b53404121dacb5ec2b2b6dabf5e2012cf4b5b925
--- /dev/null
+++ b/static/CSS_styles/register.css
@@ -0,0 +1,34 @@
+.login-header {
+    width: 100%;
+    height: 100%;
+    display: flex;
+    justify-content: center;
+    align-items: center;
+    background:none;
+    flex-direction: column;
+}
+.register-body {
+    width: 100%;
+    height: 100%;
+    display: flex;
+    justify-content: center;
+    align-items: center;
+    background:none;
+    flex-direction: column;
+    border-top: 1em;
+    
+}
+h1 {
+    font-size: 2em;
+    margin-top: 4em;
+    margin-bottom: 3em;
+}
+input {
+    margin-bottom: 2em;
+}
+a {
+    margin-bottom: 1em;
+}
+button {
+    margin-bottom: 2.5em;
+}
diff --git a/static/cv.pdf b/static/cv.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..5810f585f9ab0e414bb63af6d5c8df70a4e4a367
Binary files /dev/null and b/static/cv.pdf differ
diff --git a/static/images/fabric_1.png b/static/images/fabric_1.png
new file mode 100644
index 0000000000000000000000000000000000000000..f4d34329e45dba473b2377b72386598952ab752a
Binary files /dev/null and b/static/images/fabric_1.png differ
diff --git a/static/images/my_photo.jpg b/static/images/my_photo.jpg
deleted file mode 100644
index 15e7742d95852c9b9100524249869b0d55dd45cd..0000000000000000000000000000000000000000
Binary files a/static/images/my_photo.jpg and /dev/null differ
diff --git a/static/images/my_photo.png b/static/images/my_photo.png
new file mode 100644
index 0000000000000000000000000000000000000000..4cfcd60d9c5e581adcc44aa3bff31470b288e1bf
Binary files /dev/null and b/static/images/my_photo.png differ
diff --git a/static/images/project1.png b/static/images/project1.png
deleted file mode 100644
index 58b7d2e95f67fcb218640d4b063ceb3c869c6beb..0000000000000000000000000000000000000000
Binary files a/static/images/project1.png and /dev/null differ
diff --git a/static/javascript/add_proj.js b/static/javascript/add_proj.js
index 78e1580790fa2bf26f06d16929d9591888411ef8..f61dc0a2a691385cdefaedd4125ae9a5a4ab4389 100644
--- a/static/javascript/add_proj.js
+++ b/static/javascript/add_proj.js
@@ -20,7 +20,7 @@
     
  /* submit form */
 const form = document.getElementById("proj_form");
-const responseMessage = document.getElementById("responseMessage");
+const project_container = document.getElementById("project-container");
 
 form.addEventListener('submit', async (e) => {
     e.preventDefault();
@@ -72,12 +72,50 @@ form.addEventListener('submit', async (e) => {
             console.error("Failed to add project");
             return;
         }
+
+
+
+        const new_proj = await add_proj.json();
+        const new_proj_html = `
+        <figure>
+            <div class="each-proj-container">
+                <figcaption class="project-desc"><strong>${new_proj.name}</strong></figcaption>
+                <figcaption class="project-desc">${new_proj.proj_type}</figcaption>
+            </div>
+            <img src="${new_proj.photo_url}" alt="${new_proj.name}" class="project-image"></img>
+            <div class="project-desc">
+                <h6>Project description:</h6>
+                <p class="description">${new_proj.description}</p>
+            </div>
+            <div>
+                <h6>Code accessed here:</h6>
+                <p class="description"><a href=${project.link}>${project.link}</a></p>
+            </div>
+            <div id="comments-header">
+                <h6>Comments:</h6>
+            </div>
+            <form action="/comment" method="POST" class="comments-section">
+                <input type="hidden" name="project_id" value="${new_proj.id}">
+                <div class="comment-box">
+                    <textarea name="comment" placeholder="Add a comment..." required></textarea>
+                    <button type="submit">Submit</button>
+                </div>
+            </form>
+            <div class="user-comments">
+            </div>
+        </figure>
+        `;
+        project_container.insertAdjacentHTML('beforeend', new_proj_html);
+
         form.reset();
         modal.style.display = "none";
+
     } catch (error) {
         console.error("Error:", error);
     }
 });
+
+
     
 
         
diff --git a/static/uploads/cat_mountain.jpg b/static/uploads/cat_mountain.jpg
deleted file mode 100644
index cce09b70c35f295b499d3c4574da2dd6f9ff413e..0000000000000000000000000000000000000000
Binary files a/static/uploads/cat_mountain.jpg and /dev/null differ
diff --git a/static/uploads/firstWebsite.png b/static/uploads/firstWebsite.png
new file mode 100644
index 0000000000000000000000000000000000000000..9bbf61923bdffc6d470f651100842c808cf6638a
Binary files /dev/null and b/static/uploads/firstWebsite.png differ
diff --git a/static/uploads/pythonChallenges.png b/static/uploads/pythonChallenges.png
new file mode 100644
index 0000000000000000000000000000000000000000..b90e32cfa346af0206a2826e12ceedb98b9462d4
Binary files /dev/null and b/static/uploads/pythonChallenges.png differ
diff --git a/templates/about.html b/templates/about.html
new file mode 100644
index 0000000000000000000000000000000000000000..9d7cb581c6cfd425509412a2126290d16e8561ad
--- /dev/null
+++ b/templates/about.html
@@ -0,0 +1,54 @@
+{% extends "layout.html" %}
+
+{% block extra_CSS %}
+<link rel ="stylesheet" href="{{url_for('static', filename='CSS_styles/about.css')}}">
+{% endblock %}
+
+{% block content %}
+<div class="container">
+    <div class ="education">
+        <div class="education-info">
+            <h2>Education</h2>
+            <p>MSC in Computing</p>
+            <p>Cardiff University</p>
+            <p>2024-Present</p>
+            <p>Key Modules:</p>
+            <p>Fundamentals of Programming, Data Structures and Algorithms, Cloud Computing, Software engineering</p>
+        </div>
+        <div class="education-info">
+            <p>BA Business and Management (Project Management)</p>
+            <p>Bournemouth University</p>
+            <p>2021-2024</p>
+            <p>2:2</p>
+            <p>Key Modules:</p>
+            <p>Project Management, Business Analysis, Supply Chain Management, Business Strategy</p>
+        </div>
+    </div>
+    <div class ="work-experience">
+        <h2>Work/Project Experience</h2>
+        <div class="work-experience-info">
+            <p><strong>Event co-ordinator</strong></p>
+            <p>Depot Company – Cardiff | Present</p>
+            <li>Helped plan and execute multiple successful events </li>
+            <li>Ensure venues are prepared according to client’s specifications</li>
+            <li>Ensure all staff are aware of their responsibilities during event and keep to schedule</li>
+        </div>
+        <div class="work-experience-info">
+            <p><strong>Research project (Use of AI in agile software development projects)</strong></p>
+            <p>Bournemouth University | 2023</p>
+            <li>Carried out Qualitative research with industry professionals with varying degree of seniority</li>
+            <li>Carried out data analysis from respondents and visualised data using power bi </li>
+            <li>Presented findings to academic staff and industry professionals</li>
+        </div>
+    </div>
+    <div class ="skills">
+        <h2>Skills</h2>
+        <div class="skills-info">
+            <p><strong>Programming Languages:</strong> Python, JavaScript, flask, HTML, CSS, Node.js</p>
+            <p><strong>Tools:</strong> Visual Studio, Jupyter Notebook, Power BI</p>
+            <p><strong>Soft Skills:</strong> Teamwork, Communication, Problem Solving, Time Management</p>
+        </div>
+        <h2>CV available to download here:</h2>
+        <a href="{{url_for('static', filename='cv.pdf')}}" download>Download CV</a>
+</div>
+{% endblock %}
\ No newline at end of file
diff --git a/templates/front-page.html b/templates/front-page.html
index 1507866c120df20cd1737354ad9158f3530da7e3..539c5322195eb8fa31b1d129fa4f8be2e503c152 100644
--- a/templates/front-page.html
+++ b/templates/front-page.html
@@ -9,22 +9,28 @@
 <div class="container">
     <div class="about-section">
         <div class="main-page-text">
-            <p>Hi, I'm Kacper Polczynski</p>
+            <h1>Hi, I'm Kacper Polczynski</h1>
+            <p>I’m a driven Project Management graduate and Computing Master’s student with a passion for leveraging technology to solve complex problems and drive impactful change.</p>
+            <p>With a strong foundation in business IT and hands-on experience in project coordination, data analysis, and cyber resilience, I’m excited to showcase my journey and contributions to dynamic industries like healthcare, technology, and financial services.</p>                
+            <p>Explore my portfolio to discover:</p>
+            <li>My latest projects and collaborations</li>
+            <li>Skills and Experience</li>
+            <li>Professional and academic background</li>
+            <p>Let's connect and explore how we can create something exceptional together!</p>
         </div>
-        <div>
-            <img src="../static/images/my_photo.jpg" alt="Kacper Polczynski" class="main-page-image">
+        <div class ="main-img-cont">
+            <img src="../static/images/my_photo.png" alt="Kacper Polczynski" class="main-page-image">
         </div>
     </div>  
     <h2>Current Projects</h2>
-        <div class="current-project">
-{% for project in projects %}
-        <figure>
-            <div class="container">
-                <img src="{{ project.photo_url }}" alt="{{project.name}}" class="project-image">
-                <figcaption class="project-name">{{project.name}}</figcaption>  
-            </div>
-        </figure>
-{% endfor %}
+</div>
+<div class="current-project">
+        {% for project in projects[:4] %}
+                <figure>
+                    <img src="{{ project.photo_url }}" alt="{{project.name}}" class="project-image">
+                    <figcaption class="project-name">{{project.name}}</figcaption> 
+                </figure>
+        {% endfor %}
 </div>
 
 {% endblock %}
\ No newline at end of file
diff --git a/templates/layout.html b/templates/layout.html
index d9f5eec01b95a247abad778d76597d7d76803180..2f34871d396893df3732c27c0dd214d731fab1a6 100644
--- a/templates/layout.html
+++ b/templates/layout.html
@@ -7,6 +7,9 @@
         <meta name="description" content="">
         <meta name="viewport" content="width=device-width, initial-scale=1">
         <link rel="stylesheet" type="text/css" href="../static/CSS_styles/layout.css">
+        <link rel="preconnect" href="https://fonts.googleapis.com">
+        <link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
+        <link href="https://fonts.googleapis.com/css2?family=Delius&display=swap" rel="stylesheet">
         {% block extra_CSS %}
         {% endblock %}
         <script src="https://kit.fontawesome.com/f79cc66706.js" crossorigin="anonymous"></script>
@@ -16,9 +19,13 @@
         <nav class="navbar">
             <ul class="navbar_items">
                 <li class="home"><a href="{{url_for('frontpage') }}" class="home"><i class="fa-solid fa-house"></i></a></li>
-                <li><a href="?">About</a></li>
+                <li><a href="{{url_for('about') }}">About</a></li>
                 <li><a href="{{url_for('projects') }}">Projects</a></li>
+                {% if current_user.is_authenticated%}
+                <li><a href="{{url_for('logout') }}">logout</a></li>
+                {%else%}
                 <li><a href="{{url_for('login') }}">login</a></li>
+                {%endif%}
             </ul>
         </nav>
 
diff --git a/templates/login.html b/templates/login.html
index 87b1b28fa74b19a35e9881233b63ffb7e6570a17..de813defab88ef543ac2dbce08256a4ae93b4793 100644
--- a/templates/login.html
+++ b/templates/login.html
@@ -4,16 +4,38 @@
 {% endblock %}
 {% block content %}
 <div class="login-container">
-    <form action="{{url_for('login')}}">
         <div class="login-header">
             <h1>Login</h1>
         </div>
-        <div class="login-body">
-            <label for="username">Username:</label>
-            <input type="text" id="username" name="username" required>
-            <label for="password">Password:</label>
-            <input type="password" id="password" name="password" required>
-            <button type="submit">Login</button>
+        {% with messages = get_flashed_messages(category_filter=["success"]) %}
+        {% if messages %}
+            <div class="success">
+            {% for message in messages %}
+                    <h5>{{ message }}</h5>
+            {% endfor %}
+            </div>
+        {% endif %}
+        {% endwith %}
+
+{% with messages = get_flashed_messages(category_filter=["error"]) %}
+    {% if messages %}
+        <div class="error">
+            {% for message in messages %}
+                <h5>{{ message }}</h5>
+            {% endfor %}
+        </div>
+    {% endif %}
+{% endwith %}
+        <div>
+            <form action="/login" method="POST" class="login-body">
+                <label for="username">Username:</label>
+                <input type="text" id="username" name="username" required>
+                <label for="password">Password:</label>
+                <input type="password" id="password" name="password" required>
+                <a href="{{url_for('register')}}">Don't have an account? Register here</a>
+                <button type="submit">Login</button>
+            </form>
+        </div>
         </div>
     </form>
 {% endblock %}
\ No newline at end of file
diff --git a/templates/projects.html b/templates/projects.html
index e33179eeb03b07e19ef5ba64e3cdbf11f89e1613..f9a9fe16e95f5fb243871455885b79f945e7487b 100644
--- a/templates/projects.html
+++ b/templates/projects.html
@@ -4,9 +4,33 @@
 {% endblock %}
 {% block content %}
 <div class="main-body">
+    {% with messages = get_flashed_messages(category_filter=["error"])%}
+        {% if messages %}
+            <div id="error">
+                {% for message in messages %}
+                    <h5>{{message}}</h5>
+                {% endfor %}
+            </div>
+        {% endif %}
+    {% endwith %}
+    
+    {% with messages = get_flashed_messages(category_filter=["success"])%}
+        {% if messages %}
+            <div class="success">
+                {% for message in messages %}
+                    <h5>{{message}}</h5>
+                {% endfor %}
+            </div>
+        {% endif %}
+    {% endwith %}
+
+    
+
+    {% if current_user.username == 'admin' %}
     <div class="add-btn">
         <button id="add-proj-btn">Add project</button>
     </div>
+    {% endif %}
     <!-- Pop Up to Add Projects-->
     <div id ="myModal" class="modal">
         <div class = "modal-content">
@@ -23,26 +47,51 @@
                 <ul><label for="photo_url">Photo url:</label>
                 <input type="file" id="photo_url" name="photo_url" accept="image/*" required></ul>
                 <ul><button type="submit">Add Project</button></ul>
+            </form>
         </div>
     </div>
-    <div id="responseMessage"></div>
+    <div id="project-container"></div>
     <script src="../static/javascript/add_proj.js"></script>
-
     {% for project in projects %}
     <figure>
-        <div class="container">
-            <figcaption class="project-desc">{{project.name}}</figcaption>
-            <figcaption class="project-desc">{{project.proj_type}}</figcaption>
+        <div class="each-proj-container">
+            <figcaption class="project-desc"><strong>{{project.name}}</figcaption>
+            <figcaption class="project-desc">{{project.proj_type}}</strong></figcaption>
         </div>
         <img src="{{ project.photo_url }}" alt="{{project.name}}" class="project-image">
+        {% if current_user.username == 'admin' %}
+        <form action="/delete_project" method="POST" class="delete-form" onsubmit="return confrim('Are you sure you want to delete this project?');">
+            <input type="hidden" name="project_id" value="{{project.id}}">
+            <button type="submit" class="delete-proj-btn">Delete Project</button>
+        </form>
+        {% endif %}
         <div class="project-desc">
-            <p>Project description:{{project.description}}</p>
+            <h6>Project description:</h6>
+            <p class="description">{{project.description}}</p>
         </div>
         <div>
-            <p>Code accessed here:{{project.link}}</p>
+            <h6>Code accessed here:</h6>
+            <p class="description"><a href={{project.link}}>{{project.link}}</a></p>
+        </div>
+        <div id="comments-header">
+            <h6>Comments:</h6>
         </div>
-        <div class="comments">
-            <p>Comments:</p>
+        <form action="/comment" method="POST" class="comments-section">
+            <input type="hidden" name="project_id" value="{{project.id}}">
+            <div class="comment-box">
+                <textarea name="comment" placeholder="Add a comment..."></textarea>
+                <button type="submit" >Submit</button>
+            </div>
+        </form>
+        <div class="user-comments">
+        {% if comments.get(project.id) %} 
+            {% for comment in comments[project.id] %}
+            <div class ="each-comment">
+                <strong>{{comment.username}}</strong>
+                <p>{{comment.text}}</p>
+            </div>
+            {% endfor%}
+        {% endif%}
         </div>
     </figure>
     {% endfor%}
diff --git a/templates/register.html b/templates/register.html
new file mode 100644
index 0000000000000000000000000000000000000000..d384d892fc5996bd99b3420d0e7fdc3325f7cced
--- /dev/null
+++ b/templates/register.html
@@ -0,0 +1,24 @@
+{% extends "layout.html" %}
+{% block extra_CSS %}
+<link rel="stylesheet" type="text/css" href="../static/CSS_styles/register.css">
+{% endblock %}
+{% block content %}
+<div class="login-container">
+        <div class="login-header">
+            <h1>Register</h1>
+        </div>
+        <div>
+            <form action="/register" method='POST' class="register-body">
+                <label for="username">Username:</label>
+                <input type="text" id="username" name="username" required>
+                <label for="password">Password:</label>
+                <input type="password" id="password" name="password" required>
+                <button type="submit">register</button>
+            </form>
+
+        <script src="../static/javascript/register.js"></script>
+        </div>
+        </div>
+    </form>
+{% endblock %}
+ 
\ No newline at end of file